/* * Copyright (c) 2003, 2022, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. *
*/
// This flag is global as the constructor does not reset it:
bool VM_RedefineClasses::_has_redefined_Object = false;
// Starts at 0; NOTE(review): appears to hand out ids for redefinition
// operations -- usage is not visible in this chunk, confirm in full file.
u8 VM_RedefineClasses::_id_counter = 0;
// If any of the classes are being redefined, wait
// Parallel constant pool merging leads to indeterminate constant pools.
// Marks every class in _class_defs as being redefined by this thread and
// blocks (on RedefineClasses_lock) while any of them is being redefined
// by another thread.
void VM_RedefineClasses::lock_classes() {
  JvmtiThreadState *state = JvmtiThreadState::state_for(JavaThread::current());
  GrowableArray<Klass*>* redef_classes = state->get_classes_being_redefined();

  MonitorLocker ml(RedefineClasses_lock);

  // Lazily allocate the per-thread list of classes this thread is redefining.
  if (redef_classes == NULL) {
    redef_classes = new (mtClass) GrowableArray<Klass*>(1, mtClass);
    state->set_classes_being_redefined(redef_classes);
  }

  bool has_redefined;
  do {
    has_redefined = false;
    // Go through classes each time until none are being redefined. Skip
    // the ones that are being redefined by this thread currently. Class file
    // load hook event may trigger new class redefine when we are redefining
    // a class (after lock_classes()).
    for (int i = 0; i < _class_count; i++) {
      InstanceKlass* ik = get_ik(_class_defs[i].klass);
      // Check if we are currently redefining the class in this thread already.
      if (redef_classes->contains(ik)) {
        assert(ik->is_being_redefined(), "sanity");
      } else {
        if (ik->is_being_redefined()) {
          // Another thread owns this class; wait for its notify_all() and
          // rescan the whole list from the top.
          ml.wait();
          has_redefined = true;
          break; // for loop
        }
      }
    }
  } while (has_redefined);

  // All classes are free (or already ours): claim them.
  for (int i = 0; i < _class_count; i++) {
    InstanceKlass* ik = get_ik(_class_defs[i].klass);
    redef_classes->push(ik); // Add to the _classes_being_redefined list
    ik->set_is_being_redefined(true);
  }
  ml.notify_all();
}
// Releases the per-class "being redefined" marks taken by lock_classes(),
// popping this operation's classes (in reverse order) off the thread's
// _classes_being_redefined list and waking any waiting redefiners.
void VM_RedefineClasses::unlock_classes() {
  JvmtiThreadState *state = JvmtiThreadState::state_for(JavaThread::current());
  GrowableArray<Klass*>* redef_classes = state->get_classes_being_redefined();
  assert(redef_classes != NULL, "_classes_being_redefined is not allocated");

  MonitorLocker ml(RedefineClasses_lock);

  // Walk backwards so pops come off the list in the reverse of push order.
  for (int i = _class_count - 1; i >= 0; i--) {
    InstanceKlass* def_ik = get_ik(_class_defs[i].klass);
    if (redef_classes->length() > 0) {
      // Remove the class from _classes_being_redefined list
      Klass* k = redef_classes->pop();
      assert(def_ik == k, "unlocking wrong class");
    }
    assert(def_ik->is_being_redefined(), "should be being redefined to get here");

    // Unlock after we finish all redefines for this class within
    // the thread. Same class can be pushed to the list multiple
    // times (not more than once by each recursive redefinition).
    if (!redef_classes->contains(def_ik)) {
      def_ik->set_is_being_redefined(false);
    }
  }
  ml.notify_all();
}
for (int i = 0; i < _class_count; i++) { if (_class_defs[i].klass == NULL) {
_res = JVMTI_ERROR_INVALID_CLASS; returnfalse;
} if (_class_defs[i].class_byte_count == 0) {
_res = JVMTI_ERROR_INVALID_CLASS_FORMAT; returnfalse;
} if (_class_defs[i].class_bytes == NULL) {
_res = JVMTI_ERROR_NULL_POINTER; returnfalse;
}
oop mirror = JNIHandles::resolve_non_null(_class_defs[i].klass); // classes for primitives, arrays, and hidden classes // cannot be redefined. if (!is_modifiable_class(mirror)) {
_res = JVMTI_ERROR_UNMODIFIABLE_CLASS; returnfalse;
}
}
// Start timer after all the sanity checks; not quite accurate, but // better than adding a bunch of stop() calls. if (log_is_enabled(Info, redefine, class, timer)) {
_timer_vm_op_prologue.start();
}
lock_classes(); // We first load new class versions in the prologue, because somewhere down the // call chain it is required that the current thread is a Java thread.
_res = load_new_class_versions(); if (_res != JVMTI_ERROR_NONE) { // free any successfully created classes, since none are redefined for (int i = 0; i < _class_count; i++) { if (_scratch_classes[i] != NULL) {
ClassLoaderData* cld = _scratch_classes[i]->class_loader_data(); // Free the memory for this class at class unloading time. Not before // because CMS might think this is still live.
InstanceKlass* ik = get_ik(_class_defs[i].klass); if (ik->get_cached_class_file() == _scratch_classes[i]->get_cached_class_file()) { // Don't double-free cached_class_file copied from the original class if error.
_scratch_classes[i]->set_cached_class_file(NULL);
}
cld->add_to_deallocate_list(InstanceKlass::cast(_scratch_classes[i]));
}
} // Free os::malloc allocated memory in load_new_class_version.
os::free(_scratch_classes);
_timer_vm_op_prologue.stop();
unlock_classes(); returnfalse;
}
_timer_vm_op_prologue.stop(); returntrue;
}
void VM_RedefineClasses::doit() {
Thread* current = Thread::current();
if (log_is_enabled(Info, redefine, class, timer)) {
_timer_vm_op_doit.start();
}
#if INCLUDE_CDS if (UseSharedSpaces) { // Sharing is enabled so we remap the shared readonly space to // shared readwrite, private just in case we need to redefine // a shared class. We do the remap during the doit() phase of // the safepoint to be safer. if (!MetaspaceShared::remap_shared_readonly_as_readwrite()) {
log_info(redefine, class, load)("failed to remap shared readonly space to readwrite, private");
_res = JVMTI_ERROR_INTERNAL;
_timer_vm_op_doit.stop(); return;
}
} #endif
// Mark methods seen on stack and everywhere else so old methods are not // cleaned up if they're on the stack.
MetadataOnStackMark md_on_stack(/*walk_all_metadata*/true, /*redefinition_walk*/true);
HandleMark hm(current); // make sure any handles created are deleted // before the stack walk again.
for (int i = 0; i < _class_count; i++) {
redefine_single_class(current, _class_defs[i].klass, _scratch_classes[i]);
}
// Flush all compiled code that depends on the classes redefined.
flush_dependent_code();
// Adjust constantpool caches and vtables for all classes // that reference methods of the evolved classes. // Have to do this after all classes are redefined and all methods that // are redefined are marked as old.
AdjustAndCleanMetadata adjust_and_clean_metadata(current);
ClassLoaderDataGraph::classes_do(&adjust_and_clean_metadata);
// JSR-292 support if (_any_class_has_resolved_methods) { bool trace_name_printed = false;
ResolvedMethodTable::adjust_method_entries(&trace_name_printed);
}
// Increment flag indicating that some invariants are no longer true. // See jvmtiExport.hpp for detailed explanation.
JvmtiExport::increment_redefinition_count();
// check_class() is optionally called for product bits, but is // always called for non-product bits. #ifdef PRODUCT if (log_is_enabled(Trace, redefine, class, obsolete, metadata)) { #endif
log_trace(redefine, class, obsolete, metadata)("calling check_class");
CheckClass check_class(current);
ClassLoaderDataGraph::classes_do(&check_class); #ifdef PRODUCT
} #endif
// Clean up any metadata now unreferenced while MetadataOnStackMark is set.
ClassLoaderDataGraph::clean_deallocate_lists(false);
// Reset the_class to null for error printing.
_the_class = NULL;
if (log_is_enabled(Info, redefine, class, timer)) { // Used to have separate timers for "doit" and "all", but the timer // overhead skewed the measurements.
julong doit_time = _timer_vm_op_doit.milliseconds();
julong all_time = _timer_vm_op_prologue.milliseconds() + doit_time;
// Returns true if the class denoted by klass_mirror may be redefined or
// retransformed. Classes for primitives, arrays, and hidden classes are
// not modifiable.
// Fix: the original had fused 'returnfalse'/'returntrue' tokens, which do
// not compile; restored to 'return false;'/'return true;'.
bool VM_RedefineClasses::is_modifiable_class(oop klass_mirror) {
  // classes for primitives cannot be redefined
  if (java_lang_Class::is_primitive(klass_mirror)) {
    return false;
  }
  Klass* k = java_lang_Class::as_Klass(klass_mirror);
  // classes for arrays cannot be redefined
  if (k == NULL || !k->is_instance_klass()) {
    return false;
  }

  // Cannot redefine or retransform a hidden class.
  if (InstanceKlass::cast(k)->is_hidden()) {
    return false;
  }
  return true;
}
// Append the current entry at scratch_i in scratch_cp to *merge_cp_p // where the end of *merge_cp_p is specified by *merge_cp_length_p. For // direct CP entries, there is just the current entry to append. For // indirect and double-indirect CP entries, there are zero or more // referenced CP entries along with the current entry to append. // Indirect and double-indirect CP entries are handled by recursive // calls to append_entry() as needed. The referenced CP entries are // always appended to *merge_cp_p before the referee CP entry. These // referenced CP entries may already exist in *merge_cp_p in which case // there is nothing extra to append and only the current entry is // appended. void VM_RedefineClasses::append_entry(const constantPoolHandle& scratch_cp, int scratch_i, constantPoolHandle *merge_cp_p, int *merge_cp_length_p) {
// append is different depending on entry tag type switch (scratch_cp->tag_at(scratch_i).value()) {
// The old verifier is implemented outside the VM. It loads classes, // but does not resolve constant pool entries directly so we never // see Class entries here with the old verifier. Similarly the old // verifier does not like Class entries in the input constant pool. // The split-verifier is implemented in the VM so it can optionally // and directly resolve constant pool entries to load classes. The // split-verifier can accept either Class entries or UnresolvedClass // entries in the input constant pool. We revert the appended copy // back to UnresolvedClass so that either verifier will be happy // with the constant pool entry. // // this is an indirect CP entry so it needs special handling case JVM_CONSTANT_Class: case JVM_CONSTANT_UnresolvedClass:
{ int name_i = scratch_cp->klass_name_index_at(scratch_i); int new_name_i = find_or_append_indirect_entry(scratch_cp, name_i, merge_cp_p,
merge_cp_length_p);
if (new_name_i != name_i) {
log_trace(redefine, class, constantpool)
("Class entry@%d name_index change: %d to %d",
*merge_cp_length_p, name_i, new_name_i);
}
(*merge_cp_p)->temp_unresolved_klass_at_put(*merge_cp_length_p, new_name_i); if (scratch_i != *merge_cp_length_p) { // The new entry in *merge_cp_p is at a different index than // the new entry in scratch_cp so we need to map the index values.
map_index(scratch_cp, scratch_i, *merge_cp_length_p);
}
(*merge_cp_length_p)++;
} break;
// these are direct CP entries so they can be directly appended, // but double and long take two constant pool entries case JVM_CONSTANT_Double: // fall through case JVM_CONSTANT_Long:
{
ConstantPool::copy_entry_to(scratch_cp, scratch_i, *merge_cp_p, *merge_cp_length_p);
if (scratch_i != *merge_cp_length_p) { // The new entry in *merge_cp_p is at a different index than // the new entry in scratch_cp so we need to map the index values.
map_index(scratch_cp, scratch_i, *merge_cp_length_p);
}
(*merge_cp_length_p) += 2;
} break;
// these are direct CP entries so they can be directly appended case JVM_CONSTANT_Float: // fall through case JVM_CONSTANT_Integer: // fall through case JVM_CONSTANT_Utf8: // fall through
// This was an indirect CP entry, but it has been changed into // Symbol*s so this entry can be directly appended. case JVM_CONSTANT_String: // fall through
{
ConstantPool::copy_entry_to(scratch_cp, scratch_i, *merge_cp_p, *merge_cp_length_p);
if (scratch_i != *merge_cp_length_p) { // The new entry in *merge_cp_p is at a different index than // the new entry in scratch_cp so we need to map the index values.
map_index(scratch_cp, scratch_i, *merge_cp_length_p);
}
(*merge_cp_length_p)++;
} break;
// this is an indirect CP entry so it needs special handling case JVM_CONSTANT_NameAndType:
{ int name_ref_i = scratch_cp->name_ref_index_at(scratch_i); int new_name_ref_i = find_or_append_indirect_entry(scratch_cp, name_ref_i, merge_cp_p,
merge_cp_length_p);
int signature_ref_i = scratch_cp->signature_ref_index_at(scratch_i); int new_signature_ref_i = find_or_append_indirect_entry(scratch_cp, signature_ref_i,
merge_cp_p, merge_cp_length_p);
// If the referenced entries already exist in *merge_cp_p, then // both new_name_ref_i and new_signature_ref_i will both be 0. // In that case, all we are appending is the current entry. if (new_name_ref_i != name_ref_i) {
log_trace(redefine, class, constantpool)
("NameAndType entry@%d name_ref_index change: %d to %d",
*merge_cp_length_p, name_ref_i, new_name_ref_i);
} if (new_signature_ref_i != signature_ref_i) {
log_trace(redefine, class, constantpool)
("NameAndType entry@%d signature_ref_index change: %d to %d",
*merge_cp_length_p, signature_ref_i, new_signature_ref_i);
}
(*merge_cp_p)->name_and_type_at_put(*merge_cp_length_p,
new_name_ref_i, new_signature_ref_i); if (scratch_i != *merge_cp_length_p) { // The new entry in *merge_cp_p is at a different index than // the new entry in scratch_cp so we need to map the index values.
map_index(scratch_cp, scratch_i, *merge_cp_length_p);
}
(*merge_cp_length_p)++;
} break;
// this is a double-indirect CP entry so it needs special handling case JVM_CONSTANT_Fieldref: // fall through case JVM_CONSTANT_InterfaceMethodref: // fall through case JVM_CONSTANT_Methodref:
{ int klass_ref_i = scratch_cp->uncached_klass_ref_index_at(scratch_i); int new_klass_ref_i = find_or_append_indirect_entry(scratch_cp, klass_ref_i,
merge_cp_p, merge_cp_length_p);
int name_and_type_ref_i = scratch_cp->uncached_name_and_type_ref_index_at(scratch_i); int new_name_and_type_ref_i = find_or_append_indirect_entry(scratch_cp, name_and_type_ref_i,
merge_cp_p, merge_cp_length_p);
if (klass_ref_i != new_klass_ref_i) {
log_trace(redefine, class, constantpool)
("%s entry@%d class_index changed: %d to %d", entry_name, *merge_cp_length_p, klass_ref_i, new_klass_ref_i);
} if (name_and_type_ref_i != new_name_and_type_ref_i) {
log_trace(redefine, class, constantpool)
("%s entry@%d name_and_type_index changed: %d to %d",
entry_name, *merge_cp_length_p, name_and_type_ref_i, new_name_and_type_ref_i);
}
if (scratch_i != *merge_cp_length_p) { // The new entry in *merge_cp_p is at a different index than // the new entry in scratch_cp so we need to map the index values.
map_index(scratch_cp, scratch_i, *merge_cp_length_p);
}
(*merge_cp_length_p)++;
} break;
// this is an indirect CP entry so it needs special handling case JVM_CONSTANT_MethodType:
{ int ref_i = scratch_cp->method_type_index_at(scratch_i); int new_ref_i = find_or_append_indirect_entry(scratch_cp, ref_i, merge_cp_p,
merge_cp_length_p); if (new_ref_i != ref_i) {
log_trace(redefine, class, constantpool)
("MethodType entry@%d ref_index change: %d to %d", *merge_cp_length_p, ref_i, new_ref_i);
}
(*merge_cp_p)->method_type_index_at_put(*merge_cp_length_p, new_ref_i); if (scratch_i != *merge_cp_length_p) { // The new entry in *merge_cp_p is at a different index than // the new entry in scratch_cp so we need to map the index values.
map_index(scratch_cp, scratch_i, *merge_cp_length_p);
}
(*merge_cp_length_p)++;
} break;
// this is an indirect CP entry so it needs special handling case JVM_CONSTANT_MethodHandle:
{ int ref_kind = scratch_cp->method_handle_ref_kind_at(scratch_i); int ref_i = scratch_cp->method_handle_index_at(scratch_i); int new_ref_i = find_or_append_indirect_entry(scratch_cp, ref_i, merge_cp_p,
merge_cp_length_p); if (new_ref_i != ref_i) {
log_trace(redefine, class, constantpool)
("MethodHandle entry@%d ref_index change: %d to %d", *merge_cp_length_p, ref_i, new_ref_i);
}
(*merge_cp_p)->method_handle_index_at_put(*merge_cp_length_p, ref_kind, new_ref_i); if (scratch_i != *merge_cp_length_p) { // The new entry in *merge_cp_p is at a different index than // the new entry in scratch_cp so we need to map the index values.
map_index(scratch_cp, scratch_i, *merge_cp_length_p);
}
(*merge_cp_length_p)++;
} break;
// this is an indirect CP entry so it needs special handling case JVM_CONSTANT_Dynamic: // fall through case JVM_CONSTANT_InvokeDynamic:
{ // Index of the bootstrap specifier in the operands array int old_bs_i = scratch_cp->bootstrap_methods_attribute_index(scratch_i); int new_bs_i = find_or_append_operand(scratch_cp, old_bs_i, merge_cp_p,
merge_cp_length_p); // The bootstrap method NameAndType_info index int old_ref_i = scratch_cp->bootstrap_name_and_type_ref_index_at(scratch_i); int new_ref_i = find_or_append_indirect_entry(scratch_cp, old_ref_i, merge_cp_p,
merge_cp_length_p); if (new_bs_i != old_bs_i) {
log_trace(redefine, class, constantpool)
("Dynamic entry@%d bootstrap_method_attr_index change: %d to %d",
*merge_cp_length_p, old_bs_i, new_bs_i);
} if (new_ref_i != old_ref_i) {
log_trace(redefine, class, constantpool)
("Dynamic entry@%d name_and_type_index change: %d to %d", *merge_cp_length_p, old_ref_i, new_ref_i);
}
if (scratch_cp->tag_at(scratch_i).is_dynamic_constant())
(*merge_cp_p)->dynamic_constant_at_put(*merge_cp_length_p, new_bs_i, new_ref_i); else
(*merge_cp_p)->invoke_dynamic_at_put(*merge_cp_length_p, new_bs_i, new_ref_i); if (scratch_i != *merge_cp_length_p) { // The new entry in *merge_cp_p is at a different index than // the new entry in scratch_cp so we need to map the index values.
map_index(scratch_cp, scratch_i, *merge_cp_length_p);
}
(*merge_cp_length_p)++;
} break;
// At this stage, Class or UnresolvedClass could be in scratch_cp, but not // ClassIndex case JVM_CONSTANT_ClassIndex: // fall through
// Invalid is used as the tag for the second constant pool entry // occupied by JVM_CONSTANT_Double or JVM_CONSTANT_Long. It should // not be seen by itself. case JVM_CONSTANT_Invalid: // fall through
// At this stage, String could be here, but not StringIndex case JVM_CONSTANT_StringIndex: // fall through
// At this stage JVM_CONSTANT_UnresolvedClassInError should not be // here case JVM_CONSTANT_UnresolvedClassInError: // fall through
default:
{ // leave a breadcrumb
jbyte bad_value = scratch_cp->tag_at(scratch_i).value();
ShouldNotReachHere();
} break;
} // end switch tag value
} // end append_entry()
// Locate (or append) the scratch_cp entry at ref_i inside *merge_cp_p and
// return its index in *merge_cp_p, recording an index mapping when the two
// indices differ.
int VM_RedefineClasses::find_or_append_indirect_entry(const constantPoolHandle& scratch_cp,
      int ref_i, constantPoolHandle *merge_cp_p, int *merge_cp_length_p) {

  // Fast path: the same slot in *merge_cp_p already holds an equal entry.
  const bool same_slot_matches = (ref_i < *merge_cp_length_p) &&
      scratch_cp->compare_entry_to(ref_i, *merge_cp_p, ref_i);
  if (same_slot_matches) {
    return ref_i;
  }

  // Forward reference in *merge_cp_p or not a direct match: search for the
  // entry anywhere else in *merge_cp_p.
  int found_i = scratch_cp->find_matching_entry(ref_i, *merge_cp_p);
  if (found_i != 0) {
    guarantee(found_i != ref_i, "compare_entry_to() and find_matching_entry() do not agree");
    // Found a matching entry somewhere else in *merge_cp_p so just need a mapping entry.
    map_index(scratch_cp, ref_i, found_i);
    return found_i;
  }

  // No match found so we have to append this entry to *merge_cp_p.
  append_entry(scratch_cp, ref_i, merge_cp_p, merge_cp_length_p);
  // append_entry() appends exactly one entry, so the new entry is the
  // last one in *merge_cp_p.
  return *merge_cp_length_p - 1;
} // end find_or_append_indirect_entry()
// Append a bootstrap specifier into the merge_cp operands that is semantically equal
// to the scratch_cp operands bootstrap specifier passed by the old_bs_i index.
// Recursively append new merge_cp entries referenced by the new bootstrap specifier.
void VM_RedefineClasses::append_operand(const constantPoolHandle& scratch_cp, int old_bs_i,
       constantPoolHandle *merge_cp_p, int *merge_cp_length_p) {

  // Resolve (or append) the bootstrap method's MethodHandle CP entry first.
  int old_ref_i = scratch_cp->operand_bootstrap_method_ref_index_at(old_bs_i);
  int new_ref_i = find_or_append_indirect_entry(scratch_cp, old_ref_i, merge_cp_p,
                                                merge_cp_length_p);
  if (new_ref_i != old_ref_i) {
    log_trace(redefine, class, constantpool)
      ("operands entry@%d bootstrap method ref_index change: %d to %d", _operands_cur_length, old_ref_i, new_ref_i);
  }

  Array<u2>* merge_ops = (*merge_cp_p)->operands();
  int new_bs_i = _operands_cur_length;
  // We have _operands_cur_length == 0 when the merge_cp operands is empty yet.
  // However, the operand_offset_at(0) was set in the extend_operands() call.
  int new_base = (new_bs_i == 0) ? (*merge_cp_p)->operand_offset_at(0)
                                 : (*merge_cp_p)->operand_next_offset_at(new_bs_i - 1);
  int argc = scratch_cp->operand_argument_count_at(old_bs_i);

  // NOTE(review): upstream versions of this code record the new specifier's
  // offset and store the bootstrap method ref_index and argument count into
  // merge_ops here, before the argument loop (operand_offset_at_put plus two
  // at_put calls). Those stores are not present in this copy -- confirm
  // against the original file; lines may have been lost in extraction.

  // Copy (remapping as needed) each static bootstrap argument.
  for (int i = 0; i < argc; i++) {
    int old_arg_ref_i = scratch_cp->operand_argument_index_at(old_bs_i, i);
    int new_arg_ref_i = find_or_append_indirect_entry(scratch_cp, old_arg_ref_i, merge_cp_p,
                                                      merge_cp_length_p);
    merge_ops->at_put(new_base++, new_arg_ref_i);
    if (new_arg_ref_i != old_arg_ref_i) {
      log_trace(redefine, class, constantpool)
        ("operands entry@%d bootstrap method argument ref_index change: %d to %d",
         _operands_cur_length, old_arg_ref_i, new_arg_ref_i);
    }
  }
  if (old_bs_i != _operands_cur_length) {
    // The bootstrap specifier in *merge_cp_p is at a different index than
    // that in scratch_cp so we need to map the index values.
    map_operand_index(old_bs_i, new_bs_i);
  }
  _operands_cur_length++;
} // end append_operand()
// Locate (or append) the scratch_cp bootstrap specifier at old_bs_i inside
// the merge_cp operands and return its bootstrap specifier index there,
// recording an operand-index mapping when the two indices differ.
int VM_RedefineClasses::find_or_append_operand(const constantPoolHandle& scratch_cp,
      int old_bs_i, constantPoolHandle *merge_cp_p, int *merge_cp_length_p) {

  // Fast path: the specifier already sits at the identical operand index.
  const bool same_slot_matches = (old_bs_i < _operands_cur_length) &&
      scratch_cp->compare_operand_to(old_bs_i, *merge_cp_p, old_bs_i);
  if (same_slot_matches) {
    return old_bs_i;
  }

  // Forward reference in *merge_cp_p or not a direct match: search the
  // already-merged specifiers for an equal one.
  int found_i = scratch_cp->find_matching_operand(old_bs_i, *merge_cp_p,
                                                  _operands_cur_length);
  if (found_i != -1) {
    guarantee(found_i != old_bs_i, "compare_operand_to() and find_matching_operand() disagree");
    // found a matching operand somewhere else in *merge_cp_p so just need a mapping
    map_operand_index(old_bs_i, found_i);
    return found_i;
  }

  // no match found so we have to append this bootstrap specifier to *merge_cp_p
  append_operand(scratch_cp, old_bs_i, merge_cp_p, merge_cp_length_p);
  // append_operand() appends exactly one specifier, so it is the last one.
  return _operands_cur_length - 1;
} // end find_or_append_operand()
// Shrinks the merged operands array to its final length, optionally traces
// the operand index mapping, and resets the per-merge operand bookkeeping.
void VM_RedefineClasses::finalize_operands_merge(const constantPoolHandle& merge_cp, TRAPS) {
  if (merge_cp->operands() == NULL) {
    // Nothing was merged; nothing to shrink or trace.
    return;
  }
  // Shrink the merge_cp operands
  merge_cp->shrink_operands(_operands_cur_length, CHECK);

  if (log_is_enabled(Trace, redefine, class, constantpool)) {
    // don't want to loop unless we are tracing
    int count = 0;
    for (int i = 1; i < _operands_index_map_p->length(); i++) {
      int value = _operands_index_map_p->at(i);
      if (value != -1) {
        log_trace(redefine, class, constantpool)("operands_index_map[%d]: old=%d new=%d", count, i, value);
        count++;
      }
    }
  }
  // Clean-up
  _operands_index_map_p = NULL;
  _operands_cur_length = 0;
  _operands_index_map_count = 0;
} // end finalize_operands_merge()
// Symbol* comparator for qsort // The caller must have an active ResourceMark. staticint symcmp(constvoid* a, constvoid* b) { char* astr = (*(Symbol**)a)->as_C_string(); char* bstr = (*(Symbol**)b)->as_C_string(); return strcmp(astr, bstr);
}
// The caller must have an active ResourceMark. static jvmtiError check_attribute_arrays(constchar* attr_name,
InstanceKlass* the_class, InstanceKlass* scratch_class,
Array<u2>* the_array, Array<u2>* scr_array) { bool the_array_exists = the_array != Universe::the_empty_short_array(); bool scr_array_exists = scr_array != Universe::the_empty_short_array();
int array_len = the_array->length(); if (the_array_exists && scr_array_exists) { if (array_len != scr_array->length()) {
log_trace(redefine, class)
("redefined class %s attribute change error: %s len=%d changed to len=%d",
the_class->external_name(), attr_name, array_len, scr_array->length()); return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_CLASS_ATTRIBUTE_CHANGED;
}
// The order of entries in the attribute array is not specified so we // have to explicitly check for the same contents. We do this by copying // the referenced symbols into their own arrays, sorting them and then // comparing each element pair.
for (int i = 0; i < array_len; i++) { int the_cp_index = the_array->at(i); int scr_cp_index = scr_array->at(i);
the_syms[i] = the_class->constants()->klass_name_at(the_cp_index);
scr_syms[i] = scratch_class->constants()->klass_name_at(scr_cp_index);
}
// Check whether the class NestMembers attribute has been changed. return check_attribute_arrays("NestMembers",
the_class, scratch_class,
the_class->nest_members(),
scratch_class->nest_members());
}
// Return an error status if the class Record attribute was changed. static jvmtiError check_record_attribute(InstanceKlass* the_class, InstanceKlass* scratch_class) { // Get lists of record components.
Array<RecordComponent*>* the_record = the_class->record_components();
Array<RecordComponent*>* scr_record = scratch_class->record_components(); bool the_record_exists = the_record != NULL; bool scr_record_exists = scr_record != NULL;
if (the_record_exists && scr_record_exists) { int the_num_components = the_record->length(); int scr_num_components = scr_record->length(); if (the_num_components != scr_num_components) {
log_info(redefine, class, record)
("redefined class %s attribute change error: Record num_components=%d changed to num_components=%d",
the_class->external_name(), the_num_components, scr_num_components); return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_CLASS_ATTRIBUTE_CHANGED;
}
// Compare each field in each record component.
ConstantPool* the_cp = the_class->constants();
ConstantPool* scr_cp = scratch_class->constants(); for (int x = 0; x < the_num_components; x++) {
RecordComponent* the_component = the_record->at(x);
RecordComponent* scr_component = scr_record->at(x); const Symbol* const the_name = the_cp->symbol_at(the_component->name_index()); const Symbol* const scr_name = scr_cp->symbol_at(scr_component->name_index()); const Symbol* const the_descr = the_cp->symbol_at(the_component->descriptor_index()); const Symbol* const scr_descr = scr_cp->symbol_at(scr_component->descriptor_index()); if (the_name != scr_name || the_descr != scr_descr) {
log_info(redefine, class, record)
("redefined class %s attribute change error: Record name_index, descriptor_index, and/or attributes_count changed",
the_class->external_name()); return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_CLASS_ATTRIBUTE_CHANGED;
}
// Check whether the class PermittedSubclasses attribute has been changed. return check_attribute_arrays("PermittedSubclasses",
the_class, scratch_class,
the_class->permitted_subclasses(),
scratch_class->permitted_subclasses());
}
jvmtiError VM_RedefineClasses::compare_and_normalize_class_versions(
InstanceKlass* the_class,
InstanceKlass* scratch_class) { int i;
// Check superclasses, or rather their names, since superclasses themselves can be // requested to replace. // Check for NULL superclass first since this might be java.lang.Object if (the_class->super() != scratch_class->super() &&
(the_class->super() == NULL || scratch_class->super() == NULL ||
the_class->super()->name() !=
scratch_class->super()->name())) {
log_info(redefine, class, normalize)
("redefined class %s superclass change error: superclass changed from %s to %s.",
the_class->external_name(),
the_class->super() == NULL ? "NULL" : the_class->super()->external_name(),
scratch_class->super() == NULL ? "NULL" : scratch_class->super()->external_name()); return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_HIERARCHY_CHANGED;
}
// Check if the number, names and order of directly implemented interfaces are the same. // I think in principle we should just check if the sets of names of directly implemented // interfaces are the same, i.e. the order of declaration (which, however, if changed in the // .java file, also changes in .class file) should not matter. However, comparing sets is // technically a bit more difficult, and, more importantly, I am not sure at present that the // order of interfaces does not matter on the implementation level, i.e. that the VM does not // rely on it somewhere.
Array<InstanceKlass*>* k_interfaces = the_class->local_interfaces();
Array<InstanceKlass*>* k_new_interfaces = scratch_class->local_interfaces(); int n_intfs = k_interfaces->length(); if (n_intfs != k_new_interfaces->length()) {
log_info(redefine, class, normalize)
("redefined class %s interfaces change error: number of implemented interfaces changed from %d to %d.",
the_class->external_name(), n_intfs, k_new_interfaces->length()); return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_HIERARCHY_CHANGED;
} for (i = 0; i < n_intfs; i++) { if (k_interfaces->at(i)->name() !=
k_new_interfaces->at(i)->name()) {
log_info(redefine, class, normalize)
("redefined class %s interfaces change error: interface changed from %s to %s.",
the_class->external_name(),
k_interfaces->at(i)->external_name(), k_new_interfaces->at(i)->external_name()); return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_HIERARCHY_CHANGED;
}
}
// Check whether class is in the error init state. if (the_class->is_in_error_state()) {
log_info(redefine, class, normalize)
("redefined class %s is in error init state.", the_class->external_name()); // TBD #5057930: special error code is needed in 1.6 return JVMTI_ERROR_INVALID_CLASS;
}
// Check whether the nest-related attributes have been changed.
jvmtiError err = check_nest_attributes(the_class, scratch_class); if (err != JVMTI_ERROR_NONE) { return err;
}
// Check whether the Record attribute has been changed.
err = check_record_attribute(the_class, scratch_class); if (err != JVMTI_ERROR_NONE) { return err;
}
// Check whether the PermittedSubclasses attribute has been changed.
err = check_permitted_subclasses_attribute(the_class, scratch_class); if (err != JVMTI_ERROR_NONE) { return err;
}
// Check whether class modifiers are the same.
jushort old_flags = (jushort) the_class->access_flags().get_flags();
jushort new_flags = (jushort) scratch_class->access_flags().get_flags(); if (old_flags != new_flags) {
log_info(redefine, class, normalize)
("redefined class %s modifiers change error: modifiers changed from %d to %d.",
the_class->external_name(), old_flags, new_flags); return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_CLASS_MODIFIERS_CHANGED;
}
// Check if the number, names, types and order of fields declared in these classes // are the same.
JavaFieldStream old_fs(the_class);
JavaFieldStream new_fs(scratch_class); for (; !old_fs.done() && !new_fs.done(); old_fs.next(), new_fs.next()) { // name and signature
Symbol* name_sym1 = the_class->constants()->symbol_at(old_fs.name_index());
Symbol* sig_sym1 = the_class->constants()->symbol_at(old_fs.signature_index());
Symbol* name_sym2 = scratch_class->constants()->symbol_at(new_fs.name_index());
Symbol* sig_sym2 = scratch_class->constants()->symbol_at(new_fs.signature_index()); if (name_sym1 != name_sym2 || sig_sym1 != sig_sym2) {
log_info(redefine, class, normalize)
("redefined class %s fields change error: field %s %s changed to %s %s.",
the_class->external_name(),
sig_sym1->as_C_string(), name_sym1->as_C_string(),
sig_sym2->as_C_string(), name_sym2->as_C_string()); return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_SCHEMA_CHANGED;
} // offset if (old_fs.offset() != new_fs.offset()) {
log_info(redefine, class, normalize)
("redefined class %s field %s change error: offset changed from %d to %d.",
the_class->external_name(), name_sym2->as_C_string(), old_fs.offset(), new_fs.offset()); return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_SCHEMA_CHANGED;
} // access
old_flags = old_fs.access_flags().as_short();
new_flags = new_fs.access_flags().as_short(); if ((old_flags ^ new_flags) & JVM_RECOGNIZED_FIELD_MODIFIERS) {
log_info(redefine, class, normalize)
("redefined class %s field %s change error: modifiers changed from %d to %d.",
the_class->external_name(), name_sym2->as_C_string(), old_flags, new_flags); return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_SCHEMA_CHANGED;
}
}
// If both streams aren't done then we have a differing number of // fields. if (!old_fs.done() || !new_fs.done()) { constchar* action = old_fs.done() ? "added" : "deleted";
log_info(redefine, class, normalize)
("redefined class %s fields change error: some fields were %s.",
the_class->external_name(), action); return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_SCHEMA_CHANGED;
}
// Do a parallel walk through the old and new methods. Detect // cases where they match (exist in both), have been added in // the new methods, or have been deleted (exist only in the // old methods). The class file parser places methods in order // by method name, but does not order overloaded methods by // signature. In order to determine what fate befell the methods, // this code places the overloaded new methods that have matching // old methods in the same order as the old methods and places // new overloaded methods at the end of overloaded methods of // that name. The code for this order normalization is adapted // from the algorithm used in InstanceKlass::find_method(). // Since we are swapping out of order entries as we find them, // we only have to search forward through the overloaded methods. // Methods which are added and have the same name as an existing // method (but different signature) will be put at the end of // the methods with that name, and the name mismatch code will // handle them.
Array<Method*>* k_old_methods(the_class->methods());
Array<Method*>* k_new_methods(scratch_class->methods()); int n_old_methods = k_old_methods->length(); int n_new_methods = k_new_methods->length();
Thread* thread = Thread::current();
int ni = 0; int oi = 0; while (true) {
Method* k_old_method;
Method* k_new_method; enum { matched, added, deleted, undetermined } method_was = undetermined;
if (oi >= n_old_methods) { if (ni >= n_new_methods) { break; // we've looked at everything, done
} // New method at the end
k_new_method = k_new_methods->at(ni);
method_was = added;
} elseif (ni >= n_new_methods) { // Old method, at the end, is deleted
k_old_method = k_old_methods->at(oi);
method_was = deleted;
} else { // There are more methods in both the old and new lists
k_old_method = k_old_methods->at(oi);
k_new_method = k_new_methods->at(ni); if (k_old_method->name() != k_new_method->name()) { // Methods are sorted by method name, so a mismatch means added // or deleted if (k_old_method->name()->fast_compare(k_new_method->name()) > 0) {
method_was = added;
} else {
method_was = deleted;
}
} elseif (k_old_method->signature() == k_new_method->signature()) { // Both the name and signature match
method_was = matched;
} else { // The name matches, but the signature doesn't, which means we have to // search forward through the new overloaded methods. int nj; // outside the loop for post-loop check for (nj = ni + 1; nj < n_new_methods; nj++) {
Method* m = k_new_methods->at(nj); if (k_old_method->name() != m->name()) { // reached another method name so no more overloaded methods
method_was = deleted; break;
} if (k_old_method->signature() == m->signature()) { // found a match so swap the methods
k_new_methods->at_put(ni, m);
k_new_methods->at_put(nj, k_new_method);
k_new_method = m;
method_was = matched; break;
}
}
if (nj >= n_new_methods) { // reached the end without a match; so method was deleted
method_was = deleted;
}
}
}
switch (method_was) { case matched: // methods match, be sure modifiers do too
old_flags = (jushort) k_old_method->access_flags().get_flags();
new_flags = (jushort) k_new_method->access_flags().get_flags(); if ((old_flags ^ new_flags) & ~(JVM_ACC_NATIVE)) {
log_info(redefine, class, normalize)
("redefined class %s method %s modifiers error: modifiers changed from %d to %d",
the_class->external_name(), k_old_method->name_and_sig_as_C_string(), old_flags, new_flags); return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_MODIFIERS_CHANGED;
}
{
u2 new_num = k_new_method->method_idnum();
u2 old_num = k_old_method->method_idnum(); if (new_num != old_num) {
Method* idnum_owner = scratch_class->method_with_idnum(old_num); if (idnum_owner != NULL) { // There is already a method assigned this idnum -- switch them // Take current and original idnum from the new_method
idnum_owner->set_method_idnum(new_num);
idnum_owner->set_orig_method_idnum(k_new_method->orig_method_idnum());
} // Take current and original idnum from the old_method
k_new_method->set_method_idnum(old_num);
k_new_method->set_orig_method_idnum(k_old_method->orig_method_idnum()); if (thread->has_pending_exception()) { return JVMTI_ERROR_OUT_OF_MEMORY;
}
}
}
log_trace(redefine, class, normalize)
("Method matched: new: %s [%d] == old: %s [%d]",
k_new_method->name_and_sig_as_C_string(), ni, k_old_method->name_and_sig_as_C_string(), oi); // advance to next pair of methods
++oi;
++ni; break; case added: // method added, see if it is OK if (!can_add_or_delete(k_new_method)) {
log_info(redefine, class, normalize)
("redefined class %s methods error: added method: %s [%d]",
the_class->external_name(), k_new_method->name_and_sig_as_C_string(), ni); return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_ADDED;
}
{
u2 num = the_class->next_method_idnum(); if (num == ConstMethod::UNSET_IDNUM) { // cannot add any more methods
log_info(redefine, class, normalize)
("redefined class %s methods error: can't create ID for new method %s [%d]",
the_class->external_name(), k_new_method->name_and_sig_as_C_string(), ni); return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_ADDED;
}
u2 new_num = k_new_method->method_idnum();
Method* idnum_owner = scratch_class->method_with_idnum(num); if (idnum_owner != NULL) { // There is already a method assigned this idnum -- switch them // Take current and original idnum from the new_method
idnum_owner->set_method_idnum(new_num);
idnum_owner->set_orig_method_idnum(k_new_method->orig_method_idnum());
}
k_new_method->set_method_idnum(num);
k_new_method->set_orig_method_idnum(num); if (thread->has_pending_exception()) { return JVMTI_ERROR_OUT_OF_MEMORY;
}
}
log_trace(redefine, class, normalize)
("Method added: new: %s [%d]", k_new_method->name_and_sig_as_C_string(), ni);
++ni; // advance to next new method break; case deleted: // method deleted, see if it is OK if (!can_add_or_delete(k_old_method)) {
log_info(redefine, class, normalize)
("redefined class %s methods error: deleted method %s [%d]",
the_class->external_name(), k_old_method->name_and_sig_as_C_string(), oi); return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_DELETED;
}
log_trace(redefine, class, normalize)
("Method deleted: old: %s [%d]", k_old_method->name_and_sig_as_C_string(), oi);
++oi; // advance to next old method break; default:
ShouldNotReachHere();
}
}
return JVMTI_ERROR_NONE;
}
// Find new constant pool index value for old constant pool index value // by searching the index map. Returns zero (0) if there is no mapped // value for the old constant pool index. int VM_RedefineClasses::find_new_index(int old_index) { if (_index_map_count == 0) { // map is empty so nothing can be found return 0;
}
if (old_index < 1 || old_index >= _index_map_p->length()) { // The old_index is out of range so it is not mapped. This should // not happen in regular constant pool merging use, but it can // happen if a corrupt annotation is processed. return 0;
}
int value = _index_map_p->at(old_index); if (value == -1) { // the old_index is not mapped return 0;
}
return value;
} // end find_new_index()
// Find new bootstrap specifier index value for old bootstrap specifier index // value by searching the index map. Returns unused index (-1) if there is // no mapped value for the old bootstrap specifier index. int VM_RedefineClasses::find_new_operand_index(int old_index) { if (_operands_index_map_count == 0) { // map is empty so nothing can be found return -1;
}
if (old_index == -1 || old_index >= _operands_index_map_p->length()) { // The old_index is out of range so it is not mapped. // This should not happen in regular constant pool merging use. return -1;
}
int value = _operands_index_map_p->at(old_index); if (value == -1) { // the old_index is not mapped return -1;
}
return value;
} // end find_new_operand_index()
// Returns true if the current mismatch is due to a resolved/unresolved // class pair. Otherwise, returns false. bool VM_RedefineClasses::is_unresolved_class_mismatch(const constantPoolHandle& cp1, int index1, const constantPoolHandle& cp2, int index2) {
jbyte t1 = cp1->tag_at(index1).value(); if (t1 != JVM_CONSTANT_Class && t1 != JVM_CONSTANT_UnresolvedClass) { returnfalse; // wrong entry type; not our special case
}
jbyte t2 = cp2->tag_at(index2).value(); if (t2 != JVM_CONSTANT_Class && t2 != JVM_CONSTANT_UnresolvedClass) { returnfalse; // wrong entry type; not our special case
}
if (t1 == t2) { returnfalse; // not a mismatch; not our special case
}
char *s1 = cp1->klass_name_at(index1)->as_C_string(); char *s2 = cp2->klass_name_at(index2)->as_C_string(); if (strcmp(s1, s2) != 0) { returnfalse; // strings don't match; not our special case
}
returntrue; // made it through the gauntlet; this is our special case
} // end is_unresolved_class_mismatch()
// The bug 6214132 caused the verification to fail.
//
// What this mark does before the bytecode verification phase of
// RedefineClasses():
//  a) It records the class being redefined (the_class) and the new
//     version of the class (scratch_class) in the JvmtiThreadState, so
//     the JVM_* functions called by the verifier can redirect queries
//     from the_class to scratch_class.
//  b) It swaps the java mirror of the_class into scratch_class for the
//     duration of verification.  A jclass returned for either class
//     then refers to the same Java mirror, so the verifier sees the
//     "one true mirror" for the class being verified.
// See the comments in JvmtiThreadState for what is done during the
// verification itself.
class RedefineVerifyMark : public StackObj {
 private:
  JvmtiThreadState* _state;
  Klass*            _scratch_class;
  OopHandle         _scratch_mirror;

 public:
  RedefineVerifyMark(Klass* the_class, Klass* scratch_class,
                     JvmtiThreadState* state)
    : _state(state), _scratch_class(scratch_class) {
    _state->set_class_versions_map(the_class, scratch_class);
    // java_mirror_handle() returns a copy; swap it into scratch_class.
    _scratch_mirror = the_class->java_mirror_handle();
    _scratch_class->swap_java_mirror_handle(_scratch_mirror);
  }

  ~RedefineVerifyMark() {
    // Restore the scratch class's own mirror, so that when scratch_class
    // is removed the correct mirror pointing to it can be cleared.
    _scratch_class->swap_java_mirror_handle(_scratch_mirror);
    _state->clear_class_versions_map();
  }
};
// For consistency allocate memory using os::malloc wrapper.
_scratch_classes = (InstanceKlass**)
os::malloc(sizeof(InstanceKlass*) * _class_count, mtClass); if (_scratch_classes == NULL) { return JVMTI_ERROR_OUT_OF_MEMORY;
} // Zero initialize the _scratch_classes array. for (int i = 0; i < _class_count; i++) {
_scratch_classes[i] = NULL;
}
JavaThread* current = JavaThread::current();
ResourceMark rm(current);
JvmtiThreadState *state = JvmtiThreadState::state_for(current); // state can only be NULL if the current thread is exiting which // should not happen since we're trying to do a RedefineClasses
guarantee(state != NULL, "exiting thread calling load_new_class_versions"); for (int i = 0; i < _class_count; i++) { // Create HandleMark so that any handles created while loading new class // versions are deleted. Constant pools are deallocated while merging // constant pools
HandleMark hm(current);
InstanceKlass* the_class = get_ik(_class_defs[i].klass);
// Set redefined class handle in JvmtiThreadState class. // This redefined class is sent to agent event handler for class file // load hook event.
state->set_class_being_redefined(the_class, _class_load_kind);
JavaThread* THREAD = current; // For exception macros.
ExceptionMark em(THREAD);
Handle protection_domain(THREAD, the_class->protection_domain());
ClassLoadInfo cl_info(protection_domain); // Parse and create a class from the bytes, but this class isn't added // to the dictionary, so do not call resolve_from_stream.
InstanceKlass* scratch_class = KlassFactory::create_from_stream(&st,
the_class->name(),
the_class->class_loader_data(),
cl_info,
THREAD);
// Clear class_being_redefined just to be sure.
state->clear_class_being_redefined();
// TODO: if this is retransform, and nothing changed we can skip it
// Need to clean up allocated InstanceKlass if there's an error so assign // the result here. Caller deallocates all the scratch classes in case of // an error.
_scratch_classes[i] = scratch_class;
// Do the validity checks in compare_and_normalize_class_versions() // before verifying the byte codes. By doing these checks first, we // limit the number of functions that require redirection from // the_class to scratch_class. In particular, we don't have to // modify JNI GetSuperclass() and thus won't change its performance.
jvmtiError res = compare_and_normalize_class_versions(the_class,
scratch_class); if (res != JVMTI_ERROR_NONE) { return res;
}
// verify what the caller passed us
{ // The bug 6214132 caused the verification to fail. // Information about the_class and scratch_class is temporarily // recorded into jvmtiThreadState. This data is used to redirect // the_class to scratch_class in the JVM_* functions called by the // verifier. Please, refer to jvmtiThreadState.hpp for the detailed // description.
RedefineVerifyMark rvm(the_class, scratch_class, state);
Verifier::verify(scratch_class, true, THREAD);
}
if (HAS_PENDING_EXCEPTION) {
Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
log_info(redefine, class, load, exceptions)("verify_byte_codes exception: '%s'", ex_name->as_C_string());
CLEAR_PENDING_EXCEPTION; if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) { return JVMTI_ERROR_OUT_OF_MEMORY;
} else { // tell the caller the bytecodes are bad return JVMTI_ERROR_FAILS_VERIFICATION;
}
}
#ifdef ASSERT
{ // verify what we have done during constant pool merging
{
RedefineVerifyMark rvm(the_class, scratch_class, state);
Verifier::verify(scratch_class, true, THREAD);
}
if (HAS_PENDING_EXCEPTION) {
Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
log_info(redefine, class, load, exceptions)
("verify_byte_codes post merge-CP exception: '%s'", ex_name->as_C_string());
CLEAR_PENDING_EXCEPTION; if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) { return JVMTI_ERROR_OUT_OF_MEMORY;
} else { // tell the caller that constant pool merging screwed up return JVMTI_ERROR_INTERNAL;
}
}
} #endif// ASSERT
Rewriter::rewrite(scratch_class, THREAD); if (!HAS_PENDING_EXCEPTION) {
scratch_class->link_methods(THREAD);
} if (HAS_PENDING_EXCEPTION) {
Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
log_info(redefine, class, load, exceptions)
("Rewriter::rewrite or link_methods exception: '%s'", ex_name->as_C_string());
CLEAR_PENDING_EXCEPTION; if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) { return JVMTI_ERROR_OUT_OF_MEMORY;
} else { return JVMTI_ERROR_INTERNAL;
}
}
// Map old_index to new_index as needed. scratch_cp is only needed // for log calls. void VM_RedefineClasses::map_index(const constantPoolHandle& scratch_cp, int old_index, int new_index) { if (find_new_index(old_index) != 0) { // old_index is already mapped return;
}
if (old_index == new_index) { // no mapping is needed return;
}
// NOTE(review): the following German text is a website disclaimer that was
// accidentally appended to this file (extraction/copy residue); it is not part
// of the original source.  Preserved here as a comment so the file remains
// syntactically valid:
//   "Die Informationen auf dieser Webseite wurden nach bestem Wissen
//    sorgfältig zusammengestellt. Es wird jedoch weder Vollständigkeit,
//    noch Richtigkeit, noch Qualität der bereit gestellten Informationen
//    zugesichert. Bemerkung: Die farbliche Syntaxdarstellung und die
//    Messung sind noch experimentell."
// Translation: "The information on this web page was carefully compiled to
// the best of our knowledge. However, neither completeness, nor correctness,
// nor quality of the provided information is guaranteed. Note: the colored
// syntax display and the measurement are still experimental."