diff --git a/make/bsd/makefiles/gcc.make b/make/bsd/makefiles/gcc.make index 2310141..8ddb2ab 100644 --- a/make/bsd/makefiles/gcc.make +++ b/make/bsd/makefiles/gcc.make @@ -116,7 +116,10 @@ CFLAGS += $(VM_PICFLAG) CFLAGS += -fno-rtti CFLAGS += -fno-exceptions CFLAGS += -pthread -CFLAGS += -fcheck-new +## well, strictly speaking we should check for clang not Darwin +ifneq ($(OS_VENDOR), Darwin) + CFLAGS += -fcheck-new +endif # version 4 and above support fvisibility=hidden (matches jni_x86.h file) # except 4.1.2 gives pointless warnings that can't be disabled (afaik) ifneq "$(shell expr \( $(CC_VER_MAJOR) \> 4 \) \| \( \( $(CC_VER_MAJOR) = 4 \) \& \( $(CC_VER_MINOR) \>= 3 \) \))" "0" diff --git a/make/openjdk_distro b/make/openjdk_distro index 520b33d..ea7eff0 100644 --- a/make/openjdk_distro +++ b/make/openjdk_distro @@ -27,6 +27,6 @@ # # Don't put quotes (fail windows build). -HOTSPOT_VM_DISTRO=OpenJDK +HOTSPOT_VM_DISTRO=Dynamic Code Evolution COMPANY_NAME= PRODUCT_NAME=OpenJDK diff --git a/src/cpu/x86/vm/templateTable_x86_32.cpp b/src/cpu/x86/vm/templateTable_x86_32.cpp index fc19edc..d2cddd3 100644 --- a/src/cpu/x86/vm/templateTable_x86_32.cpp +++ b/src/cpu/x86/vm/templateTable_x86_32.cpp @@ -2109,6 +2109,22 @@ void TemplateTable::resolve_cache_and_index(int byte_no, // resolve first time through address entry; switch (bytecode()) { + case Bytecodes::_fast_agetfield : // fall through + case Bytecodes::_fast_bgetfield : // fall through + case Bytecodes::_fast_cgetfield : // fall through + case Bytecodes::_fast_dgetfield : // fall through + case Bytecodes::_fast_fgetfield : // fall through + case Bytecodes::_fast_igetfield : // fall through + case Bytecodes::_fast_lgetfield : // fall through + case Bytecodes::_fast_sgetfield : // fall through + case Bytecodes::_fast_aputfield : // fall through + case Bytecodes::_fast_bputfield : // fall through + case Bytecodes::_fast_cputfield : // fall through + case Bytecodes::_fast_dputfield : // fall through + case Bytecodes::_fast_fputfield : // fall through + case Bytecodes::_fast_iputfield : // fall through + case Bytecodes::_fast_lputfield : // fall through + case Bytecodes::_fast_sputfield : // fall through case Bytecodes::_getstatic : // fall through case Bytecodes::_putstatic : // fall through case Bytecodes::_getfield : // fall through @@ -2211,6 +2227,7 @@ void TemplateTable::load_invoke_cp_cache_entry(int byte_no, // Correct values of the cache and index registers are preserved. void TemplateTable::jvmti_post_field_access(Register cache, Register index, + int byte_no, bool is_static, bool has_tos) { if (JvmtiExport::can_post_field_access()) { @@ -2237,7 +2254,11 @@ void TemplateTable::jvmti_post_field_access(Register cache, // cache: cache entry pointer __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), rax, cache); - __ get_cache_and_index_at_bcp(cache, index, 1); + + // DCEVM: Redefinition might have occured => reresolve the cp entry. 
+ __ restore_bcp(); + resolve_cache_and_index(byte_no, noreg, cache, index, sizeof(u2)); + __ bind(L1); } } @@ -2258,7 +2279,7 @@ void TemplateTable::getfield_or_static(int byte_no, bool is_static) { const Register flags = rax; resolve_cache_and_index(byte_no, noreg, cache, index, sizeof(u2)); - jvmti_post_field_access(cache, index, is_static, false); + jvmti_post_field_access(cache, index, byte_no, is_static, false); load_field_cp_cache_entry(obj, cache, index, off, flags, is_static); if (!is_static) pop_and_check_object(obj); @@ -2393,7 +2414,7 @@ void TemplateTable::getstatic(int byte_no) { // The registers cache and index expected to be set before call. // The function may destroy various registers, just not the cache and index registers. -void TemplateTable::jvmti_post_field_mod(Register cache, Register index, bool is_static) { +void TemplateTable::jvmti_post_field_mod(Register cache, Register index, int byte_no, bool is_static) { ByteSize cp_base_offset = constantPoolCacheOopDesc::base_offset(); @@ -2451,7 +2472,11 @@ void TemplateTable::jvmti_post_field_mod(Register cache, Register index, bool is // rcx: jvalue object on the stack __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, rax, rcx); - __ get_cache_and_index_at_bcp(cache, index, 1); + + // (tw) Redefinition might have occured => reresolve the cp entry. + __ restore_bcp(); + resolve_cache_and_index(byte_no, noreg, cache, index, sizeof(u2)); + __ bind(L1); } } @@ -2467,7 +2492,7 @@ void TemplateTable::putfield_or_static(int byte_no, bool is_static) { const Register flags = rax; resolve_cache_and_index(byte_no, noreg, cache, index, sizeof(u2)); - jvmti_post_field_mod(cache, index, is_static); + jvmti_post_field_mod(cache, index, byte_no, is_static); load_field_cp_cache_entry(obj, cache, index, off, flags, is_static); // Doug Lea believes this is not needed with current Sparcs (TSO) and Intel (PSO). @@ -2818,6 +2843,11 @@ void TemplateTable::fast_accessfield(TosState state) { // rcx: cache entry pointer __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), rax, rcx); __ pop_ptr(rax); // restore object pointer + + // DCEVM: Redefinition might have occured => reresolve the cp entry. + __ restore_bcp(); + resolve_cache_and_index(1, noreg, rax, rcx, sizeof(u2)); + __ bind(L1); } @@ -3008,6 +3038,26 @@ void TemplateTable::invokevirtual_helper(Register index, __ bind(notFinal); + // DCEVM: Check if we are calling an old method (and have to go slow path) + Label notOld; + __ movl(rax, flags); + __ andl(rax, (1 << ConstantPoolCacheEntry::is_old_method_shift)); + __ jcc(Assembler::zero, notOld); + + // Need a null check here! + __ null_check(recv); + + // Call out to VM to do look up based on correct vTable version (has to iterate back over the class history of the receiver class) + // DCEVM: TODO: Check if we can improve performance by inlining. + // DCEVM: TODO: Check if this additional branch affects normal execution time. 
+ __ call_VM(method, CAST_FROM_FN_PTR(address, InterpreterRuntime::find_correct_method), recv, index); + + // profile this call + __ profile_final_call(rax); + __ jump_from_interpreted(method, rdx); + + __ bind(notOld); + // get receiver klass __ null_check(recv, oopDesc::klass_offset_in_bytes()); __ load_klass(rax, recv); @@ -3093,6 +3143,31 @@ void TemplateTable::invokeinterface(int byte_no) { invokevirtual_helper(rbx, rcx, rdx); __ bind(notMethod); + // DCEVM: Check if we are calling an old method (and have to go slow path) + //__ movl(rax, rdx); + Label notOld; + __ andl(rdx, (1 << ConstantPoolCacheEntry::is_old_method_shift)); + __ jcc(Assembler::zero, notOld); + + // Get receiver klass into rdx - also a null check + __ movptr(rdx, Address(rcx, oopDesc::klass_offset_in_bytes())); + __ verify_oop(rdx); + + // Call out to VM to do look up based on correct vTable version (has to iterate back over the class history of the receiver class) + // DCEVM: TODO: Check if we can improve performance by inlining. + // DCEVM: TODO: Check if this additional branch affects normal execution time. + // DCEVM: TODO: Check the exact semantic (with respect to destoying registers) of call_VM + __ call_VM(rbx, CAST_FROM_FN_PTR(address, InterpreterRuntime::find_correct_interface_method), rcx, rax, rbx); + + // DCEVM: TODO: Check if resolved method could be null. + + // profile this call + __ profile_virtual_call(rdx, rsi, rdi); + + __ jump_from_interpreted(rbx, rdx); + + __ bind(notOld); + // Get receiver klass into rdx - also a null check __ restore_locals(); // restore rdi __ null_check(rcx, oopDesc::klass_offset_in_bytes()); diff --git a/src/cpu/x86/vm/templateTable_x86_64.cpp b/src/cpu/x86/vm/templateTable_x86_64.cpp index 932ee97..67bb710 100644 --- a/src/cpu/x86/vm/templateTable_x86_64.cpp +++ b/src/cpu/x86/vm/templateTable_x86_64.cpp @@ -2151,6 +2151,22 @@ void TemplateTable::resolve_cache_and_index(int byte_no, // resolve first time through address entry; switch (bytecode()) { + case Bytecodes::_fast_agetfield : // fall through + case Bytecodes::_fast_bgetfield : // fall through + case Bytecodes::_fast_cgetfield : // fall through + case Bytecodes::_fast_dgetfield : // fall through + case Bytecodes::_fast_fgetfield : // fall through + case Bytecodes::_fast_igetfield : // fall through + case Bytecodes::_fast_lgetfield : // fall through + case Bytecodes::_fast_sgetfield : // fall through + case Bytecodes::_fast_aputfield : // fall through + case Bytecodes::_fast_bputfield : // fall through + case Bytecodes::_fast_cputfield : // fall through + case Bytecodes::_fast_dputfield : // fall through + case Bytecodes::_fast_fputfield : // fall through + case Bytecodes::_fast_iputfield : // fall through + case Bytecodes::_fast_lputfield : // fall through + case Bytecodes::_fast_sputfield : // fall through case Bytecodes::_getstatic: case Bytecodes::_putstatic: case Bytecodes::_getfield: @@ -2267,7 +2283,7 @@ void TemplateTable::load_invoke_cp_cache_entry(int byte_no, // The registers cache and index expected to be set before call. // Correct values of the cache and index registers are preserved. 
void TemplateTable::jvmti_post_field_access(Register cache, Register index, - bool is_static, bool has_tos) { + int byte_no, bool is_static, bool has_tos) { // do the JVMTI work here to avoid disturbing the register state below // We use c_rarg registers here because we want to use the register used in // the call to the VM @@ -2298,7 +2314,11 @@ void TemplateTable::jvmti_post_field_access(Register cache, Register index, __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), c_rarg1, c_rarg2, c_rarg3); - __ get_cache_and_index_at_bcp(cache, index, 1); + + // DCEVM: Redefinition might have occured => reresolve the cp entry. + __ restore_bcp(); + resolve_cache_and_index(byte_no, noreg, cache, index, sizeof(u2)); + __ bind(L1); } } @@ -2320,7 +2340,7 @@ void TemplateTable::getfield_or_static(int byte_no, bool is_static) { const Register bc = c_rarg3; // uses same reg as obj, so don't mix them resolve_cache_and_index(byte_no, noreg, cache, index, sizeof(u2)); - jvmti_post_field_access(cache, index, is_static, false); + jvmti_post_field_access(cache, index, byte_no, is_static, false); load_field_cp_cache_entry(obj, cache, index, off, flags, is_static); if (!is_static) { @@ -2455,7 +2475,7 @@ void TemplateTable::getstatic(int byte_no) { // The registers cache and index expected to be set before call. // The function may destroy various registers, just not the cache and index registers. -void TemplateTable::jvmti_post_field_mod(Register cache, Register index, bool is_static) { +void TemplateTable::jvmti_post_field_mod(Register cache, Register index, int byte_no, bool is_static) { transition(vtos, vtos); ByteSize cp_base_offset = constantPoolCacheOopDesc::base_offset(); @@ -2507,7 +2527,11 @@ void TemplateTable::jvmti_post_field_mod(Register cache, Register index, bool is CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), c_rarg1, c_rarg2, c_rarg3); - __ get_cache_and_index_at_bcp(cache, index, 1); + + // DCEVM: Redefinition might have occured => reresolve the cp entry. + __ restore_bcp(); + resolve_cache_and_index(byte_no, noreg, cache, index, sizeof(u2)); + __ bind(L1); } } @@ -2523,7 +2547,7 @@ void TemplateTable::putfield_or_static(int byte_no, bool is_static) { const Register bc = c_rarg3; resolve_cache_and_index(byte_no, noreg, cache, index, sizeof(u2)); - jvmti_post_field_mod(cache, index, is_static); + jvmti_post_field_mod(cache, index, byte_no, is_static); load_field_cp_cache_entry(obj, cache, index, off, flags, is_static); // [jk] not needed currently @@ -2837,6 +2861,11 @@ void TemplateTable::fast_accessfield(TosState state) { InterpreterRuntime::post_field_access), c_rarg1, c_rarg2); __ pop_ptr(rax); // restore object pointer + + // DCEVM: Redefinition might have occured => reresolve the cp entry. + __ restore_bcp(); + resolve_cache_and_index(1, noreg, rax, rcx, sizeof(u2)); + __ bind(L1); } @@ -3073,6 +3102,26 @@ void TemplateTable::invokevirtual_helper(Register index, __ bind(notFinal); + // DCEVM: Check if we are calling an old method (and have to go slow path) + Label notOld; + __ movl(rax, flags); + __ andl(rax, (1 << ConstantPoolCacheEntry::is_old_method_shift)); + __ jcc(Assembler::zero, notOld); + + // Need a null check here! + __ null_check(recv); + + // Call out to VM to do look up based on correct vTable version (has to iterate back over the class history of the receiver class) + // DCEVM: TODO: Check if we can improve performance by inlining. + // DCEVM: TODO: Check if this additional branch affects normal execution time. 
+ __ call_VM(method, CAST_FROM_FN_PTR(address, InterpreterRuntime::find_correct_method), recv, index); + + // profile this call + __ profile_final_call(rax); + __ jump_from_interpreted(method, rdx); + + __ bind(notOld); + // get receiver klass __ null_check(recv, oopDesc::klass_offset_in_bytes()); __ load_klass(rax, recv); @@ -3156,6 +3205,35 @@ void TemplateTable::invokeinterface(int byte_no) { invokevirtual_helper(rbx, rcx, rdx); __ bind(notMethod); + // DCEVM: Check if we are calling an old method (and have to go slow path) + Label notOld; + __ andl(rdx, (1 << ConstantPoolCacheEntry::is_old_method_shift)); + __ jcc(Assembler::zero, notOld); + + // Call out to VM to do look up based on correct vTable version (has to iterate back over the class history of the receiver class) + // DCEVM: TODO: Check if we can improve performance by inlining. + // DCEVM: TODO: Check if this additional branch affects normal execution time. + // DCEVM: TODO: Check the exact semantic (with respect to destoying registers) of call_VM + // DCEVM: FIXME: What exactly should we store here? + __ push(rcx); // destroyed by Linux arguments passing conventions + __ movptr(r14, rcx); + __ call_VM(rbx, CAST_FROM_FN_PTR(address, InterpreterRuntime::find_correct_interface_method), r14, rax, rbx); + __ pop(rcx); + + // Get receiver klass into rdx - also a null check + __ restore_locals(); // restore r14 + __ load_klass(rdx, rcx); + __ verify_oop(rdx); + + // DCEVM: TODO: Check if resolved method could be null. + + // profile this call + __ profile_virtual_call(rdx, r13, r14); + + __ jump_from_interpreted(rbx, rdx); + + __ bind(notOld); + // Get receiver klass into rdx - also a null check __ restore_locals(); // restore r14 __ null_check(rcx, oopDesc::klass_offset_in_bytes()); diff --git a/src/os/bsd/vm/attachListener_bsd.cpp b/src/os/bsd/vm/attachListener_bsd.cpp index dac5195..e939dcd 100644 --- a/src/os/bsd/vm/attachListener_bsd.cpp +++ b/src/os/bsd/vm/attachListener_bsd.cpp @@ -460,14 +460,14 @@ AttachOperation* AttachListener::dequeue() { void AttachListener::vm_start() { char fn[UNIX_PATH_MAX]; - struct stat64 st; + struct stat st; int ret; int n = snprintf(fn, UNIX_PATH_MAX, "%s/.java_pid%d", os::get_temp_directory(), os::current_process_id()); assert(n < (int)UNIX_PATH_MAX, "java_pid file name buffer overflow"); - RESTARTABLE(::stat64(fn, &st), ret); + RESTARTABLE(::stat(fn, &st), ret); if (ret == 0) { ret = ::unlink(fn); if (ret == -1) { diff --git a/src/share/vm/c1/c1_Compilation.hpp b/src/share/vm/c1/c1_Compilation.hpp index 9a8ca61..196ab25 100644 --- a/src/share/vm/c1/c1_Compilation.hpp +++ b/src/share/vm/c1/c1_Compilation.hpp @@ -242,8 +242,8 @@ class Compilation: public StackObj { #define BAILOUT(msg) { bailout(msg); return; } #define BAILOUT_(msg, res) { bailout(msg); return res; } -#define CHECK_BAILOUT() { if (bailed_out()) return; } -#define CHECK_BAILOUT_(res) { if (bailed_out()) return res; } +#define CHECK_BAILOUT() { if (((CompilerThread *)Thread::current())->should_bailout()) bailout("Aborted externally"); if (bailed_out()) return; } +#define CHECK_BAILOUT_(res) { if (((CompilerThread *)Thread::current())->should_bailout()) bailout("Aborted externally"); if (bailed_out()) return res; } class InstructionMark: public StackObj { diff --git a/src/share/vm/ci/ciEnv.cpp b/src/share/vm/ci/ciEnv.cpp index e20db5d..57f37db 100644 --- a/src/share/vm/ci/ciEnv.cpp +++ b/src/share/vm/ci/ciEnv.cpp @@ -1172,3 +1172,11 @@ void ciEnv::record_out_of_memory_failure() { // If memory is low, we stop compiling methods. 
record_method_not_compilable("out of memory"); } + +// DCEVM: Called after class redefinition to clean up possibly invalidated state. +void ciEnv::cleanup_after_redefinition() { + + if (_factory != NULL) { + _factory->cleanup_after_redefinition(); + } +} diff --git a/src/share/vm/ci/ciEnv.hpp b/src/share/vm/ci/ciEnv.hpp index 103e532..abe2e37 100644 --- a/src/share/vm/ci/ciEnv.hpp +++ b/src/share/vm/ci/ciEnv.hpp @@ -417,6 +417,8 @@ public: void record_failure(const char* reason); void record_method_not_compilable(const char* reason, bool all_tiers = true); void record_out_of_memory_failure(); + + void cleanup_after_redefinition(); }; #endif // SHARE_VM_CI_CIENV_HPP diff --git a/src/share/vm/ci/ciObjectFactory.cpp b/src/share/vm/ci/ciObjectFactory.cpp index e0ab96b..36efef4 100644 --- a/src/share/vm/ci/ciObjectFactory.cpp +++ b/src/share/vm/ci/ciObjectFactory.cpp @@ -296,6 +296,11 @@ ciObject* ciObjectFactory::get(oop key) { // into the table. We need to recompute our index. index = find(keyHandle(), _ci_objects); } + + if (is_found_at(index, keyHandle(), _ci_objects)) { + // DCEVM: Check if this is an error? Can occur when redefining classes. + return _ci_objects->at(index); + } assert(!is_found_at(index, keyHandle(), _ci_objects), "no double insert"); insert(index, new_object, _ci_objects); return new_object; @@ -764,3 +769,50 @@ void ciObjectFactory::print() { _unloaded_instances->length(), _unloaded_klasses->length()); } + +// DCEVM: Resoring the ciObject arrays after class redefinition +void ciObjectFactory::sort_ci_objects(GrowableArray* objects) { + + // Resort the _ci_objects array. The order of two class pointers can be changed during class redefinition. + oop last = NULL; + for (int j = 0; j< objects->length(); j++) { + oop o = objects->at(j)->get_oop(); + if (last >= o) { + int cur_last_index = j - 1; + oop cur_last = last; + while (cur_last >= o) { + + // Swap the two objects to guarantee ordering + ciObject *tmp = objects->at(cur_last_index); + objects->at_put(cur_last_index, objects->at(cur_last_index + 1)); + objects->at_put(cur_last_index + 1, tmp); + + // Decrement index to move one step to the left + cur_last_index--; + if (cur_last_index < 0) { + break; + } + cur_last = objects->at(cur_last_index)->get_oop(); + } + } else { + assert(last < o, "out of order"); + last = o; + } + } + +#ifdef ASSERT + if (CIObjectFactoryVerify) { + oop last = NULL; + for (int j = 0; j< objects->length(); j++) { + oop o = objects->at(j)->get_oop(); + assert(last < o, "out of order"); + last = o; + } + } +#endif // ASSERT +} + +// DCEVM: Called after class redefinition to clean up possibly invalidated state. 
+void ciObjectFactory::cleanup_after_redefinition() { + sort_ci_objects(_ci_objects); +} diff --git a/src/share/vm/ci/ciObjectFactory.hpp b/src/share/vm/ci/ciObjectFactory.hpp index 26cc2c3..855a4ac 100644 --- a/src/share/vm/ci/ciObjectFactory.hpp +++ b/src/share/vm/ci/ciObjectFactory.hpp @@ -38,6 +38,7 @@ class ciObjectFactory : public ResourceObj { friend class VMStructs; friend class ciEnv; + friend class CompileBroker; private: static volatile bool _initialized; @@ -137,6 +138,11 @@ public: void print_contents(); void print(); + +private: + + static void sort_ci_objects(GrowableArray<ciObject*>* objects); + void cleanup_after_redefinition(); }; #endif // SHARE_VM_CI_CIOBJECTFACTORY_HPP diff --git a/src/share/vm/classfile/classFileParser.cpp b/src/share/vm/classfile/classFileParser.cpp index 6ffa4bf..4ea4e2b 100644 --- a/src/share/vm/classfile/classFileParser.cpp +++ b/src/share/vm/classfile/classFileParser.cpp @@ -795,6 +795,7 @@ objArrayHandle ClassFileParser::parse_interfaces(constantPoolHandle cp, Handle class_loader, Handle protection_domain, Symbol* class_name, + KlassHandle old_klass, TRAPS) { ClassFileStream* cfs = stream(); assert(length > 0, "only called for length>0"); @@ -813,6 +814,9 @@ objArrayHandle ClassFileParser::parse_interfaces(constantPoolHandle cp, interface_index, CHECK_(nullHandle)); if (cp->tag_at(interface_index).is_klass()) { interf = KlassHandle(THREAD, cp->resolved_klass_at(interface_index)); + if (!old_klass.is_null() && !interf->is_newest_version()) { + interf = KlassHandle(THREAD, interf->newest_version()); + } } else { Symbol* unresolved_klass = cp->klass_name_at(interface_index); @@ -825,6 +829,9 @@ objArrayHandle ClassFileParser::parse_interfaces(constantPoolHandle cp, klassOop k = SystemDictionary::resolve_super_or_fail(class_name, unresolved_klass, class_loader, protection_domain, false, CHECK_(nullHandle)); + if (!old_klass.is_null()) { + k = k->klass_part()->newest_version(); + } interf = KlassHandle(THREAD, k); } @@ -1912,6 +1919,8 @@ methodHandle ClassFileParser::parse_method(constantPoolHandle cp, bool is_interf int runtime_invisible_parameter_annotations_length = 0; u1* annotation_default = NULL; int annotation_default_length = 0; + u2 code_section_table_length; + typeArrayHandle code_section_table; // Parse code and exceptions attribute u2 method_attributes_count = cfs->get_u2_fast(); @@ -2081,6 +2090,24 @@ methodHandle ClassFileParser::parse_method(constantPoolHandle cp, bool is_interf parse_stackmap_table(code_attribute_length, CHECK_(nullHandle)); stackmap_data = typeArrayHandle(THREAD, sm); parsed_stackmap_attribute = true; + } else if (UseMethodForwardPoints && cp->symbol_at(code_attribute_name_index) == vmSymbols::tag_code_sections()) { + int length = code_attribute_length; + int value_count = length / sizeof(u2); + int line_count = length / 3; + if (TraceRedefineClasses >= 3) { + tty->print_cr("Found code section attribute when loading class with %d entries and %d lines", value_count, line_count); + } + code_section_table_length = value_count; + code_section_table = oopFactory::new_permanent_shortArray(value_count, CHECK_NULL); + code_section_table->set_length(value_count); + + for (int i = 0; i < value_count; ++i) { + u2 value = cfs->get_u2(CHECK_(nullHandle)); + code_section_table->short_at_put(i, value); + if (TraceRedefineClasses >= 4) { + tty->print_cr("Code section table at %d: %d", i, value); + } + } } else { // Skip unknown attributes cfs->skip_u1(code_attribute_length, CHECK_(nullHandle)); @@ -2206,6 +2233,18 @@ methodHandle
ClassFileParser::parse_method(constantPoolHandle cp, bool is_interf } #endif + // DCEVM: TODO: Get a different solution for the problem of method forward + // points and variable sized interpreter frames. + if (UseMethodForwardPoints) { + if (max_stack > MethodForwardPointsMaxStack) { + fatal(err_msg("Method has too large stack (%d), increase the value of MethodForwardPointsMaxStack (%d)", max_stack, MethodForwardPointsMaxStack)); + } + if (max_locals > MethodForwardPointsMaxLocals) { + fatal(err_msg("Method has too many locals (%d), increase the value of MethodForwardPointsMaxLocals (%d)", max_locals, MethodForwardPointsMaxLocals)); + } + max_stack = MethodForwardPointsMaxStack; + max_locals = MethodForwardPointsMaxLocals; + } // Fill in code attribute information m->set_max_stack(max_stack); m->set_max_locals(max_locals); @@ -2219,6 +2258,8 @@ methodHandle ClassFileParser::parse_method(constantPoolHandle cp, bool is_interf */ m->constMethod()->set_stackmap_data(stackmap_data()); + m->constMethod()->set_code_section_table(code_section_table()); + // Copy byte codes m->set_code(code_start); @@ -2792,6 +2833,15 @@ void ClassFileParser::parse_classfile_attributes(constantPoolHandle cp, "Invalid Deprecated classfile attribute length %u in class file %s", attribute_length, CHECK); } + } else if (tag == vmSymbols::tag_field_redefinition_policy()) { + // DCEVM: Check for deleted field attribute + _field_redefinition_policy = cfs->get_u1_fast(); + } else if (tag == vmSymbols::tag_static_field_redefinition_policy()) { + // DCEVM: Check for deleted static field attribute + _static_field_redefinition_policy = cfs->get_u1_fast(); + } else if (tag == vmSymbols::tag_method_redefinition_policy()) { + // DCEVM: Check for deleted method attribute + _method_redefinition_policy = cfs->get_u1_fast(); } else if (_major_version >= JAVA_1_5_VERSION) { if (tag == vmSymbols::tag_signature()) { if (attribute_length != 2) { @@ -2895,6 +2945,17 @@ void ClassFileParser::apply_parsed_class_attributes(instanceKlassHandle k) { } k->set_inner_classes(_inner_classes()); k->set_class_annotations(_annotations()); + + + if (_field_redefinition_policy != 0xff) { + k->set_field_redefinition_policy(_field_redefinition_policy); + } + if (_static_field_redefinition_policy != 0xff) { + k->set_static_field_redefinition_policy(_static_field_redefinition_policy); + } + if (_method_redefinition_policy != 0xff) { + k->set_method_redefinition_policy(_method_redefinition_policy); + } } typeArrayHandle ClassFileParser::assemble_annotations(u1* runtime_visible_annotations, @@ -2918,9 +2979,126 @@ typeArrayHandle ClassFileParser::assemble_annotations(u1* runtime_visible_annota } +// DCEVM: Finds the super symbols by reading the bytes of the class and returns +// them in a growable array. +void ClassFileParser::findSuperSymbols(Symbol* name, + Handle class_loader, + Handle protection_domain, + KlassHandle old_klass, + GrowableArray<Symbol*> &handles, + TRAPS) { + + _cp_patches = NULL; + // So that JVMTI can cache class file in the state before retransformable agents + // have modified it + unsigned char *cached_class_file_bytes = NULL; + + ClassFileStream* cfs = stream(); + + _has_finalizer = _has_empty_finalizer = _has_vanilla_constructor = false; + + instanceKlassHandle nullHandle; + + // Save the class file name for easier error message printing. + _class_name = name != NULL ?
name : vmSymbols::unknown_class_name(); + + cfs->guarantee_more(8, CHECK); // magic, major, minor + // Magic value + u4 magic = cfs->get_u4_fast(); + if (magic != JAVA_CLASSFILE_MAGIC) { + // Invalid class file! + return; + } + + // Version numbers + u2 minor_version = cfs->get_u2_fast(); + u2 major_version = cfs->get_u2_fast(); + + // Check version numbers - we check this even with verifier off + if (!is_supported_version(major_version, minor_version)) { + + // Unsupported version! + return; + } + + _major_version = major_version; + _minor_version = minor_version; + + + // Check if verification needs to be relaxed for this class file + // Do not restrict it to jdk1.0 or jdk1.1 to maintain backward compatibility (4982376) + _relax_verify = Verifier::relax_verify_for(class_loader()); + _need_verify = false; + + // Constant pool + constantPoolHandle cp = parse_constant_pool(class_loader(), CHECK); + int cp_size = cp->length(); + + cfs->guarantee_more(8, CHECK); // flags, this_class, super_class, infs_len + + // Access flags + AccessFlags access_flags; + jint flags = cfs->get_u2_fast() & JVM_RECOGNIZED_CLASS_MODIFIERS; + + if ((flags & JVM_ACC_INTERFACE) && _major_version < JAVA_6_VERSION) { + // Set abstract bit for old class files for backward compatibility + flags |= JVM_ACC_ABSTRACT; + } + access_flags.set_flags(flags); + + // This class and superclass + instanceKlassHandle super_klass; + u2 this_class_index = cfs->get_u2_fast(); + check_property( + valid_cp_range(this_class_index, cp_size) && + cp->tag_at(this_class_index).is_unresolved_klass(), + "Invalid this class index %u in constant pool in class file %s", + this_class_index, CHECK); + + Symbol* class_name = cp->unresolved_klass_at(this_class_index); + assert(class_name != NULL, "class_name can't be null"); + + // Update _class_name which could be null previously to be class_name + _class_name = class_name; + + // DCEVM: DO NOT release all handles when parsing is done + {// HandleMark hm(THREAD); + + // Checks if name in class file matches requested name + if (name != NULL && class_name != name) { + return; + } + + u2 super_class_index = cfs->get_u2_fast(); + + if (super_class_index != 0) { + Symbol* super_class = cp->klass_name_at(super_class_index); + handles.append(super_class); + } else { + // DCEVM: This redefinition must be for the Object class. 
+ } + + // Interfaces + u2 itfs_len = cfs->get_u2_fast(); + objArrayHandle local_interfaces; + if (itfs_len == 0) { + local_interfaces = objArrayHandle(THREAD, Universe::the_empty_system_obj_array()); + } else { + local_interfaces = parse_interfaces(cp, itfs_len, class_loader, protection_domain, _class_name, old_klass, CHECK); + } + + for (int i=0; i<local_interfaces->length(); i++) { + oop o = local_interfaces->obj_at(i); + Symbol* interface_handle = ((klassOop)o)->klass_part()->name(); + handles.append(interface_handle); + } + } +} + instanceKlassHandle ClassFileParser::parseClassFile(Symbol* name, Handle class_loader, Handle protection_domain, + KlassHandle old_klass, KlassHandle host_klass, GrowableArray<Handle>* cp_patches, TempNewSymbol& parsed_name, @@ -2971,10 +3149,13 @@ instanceKlassHandle ClassFileParser::parseClassFile(Symbol* name, unsigned char* ptr = cfs->buffer(); unsigned char* end_ptr = cfs->buffer() + cfs->length(); + bool pretend_new_universe = Thread::current()->pretend_new_universe(); + Thread::current()->set_pretend_new_universe(false); JvmtiExport::post_class_file_load_hook(name, class_loader, protection_domain, &ptr, &end_ptr, &cached_class_file_bytes, &cached_class_file_length); + Thread::current()->set_pretend_new_universe(pretend_new_universe); if (ptr != cfs->buffer()) { // JVMTI agent has modified class file data. @@ -3130,7 +3311,11 @@ instanceKlassHandle ClassFileParser::parseClassFile(Symbol* name, // However, make sure it is not an array type. bool is_array = false; if (cp->tag_at(super_class_index).is_klass()) { - super_klass = instanceKlassHandle(THREAD, cp->resolved_klass_at(super_class_index)); + klassOop resolved_klass = cp->resolved_klass_at(super_class_index); + if (!old_klass.is_null()) { + resolved_klass = resolved_klass->klass_part()->newest_version(); + } + super_klass = instanceKlassHandle(THREAD, resolved_klass); if (_need_verify) is_array = super_klass->oop_is_array(); } else if (_need_verify) { @@ -3148,7 +3333,7 @@ instanceKlassHandle ClassFileParser::parseClassFile(Symbol* name, if (itfs_len == 0) { local_interfaces = objArrayHandle(THREAD, Universe::the_empty_system_obj_array()); } else { - local_interfaces = parse_interfaces(cp, itfs_len, class_loader, protection_domain, _class_name, CHECK_(nullHandle)); + local_interfaces = parse_interfaces(cp, itfs_len, class_loader, protection_domain, _class_name, old_klass, CHECK_(nullHandle)); } u2 java_fields_count = 0; @@ -3202,7 +3387,9 @@ instanceKlassHandle ClassFileParser::parseClassFile(Symbol* name, protection_domain, true, CHECK_(nullHandle)); - + if (!old_klass.is_null()) { + k = k->klass_part()->newest_version(); + } KlassHandle kh (THREAD, k); super_klass = instanceKlassHandle(THREAD, kh()); } @@ -3591,6 +3778,19 @@ instanceKlassHandle ClassFileParser::parseClassFile(Symbol* name, rt = REF_NONE; } else { rt = super_klass->reference_type(); + + // DCEVM: With class redefinition, it can also happen that special classes are loaded.
+ if (name == vmSymbols::java_lang_ref_Reference()) { + rt = REF_OTHER; + } else if (name == vmSymbols::java_lang_ref_SoftReference()) { + rt = REF_SOFT; + } else if (name == vmSymbols::java_lang_ref_WeakReference()) { + rt = REF_WEAK; + } else if (name == vmSymbols::java_lang_ref_FinalReference()) { + rt = REF_FINAL; + } else if (name == vmSymbols::java_lang_ref_PhantomReference()) { + rt = REF_PHANTOM; + } } // We can now create the basic klassOop for this klass @@ -3691,7 +3891,7 @@ instanceKlassHandle ClassFileParser::parseClassFile(Symbol* name, fill_oop_maps(this_klass, nonstatic_oop_map_count, nonstatic_oop_offsets, nonstatic_oop_counts); // Fill in has_finalizer, has_vanilla_constructor, and layout_helper - set_precomputed_flags(this_klass); + set_precomputed_flags(this_klass, old_klass); // reinitialize modifiers, using the InnerClasses attribute int computed_modifiers = this_klass->compute_modifier_flags(CHECK_(nullHandle)); @@ -3714,6 +3914,10 @@ instanceKlassHandle ClassFileParser::parseClassFile(Symbol* name, // Allocate mirror and initialize static fields java_lang_Class::create_mirror(this_klass, CHECK_(nullHandle)); + if (rt == REF_OTHER) { + instanceRefKlass::update_nonstatic_oop_maps(ik); + } + ClassLoadingService::notify_class_loaded(instanceKlass::cast(this_klass()), false /* not shared class */); @@ -3856,7 +4060,7 @@ void ClassFileParser::fill_oop_maps(instanceKlassHandle k, } -void ClassFileParser::set_precomputed_flags(instanceKlassHandle k) { +void ClassFileParser::set_precomputed_flags(instanceKlassHandle k, KlassHandle old_klass) { klassOop super = k->super(); // Check if this klass has an empty finalize method (i.e. one with return bytecode only), @@ -3864,7 +4068,9 @@ void ClassFileParser::set_precomputed_flags(instanceKlassHandle k) { if (!_has_empty_finalizer) { if (_has_finalizer || (super != NULL && super->klass_part()->has_finalizer())) { - k->set_has_finalizer(); + if (old_klass.is_null() || old_klass->has_finalizer()) { + k->set_has_finalizer(); + } } } @@ -3880,7 +4086,7 @@ void ClassFileParser::set_precomputed_flags(instanceKlassHandle k) { // Check if this klass supports the java.lang.Cloneable interface if (SystemDictionary::Cloneable_klass_loaded()) { - if (k->is_subtype_of(SystemDictionary::Cloneable_klass())) { + if (k->is_subtype_of(SystemDictionary::Cloneable_klass()) || k->is_subtype_of(SystemDictionary::Cloneable_klass()->klass_part()->newest_version())) { k->set_is_cloneable(); } } diff --git a/src/share/vm/classfile/classFileParser.hpp b/src/share/vm/classfile/classFileParser.hpp index 314ec5e..a35bf67 100644 --- a/src/share/vm/classfile/classFileParser.hpp +++ b/src/share/vm/classfile/classFileParser.hpp @@ -64,6 +64,9 @@ class ClassFileParser VALUE_OBJ_CLASS_SPEC { int _sde_length; typeArrayHandle _inner_classes; typeArrayHandle _annotations; + u1 _field_redefinition_policy; + u1 _static_field_redefinition_policy; + u1 _method_redefinition_policy; void set_class_synthetic_flag(bool x) { _synthetic_flag = x; } void set_class_sourcefile(Symbol* x) { _sourcefile = x; } @@ -151,6 +154,7 @@ class ClassFileParser VALUE_OBJ_CLASS_SPEC { Handle class_loader, Handle protection_domain, Symbol* class_name, + KlassHandle old_klass, TRAPS); // Field parsing @@ -237,7 +241,7 @@ class ClassFileParser VALUE_OBJ_CLASS_SPEC { unsigned int nonstatic_oop_map_count, int* nonstatic_oop_offsets, unsigned int* nonstatic_oop_counts); - void set_precomputed_flags(instanceKlassHandle k); + void set_precomputed_flags(instanceKlassHandle k, KlassHandle old_klass); 
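// Editor's note: illustrative sketch only, not part of the patch. The three redefinition-policy
// bytes declared above are kept at 0xff ("attribute absent") by the ClassFileParser constructor
// and are only copied onto the klass in apply_parsed_class_attributes when the corresponding
// attribute was actually present in the class file. A minimal standalone model of that sentinel
// pattern (all names here are hypothetical) could look like this:
#include <cstdint>
#include <cstdio>

struct ParsedPolicies {
  uint8_t field_policy;          // 0xff == not specified in the class file
  uint8_t static_field_policy;
  uint8_t method_policy;
  ParsedPolicies() : field_policy(0xff), static_field_policy(0xff), method_policy(0xff) {}
};

struct KlassPolicies {
  uint8_t field_policy;
  uint8_t static_field_policy;
  uint8_t method_policy;
  KlassPolicies() : field_policy(0), static_field_policy(0), method_policy(0) {}

  // Mirrors the idea in apply_parsed_class_attributes: only override when an attribute was seen.
  void apply(const ParsedPolicies& p) {
    if (p.field_policy != 0xff)        field_policy        = p.field_policy;
    if (p.static_field_policy != 0xff) static_field_policy = p.static_field_policy;
    if (p.method_policy != 0xff)       method_policy       = p.method_policy;
  }
};

int main() {
  ParsedPolicies parsed;
  parsed.method_policy = 1;   // pretend only a MethodRedefinitionPolicy attribute was present
  KlassPolicies k;
  k.apply(parsed);
  std::printf("field=%u static=%u method=%u\n", k.field_policy, k.static_field_policy, k.method_policy);
  return 0;
}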
objArrayHandle compute_transitive_interfaces(instanceKlassHandle super, objArrayHandle local_ifs, TRAPS); @@ -337,7 +341,12 @@ class ClassFileParser VALUE_OBJ_CLASS_SPEC { public: // Constructor - ClassFileParser(ClassFileStream* st) { set_stream(st); } + ClassFileParser(ClassFileStream* st) { + set_stream(st); + _field_redefinition_policy = 0xff; + _static_field_redefinition_policy = 0xff; + _method_redefinition_policy = 0xff; + } // Parse .class file and return new klassOop. The klassOop is not hooked up // to the system dictionary or any other structures, so a .class file can @@ -349,21 +358,33 @@ class ClassFileParser VALUE_OBJ_CLASS_SPEC { instanceKlassHandle parseClassFile(Symbol* name, Handle class_loader, Handle protection_domain, + KlassHandle old_klass, TempNewSymbol& parsed_name, bool verify, TRAPS) { KlassHandle no_host_klass; - return parseClassFile(name, class_loader, protection_domain, no_host_klass, NULL, parsed_name, verify, THREAD); + return parseClassFile(name, class_loader, protection_domain, old_klass, no_host_klass, NULL, parsed_name, verify, THREAD); } instanceKlassHandle parseClassFile(Symbol* name, Handle class_loader, Handle protection_domain, + KlassHandle old_klass, KlassHandle host_klass, GrowableArray<Handle>* cp_patches, TempNewSymbol& parsed_name, bool verify, TRAPS); + static void initialize_static_field(fieldDescriptor* fd, TRAPS); + + // DCEVM: Creates symbol handles for the super class and the interfaces + void findSuperSymbols(Symbol* name, + Handle class_loader, + Handle protection_domain, + KlassHandle old_klass, + GrowableArray<Symbol*> &handles, + TRAPS); + // Verifier checks static void check_super_class_access(instanceKlassHandle this_klass, TRAPS); static void check_super_interface_access(instanceKlassHandle this_klass, TRAPS); diff --git a/src/share/vm/classfile/classLoader.cpp b/src/share/vm/classfile/classLoader.cpp index a2e61a4..450e19f 100644 --- a/src/share/vm/classfile/classLoader.cpp +++ b/src/share/vm/classfile/classLoader.cpp @@ -915,6 +915,7 @@ instanceKlassHandle ClassLoader::load_classfile(Symbol* h_name, TRAPS) { instanceKlassHandle result = parser.parseClassFile(h_name, class_loader, protection_domain, + KlassHandle(), parsed_name, false, CHECK_(h)); diff --git a/src/share/vm/classfile/dictionary.cpp b/src/share/vm/classfile/dictionary.cpp index 78e76cc..ee21f3a 100644 --- a/src/share/vm/classfile/dictionary.cpp +++ b/src/share/vm/classfile/dictionary.cpp @@ -326,6 +326,21 @@ void Dictionary::classes_do(void f(klassOop)) { } } + +// DCEVM: Just the classes from defining class loaders +void Dictionary::classes_do(ObjectClosure *closure) { + for (int index = 0; index < table_size(); index++) { + for (DictionaryEntry* probe = bucket(index); + probe != NULL; + probe = probe->next()) { + klassOop k = probe->klass(); + if (probe->loader() == instanceKlass::cast(k)->class_loader()) { + closure->do_object(k); + } + } + } +} + // Added for initialize_itable_for_klass to handle exceptions // Just the classes from defining class loaders void Dictionary::classes_do(void f(klassOop, TRAPS), TRAPS) { @@ -433,6 +448,33 @@ void Dictionary::add_klass(Symbol* class_name, Handle class_loader, add_entry(index, entry); } +// DCEVM: Updates the klass entry to point to the new klassOop. Necessary only for class redefinition.
+bool Dictionary::update_klass(int index, unsigned int hash, Symbol* name, Handle loader, KlassHandle k, KlassHandle old_class) { + + // There are several entries for the same class in the dictionary: One extra entry for each parent classloader of the classloader of the class. + bool found = false; + for (int index = 0; index < table_size(); index++) { + for (DictionaryEntry* entry = bucket(index); entry != NULL; entry = entry->next()) { + if (entry->klass() == old_class()) { + entry->set_literal(k()); + found = true; + } + } + } + + return found; +} + +// DCEVM: Undo previous updates to the system dictionary +void Dictionary::rollback_redefinition() { + for (int index = 0; index < table_size(); index++) { + for (DictionaryEntry* entry = bucket(index); entry != NULL; entry = entry->next()) { + if (entry->klass()->klass_part()->is_redefining()) { + entry->set_literal(entry->klass()->klass_part()->old_version()); + } + } + } +} // This routine does not lock the system dictionary. // @@ -459,12 +501,22 @@ DictionaryEntry* Dictionary::get_entry(int index, unsigned int hash, return NULL; } +// DCEVM: return old version if we are not in the new universe? +klassOop Dictionary::intercept_for_version(klassOop k) { + if (k == NULL) return k; + + if (k->klass_part()->is_redefining() && !Thread::current()->pretend_new_universe()) { + return k->klass_part()->old_version(); + } + + return k; +} klassOop Dictionary::find(int index, unsigned int hash, Symbol* name, Handle loader, Handle protection_domain, TRAPS) { DictionaryEntry* entry = get_entry(index, hash, name, loader); if (entry != NULL && entry->is_valid_protection_domain(protection_domain)) { - return entry->klass(); + return intercept_for_version(entry->klass()); } else { return NULL; } @@ -477,7 +529,7 @@ klassOop Dictionary::find_class(int index, unsigned int hash, assert (index == index_for(name, loader), "incorrect index?"); DictionaryEntry* entry = get_entry(index, hash, name, loader); - return (entry != NULL) ? entry->klass() : (klassOop)NULL; + return intercept_for_version((entry != NULL) ? entry->klass() : (klassOop)NULL); } @@ -489,7 +541,7 @@ klassOop Dictionary::find_shared_class(int index, unsigned int hash, assert (index == index_for(name, Handle()), "incorrect index?"); DictionaryEntry* entry = get_entry(index, hash, name, Handle()); - return (entry != NULL) ? entry->klass() : (klassOop)NULL; + return intercept_for_version((entry != NULL) ? 
entry->klass() : (klassOop)NULL); } diff --git a/src/share/vm/classfile/dictionary.hpp b/src/share/vm/classfile/dictionary.hpp index bd33760..186d0eb 100644 --- a/src/share/vm/classfile/dictionary.hpp +++ b/src/share/vm/classfile/dictionary.hpp @@ -73,6 +73,10 @@ public: void add_klass(Symbol* class_name, Handle class_loader,KlassHandle obj); + bool update_klass(int index, unsigned int hash, Symbol* name, Handle loader, KlassHandle k, KlassHandle old_class); + + void rollback_redefinition(); + klassOop find_class(int index, unsigned int hash, Symbol* name, Handle loader); @@ -89,6 +93,7 @@ public: void classes_do(void f(klassOop, TRAPS), TRAPS); void classes_do(void f(klassOop, oop)); void classes_do(void f(klassOop, oop, TRAPS), TRAPS); + void classes_do(ObjectClosure *closure); void methods_do(void f(methodOop)); @@ -105,6 +110,7 @@ public: bool do_unloading(BoolObjectClosure* is_alive); // Protection domains + static klassOop intercept_for_version(klassOop k); klassOop find(int index, unsigned int hash, Symbol* name, Handle loader, Handle protection_domain, TRAPS); bool is_valid_protection_domain(int index, unsigned int hash, diff --git a/src/share/vm/classfile/javaClasses.cpp b/src/share/vm/classfile/javaClasses.cpp index f8b10b3..cb68d82 100644 --- a/src/share/vm/classfile/javaClasses.cpp +++ b/src/share/vm/classfile/javaClasses.cpp @@ -1798,7 +1798,7 @@ Handle java_lang_reflect_Method::create(TRAPS) { klassOop klass = SystemDictionary::reflect_Method_klass(); // This class is eagerly initialized during VM initialization, since we keep a refence // to one of the methods - assert(instanceKlass::cast(klass)->is_initialized(), "must be initialized"); + assert(instanceKlass::cast(klass)->is_initialized() || klass->klass_part()->old_version() != NULL, "must be initialized"); return instanceKlass::cast(klass)->allocate_instance_handle(CHECK_NH); } diff --git a/src/share/vm/classfile/javaClasses.hpp b/src/share/vm/classfile/javaClasses.hpp index b741cfa..8ce5287 100644 --- a/src/share/vm/classfile/javaClasses.hpp +++ b/src/share/vm/classfile/javaClasses.hpp @@ -213,7 +213,6 @@ class java_lang_String : AllStatic { class java_lang_Class : AllStatic { friend class VMStructs; - private: // The fake offsets are added by the class loader when java.lang.Class is loaded diff --git a/src/share/vm/classfile/loaderConstraints.cpp b/src/share/vm/classfile/loaderConstraints.cpp index 8650cd9..965cce2 100644 --- a/src/share/vm/classfile/loaderConstraints.cpp +++ b/src/share/vm/classfile/loaderConstraints.cpp @@ -449,7 +449,7 @@ void LoaderConstraintTable::verify(Dictionary* dictionary, if (k != NULL) { // We found the class in the system dictionary, so we should // make sure that the klassOop matches what we already have. - guarantee(k == probe->klass(), "klass should be in dictionary"); + guarantee(k == probe->klass()->klass_part()->newest_version(), "klass should be in dictionary"); } else { // If we don't find the class in the system dictionary, it // has to be in the placeholders table. 
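Editor's note: illustrative sketch only, not part of the patch. Dictionary::intercept_for_version in the dictionary.cpp hunk above hides a klass that is still being redefined unless the looking-up thread has set its "pretend new universe" flag, so ordinary lookups keep resolving to the old version until the redefinition commits. A minimal standalone model of that visibility rule, using hypothetical types, is:

#include <cstdio>

struct Klass {
  bool   is_redefining;
  Klass* old_version;
  Klass() : is_redefining(false), old_version(0) {}
};

// Stands in for Thread::pretend_new_universe() from the patch.
static bool pretend_new_universe = false;

Klass* intercept_for_version(Klass* k) {
  if (k == 0) return k;
  // While redefinition is in progress, normal lookups still see the old version.
  if (k->is_redefining && !pretend_new_universe) {
    return k->old_version;
  }
  return k;
}

int main() {
  Klass old_k, new_k;
  new_k.is_redefining = true;
  new_k.old_version = &old_k;
  std::printf("normal lookup sees old version: %d\n", intercept_for_version(&new_k) == &old_k);
  pretend_new_universe = true;  // the redefining thread opts into the new universe
  std::printf("redefining thread sees new version: %d\n", intercept_for_version(&new_k) == &new_k);
  return 0;
}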
diff --git a/src/share/vm/classfile/loaderConstraints.hpp b/src/share/vm/classfile/loaderConstraints.hpp index d01b2c4..1ad80f7 100644 --- a/src/share/vm/classfile/loaderConstraints.hpp +++ b/src/share/vm/classfile/loaderConstraints.hpp @@ -106,7 +106,7 @@ public: klassOop klass() { return literal(); } klassOop* klass_addr() { return literal_addr(); } - void set_klass(klassOop k) { set_literal(k); } + void set_klass(klassOop k) { set_literal(k); assert(k == NULL || !k->klass_part()->is_redefining(), "just checking"); } LoaderConstraintEntry* next() { return (LoaderConstraintEntry*)HashtableEntry::next(); diff --git a/src/share/vm/classfile/systemDictionary.cpp b/src/share/vm/classfile/systemDictionary.cpp index 899153a..fa45c6d 100644 --- a/src/share/vm/classfile/systemDictionary.cpp +++ b/src/share/vm/classfile/systemDictionary.cpp @@ -157,6 +157,7 @@ klassOop SystemDictionary::resolve_or_fail(Symbol* class_name, Handle class_load // can return a null klass klass = handle_resolution_exception(class_name, class_loader, protection_domain, throw_error, k_h, THREAD); } + assert(klass == NULL || klass->klass_part()->is_newest_version() || klass->klass_part()->newest_version()->klass_part()->is_redefining(), "must be"); return klass; } @@ -199,7 +200,8 @@ klassOop SystemDictionary::resolve_or_fail(Symbol* class_name, // Forwards to resolve_instance_class_or_null klassOop SystemDictionary::resolve_or_null(Symbol* class_name, Handle class_loader, Handle protection_domain, TRAPS) { - assert(!THREAD->is_Compiler_thread(), + // DCEVM: Check if this relaxing of the condition is correct? Test case hs203t004 failing otherwise. + assert(!THREAD->is_Compiler_thread() || JvmtiThreadState::state_for(JavaThread::current())->get_class_being_redefined() != NULL, err_msg("can not load classes with compiler thread: class=%s, classloader=%s", class_name->as_C_string(), class_loader.is_null() ? "null" : class_loader->klass()->klass_part()->name()->as_C_string())); @@ -961,6 +963,7 @@ klassOop SystemDictionary::parse_stream(Symbol* class_name, instanceKlassHandle k = ClassFileParser(st).parseClassFile(class_name, class_loader, protection_domain, + KlassHandle(), host_klass, cp_patches, parsed_name, @@ -1022,7 +1025,14 @@ klassOop SystemDictionary::resolve_from_stream(Symbol* class_name, Handle protection_domain, ClassFileStream* st, bool verify, + KlassHandle old_class, TRAPS) { + bool redefine_classes_locked = false; + if (!Thread::current()->redefine_classes_mutex()->owned_by_self()) { + Thread::current()->redefine_classes_mutex()->lock(); + redefine_classes_locked = true; + } + // Classloaders that support parallelism, e.g. 
bootstrap classloader, // or all classloaders with UnsyncloadClass do not acquire lock here bool DoObjectLock = true; @@ -1050,9 +1060,14 @@ klassOop SystemDictionary::resolve_from_stream(Symbol* class_name, instanceKlassHandle k = ClassFileParser(st).parseClassFile(class_name, class_loader, protection_domain, + old_class, parsed_name, verify, THREAD); + if (!old_class.is_null() && !k.is_null()) { + k->set_redefining(true); + k->set_old_version(old_class()); + } const char* pkg = "java/"; if (!HAS_PENDING_EXCEPTION && @@ -1087,13 +1102,18 @@ klassOop SystemDictionary::resolve_from_stream(Symbol* class_name, // Add class just loaded // If a class loader supports parallel classloading handle parallel define requests // find_or_define_instance_class may return a different instanceKlass - if (is_parallelCapable(class_loader)) { + // (tw) TODO: for class redefinition the parallel version does not work, check if this is a problem? + if (is_parallelCapable(class_loader) && old_class.is_null()) { k = find_or_define_instance_class(class_name, class_loader, k, THREAD); } else { - define_instance_class(k, THREAD); + define_instance_class(k, old_class, THREAD); } } + if (redefine_classes_locked) { + Thread::current()->redefine_classes_mutex()->unlock(); + } + // If parsing the class file or define_instance_class failed, we // need to remove the placeholder added on our behalf. But we // must make sure parsed_name is valid first (it won't be if we had @@ -1122,7 +1142,7 @@ klassOop SystemDictionary::resolve_from_stream(Symbol* class_name, MutexLocker mu(SystemDictionary_lock, THREAD); klassOop check = find_class(parsed_name, class_loader); - assert(check == k(), "should be present in the dictionary"); + assert((check == k() && !k->is_redefining()) || (k->is_redefining() && check == k->old_version()), "should be present in the dictionary"); klassOop check2 = find_class(h_name, h_loader); assert(check == check2, "name inconsistancy in SystemDictionary"); @@ -1349,7 +1369,11 @@ instanceKlassHandle SystemDictionary::load_instance_class(Symbol* class_name, Ha } } -void SystemDictionary::define_instance_class(instanceKlassHandle k, TRAPS) { +void SystemDictionary::rollback_redefinition() { + dictionary()->rollback_redefinition(); +} + +void SystemDictionary::define_instance_class(instanceKlassHandle k, KlassHandle old_class, TRAPS) { Handle class_loader_h(THREAD, k->class_loader()); @@ -1376,13 +1400,23 @@ void SystemDictionary::define_instance_class(instanceKlassHandle k, TRAPS) { Symbol* name_h = k->name(); unsigned int d_hash = dictionary()->compute_hash(name_h, class_loader_h); int d_index = dictionary()->hash_to_index(d_hash); - check_constraints(d_index, d_hash, k, class_loader_h, true, CHECK); + + // DCEVM: Update version of the klassOop in the system dictionary + // TODO: Check for thread safety! + if (!old_class.is_null()) { + bool ok = dictionary()->update_klass(d_index, d_hash, name_h, class_loader_h, k, old_class); + assert (ok, "must have found old class and updated!"); + } + check_constraints(d_index, d_hash, k, class_loader_h, old_class.is_null(), CHECK); + + if(!old_class.is_null() && TraceRedefineClasses >= 3){ tty->print_cr("Class has been updated!"); } // Register class just loaded with class loader (placed in Vector) // Note we do this before updating the dictionary, as this can // fail with an OutOfMemoryError (if it does, we will *not* put this // class in the dictionary and will not update the class hierarchy). 
- if (k->class_loader() != NULL) { + // (tw) Only register if not redefining a class. + if (k->class_loader() != NULL && old_class.is_null()) { methodHandle m(THREAD, Universe::loader_addClass_method()); JavaValue result(T_VOID); JavaCallArguments args(class_loader_h); @@ -1408,8 +1442,9 @@ void SystemDictionary::define_instance_class(instanceKlassHandle k, TRAPS) { } k->eager_initialize(THREAD); + // (tw) Only notify jvmti if not redefining a class. // notify jvmti - if (JvmtiExport::should_post_class_load()) { + if (JvmtiExport::should_post_class_load() && old_class.is_null()) { assert(THREAD->is_Java_thread(), "thread->is_Java_thread()"); JvmtiExport::post_class_load((JavaThread *) THREAD, k()); @@ -1482,7 +1517,7 @@ instanceKlassHandle SystemDictionary::find_or_define_instance_class(Symbol* clas } } - define_instance_class(k, THREAD); + define_instance_class(k, KlassHandle(), THREAD); Handle linkage_exception = Handle(); // null handle @@ -1612,6 +1647,14 @@ void SystemDictionary::add_to_hierarchy(instanceKlassHandle k, TRAPS) { Universe::flush_dependents_on(k); } +// (tw) Remove from hierarchy - Undo add_to_hierarchy. +void SystemDictionary::remove_from_hierarchy(instanceKlassHandle k) { + assert(k.not_null(), "just checking"); + + k->remove_from_sibling_list(); + + // TODO: Remove from interfaces. +} // ---------------------------------------------------------------------------- // GC support @@ -1701,6 +1744,24 @@ void SystemDictionary::oops_do(OopClosure* f) { } +// (tw) Iterate over all pre-loaded classes in the dictionary. +void SystemDictionary::preloaded_classes_do(OopClosure *f) { + for (int k = (int)FIRST_WKID; k < (int)WKID_LIMIT; k++) { + f->do_oop((oop*) &_well_known_klasses[k]); + } + + { + for (int i = 0; i < T_VOID+1; i++) { + if (_box_klasses[i] != NULL) { + assert(i >= T_BOOLEAN, "checking"); + f->do_oop((oop*) &_box_klasses[i]); + } + } + } + + // TODO: Check if we need to call FilterFieldsMap +} + void SystemDictionary::preloaded_oops_do(OopClosure* f) { for (int k = (int)FIRST_WKID; k < (int)WKID_LIMIT; k++) { f->do_oop((oop*) &_well_known_klasses[k]); @@ -1733,6 +1794,11 @@ void SystemDictionary::classes_do(void f(klassOop)) { dictionary()->classes_do(f); } +// (tw) Iterate over all classes in the dictionary. 
+void SystemDictionary::classes_do(ObjectClosure *closure) { + dictionary()->classes_do(closure); +} + // Added for initialize_itable_for_klass // Just the classes from defining class loaders // Don't iterate over placeholders @@ -1869,7 +1935,9 @@ void SystemDictionary::initialize_preloaded_classes(TRAPS) { // Preload ref klasses and set reference types instanceKlass::cast(WK_KLASS(Reference_klass))->set_reference_type(REF_OTHER); - instanceRefKlass::update_nonstatic_oop_maps(WK_KLASS(Reference_klass)); + + // (tw) This is now done in parseClassFile in order to support class redefinition + // instanceRefKlass::update_nonstatic_oop_maps(WK_KLASS(Reference_klass)); initialize_wk_klasses_through(WK_KLASS_ENUM_NAME(PhantomReference_klass), scan, CHECK); instanceKlass::cast(WK_KLASS(SoftReference_klass))->set_reference_type(REF_SOFT); @@ -1955,7 +2023,11 @@ void SystemDictionary::check_constraints(int d_index, unsigned int d_hash, // also holds array classes assert(check->klass_part()->oop_is_instance(), "noninstance in systemdictionary"); - if ((defining == true) || (k() != check)) { + if ((defining == true) && ((k() != check) && k->old_version() != check)) { + ResourceMark rm(Thread::current()); + tty->print_cr("(%d / %d) (%s/%s)", k->revision_number(), check->klass_part()->revision_number(), k->name()->as_C_string(), check->klass_part()->name()->as_C_string()); + k()->print(); + check->print(); linkage_error = "loader (instance of %s): attempted duplicate class " "definition for name: \"%s\""; } else { diff --git a/src/share/vm/classfile/systemDictionary.hpp b/src/share/vm/classfile/systemDictionary.hpp index adf82e5..e316f8e 100644 --- a/src/share/vm/classfile/systemDictionary.hpp +++ b/src/share/vm/classfile/systemDictionary.hpp @@ -268,7 +268,7 @@ public: // Resolve from stream (called by jni_DefineClass and JVM_DefineClass) static klassOop resolve_from_stream(Symbol* class_name, Handle class_loader, Handle protection_domain, - ClassFileStream* st, bool verify, TRAPS); + ClassFileStream* st, bool verify, KlassHandle old_class, TRAPS); // Lookup an already loaded class. If not found NULL is returned. static klassOop find(Symbol* class_name, Handle class_loader, Handle protection_domain, TRAPS); @@ -309,6 +309,8 @@ public: // Iterate over all klasses in dictionary // Just the classes from defining class loaders static void classes_do(void f(klassOop)); + static void classes_do(ObjectClosure *closure); + static void preloaded_classes_do(OopClosure *closure); // Added for initialize_itable_for_klass to handle exceptions static void classes_do(void f(klassOop, TRAPS), TRAPS); // All classes, and their class loaders @@ -415,6 +417,8 @@ public: initialize_wk_klasses_until((WKID) limit, start_id, THREAD); } + static void rollback_redefinition(); + public: #define WK_KLASS_DECLARE(name, symbol, option) \ static klassOop name() { return check_klass_##option(_well_known_klasses[WK_KLASS_ENUM_NAME(name)]); } @@ -596,7 +600,7 @@ private: // after waiting, but before reentering SystemDictionary_lock // to preserve lock order semantics. 
static void double_lock_wait(Handle lockObject, TRAPS); - static void define_instance_class(instanceKlassHandle k, TRAPS); + static void define_instance_class(instanceKlassHandle k, KlassHandle old_class, TRAPS); static instanceKlassHandle find_or_define_instance_class(Symbol* class_name, Handle class_loader, instanceKlassHandle k, TRAPS); @@ -615,12 +619,17 @@ private: // Setup link to hierarchy static void add_to_hierarchy(instanceKlassHandle k, TRAPS); +public: + + // Remove link to hierarchy + static void remove_from_hierarchy(instanceKlassHandle k); + +private: // event based tracing static void post_class_load_event(TracingTime start_time, instanceKlassHandle k, Handle initiating_loader); static void post_class_unload_events(BoolObjectClosure* is_alive); -private: // We pass in the hashtable index so we can calculate it outside of // the SystemDictionary_lock. diff --git a/src/share/vm/classfile/verifier.cpp b/src/share/vm/classfile/verifier.cpp index da188bb..53455df 100644 --- a/src/share/vm/classfile/verifier.cpp +++ b/src/share/vm/classfile/verifier.cpp @@ -106,7 +106,7 @@ bool Verifier::relax_verify_for(oop loader) { return !need_verify; } -bool Verifier::verify(instanceKlassHandle klass, Verifier::Mode mode, bool should_verify_class, TRAPS) { +bool Verifier::verify(instanceKlassHandle klass, Verifier::Mode mode, bool should_verify_class, bool may_use_old_verifier, TRAPS) { HandleMark hm; ResourceMark rm(THREAD); @@ -117,7 +117,8 @@ bool Verifier::verify(instanceKlassHandle klass, Verifier::Mode mode, bool shoul const char* klassName = klass->external_name(); bool can_failover = FailOverToOldVerifier && - klass->major_version() < NOFAILOVER_MAJOR_VERSION; + klass->major_version() < NOFAILOVER_MAJOR_VERSION && + may_use_old_verifier; // If the class should be verified, first see if we can use the split // verifier. If not, or if verification fails and FailOverToOldVerifier @@ -138,6 +139,7 @@ bool Verifier::verify(instanceKlassHandle klass, Verifier::Mode mode, bool shoul tty->print_cr( "Fail over class verification to old verifier for: %s", klassName); } + assert(may_use_old_verifier, ""); exception_name = inference_verify( klass, message_buffer, message_buffer_len, THREAD); } @@ -145,6 +147,7 @@ bool Verifier::verify(instanceKlassHandle klass, Verifier::Mode mode, bool shoul exception_message = split_verifier.exception_message(); } } else { + assert(may_use_old_verifier, ""); exception_name = inference_verify( klass, message_buffer, message_buffer_len, THREAD); } @@ -159,6 +162,9 @@ bool Verifier::verify(instanceKlassHandle klass, Verifier::Mode mode, bool shoul } tty->print_cr("End class verification for: %s", klassName); } + } else if (TraceClassInitialization) { + // (tw) Output not verified classes + tty->print_cr("Class %s was not verified", klassName); } if (HAS_PENDING_EXCEPTION) { @@ -210,7 +216,7 @@ bool Verifier::is_eligible_for_verification(instanceKlassHandle klass, bool shou // NOTE: this is called too early in the bootstrapping process to be // guarded by Universe::is_gte_jdk14x_version()/UseNewReflection. 
(refl_magic_klass == NULL || - !klass->is_subtype_of(refl_magic_klass) || + !(klass->is_subtype_of(refl_magic_klass) || klass->is_subtype_of(refl_magic_klass->klass_part()->newest_version())) || VerifyReflectionBytecodes) ); } @@ -517,7 +523,7 @@ void ErrorContext::stackmap_details(outputStream* ss, methodOop method) const { ClassVerifier::ClassVerifier( instanceKlassHandle klass, TRAPS) - : _thread(THREAD), _exception_type(NULL), _message(NULL), _klass(klass) { + : _thread(THREAD), _exception_type(NULL), _message(NULL), _klass(klass->newest_version()), _klass_to_verify(klass) { _this_type = VerificationType::reference_type(klass->name()); // Create list to hold symbols in reference area. _symbols = new GrowableArray<Symbol*>(100, 0, NULL); @@ -547,7 +553,7 @@ void ClassVerifier::verify_class(TRAPS) { _klass->external_name()); } - objArrayHandle methods(THREAD, _klass->methods()); + objArrayHandle methods(THREAD, _klass_to_verify->methods()); int num_methods = methods->length(); for (int index = 0; index < num_methods; index++) { @@ -2444,7 +2450,10 @@ void ClassVerifier::verify_invoke_instructions( VerificationType stack_object_type = current_frame->pop_stack(ref_class_type, CHECK_VERIFY(this)); if (current_type() != stack_object_type) { - assert(cp->cache() == NULL, "not rewritten yet"); + + // (tw) TODO: Check if relaxing the following assertion is correct. For class redefinition we might call the verifier twice. + //assert(cp->cache() == NULL, "not rewritten yet"); + Symbol* ref_class_name = cp->klass_name_at(cp->klass_ref_index_at(index)); // See the comments in verify_field_instructions() for diff --git a/src/share/vm/classfile/verifier.hpp b/src/share/vm/classfile/verifier.hpp index 4457f4a..b1b96f2 100644 --- a/src/share/vm/classfile/verifier.hpp +++ b/src/share/vm/classfile/verifier.hpp @@ -47,7 +47,7 @@ class Verifier : AllStatic { * Otherwise, no exception is thrown and the return indicates the * error. 
*/ - static bool verify(instanceKlassHandle klass, Mode mode, bool should_verify_class, TRAPS); + static bool verify(instanceKlassHandle klass, Mode mode, bool should_verify_class, bool may_use_old_verifier, TRAPS); // Return false if the class is loaded by the bootstrap loader, // or if defineClass was called requesting skipping verification @@ -256,7 +256,10 @@ class ClassVerifier : public StackObj { ErrorContext _error_context; // contains information about an error +public: void verify_method(methodHandle method, TRAPS); + +private: char* generate_code_data(methodHandle m, u4 code_length, TRAPS); void verify_exception_handler_table(u4 code_length, char* code_data, int& min, int& max, TRAPS); @@ -329,6 +332,7 @@ class ClassVerifier : public StackObj { VerificationType object_type() const; + instanceKlassHandle _klass_to_verify; instanceKlassHandle _klass; // the class being verified methodHandle _method; // current method being verified VerificationType _this_type; // the verification type of the current class diff --git a/src/share/vm/classfile/vmSymbols.hpp b/src/share/vm/classfile/vmSymbols.hpp index b4595c6..341de7a 100644 --- a/src/share/vm/classfile/vmSymbols.hpp +++ b/src/share/vm/classfile/vmSymbols.hpp @@ -138,6 +138,10 @@ template(tag_annotation_default, "AnnotationDefault") \ template(tag_enclosing_method, "EnclosingMethod") \ template(tag_bootstrap_methods, "BootstrapMethods") \ + template(tag_static_field_redefinition_policy, "StaticFieldRedefinitionPolicy") \ + template(tag_field_redefinition_policy, "FieldRedefinitionPolicy") \ + template(tag_method_redefinition_policy, "MethodRedefinitionPolicy") \ + template(tag_code_sections, "CodeSections") \ \ /* exception klasses: at least all exceptions thrown by the VM have entries here */ \ template(java_lang_ArithmeticException, "java/lang/ArithmeticException") \ @@ -377,6 +381,10 @@ template(oop_size_name, "oop_size") \ template(static_oop_field_count_name, "static_oop_field_count") \ \ + /* mutator in case of class redefinition */ \ + template(static_transformer_name, "$staticTransformer") \ + template(transformer_name, "$transformer") \ + \ /* non-intrinsic name/signature pairs: */ \ template(register_method_name, "register") \ do_alias(register_method_signature, object_void_signature) \ diff --git a/src/share/vm/compiler/compileBroker.cpp b/src/share/vm/compiler/compileBroker.cpp index 0feca89..cfa1958 100644 --- a/src/share/vm/compiler/compileBroker.cpp +++ b/src/share/vm/compiler/compileBroker.cpp @@ -1181,6 +1181,14 @@ nmethod* CompileBroker::compile_method(methodHandle method, int osr_bci, int comp_level, methodHandle hot_method, int hot_count, const char* comment, Thread* THREAD) { + JavaThread* thread = JavaThread::current(); + if (thread->is_Compiler_thread() && thread->as_CompilerThread()->should_bailout()) { + return NULL; // FIXME: DCEVM: should we do something else? + } + if (instanceKlass::cast(method->method_holder())->is_not_initialized()) { + return NULL; // FIXME: DCEVM: how should we avoid this? + } + // make sure arguments make sense assert(method->method_holder()->klass_part()->oop_is_instance(), "not an instance method"); assert(osr_bci == InvocationEntryBci || (0 <= osr_bci && osr_bci < method->code_size()), "bci out of range"); @@ -1260,6 +1268,7 @@ nmethod* CompileBroker::compile_method(methodHandle method, int osr_bci, } // RedefineClasses() has replaced this method; just return + // (tw) This is important for the new version of hotswapping: Old code will only execute properly in the interpreter! 
if (method->is_old()) { return NULL; } @@ -1592,6 +1601,8 @@ void CompileBroker::compiler_thread_loop() { // Never compile a method if breakpoints are present in it if (method()->number_of_breakpoints() == 0) { + thread->compilation_mutex()->lock(); + thread->set_should_bailout(false); // Compile the method. if ((UseCompiler || AlwaysCompileLoopMethods) && CompileBroker::should_compile_new_jobs()) { #ifdef COMPILER1 @@ -1615,6 +1626,7 @@ void CompileBroker::compiler_thread_loop() { // After compilation is disabled, remove remaining methods from queue method->clear_queued_for_compilation(); } + thread->compilation_mutex()->unlock(); } } } @@ -2164,3 +2176,15 @@ void CompileBroker::print_compiler_threads_on(outputStream* st) { st->cr(); #endif } + +// (tw) Clean up compiler interface after a class redefinition step +void CompileBroker::cleanup_after_redefinition() { + int num_threads = _method_threads->length(); + + ciObjectFactory::sort_ci_objects(ciObjectFactory::_shared_ci_objects); + for (int i=0; i<num_threads; i++) { + if (_method_threads->at(i)->env() != NULL && _method_threads->at(i)->env() != (ciEnv *)badAddress) { + _method_threads->at(i)->env()->cleanup_after_redefinition(); + } + } +} diff --git a/src/share/vm/compiler/compileBroker.hpp b/src/share/vm/compiler/compileBroker.hpp index 29f2b22..37989d1 100644 --- a/src/share/vm/compiler/compileBroker.hpp +++ b/src/share/vm/compiler/compileBroker.hpp @@ -408,6 +408,7 @@ class CompileBroker: AllStatic { static void print_compiler_threads_on(outputStream* st); + static void cleanup_after_redefinition(); static int get_total_compile_count() { return _total_compile_count; } static int get_total_bailout_count() { return _total_bailout_count; } static int get_total_invalidated_count() { return _total_invalidated_count; } diff --git a/src/share/vm/gc_implementation/concurrentMarkSweep/compactibleFreeListSpace.cpp b/src/share/vm/gc_implementation/concurrentMarkSweep/compactibleFreeListSpace.cpp index b0c9ec8..b3298e0 100644 --- a/src/share/vm/gc_implementation/concurrentMarkSweep/compactibleFreeListSpace.cpp +++ b/src/share/vm/gc_implementation/concurrentMarkSweep/compactibleFreeListSpace.cpp @@ -162,6 +162,13 @@ CompactibleFreeListSpace::CompactibleFreeListSpace(BlockOffsetSharedArray* bs, } } + +HeapWord* CompactibleFreeListSpace::forward_compact_top(size_t size, + CompactPoint* cp, HeapWord* compact_top) { + ShouldNotReachHere(); + return NULL; +} + // Like CompactibleSpace forward() but always calls cross_threshold() to // update the block offset table. Removed initialize_threshold call because // CFLS does not use a block offset array for contiguous spaces. diff --git a/src/share/vm/gc_implementation/concurrentMarkSweep/compactibleFreeListSpace.hpp b/src/share/vm/gc_implementation/concurrentMarkSweep/compactibleFreeListSpace.hpp index 3b7bb9a..de7e54b 100644 --- a/src/share/vm/gc_implementation/concurrentMarkSweep/compactibleFreeListSpace.hpp +++ b/src/share/vm/gc_implementation/concurrentMarkSweep/compactibleFreeListSpace.hpp @@ -149,6 +149,7 @@ class CompactibleFreeListSpace: public CompactibleSpace { // Support for compacting cms HeapWord* cross_threshold(HeapWord* start, HeapWord* end); + HeapWord* forward_compact_top(size_t size, CompactPoint* cp, HeapWord* compact_top); HeapWord* forward(oop q, size_t size, CompactPoint* cp, HeapWord* compact_top); // Initialization helpers. 
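The markSweep.cpp hunk below rebuilds instances of redefined classes with MarkSweep::update_fields(), which decodes Klass::update_information() as a zero-terminated instruction stream: a positive entry is a byte count followed by the source offset to copy from the old instance, and a negative entry zero-fills that many destination bytes (newly added fields). A minimal sketch of such a table, assuming a purely illustrative field layout that is not taken from this patch:

// Hypothetical old layout: [header 8][int a @8][int b @12]             (16 bytes)
// Hypothetical new layout: [header 8][int a @8][long c @12][int b @20] (24 bytes)
// A matching update_information() stream for the new class version:
static int example_update_info[] = {
  12, 0,   // copy 12 bytes from old offset 0 (header + a)
  -8,      // zero-fill 8 bytes for the newly added field c
   4, 12,  // copy 4 bytes from old offset 12 (b)
   0       // terminator
};

Surviving fields are block-copied and added fields start out zeroed, which is exactly what the decoding loop in update_fields() below does.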
diff --git a/src/share/vm/gc_implementation/shared/markSweep.cpp b/src/share/vm/gc_implementation/shared/markSweep.cpp index 29841d8..a13a35d 100644 --- a/src/share/vm/gc_implementation/shared/markSweep.cpp +++ b/src/share/vm/gc_implementation/shared/markSweep.cpp @@ -32,6 +32,8 @@ #include "oops/objArrayKlass.inline.hpp" #include "oops/oop.inline.hpp" +GrowableArray<oop>* MarkSweep::_rescued_oops = NULL; + Stack<oop> MarkSweep::_marking_stack; Stack<DataLayout*> MarkSweep::_revisit_mdo_stack; Stack<Klass*> MarkSweep::_revisit_klass_stack; @@ -357,3 +359,86 @@ void MarkSweep::trace(const char* msg) { } #endif + +// (tw) Copy the rescued objects to their destination address after compaction. +void MarkSweep::copy_rescued_objects_back() { + + if (_rescued_oops != NULL) { + + for (int i=0; i<_rescued_oops->length(); i++) { + oop rescued_obj = _rescued_oops->at(i); + + int size = rescued_obj->size(); + oop new_obj = rescued_obj->forwardee(); + + if (rescued_obj->blueprint()->new_version() != NULL) { + MarkSweep::update_fields(rescued_obj, new_obj); + } else { + Copy::aligned_disjoint_words((HeapWord*)rescued_obj, (HeapWord*)new_obj, size); + } + + FREE_RESOURCE_ARRAY(HeapWord, rescued_obj, size); + + new_obj->init_mark(); + assert(new_obj->is_oop(), "must be a valid oop"); + } + _rescued_oops->clear(); + _rescued_oops = NULL; + } +} + +// (tw) Update instances of a class whose fields changed. +void MarkSweep::update_fields(oop q, oop new_location) { + + assert(q->blueprint()->new_version() != NULL, "class of old object must have new version"); + + klassOop old_klass_oop = q->klass(); + klassOop new_klass_oop = q->blueprint()->new_version(); + + instanceKlass *old_klass = instanceKlass::cast(old_klass_oop); + instanceKlass *new_klass = instanceKlass::cast(new_klass_oop); + + int size = q->size_given_klass(old_klass); + int new_size = q->size_given_klass(new_klass); + + oop tmp_obj = q; + + if (new_klass_oop->klass_part()->is_copying_backwards()) { + if (((HeapWord *)q >= (HeapWord *)new_location && (HeapWord *)q < (HeapWord *)new_location + new_size) || + ((HeapWord *)new_location >= (HeapWord *)q && (HeapWord *)new_location < (HeapWord *)q + size)) { + tmp_obj = (oop)resource_allocate_bytes(size * HeapWordSize); + Copy::aligned_disjoint_words((HeapWord*)q, (HeapWord*)tmp_obj, size); + } + } + + int *cur = new_klass_oop->klass_part()->update_information(); + + tmp_obj->set_klass_no_check(new_klass_oop); + + if (cur == NULL) { + assert(size == new_size, "just checking"); + Copy::conjoint_words(((HeapWord *)tmp_obj), ((HeapWord *)new_location), size); + } else { + int destOffset = 0; + while (*cur != 0) { + if (*cur > 0) { + int size = *cur; + cur++; + int offset = *cur; + Copy::conjoint_jbytes(((char *)tmp_obj) + offset, ((char *)new_location) + destOffset, size); + destOffset += size; + cur++; + } else { + assert(*cur < 0, ""); + int skip = -*cur; + Copy::fill_to_bytes(((char*)new_location) + destOffset, skip, 0); + destOffset += skip; + cur++; + } + } + } + + if (tmp_obj != q) { + FREE_RESOURCE_ARRAY(HeapWord, tmp_obj, size); + } +} diff --git a/src/share/vm/gc_implementation/shared/markSweep.hpp b/src/share/vm/gc_implementation/shared/markSweep.hpp index eb8252c..b96a677 100644 --- a/src/share/vm/gc_implementation/shared/markSweep.hpp +++ b/src/share/vm/gc_implementation/shared/markSweep.hpp @@ -117,8 +117,12 @@ class MarkSweep : AllStatic { friend class AdjustPointerClosure; friend class KeepAliveClosure; friend class VM_MarkSweep; + friend class GenMarkSweep; friend void marksweep_init(); +public: + static GrowableArray<oop>* 
_rescued_oops; + // // Vars // @@ -208,6 +212,8 @@ class MarkSweep : AllStatic { template <class T> static inline void mark_and_push(T* p); static inline void push_objarray(oop obj, size_t index); + static void copy_rescued_objects_back(); + static void update_fields(oop q, oop new_location); static void follow_stack(); // Empty marking stack. static void preserve_mark(oop p, markOop mark); diff --git a/src/share/vm/interpreter/interpreterRuntime.cpp b/src/share/vm/interpreter/interpreterRuntime.cpp index 32c0bdb..7e30e78 100644 --- a/src/share/vm/interpreter/interpreterRuntime.cpp +++ b/src/share/vm/interpreter/interpreterRuntime.cpp @@ -402,7 +402,7 @@ IRT_ENTRY(address, InterpreterRuntime::exception_handler_for_exception(JavaThrea assert(h_exception.not_null(), "NULL exceptions should be handled by athrow"); assert(h_exception->is_oop(), "just checking"); // Check that exception is a subclass of Throwable, otherwise we have a VerifyError - if (!(h_exception->is_a(SystemDictionary::Throwable_klass()))) { + if (!(h_exception->is_a(SystemDictionary::Throwable_klass()->klass_part()->newest_version())) && !(h_exception->is_a(SystemDictionary::Throwable_klass()))) { if (ExitVMOnVerifyError) vm_exit(-1); ShouldNotReachHere(); } @@ -656,6 +656,82 @@ IRT_ENTRY(void, InterpreterRuntime::_breakpoint(JavaThread* thread, methodOopDes JvmtiExport::post_raw_breakpoint(thread, method, bcp); IRT_END +// (tw) Correctly resolve method when running old code. +IRT_ENTRY(void, InterpreterRuntime::forward_method(JavaThread *thread)) + { + MonitorLockerEx ml(RedefinitionSync_lock); + while (Threads::wait_at_instrumentation_entry()) { + ml.wait(); + } + } + frame f = last_frame(thread); + methodOop m = f.interpreter_frame_method(); + methodOop forward_method = m->forward_method(); + if (forward_method != NULL) { + int bci = f.interpreter_frame_bci(); + + if (TraceRedefineClasses >= 3) { + tty->print_cr("Executing NOP in method %s at bci %d %d", m->name()->as_C_string(), bci, m->is_in_code_section(bci + 1)); + } + + int next_bci = bci - 1; + // First try bci before NOP. + if (!m->is_in_code_section(next_bci)) { + // Try bci after NOP. + next_bci = bci + 1; + if (!m->is_in_code_section(next_bci)) return; + } + + int new_bci = m->calculate_forward_bci(next_bci, forward_method); + if (TraceRedefineClasses >= 2) { + tty->print_cr("Transferring execution of %s to new method old_bci=%d new_bci=%d", forward_method->name()->as_C_string(), bci, new_bci); + } + RegisterMap reg_map(thread); + vframe* vf = vframe::new_vframe(&f, &reg_map, thread); + interpretedVFrame *iframe = (interpretedVFrame *)vf; + iframe->set_method(forward_method, new_bci - 1); + } +IRT_END + +// (tw) Correctly resolve method when running old code. +IRT_ENTRY(void, InterpreterRuntime::find_correct_method(JavaThread *thread, oopDesc* receiverOop, int vTableIndex)) + // extract receiver from the outgoing argument list if necessary + Handle receiver(thread, receiverOop); + + // TODO: Check for invokeinterface! + Bytecodes::Code bytecode = Bytecodes::_invokevirtual; + + int method_holder_revision_number = method(thread)->method_holder()->klass_part()->revision_number(); + klassOop klass = receiverOop->klass(); + while (klass->klass_part()->revision_number() > method_holder_revision_number) { + klass = klass->klass_part()->old_version(); + } + + // TODO: Check for correctness if different vtable indices in different versions? 
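// Illustrative note on the revision walk above (the numbers are assumptions, not taken from this patch):
// the receiver's class-version chain is followed so that the vtable index is interpreted against the
// class version the calling method was resolved against. If the caller's holder is at revision 2 while
// the receiver's class has meanwhile been redefined up to revision 5, the loop visits 5 -> 4 -> 3 -> 2
// and method_at_vtable() is then answered by the revision-2 instanceKlass.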
+ + methodOop method = ((instanceKlass *)klass->klass_part())->method_at_vtable(vTableIndex); + thread->set_vm_result(method); +IRT_END + +// Correctly resolve interface method when running old code. +IRT_ENTRY(void, InterpreterRuntime::find_correct_interface_method(JavaThread *thread, oopDesc* receiverOop, oopDesc* interface_klass, int vTableIndex)) + + // extract receiver from the outgoing argument list if necessary + Handle receiver(thread, receiverOop); + + // TODO: Check for invokeinterface! + Bytecodes::Code bytecode = Bytecodes::_invokevirtual; + + int method_holder_revision_number = method(thread)->method_holder()->klass_part()->revision_number(); + klassOop klass = receiverOop->klass(); + while (klass->klass_part()->revision_number() > method_holder_revision_number) { + klass = klass->klass_part()->old_version(); + } + + methodOop method = ((instanceKlass *)klass->klass_part())->method_at_itable((klassOop)interface_klass, vTableIndex, THREAD); + thread->set_vm_result(method); +IRT_END + IRT_ENTRY(void, InterpreterRuntime::resolve_invoke(JavaThread* thread, Bytecodes::Code bytecode)) { // extract receiver from the outgoing argument list if necessary Handle receiver(thread, NULL); @@ -684,6 +760,10 @@ IRT_ENTRY(void, InterpreterRuntime::resolve_invoke(JavaThread* thread, Bytecodes if (JvmtiExport::can_hotswap_or_post_breakpoint()) { int retry_count = 0; while (info.resolved_method()->is_old()) { + // (tw) If we are executing an old method, this is OK! + if (method(thread)->is_old()) { + break; + } // It is very unlikely that method is redefined more than 100 times // in the middle of resolve. If it is looping here more than 100 times // means then there could be a bug here. diff --git a/src/share/vm/interpreter/interpreterRuntime.hpp b/src/share/vm/interpreter/interpreterRuntime.hpp index 7ec8e49..b60f062 100644 --- a/src/share/vm/interpreter/interpreterRuntime.hpp +++ b/src/share/vm/interpreter/interpreterRuntime.hpp @@ -140,6 +140,9 @@ class InterpreterRuntime: AllStatic { static void post_method_entry(JavaThread *thread); static void post_method_exit (JavaThread *thread); static int interpreter_contains(address pc); + static void forward_method(JavaThread *thread); + static void find_correct_method(JavaThread *thread, oopDesc* receiver, int vTableIndex); + static void find_correct_interface_method(JavaThread *thread, oopDesc* receiver, oopDesc* interface_klass, int vTableIndex); // Native signature handlers static void prepare_native_call(JavaThread* thread, methodOopDesc* method); diff --git a/src/share/vm/interpreter/linkResolver.cpp b/src/share/vm/interpreter/linkResolver.cpp index b17f405..6acf287 100644 --- a/src/share/vm/interpreter/linkResolver.cpp +++ b/src/share/vm/interpreter/linkResolver.cpp @@ -153,8 +153,8 @@ void CallInfo::set_common(KlassHandle resolved_klass, KlassHandle selected_klass // Klass resolution void LinkResolver::check_klass_accessability(KlassHandle ref_klass, KlassHandle sel_klass, TRAPS) { - if (!Reflection::verify_class_access(ref_klass->as_klassOop(), - sel_klass->as_klassOop(), + if (!Reflection::verify_class_access(ref_klass->as_klassOop()->klass_part()->newest_version(), + sel_klass->as_klassOop()->klass_part()->newest_version(), true)) { ResourceMark rm(THREAD); Exceptions::fthrow( @@ -338,7 +338,7 @@ void LinkResolver::check_method_accessability(KlassHandle ref_klass, // We'll check for the method name first, as that's most likely // to be false (so we'll short-circuit out of these tests). 
if (sel_method->name() == vmSymbols::clone_name() && - sel_klass() == SystemDictionary::Object_klass() && + sel_klass()->klass_part()->newest_version() == SystemDictionary::Object_klass()->klass_part()->newest_version() && resolved_klass->oop_is_array()) { // We need to change "protected" to "public". assert(flags.is_protected(), "clone not protected?"); @@ -404,6 +404,156 @@ void LinkResolver::resolve_method_statically(methodHandle& resolved_method, Klas } } + +void LinkResolver::lookup_method(methodHandle& resolved_method, KlassHandle resolved_klass, + Symbol* method_name, Symbol* method_signature, bool is_interface, KlassHandle current_klass, TRAPS) { + + // Interface method lookup? + if (is_interface) { + + // lookup method in this interface or its super, java.lang.Object + lookup_instance_method_in_klasses(resolved_method, resolved_klass, method_name, method_signature, CHECK); + + if (resolved_method.is_null()) { + // lookup method in all the super-interfaces + lookup_method_in_interfaces(resolved_method, resolved_klass, method_name, method_signature, CHECK); + } + + // Other methods + } else { + Handle nested_exception; + + // 2. lookup method in resolved klass and its super klasses + lookup_method_in_klasses(resolved_method, resolved_klass, method_name, method_signature, CHECK); + + if (resolved_method.is_null()) { // not found in the class hierarchy + // 3. lookup method in all the interfaces implemented by the resolved klass + lookup_method_in_interfaces(resolved_method, resolved_klass, method_name, method_signature, CHECK); + + if (resolved_method.is_null()) { + // JSR 292: see if this is an implicitly generated method MethodHandle.linkToVirtual(*...), etc + lookup_polymorphic_method(resolved_method, resolved_klass, method_name, method_signature, + current_klass, (Handle*)NULL, (Handle*)NULL, THREAD); + if (HAS_PENDING_EXCEPTION) { + nested_exception = Handle(THREAD, PENDING_EXCEPTION); + CLEAR_PENDING_EXCEPTION; + } + } + } + } +} + +void LinkResolver::lookup_correct_field(fieldDescriptor &fd, KlassHandle &sel_klass, KlassHandle resolved_klass, KlassHandle current_klass, Symbol* field_name, Symbol* field_sig, bool is_static) { + + // First attempt unversioned + sel_klass = KlassHandle(Thread::current(), instanceKlass::cast(resolved_klass())->find_field(field_name, field_sig, &fd)); + + + if (!current_klass.is_null() && !current_klass->is_newest_version()) { + + // Look for the policy defined in the new version of the class (_not_ in the newest, but only in the newer relative to current klass). + int redefinition_policy = current_klass->new_version()->klass_part()->field_redefinition_policy(); + if (is_static) { + redefinition_policy = current_klass->new_version()->klass_part()->static_field_redefinition_policy(); + } + + assert(redefinition_policy != Klass::StaticCheck, "if the policy is static check, then we can never reach here"); + + if (redefinition_policy != Klass::DynamicCheck) { + + if (redefinition_policy == Klass::AccessOldMembers) { + // Forget looked up fields + sel_klass = KlassHandle(Thread::current(), (oop)NULL); + } + + assert(redefinition_policy == Klass::AccessOldMembers || redefinition_policy == Klass::AccessDeletedMembers, ""); + + if (sel_klass.is_null() || fd.is_static() != is_static /* access old static field field is changed from static to non-static */) { + + // Select correct version for resolved klass. 
+ find_correct_resolved_klass(resolved_klass, current_klass); + + sel_klass = KlassHandle(Thread::current(), instanceKlass::cast(resolved_klass())->find_field(field_name, field_sig, &fd)); + + // FIXME: idubrov + //if (sel_klass.is_null()) { + // TRACE_RC2("Trying to resolve field (%s) in old universe failed => exception is the correct behaviour", field_name->as_C_string()); + //} else { + // assert(sel_klass->new_version() != NULL, "must be old class!"); + // TRACE_RC2("Resolved a field in the old universe (%s)!", field_name->as_C_string()); + //} + } + } + } +} + +void LinkResolver::lookup_correct_method(methodHandle& resolved_method, KlassHandle resolved_klass, KlassHandle current_klass, + Symbol* method_name, Symbol* method_signature, bool is_interface, TRAPS) { + + // First attempt unversioned + lookup_method(resolved_method, resolved_klass, method_name, method_signature, is_interface, current_klass, CHECK); + + // (tw) Are we in an old method that wants to see a different view on the world? + if (!current_klass.is_null() && !current_klass->is_newest_version()) { + + // Look for the policy defined in the new version of the class (_not_ in the newest, but only in the newer relative to current klass). + int method_redefinition_policy = current_klass->new_version()->klass_part()->method_redefinition_policy(); + assert(method_redefinition_policy != Klass::StaticCheck, "if the policy is static check, then we can never reach here"); + + if (method_redefinition_policy != Klass::DynamicCheck) { + + // We do not throw the exception + if (method_redefinition_policy == Klass::AccessOldMembers) { + // Forget any new member lookup + resolved_method = methodHandle(THREAD, NULL); + } + + assert(method_redefinition_policy == Klass::AccessOldMembers || method_redefinition_policy == Klass::AccessDeletedMembers, ""); + + if (resolved_method.is_null()) { + + // Select correct version for resolved klass. + find_correct_resolved_klass(resolved_klass, current_klass); + + // Now do the lookup in a second attempt with a different resolved klass. 
+ lookup_method(resolved_method, resolved_klass, method_name, method_signature, is_interface, current_klass, CHECK); + + // FIXME: idubrov + //IF_TRACE_RC2 { + // ResourceMark rm(THREAD); + // if (resolved_method.is_null()) { + // TRACE_RC2("Trying to resolve method (%s) in old universe failed => exception is the correct behaviour", method_name->as_C_string()); + // } else { + // assert(resolved_method->is_old(), "must be old method!"); + // TRACE_RC2("Resolved a method in the old universe (%s)!", resolved_method->name()->as_C_string()); + // } + //} + } + } + } + + if (resolved_method.is_null()) { + // no method found + ResourceMark rm(THREAD); + THROW_MSG(vmSymbols::java_lang_NoSuchMethodError(), + methodOopDesc::name_and_sig_as_C_string(Klass::cast(resolved_klass()), + method_name, + method_signature)); + } +} + +void LinkResolver::find_correct_resolved_klass(KlassHandle &resolved_klass, KlassHandle &current_klass) { + int current_klass_revision = current_klass->revision_number(); + int resolved_klass_revision = resolved_klass->revision_number(); + // FIXME: idubrov + //TRACE_RC2("The two different revision numbers for interfaces: current=%d / resolved_callee=%d", current_klass_revision, resolved_klass_revision); + + while (resolved_klass->revision_number() > current_klass_revision) { + assert(resolved_klass->old_version(), "must have old version"); + resolved_klass = KlassHandle(Thread::current(), resolved_klass->old_version()); + } +} + void LinkResolver::resolve_method(methodHandle& resolved_method, KlassHandle resolved_klass, Symbol* method_name, Symbol* method_signature, KlassHandle current_klass, bool check_access, TRAPS) { @@ -416,35 +566,8 @@ void LinkResolver::resolve_method(methodHandle& resolved_method, KlassHandle res THROW_MSG(vmSymbols::java_lang_IncompatibleClassChangeError(), buf); } - Handle nested_exception; - - // 2. lookup method in resolved klass and its super klasses - lookup_method_in_klasses(resolved_method, resolved_klass, method_name, method_signature, CHECK); - - if (resolved_method.is_null()) { // not found in the class hierarchy - // 3. lookup method in all the interfaces implemented by the resolved klass - lookup_method_in_interfaces(resolved_method, resolved_klass, method_name, method_signature, CHECK); - - if (resolved_method.is_null()) { - // JSR 292: see if this is an implicitly generated method MethodHandle.linkToVirtual(*...), etc - lookup_polymorphic_method(resolved_method, resolved_klass, method_name, method_signature, - current_klass, (Handle*)NULL, (Handle*)NULL, THREAD); - if (HAS_PENDING_EXCEPTION) { - nested_exception = Handle(THREAD, PENDING_EXCEPTION); - CLEAR_PENDING_EXCEPTION; - } - } - - if (resolved_method.is_null()) { - // 4. method lookup failed - ResourceMark rm(THREAD); - THROW_MSG_CAUSE(vmSymbols::java_lang_NoSuchMethodError(), - methodOopDesc::name_and_sig_as_C_string(Klass::cast(resolved_klass()), - method_name, - method_signature), - nested_exception); - } - } + // 2. and 3. and 4. lookup method in resolved klass and its super klasses + lookup_correct_method(resolved_method, resolved_klass, current_klass, method_name, method_signature, false, CHECK); // 5. 
check if method is concrete if (resolved_method->is_abstract() && !resolved_klass->is_abstract()) { @@ -512,20 +635,7 @@ void LinkResolver::resolve_interface_method(methodHandle& resolved_method, } // lookup method in this interface or its super, java.lang.Object - lookup_instance_method_in_klasses(resolved_method, resolved_klass, method_name, method_signature, CHECK); - - if (resolved_method.is_null()) { - // lookup method in all the super-interfaces - lookup_method_in_interfaces(resolved_method, resolved_klass, method_name, method_signature, CHECK); - if (resolved_method.is_null()) { - // no method found - ResourceMark rm(THREAD); - THROW_MSG(vmSymbols::java_lang_NoSuchMethodError(), - methodOopDesc::name_and_sig_as_C_string(Klass::cast(resolved_klass()), - method_name, - method_signature)); - } - } + lookup_correct_method(resolved_method, resolved_klass, current_klass, method_name, method_signature, true, CHECK); if (check_access) { HandleMark hm(THREAD); @@ -612,9 +722,14 @@ void LinkResolver::resolve_field(FieldAccessInfo& result, constantPoolHandle poo THROW_MSG(vmSymbols::java_lang_NoSuchFieldError(), field->as_C_string()); } + KlassHandle ref_klass(THREAD, pool->pool_holder()->klass_part()); + // Resolve instance field fieldDescriptor fd; // find_field initializes fd if found - KlassHandle sel_klass(THREAD, instanceKlass::cast(resolved_klass())->find_field(field, sig, &fd)); + + KlassHandle sel_klass; + lookup_correct_field(fd, sel_klass, resolved_klass, ref_klass, field, sig, is_static); + // check if field exists; i.e., if a klass containing the field def has been selected if (sel_klass.is_null()){ ResourceMark rm(THREAD); @@ -622,7 +737,6 @@ void LinkResolver::resolve_field(FieldAccessInfo& result, constantPoolHandle poo } // check access - KlassHandle ref_klass(THREAD, pool->pool_holder()); check_field_accessability(ref_klass, resolved_klass, sel_klass, fd, CHECK); // check for errors @@ -634,7 +748,7 @@ void LinkResolver::resolve_field(FieldAccessInfo& result, constantPoolHandle poo } // Final fields can only be accessed from its own class. - if (is_put && fd.access_flags().is_final() && sel_klass() != pool->pool_holder()) { + if (is_put && fd.access_flags().is_final() && sel_klass() != pool->pool_holder()->klass_part()->active_version() && sel_klass() != pool->pool_holder()) { THROW(vmSymbols::java_lang_IllegalAccessError()); } @@ -839,7 +953,7 @@ void LinkResolver::resolve_virtual_call(CallInfo& result, Handle recv, KlassHand bool check_access, bool check_null_and_abstract, TRAPS) { methodHandle resolved_method; linktime_resolve_virtual_method(resolved_method, resolved_klass, method_name, method_signature, current_klass, check_access, CHECK); - runtime_resolve_virtual_method(result, resolved_method, resolved_klass, recv, receiver_klass, check_null_and_abstract, CHECK); + runtime_resolve_virtual_method(result, resolved_method, resolved_klass, recv, receiver_klass, current_klass, check_null_and_abstract, CHECK); } // throws linktime exceptions @@ -869,6 +983,7 @@ void LinkResolver::runtime_resolve_virtual_method(CallInfo& result, KlassHandle resolved_klass, Handle recv, KlassHandle recv_klass, + KlassHandle current_klass, bool check_null_and_abstract, TRAPS) { @@ -917,7 +1032,40 @@ void LinkResolver::runtime_resolve_virtual_method(CallInfo& result, // recv_klass might be an arrayKlassOop but all vtables start at // the same place. The cast is to avoid virtual call and assertion. 
instanceKlass* inst = (instanceKlass*)recv_klass()->klass_part(); + + // (tw) The type of the virtual method call and the type of the receiver do not need to + // have anything in common, as the receiver type could've been hotswapped. + // Does not always work (method could be resolved with correct dynamic type and later + // be called at the same place with a wrong dynamic type). + // (tw) TODO: Need to handle the static type vs dynamic type issue more generally. + + // The vTable must be based on the view of the world of the resolved method + klassOop method_holder = resolved_method->method_holder(); + + if (method_holder->klass_part()->new_version() != NULL) { + // We are executing in old code + // FIXME: idubrov + //TRACE_RC2("Calling a method in old code"); + while (method_holder->klass_part()->revision_number() < inst->revision_number()) { + inst = (instanceKlass *)(inst->old_version()->klass_part()); + } + } + + if (inst->is_subtype_of(method_holder)) { selected_method = methodHandle(THREAD, inst->method_at_vtable(vtable_index)); + } else { + + tty->print_cr("Failure:"); + inst->as_klassOop()->print(); + inst->super()->print(); + juint off = inst->super_check_offset(); + klassOop sup = *(klassOop*)( (address)inst->as_klassOop() + off ); + sup->print(); + method_holder->print(); + + bool b = inst->is_subtype_of(method_holder); + THROW_MSG(vmSymbols::java_lang_NoSuchMethodError(), "(tw) A virtual method was called, but the type of the receiver is not related with the type of the class of the called method!"); + } } } diff --git a/src/share/vm/interpreter/linkResolver.hpp b/src/share/vm/interpreter/linkResolver.hpp index dfd74f9..cf6e44a 100644 --- a/src/share/vm/interpreter/linkResolver.hpp +++ b/src/share/vm/interpreter/linkResolver.hpp @@ -110,7 +110,11 @@ class CallInfo: public LinkInfo { // It does all necessary link-time checks & throws exceptions if necessary. 
class LinkResolver: AllStatic { - private: +private: + static void lookup_method (methodHandle& result, KlassHandle resolved_klass, Symbol* name, Symbol* signature, bool is_interface, KlassHandle current_klass, TRAPS); + static void lookup_correct_field (fieldDescriptor &fd, KlassHandle &sel_klass, KlassHandle resolved_klass, KlassHandle current_klass, Symbol* field_name, Symbol* field_sig, bool is_static); + static void lookup_correct_method (methodHandle& result, KlassHandle resolved_klass, KlassHandle current_klass, Symbol* name, Symbol* signature, bool is_interface, TRAPS); + static void find_correct_resolved_klass (KlassHandle &resolved_klass, KlassHandle &current_klass); static void lookup_method_in_klasses (methodHandle& result, KlassHandle klass, Symbol* name, Symbol* signature, TRAPS); static void lookup_instance_method_in_klasses (methodHandle& result, KlassHandle klass, Symbol* name, Symbol* signature, TRAPS); static void lookup_method_in_interfaces (methodHandle& result, KlassHandle klass, Symbol* name, Symbol* signature, TRAPS); @@ -133,7 +137,7 @@ class LinkResolver: AllStatic { static void linktime_resolve_interface_method (methodHandle& resolved_method, KlassHandle resolved_klass, Symbol* method_name, Symbol* method_signature, KlassHandle current_klass, bool check_access, TRAPS); static void runtime_resolve_special_method (CallInfo& result, methodHandle resolved_method, KlassHandle resolved_klass, KlassHandle current_klass, bool check_access, TRAPS); - static void runtime_resolve_virtual_method (CallInfo& result, methodHandle resolved_method, KlassHandle resolved_klass, Handle recv, KlassHandle recv_klass, bool check_null_and_abstract, TRAPS); + static void runtime_resolve_virtual_method (CallInfo& result, methodHandle resolved_method, KlassHandle resolved_klass, Handle recv, KlassHandle recv_klass, KlassHandle current_klass, bool check_null_and_abstract, TRAPS); static void runtime_resolve_interface_method (CallInfo& result, methodHandle resolved_method, KlassHandle resolved_klass, Handle recv, KlassHandle recv_klass, bool check_null_and_abstract, TRAPS); static void check_field_accessability (KlassHandle ref_klass, KlassHandle resolved_klass, KlassHandle sel_klass, fieldDescriptor& fd, TRAPS); diff --git a/src/share/vm/interpreter/templateTable.hpp b/src/share/vm/interpreter/templateTable.hpp index 17e9f26..e77500f 100644 --- a/src/share/vm/interpreter/templateTable.hpp +++ b/src/share/vm/interpreter/templateTable.hpp @@ -329,8 +329,8 @@ class TemplateTable: AllStatic { static void shouldnotreachhere(); // jvmti support - static void jvmti_post_field_access(Register cache, Register index, bool is_static, bool has_tos); - static void jvmti_post_field_mod(Register cache, Register index, bool is_static); + static void jvmti_post_field_access(Register cache, Register index, int byte_no, bool is_static, bool has_tos); + static void jvmti_post_field_mod(Register cache, Register index, int byte_no, bool is_static); static void jvmti_post_fast_field_mod(); // debugging of TemplateGenerator diff --git a/src/share/vm/memory/genMarkSweep.cpp b/src/share/vm/memory/genMarkSweep.cpp index 76e18d8..6af7c14 100644 --- a/src/share/vm/memory/genMarkSweep.cpp +++ b/src/share/vm/memory/genMarkSweep.cpp @@ -421,6 +421,7 @@ void GenMarkSweep::mark_sweep_phase4() { // in the same order in phase2, phase3 and phase4. We don't quite do that // here (perm_gen first rather than last), so we tell the validate code // to use a higher index (saved from phase2) when verifying perm_gen. 
+ assert(_rescued_oops == NULL, "must be empty before processing"); GenCollectedHeap* gch = GenCollectedHeap::heap(); Generation* pg = gch->perm_gen(); @@ -433,10 +434,14 @@ void GenMarkSweep::mark_sweep_phase4() { VALIDATE_MARK_SWEEP_ONLY(reset_live_oop_tracking(false)); + MarkSweep::copy_rescued_objects_back(); + GenCompactClosure blk; gch->generation_iterate(&blk, true); VALIDATE_MARK_SWEEP_ONLY(compaction_complete()); + MarkSweep::copy_rescued_objects_back(); + pg->post_compact(); // Shared spaces verification. } diff --git a/src/share/vm/memory/permGen.cpp b/src/share/vm/memory/permGen.cpp index 350f583..59faad1 100644 --- a/src/share/vm/memory/permGen.cpp +++ b/src/share/vm/memory/permGen.cpp @@ -57,7 +57,12 @@ HeapWord* PermGen::mem_allocate_in_gen(size_t size, Generation* gen) { for (;;) { { - MutexLocker ml(Heap_lock); + // (tw) Only lock when not at a safepoint (necessary to use the split verifier from the VmThread) + Monitor *lock = Heap_lock; + if (SafepointSynchronize::is_at_safepoint()) { + lock = NULL; + } + MutexLockerEx ml(lock); if ((obj = gen->allocate(size, false)) != NULL) { return obj; } diff --git a/src/share/vm/memory/space.cpp b/src/share/vm/memory/space.cpp index f97bc34..9b20d08 100644 --- a/src/share/vm/memory/space.cpp +++ b/src/share/vm/memory/space.cpp @@ -378,6 +378,31 @@ void CompactibleSpace::clear(bool mangle_space) { _compaction_top = bottom(); } +// (tw) Calculates the compact_top that will be used for placing the next object with the giving size on the heap. +HeapWord* CompactibleSpace::forward_compact_top(size_t size, +CompactPoint* cp, HeapWord* compact_top) { + // First check if we should switch compaction space + assert(this == cp->space, "'this' should be current compaction space."); + size_t compaction_max_size = pointer_delta(end(), compact_top); + while (size > compaction_max_size) { + // switch to next compaction space + cp->space->set_compaction_top(compact_top); + cp->space = cp->space->next_compaction_space(); + if (cp->space == NULL) { + cp->gen = GenCollectedHeap::heap()->prev_gen(cp->gen); + assert(cp->gen != NULL, "compaction must succeed"); + cp->space = cp->gen->first_compaction_space(); + assert(cp->space != NULL, "generation must have a first compaction space"); + } + compact_top = cp->space->bottom(); + cp->space->set_compaction_top(compact_top); + cp->threshold = cp->space->initialize_threshold(); + compaction_max_size = pointer_delta(cp->space->end(), compact_top); + } + + return compact_top; +} + HeapWord* CompactibleSpace::forward(oop q, size_t size, CompactPoint* cp, HeapWord* compact_top) { // q is alive @@ -401,7 +426,7 @@ HeapWord* CompactibleSpace::forward(oop q, size_t size, } // store the forwarding pointer into the mark word - if ((HeapWord*)q != compact_top) { + if ((HeapWord*)q != compact_top || (size_t)q->size() != size) { q->forward_to(oop(compact_top)); assert(q->is_gc_marked(), "encoding the pointer should preserve the mark"); } else { @@ -449,7 +474,208 @@ void CompactibleSpace::prepare_for_compaction(CompactPoint* cp) { // Faster object search. void ContiguousSpace::prepare_for_compaction(CompactPoint* cp) { - SCAN_AND_FORWARD(cp, top, block_is_always_obj, obj_size); + if (!Universe::is_redefining_gc_run()) { + SCAN_AND_FORWARD(cp, top, block_is_always_obj, obj_size); + return; + } + + /* Compute the new addresses for the live objects and store it in the mark + * Used by universe::mark_sweep_phase2() + */ + HeapWord* compact_top; /* This is where we are currently compacting to. 
*/ + + /* We're sure to be here before any objects are compacted into this + * space, so this is a good time to initialize this: + */ + set_compaction_top(bottom()); + + if (cp->space == NULL) { + assert(cp->gen != NULL, "need a generation"); + assert(cp->threshold == NULL, "just checking"); + assert(cp->gen->first_compaction_space() == this, "just checking"); + cp->space = cp->gen->first_compaction_space(); + compact_top = cp->space->bottom(); + cp->space->set_compaction_top(compact_top); + cp->threshold = cp->space->initialize_threshold(); + } else { + compact_top = cp->space->compaction_top(); + } + + /* We allow some amount of garbage towards the bottom of the space, so + * we don't start compacting before there is a significant gain to be made. + * Occasionally, we want to ensure a full compaction, which is determined + * by the MarkSweepAlwaysCompactCount parameter. + */ + int invocations = SharedHeap::heap()->perm_gen()->stat_record()->invocations; + bool skip_dead = (MarkSweepAlwaysCompactCount < 1) + ||((invocations % MarkSweepAlwaysCompactCount) != 0); + + size_t allowed_deadspace = 0; + if (skip_dead) { + int ratio = (int)allowed_dead_ratio(); + allowed_deadspace = (capacity() * ratio / 100) / HeapWordSize; + } + + HeapWord* q = bottom(); + HeapWord* t = end(); + + HeapWord* end_of_live= q; /* One byte beyond the last byte of the last + live object. */ + HeapWord* first_dead = end();/* The first dead object. */ + LiveRange* liveRange = NULL; /* The current live range, recorded in the + first header of preceding free area. */ + _first_dead = first_dead; + + const intx interval = PrefetchScanIntervalInBytes; + + while (q < t) { + assert(!block_is_obj(q) || + oop(q)->mark()->is_marked() || oop(q)->mark()->is_unlocked() || + oop(q)->mark()->has_bias_pattern(), + "these are the only valid states during a mark sweep"); + if (block_is_obj(q) && oop(q)->is_gc_marked()) { + /* prefetch beyond q */ + Prefetch::write(q, interval); + /* size_t size = oop(q)->size(); changing this for cms for perm gen */ + size_t size = block_size(q); + + // DCEVM: begin + ////////////////////////////////////////////////////////////////////////// + size_t forward_size = size; + + // Compute the forward sizes and leave out objects whose position could + // possibly overlap other objects. + + // DCEVM: There is a new version of the class of q => different size + if (oop(q)->blueprint()->new_version() != NULL && oop(q)->blueprint()->new_version()->klass_part()->update_information() != NULL) { + + size_t new_size = oop(q)->size_given_klass(oop(q)->blueprint()->new_version()->klass_part()); + assert(size != new_size || oop(q)->is_perm(), "instances without changed size have to be updated prior to GC run"); + forward_size = new_size; + } + + compact_top = cp->space->forward_compact_top(forward_size, cp, compact_top); + + bool rescueing = false; + if (rescueing = must_rescue(oop(q), oop(compact_top))) { + if (MarkSweep::_rescued_oops == NULL) { + MarkSweep::_rescued_oops = new GrowableArray(128); + } + // FIXME: idubrov + //TRACE_RC5("rescue obj %d klass=%s", MarkSweep::_rescued_oops->length(), oop(q)->klass()->klass_part()->name()->as_C_string()); + MarkSweep::_rescued_oops->append(oop(q)); + } else { + compact_top = cp->space->forward(oop(q), forward_size, cp, compact_top); + } + + if ((size != forward_size || rescueing) && q < first_dead) { + // (tw) This object moves => first_dead must be set to here! 
+ first_dead = q; + } + ////////////////////////////////////////////////////////////////////////// + q += size; + end_of_live = q; + } else { + /* run over all the contiguous dead objects */ + HeapWord* end = q; + do { + /* prefetch beyond end */ + Prefetch::write(end, interval); + end += block_size(end); + } while (end < t && (!block_is_obj(end) || !oop(end)->is_gc_marked())); + + /* see if we might want to pretend this object is alive so that + * we don't have to compact quite as often. + */ + if (allowed_deadspace > 0 && q == compact_top) { + size_t sz = pointer_delta(end, q); + if (insert_deadspace(allowed_deadspace, q, sz)) { + compact_top = cp->space->forward(oop(q), sz, cp, compact_top); + q = end; + end_of_live = end; + continue; + } + } + + /* otherwise, it really is a free region. */ + + /* for the previous LiveRange, record the end of the live objects. */ + if (liveRange) { + liveRange->set_end(q); + } + + /* record the current LiveRange object. + * liveRange->start() is overlaid on the mark word. + */ + liveRange = (LiveRange*)q; + liveRange->set_start(end); + liveRange->set_end(end); + + /* see if this is the first dead region. */ + if (q < first_dead) { + first_dead = q; + } + + /* move on to the next object */ + q = end; + } + } + + ////////////////////////////////////////////////////////////////////////// + // Compute the forwarding addresses for the objects that need to be + // rescued. + // TODO: empty the _rescued_oops after ALL spaces are compacted! + if (MarkSweep::_rescued_oops != NULL) { + // FIXME: idubrov + //TRACE_RC2("Calculating new forward sizes for %d objects!", MarkSweep::_rescued_oops->length()); + + for (int i=0; i<MarkSweep::_rescued_oops->length(); i++) { + oop q = MarkSweep::_rescued_oops->at(i); + + /* size_t size = oop(q)->size(); changing this for cms for perm gen */ + size_t size = block_size((HeapWord*)q); + + size_t forward_size = size; + + // (tw) There is a new version of the class of q => different size + if (oop(q)->blueprint()->new_version() != NULL) { + + size_t new_size = oop(q)->size_given_klass(oop(q)->blueprint()->new_version()->klass_part()); + assert(size != new_size || oop(q)->is_perm(), "instances without changed size have to be updated prior to GC run"); + forward_size = new_size; + } + + compact_top = cp->space->forward(oop(q), forward_size, cp, compact_top); + assert(compact_top <= t, "must not write over end of space!"); + } + MarkSweep::_rescued_oops->clear(); + MarkSweep::_rescued_oops = NULL; + } + ////////////////////////////////////////////////////////////////////////// + + assert(q == t, "just checking"); + if (liveRange != NULL) { + liveRange->set_end(q); + } + _end_of_live = end_of_live; + if (end_of_live < first_dead) { + first_dead = end_of_live; + } + _first_dead = first_dead; + +// FIXME: idubrov +// if (_first_dead > top()) { +// _first_dead = top(); +// } +// +// if (_end_of_live > top()) { +// _end_of_live = top(); +// } + assert(_first_dead <= top(), "Must be smaller equal"); + assert(_end_of_live <= top(), "Must be smaller equal"); + + /* save the compaction_top of the compaction space. 
*/ + cp->space->set_compaction_top(compact_top); } void Space::adjust_pointers() { @@ -490,17 +716,313 @@ void Space::adjust_pointers() { assert(q == t, "just checking"); } + +#ifdef ASSERT + +int CompactibleSpace::space_index(oop obj) { + GenCollectedHeap* heap = GenCollectedHeap::heap(); + + if (heap->is_in_permanent(obj)) { + return -1; + } + + int index = 0; + for (int i = heap->n_gens() - 1; i >= 0; i--) { + Generation* gen = heap->get_gen(i); + CompactibleSpace* space = gen->first_compaction_space(); + while (space != NULL) { + if (space->is_in_reserved(obj)) { + return index; + } + space = space->next_compaction_space(); + index++; + } + } + + tty->print_cr("could not compute space_index for %08xh", obj); + index = 0; + for (int i = heap->n_gens() - 1; i >= 0; i--) { + Generation* gen = heap->get_gen(i); + tty->print_cr(" generation %s: %08xh - %08xh", gen->name(), gen->reserved().start(), gen->reserved().end()); + + CompactibleSpace* space = gen->first_compaction_space(); + while (space != NULL) { + tty->print_cr(" %2d space %08xh - %08xh", index, space->bottom(), space->end()); + space = space->next_compaction_space(); + index++; + } + } + + ShouldNotReachHere(); + return 0; +} +#endif + +bool CompactibleSpace::must_rescue(oop old_obj, oop new_obj) { + + assert(is_in_reserved(old_obj), "old_obj must be in this space"); + + if (old_obj->is_perm()) { + // This object is in perm gen; check for invariant obj->klass() <= obj + if (oop(old_obj)->blueprint()->new_version() != NULL) { + return true; + } + } + + int size = old_obj->size(); + int original_size = size; + if (oop(old_obj)->blueprint()->is_redefining()) { + assert(oop(old_obj)->blueprint()->old_version() != NULL, "must not be null"); + original_size = oop(old_obj)->size_given_klass(oop(old_obj)->blueprint()->old_version()->klass_part()); + } else if (oop(old_obj)->blueprint()->new_version() != NULL) { + size = oop(old_obj)->size_given_klass(oop(old_obj)->blueprint()->new_version()->klass_part()); + } + + bool normalComparison = (old_obj + original_size < new_obj + size); + + if (is_in_reserved(new_obj)) { + // Old and new address are in same space, so just compare the address. + // Must rescue if object moves towards the top of the space. + assert(space_index(old_obj) == space_index(new_obj), "old_obj and new_obj must be in same space"); + return normalComparison; + + } else { + + assert(space_index(old_obj) != space_index(new_obj), "old_obj and new_obj must be in different spaces"); + + Generation* tenured_gen = GenCollectedHeap::heap()->get_gen(1); + if (tenured_gen->is_in_reserved(new_obj)) { + // Must never rescue when moving from the new into the old generation. + assert(GenCollectedHeap::heap()->get_gen(0)->is_in_reserved(old_obj), "old_obj must be in DefNewGeneration"); + assert(space_index(old_obj) > space_index(new_obj), "must be"); + return false; + + } else if (tenured_gen->is_in_reserved(old_obj)) { + // Must always rescue when moving from the old into the new generation. + assert(GenCollectedHeap::heap()->get_gen(0)->is_in_reserved(new_obj), "new_obj must be in DefNewGeneration"); + assert(space_index(old_obj) < space_index(new_obj), "must be"); + return true; + + } else { + // In the new generation, eden is located before the from space, so a + // simple pointer comparison is sufficient. 
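// Illustrative note (hypothetical sizes, not taken from this patch): normalComparison tests whether the
// destination copy would end beyond the end of the object's current storage, i.e.
// old_obj + original_size < new_obj + size. When that holds, copying objects in ascending address order
// would overwrite neighbours that have not been evacuated yet, so the object is rescued into a side
// buffer and written back later by MarkSweep::copy_rescued_objects_back(). For example, an object four
// words long whose destination starts one word higher and is six words long spills past its own old end
// into unprocessed storage, so must_rescue() returns true.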
+ assert(GenCollectedHeap::heap()->get_gen(0)->is_in_reserved(old_obj), "old_obj must be in DefNewGeneration"); + assert(GenCollectedHeap::heap()->get_gen(0)->is_in_reserved(new_obj), "new_obj must be in DefNewGeneration"); + assert((normalComparison) == (space_index(old_obj) < space_index(new_obj)), "slow and fast computation must yield same result"); + return normalComparison; + } + } +} + +oop CompactibleSpace::rescue(oop old_obj) { + assert(must_rescue(old_obj, old_obj->forwardee()), "do not call otherwise"); + + int size = old_obj->size(); + oop rescued_obj = (oop)resource_allocate_bytes(size * HeapWordSize); + Copy::aligned_disjoint_words((HeapWord*)old_obj, (HeapWord*)rescued_obj, size); + + if (MarkSweep::_rescued_oops == NULL) { + MarkSweep::_rescued_oops = new GrowableArray(128); + } + + MarkSweep::_rescued_oops->append(rescued_obj); + return rescued_obj; +} + void CompactibleSpace::adjust_pointers() { // Check first is there is any work to do. if (used() == 0) { return; // Nothing to do. } - - SCAN_AND_ADJUST_POINTERS(adjust_obj_size); + /* adjust all the interior pointers to point at the new locations of objects + * Used by MarkSweep::mark_sweep_phase3() */ + + HeapWord* q = bottom(); + HeapWord* t = _end_of_live; /* Established by "prepare_for_compaction". */ + + assert(_first_dead <= _end_of_live, "Stands to reason, no?"); + + debug_only(HeapWord* prev_q = NULL); + debug_only(HeapWord* prev_prev_q = NULL); + debug_only(HeapWord* prev_prev_prev_q = NULL); + if (q < t && _first_dead > q && + !oop(q)->is_gc_marked()) { + /* we have a chunk of the space which hasn't moved and we've + * reinitialized the mark word during the previous pass, so we can't + * use is_gc_marked for the traversal. */ + HeapWord* end = _first_dead; + + while (q < end) { + /* I originally tried to conjoin "block_start(q) == q" to the + * assertion below, but that doesn't work, because you can't + * accurately traverse previous objects to get to the current one + * after their pointers (including pointers into permGen) have been + * updated, until the actual compaction is done. dld, 4/00 */ + assert(block_is_obj(q), + "should be at block boundaries, and should be looking at objs"); + + VALIDATE_MARK_SWEEP_ONLY(MarkSweep::track_interior_pointers(oop(q))); + + /* point all the oops to the new location */ + size_t size = oop(q)->adjust_pointers(); + size = adjust_obj_size(size); + + VALIDATE_MARK_SWEEP_ONLY(MarkSweep::check_interior_pointers()); + VALIDATE_MARK_SWEEP_ONLY(MarkSweep::validate_live_oop(oop(q), size)); + + debug_only(prev_prev_prev_q = prev_prev_q); + debug_only(prev_prev_q = prev_q); + debug_only(prev_q = q); + q += size; + } + + // (tw) first_dead can be live object! + q = _first_dead; + +// if (_first_dead == t) { +// q = t; +// } else { +// /* $$$ This is funky. Using this to read the previously written +// * LiveRange. See also use below. 
*/ +// q = (HeapWord*)oop(_first_dead)->mark()->decode_pointer(); +// } + } + + const intx interval = PrefetchScanIntervalInBytes; + + debug_only(prev_q = NULL); + debug_only(prev_prev_q = NULL); + debug_only(prev_prev_prev_q = NULL); + while (q < t) { + /* prefetch beyond q */ + Prefetch::write(q, interval); + if (oop(q)->is_gc_marked()) { + /* q is alive */ + VALIDATE_MARK_SWEEP_ONLY(MarkSweep::track_interior_pointers(oop(q))); + /* point all the oops to the new location */ + size_t size = oop(q)->adjust_pointers(); + size = adjust_obj_size(size); + VALIDATE_MARK_SWEEP_ONLY(MarkSweep::check_interior_pointers()); + VALIDATE_MARK_SWEEP_ONLY(MarkSweep::validate_live_oop(oop(q), size)); + debug_only(prev_prev_prev_q = prev_prev_q); + debug_only(prev_prev_q = prev_q); + debug_only(prev_q = q); + q += size; + } else { + /* q is not a live object, so its mark should point at the next + * live object */ + debug_only(prev_prev_prev_q = prev_prev_q); + debug_only(prev_prev_q = prev_q); + debug_only(prev_q = q); + q = (HeapWord*) oop(q)->mark()->decode_pointer(); + assert(q > prev_q, "we should be moving forward through memory"); + } + } + + assert(q == t, "just checking"); } void CompactibleSpace::compact() { - SCAN_AND_COMPACT(obj_size); + + if(!Universe::is_redefining_gc_run()) { + SCAN_AND_COMPACT(obj_size); + return; + } + + /* Copy all live objects to their new location + * Used by MarkSweep::mark_sweep_phase4() */ + + HeapWord* q = bottom(); + HeapWord* const t = _end_of_live; + debug_only(HeapWord* prev_q = NULL); + + if (q < t && _first_dead > q && + !oop(q)->is_gc_marked()) { + debug_only( + /* we have a chunk of the space which hasn't moved and we've reinitialized + * the mark word during the previous pass, so we can't use is_gc_marked for + * the traversal. */ + HeapWord* const end = _first_dead; + + while (q < end) { + size_t size = obj_size(q); // FIXME: idubrov oop(q)->size(); + assert(!oop(q)->is_gc_marked(), + "should be unmarked (special dense prefix handling)"); + VALIDATE_MARK_SWEEP_ONLY(MarkSweep::live_oop_moved_to(q, size, q)); + debug_only(prev_q = q); + q += size; + } + ) /* debug_only */ + // (tw) first_dead can be live object! 
+ q = _first_dead; + + //if (_first_dead == t) { + // q = t; + //} else { + ///* $$$ Funky */ + //q = (HeapWord*) oop(_first_dead)->mark()->decode_pointer(); + //} + } + + const intx scan_interval = PrefetchScanIntervalInBytes; + const intx copy_interval = PrefetchCopyIntervalInBytes; + while (q < t) { + if (!oop(q)->is_gc_marked()) { + /* mark is pointer to next marked oop */ + debug_only(prev_q = q); + q = (HeapWord*) oop(q)->mark()->decode_pointer(); + assert(q > prev_q, "we should be moving forward through memory"); + } else { + /* prefetch beyond q */ + Prefetch::read(q, scan_interval); + + /* size and destination */ + size_t size = obj_size(q); + HeapWord* compaction_top = (HeapWord*)oop(q)->forwardee(); + + if (must_rescue(oop(q), oop(q)->forwardee())) { + oop dest_obj = rescue(oop(q)); + debug_only(Copy::fill_to_words(q, size, 0)); + } else { + + /* prefetch beyond compaction_top */ + Prefetch::write(compaction_top, copy_interval); + + /* copy object and reinit its mark */ + VALIDATE_MARK_SWEEP_ONLY(MarkSweep::live_oop_moved_to(q, size, + compaction_top)); + assert(q != compaction_top || oop(q)->blueprint()->new_version() != NULL, "everything in this pass should be moving"); + + if (oop(q)->blueprint()->new_version() != NULL) { + MarkSweep::update_fields(oop(q), oop(compaction_top)); + } else { + Copy::aligned_conjoint_words(q, compaction_top, size); + } + oop(compaction_top)->init_mark(); + assert(oop(compaction_top)->klass() != NULL, "should have a class"); + } + + debug_only(prev_q = q); + q += size; + } + } + + /* Let's remember if we were empty before we did the compaction. */ + bool was_empty = used_region().is_empty(); + /* Reset space after compaction is complete */ + reset_after_compaction(); + /* We do this clear, below, since it has overloaded meanings for some */ + /* space subtypes. For example, OffsetTableContigSpace's that were */ + /* compacted into will have had their offset table thresholds updated */ + /* continuously, but those that weren't need to have their thresholds */ + /* re-initialized. Also mangles unused area for debugging. */ + if (used_region().is_empty()) { + if (!was_empty) clear(SpaceDecorator::Mangle); + } else { + if (ZapUnusedHeapArea) mangle_unused_area(); + } + + //SCAN_AND_COMPACT(obj_size); } void Space::print_short() const { print_short_on(tty); } diff --git a/src/share/vm/memory/space.hpp b/src/share/vm/memory/space.hpp index ef2f2c6..b54d470 100644 --- a/src/share/vm/memory/space.hpp +++ b/src/share/vm/memory/space.hpp @@ -445,6 +445,9 @@ public: // indicates when the next such action should be taken. virtual void prepare_for_compaction(CompactPoint* cp); // MarkSweep support phase3 + DEBUG_ONLY(int space_index(oop obj)); + bool must_rescue(oop old_obj, oop new_obj); + oop rescue(oop old_obj); virtual void adjust_pointers(); // MarkSweep support phase4 virtual void compact(); @@ -475,6 +478,10 @@ public: virtual HeapWord* forward(oop q, size_t size, CompactPoint* cp, HeapWord* compact_top); + // (tw) + virtual HeapWord* forward_compact_top(size_t size, CompactPoint* cp, + HeapWord* compact_top); + // Return a size with adjusments as required of the space. 
virtual size_t adjust_object_size_v(size_t size) const { return size; } diff --git a/src/share/vm/memory/universe.cpp b/src/share/vm/memory/universe.cpp index 8ce17d9..4c1ba52 100644 --- a/src/share/vm/memory/universe.cpp +++ b/src/share/vm/memory/universe.cpp @@ -100,6 +100,8 @@ #include "gc_implementation/parallelScavenge/parallelScavengeHeap.hpp" #endif +bool Universe::_is_redefining_gc_run = false; + // Known objects klassOop Universe::_boolArrayKlassObj = NULL; klassOop Universe::_byteArrayKlassObj = NULL; @@ -204,6 +206,38 @@ void Universe::system_classes_do(void f(klassOop)) { f(systemObjArrayKlassObj()); } +// DCEVM: This method should iterate all pointers that are not within heap objects. +void Universe::root_oops_do(OopClosure *oopClosure) { + + class AlwaysTrueClosure: public BoolObjectClosure { + public: + void do_object(oop p) { ShouldNotReachHere(); } + bool do_object_b(oop p) { return true; } + }; + AlwaysTrueClosure always_true; + + // General strong roots + Universe::oops_do(oopClosure); + JNIHandles::oops_do(oopClosure); + Threads::oops_do(oopClosure, NULL); + ObjectSynchronizer::oops_do(oopClosure); + FlatProfiler::oops_do(oopClosure); + //Management::oops_do(oopClosure); // DCEVM: TODO: Check if this is correct? + JvmtiExport::oops_do(oopClosure); + // SO_AllClasses + SystemDictionary::oops_do(oopClosure); + + // Now adjust pointers in remaining weak roots. (All of which should + // have been cleared if they pointed to non-surviving objects.) + // Global (weak) JNI handles + JNIHandles::weak_oops_do(&always_true, oopClosure); + + CodeCache::oops_do(oopClosure); + StringTable::oops_do(oopClosure); + //ref_processor()->weak_oops_do(&oopClosure); // DCEVM: TODO: Check if this is correct? + //PSScavenge::reference_processor()->weak_oops_do(&oopClosure); // DCEVM: TODO: Check if this is correct? +} + void Universe::oops_do(OopClosure* f, bool do_all) { f->do_oop((oop*) &_int_mirror); diff --git a/src/share/vm/memory/universe.hpp b/src/share/vm/memory/universe.hpp index da21a8b..676675e 100644 --- a/src/share/vm/memory/universe.hpp +++ b/src/share/vm/memory/universe.hpp @@ -127,6 +127,8 @@ class Universe: AllStatic { friend class SystemDictionary; friend class VMStructs; friend class CompactingPermGenGen; + friend class Space; + friend class ContiguousSpace; friend class VM_PopulateDumpSharedSpace; friend jint universe_init(); @@ -258,7 +260,18 @@ class Universe: AllStatic { static void compute_verify_oop_data(); + static bool _is_redefining_gc_run; + public: + + static bool is_redefining_gc_run() { + return _is_redefining_gc_run; + } + + static void set_redefining_gc_run(bool b) { + _is_redefining_gc_run = b; + } + // Known classes in the VM static klassOop boolArrayKlassObj() { return _boolArrayKlassObj; } static klassOop byteArrayKlassObj() { return _byteArrayKlassObj; } @@ -403,6 +416,8 @@ class Universe: AllStatic { // Iteration + static void root_oops_do(OopClosure *f); + // Apply "f" to the addresses of all the direct heap pointers maintained // as static fields of "Universe". 
static void oops_do(OopClosure* f, bool do_all = false); @@ -419,6 +434,7 @@ class Universe: AllStatic { // Debugging static bool verify_in_progress() { return _verify_in_progress; } + static void set_verify_in_progress(bool b) { _verify_in_progress = b; } static void verify(bool silent, VerifyOption option); static void verify(bool silent) { verify(silent, VerifyOption_Default /* option */); diff --git a/src/share/vm/oops/arrayKlass.cpp b/src/share/vm/oops/arrayKlass.cpp index 4aa1155..2738ea9 100644 --- a/src/share/vm/oops/arrayKlass.cpp +++ b/src/share/vm/oops/arrayKlass.cpp @@ -129,9 +129,9 @@ objArrayOop arrayKlass::compute_secondary_supers(int num_extra_slots, TRAPS) { bool arrayKlass::compute_is_subtype_of(klassOop k) { // An array is a subtype of Serializable, Clonable, and Object - return k == SystemDictionary::Object_klass() - || k == SystemDictionary::Cloneable_klass() - || k == SystemDictionary::Serializable_klass(); + return k->klass_part()->newest_version() == SystemDictionary::Object_klass()->klass_part()->newest_version() + || k->klass_part()->newest_version() == SystemDictionary::Cloneable_klass()->klass_part()->newest_version() + || k->klass_part()->newest_version() == SystemDictionary::Serializable_klass()->klass_part()->newest_version(); } diff --git a/src/share/vm/oops/constMethodKlass.cpp b/src/share/vm/oops/constMethodKlass.cpp index e74811f..be3fe7d 100644 --- a/src/share/vm/oops/constMethodKlass.cpp +++ b/src/share/vm/oops/constMethodKlass.cpp @@ -102,6 +102,7 @@ void constMethodKlass::oop_follow_contents(oop obj) { constMethodOop cm = constMethodOop(obj); MarkSweep::mark_and_push(cm->adr_constants()); MarkSweep::mark_and_push(cm->adr_stackmap_data()); + MarkSweep::mark_and_push(cm->adr_code_section_table()); // Performance tweak: We skip iterating over the klass pointer since we // know that Universe::constMethodKlassObj never moves. } @@ -113,6 +114,7 @@ void constMethodKlass::oop_follow_contents(ParCompactionManager* cm, constMethodOop cm_oop = constMethodOop(obj); PSParallelCompact::mark_and_push(cm, cm_oop->adr_constants()); PSParallelCompact::mark_and_push(cm, cm_oop->adr_stackmap_data()); + PSParallelCompact::mark_and_push(cm, cm_oop->adr_code_section_table()); // Performance tweak: We skip iterating over the klass pointer since we // know that Universe::constMethodKlassObj never moves. } @@ -123,6 +125,7 @@ int constMethodKlass::oop_oop_iterate(oop obj, OopClosure* blk) { constMethodOop cm = constMethodOop(obj); blk->do_oop(cm->adr_constants()); blk->do_oop(cm->adr_stackmap_data()); + blk->do_oop(cm->adr_code_section_table()); // Get size before changing pointers. // Don't call size() or oop_size() since that is a virtual call. int size = cm->object_size(); @@ -138,6 +141,8 @@ int constMethodKlass::oop_oop_iterate_m(oop obj, OopClosure* blk, MemRegion mr) if (mr.contains(adr)) blk->do_oop(adr); adr = cm->adr_stackmap_data(); if (mr.contains(adr)) blk->do_oop(adr); + adr = cm->adr_code_section_table(); + if (mr.contains(adr)) blk->do_oop(adr); // Get size before changing pointers. // Don't call size() or oop_size() since that is a virtual call. int size = cm->object_size(); @@ -152,6 +157,7 @@ int constMethodKlass::oop_adjust_pointers(oop obj) { constMethodOop cm = constMethodOop(obj); MarkSweep::adjust_pointer(cm->adr_constants()); MarkSweep::adjust_pointer(cm->adr_stackmap_data()); + MarkSweep::adjust_pointer(cm->adr_code_section_table()); // Get size before changing pointers. // Don't call size() or oop_size() since that is a virtual call. 
int size = cm->object_size(); diff --git a/src/share/vm/oops/constMethodOop.hpp b/src/share/vm/oops/constMethodOop.hpp index 549192b..5cea5c4 100644 --- a/src/share/vm/oops/constMethodOop.hpp +++ b/src/share/vm/oops/constMethodOop.hpp @@ -129,7 +129,7 @@ private: public: oop* oop_block_beg() const { return adr_constants(); } - oop* oop_block_end() const { return adr_stackmap_data() + 1; } + oop* oop_block_end() const { return adr_code_section_table() + 1; } private: // @@ -141,6 +141,9 @@ private: // Raw stackmap data for the method typeArrayOop _stackmap_data; + // (tw) Table mapping code sections for method forward points. + typeArrayOop _code_section_table; + // // End of the oop block. // @@ -195,6 +198,28 @@ public: } bool has_stackmap_table() const { return _stackmap_data != NULL; } + // code section table + typeArrayOop code_section_table() const { return _code_section_table; } + void set_code_section_table(typeArrayOop e) { oop_store_without_check((oop*) &_code_section_table, (oop) e); } + bool has_code_section_table() const { return code_section_table() != NULL && code_section_table()->length() > 0; } + static const int ValuesPerCodeSectionEntry = 3; + int code_section_entries() const { + if (!has_code_section_table()) return 0; + return _code_section_table->length() / ValuesPerCodeSectionEntry; + } + + int code_section_new_index_at(int index) const { + return _code_section_table->short_at(index * ValuesPerCodeSectionEntry); + } + + int code_section_original_index_at(int index) const { + return _code_section_table->short_at(index * ValuesPerCodeSectionEntry + 1); + } + + int code_section_length_at(int index) const { + return _code_section_table->short_at(index * ValuesPerCodeSectionEntry + 2); + } + void init_fingerprint() { const uint64_t initval = CONST64(0x8000000000000000); _fingerprint = initval; @@ -301,6 +326,7 @@ public: // Garbage collection support oop* adr_constants() const { return (oop*)&_constants; } oop* adr_stackmap_data() const { return (oop*)&_stackmap_data; } + oop* adr_code_section_table() const { return (oop*)&_code_section_table; } bool is_conc_safe() { return _is_conc_safe; } void set_is_conc_safe(bool v) { _is_conc_safe = v; } diff --git a/src/share/vm/oops/cpCacheOop.cpp b/src/share/vm/oops/cpCacheOop.cpp index ad62921..f39f202 100644 --- a/src/share/vm/oops/cpCacheOop.cpp +++ b/src/share/vm/oops/cpCacheOop.cpp @@ -37,9 +37,15 @@ // Implememtation of ConstantPoolCacheEntry +void ConstantPoolCacheEntry::copy_from(ConstantPoolCacheEntry *other) { + _flags = other->_flags; // flags +} + void ConstantPoolCacheEntry::initialize_entry(int index) { assert(0 < index && index < 0x10000, "sanity check"); _indices = index; + _f1 = NULL; + _f2 = 0; assert(constant_pool_index() == index, ""); } @@ -162,7 +168,8 @@ void ConstantPoolCacheEntry::set_method(Bytecodes::Code invoke_code, int vtable_index) { assert(!is_secondary_entry(), ""); assert(method->interpreter_entry() != NULL, "should have been set at this point"); - assert(!method->is_obsolete(), "attempt to write obsolete method to cpCache"); + // (tw) No longer valid assert + //assert(!method->is_obsolete(), "attempt to write obsolete method to cpCache"); int byte_no = -1; bool change_to_virtual = false; @@ -183,6 +190,7 @@ void ConstantPoolCacheEntry::set_method(Bytecodes::Code invoke_code, set_method_flags(as_TosState(method->result_type()), ( 1 << is_vfinal_shift) | ((method->is_final_method() ? 1 : 0) << is_final_shift) | + ((method->is_old() ? 1 : 0) << is_old_method_shift) | ((change_to_virtual ? 
1 : 0) << is_forced_virtual_shift), method()->size_of_parameters()); set_f2_as_vfinal_method(method()); @@ -190,9 +198,13 @@ void ConstantPoolCacheEntry::set_method(Bytecodes::Code invoke_code, assert(vtable_index >= 0, "valid index"); assert(!method->is_final_method(), "sanity"); set_method_flags(as_TosState(method->result_type()), + ((method->is_old() ? 1 : 0) << is_old_method_shift) | ((change_to_virtual ? 1 : 0) << is_forced_virtual_shift), method()->size_of_parameters()); set_f2(vtable_index); + + // (tw) save method holder in f1 for virtual calls + set_f1(method()); } byte_no = 2; break; @@ -206,7 +218,8 @@ void ConstantPoolCacheEntry::set_method(Bytecodes::Code invoke_code, // Once is_vfinal is set, it must stay that way, lest we get a dangling oop. set_method_flags(as_TosState(method->result_type()), ((is_vfinal() ? 1 : 0) << is_vfinal_shift) | - ((method->is_final_method() ? 1 : 0) << is_final_shift), + ((method->is_final_method() ? 1 : 0) << is_final_shift) | + ((method->is_old() ? 1 : 0) << is_old_method_shift), method()->size_of_parameters()); set_f1(method()); byte_no = 1; @@ -259,7 +272,7 @@ void ConstantPoolCacheEntry::set_interface_call(methodHandle method, int index) set_f1(interf); set_f2(index); set_method_flags(as_TosState(method->result_type()), - 0, // no option bits + ((method->is_old() ? 1 : 0) << is_old_method_shift), method()->size_of_parameters()); set_bytecode_1(Bytecodes::_invokeinterface); } @@ -520,27 +533,12 @@ void ConstantPoolCacheEntry::update_pointers() { // If this constantPoolCacheEntry refers to old_method then update it // to refer to new_method. bool ConstantPoolCacheEntry::adjust_method_entry(methodOop old_method, - methodOop new_method, bool * trace_name_printed) { + methodOop new_method) { if (is_vfinal()) { - // virtual and final so _f2 contains method ptr instead of vtable index - if (f2_as_vfinal_method() == old_method) { - // match old_method so need an update - // NOTE: can't use set_f2_as_vfinal_method as it asserts on different values - _f2 = (intptr_t)new_method; - if (RC_TRACE_IN_RANGE(0x00100000, 0x00400000)) { - if (!(*trace_name_printed)) { - // RC_TRACE_MESG macro has an embedded ResourceMark - RC_TRACE_MESG(("adjust: name=%s", - Klass::cast(old_method->method_holder())->external_name())); - *trace_name_printed = true; - } - // RC_TRACE macro has an embedded ResourceMark - RC_TRACE(0x00400000, ("cpc vf-entry update: %s(%s)", - new_method->name()->as_C_string(), - new_method->signature()->as_C_string())); - } - + // virtual and final so f2() contains method ptr instead of vtable index + if (f2_as_vfinal_method() != NULL && f2_as_vfinal_method()->method_holder()->klass_part()->new_version()) { + initialize_entry(constant_pool_index()); return true; } @@ -548,84 +546,27 @@ bool ConstantPoolCacheEntry::adjust_method_entry(methodOop old_method, return false; } - if ((oop)_f1 == NULL) { - // NULL f1() means this is a virtual entry so bail out - // We are assuming that the vtable index does not need change. + // (tw) check how to update interface methods! 
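The rewritten adjust_method_entry above (its invokevirtual branch continues right after this note) drops the old method-by-method patching: any entry whose resolved method belongs to a class that now has a newer version is simply reset, so it is re-resolved on its next use. Below is a condensed sketch of that pattern, reusing the accessor names visible in the diff; it is illustrative only and ignores the _f1 == old_method and invokeinterface cases that the patch still treats specially.

// Sketch (not patch code): reset a resolved method entry whose holder has been
// redefined, so the entry is re-resolved the next time the bytecode executes.
// Assumes the entry currently resolves to a methodOop.
static bool invalidate_if_holder_redefined(ConstantPoolCacheEntry* e) {
  methodOop m = e->is_vfinal() ? e->f2_as_vfinal_method() : e->f1_as_method();
  if (m != NULL && m->method_holder()->klass_part()->new_version() != NULL) {
    e->initialize_entry(e->constant_pool_index());   // back to the unresolved state
    return true;                                     // entry was wiped
  }
  return false;                                      // entry left untouched
}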
+ if (bytecode_1() == Bytecodes::_invokevirtual || bytecode_2() == Bytecodes::_invokevirtual) { + + if(f1_as_method()->method_holder()->klass_part()->new_version()) { + initialize_entry(constant_pool_index()); + return true; + } + return false; } if ((oop)_f1 == old_method) { _f1 = new_method; - if (RC_TRACE_IN_RANGE(0x00100000, 0x00400000)) { - if (!(*trace_name_printed)) { - // RC_TRACE_MESG macro has an embedded ResourceMark - RC_TRACE_MESG(("adjust: name=%s", - Klass::cast(old_method->method_holder())->external_name())); - *trace_name_printed = true; - } - // RC_TRACE macro has an embedded ResourceMark - RC_TRACE(0x00400000, ("cpc entry update: %s(%s)", - new_method->name()->as_C_string(), - new_method->signature()->as_C_string())); - } - return true; + } else if(_f1 != NULL && (bytecode_1() != Bytecodes::_invokeinterface && f1_as_method()->method_holder()->klass_part()->new_version())) { + initialize_entry(constant_pool_index()); } return false; } -// a constant pool cache entry should never contain old or obsolete methods -bool ConstantPoolCacheEntry::check_no_old_or_obsolete_entries() { - if (is_vfinal()) { - // virtual and final so _f2 contains method ptr instead of vtable index - methodOop m = (methodOop)_f2; - // Return false if _f2 refers to an old or an obsolete method. - // _f2 == NULL || !m->is_method() are just as unexpected here. - return (m != NULL && m->is_method() && !m->is_old() && !m->is_obsolete()); - } else if ((oop)_f1 == NULL || !((oop)_f1)->is_method()) { - // _f1 == NULL || !_f1->is_method() are OK here - return true; - } - - methodOop m = (methodOop)_f1; - // return false if _f1 refers to an old or an obsolete method - return (!m->is_old() && !m->is_obsolete()); -} - -bool ConstantPoolCacheEntry::is_interesting_method_entry(klassOop k) { - if (!is_method_entry()) { - // not a method entry so not interesting by default - return false; - } - - methodOop m = NULL; - if (is_vfinal()) { - // virtual and final so _f2 contains method ptr instead of vtable index - m = f2_as_vfinal_method(); - } else if (is_f1_null()) { - // NULL _f1 means this is a virtual entry so also not interesting - return false; - } else { - oop f1 = _f1; // _f1 is volatile - if (!f1->is_method()) { - // _f1 can also contain a klassOop for an interface - return false; - } - m = f1_as_method(); - } - - assert(m != NULL && m->is_method(), "sanity check"); - if (m == NULL || !m->is_method() || (k != NULL && m->method_holder() != k)) { - // robustness for above sanity checks or method is not in - // the interesting class - return false; - } - - // the method is in the interesting class so the entry is interesting - return true; -} - void ConstantPoolCacheEntry::print(outputStream* st, int index) const { // print separator if (index == 0) st->print_cr(" -------------"); @@ -663,60 +604,18 @@ void constantPoolCacheOopDesc::initialize(intArray& inverse_index_map) { } } -// RedefineClasses() API support: -// If any entry of this constantPoolCache points to any of -// old_methods, replace it with the corresponding new_method. 
-void constantPoolCacheOopDesc::adjust_method_entries(methodOop* old_methods, methodOop* new_methods, - int methods_length, bool * trace_name_printed) { - - if (methods_length == 0) { - // nothing to do if there are no methods - return; - } - - // get shorthand for the interesting class - klassOop old_holder = old_methods[0]->method_holder(); +void constantPoolCacheOopDesc::adjust_entries(methodOop* old_methods, methodOop* new_methods, + int methods_length) { for (int i = 0; i < length(); i++) { - if (!entry_at(i)->is_interesting_method_entry(old_holder)) { - // skip uninteresting methods - continue; - } - - // The constantPoolCache contains entries for several different - // things, but we only care about methods. In fact, we only care - // about methods in the same class as the one that contains the - // old_methods. At this point, we have an interesting entry. - - for (int j = 0; j < methods_length; j++) { - methodOop old_method = old_methods[j]; - methodOop new_method = new_methods[j]; - - if (entry_at(i)->adjust_method_entry(old_method, new_method, - trace_name_printed)) { - // current old_method matched this entry and we updated it so - // break out and get to the next interesting entry if there one - break; - } + if (entry_at(i)->is_field_entry()) { + // (tw) TODO: Update only field offsets and modify only constant pool entries that + // point to changed fields + entry_at(i)->initialize_entry(entry_at(i)->constant_pool_index()); + } else if(entry_at(i)->is_method_entry()) { + entry_at(i)->adjust_method_entry(NULL, NULL); } } } -// the constant pool cache should never contain old or obsolete methods -bool constantPoolCacheOopDesc::check_no_old_or_obsolete_entries() { - for (int i = 1; i < length(); i++) { - if (entry_at(i)->is_interesting_method_entry(NULL) && - !entry_at(i)->check_no_old_or_obsolete_entries()) { - return false; - } - } - return true; -} -void constantPoolCacheOopDesc::dump_cache() { - for (int i = 1; i < length(); i++) { - if (entry_at(i)->is_interesting_method_entry(NULL)) { - entry_at(i)->print(tty, i); - } - } -} diff --git a/src/share/vm/oops/cpCacheOop.hpp b/src/share/vm/oops/cpCacheOop.hpp index ef26775..6f37d81 100644 --- a/src/share/vm/oops/cpCacheOop.hpp +++ b/src/share/vm/oops/cpCacheOop.hpp @@ -136,7 +136,8 @@ class ConstantPoolCacheEntry VALUE_OBJ_CLASS_SPEC { void set_bytecode_2(Bytecodes::Code code); void set_f1(oop f1) { oop existing_f1 = _f1; // read once - assert(existing_f1 == NULL || existing_f1 == f1, "illegal field change"); + // (tw) need to relax assertion for redefinition + // assert(existing_f1 == NULL || existing_f1 == f1, "illegal field change"); oop_store(&_f1, f1); } void release_set_f1(oop f1); @@ -167,6 +168,7 @@ class ConstantPoolCacheEntry VALUE_OBJ_CLASS_SPEC { tos_state_mask = right_n_bits(tos_state_bits), tos_state_shift = BitsPerInt - tos_state_bits, // see verify_tos_state_shift below // misc. 
option bits; can be any bit position in [16..27] + is_old_method_shift = 19, is_vfinal_shift = 20, is_volatile_shift = 21, is_final_shift = 22, @@ -200,6 +202,8 @@ class ConstantPoolCacheEntry VALUE_OBJ_CLASS_SPEC { void initialize_entry(int original_index); // initialize primary entry void initialize_secondary_entry(int main_index); // initialize secondary entry + void copy_from(ConstantPoolCacheEntry *other); + void set_field( // sets entry to resolved field state Bytecodes::Code get_code, // the bytecode used for reading the field Bytecodes::Code put_code, // the bytecode used for writing the field @@ -361,10 +365,7 @@ class ConstantPoolCacheEntry VALUE_OBJ_CLASS_SPEC { // trace_name_printed is set to true if the current call has // printed the klass name so that other routines in the adjust_* // group don't print the klass name. - bool adjust_method_entry(methodOop old_method, methodOop new_method, - bool * trace_name_printed); - bool check_no_old_or_obsolete_entries(); - bool is_interesting_method_entry(klassOop k); + bool adjust_method_entry(methodOop old_method, methodOop new_method); // Debugging & Printing void print (outputStream* st, int index) const; @@ -485,16 +486,9 @@ class constantPoolCacheOopDesc: public oopDesc { return (base_offset() + ConstantPoolCacheEntry::size_in_bytes() * index); } - // RedefineClasses() API support: - // If any entry of this constantPoolCache points to any of - // old_methods, replace it with the corresponding new_method. - // trace_name_printed is set to true if the current call has - // printed the klass name so that other routines in the adjust_* - // group don't print the klass name. - void adjust_method_entries(methodOop* old_methods, methodOop* new_methods, - int methods_length, bool * trace_name_printed); - bool check_no_old_or_obsolete_entries(); - void dump_cache(); + // (tw) Update method and field references + void adjust_entries(methodOop* old_methods, methodOop* new_methods, + int methods_length); }; #endif // SHARE_VM_OOPS_CPCACHEOOP_HPP diff --git a/src/share/vm/oops/instanceKlass.cpp b/src/share/vm/oops/instanceKlass.cpp index cd3dce0..2a0a2aa 100644 --- a/src/share/vm/oops/instanceKlass.cpp +++ b/src/share/vm/oops/instanceKlass.cpp @@ -250,12 +250,118 @@ void instanceKlass::initialize(TRAPS) { } +void instanceKlass::initialize_redefined_class() { + RC_TRACE(0x00000400, ("initializing redefined class %s", + name()->as_C_string())); + + assert(!is_initialized(), ""); + assert(this->old_version() != NULL, ""); + assert(is_linked(), "must be linked before"); + + + instanceKlassHandle this_oop(Thread::current(), this->as_klassOop()); + class UpdateStaticFieldClosure : public FieldClosure { + + private: + instanceKlassHandle this_oop; + + public: + UpdateStaticFieldClosure(instanceKlassHandle this_oop) { + this->this_oop = this_oop; + } + + virtual void do_field(fieldDescriptor* fd) { + fieldDescriptor result; + bool found = ((instanceKlass *)(this_oop->old_version()->klass_part()))->find_local_field(fd->name(), fd->signature(), &result); + + if (found && result.is_static()) { + int old_offset = result.offset(); + assert(result.field_type() == fd->field_type(), "Old and new field type does not match"); + + oop new_location = this_oop()->java_mirror(); + oop old_location = this_oop->old_version()->java_mirror(); + int offset = fd->offset(); + RC_TRACE(0x00000400, ("Copying static field value for field '%s' old_offset=%d new_offset=%d", + fd->name()->as_C_string(), old_offset, offset)); + + oop cur_oop; + + switch(result.field_type()) { + 
+ // Found static field with same name and type in the old klass => copy value from old to new klass + + case T_BOOLEAN: + new_location->bool_field_put(offset, old_location->bool_field(old_offset)); + DEBUG_ONLY(old_location->byte_field_put(old_offset, 0)); + break; + + case T_CHAR: + new_location->char_field_put(offset, old_location->char_field(old_offset)); + DEBUG_ONLY(old_location->char_field_put(old_offset, 0)); + break; + + case T_FLOAT: + new_location->float_field_put(offset, old_location->float_field(old_offset)); + DEBUG_ONLY(old_location->float_field_put(old_offset, 0)); + break; + + case T_DOUBLE: + new_location->double_field_put(offset, old_location->double_field(old_offset)); + DEBUG_ONLY(old_location->double_field_put(old_offset, 0)); + break; + + case T_BYTE: + new_location->byte_field_put(offset, old_location->byte_field(old_offset)); + DEBUG_ONLY(old_location->byte_field_put(old_offset, 0)); + break; + + case T_SHORT: + new_location->short_field_put(offset, old_location->short_field(old_offset)); + DEBUG_ONLY(old_location->short_field_put(old_offset, 0)); + break; + + case T_INT: + new_location->int_field_put(offset, old_location->int_field(old_offset)); + DEBUG_ONLY(old_location->int_field_put(old_offset, 0)); + break; + + case T_LONG: + new_location->long_field_put(offset, old_location->long_field(old_offset)); + DEBUG_ONLY(old_location->long_field_put(old_offset, 0)); + break; + + case T_OBJECT: + case T_ARRAY: + cur_oop = old_location->obj_field(old_offset); + new_location->obj_field_put_raw(offset, cur_oop); + old_location->obj_field_put_raw(old_offset, NULL); + break; + + default: + ShouldNotReachHere(); + } + } else { + RC_TRACE(0x00000200, ("New static field %s has_initial_value=%d", + fd->name()->as_C_string(), (int)(fd->has_initial_value()))); + // field not found + // (tw) TODO: Probably this call is not necessary here! + // FIXME: idubrov + //ClassFileParser::initialize_static_field(fd, Thread::current()); + } + } + }; + + UpdateStaticFieldClosure cl(this_oop); + this->do_local_static_fields(&cl); +} + + bool instanceKlass::verify_code( instanceKlassHandle this_oop, bool throw_verifyerror, TRAPS) { // 1) Verify the bytecodes Verifier::Mode mode = throw_verifyerror ? Verifier::ThrowException : Verifier::NoException; - return Verifier::verify(this_oop, mode, this_oop->should_verify_class(), CHECK_false); + return Verifier::verify(this_oop, mode, this_oop->should_verify_class(), true, CHECK_false); } @@ -362,7 +468,13 @@ bool instanceKlass::link_class_impl( jt->get_thread_stat()->perf_recursion_counts_addr(), jt->get_thread_stat()->perf_timers_addr(), PerfClassTraceTime::CLASS_VERIFY); - bool verify_ok = verify_code(this_oop, throw_verifyerror, THREAD); + if (this_oop->is_redefining()) { + Thread::current()->set_pretend_new_universe(true); + } + bool verify_ok = verify_code(this_oop, throw_verifyerror, THREAD); + if (this_oop->is_redefining()) { + Thread::current()->set_pretend_new_universe(false); + } if (!verify_ok) { return false; } @@ -400,7 +512,8 @@ bool instanceKlass::link_class_impl( } #endif this_oop->set_init_state(linked); - if (JvmtiExport::should_post_class_prepare()) { + // (tw) Must check for old version in order to prevent infinite loops. 
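The UpdateStaticFieldClosure above copies every static field that also exists, with the same name and signature, in the old class version: the value moves from the old java mirror to the new one, the old slot is zeroed in debug builds, and genuinely new static fields are only traced (their initialization call is left commented out with a TODO). A minimal sketch of the copy for a single int-typed field, reusing the accessors from the diff; the helper itself is hypothetical:

// Sketch: copy one static int field from the old class version's mirror to the new one.
static void copy_static_int_field(instanceKlass* new_ik, fieldDescriptor* new_fd) {
  fieldDescriptor old_fd;
  instanceKlass* old_ik = (instanceKlass*) new_ik->old_version()->klass_part();
  if (old_ik->find_local_field(new_fd->name(), new_fd->signature(), &old_fd) &&
      old_fd.is_static()) {
    oop new_mirror = new_ik->java_mirror();
    oop old_mirror = new_ik->old_version()->java_mirror();
    new_mirror->int_field_put(new_fd->offset(), old_mirror->int_field(old_fd.offset()));
    DEBUG_ONLY(old_mirror->int_field_put(old_fd.offset(), 0));  // catch stale reads of the old slot
  }
}

The CLASS_PREPARE guard on the line that follows posts the JVMTI event only for classes without an old version, which the preceding comment flags as necessary to avoid the infinite loop (JVMTI deadlock) during redefinition.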
+ if (JvmtiExport::should_post_class_prepare() && this_oop->old_version() == NULL /* JVMTI deadlock otherwise */) { Thread *thread = THREAD; assert(thread->is_Java_thread(), "thread->is_Java_thread()"); JvmtiExport::post_class_prepare((JavaThread *) thread, this_oop()); @@ -673,6 +786,18 @@ bool instanceKlass::implements_interface(klassOop k) const { return false; } +bool instanceKlass::implements_interface_any_version(klassOop k) const { + k = k->klass_part()->newest_version(); + if (this->newest_version() == k) return true; + assert(Klass::cast(k)->is_interface(), "should be an interface class"); + for (int i = 0; i < transitive_interfaces()->length(); i++) { + if (((klassOop)transitive_interfaces()->obj_at(i))->klass_part()->newest_version() == k) { + return true; + } + } + return false; +} + objArrayOop instanceKlass::allocate_objArray(int n, int length, TRAPS) { if (length < 0) THROW_0(vmSymbols::java_lang_NegativeArraySizeException()); if (length > arrayOopDesc::max_array_length(T_OBJECT)) { @@ -801,7 +926,25 @@ methodOop instanceKlass::class_initializer() { } void instanceKlass::call_class_initializer_impl(instanceKlassHandle this_oop, TRAPS) { + + ResourceMark rm(THREAD); methodHandle h_method(THREAD, this_oop->class_initializer()); + + if (this_oop->revision_number() != -1){ + methodOop m = NULL; + if (AllowAdvancedClassRedefinition) { + m = this_oop->find_method(vmSymbols::static_transformer_name(), vmSymbols::void_method_signature()); + } + methodHandle method(m); + if (method() != NULL && method()->is_static()) { + RC_TRACE(0x00000200, ("Calling static transformer instead of static initializer")); + h_method = method; + } else if (!((instanceKlass*)this_oop->old_version()->klass_part())->is_not_initialized()) { + // Only execute the static initializer, if it was not yet executed for the old version of the class. 
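call_class_initializer_impl above now special-cases redefined classes: with AllowAdvancedClassRedefinition enabled, a static transformer method (static_transformer_name, void signature) runs in place of <clinit>, and otherwise the early return that follows this note skips <clinit> entirely whenever the old class version already ran it. A condensed sketch of that selection as a hypothetical free function (names as in the diff):

// Sketch: decide what, if anything, to run in place of <clinit> for a redefined class.
static methodOop select_initializer(instanceKlassHandle ik) {
  if (ik->revision_number() != -1) {                       // class was redefined
    if (AllowAdvancedClassRedefinition) {
      methodOop t = ik->find_method(vmSymbols::static_transformer_name(),
                                    vmSymbols::void_method_signature());
      if (t != NULL && t->is_static()) return t;           // run the static transformer
    }
    instanceKlass* old_ik = (instanceKlass*) ik->old_version()->klass_part();
    if (!old_ik->is_not_initialized()) return NULL;        // old version already ran <clinit>
  }
  return ik->class_initializer();                          // regular <clinit>
}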
+ return; + } + } + assert(!this_oop->is_initialized(), "we cannot initialize twice"); if (TraceClassInitialization) { tty->print("%d Initializing ", call_class_initializer_impl_counter++); @@ -949,6 +1092,137 @@ void instanceKlass::methods_do(void f(methodOop method)) { } } +void instanceKlass::store_update_information(GrowableArray &values) { + int *arr = NEW_C_HEAP_ARRAY(int, values.length(), mtClass); + for (int i=0; i typeInfoPair; + +void instanceKlass::store_type_check_information(GrowableArray< Pair > &values) { + Pair *arr = NEW_C_HEAP_ARRAY(typeInfoPair, values.length(), mtClass); + for (int i=0; ias_klassOop(); + + if (_fields_not_changed) { + + class MyFieldClosure : public FieldClosure { + + FieldEvolutionClosure *_cl; + public: + MyFieldClosure(FieldEvolutionClosure *cl) {_cl = cl; } + virtual void do_field(fieldDescriptor* fd) { + _cl->do_changed_field(fd, fd); + } + }; + + MyFieldClosure mfc(cl); + do_nonstatic_fields(&mfc); + } else { + + _fields_not_changed = true; + GrowableArray fds; + while (true) { + for (JavaFieldStream fs(cur_new_klass); !fs.done(); fs.next()) { + fd.initialize(cur_new_klass_oop, fs.index()); + if (fd.is_static()) { + continue; + } + fds.append(fd); + } + + if (cur_new_klass->super() != NULL) { + cur_new_klass_oop = cur_new_klass->super(); + cur_new_klass = instanceKlass::cast(cur_new_klass_oop); + } else { + break; + } + } + + GrowableArray sortedFds; + while (fds.length() > 0) { + int minOffset = 0x7fffffff; + int minIndex = -1; + for (int i=0; ioffset(); + if (curOffset < minOffset) { + minOffset = curOffset; + minIndex = i; + } + } + + sortedFds.append(fds.at(minIndex)); + fds.remove_at(minIndex); + } + + + for (int i=0; isuper()) { + cur_old_klass_oop = cur_old_klass->super(); + cur_old_klass = instanceKlass::cast(cur_old_klass_oop); + } else { + break; + } + } + + if (found) { + if (old_fd.offset() != fd.offset()) { + _fields_not_changed = false; + } + cl->do_changed_field(&old_fd, &fd); + } else { + _fields_not_changed = false; + cl->do_new_field(&fd); + } + } + } +} void instanceKlass::do_local_static_fields(FieldClosure* cl) { for (JavaFieldStream fs(this); !fs.done(); fs.next()) { @@ -1368,6 +1642,20 @@ jmethodID instanceKlass::jmethod_id_or_null(methodOop method) { return id; } +bool instanceKlass::update_jmethod_id(methodOop method, jmethodID newMethodID) { + size_t idnum = (size_t)method->method_idnum(); + jmethodID* jmeths = methods_jmethod_ids_acquire(); + size_t length; // length assigned as debugging crumb + jmethodID id = NULL; + if (jmeths != NULL && // If there is a cache + (length = (size_t)jmeths[0]) > idnum) { // and if it is long enough, + jmeths[idnum+1] = newMethodID; // Set the id (may be NULL) + return true; + } + + return false; +} + // Cache an itable index void instanceKlass::set_cached_itable_index(size_t idnum, int index) { @@ -1527,6 +1815,13 @@ void instanceKlass::remove_dependent_nmethod(nmethod* nm) { last = b; b = b->next(); } + + // (tw) Hack as dependencies get wrong version of klassOop + if(this->old_version() != NULL) { + ((instanceKlass *)this->old_version()->klass_part())->remove_dependent_nmethod(nm); + return; + } + #ifdef ASSERT tty->print_cr("### %s can't find dependent nmethod:", this->external_name()); nm->print(); @@ -2382,6 +2677,9 @@ void instanceKlass::oop_print_on(oop obj, outputStream* st) { klassOop mirrored_klass = java_lang_Class::as_klassOop(obj); st->print(BULLET"fake entry for mirror: "); mirrored_klass->print_value_on(st); + if (mirrored_klass != NULL) { + st->print_cr("revision: %d 
(oldest=%d, newest=%d)", mirrored_klass->klass_part()->revision_number(), mirrored_klass->klass_part()->oldest_version()->klass_part()->revision_number(), mirrored_klass->klass_part()->newest_version()->klass_part()->revision_number()); + } st->cr(); st->print(BULLET"fake entry resolved_constructor: "); methodOop ctor = java_lang_Class::resolved_constructor(obj); diff --git a/src/share/vm/oops/instanceKlass.hpp b/src/share/vm/oops/instanceKlass.hpp index 8a849cb..f41f13c 100644 --- a/src/share/vm/oops/instanceKlass.hpp +++ b/src/share/vm/oops/instanceKlass.hpp @@ -101,6 +101,22 @@ public: virtual void do_field(fieldDescriptor* fd) = 0; }; +// (tw) Iterates over the fields of the old and new class +class FieldEvolutionClosure : public StackObj { +public: + virtual void do_new_field(fieldDescriptor* fd) = 0; + virtual void do_old_field(fieldDescriptor* fd) = 0; + virtual void do_changed_field(fieldDescriptor* old_fd, fieldDescriptor *new_fd) = 0; +}; + +// (tw) Iterates over the methods of the old and new class +class MethodEvolutionClosure : public StackObj { +public: + virtual void do_new_method(methodOop oop) = 0; + virtual void do_old_method(methodOop oop) = 0; + virtual void do_changed_method(methodOop oldOop, methodOop newOop) = 0; +}; + #ifndef PRODUCT // Print fields. // If "obj" argument to constructor is NULL, prints static fields, otherwise prints non-static fields. @@ -285,6 +301,11 @@ class instanceKlass: public Klass { // _idnum_allocated_count. u1 _init_state; // state of class + // (tw) Field that allows for a short-path when calculating updated fields for the second time and + // no fields changed. Testing performance impact with this, can be removed later when the update + // information is cached. + bool _fields_not_changed; + u1 _reference_type; // reference type // embedded Java vtable follows here @@ -452,6 +473,7 @@ class instanceKlass: public Klass { // initialization (virtuals from Klass) bool should_be_initialized() const; // means that initialize should be called void initialize(TRAPS); + void initialize_redefined_class(); void link_class(TRAPS); bool link_class_or_fail(TRAPS); // returns false on failure void unlink_class(); @@ -629,6 +651,7 @@ class instanceKlass: public Klass { static void get_jmethod_id_length_value(jmethodID* cache, size_t idnum, size_t *length_p, jmethodID* id_p); jmethodID jmethod_id_or_null(methodOop method); + bool update_jmethod_id(methodOop method, jmethodID newMethodID); // cached itable index support void set_cached_itable_index(size_t idnum, int index); @@ -711,6 +734,7 @@ class instanceKlass: public Klass { // subclass/subinterface checks bool implements_interface(klassOop k) const; + bool implements_interface_any_version(klassOop k) const; // Access to the implementor of an interface. 
klassOop implementor() const @@ -760,6 +784,12 @@ class instanceKlass: public Klass { void do_local_static_fields(FieldClosure* cl); void do_nonstatic_fields(FieldClosure* cl); // including inherited fields void do_local_static_fields(void f(fieldDescriptor*, TRAPS), TRAPS); + void do_fields_evolution(FieldEvolutionClosure *cl); + void store_update_information(GrowableArray &values); + void clear_update_information(); + void store_type_check_information(GrowableArray< Pair > &values); + void clear_type_check_information(); + void methods_do(void f(methodOop method)); void array_klasses_do(void f(klassOop k)); diff --git a/src/share/vm/oops/instanceKlassKlass.cpp b/src/share/vm/oops/instanceKlassKlass.cpp index 8e7dc12..63d6dc4 100644 --- a/src/share/vm/oops/instanceKlassKlass.cpp +++ b/src/share/vm/oops/instanceKlassKlass.cpp @@ -480,6 +480,28 @@ void instanceKlassKlass::oop_print_on(oop obj, outputStream* st) { instanceKlass* ik = instanceKlass::cast(klassOop(obj)); klassKlass::oop_print_on(obj, st); + // (tw) Output revision number and revision numbers of older / newer and oldest / newest version of this class. + + st->print(BULLET"revision: %d", ik->revision_number()); + + if (ik->new_version() != NULL) { + st->print(" (newer=%d)", ik->new_version()->klass_part()->revision_number()); + } + + if (ik->newest_version() != ik->new_version() && ik->newest_version() != obj) { + st->print(" (newest=%d)", ik->newest_version()->klass_part()->revision_number()); + } + + if (ik->old_version() != NULL) { + st->print(" (old=%d)", ik->old_version()->klass_part()->revision_number()); + } + + if (ik->oldest_version() != ik->old_version() && ik->oldest_version() != obj) { + st->print(" (oldest=%d)", ik->oldest_version()->klass_part()->revision_number()); + } + + st->cr(); + st->print(BULLET"instance size: %d", ik->size_helper()); st->cr(); st->print(BULLET"klass size: %d", ik->object_size()); st->cr(); st->print(BULLET"access: "); ik->access_flags().print_on(st); st->cr(); @@ -663,7 +685,7 @@ void instanceKlassKlass::oop_verify_on(oop obj, outputStream* st) { } guarantee(sib->as_klassOop()->is_klass(), "should be klass"); guarantee(sib->as_klassOop()->is_perm(), "should be in permspace"); - guarantee(sib->super() == super, "siblings should have same superklass"); + guarantee(sib->super() == super || super->klass_part()->newest_version() == SystemDictionary::Object_klass(), "siblings should have same superklass"); sib = sib->next_sibling(); } diff --git a/src/share/vm/oops/instanceRefKlass.cpp b/src/share/vm/oops/instanceRefKlass.cpp index 7db4f03..1171487 100644 --- a/src/share/vm/oops/instanceRefKlass.cpp +++ b/src/share/vm/oops/instanceRefKlass.cpp @@ -455,10 +455,13 @@ void instanceRefKlass::update_nonstatic_oop_maps(klassOop k) { instanceKlass* ik = instanceKlass::cast(k); // Check that we have the right class - debug_only(static bool first_time = true); - assert(k == SystemDictionary::Reference_klass() && first_time, - "Invalid update of maps"); - debug_only(first_time = false); + + // (tw) Asserts no longer valid for class redefinition + // debug_only(static bool first_time = true); + + //assert(k == SystemDictionary::Reference_klass() && first_time, + // "Invalid update of maps"); + //debug_only(first_time = false); assert(ik->nonstatic_oop_map_count() == 1, "just checking"); OopMapBlock* map = ik->start_of_nonstatic_oop_maps(); diff --git a/src/share/vm/oops/klass.cpp b/src/share/vm/oops/klass.cpp index 596d5ad..767588c 100644 --- a/src/share/vm/oops/klass.cpp +++ b/src/share/vm/oops/klass.cpp 
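The klass.cpp/klass.hpp hunks below add _old_version/_new_version fields to every Klass, forming a doubly linked chain of class versions, plus helpers such as newest_version(), oldest_version(), active_version() and is_same_or_older_version() that walk it. The small illustration below shows the chain walk and the "same class up to redefinition" comparison that other hunks rely on (for instance the arrayKlass::compute_is_subtype_of change earlier); the free functions are illustrative, only the accessor names come from the diff.

// Walk to the most recent version of a class (mirrors Klass::newest_version()).
static klassOop newest_version_of(klassOop k) {
  while (k->klass_part()->new_version() != NULL) {
    k = k->klass_part()->new_version();
  }
  return k;
}

// Two klassOops denote the same class, up to redefinition, if their version
// chains end in the same newest version.
static bool same_class_any_version(klassOop a, klassOop b) {
  return newest_version_of(a) == newest_version_of(b);
}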
@@ -55,6 +55,26 @@ bool Klass::is_subclass_of(klassOop k) const { return false; } +void Klass::update_supers_to_newest_version() { + + if (super() != NULL) set_super(super()->klass_part()->newest_version()); + + for (uint i=0; iklass_part()->newest_version(); + } + } + + // Scan the array-of-objects + int cnt = secondary_supers()->length(); + for (int i = 0; i < cnt; i++) { + klassOop cur = (klassOop)secondary_supers()->obj_at(i); + if (cur != NULL) { + secondary_supers()->obj_at_put(i, cur->klass_part()->newest_version()); + } + } +} bool Klass::search_secondary_supers(klassOop k) const { // Put some extra logic here out-of-line, before the search proper. // This cuts down the size of the inline method. @@ -161,6 +181,16 @@ klassOop Klass::base_create_klass_oop(KlassHandle& klass, int size, kl->set_alloc_size(0); TRACE_INIT_ID(kl); + kl->set_redefinition_flags(Klass::NoRedefinition); + kl->set_redefining(false); + kl->set_new_version(NULL); + kl->set_old_version(NULL); + kl->set_redefinition_index(-1); + kl->set_revision_number(-1); + kl->set_field_redefinition_policy(DynamicCheck); + kl->set_static_field_redefinition_policy(AccessDeletedMembers); + kl->set_method_redefinition_policy(AccessDeletedMembers); + kl->set_prototype_header(markOopDesc::prototype()); kl->set_biased_lock_revocation_count(0); kl->set_last_biased_lock_bulk_revocation_time(0); @@ -232,7 +262,7 @@ void Klass::initialize_supers(klassOop k, TRAPS) { set_super(NULL); oop_store_without_check((oop*) &_primary_supers[0], (oop) this->as_klassOop()); assert(super_depth() == 0, "Object must already be initialized properly"); - } else if (k != super() || k == SystemDictionary::Object_klass()) { + } else if (k != super() || k->klass_part()->super() == NULL) { assert(super() == NULL || super() == SystemDictionary::Object_klass(), "initialize this only once to a non-trivial value"); set_super(k); diff --git a/src/share/vm/oops/klass.hpp b/src/share/vm/oops/klass.hpp index bcbd4e7..d086b5d 100644 --- a/src/share/vm/oops/klass.hpp +++ b/src/share/vm/oops/klass.hpp @@ -170,6 +170,7 @@ class Klass_vtbl { void* operator new(size_t ignored, KlassHandle& klass, int size, TRAPS); }; +template class Pair; class Klass : public Klass_vtbl { friend class VMStructs; @@ -222,6 +223,39 @@ class Klass : public Klass_vtbl { oop* oop_block_beg() const { return adr_secondary_super_cache(); } oop* oop_block_end() const { return adr_next_sibling() + 1; } + // (tw) Different class redefinition flags of code evolution. + enum RedefinitionFlags { + + // This class is not redefined at all! + NoRedefinition, + + // There are changes to the class meta data. + ModifyClass = 1, + + // The size of the class meta data changes. + ModifyClassSize = ModifyClass << 1, + + // There are change to the instance format. + ModifyInstances = ModifyClassSize << 1, + + // The size of instances changes. + ModifyInstanceSize = ModifyInstances << 1, + + // A super type of this class is removed. + RemoveSuperType = ModifyInstanceSize << 1, + + // This class (or one of its super classes) has an instance transformer method. + HasInstanceTransformer = RemoveSuperType << 1, + }; + + // (tw) Different policies dealing with deleted fields / methods in old code. + enum RedefinitionPolicy { + StaticCheck, + DynamicCheck, + AccessDeletedMembers, + AccessOldMembers + }; + protected: // // The oop block. 
All oop fields must be declared here and only oop fields @@ -241,6 +275,10 @@ class Klass : public Klass_vtbl { oop _java_mirror; // Superclass klassOop _super; + // Old class + klassOop _old_version; + // New class + klassOop _new_version; // First subclass (NULL if none); _subklass->next_sibling() is next one klassOop _subklass; // Sibling link (or NULL); links all subklasses of a klass @@ -253,6 +291,19 @@ class Klass : public Klass_vtbl { jint _modifier_flags; // Processed access flags, for use by Class.getModifiers. AccessFlags _access_flags; // Access flags. The class/interface distinction is stored here. + // (tw) Non-oop fields for enhanced class redefinition + jint _revision_number; // The revision number for redefined classes + jint _redefinition_index; // Index of this class when performing the redefinition + bool _subtype_changed; + int _redefinition_flags; // Level of class redefinition + bool _is_copying_backwards; // Does the class need to copy fields backwards? => possibly overwrite itself? + int * _update_information; // Update information + Pair * _type_check_information; // Offsets of object fields that need a type check + char _method_redefinition_policy; + char _field_redefinition_policy; + char _static_field_redefinition_policy; + bool _is_redefining; + #ifndef PRODUCT int _verify_count; // to avoid redundant verifies #endif @@ -301,6 +352,99 @@ class Klass : public Klass_vtbl { klassOop secondary_super_cache() const { return _secondary_super_cache; } void set_secondary_super_cache(klassOop k) { oop_store_without_check((oop*) &_secondary_super_cache, (oop) k); } + // BEGIN class redefinition utilities + + // double links between new and old version of a class + klassOop old_version() const { return _old_version; } + void set_old_version(klassOop klass) { assert(_old_version == NULL || klass == NULL, "Can only be set once!"); _old_version = klass; } + klassOop new_version() const { return _new_version; } + void set_new_version(klassOop klass) { assert(_new_version == NULL || klass == NULL, "Can only be set once!"); _new_version = klass; } + + // A subtype of this class is no longer a subtype + bool has_subtype_changed() const { return _subtype_changed; } + void set_subtype_changed(bool b) { assert(is_newest_version() || new_version()->klass_part()->is_newest_version(), "must be newest or second newest version"); + _subtype_changed = b; } + // state of being redefined + int redefinition_index() const { return _redefinition_index; } + void set_redefinition_index(int index) { _redefinition_index = index; } + void set_redefining(bool b) { _is_redefining = b; } + bool is_redefining() const { return _is_redefining; } + int redefinition_flags() const { return _redefinition_flags; } + bool check_redefinition_flag(int flags) const { return (_redefinition_flags & flags) != 0; } + void set_redefinition_flags(int flags) { _redefinition_flags = flags; } + bool is_copying_backwards() const { return _is_copying_backwards; } + void set_copying_backwards(bool b) { _is_copying_backwards = b; } + + // update information + int *update_information() const { return _update_information; } + void set_update_information(int *info) { _update_information = info; } + Pair *type_check_information() const { return _type_check_information; } + void set_type_check_information(Pair *info) { _type_check_information = info; } + + bool is_same_or_older_version(klassOop klass) const { + if (Klass::cast(klass) == this) { return true; } + else if (_old_version == NULL) { return false; } + else { return 
_old_version->klass_part()->is_same_or_older_version(klass); } + } + + // Revision number for redefined classes, -1 for originally loaded classes + jint revision_number() const { + return _revision_number; + } + + bool was_redefined() const { + return _revision_number != -1; + } + + void set_revision_number(jint number) { + _revision_number = number; + } + + char method_redefinition_policy() { + return _method_redefinition_policy; + } + + void set_method_redefinition_policy(char v) { + _method_redefinition_policy = v; + } + + char field_redefinition_policy() { + return _field_redefinition_policy; + } + + void set_field_redefinition_policy(char v) { + _field_redefinition_policy = v; + } + + char static_field_redefinition_policy() { + return _static_field_redefinition_policy; + } + + void set_static_field_redefinition_policy(char v) { + _static_field_redefinition_policy = v; + } + + klassOop oldest_version() const { + if (_old_version == NULL) { return this->as_klassOop(); } + else { return _old_version->klass_part()->oldest_version(); }; + } + + klassOop newest_version() const { + if (_new_version == NULL) { return this->as_klassOop(); } + else { return _new_version->klass_part()->newest_version(); }; + } + + klassOop active_version() const { + if (_new_version == NULL || _new_version->klass_part()->is_redefining()) { return this->as_klassOop(); assert(!this->is_redefining(), "just checking"); } + else { return _new_version->klass_part()->active_version(); }; + } + + bool is_newest_version() const { + return _new_version == NULL; + } + + // END class redefinition utilities + objArrayOop secondary_supers() const { return _secondary_supers; } void set_secondary_supers(objArrayOop k) { oop_store_without_check((oop*) &_secondary_supers, (oop) k); } @@ -361,6 +505,8 @@ class Klass : public Klass_vtbl { void set_next_sibling(klassOop s); oop* adr_super() const { return (oop*)&_super; } + oop* adr_old_version() const { return (oop*)&_old_version; } + oop* adr_new_version() const { return (oop*)&_new_version; } oop* adr_primary_supers() const { return (oop*)&_primary_supers[0]; } oop* adr_secondary_super_cache() const { return (oop*)&_secondary_super_cache; } oop* adr_secondary_supers()const { return (oop*)&_secondary_supers; } @@ -490,6 +636,7 @@ class Klass : public Klass_vtbl { return search_secondary_supers(k); } } + void update_supers_to_newest_version(); bool search_secondary_supers(klassOop k) const; // Find LCA in class hierarchy @@ -816,6 +963,8 @@ class Klass : public Klass_vtbl { inline oop klassOopDesc::java_mirror() const { return klass_part()->java_mirror(); } +inline klassOop klassOopDesc::old_version() const { return klass_part()->old_version(); } +inline klassOop klassOopDesc::new_version() const { return klass_part()->new_version(); } #endif // SHARE_VM_OOPS_KLASS_HPP diff --git a/src/share/vm/oops/klassKlass.cpp b/src/share/vm/oops/klassKlass.cpp index 06809d5..9c08f32 100644 --- a/src/share/vm/oops/klassKlass.cpp +++ b/src/share/vm/oops/klassKlass.cpp @@ -68,6 +68,8 @@ void klassKlass::oop_follow_contents(oop obj) { Klass* k = Klass::cast(klassOop(obj)); // If we are alive it is valid to keep our superclass and subtype caches alive MarkSweep::mark_and_push(k->adr_super()); + MarkSweep::mark_and_push(k->adr_old_version()); + MarkSweep::mark_and_push(k->adr_new_version()); for (juint i = 0; i < Klass::primary_super_limit(); i++) MarkSweep::mark_and_push(k->adr_primary_supers()+i); MarkSweep::mark_and_push(k->adr_secondary_super_cache()); @@ -87,6 +89,8 @@ void 
klassKlass::oop_follow_contents(ParCompactionManager* cm, Klass* k = Klass::cast(klassOop(obj)); // If we are alive it is valid to keep our superclass and subtype caches alive PSParallelCompact::mark_and_push(cm, k->adr_super()); + PSParallelCompact::mark_and_push(cm, k->adr_old_version()); + PSParallelCompact::mark_and_push(cm, k->adr_new_version()); for (juint i = 0; i < Klass::primary_super_limit(); i++) PSParallelCompact::mark_and_push(cm, k->adr_primary_supers()+i); PSParallelCompact::mark_and_push(cm, k->adr_secondary_super_cache()); @@ -106,6 +110,8 @@ int klassKlass::oop_oop_iterate(oop obj, OopClosure* blk) { int size = oop_size(obj); Klass* k = Klass::cast(klassOop(obj)); blk->do_oop(k->adr_super()); + blk->do_oop(k->adr_old_version()); + blk->do_oop(k->adr_new_version()); for (juint i = 0; i < Klass::primary_super_limit(); i++) blk->do_oop(k->adr_primary_supers()+i); blk->do_oop(k->adr_secondary_super_cache()); @@ -134,6 +140,10 @@ int klassKlass::oop_oop_iterate_m(oop obj, OopClosure* blk, MemRegion mr) { oop* adr; adr = k->adr_super(); if (mr.contains(adr)) blk->do_oop(adr); + adr = k->adr_old_version(); + if (mr.contains(adr)) blk->do_oop(adr); + adr = k->adr_new_version(); + if (mr.contains(adr)) blk->do_oop(adr); for (juint i = 0; i < Klass::primary_super_limit(); i++) { adr = k->adr_primary_supers()+i; if (mr.contains(adr)) blk->do_oop(adr); @@ -147,6 +157,8 @@ int klassKlass::oop_oop_iterate_m(oop obj, OopClosure* blk, MemRegion mr) { // The following are "weak links" in the perm gen and are // treated specially in a later phase of a perm gen collection. assert(oop(k)->is_perm(), "should be in perm"); + assert(oop(k->adr_old_version())->is_perm(), "should be in perm"); + assert(oop(k->adr_new_version())->is_perm(), "should be in perm"); assert(oop(k->adr_subklass())->is_perm(), "should be in perm"); assert(oop(k->adr_next_sibling())->is_perm(), "should be in perm"); if (blk->should_remember_klasses() @@ -167,6 +179,8 @@ int klassKlass::oop_adjust_pointers(oop obj) { Klass* k = Klass::cast(klassOop(obj)); MarkSweep::adjust_pointer(k->adr_super()); + MarkSweep::adjust_pointer(k->adr_new_version()); + MarkSweep::adjust_pointer(k->adr_old_version()); for (juint i = 0; i < Klass::primary_super_limit(); i++) MarkSweep::adjust_pointer(k->adr_primary_supers()+i); MarkSweep::adjust_pointer(k->adr_secondary_super_cache()); diff --git a/src/share/vm/oops/klassOop.hpp b/src/share/vm/oops/klassOop.hpp index f212fc5..9731a9c 100644 --- a/src/share/vm/oops/klassOop.hpp +++ b/src/share/vm/oops/klassOop.hpp @@ -41,8 +41,10 @@ class klassOopDesc : public oopDesc { // returns the Klass part containing dispatching behavior Klass* klass_part() const { return (Klass*)((address)this + sizeof(klassOopDesc)); } - // Convenience wrapper + // Convenience wrappers inline oop java_mirror() const; + inline klassOop old_version() const; + inline klassOop new_version() const; private: // These have no implementation since klassOop should never be accessed in this fashion diff --git a/src/share/vm/oops/klassVtable.cpp b/src/share/vm/oops/klassVtable.cpp index 94e2e04..09d3088 100644 --- a/src/share/vm/oops/klassVtable.cpp +++ b/src/share/vm/oops/klassVtable.cpp @@ -97,7 +97,8 @@ void klassVtable::compute_vtable_size_and_num_mirandas(int &vtable_length, vtable_length = Universe::base_vtable_size(); } - if (super == NULL && !Universe::is_bootstrapping() && + // (tw) TODO: Check if we can relax the condition on a fixed base vtable size + /*if (super == NULL && !Universe::is_bootstrapping() && vtable_length 
!= Universe::base_vtable_size()) { // Someone is attempting to redefine java.lang.Object incorrectly. The // only way this should happen is from @@ -107,9 +108,9 @@ void klassVtable::compute_vtable_size_and_num_mirandas(int &vtable_length, vtable_length = Universe::base_vtable_size(); } assert(super != NULL || vtable_length == Universe::base_vtable_size(), - "bad vtable size for class Object"); + "bad vtable size for class Object");*/ assert(vtable_length % vtableEntry::size() == 0, "bad vtable length"); - assert(vtable_length >= Universe::base_vtable_size(), "vtable too small"); + //assert(vtable_length >= Universe::base_vtable_size(), "vtable too small"); } int klassVtable::index_of(methodOop m, int len) const { @@ -657,20 +658,6 @@ bool klassVtable::check_no_old_or_obsolete_entries() { return true; } -void klassVtable::dump_vtable() { - tty->print_cr("vtable dump --"); - for (int i = 0; i < length(); i++) { - methodOop m = unchecked_method_at(i); - if (m != NULL) { - tty->print(" (%5d) ", i); - m->access_flags().print_on(tty); - tty->print(" -- "); - m->print_name(tty); - tty->cr(); - } - } -} - // CDS/RedefineClasses support - clear vtables so they can be reinitialized void klassVtable::clear_vtable() { for (int i = 0; i < _length; i++) table()[i].clear(); @@ -1241,6 +1228,7 @@ void klassVtable::verify(outputStream* st, bool forced) { void klassVtable::verify_against(outputStream* st, klassVtable* vt, int index) { vtableEntry* vte = &vt->table()[index]; + if (vte->method() == NULL || table()[index].method() == NULL) return; if (vte->method()->name() != table()[index].method()->name() || vte->method()->signature() != table()[index].method()->signature()) { fatal("mismatched name/signature of vtable entries"); @@ -1260,6 +1248,8 @@ void klassVtable::print() { void vtableEntry::verify(klassVtable* vt, outputStream* st) { NOT_PRODUCT(FlagSetting fs(IgnoreLockingAssertions, true)); + // (tw) TODO: Check: Does not hold? + if (method() != NULL) { assert(method() != NULL, "must have set method"); method()->verify(); // we sub_type, because it could be a miranda method @@ -1267,7 +1257,13 @@ void vtableEntry::verify(klassVtable* vt, outputStream* st) { #ifndef PRODUCT print(); #endif - fatal(err_msg("vtableEntry " PTR_FORMAT ": method is from subclass", this)); + klassOop first_klass = vt->klass()(); + klassOop second_klass = method()->method_holder(); + // (tw) the following fatal does not work for old versions of classes + if (first_klass->klass_part()->is_newest_version()) { + //fatal1("vtableEntry %#lx: method is from subclass", this); + } + } } } @@ -1275,7 +1271,7 @@ void vtableEntry::verify(klassVtable* vt, outputStream* st) { void vtableEntry::print() { ResourceMark rm; - tty->print("vtableEntry %s: ", method()->name()->as_C_string()); + tty->print("vtableEntry %s: ", (method() == NULL) ? 
"null" : method()->name()->as_C_string()); if (Verbose) { tty->print("m %#lx ", (address)method()); } @@ -1342,6 +1338,33 @@ void klassVtable::print_statistics() { tty->print_cr("%6d bytes total", total); } +bool klassVtable::check_no_old_entries() { + // Check that there really is no entry + for (int i = 0; i < length(); i++) { + methodOop m = unchecked_method_at(i); + if (m != NULL) { + if (m->is_old() || !m->method_holder()->klass_part()->is_newest_version()) { + return false; + } + } + } + return true; +} + +void klassVtable::dump_vtable() { + tty->print_cr("vtable dump --"); + for (int i = 0; i < length(); i++) { + methodOop m = unchecked_method_at(i); + if (m != NULL) { + tty->print(" (%5d) ", i); + m->access_flags().print_on(tty); + tty->print(" -- "); + m->print_name(tty); + tty->cr(); + } + } +} + int klassItable::_total_classes; // Total no. of classes with itables long klassItable::_total_size; // Total no. of bytes used for itables diff --git a/src/share/vm/oops/klassVtable.hpp b/src/share/vm/oops/klassVtable.hpp index 405b0c7..0c8d2f7 100644 --- a/src/share/vm/oops/klassVtable.hpp +++ b/src/share/vm/oops/klassVtable.hpp @@ -100,6 +100,7 @@ class klassVtable : public ResourceObj { int methods_length, bool * trace_name_printed); bool check_no_old_or_obsolete_entries(); void dump_vtable(); + bool check_no_old_entries(); // Garbage collection void oop_follow_contents(); diff --git a/src/share/vm/oops/methodKlass.cpp b/src/share/vm/oops/methodKlass.cpp index 75d0b09..f1b7d2f 100644 --- a/src/share/vm/oops/methodKlass.cpp +++ b/src/share/vm/oops/methodKlass.cpp @@ -93,6 +93,10 @@ methodOop methodKlass::allocate(constMethodHandle xconst, m->set_adapter_entry(NULL); m->clear_code(); // from_c/from_i get set to c2i/i2i + m->set_forward_method(NULL); + m->set_new_version(NULL); + m->set_old_version(NULL); + if (access_flags.is_native()) { m->clear_native_function(); m->set_signature_handler(NULL); @@ -122,6 +126,9 @@ void methodKlass::oop_follow_contents(oop obj) { // Performance tweak: We skip iterating over the klass pointer since we // know that Universe::methodKlassObj never moves. MarkSweep::mark_and_push(m->adr_constMethod()); + MarkSweep::mark_and_push(m->adr_forward_method()); + MarkSweep::mark_and_push(m->adr_new_version()); + MarkSweep::mark_and_push(m->adr_old_version()); if (m->method_data() != NULL) { MarkSweep::mark_and_push(m->adr_method_data()); } @@ -135,6 +142,9 @@ void methodKlass::oop_follow_contents(ParCompactionManager* cm, // Performance tweak: We skip iterating over the klass pointer since we // know that Universe::methodKlassObj never moves. 
PSParallelCompact::mark_and_push(cm, m->adr_constMethod()); + PSParallelCompact::mark_and_push(cm, m->adr_forward_method()); + PSParallelCompact::mark_and_push(cm, m->adr_new_version()); + PSParallelCompact::mark_and_push(cm, m->adr_old_version()); #ifdef COMPILER2 if (m->method_data() != NULL) { PSParallelCompact::mark_and_push(cm, m->adr_method_data()); @@ -152,6 +162,9 @@ int methodKlass::oop_oop_iterate(oop obj, OopClosure* blk) { // Performance tweak: We skip iterating over the klass pointer since we // know that Universe::methodKlassObj never moves blk->do_oop(m->adr_constMethod()); + blk->do_oop(m->adr_forward_method()); + blk->do_oop(m->adr_new_version()); + blk->do_oop(m->adr_old_version()); if (m->method_data() != NULL) { blk->do_oop(m->adr_method_data()); } @@ -170,6 +183,12 @@ int methodKlass::oop_oop_iterate_m(oop obj, OopClosure* blk, MemRegion mr) { oop* adr; adr = m->adr_constMethod(); if (mr.contains(adr)) blk->do_oop(adr); + adr = m->adr_new_version(); + if (mr.contains(adr)) blk->do_oop(adr); + adr = m->adr_forward_method(); + if (mr.contains(adr)) blk->do_oop(adr); + adr = m->adr_old_version(); + if (mr.contains(adr)) blk->do_oop(adr); if (m->method_data() != NULL) { adr = m->adr_method_data(); if (mr.contains(adr)) blk->do_oop(adr); @@ -187,6 +206,9 @@ int methodKlass::oop_adjust_pointers(oop obj) { // Performance tweak: We skip iterating over the klass pointer since we // know that Universe::methodKlassObj never moves. MarkSweep::adjust_pointer(m->adr_constMethod()); + MarkSweep::adjust_pointer(m->adr_forward_method()); + MarkSweep::adjust_pointer(m->adr_new_version()); + MarkSweep::adjust_pointer(m->adr_old_version()); if (m->method_data() != NULL) { MarkSweep::adjust_pointer(m->adr_method_data()); } @@ -202,6 +224,9 @@ int methodKlass::oop_update_pointers(ParCompactionManager* cm, oop obj) { assert(obj->is_method(), "should be method"); methodOop m = methodOop(obj); PSParallelCompact::adjust_pointer(m->adr_constMethod()); + PSParallelCompact::adjust_pointer(m->adr_forward_method()); + PSParallelCompact::adjust_pointer(m->adr_new_version()); + PSParallelCompact::adjust_pointer(m->adr_old_version()); #ifdef COMPILER2 if (m->method_data() != NULL) { PSParallelCompact::adjust_pointer(m->adr_method_data()); @@ -222,7 +247,18 @@ void methodKlass::oop_print_on(oop obj, outputStream* st) { methodOop m = methodOop(obj); // get the effect of PrintOopAddress, always, for methods: st->print_cr(" - this oop: "INTPTR_FORMAT, (intptr_t)m); - st->print (" - method holder: "); m->method_holder()->print_value_on(st); st->cr(); + st->print (" - method holder: "); m->method_holder()->print_value_on(st); + + if (m->method_holder()->klass_part()->new_version() != NULL) { + st->print(" (old)"); + } + st->cr(); + + st->print_cr(" - is obsolete: %d", (int)(m->is_obsolete())); + st->print_cr(" - is old: %d", (int)(m->is_old())); + st->print_cr(" - new version: "INTPTR_FORMAT" ", (address)(m->new_version())); + st->print_cr(" - old version: "INTPTR_FORMAT" ", (address)(m->old_version())); + st->print_cr(" - holder revision: %d", m->method_holder()->klass_part()->revision_number()); st->print (" - constants: "INTPTR_FORMAT" ", (address)m->constants()); m->constants()->print_value_on(st); st->cr(); st->print (" - access: 0x%x ", m->access_flags().as_int()); m->access_flags().print_on(st); st->cr(); diff --git a/src/share/vm/oops/methodOop.cpp b/src/share/vm/oops/methodOop.cpp index 4f59d3a..5cdf147 100644 --- a/src/share/vm/oops/methodOop.cpp +++ b/src/share/vm/oops/methodOop.cpp @@ -328,6 
+328,70 @@ void methodOopDesc::cleanup_inline_caches() { } +bool methodOopDesc::is_in_code_section(int bci) { + // There is no table => every bci is in the code section table. + if (!constMethod()->has_code_section_table()) return true; + + constMethodOop m = constMethod(); + for (int i = 0; i < m->code_section_entries(); ++i) { + u2 new_index = m->code_section_new_index_at(i); + u2 length = m->code_section_length_at(i); + if (bci >= new_index && bci < new_index + length) { + // We are in a specified code section. + return true; + } + } + + return false; +} + +int methodOopDesc::calculate_forward_bci(int bci, methodOop new_method) { + int original_bci = -1; + if (constMethod()->has_code_section_table()) { + assert(is_in_code_section(bci), "can only forward in section"); + // First calculate back to original bci. + constMethodOop m = constMethod(); + for (int i = 0; i < m->code_section_entries(); ++i) { + u2 new_index = m->code_section_new_index_at(i); + u2 original_index = m->code_section_original_index_at(i); + u2 length = m->code_section_length_at(i); + if (bci >= new_index && bci < new_index + length) { + // We are in a specified code section. + original_bci = bci - new_index + original_index; + break; + } + } + assert (original_bci != -1, "must have been in code section"); + } else { + // No code sections specified => we are in an original method. + original_bci = bci; + } + + // We know the original bci => match to new method. + int new_bci = -1; + if (new_method->constMethod()->has_code_section_table()) { + // Map to new bci. + constMethodOop m = new_method->constMethod(); + for (int i = 0; i < m->code_section_entries(); ++i) { + u2 new_index = m->code_section_new_index_at(i); + u2 original_index = m->code_section_original_index_at(i); + u2 length = m->code_section_length_at(i); + if (original_bci >= original_index && original_bci < original_index + length) { + new_bci = original_bci - original_index + new_index; + break; + } + } + assert (new_bci != -1, "must have found new code section"); + + } else { + // We are in an original method. + new_bci = original_bci; + } + + return new_bci; +} + + int methodOopDesc::extra_stack_words() { // not an inline function, to avoid a header dependency on Interpreter return extra_stack_entries() * Interpreter::stackElementSize; @@ -1061,6 +1125,9 @@ methodHandle methodOopDesc::clone_with_new_data(methodHandle m, u_char* new_code // Reset correct method/const method, method size, and parameter info newm->set_constMethod(newcm); + newm->set_forward_method(newm->forward_method()); + newm->set_new_version(newm->new_version()); + newm->set_old_version(newm->old_version()); newm->constMethod()->set_code_size(new_code_length); newm->constMethod()->set_constMethod_size(new_const_method_size); newm->set_method_size(new_method_size); diff --git a/src/share/vm/oops/methodOop.hpp b/src/share/vm/oops/methodOop.hpp index 486e106..11e52bb 100644 --- a/src/share/vm/oops/methodOop.hpp +++ b/src/share/vm/oops/methodOop.hpp @@ -114,6 +114,11 @@ class methodOopDesc : public oopDesc { AccessFlags _access_flags; // Access flags int _vtable_index; // vtable index of this method (see VtableIndexFlag) // note: can have vtables with >2**16 elements (because of inheritance) + // (tw) Newer version of method available? 
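The calculate_forward_bci/is_in_code_section pair above maps a bytecode index from one method version to another in two steps: first back from the old method's code sections to original indices, then forward into the new method's sections. A standalone sketch of that two-step mapping, using a hypothetical CodeSection record in place of the constMethod code section table (all helper names here are illustrative, not VM API):

    #include <cassert>
    #include <cstdio>
    #include <vector>

    // Hypothetical stand-in for one code section table entry.
    struct CodeSection {
      int new_index;       // start bci in the rewritten method
      int original_index;  // start bci in the original method
      int length;          // number of bytecode indices covered
    };

    // Map a bci of the old method back to the original bci (identity if no table).
    static int to_original_bci(const std::vector<CodeSection>& from, int bci) {
      if (from.empty()) return bci;
      for (const CodeSection& s : from) {
        if (bci >= s.new_index && bci < s.new_index + s.length) {
          return bci - s.new_index + s.original_index;
        }
      }
      assert(false && "bci must lie in some code section");
      return -1;
    }

    // Map an original bci into the new method's section layout.
    static int to_new_bci(const std::vector<CodeSection>& to, int original_bci) {
      if (to.empty()) return original_bci;
      for (const CodeSection& s : to) {
        if (original_bci >= s.original_index && original_bci < s.original_index + s.length) {
          return original_bci - s.original_index + s.new_index;
        }
      }
      assert(false && "original bci must lie in some code section");
      return -1;
    }

    int main() {
      std::vector<CodeSection> old_method = {{4, 0, 10}};  // original bcis [0,10) start at 4
      std::vector<CodeSection> new_method = {{8, 0, 10}};  // same bcis start at 8 in the new method
      int bci = 7;  // bci in the old method
      int forwarded = to_new_bci(new_method, to_original_bci(old_method, bci));
      std::printf("old bci %d -> new bci %d\n", bci, forwarded);  // prints 11
      return 0;
    }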
+ methodOop _forward_method; + methodOop _new_version; + methodOop _old_version; + #ifdef CC_INTERP int _result_index; // C++ interpreter needs for converting results to/from stack #endif @@ -175,6 +180,32 @@ class methodOopDesc : public oopDesc { int name_index() const { return constMethod()->name_index(); } void set_name_index(int index) { constMethod()->set_name_index(index); } + methodOop forward_method() const {return _forward_method; } + void set_forward_method(methodOop m) { _forward_method = m; } + bool has_forward_method() const { return forward_method() != NULL; } + methodOop new_version() const {return _new_version; } + void set_new_version(methodOop m) { _new_version = m; } + methodOop newest_version() { if(_new_version == NULL) return this; else return new_version()->newest_version(); } + + methodOop old_version() const {return _old_version; }; + void set_old_version(methodOop m) { + if (m == NULL) { + _old_version = NULL; + return; + } + + assert(_old_version == NULL, "may only be set once"); + assert(this->code_size() == m->code_size(), "must have same code length"); + _old_version = m; + } + + methodOop oldest_version() const { + if(_old_version == NULL) return (methodOop)this; + else { + return old_version()->oldest_version(); + } + } + // signature Symbol* signature() const { return constants()->symbol_at(signature_index()); } int signature_index() const { return constMethod()->signature_index(); } @@ -670,6 +701,10 @@ class methodOopDesc : public oopDesc { // Inline cache support void cleanup_inline_caches(); + // (tw) Method forwarding support. + bool is_in_code_section(int bci); + int calculate_forward_bci(int bci, methodOop new_method); + // Find if klass for method is loaded bool is_klass_loaded_by_klass_index(int klass_index) const; bool is_klass_loaded(int refinfo_index, bool must_be_resolved = false) const; @@ -734,6 +769,9 @@ class methodOopDesc : public oopDesc { // Garbage collection support oop* adr_constMethod() const { return (oop*)&_constMethod; } + oop* adr_forward_method() const { return (oop*)&_forward_method; } + oop* adr_new_version() const { return (oop*)&_new_version; } + oop* adr_old_version() const { return (oop*)&_old_version; } oop* adr_method_data() const { return (oop*)&_method_data; } }; diff --git a/src/share/vm/oops/oop.hpp b/src/share/vm/oops/oop.hpp index 5982c88..4873fca 100644 --- a/src/share/vm/oops/oop.hpp +++ b/src/share/vm/oops/oop.hpp @@ -95,6 +95,7 @@ class oopDesc { narrowOop* compressed_klass_addr(); void set_klass(klassOop k); + void set_klass_no_check(klassOop k); // For klass field compression int klass_gap() const; @@ -135,6 +136,7 @@ class oopDesc { bool is_array() const; bool is_objArray() const; bool is_klass() const; + bool is_instanceKlass() const; bool is_thread() const; bool is_method() const; bool is_constMethod() const; diff --git a/src/share/vm/oops/oop.inline.hpp b/src/share/vm/oops/oop.inline.hpp index f4eb2f7..0acb346 100644 --- a/src/share/vm/oops/oop.inline.hpp +++ b/src/share/vm/oops/oop.inline.hpp @@ -123,6 +123,14 @@ inline void oopDesc::set_klass(klassOop k) { } } +inline void oopDesc::set_klass_no_check(klassOop k) { + if (UseCompressedOops) { + oop_store_without_check(compressed_klass_addr(), (oop)k); + } else { + oop_store_without_check(klass_addr(), (oop) k); + } +} + inline int oopDesc::klass_gap() const { return *(int*)(((intptr_t)this) + klass_gap_offset_in_bytes()); } @@ -156,6 +164,7 @@ inline bool oopDesc::is_objArray() const { return blueprint()->oop_is_ inline bool oopDesc::is_typeArray() const 
{ return blueprint()->oop_is_typeArray(); } inline bool oopDesc::is_javaArray() const { return blueprint()->oop_is_javaArray(); } inline bool oopDesc::is_klass() const { return blueprint()->oop_is_klass(); } +inline bool oopDesc::is_instanceKlass() const { return blueprint()->oop_is_instanceKlass(); } inline bool oopDesc::is_thread() const { return blueprint()->oop_is_thread(); } inline bool oopDesc::is_method() const { return blueprint()->oop_is_method(); } inline bool oopDesc::is_constMethod() const { return blueprint()->oop_is_constMethod(); } diff --git a/src/share/vm/prims/jni.cpp b/src/share/vm/prims/jni.cpp index 2123991..6cbd78c 100644 --- a/src/share/vm/prims/jni.cpp +++ b/src/share/vm/prims/jni.cpp @@ -406,7 +406,7 @@ JNI_ENTRY(jclass, jni_DefineClass(JNIEnv *env, const char *name, jobject loaderR } } klassOop k = SystemDictionary::resolve_from_stream(class_name, class_loader, - Handle(), &st, true, + Handle(), &st, true, KlassHandle(), CHECK_NULL); if (TraceClassResolution && k != NULL) { diff --git a/src/share/vm/prims/jvm.cpp b/src/share/vm/prims/jvm.cpp index 7dcd968..d59052f 100644 --- a/src/share/vm/prims/jvm.cpp +++ b/src/share/vm/prims/jvm.cpp @@ -872,7 +872,7 @@ static jclass jvm_define_class_common(JNIEnv *env, const char *name, Handle protection_domain (THREAD, JNIHandles::resolve(pd)); klassOop k = SystemDictionary::resolve_from_stream(class_name, class_loader, protection_domain, &st, - verify != 0, + verify != 0, KlassHandle(), CHECK_NULL); if (TraceClassResolution && k != NULL) { diff --git a/src/share/vm/prims/jvmtiEnv.cpp b/src/share/vm/prims/jvmtiEnv.cpp index 4ac6b82..30b8e84 100644 --- a/src/share/vm/prims/jvmtiEnv.cpp +++ b/src/share/vm/prims/jvmtiEnv.cpp @@ -290,7 +290,10 @@ JvmtiEnv::RetransformClasses(jint class_count, const jclass* classes) { class_definitions[index].klass = jcls; } VM_RedefineClasses op(class_count, class_definitions, jvmti_class_load_kind_retransform); - VMThread::execute(&op); + { + MutexLocker sd_mutex(RedefineClasses_lock); + VMThread::execute(&op); + } return (op.check_error()); } /* end RetransformClasses */ @@ -299,9 +302,12 @@ JvmtiEnv::RetransformClasses(jint class_count, const jclass* classes) { // class_definitions - pre-checked for NULL jvmtiError JvmtiEnv::RedefineClasses(jint class_count, const jvmtiClassDefinition* class_definitions) { -//TODO: add locking + VM_RedefineClasses op(class_count, class_definitions, jvmti_class_load_kind_redefine); - VMThread::execute(&op); + { + MutexLocker sd_mutex(RedefineClasses_lock); + VMThread::execute(&op); + } return (op.check_error()); } /* end RedefineClasses */ diff --git a/src/share/vm/prims/jvmtiExport.cpp b/src/share/vm/prims/jvmtiExport.cpp index ec8ede3..2bd5983 100644 --- a/src/share/vm/prims/jvmtiExport.cpp +++ b/src/share/vm/prims/jvmtiExport.cpp @@ -2296,7 +2296,7 @@ JvmtiDynamicCodeEventCollector::JvmtiDynamicCodeEventCollector() : _code_blobs(N // iterate over any code blob descriptors collected and post a // DYNAMIC_CODE_GENERATED event to the profiler. 
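For context, the JvmtiEnv::RetransformClasses and JvmtiEnv::RedefineClasses entry points changed above are driven from an agent through the standard JVMTI interface; both now take RedefineClasses_lock so that concurrent redefinition requests are serialized before the VM operation is enqueued. A minimal agent-side caller might look like the sketch below (standard JVMTI API; the jclass and the new class bytes are assumed to come from the agent's own bookkeeping):

    #include <jvmti.h>

    // Minimal sketch: ask the JVM to redefine one class with new bytecodes.
    // 'jvmti' is a jvmtiEnv* obtained in Agent_OnLoad, and 'klass' plus the
    // byte array are assumed to be supplied by the agent; the
    // can_redefine_classes capability must have been acquired beforehand.
    jvmtiError redefine_one_class(jvmtiEnv* jvmti, jclass klass,
                                  const unsigned char* bytes, jint byte_count) {
      jvmtiClassDefinition def;
      def.klass = klass;                  // class to replace
      def.class_byte_count = byte_count;  // length of the new class file image
      def.class_bytes = bytes;            // new class file bytes
      // The VM performs the redefinition as a VM operation at a safepoint;
      // with this patch, concurrent callers are additionally serialized on
      // RedefineClasses_lock before the operation is enqueued.
      return jvmti->RedefineClasses(1, &def);
    }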
JvmtiDynamicCodeEventCollector::~JvmtiDynamicCodeEventCollector() { - assert(!JavaThread::current()->owns_locks(), "all locks must be released to post deferred events"); + assert(!JavaThread::current()->owns_locks_but_redefine_classes_lock(), "all locks must be released to post deferred events"); // iterate over any code blob descriptors that we collected if (_code_blobs != NULL) { for (int i=0; i<_code_blobs->length(); i++) { diff --git a/src/share/vm/prims/jvmtiImpl.cpp b/src/share/vm/prims/jvmtiImpl.cpp index d3fa140..f4f8b57 100644 --- a/src/share/vm/prims/jvmtiImpl.cpp +++ b/src/share/vm/prims/jvmtiImpl.cpp @@ -286,6 +286,8 @@ address JvmtiBreakpoint::getBcp() { void JvmtiBreakpoint::each_method_version_do(method_action meth_act) { ((methodOopDesc*)_method->*meth_act)(_bci); + // DCEVM: TODO: Check how we can implement this differently here! + // add/remove breakpoint to/from versions of the method that // are EMCP. Directly or transitively obsolete methods are // not saved in the PreviousVersionInfo. diff --git a/src/share/vm/prims/jvmtiRedefineClasses.cpp b/src/share/vm/prims/jvmtiRedefineClasses.cpp index eb52388..640e7da 100644 --- a/src/share/vm/prims/jvmtiRedefineClasses.cpp +++ b/src/share/vm/prims/jvmtiRedefineClasses.cpp @@ -1,5 +1,5 @@ /* - * Copyright (c) 2003, 2013, Oracle and/or its affiliates. All rights reserved. + * Copyright (c) 2003, 2011, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it @@ -30,498 +30,637 @@ #include "interpreter/rewriter.hpp" #include "memory/gcLocker.hpp" #include "memory/universe.inline.hpp" -#include "oops/fieldStreams.hpp" +#include "memory/cardTableRS.hpp" #include "oops/klassVtable.hpp" +#include "oops/fieldStreams.hpp" #include "prims/jvmtiImpl.hpp" #include "prims/jvmtiRedefineClasses.hpp" +#include "prims/jvmtiClassFileReconstituter.hpp" #include "prims/methodComparator.hpp" #include "runtime/deoptimization.hpp" #include "runtime/relocator.hpp" #include "utilities/bitMap.inline.hpp" +#include "compiler/compileBroker.hpp" objArrayOop VM_RedefineClasses::_old_methods = NULL; objArrayOop VM_RedefineClasses::_new_methods = NULL; -methodOop* VM_RedefineClasses::_matching_old_methods = NULL; -methodOop* VM_RedefineClasses::_matching_new_methods = NULL; -methodOop* VM_RedefineClasses::_deleted_methods = NULL; -methodOop* VM_RedefineClasses::_added_methods = NULL; +int* VM_RedefineClasses::_matching_old_methods = NULL; +int* VM_RedefineClasses::_matching_new_methods = NULL; +int* VM_RedefineClasses::_deleted_methods = NULL; +int* VM_RedefineClasses::_added_methods = NULL; int VM_RedefineClasses::_matching_methods_length = 0; int VM_RedefineClasses::_deleted_methods_length = 0; int VM_RedefineClasses::_added_methods_length = 0; klassOop VM_RedefineClasses::_the_class_oop = NULL; +// Holds the revision number of the current class redefinition +int VM_RedefineClasses::_revision_number = -1; -VM_RedefineClasses::VM_RedefineClasses(jint class_count, - const jvmtiClassDefinition *class_defs, - JvmtiClassLoadKind class_load_kind) { +VM_RedefineClasses::VM_RedefineClasses(jint class_count, const jvmtiClassDefinition *class_defs, JvmtiClassLoadKind class_load_kind) + : VM_GC_Operation(Universe::heap()->total_full_collections(), GCCause::_jvmti_force_gc) { + RC_TIMER_START(_timer_total); _class_count = class_count; _class_defs = class_defs; _class_load_kind = class_load_kind; - _res = JVMTI_ERROR_NONE; + _updated_oops = 
NULL; + _result = JVMTI_ERROR_NONE; } -bool VM_RedefineClasses::doit_prologue() { - if (_class_count == 0) { - _res = JVMTI_ERROR_NONE; - return false; +VM_RedefineClasses::~VM_RedefineClasses() { + { + MonitorLockerEx ml(RedefinitionSync_lock); + Threads::set_wait_at_instrumentation_entry(false); + ml.notify_all(); + } + + unlock_threads(); + RC_TIMER_STOP(_timer_total); + + if (TimeRedefineClasses) { + tty->print_cr("Timing Prologue: %d", _timer_prologue.milliseconds()); + tty->print_cr("Timing Class Loading: %d", _timer_class_loading.milliseconds()); + tty->print_cr("Timing Waiting for Lock: %d", _timer_wait_for_locks.milliseconds()); + tty->print_cr("Timing Class Linking: %d", _timer_class_linking.milliseconds()); + tty->print_cr("Timing Check Type: %d", _timer_check_type.milliseconds()); + tty->print_cr("Timing Prepare Redefinition: %d", _timer_prepare_redefinition.milliseconds()); + tty->print_cr("Timing Redefinition GC: %d", _timer_redefinition.milliseconds()); + tty->print_cr("Timing Epilogue: %d", _timer_vm_op_epilogue.milliseconds()); + tty->print_cr("------------------------------------------------------------------"); + tty->print_cr("Total Time: %d", _timer_total.milliseconds()); } - if (_class_defs == NULL) { - _res = JVMTI_ERROR_NULL_POINTER; - return false; +} + +// Searches for all affected classes and performs a sorting such that a supertype is always before a subtype. +jvmtiError VM_RedefineClasses::find_sorted_affected_classes(GrowableArray<instanceKlassHandle> *all_affected_klasses) { + + // Create array with all classes for which the redefine command was given + GrowableArray<instanceKlassHandle> klasses_to_redefine; + for (int i=0; i<_class_count; i++) { + oop mirror = JNIHandles::resolve_non_null(_class_defs[i].klass); + instanceKlassHandle klass_handle(Thread::current(), java_lang_Class::as_klassOop(mirror)); + klasses_to_redefine.append(klass_handle); + assert(klass_handle->new_version() == NULL, "Must be new class"); } - for (int i = 0; i < _class_count; i++) { - if (_class_defs[i].klass == NULL) { - _res = JVMTI_ERROR_INVALID_CLASS; - return false; - } - if (_class_defs[i].class_byte_count == 0) { - _res = JVMTI_ERROR_INVALID_CLASS_FORMAT; - return false; - } - if (_class_defs[i].class_bytes == NULL) { - _res = JVMTI_ERROR_NULL_POINTER; - return false; + + // Find classes not directly redefined, but affected by a redefinition (because one of its supertypes is redefined) + GrowableArray<instanceKlassHandle> affected_classes; + FindAffectedKlassesClosure closure(&klasses_to_redefine, &affected_classes); + + // Trace affected classes + if (RC_TRACE_ENABLED(0x00000001)) { + RC_TRACE(0x00000001, ("Klasses affected: %d", + affected_classes.length())); + for (int i=0; i<affected_classes.length(); i++) { + RC_TRACE(0x00000001, ("%s", affected_classes.at(i)->name()->as_C_string())); } } - // Start timer after all the sanity checks; not quite accurate, but - // better than adding a bunch of stop() calls. - RC_TIMER_START(_timer_vm_op_prologue); + // Add the array of affected classes and the array of redefined classes to get a list of all classes that need a redefinition + all_affected_klasses->appendAll(&klasses_to_redefine); + all_affected_klasses->appendAll(&affected_classes); - // We first load new class versions in the prologue, because somewhere down the - // call chain it is required that the current thread is a Java thread. - _res = load_new_class_versions(Thread::current()); - if (_res != JVMTI_ERROR_NONE) { - // Free os::malloc allocated memory in load_new_class_version. 
- os::free(_scratch_classes); - RC_TIMER_STOP(_timer_vm_op_prologue); - return false; + // Sort the affected klasses such that a supertype is always on a smaller array index than its subtype. + jvmtiError result = do_topological_class_sorting(_class_defs, _class_count, &affected_classes, all_affected_klasses, Thread::current()); + if (RC_TRACE_ENABLED(0x00000001)) { + RC_TRACE(0x00000001, ("Redefine order: ")); + for (int i=0; ilength(); i++) { + RC_TRACE(0x00000001, ("%s", + all_affected_klasses->at(i)->name()->as_C_string())); + } } - RC_TIMER_STOP(_timer_vm_op_prologue); - return true; + return result; } -void VM_RedefineClasses::doit() { - Thread *thread = Thread::current(); +// Searches for the class bytes of the given class and returns them as a byte array. +jvmtiError VM_RedefineClasses::find_class_bytes(instanceKlassHandle the_class, const unsigned char **class_bytes, jint *class_byte_count, jboolean *not_changed) { - if (UseSharedSpaces) { - // Sharing is enabled so we remap the shared readonly space to - // shared readwrite, private just in case we need to redefine - // a shared class. We do the remap during the doit() phase of - // the safepoint to be safer. - if (!CompactingPermGenGen::remap_shared_readonly_as_readwrite()) { - RC_TRACE_WITH_THREAD(0x00000001, thread, - ("failed to remap shared readonly space to readwrite, private")); - _res = JVMTI_ERROR_INTERNAL; - return; + *not_changed = false; + + // Search for the index in the redefinition array that corresponds to the current class + int j; + for (j=0; j<_class_count; j++) { + oop mirror = JNIHandles::resolve_non_null(_class_defs[j].klass); + klassOop the_class_oop = java_lang_Class::as_klassOop(mirror); + if (the_class_oop == the_class()) { + break; } } - for (int i = 0; i < _class_count; i++) { - redefine_single_class(_class_defs[i].klass, _scratch_classes[i], thread); - } - // Disable any dependent concurrent compilations - SystemDictionary::notice_modification(); + if (j == _class_count) { - // Set flag indicating that some invariants are no longer true. - // See jvmtiExport.hpp for detailed explanation. - JvmtiExport::set_has_redefined_a_class(); + *not_changed = true; -// check_class() is optionally called for product bits, but is -// always called for non-product bits. -#ifdef PRODUCT - if (RC_TRACE_ENABLED(0x00004000)) { -#endif - RC_TRACE_WITH_THREAD(0x00004000, thread, ("calling check_class")); - SystemDictionary::classes_do(check_class, thread); -#ifdef PRODUCT - } -#endif -} + // Redefine with same bytecodes. This is a class that is only indirectly affected by redefinition, + // so the user did not specify a different bytecode for that class. -void VM_RedefineClasses::doit_epilogue() { - // Free os::malloc allocated memory. - // The memory allocated in redefine will be free'ed in next VM operation. - os::free(_scratch_classes); - - if (RC_TRACE_ENABLED(0x00000004)) { - // Used to have separate timers for "doit" and "all", but the timer - // overhead skewed the measurements. 
- jlong doit_time = _timer_rsc_phase1.milliseconds() + - _timer_rsc_phase2.milliseconds(); - jlong all_time = _timer_vm_op_prologue.milliseconds() + doit_time; - - RC_TRACE(0x00000004, ("vm_op: all=" UINT64_FORMAT - " prologue=" UINT64_FORMAT " doit=" UINT64_FORMAT, all_time, - _timer_vm_op_prologue.milliseconds(), doit_time)); - RC_TRACE(0x00000004, - ("redefine_single_class: phase1=" UINT64_FORMAT " phase2=" UINT64_FORMAT, - _timer_rsc_phase1.milliseconds(), _timer_rsc_phase2.milliseconds())); + if (the_class->get_cached_class_file_bytes() == NULL) { + // not cached, we need to reconstitute the class file from VM representation + constantPoolHandle constants(Thread::current(), the_class->constants()); + ObjectLocker ol(constants, Thread::current()); // lock constant pool while we query it + + JvmtiClassFileReconstituter reconstituter(the_class); + if (reconstituter.get_error() != JVMTI_ERROR_NONE) { + return reconstituter.get_error(); + } + + *class_byte_count = (jint)reconstituter.class_file_size(); + *class_bytes = (unsigned char*)reconstituter.class_file_bytes(); + + } else { + + // it is cached, get it from the cache + *class_byte_count = the_class->get_cached_class_file_len(); + *class_bytes = the_class->get_cached_class_file_bytes(); + } + + } else { + + // Redefine with bytecodes at index j + *class_bytes = _class_defs[j].class_bytes; + *class_byte_count = _class_defs[j].class_byte_count; } + + return JVMTI_ERROR_NONE; } -bool VM_RedefineClasses::is_modifiable_class(oop klass_mirror) { - // classes for primitives cannot be redefined - if (java_lang_Class::is_primitive(klass_mirror)) { +// Prologue of the VM operation, called on the Java thread in parallel to normal program execution +bool VM_RedefineClasses::doit_prologue() { + + _revision_number++; + RC_TRACE(0x00000001, ("Redefinition with revision number %d started!", _revision_number)); + + assert(Thread::current()->is_Java_thread(), "must be Java thread"); + RC_TIMER_START(_timer_prologue); + + if (!check_arguments()) { + RC_TIMER_STOP(_timer_prologue); return false; } - klassOop the_class_oop = java_lang_Class::as_klassOop(klass_mirror); - // classes for arrays cannot be redefined - if (the_class_oop == NULL || !Klass::cast(the_class_oop)->oop_is_instance()) { + + // We first load new class versions in the prologue, because somewhere down the + // call chain it is required that the current thread is a Java thread. + _new_classes = new (ResourceObj::C_HEAP, mtInternal) GrowableArray(5, true); + _result = load_new_class_versions(Thread::current()); + + RC_TRACE(0x00000001, ("Loaded new class versions!")); + if (_result != JVMTI_ERROR_NONE) { + RC_TRACE(0x00000001, ("error occured: %d!", _result)); + delete _new_classes; + _new_classes = NULL; + RC_TIMER_STOP(_timer_prologue); return false; } + + RC_TRACE(0x00000001, ("nearly finished")); + VM_GC_Operation::doit_prologue(); + RC_TIMER_STOP(_timer_prologue); + RC_TRACE(0x00000001, ("doit_prologue finished!")); return true; } -// Append the current entry at scratch_i in scratch_cp to *merge_cp_p -// where the end of *merge_cp_p is specified by *merge_cp_length_p. For -// direct CP entries, there is just the current entry to append. For -// indirect and double-indirect CP entries, there are zero or more -// referenced CP entries along with the current entry to append. -// Indirect and double-indirect CP entries are handled by recursive -// calls to append_entry() as needed. The referenced CP entries are -// always appended to *merge_cp_p before the referee CP entry. 
These -// referenced CP entries may already exist in *merge_cp_p in which case -// there is nothing extra to append and only the current entry is -// appended. -void VM_RedefineClasses::append_entry(constantPoolHandle scratch_cp, - int scratch_i, constantPoolHandle *merge_cp_p, int *merge_cp_length_p, - TRAPS) { - - // append is different depending on entry tag type - switch (scratch_cp->tag_at(scratch_i).value()) { - - // The old verifier is implemented outside the VM. It loads classes, - // but does not resolve constant pool entries directly so we never - // see Class entries here with the old verifier. Similarly the old - // verifier does not like Class entries in the input constant pool. - // The split-verifier is implemented in the VM so it can optionally - // and directly resolve constant pool entries to load classes. The - // split-verifier can accept either Class entries or UnresolvedClass - // entries in the input constant pool. We revert the appended copy - // back to UnresolvedClass so that either verifier will be happy - // with the constant pool entry. - case JVM_CONSTANT_Class: - { - // revert the copy to JVM_CONSTANT_UnresolvedClass - (*merge_cp_p)->unresolved_klass_at_put(*merge_cp_length_p, - scratch_cp->klass_name_at(scratch_i)); - - if (scratch_i != *merge_cp_length_p) { - // The new entry in *merge_cp_p is at a different index than - // the new entry in scratch_cp so we need to map the index values. - map_index(scratch_cp, scratch_i, *merge_cp_length_p); - } - (*merge_cp_length_p)++; - } break; - - // these are direct CP entries so they can be directly appended, - // but double and long take two constant pool entries - case JVM_CONSTANT_Double: // fall through - case JVM_CONSTANT_Long: - { - constantPoolOopDesc::copy_entry_to(scratch_cp, scratch_i, *merge_cp_p, *merge_cp_length_p, - THREAD); - - if (scratch_i != *merge_cp_length_p) { - // The new entry in *merge_cp_p is at a different index than - // the new entry in scratch_cp so we need to map the index values. - map_index(scratch_cp, scratch_i, *merge_cp_length_p); - } - (*merge_cp_length_p) += 2; - } break; - - // these are direct CP entries so they can be directly appended - case JVM_CONSTANT_Float: // fall through - case JVM_CONSTANT_Integer: // fall through - case JVM_CONSTANT_Utf8: // fall through - - // This was an indirect CP entry, but it has been changed into - // an interned string so this entry can be directly appended. - case JVM_CONSTANT_String: // fall through - - // These were indirect CP entries, but they have been changed into - // Symbol*s so these entries can be directly appended. - case JVM_CONSTANT_UnresolvedClass: // fall through - case JVM_CONSTANT_UnresolvedString: - { - constantPoolOopDesc::copy_entry_to(scratch_cp, scratch_i, *merge_cp_p, *merge_cp_length_p, - THREAD); +// Checks basic properties of the arguments of the redefinition command. +bool VM_RedefineClasses::check_arguments() { - if (scratch_i != *merge_cp_length_p) { - // The new entry in *merge_cp_p is at a different index than - // the new entry in scratch_cp so we need to map the index values. 
- map_index(scratch_cp, scratch_i, *merge_cp_length_p); - } - (*merge_cp_length_p)++; - } break; + if (_class_count == 0) RC_ABORT(JVMTI_ERROR_NONE); + if (_class_defs == NULL) RC_ABORT(JVMTI_ERROR_NULL_POINTER); + for (int i = 0; i < _class_count; i++) { + if (_class_defs[i].klass == NULL) RC_ABORT(JVMTI_ERROR_INVALID_CLASS); + if (_class_defs[i].class_byte_count == 0) RC_ABORT(JVMTI_ERROR_INVALID_CLASS_FORMAT); + if (_class_defs[i].class_bytes == NULL) RC_ABORT(JVMTI_ERROR_NULL_POINTER); + } - // this is an indirect CP entry so it needs special handling - case JVM_CONSTANT_NameAndType: - { - int name_ref_i = scratch_cp->name_ref_index_at(scratch_i); - int new_name_ref_i = 0; - bool match = (name_ref_i < *merge_cp_length_p) && - scratch_cp->compare_entry_to(name_ref_i, *merge_cp_p, name_ref_i, - THREAD); - if (!match) { - // forward reference in *merge_cp_p or not a direct match + return true; +} - int found_i = scratch_cp->find_matching_entry(name_ref_i, *merge_cp_p, - THREAD); - if (found_i != 0) { - guarantee(found_i != name_ref_i, - "compare_entry_to() and find_matching_entry() do not agree"); - - // Found a matching entry somewhere else in *merge_cp_p so - // just need a mapping entry. - new_name_ref_i = found_i; - map_index(scratch_cp, name_ref_i, found_i); - } else { - // no match found so we have to append this entry to *merge_cp_p - append_entry(scratch_cp, name_ref_i, merge_cp_p, merge_cp_length_p, - THREAD); - // The above call to append_entry() can only append one entry - // so the post call query of *merge_cp_length_p is only for - // the sake of consistency. - new_name_ref_i = *merge_cp_length_p - 1; +jvmtiError VM_RedefineClasses::check_exception() const { + Thread* THREAD = Thread::current(); + if (HAS_PENDING_EXCEPTION) { + + Symbol* ex_name = PENDING_EXCEPTION->klass()->klass_part()->name(); + RC_TRACE(0x00000001, ("parse_stream exception: '%s'", + ex_name->as_C_string())); + if (TraceRedefineClasses >= 1) { + java_lang_Throwable::print(PENDING_EXCEPTION, tty); + tty->print_cr(""); + } + CLEAR_PENDING_EXCEPTION; + + if (ex_name == vmSymbols::java_lang_UnsupportedClassVersionError()) { + return JVMTI_ERROR_UNSUPPORTED_VERSION; + } else if (ex_name == vmSymbols::java_lang_ClassFormatError()) { + return JVMTI_ERROR_INVALID_CLASS_FORMAT; + } else if (ex_name == vmSymbols::java_lang_ClassCircularityError()) { + return JVMTI_ERROR_CIRCULAR_CLASS_DEFINITION; + } else if (ex_name == vmSymbols::java_lang_NoClassDefFoundError()) { + // The message will be "XXX (wrong name: YYY)" + return JVMTI_ERROR_NAMES_DONT_MATCH; + } else if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) { + return JVMTI_ERROR_OUT_OF_MEMORY; + } else { + // Just in case more exceptions can be thrown.. + return JVMTI_ERROR_FAILS_VERIFICATION; + } + } + + return JVMTI_ERROR_NONE; +} + +// Loads all new class versions and stores the instanceKlass handles in an array. 
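Before the function body that follows, a bird's-eye sketch of the flow that load_new_class_versions implements may help: collect the affected classes in supertype order, parse and register each new version, then link everything in a second pass, rolling back on the first error. The helpers below are stubs standing in for the VM calls used in the real code, so this is a shape sketch rather than the actual implementation:

    #include <cstdio>
    #include <vector>

    // Stand-ins for VM entities; the real code works on instanceKlassHandle,
    // jvmtiError, SystemDictionary::resolve_from_stream() and link_class().
    struct KlassStub { const char* name; };
    enum Result { Ok, Failed };

    static Result find_sorted_affected(std::vector<KlassStub>* out) {
      out->push_back({"supertypes first, subtypes later"});
      return Ok;
    }
    static Result load_one(const KlassStub& k) { std::printf("load %s\n", k.name); return Ok; }
    static Result link_one(const KlassStub& k) { std::printf("link %s\n", k.name); return Ok; }

    // Shape of load_new_class_versions(): collect the affected classes in
    // supertype order, parse and register every new version, then link them
    // all in a second pass, giving up (and rolling back) on the first error.
    static Result load_new_class_versions_sketch() {
      std::vector<KlassStub> affected;
      if (find_sorted_affected(&affected) != Ok) return Failed;
      for (const KlassStub& k : affected)
        if (load_one(k) != Ok) return Failed;   // real code calls rollback()
      for (const KlassStub& k : affected)
        if (link_one(k) != Ok) return Failed;   // linking is deliberately a second pass
      return Ok;
    }

    int main() { return load_new_class_versions_sketch() == Ok ? 0 : 1; }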
+jvmtiError VM_RedefineClasses::load_new_class_versions(TRAPS) { + + ResourceMark rm(THREAD); + + RC_TRACE(0x00000001, ("===================================================================")); + RC_TRACE(0x00000001, ("load new class versions (%d)", + _class_count)); + + // Retrieve an array of all classes that need to be redefined + GrowableArray<instanceKlassHandle> all_affected_klasses; + jvmtiError err = find_sorted_affected_classes(&all_affected_klasses); + if (err != JVMTI_ERROR_NONE) { + RC_TRACE(0x00000001, ("Error finding sorted affected classes: %d", + (int)err)); + return err; + } + + + JvmtiThreadState *state = JvmtiThreadState::state_for(JavaThread::current()); + + _max_redefinition_flags = Klass::NoRedefinition; + jvmtiError result = JVMTI_ERROR_NONE; + + for (int i=0; i<all_affected_klasses.length(); i++) { + instanceKlassHandle the_class = all_affected_klasses.at(i); + RC_TRACE(0x00000001, ("%s", the_class->name()->as_C_string())); + + the_class->link_class(THREAD); + result = check_exception(); + if (result != JVMTI_ERROR_NONE) break; + + // Find new class bytes + const unsigned char* class_bytes; + jint class_byte_count; + jvmtiError error; + jboolean not_changed; + if ((error = find_class_bytes(the_class, &class_bytes, &class_byte_count, &not_changed)) != JVMTI_ERROR_NONE) { + RC_TRACE(0x00000001, ("Error finding class bytes: %d", + (int)error)); + result = error; + break; + } + assert(class_bytes != NULL && class_byte_count != 0, "Class bytes defined at this point!"); + + + // Set redefined class handle in JvmtiThreadState class. + // This redefined class is sent to agent event handler for class file + // load hook event. + state->set_class_being_redefined(&the_class, _class_load_kind); + + RC_TRACE(0x00000002, ("Before resolving from stream")); + + RC_TIMER_STOP(_timer_prologue); + RC_TIMER_START(_timer_class_loading); + + + // Parse the stream. + Handle the_class_loader(THREAD, the_class->class_loader()); + Handle protection_domain(THREAD, the_class->protection_domain()); + Symbol* the_class_sym = the_class->name(); + ClassFileStream st((u1*) class_bytes, class_byte_count, (char *)"__VM_RedefineClasses__"); + instanceKlassHandle new_class(THREAD, SystemDictionary::resolve_from_stream(the_class_sym, + the_class_loader, + protection_domain, + &st, + true, + the_class, + THREAD)); + + not_changed = false; + + RC_TIMER_STOP(_timer_class_loading); + RC_TIMER_START(_timer_prologue); + + RC_TRACE(0x00000002, ("After resolving class from stream!")); + // Clear class_being_redefined just to be sure. + state->clear_class_being_redefined(); + + result = check_exception(); + if (result != JVMTI_ERROR_NONE) break; + +#ifdef ASSERT + + assert(new_class() != NULL, "Class could not be loaded!"); + assert(new_class() != the_class(), "must be different"); + assert(new_class->new_version() == NULL && new_class->old_version() != NULL, ""); + + + objArrayOop k_interfaces = new_class->local_interfaces(); + for (int j=0; j<k_interfaces->length(); j++) { + assert(((klassOop)k_interfaces->obj_at(j))->klass_part()->is_newest_version(), "just checking"); + } + + if (!THREAD->is_Compiler_thread()) { + + RC_TRACE(0x00000002, ("name=%s loader="INTPTR_FORMAT" protection_domain="INTPTR_FORMAT" ", + the_class->name()->as_C_string(), + (address)(the_class->class_loader()), + (address)(the_class->protection_domain()))); + // If we are on the compiler thread, we must not try to resolve a class. 
+ klassOop systemLookup = SystemDictionary::resolve_or_null(the_class->name(), the_class->class_loader(), the_class->protection_domain(), THREAD); + + if (systemLookup != NULL) { + assert(systemLookup == new_class->old_version(), "Old class must be in system dictionary!"); + + + Klass *subklass = new_class()->klass_part()->subklass(); + while (subklass != NULL) { + assert(subklass->new_version() == NULL, "Most recent version of class!"); + subklass = subklass->next_sibling(); } + } else { + // This can happen for reflection generated classes.. ? + CLEAR_PENDING_EXCEPTION; } + } - int signature_ref_i = scratch_cp->signature_ref_index_at(scratch_i); - int new_signature_ref_i = 0; - match = (signature_ref_i < *merge_cp_length_p) && - scratch_cp->compare_entry_to(signature_ref_i, *merge_cp_p, - signature_ref_i, THREAD); - if (!match) { - // forward reference in *merge_cp_p or not a direct match - - int found_i = scratch_cp->find_matching_entry(signature_ref_i, - *merge_cp_p, THREAD); - if (found_i != 0) { - guarantee(found_i != signature_ref_i, - "compare_entry_to() and find_matching_entry() do not agree"); - - // Found a matching entry somewhere else in *merge_cp_p so - // just need a mapping entry. - new_signature_ref_i = found_i; - map_index(scratch_cp, signature_ref_i, found_i); - } else { - // no match found so we have to append this entry to *merge_cp_p - append_entry(scratch_cp, signature_ref_i, merge_cp_p, - merge_cp_length_p, THREAD); - // The above call to append_entry() can only append one entry - // so the post call query of *merge_cp_length_p is only for - // the sake of consistency. - new_signature_ref_i = *merge_cp_length_p - 1; - } +#endif + + if (RC_TRACE_ENABLED(0x00000001)) { + if (new_class->layout_helper() != the_class->layout_helper()) { + RC_TRACE(0x00000001, ("Instance size change for class %s: new=%d old=%d", + new_class->name()->as_C_string(), + new_class->layout_helper(), + the_class->layout_helper())); } + } - // If the referenced entries already exist in *merge_cp_p, then - // both new_name_ref_i and new_signature_ref_i will both be 0. - // In that case, all we are appending is the current entry. 
- if (new_name_ref_i == 0) { - new_name_ref_i = name_ref_i; - } else { - RC_TRACE(0x00080000, - ("NameAndType entry@%d name_ref_index change: %d to %d", - *merge_cp_length_p, name_ref_i, new_name_ref_i)); + // Set the new version of the class + new_class->set_revision_number(_revision_number); + new_class->set_redefinition_index(i); + the_class->set_new_version(new_class()); + _new_classes->append(new_class); + + assert(new_class->new_version() == NULL, ""); + + int redefinition_flags = Klass::NoRedefinition; + + if (not_changed) { + redefinition_flags = Klass::NoRedefinition; + } else if (AllowAdvancedClassRedefinition) { + redefinition_flags = calculate_redefinition_flags(new_class); + } else { + jvmtiError allowed = check_redefinition_allowed(new_class); + if (allowed != JVMTI_ERROR_NONE) { + RC_TRACE(0x00000001, ("Error redefinition not allowed!")); + result = allowed; + break; } - if (new_signature_ref_i == 0) { - new_signature_ref_i = signature_ref_i; - } else { - RC_TRACE(0x00080000, - ("NameAndType entry@%d signature_ref_index change: %d to %d", - *merge_cp_length_p, signature_ref_i, new_signature_ref_i)); + redefinition_flags = Klass::ModifyClass; + } + + if (new_class->super() != NULL) { + redefinition_flags = redefinition_flags | new_class->super()->klass_part()->redefinition_flags(); + } + + for (int j=0; jlocal_interfaces()->length(); j++) { + redefinition_flags = redefinition_flags | ((klassOop)new_class->local_interfaces()->obj_at(j))->klass_part()->redefinition_flags(); + } + + new_class->set_redefinition_flags(redefinition_flags); + + _max_redefinition_flags = _max_redefinition_flags | redefinition_flags; + + if ((redefinition_flags & Klass::ModifyInstances) != 0) { + // TODO: Check if watch access flags of static fields are updated correctly. + calculate_instance_update_information(_new_classes->at(i)()); + } else { + assert(new_class->layout_helper() >> 1 == new_class->old_version()->klass_part()->layout_helper() >> 1, "must be equal"); + assert(new_class->fields()->length() == ((instanceKlass*)new_class->old_version()->klass_part())->fields()->length(), "must be equal"); + + fieldDescriptor fd_new; + fieldDescriptor fd_old; + for (JavaFieldStream fs(new_class); !fs.done(); fs.next()) { + fd_new.initialize(new_class(), fs.index()); + fd_old.initialize(new_class->old_version(), fs.index()); + transfer_special_access_flags(&fd_old, &fd_new); } + } - (*merge_cp_p)->name_and_type_at_put(*merge_cp_length_p, - new_name_ref_i, new_signature_ref_i); - if (scratch_i != *merge_cp_length_p) { - // The new entry in *merge_cp_p is at a different index than - // the new entry in scratch_cp so we need to map the index values. 
- map_index(scratch_cp, scratch_i, *merge_cp_length_p); + if (RC_TRACE_ENABLED(0x00000008)) { + if (new_class->super() != NULL) { + RC_TRACE(0x00000008, ("Super class is %s", + new_class->super()->klass_part()->name()->as_C_string())); } - (*merge_cp_length_p)++; - } break; + } - // this is a double-indirect CP entry so it needs special handling - case JVM_CONSTANT_Fieldref: // fall through - case JVM_CONSTANT_InterfaceMethodref: // fall through - case JVM_CONSTANT_Methodref: - { - int klass_ref_i = scratch_cp->uncached_klass_ref_index_at(scratch_i); - int new_klass_ref_i = 0; - bool match = (klass_ref_i < *merge_cp_length_p) && - scratch_cp->compare_entry_to(klass_ref_i, *merge_cp_p, klass_ref_i, - THREAD); - if (!match) { - // forward reference in *merge_cp_p or not a direct match +#ifdef ASSERT + assert(new_class->super() == NULL || new_class->super()->klass_part()->new_version() == NULL, "Super klass must be newest version!"); - int found_i = scratch_cp->find_matching_entry(klass_ref_i, *merge_cp_p, - THREAD); - if (found_i != 0) { - guarantee(found_i != klass_ref_i, - "compare_entry_to() and find_matching_entry() do not agree"); - - // Found a matching entry somewhere else in *merge_cp_p so - // just need a mapping entry. - new_klass_ref_i = found_i; - map_index(scratch_cp, klass_ref_i, found_i); - } else { - // no match found so we have to append this entry to *merge_cp_p - append_entry(scratch_cp, klass_ref_i, merge_cp_p, merge_cp_length_p, - THREAD); - // The above call to append_entry() can only append one entry - // so the post call query of *merge_cp_length_p is only for - // the sake of consistency. Without the optimization where we - // use JVM_CONSTANT_UnresolvedClass, then up to two entries - // could be appended. - new_klass_ref_i = *merge_cp_length_p - 1; - } - } + the_class->vtable()->verify(tty); + new_class->vtable()->verify(tty); +#endif - int name_and_type_ref_i = - scratch_cp->uncached_name_and_type_ref_index_at(scratch_i); - int new_name_and_type_ref_i = 0; - match = (name_and_type_ref_i < *merge_cp_length_p) && - scratch_cp->compare_entry_to(name_and_type_ref_i, *merge_cp_p, - name_and_type_ref_i, THREAD); - if (!match) { - // forward reference in *merge_cp_p or not a direct match - - int found_i = scratch_cp->find_matching_entry(name_and_type_ref_i, - *merge_cp_p, THREAD); - if (found_i != 0) { - guarantee(found_i != name_and_type_ref_i, - "compare_entry_to() and find_matching_entry() do not agree"); - - // Found a matching entry somewhere else in *merge_cp_p so - // just need a mapping entry. - new_name_and_type_ref_i = found_i; - map_index(scratch_cp, name_and_type_ref_i, found_i); - } else { - // no match found so we have to append this entry to *merge_cp_p - append_entry(scratch_cp, name_and_type_ref_i, merge_cp_p, - merge_cp_length_p, THREAD); - // The above call to append_entry() can append more than - // one entry so the post call query of *merge_cp_length_p - // is required in order to get the right index for the - // JVM_CONSTANT_NameAndType entry. 
- new_name_and_type_ref_i = *merge_cp_length_p - 1; + RC_TRACE(0x00000002, ("Verification done!")); + + if (i == all_affected_klasses.length() - 1) { + + // This was the last class processed => check if additional classes have been loaded in the meantime + + RC_TIMER_STOP(_timer_prologue); + lock_threads(); + RC_TIMER_START(_timer_prologue); + + for (int j=0; jklass_part()->subklass(); + Klass *cur_klass = initial_subklass; + while(cur_klass != NULL) { + + if(cur_klass->oop_is_instance() && cur_klass->is_newest_version()) { + instanceKlassHandle handle(THREAD, cur_klass->as_klassOop()); + if (!all_affected_klasses.contains(handle)) { + + int k = i + 1; + for (; kis_subtype_of(cur_klass->as_klassOop())) { + break; + } + } + all_affected_klasses.insert_before(k, handle); + RC_TRACE(0x00000002, ("Adding newly loaded class to affected classes: %s", + cur_klass->name()->as_C_string())); + } + } + + cur_klass = cur_klass->next_sibling(); } } - // If the referenced entries already exist in *merge_cp_p, then - // both new_klass_ref_i and new_name_and_type_ref_i will both be - // 0. In that case, all we are appending is the current entry. - if (new_klass_ref_i == 0) { - new_klass_ref_i = klass_ref_i; - } - if (new_name_and_type_ref_i == 0) { - new_name_and_type_ref_i = name_and_type_ref_i; - } + int new_count = all_affected_klasses.length() - 1 - i; + if (new_count != 0) { - const char *entry_name; - switch (scratch_cp->tag_at(scratch_i).value()) { - case JVM_CONSTANT_Fieldref: - entry_name = "Fieldref"; - (*merge_cp_p)->field_at_put(*merge_cp_length_p, new_klass_ref_i, - new_name_and_type_ref_i); - break; - case JVM_CONSTANT_InterfaceMethodref: - entry_name = "IFMethodref"; - (*merge_cp_p)->interface_method_at_put(*merge_cp_length_p, - new_klass_ref_i, new_name_and_type_ref_i); - break; - case JVM_CONSTANT_Methodref: - entry_name = "Methodref"; - (*merge_cp_p)->method_at_put(*merge_cp_length_p, new_klass_ref_i, - new_name_and_type_ref_i); - break; - default: - guarantee(false, "bad switch"); - break; + unlock_threads(); + RC_TRACE(0x00000001, ("Found new number of affected classes: %d", + new_count)); } + } + } - if (klass_ref_i != new_klass_ref_i) { - RC_TRACE(0x00080000, ("%s entry@%d class_index changed: %d to %d", - entry_name, *merge_cp_length_p, klass_ref_i, new_klass_ref_i)); - } - if (name_and_type_ref_i != new_name_and_type_ref_i) { - RC_TRACE(0x00080000, - ("%s entry@%d name_and_type_index changed: %d to %d", - entry_name, *merge_cp_length_p, name_and_type_ref_i, - new_name_and_type_ref_i)); - } + if (result != JVMTI_ERROR_NONE) { + rollback(); + return result; + } - if (scratch_i != *merge_cp_length_p) { - // The new entry in *merge_cp_p is at a different index than - // the new entry in scratch_cp so we need to map the index values. - map_index(scratch_cp, scratch_i, *merge_cp_length_p); - } - (*merge_cp_length_p)++; - } break; + RC_TIMER_STOP(_timer_prologue); + RC_TIMER_START(_timer_class_linking); + // Link and verify new classes _after_ all classes have been updated in the system dictionary! 
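The prologue code above keeps the affected-class list topologically sorted even for classes that get loaded while the prologue runs: a late-discovered klass is inserted directly before the first already-listed class that is a subtype of it (the insert_before(k, handle) call). A toy sketch of that ordering rule, with a simple name-based subtype predicate standing in for is_subtype_of:

    #include <cstdio>
    #include <string>
    #include <vector>

    // Toy model: a class knows its name and the names of its supertypes.
    struct Klass {
      std::string name;
      std::vector<std::string> supers;  // transitive supertypes
      bool is_subtype_of(const Klass& other) const {
        for (const std::string& s : supers) if (s == other.name) return true;
        return name == other.name;
      }
    };

    // Insert 'k' in front of the first class that is a subtype of it, so the
    // supertype-before-subtype order is preserved; 'start' is the index just
    // after the classes that have already been processed.
    static void insert_sorted(std::vector<Klass>& list, size_t start, const Klass& k) {
      size_t pos = start;
      while (pos < list.size() && !list[pos].is_subtype_of(k)) {
        pos++;
      }
      list.insert(list.begin() + pos, k);
    }

    int main() {
      std::vector<Klass> affected = {
        {"A", {}},            // already processed
        {"C", {"B", "A"}},    // subtype of the late-loaded B
      };
      insert_sorted(affected, 1, Klass{"B", {"A"}});
      for (const Klass& k : affected) std::printf("%s ", k.name.c_str());
      std::printf("\n");  // prints: A B C
      return 0;
    }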
+ for (int i=0; inew_version()); - // At this stage, Class or UnresolvedClass could be here, but not - // ClassIndex - case JVM_CONSTANT_ClassIndex: // fall through + RC_TRACE(0x00000002, ("Linking class %d/%d %s", + i, + all_affected_klasses.length(), + the_class->name()->as_C_string())); + new_class->link_class(THREAD); + + result = check_exception(); + if (result != JVMTI_ERROR_NONE) break; + } + RC_TIMER_STOP(_timer_class_linking); + RC_TIMER_START(_timer_prologue); - // Invalid is used as the tag for the second constant pool entry - // occupied by JVM_CONSTANT_Double or JVM_CONSTANT_Long. It should - // not be seen by itself. - case JVM_CONSTANT_Invalid: // fall through + if (result != JVMTI_ERROR_NONE) { + rollback(); + return result; + } - // At this stage, String or UnresolvedString could be here, but not - // StringIndex - case JVM_CONSTANT_StringIndex: // fall through + RC_TRACE(0x00000002, ("All classes loaded!")); - // At this stage JVM_CONSTANT_UnresolvedClassInError should not be - // here - case JVM_CONSTANT_UnresolvedClassInError: // fall through +#ifdef ASSERT + for (int i=0; inew_version() != NULL, "Must have been redefined"); + instanceKlassHandle new_version = instanceKlassHandle(THREAD, the_class->new_version()); + assert(new_version->new_version() == NULL, "Must be newest version"); - default: - { - // leave a breadcrumb - jbyte bad_value = scratch_cp->tag_at(scratch_i).value(); - ShouldNotReachHere(); - } break; - } // end switch tag value -} // end append_entry() + if (!(new_version->super() == NULL || new_version->super()->klass_part()->new_version() == NULL)) { + new_version()->print(); + new_version->super()->print(); + } + assert(new_version->super() == NULL || new_version->super()->klass_part()->new_version() == NULL, "Super class must be newest version"); + } + + SystemDictionary::classes_do(check_class, THREAD); + +#endif + + RC_TRACE(0x00000001, ("Finished verification!")); + return JVMTI_ERROR_NONE; +} + +void VM_RedefineClasses::lock_threads() { + + RC_TIMER_START(_timer_wait_for_locks); + + + JavaThread *javaThread = Threads::first(); + while (javaThread != NULL) { + if (javaThread->is_Compiler_thread() && javaThread != Thread::current()) { + CompilerThread *compilerThread = (CompilerThread *)javaThread; + compilerThread->set_should_bailout(true); + } + javaThread = javaThread->next(); + } + + int cnt = 0; + javaThread = Threads::first(); + while (javaThread != NULL) { + if (javaThread->is_Compiler_thread() && javaThread != Thread::current()) { + CompilerThread *compilerThread = (CompilerThread *)javaThread; + compilerThread->compilation_mutex()->lock(); + cnt++; + } + javaThread = javaThread->next(); + } + + RC_TRACE(0x00000002, ("Locked %d compiler threads", cnt)); + + cnt = 0; + javaThread = Threads::first(); + while (javaThread != NULL) { + if (javaThread != Thread::current()) { + javaThread->redefine_classes_mutex()->lock(); + } + javaThread = javaThread->next(); + } + + + RC_TRACE(0x00000002, ("Locked %d threads", cnt)); + + RC_TIMER_STOP(_timer_wait_for_locks); +} +void VM_RedefineClasses::unlock_threads() { -void VM_RedefineClasses::swap_all_method_annotations(int i, int j, instanceKlassHandle scratch_class) { - typeArrayOop save; + int cnt = 0; + JavaThread *javaThread = Threads::first(); + Thread *thread = Thread::current(); + while (javaThread != NULL) { + if (javaThread->is_Compiler_thread() && javaThread != Thread::current()) { + CompilerThread *compilerThread = (CompilerThread *)javaThread; + if 
(compilerThread->compilation_mutex()->owned_by_self()) { + compilerThread->compilation_mutex()->unlock(); + cnt++; + } + } + javaThread = javaThread->next(); + } - save = scratch_class->get_method_annotations_of(i); - scratch_class->set_method_annotations_of(i, scratch_class->get_method_annotations_of(j)); - scratch_class->set_method_annotations_of(j, save); + RC_TRACE(0x00000002, ("Unlocked %d compiler threads", cnt)); - save = scratch_class->get_method_parameter_annotations_of(i); - scratch_class->set_method_parameter_annotations_of(i, scratch_class->get_method_parameter_annotations_of(j)); - scratch_class->set_method_parameter_annotations_of(j, save); + cnt = 0; + javaThread = Threads::first(); + while (javaThread != NULL) { + if (javaThread != Thread::current()) { + if (javaThread->redefine_classes_mutex()->owned_by_self()) { + javaThread->redefine_classes_mutex()->unlock(); + } + } + javaThread = javaThread->next(); + } - save = scratch_class->get_method_default_annotations_of(i); - scratch_class->set_method_default_annotations_of(i, scratch_class->get_method_default_annotations_of(j)); - scratch_class->set_method_default_annotations_of(j, save); + RC_TRACE(0x00000002, ("Unlocked %d threads", cnt)); } +jvmtiError VM_RedefineClasses::check_redefinition_allowed(instanceKlassHandle scratch_class) { + + + + // Compatibility mode => check for unsupported modification + + + assert(scratch_class->old_version() != NULL, "must have old version"); + instanceKlassHandle the_class(scratch_class->old_version()); -jvmtiError VM_RedefineClasses::compare_and_normalize_class_versions( - instanceKlassHandle the_class, - instanceKlassHandle scratch_class) { int i; // Check superclasses, or rather their names, since superclasses themselves can be // requested to replace. // Check for NULL superclass first since this might be java.lang.Object if (the_class->super() != scratch_class->super() && - (the_class->super() == NULL || scratch_class->super() == NULL || - Klass::cast(the_class->super())->name() != - Klass::cast(scratch_class->super())->name())) { - return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_HIERARCHY_CHANGED; + (the_class->super() == NULL || scratch_class->super() == NULL || + Klass::cast(the_class->super())->name() != + Klass::cast(scratch_class->super())->name())) { + return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_HIERARCHY_CHANGED; } // Check if the number, names and order of directly implemented interfaces are the same. 
@@ -539,8 +678,8 @@ jvmtiError VM_RedefineClasses::compare_and_normalize_class_versions( } for (i = 0; i < n_intfs; i++) { if (Klass::cast((klassOop) k_interfaces->obj_at(i))->name() != - Klass::cast((klassOop) k_new_interfaces->obj_at(i))->name()) { - return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_HIERARCHY_CHANGED; + Klass::cast((klassOop) k_new_interfaces->obj_at(i))->name()) { + return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_HIERARCHY_CHANGED; } } @@ -689,12 +828,8 @@ jvmtiError VM_RedefineClasses::compare_and_normalize_class_versions( idnum_owner->set_method_idnum(new_num); } k_new_method->set_method_idnum(old_num); - swap_all_method_annotations(old_num, new_num, scratch_class); } } - RC_TRACE(0x00008000, ("Method matched: new: %s [%d] == old: %s [%d]", - k_new_method->name_and_sig_as_C_string(), ni, - k_old_method->name_and_sig_as_C_string(), oi)); // advance to next pair of methods ++oi; ++ni; @@ -703,11 +838,11 @@ jvmtiError VM_RedefineClasses::compare_and_normalize_class_versions( // method added, see if it is OK new_flags = (jushort) k_new_method->access_flags().get_flags(); if ((new_flags & JVM_ACC_PRIVATE) == 0 - // hack: private should be treated as final, but alas - || (new_flags & (JVM_ACC_FINAL|JVM_ACC_STATIC)) == 0 - ) { - // new methods must be private - return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_ADDED; + // hack: private should be treated as final, but alas + || (new_flags & (JVM_ACC_FINAL|JVM_ACC_STATIC)) == 0 + ) { + // new methods must be private + return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_ADDED; } { u2 num = the_class->next_method_idnum(); @@ -722,24 +857,19 @@ jvmtiError VM_RedefineClasses::compare_and_normalize_class_versions( idnum_owner->set_method_idnum(new_num); } k_new_method->set_method_idnum(num); - swap_all_method_annotations(new_num, num, scratch_class); } - RC_TRACE(0x00008000, ("Method added: new: %s [%d]", - k_new_method->name_and_sig_as_C_string(), ni)); ++ni; // advance to next new method break; case deleted: // method deleted, see if it is OK old_flags = (jushort) k_old_method->access_flags().get_flags(); if ((old_flags & JVM_ACC_PRIVATE) == 0 - // hack: private should be treated as final, but alas - || (old_flags & (JVM_ACC_FINAL|JVM_ACC_STATIC)) == 0 - ) { - // deleted methods must be private - return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_DELETED; + // hack: private should be treated as final, but alas + || (old_flags & (JVM_ACC_FINAL|JVM_ACC_STATIC)) == 0 + ) { + // deleted methods must be private + return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_DELETED; } - RC_TRACE(0x00008000, ("Method deleted: old: %s [%d]", - k_old_method->name_and_sig_as_C_string(), oi)); ++oi; // advance to next old method break; default: @@ -750,2200 +880,1783 @@ jvmtiError VM_RedefineClasses::compare_and_normalize_class_versions( return JVMTI_ERROR_NONE; } +int VM_RedefineClasses::calculate_redefinition_flags(instanceKlassHandle new_class) { -// Find new constant pool index value for old constant pool index value -// by seaching the index map. Returns zero (0) if there is no mapped -// value for the old constant pool index. -int VM_RedefineClasses::find_new_index(int old_index) { - if (_index_map_count == 0) { - // map is empty so nothing can be found - return 0; - } - - if (old_index < 1 || old_index >= _index_map_p->length()) { - // The old_index is out of range so it is not mapped. This should - // not happen in regular constant pool merging use, but it can - // happen if a corrupt annotation is processed. 
- return 0; - } - - int value = _index_map_p->at(old_index); - if (value == -1) { - // the old_index is not mapped - return 0; - } - - return value; -} // end find_new_index() - + int result = Klass::NoRedefinition; -// Returns true if the current mismatch is due to a resolved/unresolved -// class pair. Otherwise, returns false. -bool VM_RedefineClasses::is_unresolved_class_mismatch(constantPoolHandle cp1, - int index1, constantPoolHandle cp2, int index2) { - jbyte t1 = cp1->tag_at(index1).value(); - if (t1 != JVM_CONSTANT_Class && t1 != JVM_CONSTANT_UnresolvedClass) { - return false; // wrong entry type; not our special case - } - jbyte t2 = cp2->tag_at(index2).value(); - if (t2 != JVM_CONSTANT_Class && t2 != JVM_CONSTANT_UnresolvedClass) { - return false; // wrong entry type; not our special case - } + RC_TRACE(0x00000002, ("Comparing different class versions of class %s", + new_class->name()->as_C_string())); - if (t1 == t2) { - return false; // not a mismatch; not our special case - } + assert(new_class->old_version() != NULL, "must have old version"); + instanceKlassHandle the_class(new_class->old_version()); - char *s1 = cp1->klass_name_at(index1)->as_C_string(); - char *s2 = cp2->klass_name_at(index2)->as_C_string(); - if (strcmp(s1, s2) != 0) { - return false; // strings don't match; not our special case + // Check whether class is in the error init state. + if (the_class->is_in_error_state()) { + // TBD #5057930: special error code is needed in 1.6 + //result = Klass::union_redefinition_level(result, Klass::Invalid); } - return true; // made it through the gauntlet; this is our special case -} // end is_unresolved_class_mismatch() + int i; + ////////////////////////////////////////////////////////////////////////////////////////////////////////// + // Check superclasses + assert(new_class->super() == NULL || new_class->super()->klass_part()->is_newest_version(), ""); + if (the_class->super() != new_class->super()) { + // Super class changed + + klassOop cur_klass = the_class->super(); + while (cur_klass != NULL) { + if (!new_class->is_subclass_of(cur_klass->klass_part()->newest_version())) { + RC_TRACE(0x00000002, ("Removed super class %s", + cur_klass->klass_part()->name()->as_C_string())); + result = result | Klass::RemoveSuperType | Klass::ModifyInstances | Klass::ModifyClass; + + if (!cur_klass->klass_part()->has_subtype_changed()) { + RC_TRACE(0x00000002, ("Subtype changed of class %s", + cur_klass->klass_part()->name()->as_C_string())); + cur_klass->klass_part()->set_subtype_changed(true); + } + } -// Returns true if the current mismatch is due to a resolved/unresolved -// string pair. Otherwise, returns false. 
-bool VM_RedefineClasses::is_unresolved_string_mismatch(constantPoolHandle cp1, - int index1, constantPoolHandle cp2, int index2) { + cur_klass = cur_klass->klass_part()->super(); + } - jbyte t1 = cp1->tag_at(index1).value(); - if (t1 != JVM_CONSTANT_String && t1 != JVM_CONSTANT_UnresolvedString) { - return false; // wrong entry type; not our special case + cur_klass = new_class->super(); + while (cur_klass != NULL) { + if (!the_class->is_subclass_of(cur_klass->klass_part()->old_version())) { + RC_TRACE(0x00000002, ("Added super class %s", + cur_klass->klass_part()->name()->as_C_string())); + result = result | Klass::ModifyClass | Klass::ModifyInstances; + } + cur_klass = cur_klass->klass_part()->super(); + } + } + + ////////////////////////////////////////////////////////////////////////////////////////////////////////// + // Check interfaces + + // Interfaces removed? + objArrayOop old_interfaces = the_class->transitive_interfaces(); + for (i = 0; ilength(); i++) { + instanceKlassHandle old_interface((klassOop)old_interfaces->obj_at(i)); + if (!new_class->implements_interface_any_version(old_interface())) { + result = result | Klass::RemoveSuperType | Klass::ModifyClass; + RC_TRACE(0x00000002, ("Removed interface %s", + old_interface->name()->as_C_string())); + + if (!old_interface->has_subtype_changed()) { + RC_TRACE(0x00000002, ("Subtype changed of interface %s", + old_interface->name()->as_C_string())); + old_interface->set_subtype_changed(true); + } + } } - jbyte t2 = cp2->tag_at(index2).value(); - if (t2 != JVM_CONSTANT_String && t2 != JVM_CONSTANT_UnresolvedString) { - return false; // wrong entry type; not our special case + // Interfaces added? + objArrayOop new_interfaces = new_class->transitive_interfaces(); + for (i = 0; ilength(); i++) { + if (!the_class->implements_interface_any_version((klassOop)new_interfaces->obj_at(i))) { + result = result | Klass::ModifyClass; + RC_TRACE(0x00000002, ("Added interface %s", + ((klassOop)new_interfaces->obj_at(i))->klass_part()->name()->as_C_string())); + } } - if (t1 == t2) { - return false; // not a mismatch; not our special case - } - char *s1 = cp1->string_at_noresolve(index1); - char *s2 = cp2->string_at_noresolve(index2); - if (strcmp(s1, s2) != 0) { - return false; // strings don't match; not our special case - } - - return true; // made it through the gauntlet; this is our special case -} // end is_unresolved_string_mismatch() - - -jvmtiError VM_RedefineClasses::load_new_class_versions(TRAPS) { - // For consistency allocate memory using os::malloc wrapper. - _scratch_classes = (instanceKlassHandle *) - os::malloc(sizeof(instanceKlassHandle) * _class_count, mtInternal); - if (_scratch_classes == NULL) { - return JVMTI_ERROR_OUT_OF_MEMORY; + // Check whether class modifiers are the same. + jushort old_flags = (jushort) the_class->access_flags().get_flags(); + jushort new_flags = (jushort) new_class->access_flags().get_flags(); + if (old_flags != new_flags) { + // TODO (tw): Can this have any effects? 
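calculate_redefinition_flags accumulates its outcome as bit flags OR-ed into a single result value. A minimal standalone model of that accumulation; the enum values are illustrative only and are not the actual Klass constants:

#include <cstdio>

enum RedefinitionFlags {
  NoRedefinition         = 0,
  ModifyClass            = 1 << 0,
  ModifyClassSize        = 1 << 1,
  ModifyInstances        = 1 << 2,
  ModifyInstanceSize     = 1 << 3,
  RemoveSuperType        = 1 << 4,
  HasInstanceTransformer = 1 << 5
};

int main() {
  int result = NoRedefinition;
  // a removed super type also implies instance and class layout changes
  result |= RemoveSuperType | ModifyInstances | ModifyClass;
  // an added interface only touches the class, not the instances
  result |= ModifyClass;
  std::printf("needs instance update: %s\n",
              (result & ModifyInstances) ? "yes" : "no");
  return 0;
}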
} - ResourceMark rm(THREAD); - - JvmtiThreadState *state = JvmtiThreadState::state_for(JavaThread::current()); - // state can only be NULL if the current thread is exiting which - // should not happen since we're trying to do a RedefineClasses - guarantee(state != NULL, "exiting thread calling load_new_class_versions"); - for (int i = 0; i < _class_count; i++) { - oop mirror = JNIHandles::resolve_non_null(_class_defs[i].klass); - // classes for primitives cannot be redefined - if (!is_modifiable_class(mirror)) { - return JVMTI_ERROR_UNMODIFIABLE_CLASS; + // Check if the number, names, types and order of fields declared in these classes + // are the same. + JavaFieldStream old_fs(the_class); + JavaFieldStream new_fs(new_class); + for (; !old_fs.done() && !new_fs.done(); old_fs.next(), new_fs.next()) { + // access + old_flags = old_fs.access_flags().as_short(); + new_flags = new_fs.access_flags().as_short(); + if ((old_flags ^ new_flags) & JVM_RECOGNIZED_FIELD_MODIFIERS) { + // (tw) Can this have any effects? } - klassOop the_class_oop = java_lang_Class::as_klassOop(mirror); - instanceKlassHandle the_class = instanceKlassHandle(THREAD, the_class_oop); - Symbol* the_class_sym = the_class->name(); - - // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark - RC_TRACE_WITH_THREAD(0x00000001, THREAD, - ("loading name=%s kind=%d (avail_mem=" UINT64_FORMAT "K)", - the_class->external_name(), _class_load_kind, - os::available_memory() >> 10)); + // offset + if (old_fs.offset() != new_fs.offset()) { + result = result | Klass::ModifyInstances; + } + // name and signature + Symbol* name_sym1 = the_class->constants()->symbol_at(old_fs.name_index()); + Symbol* sig_sym1 = the_class->constants()->symbol_at(old_fs.signature_index()); + Symbol* name_sym2 = new_class->constants()->symbol_at(new_fs.name_index()); + Symbol* sig_sym2 = new_class->constants()->symbol_at(new_fs.signature_index()); + if (name_sym1 != name_sym2 || sig_sym1 != sig_sym2) { + result = result | Klass::ModifyInstances; + } + } - ClassFileStream st((u1*) _class_defs[i].class_bytes, - _class_defs[i].class_byte_count, (char *)"__VM_RedefineClasses__"); + if (!old_fs.done() || !new_fs.done()) { + result = result | Klass::ModifyInstances; + } - // Parse the stream. - Handle the_class_loader(THREAD, the_class->class_loader()); - Handle protection_domain(THREAD, the_class->protection_domain()); - // Set redefined class handle in JvmtiThreadState class. - // This redefined class is sent to agent event handler for class file - // load hook event. - state->set_class_being_redefined(&the_class, _class_load_kind); + // Do a parallel walk through the old and new methods. Detect + // cases where they match (exist in both), have been added in + // the new methods, or have been deleted (exist only in the + // old methods). The class file parser places methods in order + // by method name, but does not order overloaded methods by + // signature. In order to determine what fate befell the methods, + // this code places the overloaded new methods that have matching + // old methods in the same order as the old methods and places + // new overloaded methods at the end of overloaded methods of + // that name. The code for this order normalization is adapted + // from the algorithm used in instanceKlass::find_method(). + // Since we are swapping out of order entries as we find them, + // we only have to search forward through the overloaded methods. 
+ // Methods which are added and have the same name as an existing + // method (but different signature) will be put at the end of + // the methods with that name, and the name mismatch code will + // handle them. + objArrayHandle k_old_methods(the_class->methods()); + objArrayHandle k_new_methods(new_class->methods()); + int n_old_methods = k_old_methods->length(); + int n_new_methods = k_new_methods->length(); - klassOop k = SystemDictionary::parse_stream(the_class_sym, - the_class_loader, - protection_domain, - &st, - THREAD); - // Clear class_being_redefined just to be sure. - state->clear_class_being_redefined(); + int ni = 0; + int oi = 0; + while (true) { + methodOop k_old_method; + methodOop k_new_method; + enum { matched, added, deleted, undetermined } method_was = undetermined; - // TODO: if this is retransform, and nothing changed we can skip it - - instanceKlassHandle scratch_class (THREAD, k); - - if (HAS_PENDING_EXCEPTION) { - Symbol* ex_name = PENDING_EXCEPTION->klass()->klass_part()->name(); - // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark - RC_TRACE_WITH_THREAD(0x00000002, THREAD, ("parse_stream exception: '%s'", - ex_name->as_C_string())); - CLEAR_PENDING_EXCEPTION; - - if (ex_name == vmSymbols::java_lang_UnsupportedClassVersionError()) { - return JVMTI_ERROR_UNSUPPORTED_VERSION; - } else if (ex_name == vmSymbols::java_lang_ClassFormatError()) { - return JVMTI_ERROR_INVALID_CLASS_FORMAT; - } else if (ex_name == vmSymbols::java_lang_ClassCircularityError()) { - return JVMTI_ERROR_CIRCULAR_CLASS_DEFINITION; - } else if (ex_name == vmSymbols::java_lang_NoClassDefFoundError()) { - // The message will be "XXX (wrong name: YYY)" - return JVMTI_ERROR_NAMES_DONT_MATCH; - } else if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) { - return JVMTI_ERROR_OUT_OF_MEMORY; - } else { // Just in case more exceptions can be thrown.. - return JVMTI_ERROR_FAILS_VERIFICATION; - } - } - - // Ensure class is linked before redefine - if (!the_class->is_linked()) { - the_class->link_class(THREAD); - if (HAS_PENDING_EXCEPTION) { - Symbol* ex_name = PENDING_EXCEPTION->klass()->klass_part()->name(); - // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark - RC_TRACE_WITH_THREAD(0x00000002, THREAD, ("link_class exception: '%s'", - ex_name->as_C_string())); - CLEAR_PENDING_EXCEPTION; - if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) { - return JVMTI_ERROR_OUT_OF_MEMORY; + if (oi >= n_old_methods) { + if (ni >= n_new_methods) { + break; // we've looked at everything, done + } + // New method at the end + k_new_method = (methodOop) k_new_methods->obj_at(ni); + method_was = added; + } else if (ni >= n_new_methods) { + // Old method, at the end, is deleted + k_old_method = (methodOop) k_old_methods->obj_at(oi); + method_was = deleted; + } else { + // There are more methods in both the old and new lists + k_old_method = (methodOop) k_old_methods->obj_at(oi); + k_new_method = (methodOop) k_new_methods->obj_at(ni); + if (k_old_method->name() != k_new_method->name()) { + // Methods are sorted by method name, so a mismatch means added + // or deleted + if (k_old_method->name()->fast_compare(k_new_method->name()) > 0) { + method_was = added; } else { - return JVMTI_ERROR_INTERNAL; + method_was = deleted; + } + } else if (k_old_method->signature() == k_new_method->signature()) { + // Both the name and signature match + method_was = matched; + } else { + // The name matches, but the signature doesn't, which means we have to + // search forward through the new overloaded methods. 
+ int nj; // outside the loop for post-loop check + for (nj = ni + 1; nj < n_new_methods; nj++) { + methodOop m = (methodOop)k_new_methods->obj_at(nj); + if (k_old_method->name() != m->name()) { + // reached another method name so no more overloaded methods + method_was = deleted; + break; + } + if (k_old_method->signature() == m->signature()) { + // found a match so swap the methods + k_new_methods->obj_at_put(ni, m); + k_new_methods->obj_at_put(nj, k_new_method); + k_new_method = m; + method_was = matched; + break; + } + } + + if (nj >= n_new_methods) { + // reached the end without a match; so method was deleted + method_was = deleted; } } } - // Do the validity checks in compare_and_normalize_class_versions() - // before verifying the byte codes. By doing these checks first, we - // limit the number of functions that require redirection from - // the_class to scratch_class. In particular, we don't have to - // modify JNI GetSuperclass() and thus won't change its performance. - jvmtiError res = compare_and_normalize_class_versions(the_class, - scratch_class); - if (res != JVMTI_ERROR_NONE) { - return res; + switch (method_was) { + case matched: + // methods match, be sure modifiers do too + old_flags = (jushort) k_old_method->access_flags().get_flags(); + new_flags = (jushort) k_new_method->access_flags().get_flags(); + if ((old_flags ^ new_flags) & ~(JVM_ACC_NATIVE)) { + // (tw) Can this have any effects? Probably yes on vtables? + result = result | Klass::ModifyClass; } - - // verify what the caller passed us { - // The bug 6214132 caused the verification to fail. - // Information about the_class and scratch_class is temporarily - // recorded into jvmtiThreadState. This data is used to redirect - // the_class to scratch_class in the JVM_* functions called by the - // verifier. Please, refer to jvmtiThreadState.hpp for the detailed - // description. 
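The walk above classifies each method as matched, added or deleted by exploiting that both method arrays are sorted by name, searching forward through same-name overloads and swapping a matching signature into place. A standalone model of the same classification over plain (name, signature) pairs, with invented data:

#include <cstdio>
#include <string>
#include <utility>
#include <vector>

struct Method { std::string name, sig; };

int main() {
  std::vector<Method> oldm = {{"foo", "()V"}, {"foo", "(I)V"}, {"gone", "()V"}};
  std::vector<Method> newm = {{"added", "()V"}, {"foo", "(I)V"}, {"foo", "()V"}};
  size_t oi = 0, ni = 0;
  while (oi < oldm.size() || ni < newm.size()) {
    if (oi == oldm.size()) {                       // only new methods left
      std::printf("added   %s%s\n", newm[ni].name.c_str(), newm[ni].sig.c_str());
      ++ni;
    } else if (ni == newm.size()) {                // only old methods left
      std::printf("deleted %s%s\n", oldm[oi].name.c_str(), oldm[oi].sig.c_str());
      ++oi;
    } else if (oldm[oi].name != newm[ni].name) {   // name mismatch decides fate
      if (oldm[oi].name > newm[ni].name) {
        std::printf("added   %s%s\n", newm[ni].name.c_str(), newm[ni].sig.c_str());
        ++ni;
      } else {
        std::printf("deleted %s%s\n", oldm[oi].name.c_str(), oldm[oi].sig.c_str());
        ++oi;
      }
    } else {
      // same name: look for the matching signature among the overloads and
      // swap it into position ni so both lists stay aligned
      size_t nj = ni;
      while (nj < newm.size() && newm[nj].name == oldm[oi].name &&
             newm[nj].sig != oldm[oi].sig) {
        ++nj;
      }
      if (nj < newm.size() && newm[nj].name == oldm[oi].name) {
        std::swap(newm[ni], newm[nj]);
        std::printf("matched %s%s\n", oldm[oi].name.c_str(), oldm[oi].sig.c_str());
        ++oi; ++ni;
      } else {
        std::printf("deleted %s%s\n", oldm[oi].name.c_str(), oldm[oi].sig.c_str());
        ++oi;
      }
    }
  }
  return 0;
}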
- RedefineVerifyMark rvm(&the_class, &scratch_class, state); - Verifier::verify( - scratch_class, Verifier::ThrowException, true, THREAD); - } - - if (HAS_PENDING_EXCEPTION) { - Symbol* ex_name = PENDING_EXCEPTION->klass()->klass_part()->name(); - // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark - RC_TRACE_WITH_THREAD(0x00000002, THREAD, - ("verify_byte_codes exception: '%s'", ex_name->as_C_string())); - CLEAR_PENDING_EXCEPTION; - if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) { - return JVMTI_ERROR_OUT_OF_MEMORY; - } else { - // tell the caller the bytecodes are bad - return JVMTI_ERROR_FAILS_VERIFICATION; + u2 new_num = k_new_method->method_idnum(); + u2 old_num = k_old_method->method_idnum(); + if (new_num != old_num) { + methodOop idnum_owner = new_class->method_with_idnum(old_num); + if (idnum_owner != NULL) { + // There is already a method assigned this idnum -- switch them + idnum_owner->set_method_idnum(new_num); + } + k_new_method->set_method_idnum(old_num); + RC_TRACE(0x00000002, ("swapping idnum of new and old method %d / %d!", + new_num, + old_num)); + // swap_all_method_annotations(old_num, new_num, new_class); } } + RC_TRACE(0x00008000, ("Method matched: new: %s [%d] == old: %s [%d]", + k_new_method->name_and_sig_as_C_string(), ni, + k_old_method->name_and_sig_as_C_string(), oi)); + // advance to next pair of methods + ++oi; + ++ni; + break; + case added: + // method added, see if it is OK + new_flags = (jushort) k_new_method->access_flags().get_flags(); + if ((new_flags & JVM_ACC_PRIVATE) == 0 + // hack: private should be treated as final, but alas + || (new_flags & (JVM_ACC_FINAL|JVM_ACC_STATIC)) == 0 + ) { + // new methods must be private + result = result | Klass::ModifyClass; + } + { + u2 num = the_class->next_method_idnum(); + if (num == constMethodOopDesc::UNSET_IDNUM) { + // cannot add any more methods + result = result | Klass::ModifyClass; + } + u2 new_num = k_new_method->method_idnum(); + methodOop idnum_owner = new_class->method_with_idnum(num); + if (idnum_owner != NULL) { + // There is already a method assigned this idnum -- switch them + idnum_owner->set_method_idnum(new_num); + } + k_new_method->set_method_idnum(num); + //swap_all_method_annotations(new_num, num, new_class); + } + RC_TRACE(0x00000001, ("Method added: new: %s [%d]", + k_new_method->name_and_sig_as_C_string(), ni)); + ++ni; // advance to next new method + break; + case deleted: + // method deleted, see if it is OK + old_flags = (jushort) k_old_method->access_flags().get_flags(); + if ((old_flags & JVM_ACC_PRIVATE) == 0 + // hack: private should be treated as final, but alas + || (old_flags & (JVM_ACC_FINAL|JVM_ACC_STATIC)) == 0 + ) { + // deleted methods must be private + result = result | Klass::ModifyClass; + } + RC_TRACE(0x00000001, ("Method deleted: old: %s [%d]", + k_old_method->name_and_sig_as_C_string(), oi)); + ++oi; // advance to next old method + break; + default: + ShouldNotReachHere(); + } + } + + if (new_class()->size() != new_class->old_version()->size()) { + result |= Klass::ModifyClassSize; + } + + if (new_class->size_helper() != ((instanceKlass*)(new_class->old_version()->klass_part()))->size_helper()) { + result |= Klass::ModifyInstanceSize; + } + + methodHandle instanceTransformerMethod(new_class->find_method(vmSymbols::transformer_name(), vmSymbols::void_method_signature())); + if (!instanceTransformerMethod.is_null() && !instanceTransformerMethod->is_static()) { + result |= Klass::HasInstanceTransformer; + } + + // (tw) Check method bodies to be able to 
return NoChange? + return result; +} + +void VM_RedefineClasses::calculate_instance_update_information(klassOop new_version) { + + class UpdateFieldsEvolutionClosure : public FieldEvolutionClosure { + + private: + + GrowableArray info; + int curPosition; + bool copy_backwards; + + public: - res = merge_cp_and_rewrite(the_class, scratch_class, THREAD); - if (res != JVMTI_ERROR_NONE) { - return res; + bool does_copy_backwards() { + return copy_backwards; } - if (VerifyMergedCPBytecodes) { - // verify what we have done during constant pool merging - { - RedefineVerifyMark rvm(&the_class, &scratch_class, state); - Verifier::verify(scratch_class, Verifier::ThrowException, true, THREAD); - } + UpdateFieldsEvolutionClosure(klassOop klass) { - if (HAS_PENDING_EXCEPTION) { - Symbol* ex_name = PENDING_EXCEPTION->klass()->klass_part()->name(); - // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark - RC_TRACE_WITH_THREAD(0x00000002, THREAD, - ("verify_byte_codes post merge-CP exception: '%s'", - ex_name->as_C_string())); - CLEAR_PENDING_EXCEPTION; - if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) { - return JVMTI_ERROR_OUT_OF_MEMORY; - } else { - // tell the caller that constant pool merging screwed up - return JVMTI_ERROR_INTERNAL; - } + int base_offset = instanceOopDesc::base_offset_in_bytes(); + + if (klass->klass_part()->newest_version() == SystemDictionary::Reference_klass()->klass_part()->newest_version()) { + base_offset += java_lang_ref_Reference::number_of_fake_oop_fields*size_of_type(T_OBJECT); } + + info.append(base_offset); + info.append(0); + curPosition = base_offset; + copy_backwards = false; + } + + GrowableArray &finish() { + info.append(0); + return info; } - Rewriter::rewrite(scratch_class, THREAD); - if (!HAS_PENDING_EXCEPTION) { - Rewriter::relocate_and_link(scratch_class, THREAD); + virtual void do_new_field(fieldDescriptor* fd){ + int alignment = fd->offset() - curPosition; + if (alignment > 0) { + // This field was aligned, so we need to make sure that we fill the gap + fill(alignment); + } + + int size = size_of_type(fd->field_type()); + fill(size); } - if (HAS_PENDING_EXCEPTION) { - Symbol* ex_name = PENDING_EXCEPTION->klass()->klass_part()->name(); - CLEAR_PENDING_EXCEPTION; - if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) { - return JVMTI_ERROR_OUT_OF_MEMORY; + + private: + + void fill(int size) { + if (info.length() > 0 && info.at(info.length() - 1) < 0) { + (*info.adr_at(info.length() - 1)) -= size; } else { - return JVMTI_ERROR_INTERNAL; + info.append(-size); } + + curPosition += size; } - _scratch_classes[i] = scratch_class; + int size_of_type(BasicType type) { + int size = 0; + switch(type) { + case T_BOOLEAN: + size = sizeof(jboolean); + break; - // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark - RC_TRACE_WITH_THREAD(0x00000001, THREAD, - ("loaded name=%s (avail_mem=" UINT64_FORMAT "K)", - the_class->external_name(), os::available_memory() >> 10)); - } + case T_CHAR: + size = (sizeof(jchar)); + break; - return JVMTI_ERROR_NONE; -} + case T_FLOAT: + size = (sizeof(jfloat)); + break; + case T_DOUBLE: + size = (sizeof(jdouble)); + break; -// Map old_index to new_index as needed. scratch_cp is only needed -// for RC_TRACE() calls. 
-void VM_RedefineClasses::map_index(constantPoolHandle scratch_cp, - int old_index, int new_index) { - if (find_new_index(old_index) != 0) { - // old_index is already mapped - return; - } + case T_BYTE: + size = (sizeof(jbyte)); + break; - if (old_index == new_index) { - // no mapping is needed - return; - } + case T_SHORT: + size = (sizeof(jshort)); + break; - _index_map_p->at_put(old_index, new_index); - _index_map_count++; + case T_INT: + size = (sizeof(jint)); + break; - RC_TRACE(0x00040000, ("mapped tag %d at index %d to %d", - scratch_cp->tag_at(old_index).value(), old_index, new_index)); -} // end map_index() + case T_LONG: + size = (sizeof(jlong)); + break; + case T_OBJECT: + case T_ARRAY: + if (UseCompressedOops) { + size = sizeof(narrowOop); + } else { + size = (sizeof(oop)); + } + break; -// Merge old_cp and scratch_cp and return the results of the merge via -// merge_cp_p. The number of entries in *merge_cp_p is returned via -// merge_cp_length_p. The entries in old_cp occupy the same locations -// in *merge_cp_p. Also creates a map of indices from entries in -// scratch_cp to the corresponding entry in *merge_cp_p. Index map -// entries are only created for entries in scratch_cp that occupy a -// different location in *merged_cp_p. -bool VM_RedefineClasses::merge_constant_pools(constantPoolHandle old_cp, - constantPoolHandle scratch_cp, constantPoolHandle *merge_cp_p, - int *merge_cp_length_p, TRAPS) { + default: + ShouldNotReachHere(); + } - if (merge_cp_p == NULL) { - assert(false, "caller must provide scatch constantPool"); - return false; // robustness - } - if (merge_cp_length_p == NULL) { - assert(false, "caller must provide scatch CP length"); - return false; // robustness - } - // Worst case we need old_cp->length() + scratch_cp()->length(), - // but the caller might be smart so make sure we have at least - // the minimum. - if ((*merge_cp_p)->length() < old_cp->length()) { - assert(false, "merge area too small"); - return false; // robustness - } + assert(size > 0, ""); + return size; - RC_TRACE_WITH_THREAD(0x00010000, THREAD, - ("old_cp_len=%d, scratch_cp_len=%d", old_cp->length(), - scratch_cp->length())); + } + + public: - { - // Pass 0: - // The old_cp is copied to *merge_cp_p; this means that any code - // using old_cp does not have to change. This work looks like a - // perfect fit for constantPoolOop::copy_cp_to(), but we need to - // handle one special case: - // - revert JVM_CONSTANT_Class to JVM_CONSTANT_UnresolvedClass - // This will make verification happy. 
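size_of_type above boils down to a descriptor-to-byte-size mapping, with the reference size depending on whether compressed oops are in use. A minimal standalone sketch; the reference size is passed in and the concrete values are illustrative only:

#include <cstdio>

int field_size(char descriptor, int reference_size) {
  switch (descriptor) {
    case 'Z': case 'B': return 1;               // boolean, byte
    case 'C': case 'S': return 2;               // char, short
    case 'I': case 'F': return 4;               // int, float
    case 'J': case 'D': return 8;               // long, double
    case 'L': case '[': return reference_size;  // object, array
    default:  return 0;                         // unknown descriptor
  }
}

int main() {
  std::printf("J -> %d bytes, L.. -> %d bytes (compressed oops)\n",
              field_size('J', 4), field_size('L', 4));
  return 0;
}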
- - int old_i; // index into old_cp - - // index zero (0) is not used in constantPools - for (old_i = 1; old_i < old_cp->length(); old_i++) { - // leave debugging crumb - jbyte old_tag = old_cp->tag_at(old_i).value(); - switch (old_tag) { - case JVM_CONSTANT_Class: - case JVM_CONSTANT_UnresolvedClass: - // revert the copy to JVM_CONSTANT_UnresolvedClass - // May be resolving while calling this so do the same for - // JVM_CONSTANT_UnresolvedClass (klass_name_at() deals with transition) - (*merge_cp_p)->unresolved_klass_at_put(old_i, - old_cp->klass_name_at(old_i)); - break; + virtual void do_old_field(fieldDescriptor* fd){} - case JVM_CONSTANT_Double: - case JVM_CONSTANT_Long: - // just copy the entry to *merge_cp_p, but double and long take - // two constant pool entries - constantPoolOopDesc::copy_entry_to(old_cp, old_i, *merge_cp_p, old_i, CHECK_0); - old_i++; - break; + virtual void do_changed_field(fieldDescriptor* old_fd, fieldDescriptor *new_fd){ - default: - // just copy the entry to *merge_cp_p - constantPoolOopDesc::copy_entry_to(old_cp, old_i, *merge_cp_p, old_i, CHECK_0); - break; + int alignment = new_fd->offset() - curPosition; + if (alignment > 0) { + // This field was aligned, so we need to make sure that we fill the gap + fill(alignment); } - } // end for each old_cp entry - - // We don't need to sanity check that *merge_cp_length_p is within - // *merge_cp_p bounds since we have the minimum on-entry check above. - (*merge_cp_length_p) = old_i; - } - // merge_cp_len should be the same as old_cp->length() at this point - // so this trace message is really a "warm-and-breathing" message. - RC_TRACE_WITH_THREAD(0x00020000, THREAD, - ("after pass 0: merge_cp_len=%d", *merge_cp_length_p)); + assert(old_fd->field_type() == new_fd->field_type(), ""); + assert(curPosition == new_fd->offset(), "must be correct offset!"); - int scratch_i; // index into scratch_cp - { - // Pass 1a: - // Compare scratch_cp entries to the old_cp entries that we have - // already copied to *merge_cp_p. In this pass, we are eliminating - // exact duplicates (matching entry at same index) so we only - // compare entries in the common indice range. - int increment = 1; - int pass1a_length = MIN2(old_cp->length(), scratch_cp->length()); - for (scratch_i = 1; scratch_i < pass1a_length; scratch_i += increment) { - switch (scratch_cp->tag_at(scratch_i).value()) { - case JVM_CONSTANT_Double: - case JVM_CONSTANT_Long: - // double and long take two constant pool entries - increment = 2; - break; + int offset = old_fd->offset(); + int size = size_of_type(old_fd->field_type()); - default: - increment = 1; - break; + int prevEnd = -1; + if (info.length() > 0 && info.at(info.length() - 1) > 0) { + prevEnd = info.at(info.length() - 2) + info.at(info.length() - 1); } - bool match = scratch_cp->compare_entry_to(scratch_i, *merge_cp_p, - scratch_i, CHECK_0); - if (match) { - // found a match at the same index so nothing more to do - continue; - } else if (is_unresolved_class_mismatch(scratch_cp, scratch_i, - *merge_cp_p, scratch_i)) { - // The mismatch in compare_entry_to() above is because of a - // resolved versus unresolved class entry at the same index - // with the same string value. Since Pass 0 reverted any - // class entries to unresolved class entries in *merge_cp_p, - // we go with the unresolved class entry. 
- continue; - } else if (is_unresolved_string_mismatch(scratch_cp, scratch_i, - *merge_cp_p, scratch_i)) { - // The mismatch in compare_entry_to() above is because of a - // resolved versus unresolved string entry at the same index - // with the same string value. We can live with whichever - // happens to be at scratch_i in *merge_cp_p. - continue; - } - - int found_i = scratch_cp->find_matching_entry(scratch_i, *merge_cp_p, - CHECK_0); - if (found_i != 0) { - guarantee(found_i != scratch_i, - "compare_entry_to() and find_matching_entry() do not agree"); - - // Found a matching entry somewhere else in *merge_cp_p so - // just need a mapping entry. - map_index(scratch_cp, scratch_i, found_i); - continue; - } - - // The find_matching_entry() call above could fail to find a match - // due to a resolved versus unresolved class or string entry situation - // like we solved above with the is_unresolved_*_mismatch() calls. - // However, we would have to call is_unresolved_*_mismatch() over - // all of *merge_cp_p (potentially) and that doesn't seem to be - // worth the time. - - // No match found so we have to append this entry and any unique - // referenced entries to *merge_cp_p. - append_entry(scratch_cp, scratch_i, merge_cp_p, merge_cp_length_p, - CHECK_0); - } - } - - RC_TRACE_WITH_THREAD(0x00020000, THREAD, - ("after pass 1a: merge_cp_len=%d, scratch_i=%d, index_map_len=%d", - *merge_cp_length_p, scratch_i, _index_map_count)); - - if (scratch_i < scratch_cp->length()) { - // Pass 1b: - // old_cp is smaller than scratch_cp so there are entries in - // scratch_cp that we have not yet processed. We take care of - // those now. - int increment = 1; - for (; scratch_i < scratch_cp->length(); scratch_i += increment) { - switch (scratch_cp->tag_at(scratch_i).value()) { - case JVM_CONSTANT_Double: - case JVM_CONSTANT_Long: - // double and long take two constant pool entries - increment = 2; - break; - - default: - increment = 1; - break; + if (prevEnd == offset) { + info.at_put(info.length() - 2, info.at(info.length() - 2) + size); + } else { + info.append(size); + info.append(offset); } - int found_i = - scratch_cp->find_matching_entry(scratch_i, *merge_cp_p, CHECK_0); - if (found_i != 0) { - // Found a matching entry somewhere else in *merge_cp_p so - // just need a mapping entry. - map_index(scratch_cp, scratch_i, found_i); - continue; + if (old_fd->offset() < new_fd->offset()) { + copy_backwards = true; } - // No match found so we have to append this entry and any unique - // referenced entries to *merge_cp_p. 
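The prevEnd bookkeeping above merges copy regions that are contiguous in the old layout, so adjacent surviving fields end up in a single (size, old offset) pair. A standalone sketch of that merging; the offsets are invented and alignment gaps in the new layout are ignored:

#include <cstdio>
#include <vector>

struct Field { int old_offset, new_offset, size; };

int main() {
  // surviving fields, ordered by their offset in the new layout
  std::vector<Field> fields = {{16, 16, 4}, {20, 20, 4}, {32, 24, 8}};
  std::vector<int> plan;                         // (size, old offset) pairs
  for (const Field& f : fields) {
    bool merged = false;
    if (plan.size() >= 2 && plan[plan.size() - 2] > 0) {
      int prev_end = plan[plan.size() - 1] + plan[plan.size() - 2];
      if (prev_end == f.old_offset) {            // contiguous in the old object
        plan[plan.size() - 2] += f.size;
        merged = true;
      }
    }
    if (!merged) { plan.push_back(f.size); plan.push_back(f.old_offset); }
  }
  for (size_t i = 0; i + 1 < plan.size(); i += 2) {
    std::printf("copy %d bytes from old offset %d\n", plan[i], plan[i + 1]);
  }
  return 0;
}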
- append_entry(scratch_cp, scratch_i, merge_cp_p, merge_cp_length_p, - CHECK_0); + transfer_special_access_flags(old_fd, new_fd); + + curPosition += size; } + }; + + UpdateFieldsEvolutionClosure cl(new_version); + ((instanceKlass*)new_version->klass_part())->do_fields_evolution(&cl); - RC_TRACE_WITH_THREAD(0x00020000, THREAD, - ("after pass 1b: merge_cp_len=%d, scratch_i=%d, index_map_len=%d", - *merge_cp_length_p, scratch_i, _index_map_count)); + GrowableArray result = cl.finish(); + ((instanceKlass*)new_version->klass_part())->store_update_information(result); + ((instanceKlass*)new_version->klass_part())->set_copying_backwards(cl.does_copy_backwards()); + + if (RC_TRACE_ENABLED(0x00000002)) { + RC_TRACE(0x00000002, ("Instance update information for %s:", + new_version->klass_part()->name()->as_C_string())); + if (cl.does_copy_backwards()) { + RC_TRACE(0x00000002, ("\tDoes copy backwards!")); + } + for (int i=0; i 0) { + RC_TRACE(0x00000002, ("\t%d COPY from %d", curNum, result.at(i + 1))); + i++; + } else { + RC_TRACE(0x00000002, ("\tEND")); + } + } } +} - return true; -} // end merge_constant_pools() - - -// Merge constant pools between the_class and scratch_class and -// potentially rewrite bytecodes in scratch_class to use the merged -// constant pool. -jvmtiError VM_RedefineClasses::merge_cp_and_rewrite( - instanceKlassHandle the_class, instanceKlassHandle scratch_class, - TRAPS) { - // worst case merged constant pool length is old and new combined - int merge_cp_length = the_class->constants()->length() - + scratch_class->constants()->length(); - - constantPoolHandle old_cp(THREAD, the_class->constants()); - constantPoolHandle scratch_cp(THREAD, scratch_class->constants()); - - // Constant pools are not easily reused so we allocate a new one - // each time. - // merge_cp is created unsafe for concurrent GC processing. It - // should be marked safe before discarding it. Even though - // garbage, if it crosses a card boundary, it may be scanned - // in order to find the start of the first complete object on the card. - constantPoolHandle merge_cp(THREAD, - oopFactory::new_constantPool(merge_cp_length, - oopDesc::IsUnsafeConc, - THREAD)); - int orig_length = old_cp->orig_length(); - if (orig_length == 0) { - // This old_cp is an actual original constant pool. We save - // the original length in the merged constant pool so that - // merge_constant_pools() can be more efficient. If a constant - // pool has a non-zero orig_length() value, then that constant - // pool was created by a merge operation in RedefineClasses. - merge_cp->set_orig_length(old_cp->length()); - } else { - // This old_cp is a merged constant pool from a previous - // RedefineClasses() calls so just copy the orig_length() - // value. - merge_cp->set_orig_length(old_cp->orig_length()); +Symbol* VM_RedefineClasses::signature_to_class_name(Symbol* signature) { + assert(FieldType::is_obj(signature), ""); + return SymbolTable::new_symbol(signature->as_C_string() + 1, signature->utf8_length() - 2, Thread::current()); +} + +void VM_RedefineClasses::calculate_type_check_information(klassOop klass) { + if (klass->klass_part()->is_redefining()) { + klass = klass->klass_part()->old_version(); } - ResourceMark rm(THREAD); - _index_map_count = 0; - _index_map_p = new intArray(scratch_cp->length(), -1); + // We found an instance klass! 
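The update information built and traced above is a flat int array: positive entries are a copy size followed by the old source offset, negative entries clear that many bytes (new fields), and 0 terminates. A standalone decoder over an invented plan, to make the encoding concrete:

#include <cstdio>
#include <vector>

int main() {
  // hypothetical plan: copy a 16-byte header from offset 0, copy 8 bytes of
  // surviving fields from old offset 24, clear 4 bytes for a new field, end
  std::vector<int> info = {16, 0, 8, 24, -4, 0};
  size_t dest = 0;
  for (size_t i = 0; i < info.size(); i++) {
    int cur = info[i];
    if (cur > 0) {                       // (size, source offset) pair
      std::printf("copy %d bytes: old+%d -> new+%zu\n", cur, info[i + 1], dest);
      dest += cur;
      i++;                               // skip the offset slot
    } else if (cur < 0) {                // new or widened field: just clear it
      std::printf("clear %d bytes at new+%zu\n", -cur, dest);
      dest += -cur;
    } else {
      std::printf("end of update information\n");
      break;
    }
  }
  return 0;
}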
+ instanceKlass *cur_instance_klass = instanceKlass::cast(klass); + GrowableArray< Pair > type_check_information; - bool result = merge_constant_pools(old_cp, scratch_cp, &merge_cp, - &merge_cp_length, THREAD); - if (!result) { - // The merge can fail due to memory allocation failure or due - // to robustness checks. - return JVMTI_ERROR_INTERNAL; - } - - RC_TRACE_WITH_THREAD(0x00010000, THREAD, - ("merge_cp_len=%d, index_map_len=%d", merge_cp_length, _index_map_count)); - - if (_index_map_count == 0) { - // there is nothing to map between the new and merged constant pools - - if (old_cp->length() == scratch_cp->length()) { - // The old and new constant pools are the same length and the - // index map is empty. This means that the three constant pools - // are equivalent (but not the same). Unfortunately, the new - // constant pool has not gone through link resolution nor have - // the new class bytecodes gone through constant pool cache - // rewriting so we can't use the old constant pool with the new - // class. - - merge_cp()->set_is_conc_safe(true); - merge_cp = constantPoolHandle(); // toss the merged constant pool - } else if (old_cp->length() < scratch_cp->length()) { - // The old constant pool has fewer entries than the new constant - // pool and the index map is empty. This means the new constant - // pool is a superset of the old constant pool. However, the old - // class bytecodes have already gone through constant pool cache - // rewriting so we can't use the new constant pool with the old - // class. - - merge_cp()->set_is_conc_safe(true); - merge_cp = constantPoolHandle(); // toss the merged constant pool - } else { - // The old constant pool has more entries than the new constant - // pool and the index map is empty. This means that both the old - // and merged constant pools are supersets of the new constant - // pool. - - // Replace the new constant pool with a shrunken copy of the - // merged constant pool; the previous new constant pool will - // get GCed. - set_new_constant_pool(scratch_class, merge_cp, merge_cp_length, true, - THREAD); - // drop local ref to the merged constant pool - merge_cp()->set_is_conc_safe(true); - merge_cp = constantPoolHandle(); + class MyFieldClosure : public FieldClosure { + + public: + + GrowableArray< Pair > *_arr; + + MyFieldClosure(GrowableArray< Pair > *arr) { + _arr = arr; } - } else { - if (RC_TRACE_ENABLED(0x00040000)) { - // don't want to loop unless we are tracing - int count = 0; - for (int i = 1; i < _index_map_p->length(); i++) { - int value = _index_map_p->at(i); - - if (value != -1) { - RC_TRACE_WITH_THREAD(0x00040000, THREAD, - ("index_map[%d]: old=%d new=%d", count, i, value)); - count++; + + virtual void do_field(fieldDescriptor* fd) { + if (fd->field_type() == T_OBJECT) { + Symbol* signature = fd->signature(); + if (FieldType::is_obj(signature)) { + Symbol* name = signature_to_class_name(signature); + klassOop field_klass; + if (is_field_dangerous(name, fd, field_klass)) { + RC_TRACE(0x00000002, ("Found dangerous field %s in klass %s of type %s", + fd->name()->as_C_string(), + fd->field_holder()->klass_part()->name()->as_C_string(), + name->as_C_string())); + _arr->append(Pair(fd->offset(), field_klass->klass_part()->newest_version())); + } } + + // Array fields can never be a problem! } } - // We have entries mapped between the new and merged constant pools - // so we have to rewrite some constant pool references. 
- if (!rewrite_cp_refs(scratch_class, THREAD)) { - return JVMTI_ERROR_INTERNAL; + bool is_field_dangerous(Symbol* klass_name, fieldDescriptor *fd, klassOop &field_klass) { + field_klass = SystemDictionary::find(klass_name, fd->field_holder()->klass_part()->class_loader(), + fd->field_holder()->klass_part()->protection_domain(), Thread::current()); + if(field_klass != NULL) { + if (field_klass->klass_part()->is_redefining()) { + field_klass = field_klass->klass_part()->old_version(); + } + if (field_klass->klass_part()->has_subtype_changed()) { + return true; + } + } + return false; } + }; - // Replace the new constant pool with a shrunken copy of the - // merged constant pool so now the rewritten bytecodes have - // valid references; the previous new constant pool will get - // GCed. - set_new_constant_pool(scratch_class, merge_cp, merge_cp_length, true, - THREAD); - merge_cp()->set_is_conc_safe(true); + MyFieldClosure fieldClosure(&type_check_information); + cur_instance_klass->do_nonstatic_fields(&fieldClosure); + + if (type_check_information.length() > 0) { + type_check_information.append(Pair(-1, NULL)); + cur_instance_klass->store_type_check_information(type_check_information); } - assert(old_cp()->is_conc_safe(), "Just checking"); - assert(scratch_cp()->is_conc_safe(), "Just checking"); +} - return JVMTI_ERROR_NONE; -} // end merge_cp_and_rewrite() +bool VM_RedefineClasses::check_field_value_types() { + Thread *THREAD = Thread::current(); + class CheckFieldTypesClosure : public ObjectClosure { -// Rewrite constant pool references in klass scratch_class. -bool VM_RedefineClasses::rewrite_cp_refs(instanceKlassHandle scratch_class, - TRAPS) { + private: - // rewrite constant pool references in the methods: - if (!rewrite_cp_refs_in_methods(scratch_class, THREAD)) { - // propagate failure back to caller - return false; - } + bool _result; - // rewrite constant pool references in the class_annotations: - if (!rewrite_cp_refs_in_class_annotations(scratch_class, THREAD)) { - // propagate failure back to caller - return false; - } + public: - // rewrite constant pool references in the fields_annotations: - if (!rewrite_cp_refs_in_fields_annotations(scratch_class, THREAD)) { - // propagate failure back to caller - return false; - } + CheckFieldTypesClosure() { + _result = true; + } - // rewrite constant pool references in the methods_annotations: - if (!rewrite_cp_refs_in_methods_annotations(scratch_class, THREAD)) { - // propagate failure back to caller - return false; - } + bool result() { return _result; } - // rewrite constant pool references in the methods_parameter_annotations: - if (!rewrite_cp_refs_in_methods_parameter_annotations(scratch_class, - THREAD)) { - // propagate failure back to caller - return false; + virtual void do_object(oop obj) { + + if (!_result) { + return; + } + + if (obj->is_objArray()) { + + objArrayOop array = objArrayOop(obj); + + klassOop element_klass = objArrayKlass::cast(array->klass())->element_klass(); + + if (element_klass->klass_part()->has_subtype_changed()) { + int length = array->length(); + for (int i=0; iobj_at(i); + if (element != NULL && element->blueprint()->newest_version()->klass_part()->is_redefining()) { + // Check subtype relationship to static type of array + if (!element->blueprint()->newest_version()->klass_part()->is_subtype_of(element_klass->klass_part()->newest_version())) { + RC_TRACE(0x00000001, ("Array value is INVALID - abort redefinition (static_type=%s, index=%d, dynamic_type=%s)", + 
element_klass->klass_part()->name()->as_C_string(), + i, + element->blueprint()->name()->as_C_string())); + _result = false; + break; + } + } + } + } + + } else { + Pair *cur = obj->klass()->klass_part()->type_check_information(); + if (cur != NULL) { + // Type check information exists for this oop + while ((*cur).left() != -1) { + check_field(obj, (*cur).left(), (*cur).right()); + cur++; + } + } + } + } + + void check_field(oop obj, int offset, klassOop static_type) { + oop field_value = obj->obj_field(offset); + if (field_value != NULL) { + // Field is not null + if (field_value->klass()->klass_part()->newest_version()->klass_part()->is_subtype_of(static_type)) { + // We are OK + RC_TRACE(0x00008000, ("Field value is OK (klass=%s, static_type=%s, offset=%d, dynamic_type=%s)", + obj->klass()->klass_part()->name()->as_C_string(), + static_type->klass_part()->name()->as_C_string(), + offset, + field_value->klass()->klass_part()->name()->as_C_string())); + } else { + // Failure! + RC_TRACE(0x00000001, ("Field value is INVALID - abort redefinition (klass=%s, static_type=%s, offset=%d, dynamic_type=%s)", + obj->klass()->klass_part()->name()->as_C_string(), + static_type->klass_part()->name()->as_C_string(), + offset, + field_value->klass()->klass_part()->name()->as_C_string())); + _result = false; + } + } + } + }; + + CheckFieldTypesClosure myObjectClosure; + + // make sure that heap is parsable (fills TLABs with filler objects) + Universe::heap()->ensure_parsability(false); // no need to retire TLABs + + // do the iteration + // If this operation encounters a bad object when using CMS, + // consider using safe_object_iterate() which avoids perm gen + // objects that may contain bad references. + Universe::heap()->object_iterate(&myObjectClosure); + + // when sharing is enabled we must iterate over the shared spaces + if (UseSharedSpaces) { + GenCollectedHeap* gch = GenCollectedHeap::heap(); + CompactingPermGenGen* gen = (CompactingPermGenGen*)gch->perm_gen(); + gen->ro_space()->object_iterate(&myObjectClosure); + gen->rw_space()->object_iterate(&myObjectClosure); } - // rewrite constant pool references in the methods_default_annotations: - if (!rewrite_cp_refs_in_methods_default_annotations(scratch_class, - THREAD)) { - // propagate failure back to caller - return false; + return myObjectClosure.result(); +} + +void VM_RedefineClasses::clear_type_check_information(klassOop k) { + if (k->klass_part()->is_redefining()) { + k = k->klass_part()->old_version(); } - return true; -} // end rewrite_cp_refs() + // We found an instance klass! + instanceKlass *cur_instance_klass = instanceKlass::cast(k); + cur_instance_klass->clear_type_check_information(); +} +void VM_RedefineClasses::update_active_methods() { + + RC_TRACE(0x00000002, ("Updating active methods")); + JavaThread *java_thread = Threads::first(); + while (java_thread != NULL) { + + int stack_depth = 0; + if (java_thread->has_last_Java_frame()) { + + RC_TRACE(0x0000000400, ("checking stack of Java thread %s", java_thread->name())); + + // vframes are resource allocated + Thread* current_thread = Thread::current(); + ResourceMark rm(current_thread); + HandleMark hm(current_thread); + + RegisterMap reg_map(java_thread); + frame f = java_thread->last_frame(); + vframe* vf = vframe::new_vframe(&f, ®_map, java_thread); + frame* last_entry_frame = NULL; + + while (vf != NULL) { + if (vf->is_java_frame()) { + // java frame (interpreted, compiled, ...) 
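check_field above reduces the heap walk to a subtype test of each stored value's dynamic type against the field's static type under the new hierarchy, aborting the redefinition when a value no longer conforms. A toy model of that test; class names and the hierarchy are invented:

#include <cstdio>
#include <map>
#include <set>
#include <string>

using Hierarchy = std::map<std::string, std::set<std::string>>;  // class -> supertypes

bool conforms(const Hierarchy& h, const std::string& dynamic_type,
              const std::string& static_type) {
  if (dynamic_type == static_type) return true;
  auto it = h.find(dynamic_type);
  return it != h.end() && it->second.count(static_type) > 0;
}

int main() {
  Hierarchy before = {{"B", {"A", "java/lang/Object"}}};
  Hierarchy after  = {{"B", {"java/lang/Object"}}};   // redefinition removed A
  // a field of static type A currently holds a B instance
  std::printf("before: %s\n", conforms(before, "B", "A") ? "ok" : "invalid");
  std::printf("after:  %s\n", conforms(after,  "B", "A") ? "ok" : "invalid");
  return 0;
}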
+ javaVFrame *jvf = javaVFrame::cast(vf); + + if (!(jvf->method()->is_native())) { + int bci = jvf->bci(); + RC_TRACE(0x00000400, ("found method: %s / bci=%d", jvf->method()->name()->as_C_string(), bci)); + ResourceMark rm(Thread::current()); + HandleMark hm; + instanceKlassHandle klass(jvf->method()->method_holder()); + + if (jvf->method()->new_version() != NULL && jvf->is_interpreted_frame()) { + + + RC_TRACE(0x00000002, ("Found method that should just be updated to the newest version %s", + jvf->method()->name_and_sig_as_C_string())); + + if (RC_TRACE_ENABLED(0x01000000)) { + int code_size = jvf->method()->code_size(); + char *code_base_old = (char*)jvf->method()->code_base(); + char *code_base_new = (char*)jvf->method()->new_version()->code_base(); + for (int i=0; iprint_cr("old=%d new=%d", *code_base_old++, *code_base_new++); + } + jvf->method()->print_codes_on(tty); + jvf->method()->new_version()->print_codes_on(tty); + } + + assert(jvf->is_interpreted_frame(), "Every frame must be interpreted!"); + interpretedVFrame *iframe = (interpretedVFrame *)jvf; + + + if (RC_TRACE_ENABLED(0x01000000)) { + constantPoolCacheOop cp_old = jvf->method()->constants()->cache(); + tty->print_cr("old cp"); + for (int i=0; ilength(); i++) { + cp_old->entry_at(i)->print(tty, i); + } + constantPoolCacheOop cp_new = jvf->method()->new_version()->constants()->cache(); + tty->print_cr("new cp"); + for (int i=0; ilength(); i++) { + cp_new->entry_at(i)->print(tty, i); + } + } -// Rewrite constant pool references in the methods. -bool VM_RedefineClasses::rewrite_cp_refs_in_methods( - instanceKlassHandle scratch_class, TRAPS) { + iframe->set_method(jvf->method()->new_version(), bci); + RC_TRACE(0x00000002, ("Updated method to newer version")); + assert(jvf->method()->new_version() == NULL, "must be latest version"); - objArrayHandle methods(THREAD, scratch_class->methods()); + } + } + } + vf = vf->sender(); + } + } + + // Advance to next thread + java_thread = java_thread->next(); + } +} + +void VM_RedefineClasses::method_forwarding() { + + int forwarding_count = 0; + JavaThread *java_thread = Threads::first(); + while (java_thread != NULL) { + + int stack_depth = 0; + if (java_thread->has_last_Java_frame()) { + + RC_TRACE(0x00000400, ("checking stack of Java thread %s", java_thread->name())); + + // vframes are resource allocated + Thread* current_thread = Thread::current(); + ResourceMark rm(current_thread); + HandleMark hm(current_thread); + + RegisterMap reg_map(java_thread); + frame f = java_thread->last_frame(); + vframe* vf = vframe::new_vframe(&f, ®_map, java_thread); + frame* last_entry_frame = NULL; + + while (vf != NULL) { + if (vf->is_java_frame()) { + // java frame (interpreted, compiled, ...) 
+ javaVFrame *jvf = javaVFrame::cast(vf); + + if (!(jvf->method()->is_native())) { + RC_TRACE(0x00008000, ("found method: %s", + jvf->method()->name()->as_C_string())); + ResourceMark rm(Thread::current()); + HandleMark hm; + instanceKlassHandle klass(jvf->method()->method_holder()); + methodOop m = jvf->method(); + int bci = jvf->bci(); + RC_TRACE(0x00008000, ("klass redef %d", + klass->is_redefining())); + + if (klass->new_version() != NULL && m->new_version() == NULL) { + RC_TRACE(0x00008000, ("found potential forwarding method: %s", + m->name()->as_C_string())); + + klassOop new_klass = klass->newest_version(); + methodOop new_method = new_klass->klass_part()->lookup_method(m->name(), m->signature()); + RC_TRACE(0x00000002, ("%d %d", + new_method, + new_method->constMethod()->has_code_section_table())); + + if (new_method != NULL && new_method->constMethod()->has_code_section_table()) { + RC_TRACE(0x00008000, ("found code section table for method: %s", + new_method->name()->as_C_string())); + m->set_forward_method(new_method); + if (new_method->max_locals() != m->max_locals()) { + tty->print_cr("new_m max locals: %d old_m max locals: %d", new_method->max_locals(), m->max_locals()); + } + assert(new_method->max_locals() == m->max_locals(), "number of locals must match"); + assert(new_method->max_stack() == m->max_stack(), "number of stack values must match"); + if (jvf->is_interpreted_frame()) { + if (m->is_in_code_section(bci)) { + // We must transfer now and cannot delay until next NOP. + int new_bci = m->calculate_forward_bci(bci, new_method); + interpretedVFrame* iframe = interpretedVFrame::cast(jvf); + RC_TRACE(0x00000002, ("Transfering execution of %s to new method old_bci=%d new_bci=%d", + new_method->name()->as_C_string(), + bci, + new_bci)); + iframe->set_method(new_method, new_bci); + } else { + RC_TRACE(0x00000002, ("Delaying method forwarding of %s because %d is not in a code section", + new_method->name()->as_C_string(), + bci)); + } + } else { + RC_TRACE(0x00000002, ("Delaying method forwarding of %s because method is compiled", + new_method->name()->as_C_string())); + } + } + } + } + } + vf = vf->sender(); + } + } - if (methods.is_null() || methods->length() == 0) { - // no methods so nothing to do - return true; + // Advance to next thread + java_thread = java_thread->next(); } - // rewrite constant pool references in the methods: - for (int i = methods->length() - 1; i >= 0; i--) { - methodHandle method(THREAD, (methodOop)methods->obj_at(i)); - methodHandle new_method; - rewrite_cp_refs_in_method(method, &new_method, CHECK_false); - if (!new_method.is_null()) { - // the method has been replaced so save the new method version - methods->obj_at_put(i, new_method()); + RC_TRACE(0x00000001, ("Method forwarding applied to %d methods", + forwarding_count)); +} + +bool VM_RedefineClasses::check_method_stacks() { + + JavaThread *java_thread = Threads::first(); + while (java_thread != NULL) { + + int stack_depth = 0; + if (java_thread->has_last_Java_frame()) { + + RC_TRACE(0x00000400, ("checking stack of Java thread %s", java_thread->name())); + + // vframes are resource allocated + Thread* current_thread = Thread::current(); + ResourceMark rm(current_thread); + HandleMark hm(current_thread); + + RegisterMap reg_map(java_thread); + frame f = java_thread->last_frame(); + vframe* vf = vframe::new_vframe(&f, ®_map, java_thread); + frame* last_entry_frame = NULL; + + while (vf != NULL) { + if (vf->is_java_frame()) { + // java frame (interpreted, compiled, ...) 
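Forwarding an interpreted frame above needs the old bci translated into the new method (calculate_forward_bci) and is only attempted when the bci lies inside a code section. The real code section table is not shown in this patch, so the sketch below is only a hypothetical model of the idea, mapping a bci through a table of (old start, new start, length) sections:

#include <cstdio>
#include <vector>

struct CodeSection { int old_start, new_start, length; };

// returns -1 when the bci is not inside any matching section
int forward_bci(const std::vector<CodeSection>& sections, int old_bci) {
  for (const CodeSection& s : sections) {
    if (old_bci >= s.old_start && old_bci < s.old_start + s.length) {
      return s.new_start + (old_bci - s.old_start);
    }
  }
  return -1;
}

int main() {
  std::vector<CodeSection> table = {{0, 0, 8}, {8, 12, 20}};
  std::printf("old bci 10 -> new bci %d\n", forward_bci(table, 10));
  std::printf("old bci 40 -> new bci %d (delay forwarding)\n", forward_bci(table, 40));
  return 0;
}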
+ javaVFrame *jvf = javaVFrame::cast(vf); + + if (!(jvf->method()->is_native())) { + RC_TRACE(0x00000400, ("found method: %s", jvf->method()->name()->as_C_string())); + ResourceMark rm(Thread::current()); + HandleMark hm; + instanceKlassHandle klass(jvf->method()->method_holder()); + + StackValueCollection *locals = jvf->locals(); + const size_t message_buffer_len = klass->name()->utf8_length() + 1024; + char* message_buffer = NEW_RESOURCE_ARRAY(char, message_buffer_len); + + for (int i=0; isize(); i++) { + StackValue *stack_value = locals->at(i); + if (stack_value->type() == T_OBJECT) { + Handle obj = stack_value->get_obj(); + if (!obj.is_null() && obj->klass()->klass_part()->newest_version()->klass_part()->check_redefinition_flag(Klass::RemoveSuperType)) { + + // OK, so this is a possible failure => check local variable table, if it could be OK. + bool result = false; + methodOop method = jvf->method(); + if (method->has_localvariable_table()) { + LocalVariableTableElement *elem = jvf->method()->localvariable_table_start(); + for (int j=0; jlocalvariable_table_length(); j++) { + + if (elem->slot == i) { + + // Matching index found + + if (elem->start_bci <= jvf->bci() && elem->start_bci + elem->length > jvf->bci()) { + + // Also in range!! + Symbol* signature = jvf->method()->constants()->symbol_at(elem->descriptor_cp_index); + Symbol* klass_name = signature_to_class_name(signature); + + klassOop local_klass = SystemDictionary::find(klass_name, jvf->method()->method_holder()->klass_part()->class_loader(), jvf->method()->method_holder()->klass_part()->protection_domain(), Thread::current())->klass_part()->newest_version(); + klassOop cur = obj->klass()->klass_part()->newest_version(); + + // Field is not null + if (cur->klass_part()->newest_version()->klass_part()->is_subtype_of(local_klass)) { + // We are OK + RC_TRACE(0x00008000, ("Local variable value is OK (local_klass=%s, cur_klass=%s)", + local_klass->klass_part()->name()->as_C_string(), cur->klass_part()->name()->as_C_string())); + result = true; + } else { + // Failure! 
+ RC_TRACE(0x00000001, ("Local variable value is INVALID - abort redefinition (local_klass=%s, cur_klass=%s)", + local_klass->klass_part()->name()->as_C_string(), + cur->klass_part()->name()->as_C_string())); + return false; + } + } + } + + elem++; + } + } else { + RC_TRACE(0x00000002, ("Method %s does not have a local variable table => abort", + method->name_and_sig_as_C_string())); + } + + if (!result) { + return false; + } + + RC_TRACE(0x00008000, ("Verifying class %s", + jvf->method()->method_holder()->klass_part()->name()->as_C_string())); + + Symbol* exception_name; + const size_t message_buffer_len = klass->name()->utf8_length() + 1024; + char* message_buffer = NEW_RESOURCE_ARRAY(char, message_buffer_len); + + Thread::current()->set_pretend_new_universe(true); + ClassVerifier split_verifier(klass, Thread::current()); + split_verifier.verify_method(jvf->method(), Thread::current()); + exception_name = split_verifier.result(); + Thread::current()->set_pretend_new_universe(false); + + if (exception_name != NULL) { + + RC_TRACE(0x00000001, ("Verification of class %s failed", + jvf->method()->method_holder()->klass_part()->name()->as_C_string())); + RC_TRACE(0x00000001, ("Exception: %s", + exception_name->as_C_string())); + RC_TRACE(0x00000001, ("Message: %s", + message_buffer)); + Thread::current()->clear_pending_exception(); + return false; + } + + } + } + } + } + } + vf = vf->sender(); + } } + + // Advance to next thread + java_thread = java_thread->next(); } return true; } +bool VM_RedefineClasses::check_method(methodOop method) { + + + return true; +} -// Rewrite constant pool references in the specific method. This code -// was adapted from Rewriter::rewrite_method(). -void VM_RedefineClasses::rewrite_cp_refs_in_method(methodHandle method, - methodHandle *new_method_p, TRAPS) { +// Warning: destroys redefinition level values of klasses. +bool VM_RedefineClasses::check_loaded_methods() { - *new_method_p = methodHandle(); // default is no new method + class CheckLoadedMethodsClosure : public ObjectClosure { - // We cache a pointer to the bytecodes here in code_base. If GC - // moves the methodOop, then the bytecodes will also move which - // will likely cause a crash. We create a No_Safepoint_Verifier - // object to detect whether we pass a possible safepoint in this - // code block. - No_Safepoint_Verifier nsv; + private: + + bool _result; + GrowableArray *_dangerous_klasses; - // Bytecodes and their length - address code_base = method->code_base(); - int code_length = method->code_size(); + public: + CheckLoadedMethodsClosure(GrowableArray *dangerous_klasses) { + _result = true; + _dangerous_klasses = dangerous_klasses; + } - int bc_length; - for (int bci = 0; bci < code_length; bci += bc_length) { - address bcp = code_base + bci; - Bytecodes::Code c = (Bytecodes::Code)(*bcp); + bool result() { + return _result; + } - bc_length = Bytecodes::length_for(c); - if (bc_length == 0) { - // More complicated bytecodes report a length of zero so - // we have to try again a slightly different way. 
- bc_length = Bytecodes::length_at(method(), bcp); + bool is_class_dangerous(klassOop k) { + return k->klass_part()->newest_version()->klass_part()->check_redefinition_flag(Klass::RemoveSuperType); } - assert(bc_length != 0, "impossible bytecode length"); + bool can_be_affected(instanceKlass *klass) { - switch (c) { - case Bytecodes::_ldc: - { - int cp_index = *(bcp + 1); - int new_index = find_new_index(cp_index); + constantPoolOop cp = klass->constants(); - if (StressLdcRewrite && new_index == 0) { - // If we are stressing ldc -> ldc_w rewriting, then we - // always need a new_index value. - new_index = cp_index; - } - if (new_index != 0) { - // the original index is mapped so we have more work to do - if (!StressLdcRewrite && new_index <= max_jubyte) { - // The new value can still use ldc instead of ldc_w - // unless we are trying to stress ldc -> ldc_w rewriting - RC_TRACE_WITH_THREAD(0x00080000, THREAD, - ("%s@" INTPTR_FORMAT " old=%d, new=%d", Bytecodes::name(c), - bcp, cp_index, new_index)); - *(bcp + 1) = new_index; - } else { - RC_TRACE_WITH_THREAD(0x00080000, THREAD, - ("%s->ldc_w@" INTPTR_FORMAT " old=%d, new=%d", - Bytecodes::name(c), bcp, cp_index, new_index)); - // the new value needs ldc_w instead of ldc - u_char inst_buffer[4]; // max instruction size is 4 bytes - bcp = (address)inst_buffer; - // construct new instruction sequence - *bcp = Bytecodes::_ldc_w; - bcp++; - // Rewriter::rewrite_method() does not rewrite ldc -> ldc_w. - // See comment below for difference between put_Java_u2() - // and put_native_u2(). - Bytes::put_Java_u2(bcp, new_index); - - Relocator rc(method, NULL /* no RelocatorListener needed */); - methodHandle m; - { - Pause_No_Safepoint_Verifier pnsv(&nsv); - - // ldc is 2 bytes and ldc_w is 3 bytes - m = rc.insert_space_at(bci, 3, inst_buffer, THREAD); - if (m.is_null() || HAS_PENDING_EXCEPTION) { - guarantee(false, "insert_space_at() failed"); + Thread *THREAD = Thread::current(); + klassOop k; + Symbol* symbol; + + for (int i=1; ilength(); i++) { + jbyte tag = cp->tag_at(i).value(); + switch(tag) { + case JVM_CONSTANT_Long: + case JVM_CONSTANT_Double: + i++; + break; + + case JVM_CONSTANT_Utf8: + case JVM_CONSTANT_Unicode: + case JVM_CONSTANT_Integer: + case JVM_CONSTANT_Float: + case JVM_CONSTANT_String: + case JVM_CONSTANT_Fieldref: + case JVM_CONSTANT_Methodref: + case JVM_CONSTANT_InterfaceMethodref: + case JVM_CONSTANT_ClassIndex: + case JVM_CONSTANT_UnresolvedString: + case JVM_CONSTANT_StringIndex: + case JVM_CONSTANT_UnresolvedClassInError: + case JVM_CONSTANT_Object: + // do nothing + break; + + case JVM_CONSTANT_Class: + k = cp->klass_at(i, CHECK_(true)); + if (is_class_dangerous(k)) { + RC_TRACE(0x00000002, ("Class %s is potentially affected, because at cp[%d] references class %s", + klass->name()->as_C_string(), + i, + k->klass_part()->name()->as_C_string())); + return true; + } + break; + + case JVM_CONSTANT_NameAndType: + symbol = cp->symbol_at(cp->signature_ref_index_at(i)); + if (symbol->byte_at(0) == '(') { + // This must be a method + SignatureStream signatureStream(symbol); + while (true) { + + if (signatureStream.is_array()) { + Symbol* cur_signature = signatureStream.as_symbol(Thread::current()); + if (is_type_signature_dangerous(cur_signature)) { + return true; + } + } else if (signatureStream.is_object()) { + if (is_symbol_dangerous(signatureStream.as_symbol(Thread::current()))) { + return true; + } + } + + if (signatureStream.at_return_type()) { + break; + } + + signatureStream.next(); } + + } else if 
(is_type_signature_dangerous(symbol)) { + return true; } + break; - // return the new method so that the caller can update - // the containing class - *new_method_p = method = m; - // switch our bytecode processing loop from the old method - // to the new method - code_base = method->code_base(); - code_length = method->code_size(); - bcp = code_base + bci; - c = (Bytecodes::Code)(*bcp); - bc_length = Bytecodes::length_for(c); - assert(bc_length != 0, "sanity check"); - } // end we need ldc_w instead of ldc - } // end if there is a mapped index - } break; - - // these bytecodes have a two-byte constant pool index - case Bytecodes::_anewarray : // fall through - case Bytecodes::_checkcast : // fall through - case Bytecodes::_getfield : // fall through - case Bytecodes::_getstatic : // fall through - case Bytecodes::_instanceof : // fall through - case Bytecodes::_invokeinterface: // fall through - case Bytecodes::_invokespecial : // fall through - case Bytecodes::_invokestatic : // fall through - case Bytecodes::_invokevirtual : // fall through - case Bytecodes::_ldc_w : // fall through - case Bytecodes::_ldc2_w : // fall through - case Bytecodes::_multianewarray : // fall through - case Bytecodes::_new : // fall through - case Bytecodes::_putfield : // fall through - case Bytecodes::_putstatic : - { - address p = bcp + 1; - int cp_index = Bytes::get_Java_u2(p); - int new_index = find_new_index(cp_index); - if (new_index != 0) { - // the original index is mapped so update w/ new value - RC_TRACE_WITH_THREAD(0x00080000, THREAD, - ("%s@" INTPTR_FORMAT " old=%d, new=%d", Bytecodes::name(c), - bcp, cp_index, new_index)); - // Rewriter::rewrite_method() uses put_native_u2() in this - // situation because it is reusing the constant pool index - // location for a native index into the constantPoolCache. - // Since we are updating the constant pool index prior to - // verification and constantPoolCache initialization, we - // need to keep the new index in Java byte order. - Bytes::put_Java_u2(p, new_index); + case JVM_CONSTANT_UnresolvedClass: + symbol = cp->unresolved_klass_at(i); + if (is_symbol_dangerous(symbol)) { + return true; + } + break; + + default: + ShouldNotReachHere(); } - } break; + } + + return false; } - } // end for each bytecode -} // end rewrite_cp_refs_in_method() + bool is_type_signature_dangerous(Symbol* signature) { + // This must be a field type + if (FieldType::is_obj(signature)) { + Symbol* name = signature_to_class_name(signature); + if (is_symbol_dangerous(name)) { + return true; + } + } else if (FieldType::is_array(signature)) { + //jint dimension; + //Symbol* object_key; + FieldArrayInfo fd; + FieldType::get_array_info(signature, fd, Thread::current()); + if (is_symbol_dangerous(fd.object_key())) { + return true; + } + } + return false; + } + + bool is_symbol_dangerous(Symbol* symbol) { + for (int i=0; i<_dangerous_klasses->length(); i++) { + if(_dangerous_klasses->at(i)->klass_part()->name() == symbol) { + RC_TRACE(0x00000002, ("Found constant pool index %d references class %s", + i, + symbol->as_C_string())); + return true; + } + } + return false; + } + + virtual void do_object(oop obj) { -// Rewrite constant pool references in the class_annotations field. 
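can_be_affected above inspects constant pool signatures for references to classes that lost a supertype. A standalone sketch that pulls the class names out of a JVM descriptor so they can be matched against such a set; the descriptor is invented and malformed input is not validated:

#include <cstdio>
#include <string>
#include <vector>

std::vector<std::string> referenced_classes(const std::string& desc) {
  std::vector<std::string> out;
  for (size_t i = 0; i < desc.size(); i++) {
    if (desc[i] == 'L') {                       // object type: L<name>;
      size_t end = desc.find(';', i);
      if (end == std::string::npos) break;      // malformed descriptor
      out.push_back(desc.substr(i + 1, end - i - 1));
      i = end;
    }                                           // array prefixes '[' fall through
  }
  return out;
}

int main() {
  for (const std::string& name :
       referenced_classes("(Ljava/util/List;[Lcom/example/Foo;I)V")) {
    std::printf("references %s\n", name.c_str());
  }
  return 0;
}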
-bool VM_RedefineClasses::rewrite_cp_refs_in_class_annotations( - instanceKlassHandle scratch_class, TRAPS) { + if (!_result) return; - typeArrayHandle class_annotations(THREAD, - scratch_class->class_annotations()); - if (class_annotations.is_null() || class_annotations->length() == 0) { - // no class_annotations so nothing to do - return true; + klassOop klassObj = (klassOop)obj; + Thread *THREAD = Thread::current(); + + // We found an instance klass! + instanceKlass *klass = instanceKlass::cast(klassObj); + instanceKlassHandle handle(klassObj); + + RC_TRACE(0x00000400, ("Check if verification is necessary for class %s major_version=%d", handle->name()->as_C_string(), handle->major_version())); + + if (!can_be_affected(klass)) { + RC_TRACE(0x00000400, ("Skipping verification of class %s major_version=%d", handle->name()->as_C_string(), handle->major_version())); + return; + } + + if (handle->major_version() < Verifier::STACKMAP_ATTRIBUTE_MAJOR_VERSION) { + RC_TRACE(0x00000001, ("Failing because cannot verify class %s major_version=%d", handle->name()->as_C_string(), handle->major_version())); + _result = false; + return; + } + + RC_TRACE(0x00000001, ("Verifying class %s", handle->name()->as_C_string())); + + if (!Verifier::verify(handle, Verifier::NoException, true, false, Thread::current())) { + + RC_TRACE(0x00000001, ("Verification of class %s failed", handle->name()->as_C_string())); + //Symbol* ex_name = PENDING_EXCEPTION->klass()->klass_part()->name(); + //RC_TRACE(0x00000002, ("exception when verifying class: '%s'", ex_name->as_C_string()); + //PENDING_EXCEPTION->print(); + CLEAR_PENDING_EXCEPTION; + _result = false; + } + + /*int method_count = klass->methods()->length(); + for (int i=0; imethods()->obj_at(i); + if (!check_method(cur_method)) { + RC_TRACE(0x00000001, ("Failed to verify consistency of method %s of klass %s", cur_method->name()->as_C_string(), klass->name()->as_C_string()); + } + }*/ + } + }; + + // TODO: Check bytecodes in case of interface => class or class => interface etc.. + + GrowableArray dangerous_klasses; + for (int i=0; i<_new_classes->length(); i++) { + instanceKlassHandle handle = _new_classes->at(i); + if (handle->check_redefinition_flag(Klass::RemoveSuperType)) { + dangerous_klasses.append(handle()); + } } - RC_TRACE_WITH_THREAD(0x02000000, THREAD, - ("class_annotations length=%d", class_annotations->length())); + CheckLoadedMethodsClosure checkLoadedMethodsClosure(&dangerous_klasses); + Thread::current()->set_pretend_new_universe(true); + SystemDictionary::classes_do(&checkLoadedMethodsClosure); + Thread::current()->set_pretend_new_universe(false); - int byte_i = 0; // byte index into class_annotations - return rewrite_cp_refs_in_annotations_typeArray(class_annotations, byte_i, - THREAD); + + return checkLoadedMethodsClosure.result(); } +bool VM_RedefineClasses::check_type_consistency() { -// Rewrite constant pool references in an annotations typeArray. 
This -// "structure" is adapted from the RuntimeVisibleAnnotations_attribute -// that is described in section 4.8.15 of the 2nd-edition of the VM spec: -// -// annotations_typeArray { -// u2 num_annotations; -// annotation annotations[num_annotations]; -// } -// -bool VM_RedefineClasses::rewrite_cp_refs_in_annotations_typeArray( - typeArrayHandle annotations_typeArray, int &byte_i_ref, TRAPS) { + Universe::set_verify_in_progress(true); - if ((byte_i_ref + 2) > annotations_typeArray->length()) { - // not enough room for num_annotations field - RC_TRACE_WITH_THREAD(0x02000000, THREAD, - ("length() is too small for num_annotations field")); + SystemDictionary::classes_do(calculate_type_check_information); + bool result = check_field_value_types(); + SystemDictionary::classes_do(clear_type_check_information); + if (!result) { + RC_TRACE(0x00000001, ("Aborting redefinition because of wrong field or array element value!")); + Universe::set_verify_in_progress(false); + return false; + } + + result = check_method_stacks(); + if (!result) { + RC_TRACE(0x00000001, ("Aborting redefinition because of wrong value on the stack")); + Universe::set_verify_in_progress(false); return false; } - u2 num_annotations = Bytes::get_Java_u2((address) - annotations_typeArray->byte_at_addr(byte_i_ref)); - byte_i_ref += 2; + result = check_loaded_methods(); + if (!result) { + RC_TRACE(0x00000001, ("Aborting redefinition because of wrong loaded method")); + Universe::set_verify_in_progress(false); + return false; + } + + RC_TRACE(0x00000001, ("Verification passed => hierarchy change is valid!")); + Universe::set_verify_in_progress(false); + return true; +} + +void VM_RedefineClasses::rollback() { + RC_TRACE(0x00000001, ("Rolling back redefinition!")); + SystemDictionary::rollback_redefinition(); + + RC_TRACE(0x00000001, ("After rolling back system dictionary!")); + for (int i=0; i<_new_classes->length(); i++) { + SystemDictionary::remove_from_hierarchy(_new_classes->at(i)); + } + + for (int i=0; i<_new_classes->length(); i++) { + instanceKlassHandle new_class = _new_classes->at(i); + new_class->set_redefining(false); + new_class->old_version()->klass_part()->set_new_version(NULL); + new_class->set_old_version(NULL); + } - RC_TRACE_WITH_THREAD(0x02000000, THREAD, - ("num_annotations=%d", num_annotations)); +} - int calc_num_annotations = 0; - for (; calc_num_annotations < num_annotations; calc_num_annotations++) { - if (!rewrite_cp_refs_in_annotation_struct(annotations_typeArray, - byte_i_ref, THREAD)) { - RC_TRACE_WITH_THREAD(0x02000000, THREAD, - ("bad annotation_struct at %d", calc_num_annotations)); - // propagate failure back to caller - return false; +template void VM_RedefineClasses::do_oop_work(T* p) { + T heap_oop = oopDesc::load_heap_oop(p); + if (!oopDesc::is_null(heap_oop)) { + oop obj = oopDesc::decode_heap_oop_not_null(heap_oop); + if (obj->is_instanceKlass()) { + klassOop klass = (klassOop)obj; + // DCEVM: note: can overwrite owner of old_klass constants pool with new_klass, so we need to fix it back later + if (klass->new_version() != NULL && klass->new_version()->klass_part()->is_redefining()) { + obj = klass->klass_part()->new_version(); + oopDesc::encode_store_heap_oop_not_null(p, obj); + } + } else if (obj->blueprint()->newest_version() == SystemDictionary::Class_klass()->klass_part()->newest_version()) { + // update references to java.lang.Class to point to newest version. Only update references to non-primitive + // java.lang.Class instances. 
+ klassOop klass_oop = java_lang_Class::as_klassOop(obj); + if (klass_oop != NULL) { + if (klass_oop->new_version() != NULL && klass_oop->new_version()->klass_part()->is_redefining()) { + obj = klass_oop->new_version()->java_mirror(); + } else if (klass_oop->klass_part()->is_redefining()) { + obj = klass_oop->java_mirror(); + } + oopDesc::encode_store_heap_oop_not_null(p, obj); + + + // FIXME: DCEVM: better implementation? + // Starting from JDK 7 java_mirror can be kept in the regular heap. Therefore, it is possible + // that new java_mirror is in the young generation whereas p is in tenured generation. In that + // case we need to run write barrier to make sure card table is properly updated. This will + // allow JVM to detect reference in tenured generation properly during young generation GC. + if (Universe::heap()->is_in_reserved(p)) { + if (GenCollectedHeap::heap()->is_in_young(obj)) { + GenRemSet* rs = GenCollectedHeap::heap()->rem_set(); + assert(rs->rs_kind() == GenRemSet::CardTable, "Wrong rem set kind."); + CardTableRS* _rs = (CardTableRS*)rs; + _rs->inline_write_ref_field_gc(p, obj); + } + } + } } } - assert(num_annotations == calc_num_annotations, "sanity check"); +} - return true; -} // end rewrite_cp_refs_in_annotations_typeArray() +void VM_RedefineClasses::swap_marks(oop first, oop second) { + markOop first_mark = first->mark(); + markOop second_mark = second->mark(); + first->set_mark(second_mark); + second->set_mark(first_mark); +} +void VM_RedefineClasses::doit() { + Thread *thread = Thread::current(); -// Rewrite constant pool references in the annotation struct portion of -// an annotations_typeArray. This "structure" is from section 4.8.15 of -// the 2nd-edition of the VM spec: -// -// struct annotation { -// u2 type_index; -// u2 num_element_value_pairs; -// { -// u2 element_name_index; -// element_value value; -// } element_value_pairs[num_element_value_pairs]; -// } -// -bool VM_RedefineClasses::rewrite_cp_refs_in_annotation_struct( - typeArrayHandle annotations_typeArray, int &byte_i_ref, TRAPS) { - if ((byte_i_ref + 2 + 2) > annotations_typeArray->length()) { - // not enough room for smallest annotation_struct - RC_TRACE_WITH_THREAD(0x02000000, THREAD, - ("length() is too small for annotation_struct")); - return false; - } + RC_TRACE(0x00000001, ("Entering doit!")); - u2 type_index = rewrite_cp_ref_in_annotation_data(annotations_typeArray, - byte_i_ref, "mapped old type_index=%d", THREAD); - u2 num_element_value_pairs = Bytes::get_Java_u2((address) - annotations_typeArray->byte_at_addr( - byte_i_ref)); - byte_i_ref += 2; + if ((_max_redefinition_flags & Klass::RemoveSuperType) != 0) { - RC_TRACE_WITH_THREAD(0x02000000, THREAD, - ("type_index=%d num_element_value_pairs=%d", type_index, - num_element_value_pairs)); + RC_TIMER_START(_timer_check_type); - int calc_num_element_value_pairs = 0; - for (; calc_num_element_value_pairs < num_element_value_pairs; - calc_num_element_value_pairs++) { - if ((byte_i_ref + 2) > annotations_typeArray->length()) { - // not enough room for another element_name_index, let alone - // the rest of another component - RC_TRACE_WITH_THREAD(0x02000000, THREAD, - ("length() is too small for element_name_index")); - return false; + if (!check_type_consistency()) { + // (tw) TODO: Rollback the class redefinition + rollback(); + RC_TRACE(0x00000001, ("Detected type inconsistency!")); + _result = JVMTI_ERROR_UNSUPPORTED_REDEFINITION_HIERARCHY_CHANGED; + RC_TIMER_STOP(_timer_check_type); + return; } - u2 element_name_index = 
rewrite_cp_ref_in_annotation_data( - annotations_typeArray, byte_i_ref, - "mapped old element_name_index=%d", THREAD); + RC_TIMER_STOP(_timer_check_type); + + } else { + RC_TRACE(0x00000001, ("No type narrowing => skipping check for type inconsistency")); + } - RC_TRACE_WITH_THREAD(0x02000000, THREAD, - ("element_name_index=%d", element_name_index)); + if (UseMethodForwardPoints) { + RC_TRACE(0x00000001, ("Check stack for forwarding methods to new version")); + method_forwarding(); + } - if (!rewrite_cp_refs_in_element_value(annotations_typeArray, - byte_i_ref, THREAD)) { - RC_TRACE_WITH_THREAD(0x02000000, THREAD, - ("bad element_value at %d", calc_num_element_value_pairs)); - // propagate failure back to caller - return false; + if (UseSharedSpaces) { + // Sharing is enabled so we remap the shared readonly space to + // shared readwrite, private just in case we need to redefine + // a shared class. We do the remap during the doit() phase of + // the safepoint to be safer. + if (!CompactingPermGenGen::remap_shared_readonly_as_readwrite()) { + RC_TRACE(0x00000001, ("failed to remap shared readonly space to readwrite, private")); + _result = JVMTI_ERROR_INTERNAL; + return; } - } // end for each component - assert(num_element_value_pairs == calc_num_element_value_pairs, - "sanity check"); + } - return true; -} // end rewrite_cp_refs_in_annotation_struct() - - -// Rewrite a constant pool reference at the current position in -// annotations_typeArray if needed. Returns the original constant -// pool reference if a rewrite was not needed or the new constant -// pool reference if a rewrite was needed. -u2 VM_RedefineClasses::rewrite_cp_ref_in_annotation_data( - typeArrayHandle annotations_typeArray, int &byte_i_ref, - const char * trace_mesg, TRAPS) { - - address cp_index_addr = (address) - annotations_typeArray->byte_at_addr(byte_i_ref); - u2 old_cp_index = Bytes::get_Java_u2(cp_index_addr); - u2 new_cp_index = find_new_index(old_cp_index); - if (new_cp_index != 0) { - RC_TRACE_WITH_THREAD(0x02000000, THREAD, (trace_mesg, old_cp_index)); - Bytes::put_Java_u2(cp_index_addr, new_cp_index); - old_cp_index = new_cp_index; - } - byte_i_ref += 2; - return old_cp_index; -} + RC_TIMER_START(_timer_prepare_redefinition); + for (int i = 0; i < _new_classes->length(); i++) { + redefine_single_class(_new_classes->at(i), thread); + } + // Deoptimize all compiled code that depends on this class + flush_dependent_code(instanceKlassHandle(Thread::current(), (klassOop)NULL), Thread::current()); -// Rewrite constant pool references in the element_value portion of an -// annotations_typeArray. This "structure" is from section 4.8.15.1 of -// the 2nd-edition of the VM spec: -// -// struct element_value { -// u1 tag; -// union { -// u2 const_value_index; -// { -// u2 type_name_index; -// u2 const_name_index; -// } enum_const_value; -// u2 class_info_index; -// annotation annotation_value; -// struct { -// u2 num_values; -// element_value values[num_values]; -// } array_value; -// } value; -// } -// -bool VM_RedefineClasses::rewrite_cp_refs_in_element_value( - typeArrayHandle annotations_typeArray, int &byte_i_ref, TRAPS) { + // Adjust constantpool caches and vtables for all classes + // that reference methods of the evolved class. 
+  SystemDictionary::classes_do(adjust_cpool_cache, Thread::current());
-  if ((byte_i_ref + 1) > annotations_typeArray->length()) {
-    // not enough room for a tag let alone the rest of an element_value
-    RC_TRACE_WITH_THREAD(0x02000000, THREAD,
-      ("length() is too small for a tag"));
-    return false;
-  }
+  RC_TIMER_STOP(_timer_prepare_redefinition);
+  RC_TIMER_START(_timer_redefinition);
-  u1 tag = annotations_typeArray->byte_at(byte_i_ref);
-  byte_i_ref++;
-  RC_TRACE_WITH_THREAD(0x02000000, THREAD, ("tag='%c'", tag));
-
-  switch (tag) {
-    // These BaseType tag values are from Table 4.2 in VM spec:
-    case 'B':  // byte
-    case 'C':  // char
-    case 'D':  // double
-    case 'F':  // float
-    case 'I':  // int
-    case 'J':  // long
-    case 'S':  // short
-    case 'Z':  // boolean
-
-    // The remaining tag values are from Table 4.8 in the 2nd-edition of
-    // the VM spec:
-    case 's':
-    {
-      // For the above tag values (including the BaseType values),
-      // value.const_value_index is right union field.
+  class ChangePointersOopClosure : public OopClosure {
+    virtual void do_oop(oop* o) {
+      do_oop_work(o);
+    }
-      if ((byte_i_ref + 2) > annotations_typeArray->length()) {
-        // not enough room for a const_value_index
-        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
-          ("length() is too small for a const_value_index"));
-        return false;
+    virtual void do_oop(narrowOop* o) {
+      do_oop_work(o);
     }
+  };
-      u2 const_value_index = rewrite_cp_ref_in_annotation_data(
-                               annotations_typeArray, byte_i_ref,
-                               "mapped old const_value_index=%d", THREAD);
+  class ChangePointersObjectClosure : public ObjectClosure {
-      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
-        ("const_value_index=%d", const_value_index));
-    } break;
+  private:
-    case 'e':
-    {
-      // for the above tag value, value.enum_const_value is right union field
+    OopClosure *_closure;
+    bool _needs_instance_update;
+    GrowableArray<oop> *_updated_oops;
-      if ((byte_i_ref + 4) > annotations_typeArray->length()) {
-        // not enough room for a enum_const_value
-        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
-          ("length() is too small for a enum_const_value"));
-        return false;
-      }
+  public:
+    ChangePointersObjectClosure(OopClosure *closure) : _closure(closure), _needs_instance_update(false), _updated_oops(NULL) {}
-      u2 type_name_index = rewrite_cp_ref_in_annotation_data(
-                             annotations_typeArray, byte_i_ref,
-                             "mapped old type_name_index=%d", THREAD);
+    bool needs_instance_update() {
+      return _needs_instance_update;
+    }
-      u2 const_name_index = rewrite_cp_ref_in_annotation_data(
-                              annotations_typeArray, byte_i_ref,
-                              "mapped old const_name_index=%d", THREAD);
+    GrowableArray<oop> *updated_oops() { return _updated_oops; }
-      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
-        ("type_name_index=%d const_name_index=%d", type_name_index,
-        const_name_index));
-    } break;
+    virtual void do_object(oop obj) {
+      if (!obj->is_instanceKlass()) {
+        obj->oop_iterate(_closure);
+
+        if (obj->blueprint()->is_redefining()) {
-    case 'c':
-    {
-      // for the above tag value, value.class_info_index is right union field
+          if (obj->blueprint()->check_redefinition_flag(Klass::HasInstanceTransformer)) {
+            if (_updated_oops == NULL) {
+              _updated_oops = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<oop>(100, true);
+            }
+            _updated_oops->append(obj);
+          }
-      if ((byte_i_ref + 2) > annotations_typeArray->length()) {
-        // not enough room for a class_info_index
-        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
-          ("length() is too small for a class_info_index"));
-        return false;
-      }
+          if (obj->blueprint()->update_information() != NULL || obj->is_perm()) {
-      u2
class_info_index = rewrite_cp_ref_in_annotation_data( - annotations_typeArray, byte_i_ref, - "mapped old class_info_index=%d", THREAD); + assert(obj->blueprint()->old_version() != NULL, "must have old version"); + obj->set_klass_no_check(obj->blueprint()->old_version()); - RC_TRACE_WITH_THREAD(0x02000000, THREAD, - ("class_info_index=%d", class_info_index)); - } break; + if (obj->size() != obj->size_given_klass(obj->blueprint()->new_version()->klass_part()) || obj->is_perm()) { + // We need an instance update => set back to old klass + _needs_instance_update = true; - case '@': - // For the above tag value, value.attr_value is the right union - // field. This is a nested annotation. - if (!rewrite_cp_refs_in_annotation_struct(annotations_typeArray, - byte_i_ref, THREAD)) { - // propagate failure back to caller - return false; - } - break; + } else { + MarkSweep::update_fields(obj, obj); + assert(obj->blueprint()->is_redefining(), "update fields resets the klass"); + } + } + } - case '[': - { - if ((byte_i_ref + 2) > annotations_typeArray->length()) { - // not enough room for a num_values field - RC_TRACE_WITH_THREAD(0x02000000, THREAD, - ("length() is too small for a num_values field")); - return false; - } - - // For the above tag value, value.array_value is the right union - // field. This is an array of nested element_value. - u2 num_values = Bytes::get_Java_u2((address) - annotations_typeArray->byte_at_addr(byte_i_ref)); - byte_i_ref += 2; - RC_TRACE_WITH_THREAD(0x02000000, THREAD, ("num_values=%d", num_values)); - - int calc_num_values = 0; - for (; calc_num_values < num_values; calc_num_values++) { - if (!rewrite_cp_refs_in_element_value( - annotations_typeArray, byte_i_ref, THREAD)) { - RC_TRACE_WITH_THREAD(0x02000000, THREAD, - ("bad nested element_value at %d", calc_num_values)); - // propagate failure back to caller - return false; + } else { + instanceKlass *klass = instanceKlass::cast((klassOop)obj); + if (klass->is_redefining()) { + // DCEVM: We need to restorte constants pool owner which was updated by do_oop_work + instanceKlass* old_klass = instanceKlass::cast(klass->old_version()); + old_klass->constants()->set_pool_holder(klass->old_version()); + + // Initialize the new class! Special static initialization that does not execute the + // static constructor but copies static field values from the old class if name + // and signature of a static field match. + klass->initialize_redefined_class(); + } + // idubrov: FIXME: we probably don't need that since oop's will be visited in a regular way... 
+ // idubrov: need to check if there is a test to verify that fields referencing class being updated + // idubrov: will get new version of that class + //klass->iterate_static_fields(_closure); } } - assert(num_values == calc_num_values, "sanity check"); - } break; + }; - default: - RC_TRACE_WITH_THREAD(0x02000000, THREAD, ("bad tag=0x%x", tag)); - return false; - } // end decode tag field + ChangePointersOopClosure oopClosure; + ChangePointersObjectClosure objectClosure(&oopClosure); - return true; -} // end rewrite_cp_refs_in_element_value() + { + SharedHeap::heap()->gc_prologue(true); + Universe::root_oops_do(&oopClosure); + Universe::heap()->object_iterate(&objectClosure); + SharedHeap::heap()->gc_epilogue(false); + } + // Swap marks to have same hashcodes + for (int i=0; i<_new_classes->length(); i++) { + swap_marks(_new_classes->at(i)(), _new_classes->at(i)->old_version()); + swap_marks(_new_classes->at(i)->java_mirror(), _new_classes->at(i)->old_version()->java_mirror()); + } -// Rewrite constant pool references in a fields_annotations field. -bool VM_RedefineClasses::rewrite_cp_refs_in_fields_annotations( - instanceKlassHandle scratch_class, TRAPS) { + _updated_oops = objectClosure.updated_oops(); - objArrayHandle fields_annotations(THREAD, - scratch_class->fields_annotations()); + if (objectClosure.needs_instance_update()){ - if (fields_annotations.is_null() || fields_annotations->length() == 0) { - // no fields_annotations so nothing to do - return true; + // Do a full garbage collection to update the instance sizes accordingly + RC_TRACE(0x00000001, ("Before performing full GC!")); + Universe::set_redefining_gc_run(true); + JvmtiGCMarker jgcm; + notify_gc_begin(true); + Universe::heap()->collect_as_vm_thread(GCCause::_heap_inspection); + notify_gc_end(); + Universe::set_redefining_gc_run(false); + RC_TRACE(0x00000001, ("GC done!")); } - RC_TRACE_WITH_THREAD(0x02000000, THREAD, - ("fields_annotations length=%d", fields_annotations->length())); - - for (int i = 0; i < fields_annotations->length(); i++) { - typeArrayHandle field_annotations(THREAD, - (typeArrayOop)fields_annotations->obj_at(i)); - if (field_annotations.is_null() || field_annotations->length() == 0) { - // this field does not have any annotations so skip it - continue; - } - int byte_i = 0; // byte index into field_annotations - if (!rewrite_cp_refs_in_annotations_typeArray(field_annotations, byte_i, - THREAD)) { - RC_TRACE_WITH_THREAD(0x02000000, THREAD, - ("bad field_annotations at %d", i)); - // propagate failure back to caller - return false; + if (RC_TRACE_ENABLED(0x00000001)) { + if (_updated_oops != NULL) { + RC_TRACE(0x00000001, ("%d object(s) updated!", _updated_oops->length())); + } else { + RC_TRACE(0x00000001, ("No objects updated!")); } } - return true; -} // end rewrite_cp_refs_in_fields_annotations() - - -// Rewrite constant pool references in a methods_annotations field. 
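The object walk above decides, per heap object, whether patching the fields in place is enough or whether the instance has to be reallocated; only in the latter case is the full collection afterwards triggered. A stripped-down, illustrative sketch of that per-object size test (not part of the patch; the closure name is made up, the accessors are the ones used in the hunk, and the additional update_information()/is_perm() conditions of the real closure are left out for brevity):

// Illustrative sketch only: the size comparison that drives needs_instance_update().
class CountResizedInstances : public ObjectClosure {
 public:
  int _resized;
  CountResizedInstances() : _resized(0) {}
  virtual void do_object(oop obj) {
    Klass* k = obj->blueprint();
    if (!obj->is_instanceKlass() && k->is_redefining()) {
      // If the new class version changes the instance layout, the object
      // cannot be patched in place and a full GC run has to reallocate it.
      if (obj->size() != obj->size_given_klass(k->new_version()->klass_part())) {
        _resized++;
      }
    }
  }
};
// Driven the same way as the closure above: Universe::heap()->object_iterate(&closure);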
-bool VM_RedefineClasses::rewrite_cp_refs_in_methods_annotations( - instanceKlassHandle scratch_class, TRAPS) { + // Unmark klassOops as "redefining" + for (int i=0; i<_new_classes->length(); i++) { + klassOop cur = _new_classes->at(i)(); + _new_classes->at(i)->set_redefining(false); + _new_classes->at(i)->clear_update_information(); + _new_classes->at(i)->update_supers_to_newest_version(); - objArrayHandle methods_annotations(THREAD, - scratch_class->methods_annotations()); + if (((instanceKlass *)cur->klass_part()->old_version()->klass_part())->array_klasses() != NULL) { + update_array_classes_to_newest_version(((instanceKlass *)cur->klass_part()->old_version()->klass_part())->array_klasses()); - if (methods_annotations.is_null() || methods_annotations->length() == 0) { - // no methods_annotations so nothing to do - return true; - } - - RC_TRACE_WITH_THREAD(0x02000000, THREAD, - ("methods_annotations length=%d", methods_annotations->length())); + // Transfer the array classes, otherwise we might get cast exceptions when casting array types. + ((instanceKlass*)cur->klass_part())->set_array_klasses(((instanceKlass*)cur->klass_part()->old_version()->klass_part())->array_klasses()); - for (int i = 0; i < methods_annotations->length(); i++) { - typeArrayHandle method_annotations(THREAD, - (typeArrayOop)methods_annotations->obj_at(i)); - if (method_annotations.is_null() || method_annotations->length() == 0) { - // this method does not have any annotations so skip it - continue; + oop new_mirror = _new_classes->at(i)->java_mirror(); + oop old_mirror = _new_classes->at(i)->old_version()->java_mirror(); + java_lang_Class::set_array_klass(new_mirror, java_lang_Class::array_klass(old_mirror)); } + } - int byte_i = 0; // byte index into method_annotations - if (!rewrite_cp_refs_in_annotations_typeArray(method_annotations, byte_i, - THREAD)) { - RC_TRACE_WITH_THREAD(0x02000000, THREAD, - ("bad method_annotations at %d", i)); - // propagate failure back to caller - return false; - } + for (int i=T_BOOLEAN; i<=T_LONG; i++) { + update_array_classes_to_newest_version(Universe::typeArrayKlassObj((BasicType)i)); } - return true; -} // end rewrite_cp_refs_in_methods_annotations() + // Disable any dependent concurrent compilations + SystemDictionary::notice_modification(); + // Set flag indicating that some invariants are no longer true. + // See jvmtiExport.hpp for detailed explanation. + JvmtiExport::set_has_redefined_a_class(); -// Rewrite constant pool references in a methods_parameter_annotations -// field. 
This "structure" is adapted from the -// RuntimeVisibleParameterAnnotations_attribute described in section -// 4.8.17 of the 2nd-edition of the VM spec: -// -// methods_parameter_annotations_typeArray { -// u1 num_parameters; -// { -// u2 num_annotations; -// annotation annotations[num_annotations]; -// } parameter_annotations[num_parameters]; -// } -// -bool VM_RedefineClasses::rewrite_cp_refs_in_methods_parameter_annotations( - instanceKlassHandle scratch_class, TRAPS) { + // Clean up caches in the compiler interface and compiler threads + CompileBroker::cleanup_after_redefinition(); - objArrayHandle methods_parameter_annotations(THREAD, - scratch_class->methods_parameter_annotations()); +#ifdef ASSERT - if (methods_parameter_annotations.is_null() - || methods_parameter_annotations->length() == 0) { - // no methods_parameter_annotations so nothing to do - return true; - } + // Universe::verify(); + // JNIHandles::verify(); - RC_TRACE_WITH_THREAD(0x02000000, THREAD, - ("methods_parameter_annotations length=%d", - methods_parameter_annotations->length())); + SystemDictionary::classes_do(check_class, thread); +#endif - for (int i = 0; i < methods_parameter_annotations->length(); i++) { - typeArrayHandle method_parameter_annotations(THREAD, - (typeArrayOop)methods_parameter_annotations->obj_at(i)); - if (method_parameter_annotations.is_null() - || method_parameter_annotations->length() == 0) { - // this method does not have any parameter annotations so skip it - continue; - } + update_active_methods(); + RC_TIMER_STOP(_timer_redefinition); - if (method_parameter_annotations->length() < 1) { - // not enough room for a num_parameters field - RC_TRACE_WITH_THREAD(0x02000000, THREAD, - ("length() is too small for a num_parameters field at %d", i)); - return false; - } +} - int byte_i = 0; // byte index into method_parameter_annotations +void VM_RedefineClasses::update_array_classes_to_newest_version(klassOop smallest_dimension) { - u1 num_parameters = method_parameter_annotations->byte_at(byte_i); - byte_i++; + arrayKlass *curArrayKlass = arrayKlass::cast(smallest_dimension); + assert(curArrayKlass->lower_dimension() == NULL, "argument must be smallest dimension"); - RC_TRACE_WITH_THREAD(0x02000000, THREAD, - ("num_parameters=%d", num_parameters)); - int calc_num_parameters = 0; - for (; calc_num_parameters < num_parameters; calc_num_parameters++) { - if (!rewrite_cp_refs_in_annotations_typeArray( - method_parameter_annotations, byte_i, THREAD)) { - RC_TRACE_WITH_THREAD(0x02000000, THREAD, - ("bad method_parameter_annotations at %d", calc_num_parameters)); - // propagate failure back to caller - return false; - } + while (curArrayKlass != NULL) { + klassOop higher_dimension = curArrayKlass->higher_dimension(); + klassOop lower_dimension = curArrayKlass->lower_dimension(); + curArrayKlass->update_supers_to_newest_version(); + + curArrayKlass = NULL; + if (higher_dimension != NULL) { + curArrayKlass = arrayKlass::cast(higher_dimension); } - assert(num_parameters == calc_num_parameters, "sanity check"); } - return true; -} // end rewrite_cp_refs_in_methods_parameter_annotations() +} + +void VM_RedefineClasses::doit_epilogue() { + RC_TIMER_START(_timer_vm_op_epilogue); -// Rewrite constant pool references in a methods_default_annotations -// field. 
This "structure" is adapted from the AnnotationDefault_attribute -// that is described in section 4.8.19 of the 2nd-edition of the VM spec: -// -// methods_default_annotations_typeArray { -// element_value default_value; -// } -// -bool VM_RedefineClasses::rewrite_cp_refs_in_methods_default_annotations( - instanceKlassHandle scratch_class, TRAPS) { + unlock_threads(); - objArrayHandle methods_default_annotations(THREAD, - scratch_class->methods_default_annotations()); + ResourceMark mark; - if (methods_default_annotations.is_null() - || methods_default_annotations->length() == 0) { - // no methods_default_annotations so nothing to do - return true; - } + VM_GC_Operation::doit_epilogue(); + RC_TRACE(0x00000001, ("GC Operation epilogue finished! ")); - RC_TRACE_WITH_THREAD(0x02000000, THREAD, - ("methods_default_annotations length=%d", - methods_default_annotations->length())); + GrowableArray instanceTransformerMethods; - for (int i = 0; i < methods_default_annotations->length(); i++) { - typeArrayHandle method_default_annotations(THREAD, - (typeArrayOop)methods_default_annotations->obj_at(i)); - if (method_default_annotations.is_null() - || method_default_annotations->length() == 0) { - // this method does not have any default annotations so skip it - continue; + // Call static transformers + for (int i=0; i<_new_classes->length(); i++) { + + instanceKlassHandle klass = _new_classes->at(i); + + // Transfer init state + if (klass->old_version() != NULL) { + instanceKlass::ClassState state = instanceKlass::cast(klass->old_version())->init_state(); + if (state > instanceKlass::linked) { + klass->initialize(Thread::current()); + } } + + // Find instance transformer method - int byte_i = 0; // byte index into method_default_annotations + if (klass->check_redefinition_flag(Klass::HasInstanceTransformer)) { - if (!rewrite_cp_refs_in_element_value( - method_default_annotations, byte_i, THREAD)) { - RC_TRACE_WITH_THREAD(0x02000000, THREAD, - ("bad default element_value at %d", i)); - // propagate failure back to caller - return false; + RC_TRACE(0x00008000, ("Call instance transformer of %s instance", klass->name()->as_C_string())); + klassOop cur_klass = klass(); + while (cur_klass != NULL) { + methodOop method = ((instanceKlass*)cur_klass->klass_part())->find_method(vmSymbols::transformer_name(), vmSymbols::void_method_signature()); + if (method != NULL) { + methodHandle instanceTransformerMethod(method); + instanceTransformerMethods.append(instanceTransformerMethod); + break; + } else { + cur_klass = cur_klass->klass_part()->super(); + } + } + assert(cur_klass != NULL, "must have instance transformer method"); + } else { + instanceTransformerMethods.append(methodHandle(Thread::current(), NULL)); } } - return true; -} // end rewrite_cp_refs_in_methods_default_annotations() + // Call instance transformers + if (_updated_oops != NULL) { -// Rewrite constant pool references in the method's stackmap table. 
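The while loop above resolves the instance transformer by walking up the superclass chain until some class declares a method matching vmSymbols::transformer_name() with a void signature. Factored out, the lookup is essentially the following helper (illustrative only, not part of the patch; it reuses only calls that appear in the hunk above):

// Illustrative sketch only: first-match method lookup along the super chain.
static methodOop find_in_hierarchy(klassOop k, Symbol* name, Symbol* signature) {
  while (k != NULL) {
    methodOop m = ((instanceKlass*)k->klass_part())->find_method(name, signature);
    if (m != NULL) {
      return m;                     // nearest declaration wins
    }
    k = k->klass_part()->super();   // continue with the superclass
  }
  return NULL;                      // the caller above asserts this cannot happen
}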
-// These "structures" are adapted from the StackMapTable_attribute that -// is described in section 4.8.4 of the 6.0 version of the VM spec -// (dated 2005.10.26): -// file:///net/quincunx.sfbay/export/gbracha/ClassFile-Java6.pdf -// -// stack_map { -// u2 number_of_entries; -// stack_map_frame entries[number_of_entries]; -// } -// -void VM_RedefineClasses::rewrite_cp_refs_in_stack_map_table( - methodHandle method, TRAPS) { - - if (!method->has_stackmap_table()) { - return; - } - - typeArrayOop stackmap_data = method->stackmap_data(); - address stackmap_p = (address)stackmap_data->byte_at_addr(0); - address stackmap_end = stackmap_p + stackmap_data->length(); - - assert(stackmap_p + 2 <= stackmap_end, "no room for number_of_entries"); - u2 number_of_entries = Bytes::get_Java_u2(stackmap_p); - stackmap_p += 2; - - RC_TRACE_WITH_THREAD(0x04000000, THREAD, - ("number_of_entries=%u", number_of_entries)); - - // walk through each stack_map_frame - u2 calc_number_of_entries = 0; - for (; calc_number_of_entries < number_of_entries; calc_number_of_entries++) { - // The stack_map_frame structure is a u1 frame_type followed by - // 0 or more bytes of data: - // - // union stack_map_frame { - // same_frame; - // same_locals_1_stack_item_frame; - // same_locals_1_stack_item_frame_extended; - // chop_frame; - // same_frame_extended; - // append_frame; - // full_frame; - // } - - assert(stackmap_p + 1 <= stackmap_end, "no room for frame_type"); - // The Linux compiler does not like frame_type to be u1 or u2. It - // issues the following warning for the first if-statement below: - // - // "warning: comparison is always true due to limited range of data type" - // - u4 frame_type = *stackmap_p; - stackmap_p++; - - // same_frame { - // u1 frame_type = SAME; /* 0-63 */ - // } - if (frame_type >= 0 && frame_type <= 63) { - // nothing more to do for same_frame - } - - // same_locals_1_stack_item_frame { - // u1 frame_type = SAME_LOCALS_1_STACK_ITEM; /* 64-127 */ - // verification_type_info stack[1]; - // } - else if (frame_type >= 64 && frame_type <= 127) { - rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end, - calc_number_of_entries, frame_type, THREAD); - } - - // reserved for future use - else if (frame_type >= 128 && frame_type <= 246) { - // nothing more to do for reserved frame_types - } - - // same_locals_1_stack_item_frame_extended { - // u1 frame_type = SAME_LOCALS_1_STACK_ITEM_EXTENDED; /* 247 */ - // u2 offset_delta; - // verification_type_info stack[1]; - // } - else if (frame_type == 247) { - stackmap_p += 2; - rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end, - calc_number_of_entries, frame_type, THREAD); - } - - // chop_frame { - // u1 frame_type = CHOP; /* 248-250 */ - // u2 offset_delta; - // } - else if (frame_type >= 248 && frame_type <= 250) { - stackmap_p += 2; - } - - // same_frame_extended { - // u1 frame_type = SAME_FRAME_EXTENDED; /* 251*/ - // u2 offset_delta; - // } - else if (frame_type == 251) { - stackmap_p += 2; - } - - // append_frame { - // u1 frame_type = APPEND; /* 252-254 */ - // u2 offset_delta; - // verification_type_info locals[frame_type - 251]; - // } - else if (frame_type >= 252 && frame_type <= 254) { - assert(stackmap_p + 2 <= stackmap_end, - "no room for offset_delta"); - stackmap_p += 2; - u1 len = frame_type - 251; - for (u1 i = 0; i < len; i++) { - rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end, - calc_number_of_entries, frame_type, THREAD); - } - } - - // full_frame { - // u1 frame_type = FULL_FRAME; /* 
255 */ - // u2 offset_delta; - // u2 number_of_locals; - // verification_type_info locals[number_of_locals]; - // u2 number_of_stack_items; - // verification_type_info stack[number_of_stack_items]; - // } - else if (frame_type == 255) { - assert(stackmap_p + 2 + 2 <= stackmap_end, - "no room for smallest full_frame"); - stackmap_p += 2; - - u2 number_of_locals = Bytes::get_Java_u2(stackmap_p); - stackmap_p += 2; - - for (u2 locals_i = 0; locals_i < number_of_locals; locals_i++) { - rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end, - calc_number_of_entries, frame_type, THREAD); - } - - // Use the largest size for the number_of_stack_items, but only get - // the right number of bytes. - u2 number_of_stack_items = Bytes::get_Java_u2(stackmap_p); - stackmap_p += 2; - - for (u2 stack_i = 0; stack_i < number_of_stack_items; stack_i++) { - rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end, - calc_number_of_entries, frame_type, THREAD); - } - } - } // end while there is a stack_map_frame - assert(number_of_entries == calc_number_of_entries, "sanity check"); -} // end rewrite_cp_refs_in_stack_map_table() - - -// Rewrite constant pool references in the verification type info -// portion of the method's stackmap table. These "structures" are -// adapted from the StackMapTable_attribute that is described in -// section 4.8.4 of the 6.0 version of the VM spec (dated 2005.10.26): -// file:///net/quincunx.sfbay/export/gbracha/ClassFile-Java6.pdf -// -// The verification_type_info structure is a u1 tag followed by 0 or -// more bytes of data: -// -// union verification_type_info { -// Top_variable_info; -// Integer_variable_info; -// Float_variable_info; -// Long_variable_info; -// Double_variable_info; -// Null_variable_info; -// UninitializedThis_variable_info; -// Object_variable_info; -// Uninitialized_variable_info; -// } -// -void VM_RedefineClasses::rewrite_cp_refs_in_verification_type_info( - address& stackmap_p_ref, address stackmap_end, u2 frame_i, - u1 frame_type, TRAPS) { - - assert(stackmap_p_ref + 1 <= stackmap_end, "no room for tag"); - u1 tag = *stackmap_p_ref; - stackmap_p_ref++; - - switch (tag) { - // Top_variable_info { - // u1 tag = ITEM_Top; /* 0 */ - // } - // verificationType.hpp has zero as ITEM_Bogus instead of ITEM_Top - case 0: // fall through - - // Integer_variable_info { - // u1 tag = ITEM_Integer; /* 1 */ - // } - case ITEM_Integer: // fall through - - // Float_variable_info { - // u1 tag = ITEM_Float; /* 2 */ - // } - case ITEM_Float: // fall through - - // Double_variable_info { - // u1 tag = ITEM_Double; /* 3 */ - // } - case ITEM_Double: // fall through - - // Long_variable_info { - // u1 tag = ITEM_Long; /* 4 */ - // } - case ITEM_Long: // fall through - - // Null_variable_info { - // u1 tag = ITEM_Null; /* 5 */ - // } - case ITEM_Null: // fall through - - // UninitializedThis_variable_info { - // u1 tag = ITEM_UninitializedThis; /* 6 */ - // } - case ITEM_UninitializedThis: - // nothing more to do for the above tag types - break; + for (int i=0; i<_updated_oops->length(); i++) { + assert(_updated_oops->at(i) != NULL, "must not be null!"); + Handle cur(_updated_oops->at(i)); + instanceKlassHandle klass(cur->klass()); - // Object_variable_info { - // u1 tag = ITEM_Object; /* 7 */ - // u2 cpool_index; - // } - case ITEM_Object: - { - assert(stackmap_p_ref + 2 <= stackmap_end, "no room for cpool_index"); - u2 cpool_index = Bytes::get_Java_u2(stackmap_p_ref); - u2 new_cp_index = find_new_index(cpool_index); - if (new_cp_index != 0) { - 
RC_TRACE_WITH_THREAD(0x04000000, THREAD, - ("mapped old cpool_index=%d", cpool_index)); - Bytes::put_Java_u2(stackmap_p_ref, new_cp_index); - cpool_index = new_cp_index; - } - stackmap_p_ref += 2; - - RC_TRACE_WITH_THREAD(0x04000000, THREAD, - ("frame_i=%u, frame_type=%u, cpool_index=%d", frame_i, - frame_type, cpool_index)); - } break; - - // Uninitialized_variable_info { - // u1 tag = ITEM_Uninitialized; /* 8 */ - // u2 offset; - // } - case ITEM_Uninitialized: - assert(stackmap_p_ref + 2 <= stackmap_end, "no room for offset"); - stackmap_p_ref += 2; - break; + if (klass->check_redefinition_flag(Klass::HasInstanceTransformer)) { - default: - RC_TRACE_WITH_THREAD(0x04000000, THREAD, - ("frame_i=%u, frame_type=%u, bad tag=0x%x", frame_i, frame_type, tag)); - ShouldNotReachHere(); - break; - } // end switch (tag) -} // end rewrite_cp_refs_in_verification_type_info() - - -// Change the constant pool associated with klass scratch_class to -// scratch_cp. If shrink is true, then scratch_cp_length elements -// are copied from scratch_cp to a smaller constant pool and the -// smaller constant pool is associated with scratch_class. -void VM_RedefineClasses::set_new_constant_pool( - instanceKlassHandle scratch_class, constantPoolHandle scratch_cp, - int scratch_cp_length, bool shrink, TRAPS) { - assert(!shrink || scratch_cp->length() >= scratch_cp_length, "sanity check"); - - if (shrink) { - // scratch_cp is a merged constant pool and has enough space for a - // worst case merge situation. We want to associate the minimum - // sized constant pool with the klass to save space. - constantPoolHandle smaller_cp(THREAD, - oopFactory::new_constantPool(scratch_cp_length, - oopDesc::IsUnsafeConc, - THREAD)); - // preserve orig_length() value in the smaller copy - int orig_length = scratch_cp->orig_length(); - assert(orig_length != 0, "sanity check"); - smaller_cp->set_orig_length(orig_length); - scratch_cp->copy_cp_to(1, scratch_cp_length - 1, smaller_cp, 1, THREAD); - scratch_cp = smaller_cp; - smaller_cp()->set_is_conc_safe(true); - } - - // attach new constant pool to klass - scratch_cp->set_pool_holder(scratch_class()); - - // attach klass to new constant pool - scratch_class->set_constants(scratch_cp()); - - int i; // for portability - - // update each field in klass to use new constant pool indices as needed - for (JavaFieldStream fs(scratch_class); !fs.done(); fs.next()) { - jshort cur_index = fs.name_index(); - jshort new_index = find_new_index(cur_index); - if (new_index != 0) { - RC_TRACE_WITH_THREAD(0x00080000, THREAD, - ("field-name_index change: %d to %d", cur_index, new_index)); - fs.set_name_index(new_index); - } - cur_index = fs.signature_index(); - new_index = find_new_index(cur_index); - if (new_index != 0) { - RC_TRACE_WITH_THREAD(0x00080000, THREAD, - ("field-signature_index change: %d to %d", cur_index, new_index)); - fs.set_signature_index(new_index); - } - cur_index = fs.initval_index(); - new_index = find_new_index(cur_index); - if (new_index != 0) { - RC_TRACE_WITH_THREAD(0x00080000, THREAD, - ("field-initval_index change: %d to %d", cur_index, new_index)); - fs.set_initval_index(new_index); - } - cur_index = fs.generic_signature_index(); - new_index = find_new_index(cur_index); - if (new_index != 0) { - RC_TRACE_WITH_THREAD(0x00080000, THREAD, - ("field-generic_signature change: %d to %d", cur_index, new_index)); - fs.set_generic_signature_index(new_index); - } - } // end for each field - - // Update constant pool indices in the inner classes info to use - // new constant indices 
as needed. The inner classes info is a - // quadruple: - // (inner_class_info, outer_class_info, inner_name, inner_access_flags) - InnerClassesIterator iter(scratch_class); - for (; !iter.done(); iter.next()) { - int cur_index = iter.inner_class_info_index(); - if (cur_index == 0) { - continue; // JVM spec. allows null inner class refs so skip it - } - int new_index = find_new_index(cur_index); - if (new_index != 0) { - RC_TRACE_WITH_THREAD(0x00080000, THREAD, - ("inner_class_info change: %d to %d", cur_index, new_index)); - iter.set_inner_class_info_index(new_index); - } - cur_index = iter.outer_class_info_index(); - new_index = find_new_index(cur_index); - if (new_index != 0) { - RC_TRACE_WITH_THREAD(0x00080000, THREAD, - ("outer_class_info change: %d to %d", cur_index, new_index)); - iter.set_outer_class_info_index(new_index); - } - cur_index = iter.inner_name_index(); - new_index = find_new_index(cur_index); - if (new_index != 0) { - RC_TRACE_WITH_THREAD(0x00080000, THREAD, - ("inner_name change: %d to %d", cur_index, new_index)); - iter.set_inner_name_index(new_index); - } - } // end for each inner class - - // Attach each method in klass to the new constant pool and update - // to use new constant pool indices as needed: - objArrayHandle methods(THREAD, scratch_class->methods()); - for (i = methods->length() - 1; i >= 0; i--) { - methodHandle method(THREAD, (methodOop)methods->obj_at(i)); - method->set_constants(scratch_cp()); - - int new_index = find_new_index(method->name_index()); - if (new_index != 0) { - RC_TRACE_WITH_THREAD(0x00080000, THREAD, - ("method-name_index change: %d to %d", method->name_index(), - new_index)); - method->set_name_index(new_index); - } - new_index = find_new_index(method->signature_index()); - if (new_index != 0) { - RC_TRACE_WITH_THREAD(0x00080000, THREAD, - ("method-signature_index change: %d to %d", - method->signature_index(), new_index)); - method->set_signature_index(new_index); - } - new_index = find_new_index(method->generic_signature_index()); - if (new_index != 0) { - RC_TRACE_WITH_THREAD(0x00080000, THREAD, - ("method-generic_signature_index change: %d to %d", - method->generic_signature_index(), new_index)); - method->set_generic_signature_index(new_index); - } - - // Update constant pool indices in the method's checked exception - // table to use new constant indices as needed. - int cext_length = method->checked_exceptions_length(); - if (cext_length > 0) { - CheckedExceptionElement * cext_table = - method->checked_exceptions_start(); - for (int j = 0; j < cext_length; j++) { - int cur_index = cext_table[j].class_cp_index; - int new_index = find_new_index(cur_index); - if (new_index != 0) { - RC_TRACE_WITH_THREAD(0x00080000, THREAD, - ("cext-class_cp_index change: %d to %d", cur_index, new_index)); - cext_table[j].class_cp_index = (u2)new_index; - } - } // end for each checked exception table entry - } // end if there are checked exception table entries - - // Update each catch type index in the method's exception table - // to use new constant pool indices as needed. 
The exception table - // holds quadruple entries of the form: - // (beg_bci, end_bci, handler_bci, klass_index) - - ExceptionTable ex_table(method()); - int ext_length = ex_table.length(); - - for (int j = 0; j < ext_length; j ++) { - int cur_index = ex_table.catch_type_index(j); - int new_index = find_new_index(cur_index); - if (new_index != 0) { - RC_TRACE_WITH_THREAD(0x00080000, THREAD, - ("ext-klass_index change: %d to %d", cur_index, new_index)); - ex_table.set_catch_type_index(j, new_index); - } - } // end for each exception table entry - - // Update constant pool indices in the method's local variable - // table to use new constant indices as needed. The local variable - // table hold sextuple entries of the form: - // (start_pc, length, name_index, descriptor_index, signature_index, slot) - int lvt_length = method->localvariable_table_length(); - if (lvt_length > 0) { - LocalVariableTableElement * lv_table = - method->localvariable_table_start(); - for (int j = 0; j < lvt_length; j++) { - int cur_index = lv_table[j].name_cp_index; - int new_index = find_new_index(cur_index); - if (new_index != 0) { - RC_TRACE_WITH_THREAD(0x00080000, THREAD, - ("lvt-name_cp_index change: %d to %d", cur_index, new_index)); - lv_table[j].name_cp_index = (u2)new_index; - } - cur_index = lv_table[j].descriptor_cp_index; - new_index = find_new_index(cur_index); - if (new_index != 0) { - RC_TRACE_WITH_THREAD(0x00080000, THREAD, - ("lvt-descriptor_cp_index change: %d to %d", cur_index, - new_index)); - lv_table[j].descriptor_cp_index = (u2)new_index; - } - cur_index = lv_table[j].signature_cp_index; - new_index = find_new_index(cur_index); - if (new_index != 0) { - RC_TRACE_WITH_THREAD(0x00080000, THREAD, - ("lvt-signature_cp_index change: %d to %d", cur_index, new_index)); - lv_table[j].signature_cp_index = (u2)new_index; - } - } // end for each local variable table entry - } // end if there are local variable table entries + methodHandle method = instanceTransformerMethods.at(klass->redefinition_index()); - rewrite_cp_refs_in_stack_map_table(method, THREAD); - } // end for each method - assert(scratch_cp()->is_conc_safe(), "Just checking"); -} // end set_new_constant_pool() + RC_TRACE(0x00008000, ("executing transformer method")); + + Thread *__the_thread__ = Thread::current(); + JavaValue result(T_VOID); + JavaCallArguments args(cur); + JavaCalls::call(&result, + method, + &args, + THREAD); + // TODO: What to do with an exception here? + if (HAS_PENDING_EXCEPTION) { + Symbol* ex_name = PENDING_EXCEPTION->klass()->klass_part()->name(); + RC_TRACE(0x00000002, ("exception when executing transformer: '%s'", + ex_name->as_C_string())); + CLEAR_PENDING_EXCEPTION; + } + } + } -// Unevolving classes may point to methods of the_class directly -// from their constant pool caches, itables, and/or vtables. We -// use the SystemDictionary::classes_do() facility and this helper -// to fix up these pointers. -// -// Note: We currently don't support updating the vtable in -// arrayKlassOops. See Open Issues in jvmtiRedefineClasses.hpp. -void VM_RedefineClasses::adjust_cpool_cache_and_vtable(klassOop k_oop, - oop initiating_loader, TRAPS) { - Klass *k = k_oop->klass_part(); - if (k->oop_is_instance()) { - HandleMark hm(THREAD); - instanceKlass *ik = (instanceKlass *) k; + delete _updated_oops; + _updated_oops = NULL; + } - // HotSpot specific optimization! HotSpot does not currently - // support delegation from the bootstrap class loader to a - // user-defined class loader. 
This means that if the bootstrap - // class loader is the initiating class loader, then it will also - // be the defining class loader. This also means that classes - // loaded by the bootstrap class loader cannot refer to classes - // loaded by a user-defined class loader. Note: a user-defined - // class loader can delegate to the bootstrap class loader. - // - // If the current class being redefined has a user-defined class - // loader as its defining class loader, then we can skip all - // classes loaded by the bootstrap class loader. - bool is_user_defined = - instanceKlass::cast(_the_class_oop)->class_loader() != NULL; - if (is_user_defined && ik->class_loader() == NULL) { - return; - } + // Free the array of scratch classes + delete _new_classes; + _new_classes = NULL; + RC_TRACE(0x00000001, ("Redefinition finished!")); - // This is a very busy routine. We don't want too much tracing - // printed out. - bool trace_name_printed = false; - - // Very noisy: only enable this call if you are trying to determine - // that a specific class gets found by this routine. - // RC_TRACE macro has an embedded ResourceMark - // RC_TRACE_WITH_THREAD(0x00100000, THREAD, - // ("adjust check: name=%s", ik->external_name())); - // trace_name_printed = true; - - // Fix the vtable embedded in the_class and subclasses of the_class, - // if one exists. We discard scratch_class and we don't keep an - // instanceKlass around to hold obsolete methods so we don't have - // any other instanceKlass embedded vtables to update. The vtable - // holds the methodOops for virtual (but not final) methods. - if (ik->vtable_length() > 0 && ik->is_subtype_of(_the_class_oop)) { - // ik->vtable() creates a wrapper object; rm cleans it up - ResourceMark rm(THREAD); - ik->vtable()->adjust_method_entries(_matching_old_methods, - _matching_new_methods, - _matching_methods_length, - &trace_name_printed); - } - - // If the current class has an itable and we are either redefining an - // interface or if the current class is a subclass of the_class, then - // we potentially have to fix the itable. If we are redefining an - // interface, then we have to call adjust_method_entries() for - // every instanceKlass that has an itable since there isn't a - // subclass relationship between an interface and an instanceKlass. - if (ik->itable_length() > 0 && (Klass::cast(_the_class_oop)->is_interface() - || ik->is_subclass_of(_the_class_oop))) { - // ik->itable() creates a wrapper object; rm cleans it up - ResourceMark rm(THREAD); - ik->itable()->adjust_method_entries(_matching_old_methods, - _matching_new_methods, - _matching_methods_length, - &trace_name_printed); - } - - // The constant pools in other classes (other_cp) can refer to - // methods in the_class. We have to update method information in - // other_cp's cache. If other_cp has a previous version, then we - // have to repeat the process for each previous version. The - // constant pool cache holds the methodOops for non-virtual - // methods and for virtual, final methods. - // - // Special case: if the current class is the_class, then new_cp - // has already been attached to the_class and old_cp has already - // been added as a previous version. The new_cp doesn't have any - // cached references to old methods so it doesn't need to be - // updated. We can simply start with the previous version(s) in - // that case. 
- constantPoolHandle other_cp; - constantPoolCacheOop cp_cache; - - if (k_oop != _the_class_oop) { - // this klass' constant pool cache may need adjustment - other_cp = constantPoolHandle(ik->constants()); - cp_cache = other_cp->cache(); - if (cp_cache != NULL) { - cp_cache->adjust_method_entries(_matching_old_methods, - _matching_new_methods, - _matching_methods_length, - &trace_name_printed); - } - } - { - ResourceMark rm(THREAD); - // PreviousVersionInfo objects returned via PreviousVersionWalker - // contain a GrowableArray of handles. We have to clean up the - // GrowableArray _after_ the PreviousVersionWalker destructor - // has destroyed the handles. - { - // the previous versions' constant pool caches may need adjustment - PreviousVersionWalker pvw(ik); - for (PreviousVersionInfo * pv_info = pvw.next_previous_version(); - pv_info != NULL; pv_info = pvw.next_previous_version()) { - other_cp = pv_info->prev_constant_pool_handle(); - cp_cache = other_cp->cache(); - if (cp_cache != NULL) { - cp_cache->adjust_method_entries(_matching_old_methods, - _matching_new_methods, - _matching_methods_length, - &trace_name_printed); - } - } - } // pvw is cleaned up - } // rm is cleaned up + RC_TIMER_STOP(_timer_vm_op_epilogue); +} + +bool VM_RedefineClasses::is_modifiable_class(oop klass_mirror) { + // classes for primitives cannot be redefined + if (java_lang_Class::is_primitive(klass_mirror)) { + return false; + } + klassOop the_class_oop = java_lang_Class::as_klassOop(klass_mirror); + // classes for arrays cannot be redefined + if (the_class_oop == NULL || !Klass::cast(the_class_oop)->oop_is_instance()) { + return false; } + return true; } -void VM_RedefineClasses::update_jmethod_ids() { - for (int j = 0; j < _matching_methods_length; ++j) { - methodOop old_method = _matching_old_methods[j]; - jmethodID jmid = old_method->find_jmethod_id_or_null(); - if (jmid != NULL) { - // There is a jmethodID, change it to point to the new method - methodHandle new_method_h(_matching_new_methods[j]); - JNIHandles::change_method_associated_with_jmethod_id(jmid, new_method_h); - assert(JNIHandles::resolve_jmethod_id(jmid) == _matching_new_methods[j], - "should be replaced"); - } +#ifdef ASSERT + +void VM_RedefineClasses::verify_classes(klassOop k_oop_latest, oop initiating_loader, TRAPS) { + klassOop k_oop = k_oop_latest; + while (k_oop != NULL) { + + instanceKlassHandle k_handle(THREAD, k_oop); + Verifier::verify(k_handle, Verifier::ThrowException, true, true, THREAD); + k_oop = k_oop->klass_part()->old_version(); } } -void VM_RedefineClasses::check_methods_and_mark_as_obsolete( - BitMap *emcp_methods, int * emcp_method_count_p) { - *emcp_method_count_p = 0; - int obsolete_count = 0; - int old_index = 0; - for (int j = 0; j < _matching_methods_length; ++j, ++old_index) { - methodOop old_method = _matching_old_methods[j]; - methodOop new_method = _matching_new_methods[j]; - methodOop old_array_method; - - // Maintain an old_index into the _old_methods array by skipping - // deleted methods - while ((old_array_method = (methodOop) _old_methods->obj_at(old_index)) - != old_method) { - ++old_index; - } - - if (MethodComparator::methods_EMCP(old_method, new_method)) { - // The EMCP definition from JSR-163 requires the bytecodes to be - // the same with the exception of constant pool indices which may - // differ. However, the constants referred to by those indices - // must be the same. 
- // - // We use methods_EMCP() for comparison since constant pool - // merging can remove duplicate constant pool entries that were - // present in the old method and removed from the rewritten new - // method. A faster binary comparison function would consider the - // old and new methods to be different when they are actually - // EMCP. - // - // The old and new methods are EMCP and you would think that we - // could get rid of one of them here and now and save some space. - // However, the concept of EMCP only considers the bytecodes and - // the constant pool entries in the comparison. Other things, - // e.g., the line number table (LNT) or the local variable table - // (LVT) don't count in the comparison. So the new (and EMCP) - // method can have a new LNT that we need so we can't just - // overwrite the new method with the old method. - // - // When this routine is called, we have already attached the new - // methods to the_class so the old methods are effectively - // overwritten. However, if an old method is still executing, - // then the old method cannot be collected until sometime after - // the old method call has returned. So the overwriting of old - // methods by new methods will save us space except for those - // (hopefully few) old methods that are still executing. - // - // A method refers to a constMethodOop and this presents another - // possible avenue to space savings. The constMethodOop in the - // new method contains possibly new attributes (LNT, LVT, etc). - // At first glance, it seems possible to save space by replacing - // the constMethodOop in the old method with the constMethodOop - // from the new method. The old and new methods would share the - // same constMethodOop and we would save the space occupied by - // the old constMethodOop. However, the constMethodOop contains - // a back reference to the containing method. Sharing the - // constMethodOop between two methods could lead to confusion in - // the code that uses the back reference. This would lead to - // brittle code that could be broken in non-obvious ways now or - // in the future. - // - // Another possibility is to copy the constMethodOop from the new - // method to the old method and then overwrite the new method with - // the old method. Since the constMethodOop contains the bytecodes - // for the method embedded in the oop, this option would change - // the bytecodes out from under any threads executing the old - // method and make the thread's bcp invalid. Since EMCP requires - // that the bytecodes be the same modulo constant pool indices, it - // is straight forward to compute the correct new bcp in the new - // constMethodOop from the old bcp in the old constMethodOop. The - // time consuming part would be searching all the frames in all - // of the threads to find all of the calls to the old method. - // - // It looks like we will have to live with the limited savings - // that we get from effectively overwriting the old methods - // when the new methods are attached to the_class. - - // track which methods are EMCP for add_previous_version() call - emcp_methods->set_bit(old_index); - (*emcp_method_count_p)++; - - // An EMCP method is _not_ obsolete. An obsolete method has a - // different jmethodID than the current method. An EMCP method - // has the same jmethodID as the current method. Having the - // same jmethodID for all EMCP versions of a method allows for - // a consistent view of the EMCP methods regardless of which - // EMCP method you happen to have in hand. 
For example, a - // breakpoint set in one EMCP method will work for all EMCP - // versions of the method including the current one. - } else { - // mark obsolete methods as such - old_method->set_is_obsolete(); - obsolete_count++; +#endif - // obsolete methods need a unique idnum - u2 num = instanceKlass::cast(_the_class_oop)->next_method_idnum(); - if (num != constMethodOopDesc::UNSET_IDNUM) { -// u2 old_num = old_method->method_idnum(); - old_method->set_method_idnum(num); -// TO DO: attach obsolete annotations to obsolete method's new idnum +// Rewrite faster byte-codes back to their slower equivalent. Undoes rewriting happening in templateTable_xxx.cpp +// The reason is that once we zero cpool caches, we need to re-resolve all entries again. Faster bytecodes do not +// do that, they assume that cache entry is resolved already. +static void unpatch_bytecode(methodOop method) { + RawBytecodeStream bcs(method); + Bytecodes::Code code; + Bytecodes::Code java_code; + while (!bcs.is_last_bytecode()) { + code = bcs.raw_next(); + address bcp = bcs.bcp(); + + if (code == Bytecodes::_breakpoint) { + int bci = method->bci_from(bcp); + code = method->orig_bytecode_at(bci); + java_code = Bytecodes::java_code(code); + if (code != java_code && + (java_code == Bytecodes::_getfield || + java_code == Bytecodes::_putfield || + java_code == Bytecodes::_aload_0)) { + // Let breakpoint table handling unpatch bytecode + method->set_orig_bytecode_at(bci, java_code); } - // With tracing we try not to "yack" too much. The position of - // this trace assumes there are fewer obsolete methods than - // EMCP methods. - RC_TRACE(0x00000100, ("mark %s(%s) as obsolete", - old_method->name()->as_C_string(), - old_method->signature()->as_C_string())); + } else { + java_code = Bytecodes::java_code(code); + if (code != java_code && + (java_code == Bytecodes::_getfield || + java_code == Bytecodes::_putfield || + java_code == Bytecodes::_aload_0)) { + *bcp = java_code; + } + } + + // Additionally, we need to unpatch bytecode at bcp+1 for fast_xaccess (which would be fast field access) + if (code == Bytecodes::_fast_iaccess_0 || code == Bytecodes::_fast_aaccess_0 || code == Bytecodes::_fast_faccess_0) { + Bytecodes::Code code2 = Bytecodes::code_or_bp_at(bcp + 1); + assert(code2 == Bytecodes::_fast_igetfield || + code2 == Bytecodes::_fast_agetfield || + code2 == Bytecodes::_fast_fgetfield, ""); + *(bcp + 1) = Bytecodes::java_code(code2); } - old_method->set_is_old(); - } - for (int i = 0; i < _deleted_methods_length; ++i) { - methodOop old_method = _deleted_methods[i]; - - assert(old_method->vtable_index() < 0, - "cannot delete methods with vtable entries");; - - // Mark all deleted methods as old and obsolete - old_method->set_is_old(); - old_method->set_is_obsolete(); - ++obsolete_count; - // With tracing we try not to "yack" too much. The position of - // this trace assumes there are fewer obsolete methods than - // EMCP methods. - RC_TRACE(0x00000100, ("mark deleted %s(%s) as obsolete", - old_method->name()->as_C_string(), - old_method->signature()->as_C_string())); - } - assert((*emcp_method_count_p + obsolete_count) == _old_methods->length(), - "sanity check"); - RC_TRACE(0x00000100, ("EMCP_cnt=%d, obsolete_cnt=%d", *emcp_method_count_p, - obsolete_count)); + } } -// This internal class transfers the native function registration from old methods -// to new methods. It is designed to handle both the simple case of unchanged -// native methods and the complex cases of native method prefixes being added and/or -// removed. 
-// It expects only to be used during the VM_RedefineClasses op (a safepoint). -// -// This class is used after the new methods have been installed in "the_class". -// -// So, for example, the following must be handled. Where 'm' is a method and -// a number followed by an underscore is a prefix. -// -// Old Name New Name -// Simple transfer to new method m -> m -// Add prefix m -> 1_m -// Remove prefix 1_m -> m -// Simultaneous add of prefixes m -> 3_2_1_m -// Simultaneous removal of prefixes 3_2_1_m -> m -// Simultaneous add and remove 1_m -> 2_m -// Same, caused by prefix removal only 3_2_1_m -> 3_2_m +// Unevolving classes may point to old methods directly +// from their constant pool caches, itables, and/or vtables. We +// use the SystemDictionary::classes_do() facility and this helper +// to fix up these pointers. Additional field offsets and vtable indices +// in the constant pool cache entries are fixed. // -class TransferNativeFunctionRegistration { - private: - instanceKlassHandle the_class; - int prefix_count; - char** prefixes; +// Note: We currently don't support updating the vtable in +// arrayKlassOops. See Open Issues in jvmtiRedefineClasses.hpp. +void VM_RedefineClasses::adjust_cpool_cache(klassOop k_oop_latest, oop initiating_loader, TRAPS) { + klassOop k_oop = k_oop_latest; + while (k_oop != NULL) { + //tty->print_cr("name=%s", k_oop->klass_part()->name()->as_C_string()); +/* + methodOop *matching_old_methods = NEW_RESOURCE_ARRAY(methodOop, _old_methods->length()); + methodOop *matching_new_methods = NEW_RESOURCE_ARRAY(methodOop, _old_methods->length()); - // Recursively search the binary tree of possibly prefixed method names. - // Iteration could be used if all agents were well behaved. Full tree walk is - // more resilent to agents not cleaning up intermediate methods. - // Branch at each depth in the binary tree is: - // (1) without the prefix. - // (2) with the prefix. - // where 'prefix' is the prefix at that 'depth' (first prefix, second prefix,...) - methodOop search_prefix_name_space(int depth, char* name_str, size_t name_len, - Symbol* signature) { - TempNewSymbol name_symbol = SymbolTable::probe(name_str, (int)name_len); - if (name_symbol != NULL) { - methodOop method = Klass::cast(the_class())->lookup_method(name_symbol, signature); - if (method != NULL) { - // Even if prefixed, intermediate methods must exist. - if (method->is_native()) { - // Wahoo, we found a (possibly prefixed) version of the method, return it. - return method; - } - if (depth < prefix_count) { - // Try applying further prefixes (other than this one). - method = search_prefix_name_space(depth+1, name_str, name_len, signature); - if (method != NULL) { - return method; // found - } + for (int i=0; i<_matching_methods_length; i++) { + matching_old_methods[i] = (methodOop)_old_methods->obj_at(_matching_old_methods[i]); + matching_new_methods[i] = (methodOop)_new_methods->obj_at(_matching_new_methods[i]); + }*/ - // Try adding this prefix to the method name and see if it matches - // another method name. 
- char* prefix = prefixes[depth]; - size_t prefix_len = strlen(prefix); - size_t trial_len = name_len + prefix_len; - char* trial_name_str = NEW_RESOURCE_ARRAY(char, trial_len + 1); - strcpy(trial_name_str, prefix); - strcat(trial_name_str, name_str); - method = search_prefix_name_space(depth+1, trial_name_str, trial_len, - signature); - if (method != NULL) { - // If found along this branch, it was prefixed, mark as such - method->set_is_prefixed_native(); - return method; // found - } - } - } - } - return NULL; // This whole branch bore nothing - } + Klass *k = k_oop->klass_part(); + if (k->oop_is_instance()) { + HandleMark hm(THREAD); + instanceKlass *ik = (instanceKlass *) k; - // Return the method name with old prefixes stripped away. - char* method_name_without_prefixes(methodOop method) { - Symbol* name = method->name(); - char* name_str = name->as_utf8(); + constantPoolHandle other_cp; + constantPoolCacheOop cp_cache; - // Old prefixing may be defunct, strip prefixes, if any. - for (int i = prefix_count-1; i >= 0; i--) { - char* prefix = prefixes[i]; - size_t prefix_len = strlen(prefix); - if (strncmp(prefix, name_str, prefix_len) == 0) { - name_str += prefix_len; - } - } - return name_str; - } + other_cp = constantPoolHandle(ik->constants()); - // Strip any prefixes off the old native method, then try to find a - // (possibly prefixed) new native that matches it. - methodOop strip_and_search_for_new_native(methodOop method) { - ResourceMark rm; - char* name_str = method_name_without_prefixes(method); - return search_prefix_name_space(0, name_str, strlen(name_str), - method->signature()); - } + for (int i=0; i<other_cp->length(); i++) { + if (other_cp->tag_at(i).is_klass()) { + klassOop klass = other_cp->klass_at(i, THREAD); + if (klass->klass_part()->new_version() != NULL) { - public: + // (tw) TODO: check why/if this is necessary + other_cp->klass_at_put(i, klass->klass_part()->new_version()); + } + klass = other_cp->klass_at(i, THREAD); + assert(klass->klass_part()->new_version() == NULL, "Must be new klass!"); + } + } - // Construct a native method transfer processor for this class. 
- TransferNativeFunctionRegistration(instanceKlassHandle _the_class) { - assert(SafepointSynchronize::is_at_safepoint(), "sanity check"); + cp_cache = other_cp->cache(); - the_class = _the_class; - prefixes = JvmtiExport::get_all_native_method_prefixes(&prefix_count); + if (cp_cache != NULL) { + cp_cache->adjust_entries(NULL, + NULL, + 0); + } + + // If bytecode rewriting is enabled, we also need to unpatch bytecode to force resolution of zeroed entries + if (RewriteBytecodes) { + ik->methods_do(unpatch_bytecode); + } + } + k_oop = k_oop->klass_part()->old_version(); } +} - // Attempt to transfer any of the old or deleted methods that are native - void transfer_registrations(methodOop* old_methods, int methods_length) { - for (int j = 0; j < methods_length; j++) { - methodOop old_method = old_methods[j]; +void VM_RedefineClasses::update_jmethod_ids() { + for (int j = 0; j < _matching_methods_length; ++j) { + methodOop old_method = (methodOop)_old_methods->obj_at(_matching_old_methods[j]); + RC_TRACE(0x00008000, ("matching method %s", old_method->name_and_sig_as_C_string())); + + jmethodID jmid = old_method->find_jmethod_id_or_null(); + if (old_method->new_version() != NULL && jmid == NULL) { + // (tw) Have to create jmethodID in this case + jmid = old_method->jmethod_id(); + } + + if (jmid != NULL) { + // There is a jmethodID, change it to point to the new method + methodHandle new_method_h((methodOop)_new_methods->obj_at(_matching_new_methods[j])); + if (old_method->new_version() == NULL) { + methodHandle old_method_h((methodOop)_old_methods->obj_at(_matching_old_methods[j])); + jmethodID new_jmethod_id = JNIHandles::make_jmethod_id(old_method_h); + bool result = instanceKlass::cast(old_method_h->method_holder())->update_jmethod_id(old_method_h(), new_jmethod_id); + //RC_TRACE(0x00008000, ("Changed jmethodID for old method assigned to %d / result=%d", new_jmethod_id, result); + //RC_TRACE(0x00008000, ("jmethodID new method: %d jmethodID old method: %d", new_method_h->jmethod_id(), old_method->jmethod_id()); + } else { + jmethodID mid = new_method_h->jmethod_id(); + bool result = instanceKlass::cast(new_method_h->method_holder())->update_jmethod_id(new_method_h(), jmid); + //RC_TRACE(0x00008000, ("Changed jmethodID for new method assigned to %d / result=%d", jmid, result); - if (old_method->is_native() && old_method->has_native_function()) { - methodOop new_method = strip_and_search_for_new_native(old_method); - if (new_method != NULL) { - // Actually set the native function in the new method. - // Redefine does not send events (except CFLH), certainly not this - // behind the scenes re-registration. - new_method->set_native_function(old_method->native_function(), - !methodOopDesc::native_bind_event_is_interesting); - } } + JNIHandles::change_method_associated_with_jmethod_id(jmid, new_method_h); + //RC_TRACE(0x00008000, ("changing method associated with jmethod id %d to %s", (int)jmid, new_method_h->name()->as_C_string()); + assert(JNIHandles::resolve_jmethod_id(jmid) == (methodOop)_new_methods->obj_at(_matching_new_methods[j]), "should be replaced"); + jmethodID mid = ((methodOop)_new_methods->obj_at(_matching_new_methods[j]))->jmethod_id(); + assert(JNIHandles::resolve_non_null((jobject)mid) == new_method_h(), "must match!"); + + //RC_TRACE(0x00008000, ("jmethodID new method: %d jmethodID old method: %d", new_method_h->jmethod_id(), old_method->jmethod_id()); } } -}; - -// Don't lose the association between a native method and its JNI function. 
-void VM_RedefineClasses::transfer_old_native_function_registrations(instanceKlassHandle the_class) { - TransferNativeFunctionRegistration transfer(the_class); - transfer.transfer_registrations(_deleted_methods, _deleted_methods_length); - transfer.transfer_registrations(_matching_old_methods, _matching_methods_length); } + // Deoptimize all compiled code that depends on this class. // // If the can_redefine_classes capability is obtained in the onload @@ -2964,7 +2677,10 @@ void VM_RedefineClasses::flush_dependent_code(instanceKlassHandle k_h, TRAPS) { // All dependencies have been recorded from startup or this is a second or // subsequent use of RedefineClasses - if (JvmtiExport::all_dependencies_are_recorded()) { + + // For now deopt all + // (tw) TODO: Improve the dependency system such that we can safely deopt only a subset of the methods + if (0 && JvmtiExport::all_dependencies_are_recorded()) { Universe::flush_evol_dependents_on(k_h); } else { CodeCache::mark_all_nmethods_for_deoptimization(); @@ -2987,10 +2703,10 @@ void VM_RedefineClasses::compute_added_deleted_matching_methods() { methodOop old_method; methodOop new_method; - _matching_old_methods = NEW_RESOURCE_ARRAY(methodOop, _old_methods->length()); - _matching_new_methods = NEW_RESOURCE_ARRAY(methodOop, _old_methods->length()); - _added_methods = NEW_RESOURCE_ARRAY(methodOop, _new_methods->length()); - _deleted_methods = NEW_RESOURCE_ARRAY(methodOop, _old_methods->length()); + _matching_old_methods = NEW_RESOURCE_ARRAY(int, _old_methods->length()); + _matching_new_methods = NEW_RESOURCE_ARRAY(int, _old_methods->length()); + _added_methods = NEW_RESOURCE_ARRAY(int, _new_methods->length()); + _deleted_methods = NEW_RESOURCE_ARRAY(int, _old_methods->length()); _matching_methods_length = 0; _deleted_methods_length = 0; @@ -3005,36 +2721,36 @@ void VM_RedefineClasses::compute_added_deleted_matching_methods() { } // New method at the end new_method = (methodOop) _new_methods->obj_at(nj); - _added_methods[_added_methods_length++] = new_method; + _added_methods[_added_methods_length++] = nj; ++nj; } else if (nj >= _new_methods->length()) { // Old method, at the end, is deleted old_method = (methodOop) _old_methods->obj_at(oj); - _deleted_methods[_deleted_methods_length++] = old_method; + _deleted_methods[_deleted_methods_length++] = oj; ++oj; } else { old_method = (methodOop) _old_methods->obj_at(oj); new_method = (methodOop) _new_methods->obj_at(nj); if (old_method->name() == new_method->name()) { if (old_method->signature() == new_method->signature()) { - _matching_old_methods[_matching_methods_length ] = old_method; - _matching_new_methods[_matching_methods_length++] = new_method; + _matching_old_methods[_matching_methods_length ] = oj;//old_method; + _matching_new_methods[_matching_methods_length++] = nj;//new_method; ++nj; ++oj; } else { // added overloaded have already been moved to the end, // so this is a deleted overloaded method - _deleted_methods[_deleted_methods_length++] = old_method; + _deleted_methods[_deleted_methods_length++] = oj;//old_method; ++oj; } } else { // names don't match if (old_method->name()->fast_compare(new_method->name()) > 0) { // new method - _added_methods[_added_methods_length++] = new_method; + _added_methods[_added_methods_length++] = nj;//new_method; ++nj; } else { // deleted method - _deleted_methods[_deleted_methods_length++] = old_method; + _deleted_methods[_deleted_methods_length++] = oj;//old_method; ++oj; } } @@ -3042,6 +2758,8 @@ void 
VM_RedefineClasses::compute_added_deleted_matching_methods() { } assert(_matching_methods_length + _deleted_methods_length == _old_methods->length(), "sanity"); assert(_matching_methods_length + _added_methods_length == _new_methods->length(), "sanity"); + RC_TRACE(0x00008000, ("Matching methods = %d / deleted methods = %d / added methods = %d", + _matching_methods_length, _deleted_methods_length, _added_methods_length)); } @@ -3049,287 +2767,184 @@ void VM_RedefineClasses::compute_added_deleted_matching_methods() { // Install the redefinition of a class: // - house keeping (flushing breakpoints and caches, deoptimizing // dependent compiled code) -// - replacing parts in the_class with parts from scratch_class -// - adding a weak reference to track the obsolete but interesting -// parts of the_class // - adjusting constant pool caches and vtables in other classes -// that refer to methods in the_class. These adjustments use the -// SystemDictionary::classes_do() facility which only allows -// a helper method to be specified. The interesting parameters -// that we would like to pass to the helper method are saved in -// static global fields in the VM operation. -void VM_RedefineClasses::redefine_single_class(jclass the_jclass, - instanceKlassHandle scratch_class, TRAPS) { +void VM_RedefineClasses::redefine_single_class(instanceKlassHandle the_new_class, TRAPS) { + + ResourceMark rm(THREAD); - RC_TIMER_START(_timer_rsc_phase1); + assert(the_new_class->old_version() != NULL, "Must not be null"); + assert(the_new_class->old_version()->klass_part()->new_version() == the_new_class(), "Must equal"); - oop the_class_mirror = JNIHandles::resolve_non_null(the_jclass); - klassOop the_class_oop = java_lang_Class::as_klassOop(the_class_mirror); - instanceKlassHandle the_class = instanceKlassHandle(THREAD, the_class_oop); + instanceKlassHandle the_old_class = instanceKlassHandle(THREAD, the_new_class->old_version()); +#ifndef JVMTI_KERNEL // Remove all breakpoints in methods of this class JvmtiBreakpoints& jvmti_breakpoints = JvmtiCurrentBreakpoints::get_jvmti_breakpoints(); - jvmti_breakpoints.clearall_in_class_at_safepoint(the_class_oop); + jvmti_breakpoints.clearall_in_class_at_safepoint(the_old_class()); +#endif // !JVMTI_KERNEL - if (the_class_oop == Universe::reflect_invoke_cache()->klass()) { + if (the_old_class() == Universe::reflect_invoke_cache()->klass()) { // We are redefining java.lang.reflect.Method. Method.invoke() is // cached and users of the cache care about each active version of // the method so we have to track this previous version. // Do this before methods get switched Universe::reflect_invoke_cache()->add_previous_version( - the_class->method_with_idnum(Universe::reflect_invoke_cache()->method_idnum())); + the_old_class->method_with_idnum(Universe::reflect_invoke_cache()->method_idnum())); } - // Deoptimize all compiled code that depends on this class - flush_dependent_code(the_class, THREAD); - - _old_methods = the_class->methods(); - _new_methods = scratch_class->methods(); - _the_class_oop = the_class_oop; + _old_methods = the_old_class->methods(); + _new_methods = the_new_class->methods(); + _the_class_oop = the_old_class(); compute_added_deleted_matching_methods(); - update_jmethod_ids(); - - // Attach new constant pool to the original klass. The original - // klass still refers to the old constant pool (for now). 
- scratch_class->constants()->set_pool_holder(the_class()); - -#if 0 - // In theory, with constant pool merging in place we should be able - // to save space by using the new, merged constant pool in place of - // the old constant pool(s). By "pool(s)" I mean the constant pool in - // the klass version we are replacing now and any constant pool(s) in - // previous versions of klass. Nice theory, doesn't work in practice. - // When this code is enabled, even simple programs throw NullPointer - // exceptions. I'm guessing that this is caused by some constant pool - // cache difference between the new, merged constant pool and the - // constant pool that was just being used by the klass. I'm keeping - // this code around to archive the idea, but the code has to remain - // disabled for now. - - // Attach each old method to the new constant pool. This can be - // done here since we are past the bytecode verification and - // constant pool optimization phases. - for (int i = _old_methods->length() - 1; i >= 0; i--) { - methodOop method = (methodOop)_old_methods->obj_at(i); - method->set_constants(scratch_class->constants()); - } - - { - // walk all previous versions of the klass - instanceKlass *ik = (instanceKlass *)the_class()->klass_part(); - PreviousVersionWalker pvw(ik); - instanceKlassHandle ikh; - do { - ikh = pvw.next_previous_version(); - if (!ikh.is_null()) { - ik = ikh(); - - // attach previous version of klass to the new constant pool - ik->set_constants(scratch_class->constants()); - - // Attach each method in the previous version of klass to the - // new constant pool - objArrayOop prev_methods = ik->methods(); - for (int i = prev_methods->length() - 1; i >= 0; i--) { - methodOop method = (methodOop)prev_methods->obj_at(i); - method->set_constants(scratch_class->constants()); - } - } - } while (!ikh.is_null()); - } -#endif - - // Replace methods and constantpool - the_class->set_methods(_new_methods); - scratch_class->set_methods(_old_methods); // To prevent potential GCing of the old methods, - // and to be able to undo operation easily. - - constantPoolOop old_constants = the_class->constants(); - the_class->set_constants(scratch_class->constants()); - scratch_class->set_constants(old_constants); // See the previous comment. -#if 0 - // We are swapping the guts of "the new class" with the guts of "the - // class". Since the old constant pool has just been attached to "the - // new class", it seems logical to set the pool holder in the old - // constant pool also. However, doing this will change the observable - // class hierarchy for any old methods that are still executing. A - // method can query the identity of its "holder" and this query uses - // the method's constant pool link to find the holder. The change in - // holding class from "the class" to "the new class" can confuse - // things. - // - // Setting the old constant pool's holder will also cause - // verification done during vtable initialization below to fail. - // During vtable initialization, the vtable's class is verified to be - // a subtype of the method's holder. The vtable's class is "the - // class" and the method's holder is gotten from the constant pool - // link in the method itself. For "the class"'s directly implemented - // methods, the method holder is "the class" itself (as gotten from - // the new constant pool). The check works fine in this case. The - // check also works fine for methods inherited from super classes. - // - // Miranda methods are a little more complicated. 
A miranda method is - // provided by an interface when the class implementing the interface - // does not provide its own method. These interfaces are implemented - // internally as an instanceKlass. These special instanceKlasses - // share the constant pool of the class that "implements" the - // interface. By sharing the constant pool, the method holder of a - // miranda method is the class that "implements" the interface. In a - // non-redefine situation, the subtype check works fine. However, if - // the old constant pool's pool holder is modified, then the check - // fails because there is no class hierarchy relationship between the - // vtable's class and "the new class". - - old_constants->set_pool_holder(scratch_class()); -#endif // track which methods are EMCP for add_previous_version() call below - BitMap emcp_methods(_old_methods->length()); + + // (tw) TODO: Check if we need the concept of EMCP? + BitMap emcp_methods(_old_methods->length()); int emcp_method_count = 0; emcp_methods.clear(); // clears 0..(length() - 1) + + // We need to mark methods as old!! check_methods_and_mark_as_obsolete(&emcp_methods, &emcp_method_count); - transfer_old_native_function_registrations(the_class); - - // The class file bytes from before any retransformable agents mucked - // with them was cached on the scratch class, move to the_class. - // Note: we still want to do this if nothing needed caching since it - // should get cleared in the_class too. - if (the_class->get_cached_class_file_bytes() == 0) { - // the_class doesn't have a cache yet so copy it - the_class->set_cached_class_file( - scratch_class->get_cached_class_file_bytes(), - scratch_class->get_cached_class_file_len()); - } -#ifndef PRODUCT - else { - assert(the_class->get_cached_class_file_bytes() == - scratch_class->get_cached_class_file_bytes(), "cache ptrs must match"); - assert(the_class->get_cached_class_file_len() == - scratch_class->get_cached_class_file_len(), "cache lens must match"); - } -#endif - - // Replace inner_classes - typeArrayOop old_inner_classes = the_class->inner_classes(); - the_class->set_inner_classes(scratch_class->inner_classes()); - scratch_class->set_inner_classes(old_inner_classes); - - // Initialize the vtable and interface table after - // methods have been rewritten - { - ResourceMark rm(THREAD); - // no exception should happen here since we explicitly - // do not check loader constraints. - // compare_and_normalize_class_versions has already checked: - // - classloaders unchanged, signatures unchanged - // - all instanceKlasses for redefined classes reused & contents updated - the_class->vtable()->initialize_vtable(false, THREAD); - the_class->itable()->initialize_itable(false, THREAD); - assert(!HAS_PENDING_EXCEPTION || (THREAD->pending_exception()->is_a(SystemDictionary::ThreadDeath_klass())), "redefine exception"); - } - - // Leave arrays of jmethodIDs and itable index cache unchanged - - // Copy the "source file name" attribute from new class version - the_class->set_source_file_name(scratch_class->source_file_name()); - - // Copy the "source debug extension" attribute from new class version - the_class->set_source_debug_extension( - scratch_class->source_debug_extension(), - scratch_class->source_debug_extension() == NULL ? 
0 : - (int)strlen(scratch_class->source_debug_extension())); - - // Use of javac -g could be different in the old and the new - if (scratch_class->access_flags().has_localvariable_table() != - the_class->access_flags().has_localvariable_table()) { - - AccessFlags flags = the_class->access_flags(); - if (scratch_class->access_flags().has_localvariable_table()) { - flags.set_has_localvariable_table(); - } else { - flags.clear_has_localvariable_table(); - } - the_class->set_access_flags(flags); - } - - // Replace class annotation fields values - typeArrayOop old_class_annotations = the_class->class_annotations(); - the_class->set_class_annotations(scratch_class->class_annotations()); - scratch_class->set_class_annotations(old_class_annotations); - - // Replace fields annotation fields values - objArrayOop old_fields_annotations = the_class->fields_annotations(); - the_class->set_fields_annotations(scratch_class->fields_annotations()); - scratch_class->set_fields_annotations(old_fields_annotations); - - // Replace methods annotation fields values - objArrayOop old_methods_annotations = the_class->methods_annotations(); - the_class->set_methods_annotations(scratch_class->methods_annotations()); - scratch_class->set_methods_annotations(old_methods_annotations); - - // Replace methods parameter annotation fields values - objArrayOop old_methods_parameter_annotations = - the_class->methods_parameter_annotations(); - the_class->set_methods_parameter_annotations( - scratch_class->methods_parameter_annotations()); - scratch_class->set_methods_parameter_annotations(old_methods_parameter_annotations); - - // Replace methods default annotation fields values - objArrayOop old_methods_default_annotations = - the_class->methods_default_annotations(); - the_class->set_methods_default_annotations( - scratch_class->methods_default_annotations()); - scratch_class->set_methods_default_annotations(old_methods_default_annotations); - - // Replace minor version number of class file - u2 old_minor_version = the_class->minor_version(); - the_class->set_minor_version(scratch_class->minor_version()); - scratch_class->set_minor_version(old_minor_version); - - // Replace major version number of class file - u2 old_major_version = the_class->major_version(); - the_class->set_major_version(scratch_class->major_version()); - scratch_class->set_major_version(old_major_version); - - // Replace CP indexes for class and name+type of enclosing method - u2 old_class_idx = the_class->enclosing_method_class_index(); - u2 old_method_idx = the_class->enclosing_method_method_index(); - the_class->set_enclosing_method_indices( - scratch_class->enclosing_method_class_index(), - scratch_class->enclosing_method_method_index()); - scratch_class->set_enclosing_method_indices(old_class_idx, old_method_idx); + update_jmethod_ids(); // keep track of previous versions of this class - the_class->add_previous_version(scratch_class, &emcp_methods, + the_new_class->add_previous_version(the_old_class, &emcp_methods, emcp_method_count); - RC_TIMER_STOP(_timer_rsc_phase1); - RC_TIMER_START(_timer_rsc_phase2); + // TODO: + transfer_old_native_function_registrations(the_old_class); - // Adjust constantpool caches and vtables for all classes - // that reference methods of the evolved class. - SystemDictionary::classes_do(adjust_cpool_cache_and_vtable, THREAD); - if (the_class->oop_map_cache() != NULL) { - // Flush references to any obsolete methods from the oop map cache - // so that obsolete methods are not pinned. 
- the_class->oop_map_cache()->flush_obsolete_entries(); +#ifdef ASSERT + +// klassOop systemLookup1 = SystemDictionary::resolve_or_null(the_old_class->name(), the_old_class->class_loader(), the_old_class->protection_domain(), THREAD); +// assert(systemLookup1 == the_new_class(), "New class must be in system dictionary!"); + + //JNIHandles::verify(); + +// klassOop systemLookup = SystemDictionary::resolve_or_null(the_old_class->name(), the_old_class->class_loader(), the_old_class->protection_domain(), THREAD); + +// assert(systemLookup == the_new_class(), "New class must be in system dictionary!"); + assert(the_new_class->old_version() != NULL, "Must not be null"); + assert(the_new_class->old_version()->klass_part()->new_version() == the_new_class(), "Must equal"); + + for (int i=0; i<the_new_class->methods()->length(); i++) { + assert(((methodOop)the_new_class->methods()->obj_at(i))->method_holder() == the_new_class(), "method holder must match!"); } + _old_methods->verify(); + _new_methods->verify(); + + the_new_class->vtable()->verify(tty); + the_old_class->vtable()->verify(tty); + +#endif + // increment the classRedefinedCount field in the_class and in any // direct and indirect subclasses of the_class - increment_class_counter((instanceKlass *)the_class()->klass_part(), THREAD); + increment_class_counter((instanceKlass *)the_old_class()->klass_part(), THREAD); + +} + + +void VM_RedefineClasses::check_methods_and_mark_as_obsolete(BitMap *emcp_methods, int * emcp_method_count_p) { + RC_TRACE(0x00008000, ("Checking matching methods for EMCP")); + *emcp_method_count_p = 0; + int obsolete_count = 0; + int old_index = 0; + for (int j = 0; j < _matching_methods_length; ++j, ++old_index) { + methodOop old_method = (methodOop)_old_methods->obj_at(_matching_old_methods[j]); + methodOop new_method = (methodOop)_new_methods->obj_at(_matching_new_methods[j]); + methodOop old_array_method; + + // Maintain an old_index into the _old_methods array by skipping + // deleted methods + while ((old_array_method = (methodOop) _old_methods->obj_at(old_index)) + != old_method) { + ++old_index; + } + + if (MethodComparator::methods_EMCP(old_method, new_method)) { + // The EMCP definition from JSR-163 requires the bytecodes to be + // the same with the exception of constant pool indices which may + // differ. However, the constants referred to by those indices + // must be the same. + // + // We use methods_EMCP() for comparison since constant pool + // merging can remove duplicate constant pool entries that were + // present in the old method and removed from the rewritten new + // method. A faster binary comparison function would consider the + // old and new methods to be different when they are actually + // EMCP. + + // track which methods are EMCP for add_previous_version() call + emcp_methods->set_bit(old_index); + (*emcp_method_count_p)++; + + // An EMCP method is _not_ obsolete. An obsolete method has a + // different jmethodID than the current method. An EMCP method + // has the same jmethodID as the current method. Having the + // same jmethodID for all EMCP versions of a method allows for + // a consistent view of the EMCP methods regardless of which + // EMCP method you happen to have in hand. For example, a + // breakpoint set in one EMCP method will work for all EMCP + // versions of the method including the current one. 
+ + old_method->set_new_version(new_method); + new_method->set_old_version(old_method); + + RC_TRACE(0x00008000, ("Found EMCP method %s", old_method->name_and_sig_as_C_string())); + + // Transfer breakpoints + instanceKlass *ik = instanceKlass::cast(old_method->method_holder()); + for (BreakpointInfo* bp = ik->breakpoints(); bp != NULL; bp = bp->next()) { + RC_TRACE(0x00000002, ("Checking breakpoint")); + RC_TRACE(0x00000002, ("%d / %d", + bp->match(old_method), bp->match(new_method))); + if (bp->match(old_method)) { + assert(bp->match(new_method), "if old method is method, then new method must match too"); + RC_TRACE(0x00000002, ("Found a breakpoint in an old EMCP method")); + new_method->set_breakpoint(bp->bci()); + } + } + + - // RC_TRACE macro has an embedded ResourceMark - RC_TRACE_WITH_THREAD(0x00000001, THREAD, - ("redefined name=%s, count=%d (avail_mem=" UINT64_FORMAT "K)", - the_class->external_name(), - java_lang_Class::classRedefinedCount(the_class_mirror), - os::available_memory() >> 10)); + } else { + // mark obsolete methods as such + old_method->set_is_obsolete(); + obsolete_count++; + + // With tracing we try not to "yack" too much. The position of + // this trace assumes there are fewer obsolete methods than + // EMCP methods. + RC_TRACE(0x00008000, ("mark %s(%s) as obsolete", + old_method->name()->as_C_string(), + old_method->signature()->as_C_string())); + } + old_method->set_is_old(); + } + for (int i = 0; i < _deleted_methods_length; ++i) { + methodOop old_method = (methodOop)_old_methods->obj_at(_deleted_methods[i]); - RC_TIMER_STOP(_timer_rsc_phase2); -} // end redefine_single_class() + //assert(old_method->vtable_index() < 0, + // "cannot delete methods with vtable entries");; + // Mark all deleted methods as old and obsolete + old_method->set_is_old(); + old_method->set_is_obsolete(); + ++obsolete_count; + // With tracing we try not to "yack" too much. The position of + // this trace assumes there are fewer obsolete methods than + // EMCP methods. + RC_TRACE(0x00008000, ("mark deleted %s(%s) as obsolete", + old_method->name()->as_C_string(), + old_method->signature()->as_C_string())); + } + //assert((*emcp_method_count_p + obsolete_count) == _old_methods->length(), "sanity check"); + RC_TRACE(0x00008000, ("EMCP_cnt=%d, obsolete_cnt=%d !", *emcp_method_count_p, obsolete_count)); +} // Increment the classRedefinedCount field in the specific instanceKlass // and in all direct and indirect subclasses. 
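For orientation, here is a minimal standalone sketch of the decision flow that check_methods_and_mark_as_obsolete() implements above: EMCP pairs (equivalent modulo constant pool indices) are cross-linked via set_new_version()/set_old_version(), everything else is marked old and obsolete. The types and the methods_emcp() comparator below are simplified stand-ins chosen for illustration, not the real methodOop, MethodComparator::methods_EMCP() or BitMap bookkeeping used by the patch.

#include <cstdio>
#include <string>
#include <vector>

// Simplified stand-in for methodOop.
struct Method {
  std::string name;
  std::string bytecodes;            // stands in for the real bytecode stream
  Method*     new_version = nullptr;
  Method*     old_version = nullptr;
  bool        is_old      = false;
  bool        is_obsolete = false;
};

// "Equivalent modulo constant pool indices" reduced to plain bytecode equality.
static bool methods_emcp(const Method& o, const Method& n) {
  return o.bytecodes == n.bytecodes;
}

// Mirrors the decision flow: EMCP pairs are cross-linked so breakpoints and
// jmethodIDs can follow the active version; non-EMCP old methods become
// obsolete; every replaced method is marked old either way.
static int link_or_mark_obsolete(std::vector<Method*>& old_ms,
                                 std::vector<Method*>& new_ms) {
  int emcp_count = 0;
  for (size_t j = 0; j < old_ms.size(); ++j) {
    Method* o = old_ms[j];
    Method* n = new_ms[j];
    if (methods_emcp(*o, *n)) {
      o->new_version = n;           // old -> new link
      n->old_version = o;           // new -> old link
      ++emcp_count;
    } else {
      o->is_obsolete = true;        // bytecodes really changed
    }
    o->is_old = true;
  }
  return emcp_count;
}

int main() {
  Method old_m{"foo", "iload_0; ireturn"};
  Method new_m{"foo", "iload_0; ireturn"};   // same bytecodes -> EMCP
  std::vector<Method*> olds{&old_m}, news{&new_m};
  std::printf("EMCP methods: %d\n", link_or_mark_obsolete(olds, news));
  return 0;
}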
@@ -3338,134 +2953,324 @@ void VM_RedefineClasses::increment_class_counter(instanceKlass *ik, TRAPS) { klassOop class_oop = java_lang_Class::as_klassOop(class_mirror); int new_count = java_lang_Class::classRedefinedCount(class_mirror) + 1; java_lang_Class::set_classRedefinedCount(class_mirror, new_count); + RC_TRACE(0x00008000, ("updated count for class=%s to %d", ik->external_name(), new_count)); +} + +#ifndef PRODUCT +void VM_RedefineClasses::check_class(klassOop k_oop, TRAPS) { + Klass *k = k_oop->klass_part(); + if (k->oop_is_instance()) { + HandleMark hm(THREAD); + instanceKlass *ik = (instanceKlass *) k; + assert(ik->is_newest_version(), "must be latest version in system dictionary"); + + if (ik->vtable_length() > 0) { + ResourceMark rm(THREAD); + if (!ik->vtable()->check_no_old_entries()) { + RC_TRACE(0x00000001, ("size of class: %d\n", + k_oop->size())); + RC_TRACE(0x00000001, ("klassVtable::check_no_old_entries failure -- OLD method found -- class: %s", + ik->signature_name())); + assert(false, "OLD method found"); + } - if (class_oop != _the_class_oop) { - // _the_class_oop count is printed at end of redefine_single_class() - RC_TRACE_WITH_THREAD(0x00000008, THREAD, - ("updated count in subclass=%s to %d", ik->external_name(), new_count)); + ik->vtable()->verify(tty, true); + } } +} + +#endif + +VM_RedefineClasses::FindAffectedKlassesClosure::FindAffectedKlassesClosure( GrowableArray<instanceKlassHandle> *original_klasses, GrowableArray<klassOop> *result ) +{ + assert(original_klasses != NULL && result != NULL, ""); + this->_original_klasses = original_klasses; + this->_result = result; + SystemDictionary::classes_do(this); +} - for (Klass *subk = ik->subklass(); subk != NULL; - subk = subk->next_sibling()) { - if (subk->oop_is_instance()) { - // Only update instanceKlasses - instanceKlass *subik = (instanceKlass*)subk; - // recursively do subclasses of the current subclass - increment_class_counter(subik, THREAD); +void VM_RedefineClasses::FindAffectedKlassesClosure::do_object( oop obj ) +{ + klassOop klass = (klassOop)obj; + assert(!_result->contains(klass), "must not occur more than once!"); + assert(klass->klass_part()->new_version() == NULL, "Only last version is valid entry in system dictionary"); + + for(int i=0; i<_original_klasses->length(); i++) { + instanceKlassHandle cur = _original_klasses->at(i); + if (cur() != klass && klass->klass_part()->is_subtype_of(cur()) && !_original_klasses->contains(klass)) { + RC_TRACE(0x00008000, ("Found affected class: %s", klass->klass_part()->name()->as_C_string())); + _result->append(klass); + break; + } + } } -void VM_RedefineClasses::check_class(klassOop k_oop, - oop initiating_loader, TRAPS) { - Klass *k = k_oop->klass_part(); - if (k->oop_is_instance()) { - HandleMark hm(THREAD); - instanceKlass *ik = (instanceKlass *) k; - bool no_old_methods = true; // be optimistic - ResourceMark rm(THREAD); +jvmtiError VM_RedefineClasses::do_topological_class_sorting( const jvmtiClassDefinition *class_defs, int class_count, GrowableArray<instanceKlassHandle> *affected, GrowableArray<instanceKlassHandle> *arr, TRAPS) +{ + GrowableArray< Pair<klassOop, klassOop> > *links = new GrowableArray< Pair<klassOop, klassOop> >(); - // a vtable should never contain old or obsolete methods - if (ik->vtable_length() > 0 && - !ik->vtable()->check_no_old_or_obsolete_entries()) { - if (RC_TRACE_ENABLED(0x00004000)) { - RC_TRACE_WITH_THREAD(0x00004000, THREAD, - ("klassVtable::check_no_old_or_obsolete_entries failure" - " -- OLD or OBSOLETE method found -- class: %s", - ik->signature_name())); - ik->vtable()->dump_vtable(); - } - no_old_methods = false; - } - - // an itable 
should never contain old or obsolete methods - if (ik->itable_length() > 0 && - !ik->itable()->check_no_old_or_obsolete_entries()) { - if (RC_TRACE_ENABLED(0x00004000)) { - RC_TRACE_WITH_THREAD(0x00004000, THREAD, - ("klassItable::check_no_old_or_obsolete_entries failure" - " -- OLD or OBSOLETE method found -- class: %s", - ik->signature_name())); - ik->itable()->dump_itable(); - } - no_old_methods = false; - } - - // the constant pool cache should never contain old or obsolete methods - if (ik->constants() != NULL && - ik->constants()->cache() != NULL && - !ik->constants()->cache()->check_no_old_or_obsolete_entries()) { - if (RC_TRACE_ENABLED(0x00004000)) { - RC_TRACE_WITH_THREAD(0x00004000, THREAD, - ("cp-cache::check_no_old_or_obsolete_entries failure" - " -- OLD or OBSOLETE method found -- class: %s", - ik->signature_name())); - ik->constants()->cache()->dump_cache(); - } - no_old_methods = false; - } - - if (!no_old_methods) { - if (RC_TRACE_ENABLED(0x00004000)) { - dump_methods(); - } else { - tty->print_cr("INFO: use the '-XX:TraceRedefineClasses=16384' option " - "to see more info about the following guarantee() failure."); + for (int i=0; i<class_count; i++) { + instanceKlassHandle the_class(THREAD, java_lang_Class::as_klassOop(JNIHandles::resolve_non_null(class_defs[i].klass))); + Handle the_class_loader(THREAD, the_class->class_loader()); + Handle protection_domain(THREAD, the_class->protection_domain()); + + ClassFileStream st((u1*) class_defs[i].class_bytes, + class_defs[i].class_byte_count, (char *)"__VM_RedefineClasses__"); + ClassFileParser cfp(&st); + + GrowableArray<Symbol*> symbolArr; + RC_TRACE(0x00000002, ("Before find super symbols of class %s", + the_class->name()->as_C_string())); + cfp.findSuperSymbols(the_class->name(), the_class_loader, protection_domain, the_class, symbolArr, THREAD); + + for (int j=0; j<symbolArr.length(); j++) { + Symbol* sym = symbolArr.at(j); + RC_TRACE(0x00000002, ("Symbol %s", sym->as_C_string())); + + for (int k=0; k<arr->length(); k++) { + klassOop curOop = arr->at(k)(); + // (tw) TODO: Check if we get aliasing problems with different class loaders? + if (curOop->klass_part()->name() == sym /*&& curOop->klass_part()->class_loader() == the_class_loader()*/) { + RC_TRACE(0x00000002, ("Found class to link")); + links->append(Pair<klassOop, klassOop>(curOop, the_class())); + break; + } + } + } + } + + + RC_TRACE(0x00000001, ("Identified links between classes! 
")); + + for (int i=0; ilength(); i++) { + + instanceKlassHandle klass = affected->at(i); + + klassOop superKlass = klass->super(); + if (affected->contains(superKlass)) { + links->append(Pair(superKlass, klass())); + } + + objArrayOop superInterfaces = klass->local_interfaces(); + for (int j=0; jlength(); j++) { + klassOop interfaceKlass = (klassOop)superInterfaces->obj_at(j); + if (arr->contains(interfaceKlass)) { + links->append(Pair(interfaceKlass, klass())); + } + } + } + + if (RC_TRACE_ENABLED(0x00000002)) { + RC_TRACE(0x00000002, ("Identified links: ")); + for (int i=0; ilength(); i++) { + RC_TRACE(0x00000002, ("%s to %s", + links->at(i).left()->klass_part()->name()->as_C_string(), + links->at(i).right()->klass_part()->name()->as_C_string())); + } + } + + for (int i=0; ilength(); i++) { + + int j; + for (j=i; jlength(); j++) { + + int k; + for (k=0; klength(); k++) { + + klassOop k1 = links->adr_at(k)->right(); + klassOop k2 = arr->at(j)(); + if (k1 == k2) { + break; + } + } + + if (k == links->length()) { + break; } - guarantee(false, "OLD and/or OBSOLETE method(s) found"); + } + + if (j == arr->length()) { + // circle detected + return JVMTI_ERROR_CIRCULAR_CLASS_DEFINITION; + } + + for (int k=0; klength(); k++) { + if (links->adr_at(k)->left() == arr->at(j)()) { + links->at_put(k, links->at(links->length() - 1)); + links->remove_at(links->length() - 1); + k--; + } + } + + instanceKlassHandle tmp = arr->at(j); + arr->at_put(j, arr->at(i)); + arr->at_put(i, tmp); + } + + return JVMTI_ERROR_NONE; +} + +void VM_RedefineClasses::oops_do(OopClosure *closure) { + + if (_updated_oops != NULL) { + for (int i=0; i<_updated_oops->length(); i++) { + closure->do_oop(_updated_oops->adr_at(i)); } } } -void VM_RedefineClasses::dump_methods() { - int j; - RC_TRACE(0x00004000, ("_old_methods --")); - for (j = 0; j < _old_methods->length(); ++j) { - methodOop m = (methodOop) _old_methods->obj_at(j); - RC_TRACE_NO_CR(0x00004000, ("%4d (%5d) ", j, m->vtable_index())); - m->access_flags().print_on(tty); - tty->print(" -- "); - m->print_name(tty); - tty->cr(); - } - RC_TRACE(0x00004000, ("_new_methods --")); - for (j = 0; j < _new_methods->length(); ++j) { - methodOop m = (methodOop) _new_methods->obj_at(j); - RC_TRACE_NO_CR(0x00004000, ("%4d (%5d) ", j, m->vtable_index())); - m->access_flags().print_on(tty); - tty->print(" -- "); - m->print_name(tty); - tty->cr(); - } - RC_TRACE(0x00004000, ("_matching_(old/new)_methods --")); - for (j = 0; j < _matching_methods_length; ++j) { - methodOop m = _matching_old_methods[j]; - RC_TRACE_NO_CR(0x00004000, ("%4d (%5d) ", j, m->vtable_index())); - m->access_flags().print_on(tty); - tty->print(" -- "); - m->print_name(tty); - tty->cr(); - m = _matching_new_methods[j]; - RC_TRACE_NO_CR(0x00004000, (" (%5d) ", m->vtable_index())); - m->access_flags().print_on(tty); - tty->cr(); - } - RC_TRACE(0x00004000, ("_deleted_methods --")); - for (j = 0; j < _deleted_methods_length; ++j) { - methodOop m = _deleted_methods[j]; - RC_TRACE_NO_CR(0x00004000, ("%4d (%5d) ", j, m->vtable_index())); - m->access_flags().print_on(tty); - tty->print(" -- "); - m->print_name(tty); - tty->cr(); - } - RC_TRACE(0x00004000, ("_added_methods --")); - for (j = 0; j < _added_methods_length; ++j) { - methodOop m = _added_methods[j]; - RC_TRACE_NO_CR(0x00004000, ("%4d (%5d) ", j, m->vtable_index())); - m->access_flags().print_on(tty); - tty->print(" -- "); - m->print_name(tty); - tty->cr(); +void VM_RedefineClasses::transfer_special_access_flags(fieldDescriptor *from, fieldDescriptor *to) { + 
to->set_is_field_modification_watched(from->is_field_modification_watched()); + to->set_is_field_access_watched(from->is_field_access_watched()); + if (from->is_field_modification_watched() || from->is_field_access_watched()) { + RC_TRACE(0x00000002, ("Transfered watch for field %s", + from->name()->as_C_string())); + } + update_klass_field_access_flag(to); +} + +void VM_RedefineClasses::update_klass_field_access_flag(fieldDescriptor *fd) { + instanceKlass* ik = instanceKlass::cast(fd->field_holder()); + FieldInfo* fi = FieldInfo::from_field_array(ik->fields(), fd->index()); + fi->set_access_flags(fd->access_flags().as_short()); +} + + +// This internal class transfers the native function registration from old methods +// to new methods. It is designed to handle both the simple case of unchanged +// native methods and the complex cases of native method prefixes being added and/or +// removed. +// It expects only to be used during the VM_RedefineClasses op (a safepoint). +// +// This class is used after the new methods have been installed in "the_class". +// +// So, for example, the following must be handled. Where 'm' is a method and +// a number followed by an underscore is a prefix. +// +// Old Name New Name +// Simple transfer to new method m -> m +// Add prefix m -> 1_m +// Remove prefix 1_m -> m +// Simultaneous add of prefixes m -> 3_2_1_m +// Simultaneous removal of prefixes 3_2_1_m -> m +// Simultaneous add and remove 1_m -> 2_m +// Same, caused by prefix removal only 3_2_1_m -> 3_2_m +// +class TransferNativeFunctionRegistration { +private: + instanceKlassHandle the_class; + int prefix_count; + char** prefixes; + + // Recursively search the binary tree of possibly prefixed method names. + // Iteration could be used if all agents were well behaved. Full tree walk is + // more resilent to agents not cleaning up intermediate methods. + // Branch at each depth in the binary tree is: + // (1) without the prefix. + // (2) with the prefix. + // where 'prefix' is the prefix at that 'depth' (first prefix, second prefix,...) + methodOop search_prefix_name_space(int depth, char* name_str, size_t name_len, + Symbol* signature) { + Symbol* name_symbol = SymbolTable::probe(name_str, (int)name_len); + if (name_symbol != NULL) { + methodOop method = Klass::cast(the_class()->klass_part()->new_version())->lookup_method(name_symbol, signature); + if (method != NULL) { + // Even if prefixed, intermediate methods must exist. + if (method->is_native()) { + // Wahoo, we found a (possibly prefixed) version of the method, return it. + return method; + } + if (depth < prefix_count) { + // Try applying further prefixes (other than this one). + method = search_prefix_name_space(depth+1, name_str, name_len, signature); + if (method != NULL) { + return method; // found + } + + // Try adding this prefix to the method name and see if it matches + // another method name. + char* prefix = prefixes[depth]; + size_t prefix_len = strlen(prefix); + size_t trial_len = name_len + prefix_len; + char* trial_name_str = NEW_RESOURCE_ARRAY(char, trial_len + 1); + strcpy(trial_name_str, prefix); + strcat(trial_name_str, name_str); + method = search_prefix_name_space(depth+1, trial_name_str, trial_len, + signature); + if (method != NULL) { + // If found along this branch, it was prefixed, mark as such + method->set_is_prefixed_native(); + return method; // found + } + } + } + } + return NULL; // This whole branch bore nothing + } + + // Return the method name with old prefixes stripped away. 
+ char* method_name_without_prefixes(methodOop method) { + Symbol* name = method->name(); + char* name_str = name->as_utf8(); + + // Old prefixing may be defunct, strip prefixes, if any. + for (int i = prefix_count-1; i >= 0; i--) { + char* prefix = prefixes[i]; + size_t prefix_len = strlen(prefix); + if (strncmp(prefix, name_str, prefix_len) == 0) { + name_str += prefix_len; + } + } + return name_str; + } + + // Strip any prefixes off the old native method, then try to find a + // (possibly prefixed) new native that matches it. + methodOop strip_and_search_for_new_native(methodOop method) { + ResourceMark rm; + char* name_str = method_name_without_prefixes(method); + return search_prefix_name_space(0, name_str, strlen(name_str), + method->signature()); + } + +public: + + // Construct a native method transfer processor for this class. + TransferNativeFunctionRegistration(instanceKlassHandle _the_class) { + assert(SafepointSynchronize::is_at_safepoint(), "sanity check"); + + the_class = _the_class; + prefixes = JvmtiExport::get_all_native_method_prefixes(&prefix_count); + } + + // Attempt to transfer any of the old or deleted methods that are native + void transfer_registrations(instanceKlassHandle old_klass, int* old_methods, int methods_length) { + for (int j = 0; j < methods_length; j++) { + methodOop old_method = (methodOop)old_klass->methods()->obj_at(old_methods[j]); + + if (old_method->is_native() && old_method->has_native_function()) { + methodOop new_method = strip_and_search_for_new_native(old_method); + if (new_method != NULL) { + // Actually set the native function in the new method. + // Redefine does not send events (except CFLH), certainly not this + // behind the scenes re-registration. + new_method->set_native_function(old_method->native_function(), + !methodOopDesc::native_bind_event_is_interesting); + + RC_TRACE(0x00008000, ("Transfering native function for method %s", old_method->name()->as_C_string())); + } + } + } } +}; + +// Don't lose the association between a native method and its JNI function. +void VM_RedefineClasses::transfer_old_native_function_registrations(instanceKlassHandle old_klass) { + TransferNativeFunctionRegistration transfer(old_klass); + transfer.transfer_registrations(old_klass, _deleted_methods, _deleted_methods_length); + transfer.transfer_registrations(old_klass, _matching_old_methods, _matching_methods_length); } diff --git a/src/share/vm/prims/jvmtiRedefineClasses.hpp b/src/share/vm/prims/jvmtiRedefineClasses.hpp index 671f2ae..88fdbac 100644 --- a/src/share/vm/prims/jvmtiRedefineClasses.hpp +++ b/src/share/vm/prims/jvmtiRedefineClasses.hpp @@ -1,26 +1,29 @@ /* - * Copyright (c) 2003, 2013, Oracle and/or its affiliates. All rights reserved. - * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. - * - * This code is free software; you can redistribute it and/or modify it - * under the terms of the GNU General Public License version 2 only, as - * published by the Free Software Foundation. - * - * This code is distributed in the hope that it will be useful, but WITHOUT - * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or - * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License - * version 2 for more details (a copy is included in the LICENSE file that - * accompanied this code). - * - * You should have received a copy of the GNU General Public License version - * 2 along with this work; if not, write to the Free Software Foundation, - * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 
- * - * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA - * or visit www.oracle.com if you need additional information or have any - * questions. - * - */ +* Copyright (c) 2010, Oracle and/or its affiliates. All rights reserved. +* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. +* +* This code is free software; you can redistribute it and/or modify it +* under the terms of the GNU General Public License version 2 only, as +* published by the Free Software Foundation. +* +* This code is distributed in the hope that it will be useful, but WITHOUT +* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License +* version 2 for more details (a copy is included in the LICENSE file that +* accompanied this code). +* +* You should have received a copy of the GNU General Public License version +* 2 along with this work; if not, write to the Free Software Foundation, +* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. +* +* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA +* or visit www.oracle.com if you need additional information or have any +* questions. +* +*/ + +// New version that allows arbitrary changes to already loaded classes. +// Modifications done by: Thomas Wuerthinger #ifndef SHARE_VM_PRIMS_JVMTIREDEFINECLASSES_HPP #define SHARE_VM_PRIMS_JVMTIREDEFINECLASSES_HPP @@ -32,331 +35,28 @@ #include "oops/objArrayOop.hpp" #include "prims/jvmtiRedefineClassesTrace.hpp" #include "runtime/vm_operations.hpp" +#include "gc_implementation/shared/vmGCOperations.hpp" -// Introduction: -// -// The RedefineClasses() API is used to change the definition of one or -// more classes. While the API supports redefining more than one class -// in a single call, in general, the API is discussed in the context of -// changing the definition of a single current class to a single new -// class. For clarity, the current class is will always be called -// "the_class" and the new class will always be called "scratch_class". -// -// The name "the_class" is used because there is only one structure -// that represents a specific class; redefinition does not replace the -// structure, but instead replaces parts of the structure. The name -// "scratch_class" is used because the structure that represents the -// new definition of a specific class is simply used to carry around -// the parts of the new definition until they are used to replace the -// appropriate parts in the_class. Once redefinition of a class is -// complete, scratch_class is thrown away. -// -// -// Implementation Overview: -// -// The RedefineClasses() API is mostly a wrapper around the VM op that -// does the real work. The work is split in varying degrees between -// doit_prologue(), doit() and doit_epilogue(). -// -// 1) doit_prologue() is called by the JavaThread on the way to a -// safepoint. 
It does parameter verification and loads scratch_class -// which involves: -// - parsing the incoming class definition using the_class' class -// loader and security context -// - linking scratch_class -// - merging constant pools and rewriting bytecodes as needed -// for the merged constant pool -// - verifying the bytecodes in scratch_class -// - setting up the constant pool cache and rewriting bytecodes -// as needed to use the cache -// - finally, scratch_class is compared to the_class to verify -// that it is a valid replacement class -// - if everything is good, then scratch_class is saved in an -// instance field in the VM operation for the doit() call -// -// Note: A JavaThread must do the above work. -// -// 2) doit() is called by the VMThread during a safepoint. It installs -// the new class definition(s) which involves: -// - retrieving the scratch_class from the instance field in the -// VM operation -// - house keeping (flushing breakpoints and caches, deoptimizing -// dependent compiled code) -// - replacing parts in the_class with parts from scratch_class -// - adding weak reference(s) to track the obsolete but interesting -// parts of the_class -// - adjusting constant pool caches and vtables in other classes -// that refer to methods in the_class. These adjustments use the -// SystemDictionary::classes_do() facility which only allows -// a helper method to be specified. The interesting parameters -// that we would like to pass to the helper method are saved in -// static global fields in the VM operation. -// - telling the SystemDictionary to notice our changes -// -// Note: the above work must be done by the VMThread to be safe. -// -// 3) doit_epilogue() is called by the JavaThread after the VM op -// is finished and the safepoint is done. It simply cleans up -// memory allocated in doit_prologue() and used in doit(). -// -// -// Constant Pool Details: -// -// When the_class is redefined, we cannot just replace the constant -// pool in the_class with the constant pool from scratch_class because -// that could confuse obsolete methods that may still be running. -// Instead, the constant pool from the_class, old_cp, is merged with -// the constant pool from scratch_class, scratch_cp. The resulting -// constant pool, merge_cp, replaces old_cp in the_class. -// -// The key part of any merging algorithm is the entry comparison -// function so we have to know the types of entries in a constant pool -// in order to merge two of them together. Constant pools can contain -// up to 12 different kinds of entries; the JVM_CONSTANT_Unicode entry -// is not presently used so we only have to worry about the other 11 -// entry types. For the purposes of constant pool merging, it is -// helpful to know that the 11 entry types fall into 3 different -// subtypes: "direct", "indirect" and "double-indirect". -// -// Direct CP entries contain data and do not contain references to -// other CP entries. The following are direct CP entries: -// JVM_CONSTANT_{Double,Float,Integer,Long,Utf8} -// -// Indirect CP entries contain 1 or 2 references to a direct CP entry -// and no other data. The following are indirect CP entries: -// JVM_CONSTANT_{Class,NameAndType,String} -// -// Double-indirect CP entries contain two references to indirect CP -// entries and no other data. 
The following are double-indirect CP -// entries: -// JVM_CONSTANT_{Fieldref,InterfaceMethodref,Methodref} -// -// When comparing entries between two constant pools, the entry types -// are compared first and if they match, then further comparisons are -// made depending on the entry subtype. Comparing direct CP entries is -// simply a matter of comparing the data associated with each entry. -// Comparing both indirect and double-indirect CP entries requires -// recursion. -// -// Fortunately, the recursive combinations are limited because indirect -// CP entries can only refer to direct CP entries and double-indirect -// CP entries can only refer to indirect CP entries. The following is -// an example illustration of the deepest set of indirections needed to -// access the data associated with a JVM_CONSTANT_Fieldref entry: -// -// JVM_CONSTANT_Fieldref { -// class_index => JVM_CONSTANT_Class { -// name_index => JVM_CONSTANT_Utf8 { -// -// } -// } -// name_and_type_index => JVM_CONSTANT_NameAndType { -// name_index => JVM_CONSTANT_Utf8 { -// -// } -// descriptor_index => JVM_CONSTANT_Utf8 { -// -// } -// } -// } -// -// The above illustration is not a data structure definition for any -// computer language. The curly braces ('{' and '}') are meant to -// delimit the context of the "fields" in the CP entry types shown. -// Each indirection from the JVM_CONSTANT_Fieldref entry is shown via -// "=>", e.g., the class_index is used to indirectly reference a -// JVM_CONSTANT_Class entry where the name_index is used to indirectly -// reference a JVM_CONSTANT_Utf8 entry which contains the interesting -// . In order to understand a JVM_CONSTANT_Fieldref entry, we -// have to do a total of 5 indirections just to get to the CP entries -// that contain the interesting pieces of data and then we have to -// fetch the three pieces of data. This means we have to do a total of -// (5 + 3) * 2 == 16 dereferences to compare two JVM_CONSTANT_Fieldref -// entries. -// -// Here is the indirection, data and dereference count for each entry -// type: -// -// JVM_CONSTANT_Class 1 indir, 1 data, 2 derefs -// JVM_CONSTANT_Double 0 indir, 1 data, 1 deref -// JVM_CONSTANT_Fieldref 2 indir, 3 data, 8 derefs -// JVM_CONSTANT_Float 0 indir, 1 data, 1 deref -// JVM_CONSTANT_Integer 0 indir, 1 data, 1 deref -// JVM_CONSTANT_InterfaceMethodref 2 indir, 3 data, 8 derefs -// JVM_CONSTANT_Long 0 indir, 1 data, 1 deref -// JVM_CONSTANT_Methodref 2 indir, 3 data, 8 derefs -// JVM_CONSTANT_NameAndType 1 indir, 2 data, 4 derefs -// JVM_CONSTANT_String 1 indir, 1 data, 2 derefs -// JVM_CONSTANT_Utf8 0 indir, 1 data, 1 deref -// -// So different subtypes of CP entries require different amounts of -// work for a proper comparison. -// -// Now that we've talked about the different entry types and how to -// compare them we need to get back to merging. This is not a merge in -// the "sort -u" sense or even in the "sort" sense. When we merge two -// constant pools, we copy all the entries from old_cp to merge_cp, -// preserving entry order. Next we append all the unique entries from -// scratch_cp to merge_cp and we track the index changes from the -// location in scratch_cp to the possibly new location in merge_cp. -// When we are done, any obsolete code that is still running that -// uses old_cp should not be able to observe any difference if it -// were to use merge_cp. As for the new code in scratch_class, it is -// modified to use the appropriate index values in merge_cp before it -// is used to replace the code in the_class. 
-// -// There is one small complication in copying the entries from old_cp -// to merge_cp. Two of the CP entry types are special in that they are -// lazily resolved. Before explaining the copying complication, we need -// to digress into CP entry resolution. -// -// JVM_CONSTANT_Class and JVM_CONSTANT_String entries are present in -// the class file, but are not stored in memory as such until they are -// resolved. The entries are not resolved unless they are used because -// resolution is expensive. During class file parsing the entries are -// initially stored in memory as JVM_CONSTANT_ClassIndex and -// JVM_CONSTANT_StringIndex entries. These special CP entry types -// indicate that the JVM_CONSTANT_Class and JVM_CONSTANT_String entries -// have been parsed, but the index values in the entries have not been -// validated. After the entire constant pool has been parsed, the index -// values can be validated and then the entries are converted into -// JVM_CONSTANT_UnresolvedClass and JVM_CONSTANT_UnresolvedString -// entries. During this conversion process, the UTF8 values that are -// indirectly referenced by the JVM_CONSTANT_ClassIndex and -// JVM_CONSTANT_StringIndex entries are changed into Symbol*s and the -// entries are modified to refer to the Symbol*s. This optimization -// eliminates one level of indirection for those two CP entry types and -// gets the entries ready for verification. During class file parsing -// it is also possible for JVM_CONSTANT_UnresolvedString entries to be -// resolved into JVM_CONSTANT_String entries. Verification expects to -// find JVM_CONSTANT_UnresolvedClass and either JVM_CONSTANT_String or -// JVM_CONSTANT_UnresolvedString entries and not JVM_CONSTANT_Class -// entries. -// -// Now we can get back to the copying complication. When we copy -// entries from old_cp to merge_cp, we have to revert any -// JVM_CONSTANT_Class entries to JVM_CONSTANT_UnresolvedClass entries -// or verification will fail. -// -// It is important to explicitly state that the merging algorithm -// effectively unresolves JVM_CONSTANT_Class entries that were in the -// old_cp when they are changed into JVM_CONSTANT_UnresolvedClass -// entries in the merge_cp. This is done both to make verification -// happy and to avoid adding more brittleness between RedefineClasses -// and the constant pool cache. By allowing the constant pool cache -// implementation to (re)resolve JVM_CONSTANT_UnresolvedClass entries -// into JVM_CONSTANT_Class entries, we avoid having to embed knowledge -// about those algorithms in RedefineClasses. -// -// Appending unique entries from scratch_cp to merge_cp is straight -// forward for direct CP entries and most indirect CP entries. For the -// indirect CP entry type JVM_CONSTANT_NameAndType and for the double- -// indirect CP entry types, the presence of more than one piece of -// interesting data makes appending the entries more complicated. -// -// For the JVM_CONSTANT_{Double,Float,Integer,Long,Utf8} entry types, -// the entry is simply copied from scratch_cp to the end of merge_cp. -// If the index in scratch_cp is different than the destination index -// in merge_cp, then the change in index value is tracked. -// -// Note: the above discussion for the direct CP entries also applies -// to the JVM_CONSTANT_Unresolved{Class,String} entry types. -// -// For the JVM_CONSTANT_{Class,String} entry types, since there is only -// one data element at the end of the recursion, we know that we have -// either one or two unique entries. 
If the JVM_CONSTANT_Utf8 entry is -// unique then it is appended to merge_cp before the current entry. -// If the JVM_CONSTANT_Utf8 entry is not unique, then the current entry -// is updated to refer to the duplicate entry in merge_cp before it is -// appended to merge_cp. Again, any changes in index values are tracked -// as needed. -// -// Note: the above discussion for JVM_CONSTANT_{Class,String} entry -// types is theoretical. Since those entry types have already been -// optimized into JVM_CONSTANT_Unresolved{Class,String} entry types, -// they are handled as direct CP entries. -// -// For the JVM_CONSTANT_NameAndType entry type, since there are two -// data elements at the end of the recursions, we know that we have -// between one and three unique entries. Any unique JVM_CONSTANT_Utf8 -// entries are appended to merge_cp before the current entry. For any -// JVM_CONSTANT_Utf8 entries that are not unique, the current entry is -// updated to refer to the duplicate entry in merge_cp before it is -// appended to merge_cp. Again, any changes in index values are tracked -// as needed. -// -// For the JVM_CONSTANT_{Fieldref,InterfaceMethodref,Methodref} entry -// types, since there are two indirect CP entries and three data -// elements at the end of the recursions, we know that we have between -// one and six unique entries. See the JVM_CONSTANT_Fieldref diagram -// above for an example of all six entries. The uniqueness algorithm -// for the JVM_CONSTANT_Class and JVM_CONSTANT_NameAndType entries is -// covered above. Any unique entries are appended to merge_cp before -// the current entry. For any entries that are not unique, the current -// entry is updated to refer to the duplicate entry in merge_cp before -// it is appended to merge_cp. Again, any changes in index values are -// tracked as needed. -// -// -// Other Details: -// -// Details for other parts of RedefineClasses need to be written. -// This is a placeholder section. -// -// -// Open Issues (in no particular order): -// -// - How do we serialize the RedefineClasses() API without deadlocking? -// -// - SystemDictionary::parse_stream() was called with a NULL protection -// domain since the initial version. This has been changed to pass -// the_class->protection_domain(). This change has been tested with -// all NSK tests and nothing broke, but what will adding it now break -// in ways that we don't test? -// -// - GenerateOopMap::rewrite_load_or_store() has a comment in its -// (indirect) use of the Relocator class that the max instruction -// size is 4 bytes. goto_w and jsr_w are 5 bytes and wide/iinc is -// 6 bytes. Perhaps Relocator only needs a 4 byte buffer to do -// what it does to the bytecodes. More investigation is needed. -// -// - java.lang.Object methods can be called on arrays. This is -// implemented via the arrayKlassOop vtable which we don't -// update. For example, if we redefine java.lang.Object.toString(), -// then the new version of the method will not be called for array -// objects. -// -// - How do we know if redefine_single_class() and the guts of -// instanceKlass are out of sync? I don't think this can be -// automated, but we should probably order the work in -// redefine_single_class() to match the order of field -// definitions in instanceKlass. We also need to add some -// comments about keeping things in sync. -// -// - set_new_constant_pool() is huge and we should consider refactoring -// it into smaller chunks of work. 
-// -// - The exception table update code in set_new_constant_pool() defines -// const values that are also defined in a local context elsewhere. -// The same literal values are also used in elsewhere. We need to -// coordinate a cleanup of these constants with Runtime. -// - -class VM_RedefineClasses: public VM_Operation { +#define RC_ABORT(error) { _result = error; return false; } + +class VM_RedefineClasses: public VM_GC_Operation { private: + // These static fields are needed by SystemDictionary::classes_do() // facility and the adjust_cpool_cache_and_vtable() helper: static objArrayOop _old_methods; static objArrayOop _new_methods; - static methodOop* _matching_old_methods; - static methodOop* _matching_new_methods; - static methodOop* _deleted_methods; - static methodOop* _added_methods; + static int* _matching_old_methods; + static int* _matching_new_methods; + static int* _deleted_methods; + static int* _added_methods; static int _matching_methods_length; static int _deleted_methods_length; static int _added_methods_length; static klassOop _the_class_oop; + static int _revision_number; + // The instance fields are used to pass information from // doit_prologue() to doit() and doit_epilogue(). jint _class_count; @@ -370,36 +70,29 @@ class VM_RedefineClasses: public VM_Operation { // _index_map_p contains any entries. int _index_map_count; intArray * _index_map_p; - // ptr to _class_count scratch_classes - instanceKlassHandle * _scratch_classes; - jvmtiError _res; + GrowableArray* _new_classes; + GrowableArray* _updated_oops; + jvmtiError _result; + int _max_redefinition_flags; // Performance measurement support. These timers do not cover all // the work done for JVM/TI RedefineClasses() but they do cover // the heavy lifting. - elapsedTimer _timer_rsc_phase1; - elapsedTimer _timer_rsc_phase2; - elapsedTimer _timer_vm_op_prologue; - - // These routines are roughly in call order unless otherwise noted. - - // Load the caller's new class definition(s) into _scratch_classes. - // Constant pool merging work is done here as needed. Also calls - // compare_and_normalize_class_versions() to verify the class - // definition(s). + elapsedTimer _timer_total; + elapsedTimer _timer_prologue; + elapsedTimer _timer_class_linking; + elapsedTimer _timer_class_loading; + elapsedTimer _timer_check_type; + elapsedTimer _timer_prepare_redefinition; + elapsedTimer _timer_wait_for_locks; + elapsedTimer _timer_redefinition; + elapsedTimer _timer_vm_op_epilogue; + + jvmtiError check_redefinition_allowed(instanceKlassHandle new_class); + jvmtiError find_sorted_affected_classes(GrowableArray *all_affected_klasses); + jvmtiError find_class_bytes(instanceKlassHandle the_class, const unsigned char **class_bytes, jint *class_byte_count, jboolean *not_changed); jvmtiError load_new_class_versions(TRAPS); - // Verify that the caller provided class definition(s) that meet - // the restrictions of RedefineClasses. Normalize the order of - // overloaded methods as needed. - jvmtiError compare_and_normalize_class_versions( - instanceKlassHandle the_class, instanceKlassHandle scratch_class); - - // Swap annotations[i] with annotations[j] - // Used by compare_and_normalize_class_versions() when normalizing - // overloaded methods or changing idnum as when adding or deleting methods. 
- void swap_all_method_annotations(int i, int j, instanceKlassHandle scratch_class); - // Figure out which new methods match old methods in name and signature, // which methods have been added, and which are no longer present void compute_added_deleted_matching_methods(); @@ -407,95 +100,100 @@ class VM_RedefineClasses: public VM_Operation { // Change jmethodIDs to point to the new methods void update_jmethod_ids(); - // In addition to marking methods as obsolete, this routine - // records which methods are EMCP (Equivalent Module Constant - // Pool) in the emcp_methods BitMap and returns the number of - // EMCP methods via emcp_method_count_p. This information is - // used when information about the previous version of the_class - // is squirreled away. - void check_methods_and_mark_as_obsolete(BitMap *emcp_methods, - int * emcp_method_count_p); - void transfer_old_native_function_registrations(instanceKlassHandle the_class); + class FindAffectedKlassesClosure : public ObjectClosure { - // Unevolving classes may point to methods of the_class directly - // from their constant pool caches, itables, and/or vtables. We - // use the SystemDictionary::classes_do() facility and this helper - // to fix up these pointers. - static void adjust_cpool_cache_and_vtable(klassOop k_oop, oop loader, TRAPS); + private: + GrowableArray *_original_klasses; + GrowableArray *_result; + + public: + FindAffectedKlassesClosure(GrowableArray *original_klasses, GrowableArray *result); + + virtual void do_object(oop obj); + }; + + + static jvmtiError do_topological_class_sorting(const jvmtiClassDefinition *class_definitions, int class_count, GrowableArray *affected, GrowableArray *arr, TRAPS); // Install the redefinition of a class - void redefine_single_class(jclass the_jclass, - instanceKlassHandle scratch_class, TRAPS); + void redefine_single_class(instanceKlassHandle the_new_class, TRAPS); // Increment the classRedefinedCount field in the specific instanceKlass // and in all direct and indirect subclasses. 
void increment_class_counter(instanceKlass *ik, TRAPS); - // Support for constant pool merging (these routines are in alpha - // order): - void append_entry(constantPoolHandle scratch_cp, int scratch_i, - constantPoolHandle *merge_cp_p, int *merge_cp_length_p, TRAPS); - int find_new_index(int old_index); - bool is_unresolved_class_mismatch(constantPoolHandle cp1, int index1, - constantPoolHandle cp2, int index2); - bool is_unresolved_string_mismatch(constantPoolHandle cp1, int index1, - constantPoolHandle cp2, int index2); - void map_index(constantPoolHandle scratch_cp, int old_index, int new_index); - bool merge_constant_pools(constantPoolHandle old_cp, - constantPoolHandle scratch_cp, constantPoolHandle *merge_cp_p, - int *merge_cp_length_p, TRAPS); - jvmtiError merge_cp_and_rewrite(instanceKlassHandle the_class, - instanceKlassHandle scratch_class, TRAPS); - u2 rewrite_cp_ref_in_annotation_data( - typeArrayHandle annotations_typeArray, int &byte_i_ref, - const char * trace_mesg, TRAPS); - bool rewrite_cp_refs(instanceKlassHandle scratch_class, TRAPS); - bool rewrite_cp_refs_in_annotation_struct( - typeArrayHandle class_annotations, int &byte_i_ref, TRAPS); - bool rewrite_cp_refs_in_annotations_typeArray( - typeArrayHandle annotations_typeArray, int &byte_i_ref, TRAPS); - bool rewrite_cp_refs_in_class_annotations( - instanceKlassHandle scratch_class, TRAPS); - bool rewrite_cp_refs_in_element_value( - typeArrayHandle class_annotations, int &byte_i_ref, TRAPS); - bool rewrite_cp_refs_in_fields_annotations( - instanceKlassHandle scratch_class, TRAPS); - void rewrite_cp_refs_in_method(methodHandle method, - methodHandle * new_method_p, TRAPS); - bool rewrite_cp_refs_in_methods(instanceKlassHandle scratch_class, TRAPS); - bool rewrite_cp_refs_in_methods_annotations( - instanceKlassHandle scratch_class, TRAPS); - bool rewrite_cp_refs_in_methods_default_annotations( - instanceKlassHandle scratch_class, TRAPS); - bool rewrite_cp_refs_in_methods_parameter_annotations( - instanceKlassHandle scratch_class, TRAPS); - void rewrite_cp_refs_in_stack_map_table(methodHandle method, TRAPS); - void rewrite_cp_refs_in_verification_type_info( - address& stackmap_addr_ref, address stackmap_end, u2 frame_i, - u1 frame_size, TRAPS); - void set_new_constant_pool(instanceKlassHandle scratch_class, - constantPoolHandle scratch_cp, int scratch_cp_length, bool shrink, TRAPS); void flush_dependent_code(instanceKlassHandle k_h, TRAPS); - static void check_class(klassOop k_oop, oop initiating_loader, TRAPS); - static void dump_methods(); + static void check_class(klassOop k_oop,/* oop initiating_loader,*/ TRAPS) PRODUCT_RETURN; + + static void adjust_cpool_cache(klassOop k_oop, oop initiating_loader, TRAPS); + +#ifdef ASSERT + static void verify_classes(klassOop k_oop, oop initiating_loader, TRAPS); +#endif + + int calculate_redefinition_flags(instanceKlassHandle new_version); + void calculate_instance_update_information(klassOop new_version); + void check_methods_and_mark_as_obsolete(BitMap *emcp_methods, int * emcp_method_count_p); + + static void calculate_type_check_information(klassOop k); + static void clear_type_check_information(klassOop k); public: - VM_RedefineClasses(jint class_count, - const jvmtiClassDefinition *class_defs, - JvmtiClassLoadKind class_load_kind); - VMOp_Type type() const { return VMOp_RedefineClasses; } + VM_RedefineClasses(jint class_count, const jvmtiClassDefinition *class_defs, JvmtiClassLoadKind class_load_kind); + virtual ~VM_RedefineClasses(); + + bool check_arguments(); bool 
doit_prologue(); void doit(); void doit_epilogue(); + void rollback(); - bool allow_nested_vm_operations() const { return true; } - jvmtiError check_error() { return _res; } + jvmtiError check_exception() const; + VMOp_Type type() const { return VMOp_RedefineClasses; } + bool skip_operation() const { return false; } + bool allow_nested_vm_operations() const { return true; } + jvmtiError check_error() { return _result; } + + void update_active_methods(); + + // Checks for type consistency after hierarchy change + bool check_type_consistency(); + void calculate_type_check_information(); + bool check_field_value_types(); + void clear_type_check_information(); + bool check_method_stacks(); + bool check_loaded_methods(); + bool check_method(methodOop method); + static Symbol* signature_to_class_name(Symbol* signature); + + void method_forwarding(); + + void update_array_classes_to_newest_version(klassOop smallest_dimension); // Modifiable test must be shared between IsModifiableClass query // and redefine implementation static bool is_modifiable_class(oop klass_mirror); + + // Method used during garbage collection; the VM operation must iterate over all oops. + void oops_do(OopClosure* f); + + // Utility methods for transferring field access flags + + static void transfer_special_access_flags(fieldDescriptor *from, fieldDescriptor *to); + static void update_klass_field_access_flag(fieldDescriptor *fd); + + void transfer_old_native_function_registrations(instanceKlassHandle the_class); + + void lock_threads(); + void unlock_threads(); + + template <class T> static void do_oop_work(T* p); + + static void swap_marks(oop first, oop second); + }; #endif // SHARE_VM_PRIMS_JVMTIREDEFINECLASSES_HPP + diff --git a/src/share/vm/prims/methodComparator.cpp b/src/share/vm/prims/methodComparator.cpp index 60eaf97..07bb6e3 100644 --- a/src/share/vm/prims/methodComparator.cpp +++ b/src/share/vm/prims/methodComparator.cpp @@ -65,6 +65,7 @@ bool MethodComparator::methods_EMCP(methodOop old_method, methodOop new_method) if (!
args_same(c_old, c_new)) return false; } + return true; } diff --git a/src/share/vm/prims/nativeLookup.cpp b/src/share/vm/prims/nativeLookup.cpp index 41fc42d..53b3e0c 100644 --- a/src/share/vm/prims/nativeLookup.cpp +++ b/src/share/vm/prims/nativeLookup.cpp @@ -35,6 +35,7 @@ #include "oops/symbol.hpp" #include "prims/jvm_misc.hpp" #include "prims/nativeLookup.hpp" +#include "prims/jvmtiRedefineClasses.hpp" #include "runtime/arguments.hpp" #include "runtime/handles.inline.hpp" #include "runtime/javaCalls.hpp" @@ -53,7 +54,6 @@ # include "os_bsd.inline.hpp" #endif - static void mangle_name_on(outputStream* st, Symbol* name, int begin, int end) { char* bytes = (char*)name->bytes() + begin; char* end_bytes = (char*)name->bytes() + end; @@ -138,6 +138,40 @@ static JNINativeMethod lookup_special_native_methods[] = { { CC"Java_sun_hotspot_WhiteBox_registerNatives", NULL, FN_PTR(JVM_RegisterWhiteBoxMethods) }, }; +// Helper function to call redefineClasses from Java Code +JVM_ENTRY(int, JVM_RedefineClassesHelper(JNIEnv *env, jclass cb, jclass target, jbyteArray bytes)) + ResourceMark rm(THREAD); + + JavaThread* current_thread = JavaThread::current(); + jbyte* bytecodes = NULL; + const int class_count = 1; + jvmtiClassDefinition* class_definitions = NEW_RESOURCE_ARRAY(jvmtiClassDefinition, class_count); + + { + ThreadToNativeFromVM ttnfv(thread); + jboolean is_copy = JNI_FALSE; + bytecodes = env->GetByteArrayElements(bytes, &is_copy); + class_definitions[0].klass = target; + class_definitions[0].class_byte_count = env->GetArrayLength(bytes); + class_definitions[0].class_bytes = (unsigned char*)bytecodes; + } + + VM_RedefineClasses op(class_count, class_definitions, jvmti_class_load_kind_retransform); + VMThread::execute(&op); + int result = op.check_error(); + + { + ThreadToNativeFromVM ttnfv(thread); + if (env->ExceptionOccurred()) { + return -1; + } + env->ReleaseByteArrayElements(bytes, bytecodes, 0); + } + + return result; +JVM_END + + static address lookup_special_native(char* jni_name) { int i = !JDK_Version::is_gte_jdk14x_version() ? 
0 : 2; // see comment in lookup_special_native_methods int count = sizeof(lookup_special_native_methods) / sizeof(JNINativeMethod); @@ -177,6 +211,9 @@ address NativeLookup::lookup_style(methodHandle method, char* pure_name, const c return entry; } } + if(strstr(jni_name, "Java_at_ssw_hotswap_ClassRedefinition_redefineClasses") != NULL) { + return CAST_FROM_FN_PTR(address, JVM_RedefineClassesHelper); + } // Otherwise call static method findNative in ClassLoader KlassHandle klass (THREAD, SystemDictionary::ClassLoader_klass()); diff --git a/src/share/vm/runtime/arguments.cpp b/src/share/vm/runtime/arguments.cpp index 22d450b..d7bf297 100644 --- a/src/share/vm/runtime/arguments.cpp +++ b/src/share/vm/runtime/arguments.cpp @@ -1792,6 +1792,15 @@ bool Arguments::check_gc_consistency() { status = false; } + // (tw) Must use serial GC + if (!UseSerialGC && i >= 1) { + jio_fprintf(defaultStream::error_stream(), + "Must use the serial GC in the Dynamic Code Evolution VM\n"); + status = false; + } else { + UseSerialGC = true; + } + return status; } diff --git a/src/share/vm/runtime/deoptimization.cpp b/src/share/vm/runtime/deoptimization.cpp index 2b767d4..bcea3be 100644 --- a/src/share/vm/runtime/deoptimization.cpp +++ b/src/share/vm/runtime/deoptimization.cpp @@ -599,6 +599,38 @@ JRT_LEAF(BasicType, Deoptimization::unpack_frames(JavaThread* thread, int exec_m // Cleanup thread deopt data cleanup_deopt_info(thread, array); + // (tw) Redefinition support: Check if we need to transfer method execution points to new versions + { + ResourceMark res_mark; + + // Verify that the just-unpacked frames match the interpreter's + // notions of expression stack and locals + vframeArray* cur_array = thread->vframe_array_last(); + RegisterMap rm(thread, false); + rm.set_include_argument_oops(false); + for (int i = 0; i < cur_array->frames(); i++) { + vframeArrayElement* el = cur_array->element(i); + frame* frame = el->iframe(); + guarantee(frame->is_interpreted_frame(), "Wrong frame type"); + RegisterMap reg_map(thread); + vframe* vf = vframe::new_vframe(frame, ®_map, thread); + interpretedVFrame *iframe = (interpretedVFrame *)vf; + methodOop method = iframe->method(); + int bci = iframe->bci(); + method = method->newest_version(); + iframe->set_method(method, bci); + + methodOop forward_method = method->forward_method(); + if (forward_method != NULL && method->is_in_code_section(bci)) { + int new_bci = method->calculate_forward_bci(bci, forward_method); + if (TraceRedefineClasses >= 2) { + tty->print_cr("Transfering execution of %s to new method old_bci=%d new_bci=%d", forward_method->name()->as_C_string(), bci, new_bci); + } + iframe->set_method(forward_method, new_bci); + } + } + } + #ifndef PRODUCT if (VerifyStack) { ResourceMark res_mark; diff --git a/src/share/vm/runtime/frame.cpp b/src/share/vm/runtime/frame.cpp index 32d02d6..bbb6c47 100644 --- a/src/share/vm/runtime/frame.cpp +++ b/src/share/vm/runtime/frame.cpp @@ -407,6 +407,12 @@ void frame::interpreter_frame_set_method(methodOop method) { *interpreter_frame_method_addr() = method; } +// (tw) Sets constant pool cache oop +void frame::interpreter_frame_set_cache(constantPoolCacheOop cp) { + assert(is_interpreted_frame(), "interpreted frame expected"); + *interpreter_frame_cache_addr() = cp; +} + void frame::interpreter_frame_set_bcx(intptr_t bcx) { assert(is_interpreted_frame(), "Not an interpreted frame"); if (ProfileInterpreter) { @@ -422,19 +428,27 @@ void frame::interpreter_frame_set_bcx(intptr_t bcx) { // The bcx was just converted from bci to 
bcp. // Convert the mdx in parallel. methodDataOop mdo = interpreter_frame_method()->method_data(); - assert(mdo != NULL, ""); - int mdi = mdx - 1; // We distinguish valid mdi from zero by adding one. - address mdp = mdo->di_to_dp(mdi); - interpreter_frame_set_mdx((intptr_t)mdp); + if (mdo == NULL) { + interpreter_frame_set_mdx(0); + } else { + assert(mdo != NULL, ""); + int mdi = mdx - 1; // We distinguish valid mdi from zero by adding one. + address mdp = mdo->di_to_dp(mdi); + interpreter_frame_set_mdx((intptr_t)mdp); + } } } else { if (is_now_bci) { // The bcx was just converted from bcp to bci. // Convert the mdx in parallel. methodDataOop mdo = interpreter_frame_method()->method_data(); - assert(mdo != NULL, ""); - int mdi = mdo->dp_to_di((address)mdx); - interpreter_frame_set_mdx((intptr_t)mdi + 1); // distinguish valid from 0. + if (mdo == NULL) { + interpreter_frame_set_mdx(0); + } else { + assert(mdo != NULL, ""); + int mdi = mdo->dp_to_di((address)mdx); + interpreter_frame_set_mdx((intptr_t)mdi + 1); // distinguish valid from 0. + } } } } diff --git a/src/share/vm/runtime/frame.hpp b/src/share/vm/runtime/frame.hpp index 9c7bb72..04a6595 100644 --- a/src/share/vm/runtime/frame.hpp +++ b/src/share/vm/runtime/frame.hpp @@ -346,6 +346,7 @@ class frame VALUE_OBJ_CLASS_SPEC { // Method & constant pool cache methodOop interpreter_frame_method() const; void interpreter_frame_set_method(methodOop method); + void interpreter_frame_set_cache(constantPoolCacheOop method); methodOop* interpreter_frame_method_addr() const; constantPoolCacheOop* interpreter_frame_cache_addr() const; #ifdef PPC diff --git a/src/share/vm/runtime/globals.hpp b/src/share/vm/runtime/globals.hpp index 8df7220..634c589 100644 --- a/src/share/vm/runtime/globals.hpp +++ b/src/share/vm/runtime/globals.hpp @@ -1227,9 +1227,23 @@ class CommandLineFlags { product(bool, StressLdcRewrite, false, \ "Force ldc -> ldc_w rewrite during RedefineClasses") \ \ + product(bool, UseMethodForwardPoints, false, \ + "Use method forward points") \ + \ + product(intx, MethodForwardPointsMaxLocals, 300, \ + "Maximum number of locals in forwarding method") \ + \ + product(intx, MethodForwardPointsMaxStack, 300, \ + "Maximum number of stack slots in forwarding method") \ + \ product(intx, TraceRedefineClasses, 0, \ "Trace level for JVMTI RedefineClasses") \ \ + product(bool, TimeRedefineClasses, false, \ + "Measure timing for JVMTI RedefineClasses") \ + \ + product(bool, AllowAdvancedClassRedefinition, true, \ + "Allow advanced class redefinition beyond swapping method bodies")\ develop(bool, StressMethodComparator, false, \ "run the MethodComparator on all loaded methods") \ \ diff --git a/src/share/vm/runtime/interfaceSupport.hpp b/src/share/vm/runtime/interfaceSupport.hpp index 2875ee0..61fd8fe 100644 --- a/src/share/vm/runtime/interfaceSupport.hpp +++ b/src/share/vm/runtime/interfaceSupport.hpp @@ -296,7 +296,7 @@ class ThreadToNativeFromVM : public ThreadStateTransition { ThreadToNativeFromVM(JavaThread *thread) : ThreadStateTransition(thread) { // We are leaving the VM at this point and going directly to native code. // Block, if we are in the middle of a safepoint synchronization. - assert(!thread->owns_locks(), "must release all locks when leaving VM"); + assert(!thread->owns_locks_but_redefine_classes_lock(), "must release all locks when leaving VM"); thread->frame_anchor()->make_walkable(thread); trans_and_fence(_thread_in_vm, _thread_in_native); // Check for pending. async. exceptions or suspends. 
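The forwarding flags added to globals.hpp above are product flags, so they can be set on the command line; the deoptimization.cpp change above only emits its transfer trace when TraceRedefineClasses is at least 2. Below is a minimal sketch, under the assumption that forwarding is meant to be gated on these flags, of what such a guard could look like; the helper name forwarding_candidate is hypothetical and the actual policy lives elsewhere in the DCEVM sources.

// Illustrative sketch only (hypothetical helper, not part of the patch).
// Gates method forwarding on the new product flags declared in globals.hpp;
// assumes runtime/globals.hpp and oops/methodOop.hpp are included.
static bool forwarding_candidate(methodOop m) {
  return UseMethodForwardPoints                            // -XX:+UseMethodForwardPoints
      && m->max_locals() <= MethodForwardPointsMaxLocals   // -XX:MethodForwardPointsMaxLocals=<n>
      && m->max_stack()  <= MethodForwardPointsMaxStack;   // -XX:MethodForwardPointsMaxStack=<n>
}

For example, starting the VM with -XX:+UseMethodForwardPoints -XX:TraceRedefineClasses=2 enables forwarding and prints a line for each interpreter frame whose execution is transferred to a new method version.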
diff --git a/src/share/vm/runtime/javaCalls.cpp b/src/share/vm/runtime/javaCalls.cpp index edbba98..4a27925 100644 --- a/src/share/vm/runtime/javaCalls.cpp +++ b/src/share/vm/runtime/javaCalls.cpp @@ -60,7 +60,7 @@ JavaCallWrapper::JavaCallWrapper(methodHandle callee_method, Handle receiver, Ja bool clear_pending_exception = true; guarantee(thread->is_Java_thread(), "crucial check - the VM thread cannot and must not escape to Java code"); - assert(!thread->owns_locks(), "must release all locks when leaving VM"); + assert(!thread->owns_locks_but_redefine_classes_lock(), "must release all locks when leaving VM"); guarantee(!thread->is_Compiler_thread(), "cannot make java calls from the compiler"); _result = result; diff --git a/src/share/vm/runtime/jniHandles.cpp b/src/share/vm/runtime/jniHandles.cpp index 3cbcaca..30839d7 100644 --- a/src/share/vm/runtime/jniHandles.cpp +++ b/src/share/vm/runtime/jniHandles.cpp @@ -112,6 +112,10 @@ jobject JNIHandles::make_weak_global(Handle obj) { } jmethodID JNIHandles::make_jmethod_id(methodHandle mh) { + if (mh->newest_version() != mh()) { + methodHandle mh_new(Thread::current(), mh()->newest_version()); + return (jmethodID) make_weak_global(mh_new); + } return (jmethodID) make_weak_global(mh); } diff --git a/src/share/vm/runtime/mutex.cpp b/src/share/vm/runtime/mutex.cpp index 2095237..c541434 100644 --- a/src/share/vm/runtime/mutex.cpp +++ b/src/share/vm/runtime/mutex.cpp @@ -1227,7 +1227,7 @@ Monitor * Monitor::get_least_ranked_lock(Monitor * locks) { // in increasing rank order (modulo any native ranks) for (tmp = locks; tmp != NULL; tmp = tmp->next()) { if (tmp->next() != NULL) { - assert(tmp->rank() == Mutex::native || + assert(tmp->rank() == Mutex::native || tmp->rank() == Mutex::redefine_classes || tmp->rank() <= tmp->next()->rank(), "mutex rank anomaly?"); } } @@ -1247,7 +1247,7 @@ Monitor* Monitor::get_least_ranked_lock_besides_this(Monitor* locks) { // in increasing rank order (modulo any native ranks) for (tmp = locks; tmp != NULL; tmp = tmp->next()) { if (tmp->next() != NULL) { - assert(tmp->rank() == Mutex::native || + assert(tmp->rank() == Mutex::native || tmp->rank() == Mutex::redefine_classes || tmp->rank() <= tmp->next()->rank(), "mutex rank anomaly?"); } } @@ -1310,6 +1310,7 @@ void Monitor::set_owner_implementation(Thread *new_owner) { // already hold Terminator_lock - may happen because of periodic safepoints if (this->rank() != Mutex::native && this->rank() != Mutex::suspend_resume && + this->rank() != Mutex::redefine_classes && locks != NULL && locks->rank() <= this->rank() && !SafepointSynchronize::is_at_safepoint() && this != Interrupt_lock && diff --git a/src/share/vm/runtime/mutex.hpp b/src/share/vm/runtime/mutex.hpp index 7d2cd82..11eb32e 100644 --- a/src/share/vm/runtime/mutex.hpp +++ b/src/share/vm/runtime/mutex.hpp @@ -109,7 +109,8 @@ class Monitor : public CHeapObj { barrier = safepoint + 1, nonleaf = barrier + 1, max_nonleaf = nonleaf + 900, - native = max_nonleaf + 1 + native = max_nonleaf + 1, + redefine_classes = native + 1 }; // The WaitSet and EntryList linked lists are composed of ParkEvents. diff --git a/src/share/vm/runtime/mutexLocker.cpp b/src/share/vm/runtime/mutexLocker.cpp index a6b2106..758e87f 100644 --- a/src/share/vm/runtime/mutexLocker.cpp +++ b/src/share/vm/runtime/mutexLocker.cpp @@ -49,6 +49,7 @@ // Consider using GCC's __read_mostly. 
Mutex* Patching_lock = NULL; +Mutex* RedefineClasses_lock = NULL; Monitor* SystemDictionary_lock = NULL; Mutex* PackageTable_lock = NULL; Mutex* CompiledIC_lock = NULL; @@ -90,6 +91,7 @@ Mutex* Shared_SATB_Q_lock = NULL; Mutex* DirtyCardQ_FL_lock = NULL; Monitor* DirtyCardQ_CBL_mon = NULL; Mutex* Shared_DirtyCardQ_lock = NULL; +Monitor* RedefinitionSync_lock = NULL; Mutex* ParGCRareEvent_lock = NULL; Mutex* EvacFailureStack_lock = NULL; Mutex* DerivedPointerTableGC_lock = NULL; @@ -205,6 +207,7 @@ void mutex_init() { def(HotCardCache_lock , Mutex , special , true ); def(EvacFailureStack_lock , Mutex , nonleaf , true ); } + def(RedefinitionSync_lock , Monitor , leaf , false ); def(ParGCRareEvent_lock , Mutex , leaf , true ); def(DerivedPointerTableGC_lock , Mutex, leaf, true ); def(CodeCache_lock , Mutex , special, true ); @@ -279,6 +282,7 @@ void mutex_init() { def(Debug2_lock , Mutex , nonleaf+4, true ); def(Debug3_lock , Mutex , nonleaf+4, true ); def(CompileThread_lock , Monitor, nonleaf+5, false ); + def(RedefineClasses_lock , Mutex , nonleaf+7, false ); // for ensuring that class redefinition is not done in parallel def(JfrMsg_lock , Monitor, leaf, true); def(JfrBuffer_lock , Mutex, nonleaf+1, true); diff --git a/src/share/vm/runtime/mutexLocker.hpp b/src/share/vm/runtime/mutexLocker.hpp index 40008bb..72f8ce0 100644 --- a/src/share/vm/runtime/mutexLocker.hpp +++ b/src/share/vm/runtime/mutexLocker.hpp @@ -43,6 +43,8 @@ // Mutexes used in the VM. extern Mutex* Patching_lock; // a lock used to guard code patching of compiled code +extern Monitor* RedefinitionSync_lock; // a lock on synchronized class redefinition +extern Mutex* RedefineClasses_lock; // a lock on class redefinition extern Monitor* SystemDictionary_lock; // a lock on the system dictonary extern Mutex* PackageTable_lock; // a lock on the class loader package table extern Mutex* CompiledIC_lock; // a lock used to guard compiled IC patching and access diff --git a/src/share/vm/runtime/reflection.cpp b/src/share/vm/runtime/reflection.cpp index cd009ed..a53ad09 100644 --- a/src/share/vm/runtime/reflection.cpp +++ b/src/share/vm/runtime/reflection.cpp @@ -468,7 +468,8 @@ bool Reflection::verify_class_access(klassOop current_class, klassOop new_class, // sun/reflect/MagicAccessorImpl subclasses to succeed trivially. if ( JDK_Version::is_gte_jdk14x_version() && UseNewReflection - && Klass::cast(current_class)->is_subclass_of(SystemDictionary::reflect_MagicAccessorImpl_klass())) { + && (Klass::cast(current_class)->is_subclass_of(SystemDictionary::reflect_MagicAccessorImpl_klass()) || + Klass::cast(current_class)->is_subclass_of(SystemDictionary::reflect_MagicAccessorImpl_klass()->klass_part()->newest_version()))) { return true; } @@ -519,6 +520,12 @@ bool Reflection::verify_field_access(klassOop current_class, AccessFlags access, bool classloader_only, bool protected_restriction) { + + // (tw) Decide accessibility based on active version + if (current_class != NULL) { + current_class = current_class->klass_part()->active_version(); + } + // Verify that current_class can access a field of field_class, where that // field's access bits are "access". We assume that we've already verified // that current_class can access field_class. @@ -560,7 +567,8 @@ bool Reflection::verify_field_access(klassOop current_class, // sun/reflect/MagicAccessorImpl subclasses to succeed trivially. 
if ( JDK_Version::is_gte_jdk14x_version() && UseNewReflection - && Klass::cast(current_class)->is_subclass_of(SystemDictionary::reflect_MagicAccessorImpl_klass())) { + && (Klass::cast(current_class)->is_subclass_of(SystemDictionary::reflect_MagicAccessorImpl_klass()) || + Klass::cast(current_class)->is_subclass_of(SystemDictionary::reflect_MagicAccessorImpl_klass()->klass_part()->newest_version()))) { return true; } diff --git a/src/share/vm/runtime/sharedRuntime.cpp b/src/share/vm/runtime/sharedRuntime.cpp index 709d783..e0e19b1 100644 --- a/src/share/vm/runtime/sharedRuntime.cpp +++ b/src/share/vm/runtime/sharedRuntime.cpp @@ -1137,7 +1137,20 @@ methodHandle SharedRuntime::resolve_helper(JavaThread *thread, if (JvmtiExport::can_hotswap_or_post_breakpoint()) { int retry_count = 0; while (!HAS_PENDING_EXCEPTION && callee_method->is_old() && - callee_method->method_holder() != SystemDictionary::Object_klass()) { + callee_method->method_holder()->klass_part()->newest_version() != SystemDictionary::Object_klass()->klass_part()->newest_version()) { + + // DCEVM: If we are executing an old method, this is OK! + { + ResourceMark rm(thread); + RegisterMap cbl_map(thread, false); + frame caller_frame = thread->last_frame().sender(&cbl_map); + + CodeBlob* caller_cb = caller_frame.cb(); + guarantee(caller_cb != NULL && caller_cb->is_nmethod(), "must be called from nmethod"); + nmethod* caller_nm = caller_cb->as_nmethod_or_null(); + if (caller_nm->method()->is_old()) break; + } + // If has a pending exception then there is no need to re-try to // resolve this method. // If the method has been redefined, we need to try again. diff --git a/src/share/vm/runtime/thread.cpp b/src/share/vm/runtime/thread.cpp index ae28b65..7d0d809 100644 --- a/src/share/vm/runtime/thread.cpp +++ b/src/share/vm/runtime/thread.cpp @@ -216,6 +216,8 @@ Thread::Thread() { set_self_raw_id(0); set_lgrp_id(-1); + _redefine_classes_mutex = new Mutex(Mutex::redefine_classes, "redefine classes lock", false); + // allocated data structures set_osthread(NULL); set_resource_area(new (mtThread)ResourceArea()); @@ -249,6 +251,7 @@ Thread::Thread() { omFreeProvision = 32 ; omInUseList = NULL ; omInUseCount = 0 ; + _pretend_new_universe = false; #ifdef ASSERT _visited_for_critical_count = false; @@ -884,6 +887,15 @@ bool Thread::owns_locks_but_compiled_lock() const { return false; } +bool Thread::owns_locks_but_redefine_classes_lock() const { + for(Monitor *cur = _owned_locks; cur; cur = cur->next()) { + if (cur != RedefineClasses_lock && cur->rank() != Mutex::redefine_classes) { + return true; + } + } + return false; +} + #endif @@ -1637,7 +1649,7 @@ void JavaThread::run() { ThreadStateTransition::transition_and_fence(this, _thread_new, _thread_in_vm); assert(JavaThread::current() == this, "sanity check"); - assert(!Thread::current()->owns_locks(), "sanity check"); + assert(!Thread::current()->owns_locks_but_redefine_classes_lock(), "sanity check"); DTRACE_THREAD_PROBE(start, this); @@ -3193,7 +3205,7 @@ static void compiler_thread_entry(JavaThread* thread, TRAPS) { // Create a CompilerThread CompilerThread::CompilerThread(CompileQueue* queue, CompilerCounters* counters) -: JavaThread(&compiler_thread_entry) { +: JavaThread(&compiler_thread_entry), _should_bailout(false) { _env = NULL; _log = NULL; _task = NULL; @@ -3201,6 +3213,7 @@ CompilerThread::CompilerThread(CompileQueue* queue, CompilerCounters* counters) _counters = counters; _buffer_blob = NULL; _scanned_nmethod = NULL; + _compilation_mutex = new Mutex(Mutex::redefine_classes, 
"compilationMutex", false); #ifndef PRODUCT _ideal_graph_printer = NULL; @@ -3230,6 +3243,7 @@ int Threads::_number_of_threads = 0; int Threads::_number_of_non_daemon_threads = 0; int Threads::_return_code = 0; size_t JavaThread::_stack_size_at_create = 0; +bool Threads::_wait_at_instrumentation_entry = false; // All JavaThreads #define ALL_JAVA_THREADS(X) for (JavaThread* X = _thread_list; X; X = X->next()) diff --git a/src/share/vm/runtime/thread.hpp b/src/share/vm/runtime/thread.hpp index 774bd27..4ca4502 100644 --- a/src/share/vm/runtime/thread.hpp +++ b/src/share/vm/runtime/thread.hpp @@ -203,11 +203,14 @@ class Thread: public ThreadShadow { void enter_signal_handler() { _num_nested_signal++; } void leave_signal_handler() { _num_nested_signal--; } bool is_inside_signal_handler() const { return _num_nested_signal > 0; } + Mutex* redefine_classes_mutex() { return _redefine_classes_mutex; } private: // Debug tracing static void trace(const char* msg, const Thread* const thread) PRODUCT_RETURN; + Mutex* _redefine_classes_mutex; + // Active_handles points to a block of handles JNIHandleBlock* _active_handles; @@ -530,10 +533,15 @@ public: uintptr_t _self_raw_id; // used by get_thread (mutable) int _lgrp_id; + + bool _pretend_new_universe; + public: // Stack overflow support address stack_base() const { assert(_stack_base != NULL,"Sanity check"); return _stack_base; } + void set_pretend_new_universe(bool b) { if (_pretend_new_universe != b) { if (TraceRedefineClasses >= 5) tty->print_cr("Changing pretend universe to %d", (int)b); _pretend_new_universe = b; } } + bool pretend_new_universe() { return _pretend_new_universe; } void set_stack_base(address base) { _stack_base = base; } size_t stack_size() const { return _stack_size; } void set_stack_size(size_t size) { _stack_size = size; } @@ -570,6 +578,7 @@ public: void print_owned_locks() const { print_owned_locks_on(tty); } Monitor* owned_locks() const { return _owned_locks; } bool owns_locks() const { return owned_locks() != NULL; } + bool owns_locks_but_redefine_classes_lock() const; bool owns_locks_but_compiled_lock() const; // Deadlock detection @@ -1793,6 +1802,8 @@ class CompilerThread : public JavaThread { CompileTask* _task; CompileQueue* _queue; BufferBlob* _buffer_blob; + bool _should_bailout; + Mutex* _compilation_mutex; nmethod* _scanned_nmethod; // nmethod being scanned by the sweeper @@ -1802,12 +1813,16 @@ class CompilerThread : public JavaThread { CompilerThread(CompileQueue* queue, CompilerCounters* counters); + bool should_bailout() const { return _should_bailout; } + void set_should_bailout(bool b) { _should_bailout = false; } + bool is_Compiler_thread() const { return true; } // Hide this compiler thread from external view. bool is_hidden_from_external_view() const { return true; } CompileQueue* queue() { return _queue; } CompilerCounters* counters() { return _counters; } + Mutex *compilation_mutex() { return _compilation_mutex; } // Get/set the thread's compilation environment. 
ciEnv* env() { return _env; } @@ -1862,6 +1877,7 @@ class Threads: AllStatic { static int _number_of_threads; static int _number_of_non_daemon_threads; static int _return_code; + static bool _wait_at_instrumentation_entry; public: // Thread management @@ -1873,6 +1889,9 @@ class Threads: AllStatic { static JavaThread* first() { return _thread_list; } static void threads_do(ThreadClosure* tc); + static bool wait_at_instrumentation_entry() { return _wait_at_instrumentation_entry; } + static void set_wait_at_instrumentation_entry(bool b) { _wait_at_instrumentation_entry = b; } + // Initializes the vm and creates the vm thread static jint create_vm(JavaVMInitArgs* args, bool* canTryAgain); static void convert_vm_init_libraries_to_agents(); diff --git a/src/share/vm/runtime/vframe.cpp b/src/share/vm/runtime/vframe.cpp index 09e324f..d47ffef 100644 --- a/src/share/vm/runtime/vframe.cpp +++ b/src/share/vm/runtime/vframe.cpp @@ -253,6 +253,46 @@ methodOop interpretedVFrame::method() const { return fr().interpreter_frame_method(); } +// (tw) Sets interpreter frame method. +void interpretedVFrame::set_method(methodOop new_method, int new_bci) { + methodOop old_method = fr().interpreter_frame_method(); + int old_stack_size = fr().interpreter_frame_expression_stack_size(); + if (old_method == new_method) return; + u_char *old_bcp = bcp(); + int old_bci = bci(); + fr().interpreter_frame_set_method(new_method); + fr().interpreter_frame_set_cache(new_method->constants()->cache()); + u_char *new_bcp = new_method->code_base() + new_bci; + assert(new_method->bcp_from(new_bci) == new_bcp, ""); + + set_bcp(new_bcp); + + Bytecodes::Code code = Bytecodes::java_code_at(old_method, old_bcp); + assert(Bytecodes::java_code_at(new_method, new_bcp) == code, "must have same bytecode at this position"); + + switch (code) { + case Bytecodes::_invokevirtual : + case Bytecodes::_invokespecial : + case Bytecodes::_invokestatic : + case Bytecodes::_invokeinterface: { + int old_index = Bytes::get_native_u2(old_bcp+1); + int new_index = Bytes::get_native_u2(new_bcp+1); + new_method->constants()->cache()->entry_at(new_index)->copy_from(old_method->constants()->cache()->entry_at(old_index)); + break; + } + + case Bytecodes::_invokedynamic: { + int old_index = Bytes::get_native_u4(old_bcp+1); + int new_index = Bytes::get_native_u4(new_bcp+1); + new_method->constants()->cache()->secondary_entry_at(new_index)->copy_from(old_method->constants()->cache()->secondary_entry_at(old_index)); + break; + } + } + + int new_stack_size = fr().interpreter_frame_expression_stack_size(); + assert(new_method->validate_bci_from_bcx((intptr_t)new_bcp) == new_bci, ""); +} + StackValueCollection* interpretedVFrame::locals() const { int length = method()->max_locals(); diff --git a/src/share/vm/runtime/vframe.hpp b/src/share/vm/runtime/vframe.hpp index badfea5..edbc5c7 100644 --- a/src/share/vm/runtime/vframe.hpp +++ b/src/share/vm/runtime/vframe.hpp @@ -163,6 +163,7 @@ class interpretedVFrame: public javaVFrame { StackValueCollection* locals() const; StackValueCollection* expressions() const; GrowableArray* monitors() const; + void set_method(methodOop method, int new_bci); void set_locals(StackValueCollection* values) const; diff --git a/src/share/vm/runtime/vmThread.cpp b/src/share/vm/runtime/vmThread.cpp index 7643670..036ab64 100644 --- a/src/share/vm/runtime/vmThread.cpp +++ b/src/share/vm/runtime/vmThread.cpp @@ -691,6 +691,10 @@ void VMThread::execute(VM_Operation* op) { void VMThread::oops_do(OopClosure* f, CodeBlobClosure* cf) { 
Thread::oops_do(f, cf); _vm_queue->oops_do(f); + // (DCEVM) need to update oops in VM_RedefineClasses! + if (_cur_vm_operation != NULL) { + _cur_vm_operation->oops_do(f); + } } //------------------------------------------------------------------------------------------------------------------ diff --git a/src/share/vm/utilities/exceptions.cpp b/src/share/vm/utilities/exceptions.cpp index 03f254d..c9e0efc 100644 --- a/src/share/vm/utilities/exceptions.cpp +++ b/src/share/vm/utilities/exceptions.cpp @@ -254,6 +254,8 @@ Handle Exceptions::new_exception(Thread *thread, Symbol* name, assert(thread->is_Java_thread(), "can only be called by a Java thread"); assert(!thread->has_pending_exception(), "already has exception"); + bool old_pretend_value = Thread::current()->pretend_new_universe(); + Thread::current()->set_pretend_new_universe(false); Handle h_exception; // Resolve exception klass @@ -285,6 +287,7 @@ Handle Exceptions::new_exception(Thread *thread, Symbol* name, h_exception = Handle(thread, thread->pending_exception()); thread->clear_pending_exception(); } + Thread::current()->set_pretend_new_universe(old_pretend_value); return h_exception; } @@ -295,6 +298,8 @@ Handle Exceptions::new_exception(Thread *thread, Symbol* name, Symbol* signature, JavaCallArguments *args, Handle h_cause, Handle h_loader, Handle h_protection_domain) { + bool old_pretend_value = Thread::current()->pretend_new_universe(); + Thread::current()->set_pretend_new_universe(false); Handle h_exception = new_exception(thread, name, signature, args, h_loader, h_protection_domain); // Future: object initializer should take a cause argument @@ -317,6 +322,8 @@ Handle Exceptions::new_exception(Thread *thread, Symbol* name, h_exception = Handle(thread, thread->pending_exception()); thread->clear_pending_exception(); } + + Thread::current()->set_pretend_new_universe(old_pretend_value); return h_exception; } diff --git a/src/share/vm/utilities/growableArray.hpp b/src/share/vm/utilities/growableArray.hpp index 2a6d6b8..4b6927f 100644 --- a/src/share/vm/utilities/growableArray.hpp +++ b/src/share/vm/utilities/growableArray.hpp @@ -145,6 +145,33 @@ class GenericGrowableArray : public ResourceObj { assert(on_stack(), "fast ResourceObj path only"); return (void*)resource_allocate_bytes(thread, elementSize * _max); } + +}; + +template <class E, class F> class Pair : public StackObj +{ +private: + E _left; + F _right; + +public: + + Pair() { + + } + + Pair(E left, F right) { + this->_left = left; + this->_right = right; + } + + E left() { + return _left; + } + + F right() { + return _right; + } }; template<class E> class GrowableArray : public GenericGrowableArray {