author:    Ivan Dubrov <idubrov@guidewire.com>    2015-07-09 23:19:55 -0700
committer: Ivan Dubrov <idubrov@guidewire.com>    2015-07-10 10:50:54 -0700
commit:    3f9d18a4058fbe6958a6b7ec827af221e84f9792
tree:      e160fa2853c9af85b1fbcec945e7b6b32e75649b /hotspot/.hg/patches
parent:    4d67683d0d7a5af417efa8f0d4da7d4b34d61863
Support for Java 7u79 (branch: feature/jdk7u79-support)
Diffstat (limited to 'hotspot/.hg/patches'):
 -rw-r--r--  hotspot/.hg/patches/full-jdk7u79-b15-method-handles.patch (new)                                      |  328
 -rw-r--r--  hotspot/.hg/patches/full-jdk7u79-b15.patch (renamed from hotspot/.hg/patches/full-jdk7u79-b02.patch) | 4922
 -rw-r--r--  hotspot/.hg/patches/series                                                                           |    5
 3 files changed, 2791 insertions, 2464 deletions
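
This commit moves the DCEVM HotSpot patch queue from the b02 to the b15 build of JDK 7u79 and adds a new patch, full-jdk7u79-b15-method-handles.patch, which keeps java.lang.invoke objects valid across enhanced class redefinition: while VM_RedefineClasses walks the heap, cached MemberName targets are re-resolved against the newest class version, and the field offsets cached in DirectMethodHandle$StaticAccessor / DirectMethodHandle$Accessor are refreshed (handles whose target no longer exists are cleared). A short illustrative sketch of the user-visible scenario follows the new patch below.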
diff --git a/hotspot/.hg/patches/full-jdk7u79-b15-method-handles.patch b/hotspot/.hg/patches/full-jdk7u79-b15-method-handles.patch
new file mode 100644
index 00000000..41439d75
--- /dev/null
+++ b/hotspot/.hg/patches/full-jdk7u79-b15-method-handles.patch
@@ -0,0 +1,328 @@
+# HG changeset patch
+# Parent 2ffb90b422e20ff948d25b96bfb9be923c130734
+
+diff -r 2ffb90b422e2 src/share/vm/classfile/javaClasses.cpp
+--- a/src/share/vm/classfile/javaClasses.cpp Fri Jul 10 10:19:24 2015 -0700
++++ b/src/share/vm/classfile/javaClasses.cpp Fri Jul 10 10:19:37 2015 -0700
+@@ -2409,6 +2409,52 @@
+ }
+ }
+
++// Support for java_lang_invoke_DirectMethodHandle$StaticAccessor
++
++int java_lang_invoke_DirectMethodHandle_StaticAccessor::_static_offset_offset;
++
++long java_lang_invoke_DirectMethodHandle_StaticAccessor::static_offset(oop dmh) {
++ assert(_static_offset_offset != 0, "");
++ return dmh->long_field(_static_offset_offset);
++}
++
++void java_lang_invoke_DirectMethodHandle_StaticAccessor::set_static_offset(oop dmh, long static_offset) {
++ assert(_static_offset_offset != 0, "");
++ dmh->long_field_put(_static_offset_offset, static_offset);
++}
++
++
++void java_lang_invoke_DirectMethodHandle_StaticAccessor::compute_offsets() {
++ klassOop klass_oop = SystemDictionary::DirectMethodHandle_StaticAccessor_klass();
++ if (klass_oop != NULL && EnableInvokeDynamic) {
++ compute_offset(_static_offset_offset, klass_oop, vmSymbols::static_offset_name(), vmSymbols::long_signature());
++ }
++}
++
++// Support for java_lang_invoke_DirectMethodHandle$Accessor
++
++int java_lang_invoke_DirectMethodHandle_Accessor::_field_offset_offset;
++
++int java_lang_invoke_DirectMethodHandle_Accessor::field_offset(oop dmh) {
++ assert(_field_offset_offset != 0, "");
++ return dmh->int_field(_field_offset_offset);
++}
++
++void java_lang_invoke_DirectMethodHandle_Accessor::set_field_offset(oop dmh, int field_offset) {
++ assert(_field_offset_offset != 0, "");
++ dmh->int_field_put(_field_offset_offset, field_offset);
++}
++
++
++void java_lang_invoke_DirectMethodHandle_Accessor::compute_offsets() {
++ klassOop klass_oop = SystemDictionary::DirectMethodHandle_Accessor_klass();
++ if (klass_oop != NULL && EnableInvokeDynamic) {
++ compute_offset(_field_offset_offset, klass_oop, vmSymbols::field_offset_name(), vmSymbols::int_signature());
++ }
++}
++
++
++
+ // Support for java_lang_invoke_MethodHandle
+
+ int java_lang_invoke_MethodHandle::_type_offset;
+@@ -3028,6 +3074,8 @@
+ if (EnableInvokeDynamic) {
+ java_lang_invoke_MethodHandle::compute_offsets();
+ java_lang_invoke_DirectMethodHandle::compute_offsets();
++ java_lang_invoke_DirectMethodHandle_StaticAccessor::compute_offsets();
++ java_lang_invoke_DirectMethodHandle_Accessor::compute_offsets();
+ java_lang_invoke_MemberName::compute_offsets();
+ java_lang_invoke_LambdaForm::compute_offsets();
+ java_lang_invoke_MethodType::compute_offsets();
+diff -r 2ffb90b422e2 src/share/vm/classfile/javaClasses.hpp
+--- a/src/share/vm/classfile/javaClasses.hpp Fri Jul 10 10:19:24 2015 -0700
++++ b/src/share/vm/classfile/javaClasses.hpp Fri Jul 10 10:19:37 2015 -0700
+@@ -940,6 +940,54 @@
+ static int member_offset_in_bytes() { return _member_offset; }
+ };
+
++// Interface to java.lang.invoke.DirectMethodHandle$StaticAccessor objects
++
++class java_lang_invoke_DirectMethodHandle_StaticAccessor: AllStatic {
++ friend class JavaClasses;
++
++ private:
++ static int _static_offset_offset; // offset to static field
++
++ static void compute_offsets();
++
++ public:
++ // Accessors
++ static long static_offset(oop dmh);
++ static void set_static_offset(oop dmh, long value);
++
++ // Testers
++ static bool is_subclass(klassOop klass) {
++ return Klass::cast(klass)->is_subclass_of(SystemDictionary::DirectMethodHandle_StaticAccessor_klass());
++ }
++ static bool is_instance(oop obj) {
++ return obj != NULL && is_subclass(obj->klass());
++ }
++};
++
++// Interface to java.lang.invoke.DirectMethodHandle$Accessor objects
++
++class java_lang_invoke_DirectMethodHandle_Accessor: AllStatic {
++ friend class JavaClasses;
++
++ private:
++ static int _field_offset_offset; // offset to field
++
++ static void compute_offsets();
++
++ public:
++ // Accessors
++ static int field_offset(oop dmh);
++ static void set_field_offset(oop dmh, int value);
++
++ // Testers
++ static bool is_subclass(klassOop klass) {
++ return Klass::cast(klass)->is_subclass_of(SystemDictionary::DirectMethodHandle_Accessor_klass());
++ }
++ static bool is_instance(oop obj) {
++ return obj != NULL && is_subclass(obj->klass());
++ }
++};
++
+ // Interface to java.lang.invoke.LambdaForm objects
+ // (These are a private interface for managing adapter code generation.)
+
+diff -r 2ffb90b422e2 src/share/vm/classfile/systemDictionary.hpp
+--- a/src/share/vm/classfile/systemDictionary.hpp Fri Jul 10 10:19:24 2015 -0700
++++ b/src/share/vm/classfile/systemDictionary.hpp Fri Jul 10 10:19:37 2015 -0700
+@@ -148,6 +148,8 @@
+ do_klass(reflect_UnsafeStaticFieldAccessorImpl_klass, sun_reflect_UnsafeStaticFieldAccessorImpl, Opt_Only_JDK15 ) \
+ \
+ /* support for dynamic typing; it's OK if these are NULL in earlier JDKs */ \
++ do_klass(DirectMethodHandle_StaticAccessor_klass, java_lang_invoke_DirectMethodHandle_StaticAccessor, Opt ) \
++ do_klass(DirectMethodHandle_Accessor_klass, java_lang_invoke_DirectMethodHandle_Accessor, Opt ) \
+ do_klass(MethodHandle_klass, java_lang_invoke_MethodHandle, Pre_JSR292 ) \
+ do_klass(MemberName_klass, java_lang_invoke_MemberName, Pre_JSR292 ) \
+ do_klass(MethodHandleNatives_klass, java_lang_invoke_MethodHandleNatives, Pre_JSR292 ) \
+diff -r 2ffb90b422e2 src/share/vm/classfile/vmSymbols.hpp
+--- a/src/share/vm/classfile/vmSymbols.hpp Fri Jul 10 10:19:24 2015 -0700
++++ b/src/share/vm/classfile/vmSymbols.hpp Fri Jul 10 10:19:37 2015 -0700
+@@ -248,6 +248,8 @@
+ /* Support for JSR 292 & invokedynamic (JDK 1.7 and above) */ \
+ template(java_lang_invoke_CallSite, "java/lang/invoke/CallSite") \
+ template(java_lang_invoke_ConstantCallSite, "java/lang/invoke/ConstantCallSite") \
++ template(java_lang_invoke_DirectMethodHandle_StaticAccessor, "java/lang/invoke/DirectMethodHandle$StaticAccessor") \
++ template(java_lang_invoke_DirectMethodHandle_Accessor, "java/lang/invoke/DirectMethodHandle$Accessor") \
+ template(java_lang_invoke_DirectMethodHandle, "java/lang/invoke/DirectMethodHandle") \
+ template(java_lang_invoke_MutableCallSite, "java/lang/invoke/MutableCallSite") \
+ template(java_lang_invoke_VolatileCallSite, "java/lang/invoke/VolatileCallSite") \
+@@ -485,6 +487,10 @@
+ template(int_StringBuffer_signature, "(I)Ljava/lang/StringBuffer;") \
+ template(char_StringBuffer_signature, "(C)Ljava/lang/StringBuffer;") \
+ template(int_String_signature, "(I)Ljava/lang/String;") \
++ template(static_offset_name, "staticOffset") \
++ template(static_base_name, "staticBase") \
++ template(field_offset_name, "fieldOffset") \
++ template(field_type_name, "fieldType") \
+ /* signature symbols needed by intrinsics */ \
+ VM_INTRINSICS_DO(VM_INTRINSIC_IGNORE, VM_SYMBOL_IGNORE, VM_SYMBOL_IGNORE, template, VM_ALIAS_IGNORE) \
+ \
+diff -r 2ffb90b422e2 src/share/vm/prims/jvmtiRedefineClasses.cpp
+--- a/src/share/vm/prims/jvmtiRedefineClasses.cpp Fri Jul 10 10:19:24 2015 -0700
++++ b/src/share/vm/prims/jvmtiRedefineClasses.cpp Fri Jul 10 10:19:37 2015 -0700
+@@ -2110,6 +2110,120 @@
+
+ }
+
++// import java_lang_invoke_MemberName.*
++enum {
++ REFERENCE_KIND_SHIFT = java_lang_invoke_MemberName::MN_REFERENCE_KIND_SHIFT,
++ REFERENCE_KIND_MASK = java_lang_invoke_MemberName::MN_REFERENCE_KIND_MASK,
++};
++
++static oop field_name_or_null(Symbol* s) {
++ if (s == NULL) return NULL;
++ return StringTable::lookup(s);
++}
++
++static oop object_java_mirror() {
++ return Klass::cast(SystemDictionary::Object_klass())->java_mirror();
++}
++
++static oop field_signature_type_or_null(Symbol* s) {
++ if (s == NULL) return NULL;
++ BasicType bt = FieldType::basic_type(s);
++ if (is_java_primitive(bt)) {
++ assert(s->utf8_length() == 1, "");
++ return java_lang_Class::primitive_mirror(bt);
++ }
++ // Here are some more short cuts for common types.
++ // They are optional, since reference types can be resolved lazily.
++ if (bt == T_OBJECT) {
++ if (s == vmSymbols::object_signature()) {
++ return object_java_mirror();
++ } else if (s == vmSymbols::class_signature()) {
++ return Klass::cast(SystemDictionary::Class_klass())->java_mirror();
++ } else if (s == vmSymbols::string_signature()) {
++ return Klass::cast(SystemDictionary::String_klass())->java_mirror();
++ }
++ }
++ return NULL;
++}
++
++bool update_member_name(oop obj) {
++ int flags = java_lang_invoke_MemberName::flags(obj);
++ int ref_kind = (flags >> REFERENCE_KIND_SHIFT) & REFERENCE_KIND_MASK;
++ if (MethodHandles::ref_kind_is_method(ref_kind)) {
++ methodOop m = (methodOop) java_lang_invoke_MemberName::vmtarget(obj);
++ if (m != NULL && !instanceKlass::cast(m->method_holder())->is_newest_version()) {
++ // Let's try to re-resolve method
++ KlassHandle newest = instanceKlass::cast(m->method_holder())->newest_version();
++ methodOop new_method = instanceKlass::cast(newest())->find_method(m->name(), m->signature());
++
++ // Note: we might set NULL at this point, which should force AbstractMethodError at runtime
++ bool do_dispatch = (ref_kind != JVM_REF_invokeSpecial);
++ MethodHandles::init_method_MemberName(obj, new_method, do_dispatch, newest);
++ }
++ } else if (MethodHandles::ref_kind_is_field(ref_kind)) {
++ klassOop k = (klassOop) java_lang_invoke_MemberName::vmtarget(obj);
++ if (k == NULL) {
++ return false; // Was cleared before, this MemberName is invalid.
++ }
++
++ if (k != NULL && !Klass::cast(k)->is_newest_version()) {
++ // Let's try to re-resolve field
++ fieldDescriptor fd;
++ int offset = java_lang_invoke_MemberName::vmindex(obj);
++ bool is_static = MethodHandles::ref_kind_is_static(ref_kind);
++ instanceKlass *ik = instanceKlass::cast(k);
++ if (ik->find_local_field_from_offset(offset, is_static, &fd)) {
++ KlassHandle newest = Klass::cast(k)->newest_version();
++ fieldDescriptor fd_new;
++ if (instanceKlass::cast(newest())->find_local_field(fd.name(), fd.signature(), &fd_new)) {
++ bool is_setter = MethodHandles::ref_kind_is_setter(ref_kind);
++ oop type = field_signature_type_or_null(fd_new.signature());
++ oop name = field_name_or_null(fd_new.name());
++ MethodHandles::init_field_MemberName(obj, newest, fd_new.access_flags(), type, name, fd_new.offset(), is_setter);
++ } else {
++ // A matching field was not found in the new version; there is not much we can do here.
++ // The JVM will crash once the faulty MH is invoked.
++ // However, to avoid that, all DMHs using this faulty MemberName are cleared (set to NULL).
++ // Eventually, we probably want to replace them with something more meaningful,
++ // like an instance throwing NoSuchFieldError, or a DMH that resorts to dynamic
++ // field resolution (with the possibility of type conversion).
++ java_lang_invoke_MemberName::set_vmtarget(obj, NULL);
++ java_lang_invoke_MemberName::set_vmindex(obj, 0);
++ return false;
++ }
++ }
++ }
++ }
++ return true;
++}
++
++bool update_direct_method_handle(oop obj) {
++ // Always update member name first.
++ oop mem_name = java_lang_invoke_DirectMethodHandle::member(obj);
++ if (!update_member_name(mem_name)) {
++ return false;
++ }
++
++ // Here we rely on the DirectMethodHandle implementation:
++ // the current implementation caches the field offset in $StaticAccessor/$Accessor.
++ int flags = java_lang_invoke_MemberName::flags(mem_name);
++ int ref_kind = (flags >> REFERENCE_KIND_SHIFT) & REFERENCE_KIND_MASK;
++ if (MethodHandles::ref_kind_is_field(ref_kind)) {
++ // Note: we don't care about the staticBase field (which is a java.lang.Class);
++ // it should be processed during the normal object update.
++ // Update the offset in the StaticAccessor/Accessor.
++ int offset = java_lang_invoke_MemberName::vmindex(mem_name);
++ if (offset != 0) { // an index of 0 means the field no longer exists
++ if (java_lang_invoke_DirectMethodHandle_StaticAccessor::is_instance(obj)) {
++ java_lang_invoke_DirectMethodHandle_StaticAccessor::set_static_offset(obj, offset);
++ } else if (java_lang_invoke_DirectMethodHandle_Accessor::is_instance(obj)) {
++ java_lang_invoke_DirectMethodHandle_Accessor::set_field_offset(obj, offset);
++ }
++ }
++ }
++ return true;
++}
++
+ template <class T> void VM_RedefineClasses::do_oop_work(T* p) {
+ T heap_oop = oopDesc::load_heap_oop(p);
+ if (!oopDesc::is_null(heap_oop)) {
+@@ -2148,6 +2262,18 @@
+ }
+ }
+ }
++ } else {
++ // JSR 292 support: update java.lang.invoke.MemberName instances
++ if (java_lang_invoke_MemberName::is_instance(obj)) {
++ update_member_name(obj);
++ } else if (java_lang_invoke_DirectMethodHandle::is_instance(obj)) {
++ if (!update_direct_method_handle(obj)) {
++ // DMH is no longer valid, replace it with null reference.
++ // See note above. We probably want to replace this with something more meaningful.
++ oopDesc::encode_store_heap_oop_not_null(p, NULL);
++ //S::oop_store(p, NULL);
++ }
++ }
+ }
+ }
+ }
+@@ -2822,6 +2948,11 @@
+ // TODO:
+ transfer_old_native_function_registrations(the_old_class);
+
++ // Transfer the MemberNameTable from the old class version to the new one
++ MemberNameTable* mnt = the_old_class->member_names();
++ assert(the_new_class->member_names() == NULL, "");
++ the_new_class->set_member_names(mnt);
++ the_old_class->set_member_names(NULL);
+
+ #ifdef ASSERT
+
+diff -r 2ffb90b422e2 src/share/vm/runtime/mutexLocker.cpp
+--- a/src/share/vm/runtime/mutexLocker.cpp Fri Jul 10 10:19:24 2015 -0700
++++ b/src/share/vm/runtime/mutexLocker.cpp Fri Jul 10 10:19:37 2015 -0700
+@@ -267,7 +267,7 @@
+ def(Heap_lock , Monitor, nonleaf+1, false);
+ def(JfieldIdCreation_lock , Mutex , nonleaf+1, true ); // jfieldID, Used in VM_Operation
+ def(JNICachedItableIndex_lock , Mutex , nonleaf+1, false); // Used to cache an itable index during JNI invoke
+- def(MemberNameTable_lock , Mutex , nonleaf+1, false); // Used to protect MemberNameTable
++ def(MemberNameTable_lock , Mutex , nonleaf+1, true); // Used to protect MemberNameTable
+
+ def(CompiledIC_lock , Mutex , nonleaf+2, false); // locks VtableStubs_lock, InlineCacheBuffer_lock
+ def(CompileTaskAlloc_lock , Mutex , nonleaf+2, true );
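
The final hunk of the new patch also flips the last argument of def() for MemberNameTable_lock from false to true, presumably the allow_vm_block flag, so that the lock may be taken while the member-name table is manipulated from inside the VM_RedefineClasses VM operation (see the member_names() transfer above).

For orientation, here is a minimal Java sketch of the user-visible scenario the method-handles patch addresses. It is an assumed illustration, not part of the commit: the class and field names are hypothetical, the hot-swap step happens externally (e.g. under DCEVM or a JVMTI agent), and only the standard java.lang.invoke API is used.

    import java.lang.invoke.MethodHandle;
    import java.lang.invoke.MethodHandles;

    public class DmhRedefineDemo {
        static class Target {
            int value = 42;
        }

        public static void main(String[] args) throws Throwable {
            // A field getter is typically backed by a DirectMethodHandle$Accessor,
            // which caches the resolved field offset (the fieldOffset slot the patch updates).
            MethodHandle getter = MethodHandles.lookup()
                    .findGetter(Target.class, "value", int.class);
            Target t = new Target();
            System.out.println((int) getter.invokeExact(t));   // prints 42

            // If Target is now redefined (e.g. hot-swapped under DCEVM) and its field
            // layout changes, the cached offset would become stale; the patch re-resolves
            // MemberName targets and offsets during redefinition so that existing handles
            // such as 'getter' keep working.
        }
    }

The remainder of the commit rebases the existing full DCEVM patch from the b02 to the b15 sources of JDK 7u79. Most hunks below simply convert git-style file headers (diff --git / index / --- / +++) into Mercurial "diff -r <rev>" headers with timestamps and adjust hunk offsets; only a handful change content, for example the constant-pool-cache update now skips secondary entries, and a few added blocks are reordered.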
diff --git a/hotspot/.hg/patches/full-jdk7u79-b02.patch b/hotspot/.hg/patches/full-jdk7u79-b15.patch
index 8b82eea3..19ff13ed 100644
--- a/hotspot/.hg/patches/full-jdk7u79-b02.patch
+++ b/hotspot/.hg/patches/full-jdk7u79-b15.patch
@@ -1,7 +1,9 @@
-diff --git a/make/bsd/makefiles/gcc.make b/make/bsd/makefiles/gcc.make
-index 3de1dea..8bf7f94 100644
---- a/make/bsd/makefiles/gcc.make
-+++ b/make/bsd/makefiles/gcc.make
+# HG changeset patch
+# Parent 882f6c762ac5352a0d94efcea46c8f9a917ceb08
+
+diff -r 882f6c762ac5 make/bsd/makefiles/gcc.make
+--- a/make/bsd/makefiles/gcc.make Thu Jul 09 23:10:04 2015 -0700
++++ b/make/bsd/makefiles/gcc.make Thu Jul 09 23:18:17 2015 -0700
@@ -117,7 +117,10 @@
CFLAGS += -fno-rtti
CFLAGS += -fno-exceptions
@@ -14,10 +16,9 @@ index 3de1dea..8bf7f94 100644
# version 4 and above support fvisibility=hidden (matches jni_x86.h file)
# except 4.1.2 gives pointless warnings that can't be disabled (afaik)
ifneq "$(shell expr \( $(CC_VER_MAJOR) \> 4 \) \| \( \( $(CC_VER_MAJOR) = 4 \) \& \( $(CC_VER_MINOR) \>= 3 \) \))" "0"
-diff --git a/src/cpu/x86/vm/templateTable_x86_32.cpp b/src/cpu/x86/vm/templateTable_x86_32.cpp
-index fc19edc..d2cddd3 100644
---- a/src/cpu/x86/vm/templateTable_x86_32.cpp
-+++ b/src/cpu/x86/vm/templateTable_x86_32.cpp
+diff -r 882f6c762ac5 src/cpu/x86/vm/templateTable_x86_32.cpp
+--- a/src/cpu/x86/vm/templateTable_x86_32.cpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/cpu/x86/vm/templateTable_x86_32.cpp Thu Jul 09 23:18:17 2015 -0700
@@ -2109,6 +2109,22 @@
// resolve first time through
address entry;
@@ -173,10 +174,9 @@ index fc19edc..d2cddd3 100644
// Get receiver klass into rdx - also a null check
__ restore_locals(); // restore rdi
__ null_check(rcx, oopDesc::klass_offset_in_bytes());
-diff --git a/src/cpu/x86/vm/templateTable_x86_64.cpp b/src/cpu/x86/vm/templateTable_x86_64.cpp
-index 932ee97..67bb710 100644
---- a/src/cpu/x86/vm/templateTable_x86_64.cpp
-+++ b/src/cpu/x86/vm/templateTable_x86_64.cpp
+diff -r 882f6c762ac5 src/cpu/x86/vm/templateTable_x86_64.cpp
+--- a/src/cpu/x86/vm/templateTable_x86_64.cpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/cpu/x86/vm/templateTable_x86_64.cpp Thu Jul 09 23:18:17 2015 -0700
@@ -2151,6 +2151,22 @@
// resolve first time through
address entry;
@@ -337,10 +337,9 @@ index 932ee97..67bb710 100644
// Get receiver klass into rdx - also a null check
__ restore_locals(); // restore r14
__ null_check(rcx, oopDesc::klass_offset_in_bytes());
-diff --git a/src/share/vm/c1/c1_Compilation.hpp b/src/share/vm/c1/c1_Compilation.hpp
-index 9a8ca61..196ab25 100644
---- a/src/share/vm/c1/c1_Compilation.hpp
-+++ b/src/share/vm/c1/c1_Compilation.hpp
+diff -r 882f6c762ac5 src/share/vm/c1/c1_Compilation.hpp
+--- a/src/share/vm/c1/c1_Compilation.hpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/c1/c1_Compilation.hpp Thu Jul 09 23:18:17 2015 -0700
@@ -242,8 +242,8 @@
#define BAILOUT(msg) { bailout(msg); return; }
#define BAILOUT_(msg, res) { bailout(msg); return res; }
@@ -352,10 +351,9 @@ index 9a8ca61..196ab25 100644
class InstructionMark: public StackObj {
-diff --git a/src/share/vm/ci/ciEnv.cpp b/src/share/vm/ci/ciEnv.cpp
-index e20db5d..57f37db 100644
---- a/src/share/vm/ci/ciEnv.cpp
-+++ b/src/share/vm/ci/ciEnv.cpp
+diff -r 882f6c762ac5 src/share/vm/ci/ciEnv.cpp
+--- a/src/share/vm/ci/ciEnv.cpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/ci/ciEnv.cpp Thu Jul 09 23:18:17 2015 -0700
@@ -1172,3 +1172,11 @@
// If memory is low, we stop compiling methods.
record_method_not_compilable("out of memory");
@@ -368,10 +366,9 @@ index e20db5d..57f37db 100644
+ _factory->cleanup_after_redefinition();
+ }
+}
-diff --git a/src/share/vm/ci/ciEnv.hpp b/src/share/vm/ci/ciEnv.hpp
-index 103e532..abe2e37 100644
---- a/src/share/vm/ci/ciEnv.hpp
-+++ b/src/share/vm/ci/ciEnv.hpp
+diff -r 882f6c762ac5 src/share/vm/ci/ciEnv.hpp
+--- a/src/share/vm/ci/ciEnv.hpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/ci/ciEnv.hpp Thu Jul 09 23:18:17 2015 -0700
@@ -417,6 +417,8 @@
void record_failure(const char* reason);
void record_method_not_compilable(const char* reason, bool all_tiers = true);
@@ -381,10 +378,9 @@ index 103e532..abe2e37 100644
};
#endif // SHARE_VM_CI_CIENV_HPP
-diff --git a/src/share/vm/ci/ciObjectFactory.cpp b/src/share/vm/ci/ciObjectFactory.cpp
-index e0ab96b..36efef4 100644
---- a/src/share/vm/ci/ciObjectFactory.cpp
-+++ b/src/share/vm/ci/ciObjectFactory.cpp
+diff -r 882f6c762ac5 src/share/vm/ci/ciObjectFactory.cpp
+--- a/src/share/vm/ci/ciObjectFactory.cpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/ci/ciObjectFactory.cpp Thu Jul 09 23:18:17 2015 -0700
@@ -296,6 +296,11 @@
// into the table. We need to recompute our index.
index = find(keyHandle(), _ci_objects);
@@ -448,10 +444,9 @@ index e0ab96b..36efef4 100644
+void ciObjectFactory::cleanup_after_redefinition() {
+ sort_ci_objects(_ci_objects);
+}
-diff --git a/src/share/vm/ci/ciObjectFactory.hpp b/src/share/vm/ci/ciObjectFactory.hpp
-index 26cc2c3..855a4ac 100644
---- a/src/share/vm/ci/ciObjectFactory.hpp
-+++ b/src/share/vm/ci/ciObjectFactory.hpp
+diff -r 882f6c762ac5 src/share/vm/ci/ciObjectFactory.hpp
+--- a/src/share/vm/ci/ciObjectFactory.hpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/ci/ciObjectFactory.hpp Thu Jul 09 23:18:17 2015 -0700
@@ -38,6 +38,7 @@
class ciObjectFactory : public ResourceObj {
friend class VMStructs;
@@ -472,10 +467,9 @@ index 26cc2c3..855a4ac 100644
};
#endif // SHARE_VM_CI_CIOBJECTFACTORY_HPP
-diff --git a/src/share/vm/classfile/classFileParser.cpp b/src/share/vm/classfile/classFileParser.cpp
-index 505bad1..2912a66 100644
---- a/src/share/vm/classfile/classFileParser.cpp
-+++ b/src/share/vm/classfile/classFileParser.cpp
+diff -r 882f6c762ac5 src/share/vm/classfile/classFileParser.cpp
+--- a/src/share/vm/classfile/classFileParser.cpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/classfile/classFileParser.cpp Thu Jul 09 23:18:17 2015 -0700
@@ -795,6 +795,7 @@
Handle class_loader,
Handle protection_domain,
@@ -557,15 +551,15 @@ index 505bad1..2912a66 100644
// Fill in code attribute information
m->set_max_stack(max_stack);
m->set_max_locals(max_locals);
-@@ -2228,6 +2267,8 @@
- * sure that the oops can pass verification when this field is set.
+@@ -2229,6 +2268,8 @@
*/
m->constMethod()->set_stackmap_data(stackmap_data());
-+
-+ m->constMethod()->set_code_section_table(code_section_table());
++ m->constMethod()->set_code_section_table(code_section_table());
++
// Copy byte codes
m->set_code(code_start);
+
@@ -2825,6 +2866,15 @@
"Invalid Deprecated classfile attribute length %u in class file %s",
attribute_length, CHECK);
@@ -803,17 +797,17 @@ index 505bad1..2912a66 100644
// reinitialize modifiers, using the InnerClasses attribute
int computed_modifiers = this_klass->compute_modifier_flags(CHECK_(nullHandle));
-@@ -3748,6 +3948,10 @@
-
+@@ -3749,6 +3949,10 @@
// Allocate mirror and initialize static fields
java_lang_Class::create_mirror(this_klass, CHECK_(nullHandle));
-+
+
+ if (rt == REF_OTHER) {
+ instanceRefKlass::update_nonstatic_oop_maps(ik);
+ }
-
++
ClassLoadingService::notify_class_loaded(instanceKlass::cast(this_klass()),
false /* not shared class */);
+
@@ -3891,7 +4095,7 @@
}
@@ -843,10 +837,9 @@ index 505bad1..2912a66 100644
k->set_is_cloneable();
}
}
-diff --git a/src/share/vm/classfile/classFileParser.hpp b/src/share/vm/classfile/classFileParser.hpp
-index a95a784..ac217fc 100644
---- a/src/share/vm/classfile/classFileParser.hpp
-+++ b/src/share/vm/classfile/classFileParser.hpp
+diff -r 882f6c762ac5 src/share/vm/classfile/classFileParser.hpp
+--- a/src/share/vm/classfile/classFileParser.hpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/classfile/classFileParser.hpp Thu Jul 09 23:18:17 2015 -0700
@@ -64,6 +64,9 @@
int _sde_length;
typeArrayHandle _inner_classes;
@@ -923,10 +916,9 @@ index a95a784..ac217fc 100644
// Verifier checks
static void check_super_class_access(instanceKlassHandle this_klass, TRAPS);
static void check_super_interface_access(instanceKlassHandle this_klass, TRAPS);
-diff --git a/src/share/vm/classfile/classLoader.cpp b/src/share/vm/classfile/classLoader.cpp
-index a2e61a4..450e19f 100644
---- a/src/share/vm/classfile/classLoader.cpp
-+++ b/src/share/vm/classfile/classLoader.cpp
+diff -r 882f6c762ac5 src/share/vm/classfile/classLoader.cpp
+--- a/src/share/vm/classfile/classLoader.cpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/classfile/classLoader.cpp Thu Jul 09 23:18:17 2015 -0700
@@ -915,6 +915,7 @@
instanceKlassHandle result = parser.parseClassFile(h_name,
class_loader,
@@ -935,10 +927,9 @@ index a2e61a4..450e19f 100644
parsed_name,
false,
CHECK_(h));
-diff --git a/src/share/vm/classfile/dictionary.cpp b/src/share/vm/classfile/dictionary.cpp
-index 78e76cc..ee21f3a 100644
---- a/src/share/vm/classfile/dictionary.cpp
-+++ b/src/share/vm/classfile/dictionary.cpp
+diff -r 882f6c762ac5 src/share/vm/classfile/dictionary.cpp
+--- a/src/share/vm/classfile/dictionary.cpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/classfile/dictionary.cpp Thu Jul 09 23:18:17 2015 -0700
@@ -326,6 +326,21 @@
}
}
@@ -1037,10 +1028,9 @@ index 78e76cc..ee21f3a 100644
}
-diff --git a/src/share/vm/classfile/dictionary.hpp b/src/share/vm/classfile/dictionary.hpp
-index bd33760..186d0eb 100644
---- a/src/share/vm/classfile/dictionary.hpp
-+++ b/src/share/vm/classfile/dictionary.hpp
+diff -r 882f6c762ac5 src/share/vm/classfile/dictionary.hpp
+--- a/src/share/vm/classfile/dictionary.hpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/classfile/dictionary.hpp Thu Jul 09 23:18:17 2015 -0700
@@ -73,6 +73,10 @@
void add_klass(Symbol* class_name, Handle class_loader,KlassHandle obj);
@@ -1068,10 +1058,9 @@ index bd33760..186d0eb 100644
klassOop find(int index, unsigned int hash, Symbol* name,
Handle loader, Handle protection_domain, TRAPS);
bool is_valid_protection_domain(int index, unsigned int hash,
-diff --git a/src/share/vm/classfile/javaClasses.cpp b/src/share/vm/classfile/javaClasses.cpp
-index 7dd5f1b..9c7d8eb 100644
---- a/src/share/vm/classfile/javaClasses.cpp
-+++ b/src/share/vm/classfile/javaClasses.cpp
+diff -r 882f6c762ac5 src/share/vm/classfile/javaClasses.cpp
+--- a/src/share/vm/classfile/javaClasses.cpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/classfile/javaClasses.cpp Thu Jul 09 23:18:17 2015 -0700
@@ -1798,7 +1798,7 @@
klassOop klass = SystemDictionary::reflect_Method_klass();
// This class is eagerly initialized during VM initialization, since we keep a refence
@@ -1081,10 +1070,9 @@ index 7dd5f1b..9c7d8eb 100644
return instanceKlass::cast(klass)->allocate_instance_handle(CHECK_NH);
}
-diff --git a/src/share/vm/classfile/javaClasses.hpp b/src/share/vm/classfile/javaClasses.hpp
-index 36d1cec..a6de98e 100644
---- a/src/share/vm/classfile/javaClasses.hpp
-+++ b/src/share/vm/classfile/javaClasses.hpp
+diff -r 882f6c762ac5 src/share/vm/classfile/javaClasses.hpp
+--- a/src/share/vm/classfile/javaClasses.hpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/classfile/javaClasses.hpp Thu Jul 09 23:18:17 2015 -0700
@@ -213,7 +213,6 @@
class java_lang_Class : AllStatic {
@@ -1093,10 +1081,9 @@ index 36d1cec..a6de98e 100644
private:
// The fake offsets are added by the class loader when java.lang.Class is loaded
-diff --git a/src/share/vm/classfile/loaderConstraints.cpp b/src/share/vm/classfile/loaderConstraints.cpp
-index 8650cd9..965cce2 100644
---- a/src/share/vm/classfile/loaderConstraints.cpp
-+++ b/src/share/vm/classfile/loaderConstraints.cpp
+diff -r 882f6c762ac5 src/share/vm/classfile/loaderConstraints.cpp
+--- a/src/share/vm/classfile/loaderConstraints.cpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/classfile/loaderConstraints.cpp Thu Jul 09 23:18:17 2015 -0700
@@ -449,7 +449,7 @@
if (k != NULL) {
// We found the class in the system dictionary, so we should
@@ -1106,10 +1093,9 @@ index 8650cd9..965cce2 100644
} else {
// If we don't find the class in the system dictionary, it
// has to be in the placeholders table.
-diff --git a/src/share/vm/classfile/loaderConstraints.hpp b/src/share/vm/classfile/loaderConstraints.hpp
-index d01b2c4..1ad80f7 100644
---- a/src/share/vm/classfile/loaderConstraints.hpp
-+++ b/src/share/vm/classfile/loaderConstraints.hpp
+diff -r 882f6c762ac5 src/share/vm/classfile/loaderConstraints.hpp
+--- a/src/share/vm/classfile/loaderConstraints.hpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/classfile/loaderConstraints.hpp Thu Jul 09 23:18:17 2015 -0700
@@ -106,7 +106,7 @@
klassOop klass() { return literal(); }
@@ -1119,10 +1105,9 @@ index d01b2c4..1ad80f7 100644
LoaderConstraintEntry* next() {
return (LoaderConstraintEntry*)HashtableEntry<klassOop, mtClass>::next();
-diff --git a/src/share/vm/classfile/systemDictionary.cpp b/src/share/vm/classfile/systemDictionary.cpp
-index 2fa221b..8e45161 100644
---- a/src/share/vm/classfile/systemDictionary.cpp
-+++ b/src/share/vm/classfile/systemDictionary.cpp
+diff -r 882f6c762ac5 src/share/vm/classfile/systemDictionary.cpp
+--- a/src/share/vm/classfile/systemDictionary.cpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/classfile/systemDictionary.cpp Thu Jul 09 23:18:17 2015 -0700
@@ -157,6 +157,7 @@
// can return a null klass
klass = handle_resolution_exception(class_name, class_loader, protection_domain, throw_error, k_h, THREAD);
@@ -1179,7 +1164,7 @@ index 2fa221b..8e45161 100644
const char* pkg = "java/";
if (!HAS_PENDING_EXCEPTION &&
-@@ -1087,11 +1102,16 @@
+@@ -1087,13 +1102,18 @@
// Add class just loaded
// If a class loader supports parallel classloading handle parallel define requests
// find_or_define_instance_class may return a different instanceKlass
@@ -1191,13 +1176,15 @@ index 2fa221b..8e45161 100644
- define_instance_class(k, THREAD);
+ define_instance_class(k, old_class, THREAD);
}
-+ }
-+
-+ if (redefine_classes_locked) {
-+ Thread::current()->redefine_classes_mutex()->unlock();
}
++ if (redefine_classes_locked) {
++ Thread::current()->redefine_classes_mutex()->unlock();
++ }
++
// If parsing the class file or define_instance_class failed, we
+ // need to remove the placeholder added on our behalf. But we
+ // must make sure parsed_name is valid first (it won't be if we had
@@ -1122,7 +1142,7 @@
MutexLocker mu(SystemDictionary_lock, THREAD);
@@ -1281,12 +1268,24 @@ index 2fa221b..8e45161 100644
// ----------------------------------------------------------------------------
// GC support
-@@ -1702,6 +1745,24 @@
+@@ -1702,7 +1745,8 @@
}
+-void SystemDictionary::preloaded_oops_do(OopClosure* f) {
+// (tw) Iterate over all pre-loaded classes in the dictionary.
+void SystemDictionary::preloaded_classes_do(OopClosure *f) {
+ for (int k = (int)FIRST_WKID; k < (int)WKID_LIMIT; k++) {
+ f->do_oop((oop*) &_well_known_klasses[k]);
+ }
+@@ -1716,6 +1760,23 @@
+ }
+ }
+
++ // TODO: Check if we need to call FilterFieldsMap
++}
++
++void SystemDictionary::preloaded_oops_do(OopClosure* f) {
+ for (int k = (int)FIRST_WKID; k < (int)WKID_LIMIT; k++) {
+ f->do_oop((oop*) &_well_known_klasses[k]);
+ }
@@ -1300,24 +1299,21 @@ index 2fa221b..8e45161 100644
+ }
+ }
+
-+ // TODO: Check if we need to call FilterFieldsMap
-+}
-+
- void SystemDictionary::preloaded_oops_do(OopClosure* f) {
- for (int k = (int)FIRST_WKID; k < (int)WKID_LIMIT; k++) {
- f->do_oop((oop*) &_well_known_klasses[k]);
-@@ -1732,6 +1793,11 @@
- // Don't iterate over placeholders
- void SystemDictionary::classes_do(void f(klassOop)) {
+ // The basic type mirrors would have already been processed in
+ // Universe::oops_do(), via a call to shared_oops_do(), so should
+ // not be processed again.
+@@ -1734,6 +1795,11 @@
dictionary()->classes_do(f);
-+}
-+
+ }
+
+// (tw) Iterate over all classes in the dictionary.
+void SystemDictionary::classes_do(ObjectClosure *closure) {
+ dictionary()->classes_do(closure);
- }
-
++}
++
// Added for initialize_itable_for_klass
+ // Just the classes from defining class loaders
+ // Don't iterate over placeholders
@@ -1870,7 +1936,9 @@
// Preload ref klasses and set reference types
@@ -1342,10 +1338,9 @@ index 2fa221b..8e45161 100644
linkage_error = "loader (instance of %s): attempted duplicate class "
"definition for name: \"%s\"";
} else {
-diff --git a/src/share/vm/classfile/systemDictionary.hpp b/src/share/vm/classfile/systemDictionary.hpp
-index 85474c5..2dabd95 100644
---- a/src/share/vm/classfile/systemDictionary.hpp
-+++ b/src/share/vm/classfile/systemDictionary.hpp
+diff -r 882f6c762ac5 src/share/vm/classfile/systemDictionary.hpp
+--- a/src/share/vm/classfile/systemDictionary.hpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/classfile/systemDictionary.hpp Thu Jul 09 23:18:17 2015 -0700
@@ -270,7 +270,7 @@
// Resolve from stream (called by jni_DefineClass and JVM_DefineClass)
static klassOop resolve_from_stream(Symbol* class_name, Handle class_loader,
@@ -1364,15 +1359,15 @@ index 85474c5..2dabd95 100644
// Added for initialize_itable_for_klass to handle exceptions
static void classes_do(void f(klassOop, TRAPS), TRAPS);
// All classes, and their class loaders
-@@ -416,6 +418,8 @@
- int limit = (int)end_id + 1;
+@@ -417,6 +419,8 @@
initialize_wk_klasses_until((WKID) limit, start_id, THREAD);
}
-+
-+ static void rollback_redefinition();
++ static void rollback_redefinition();
++
public:
#define WK_KLASS_DECLARE(name, symbol, option) \
+ static klassOop name() { return check_klass_##option(_well_known_klasses[WK_KLASS_ENUM_NAME(name)]); }
@@ -598,11 +602,11 @@
// after waiting, but before reentering SystemDictionary_lock
// to preserve lock order semantics.
@@ -1405,10 +1400,9 @@ index 85474c5..2dabd95 100644
// We pass in the hashtable index so we can calculate it outside of
// the SystemDictionary_lock.
-diff --git a/src/share/vm/classfile/verifier.cpp b/src/share/vm/classfile/verifier.cpp
-index 49c4b2c..74d7614 100644
---- a/src/share/vm/classfile/verifier.cpp
-+++ b/src/share/vm/classfile/verifier.cpp
+diff -r 882f6c762ac5 src/share/vm/classfile/verifier.cpp
+--- a/src/share/vm/classfile/verifier.cpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/classfile/verifier.cpp Thu Jul 09 23:18:17 2015 -0700
@@ -106,7 +106,7 @@
return !need_verify;
}
@@ -1493,10 +1487,9 @@ index 49c4b2c..74d7614 100644
Symbol* ref_class_name =
cp->klass_name_at(cp->klass_ref_index_at(index));
// See the comments in verify_field_instructions() for
-diff --git a/src/share/vm/classfile/verifier.hpp b/src/share/vm/classfile/verifier.hpp
-index 92c4fd2..5e8e0da 100644
---- a/src/share/vm/classfile/verifier.hpp
-+++ b/src/share/vm/classfile/verifier.hpp
+diff -r 882f6c762ac5 src/share/vm/classfile/verifier.hpp
+--- a/src/share/vm/classfile/verifier.hpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/classfile/verifier.hpp Thu Jul 09 23:18:17 2015 -0700
@@ -48,7 +48,7 @@
* Otherwise, no exception is thrown and the return indicates the
* error.
@@ -1525,10 +1518,9 @@ index 92c4fd2..5e8e0da 100644
instanceKlassHandle _klass; // the class being verified
methodHandle _method; // current method being verified
VerificationType _this_type; // the verification type of the current class
-diff --git a/src/share/vm/classfile/vmSymbols.hpp b/src/share/vm/classfile/vmSymbols.hpp
-index 51ceb15..7ef1e1f 100644
---- a/src/share/vm/classfile/vmSymbols.hpp
-+++ b/src/share/vm/classfile/vmSymbols.hpp
+diff -r 882f6c762ac5 src/share/vm/classfile/vmSymbols.hpp
+--- a/src/share/vm/classfile/vmSymbols.hpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/classfile/vmSymbols.hpp Thu Jul 09 23:18:17 2015 -0700
@@ -139,6 +139,10 @@
template(tag_annotation_default, "AnnotationDefault") \
template(tag_enclosing_method, "EnclosingMethod") \
@@ -1551,10 +1543,9 @@ index 51ceb15..7ef1e1f 100644
/* non-intrinsic name/signature pairs: */ \
template(register_method_name, "register") \
do_alias(register_method_signature, object_void_signature) \
-diff --git a/src/share/vm/compiler/compileBroker.cpp b/src/share/vm/compiler/compileBroker.cpp
-index 3e8a65a..fe96a94 100644
---- a/src/share/vm/compiler/compileBroker.cpp
-+++ b/src/share/vm/compiler/compileBroker.cpp
+diff -r 882f6c762ac5 src/share/vm/compiler/compileBroker.cpp
+--- a/src/share/vm/compiler/compileBroker.cpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/compiler/compileBroker.cpp Thu Jul 09 23:18:17 2015 -0700
@@ -1181,6 +1181,14 @@
int comp_level,
methodHandle hot_method, int hot_count,
@@ -1611,10 +1602,9 @@ index 3e8a65a..fe96a94 100644
+ }
+ }
+}
-diff --git a/src/share/vm/compiler/compileBroker.hpp b/src/share/vm/compiler/compileBroker.hpp
-index 29f2b22..37989d1 100644
---- a/src/share/vm/compiler/compileBroker.hpp
-+++ b/src/share/vm/compiler/compileBroker.hpp
+diff -r 882f6c762ac5 src/share/vm/compiler/compileBroker.hpp
+--- a/src/share/vm/compiler/compileBroker.hpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/compiler/compileBroker.hpp Thu Jul 09 23:18:17 2015 -0700
@@ -408,6 +408,7 @@
static void print_compiler_threads_on(outputStream* st);
@@ -1623,10 +1613,9 @@ index 29f2b22..37989d1 100644
static int get_total_compile_count() { return _total_compile_count; }
static int get_total_bailout_count() { return _total_bailout_count; }
static int get_total_invalidated_count() { return _total_invalidated_count; }
-diff --git a/src/share/vm/gc_implementation/concurrentMarkSweep/compactibleFreeListSpace.cpp b/src/share/vm/gc_implementation/concurrentMarkSweep/compactibleFreeListSpace.cpp
-index a3abd66..b45f9e1 100644
---- a/src/share/vm/gc_implementation/concurrentMarkSweep/compactibleFreeListSpace.cpp
-+++ b/src/share/vm/gc_implementation/concurrentMarkSweep/compactibleFreeListSpace.cpp
+diff -r 882f6c762ac5 src/share/vm/gc_implementation/concurrentMarkSweep/compactibleFreeListSpace.cpp
+--- a/src/share/vm/gc_implementation/concurrentMarkSweep/compactibleFreeListSpace.cpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/gc_implementation/concurrentMarkSweep/compactibleFreeListSpace.cpp Thu Jul 09 23:18:17 2015 -0700
@@ -162,6 +162,13 @@
}
}
@@ -1641,10 +1630,9 @@ index a3abd66..b45f9e1 100644
// Like CompactibleSpace forward() but always calls cross_threshold() to
// update the block offset table. Removed initialize_threshold call because
// CFLS does not use a block offset array for contiguous spaces.
-diff --git a/src/share/vm/gc_implementation/concurrentMarkSweep/compactibleFreeListSpace.hpp b/src/share/vm/gc_implementation/concurrentMarkSweep/compactibleFreeListSpace.hpp
-index 24509b6..6670b73 100644
---- a/src/share/vm/gc_implementation/concurrentMarkSweep/compactibleFreeListSpace.hpp
-+++ b/src/share/vm/gc_implementation/concurrentMarkSweep/compactibleFreeListSpace.hpp
+diff -r 882f6c762ac5 src/share/vm/gc_implementation/concurrentMarkSweep/compactibleFreeListSpace.hpp
+--- a/src/share/vm/gc_implementation/concurrentMarkSweep/compactibleFreeListSpace.hpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/gc_implementation/concurrentMarkSweep/compactibleFreeListSpace.hpp Thu Jul 09 23:18:17 2015 -0700
@@ -149,6 +149,7 @@
// Support for compacting cms
@@ -1653,10 +1641,9 @@ index 24509b6..6670b73 100644
HeapWord* forward(oop q, size_t size, CompactPoint* cp, HeapWord* compact_top);
// Initialization helpers.
-diff --git a/src/share/vm/gc_implementation/shared/markSweep.cpp b/src/share/vm/gc_implementation/shared/markSweep.cpp
-index 29841d8..a13a35d 100644
---- a/src/share/vm/gc_implementation/shared/markSweep.cpp
-+++ b/src/share/vm/gc_implementation/shared/markSweep.cpp
+diff -r 882f6c762ac5 src/share/vm/gc_implementation/shared/markSweep.cpp
+--- a/src/share/vm/gc_implementation/shared/markSweep.cpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/gc_implementation/shared/markSweep.cpp Thu Jul 09 23:18:17 2015 -0700
@@ -32,6 +32,8 @@
#include "oops/objArrayKlass.inline.hpp"
#include "oops/oop.inline.hpp"
@@ -1753,22 +1740,22 @@ index 29841d8..a13a35d 100644
+ FREE_RESOURCE_ARRAY(HeapWord, tmp_obj, size);
+ }
+}
-diff --git a/src/share/vm/gc_implementation/shared/markSweep.hpp b/src/share/vm/gc_implementation/shared/markSweep.hpp
-index eb8252c..b96a677 100644
---- a/src/share/vm/gc_implementation/shared/markSweep.hpp
-+++ b/src/share/vm/gc_implementation/shared/markSweep.hpp
-@@ -117,7 +117,11 @@
+diff -r 882f6c762ac5 src/share/vm/gc_implementation/shared/markSweep.hpp
+--- a/src/share/vm/gc_implementation/shared/markSweep.hpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/gc_implementation/shared/markSweep.hpp Thu Jul 09 23:18:17 2015 -0700
+@@ -117,8 +117,12 @@
friend class AdjustPointerClosure;
friend class KeepAliveClosure;
friend class VM_MarkSweep;
+ friend class GenMarkSweep;
friend void marksweep_init();
-+
+
+public:
+ static GrowableArray<oop>* _rescued_oops;
-
++
//
// Vars
+ //
@@ -208,6 +212,8 @@
template <class T> static inline void mark_and_push(T* p);
static inline void push_objarray(oop obj, size_t index);
@@ -1778,10 +1765,9 @@ index eb8252c..b96a677 100644
static void follow_stack(); // Empty marking stack.
static void preserve_mark(oop p, markOop mark);
-diff --git a/src/share/vm/interpreter/interpreterRuntime.cpp b/src/share/vm/interpreter/interpreterRuntime.cpp
-index 1a1af1b..94972f0 100644
---- a/src/share/vm/interpreter/interpreterRuntime.cpp
-+++ b/src/share/vm/interpreter/interpreterRuntime.cpp
+diff -r 882f6c762ac5 src/share/vm/interpreter/interpreterRuntime.cpp
+--- a/src/share/vm/interpreter/interpreterRuntime.cpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/interpreter/interpreterRuntime.cpp Thu Jul 09 23:18:17 2015 -0700
@@ -402,7 +402,7 @@
assert(h_exception.not_null(), "NULL exceptions should be handled by athrow");
assert(h_exception->is_oop(), "just checking");
@@ -1885,10 +1871,9 @@ index 1a1af1b..94972f0 100644
// It is very unlikely that method is redefined more than 100 times
// in the middle of resolve. If it is looping here more than 100 times
// means then there could be a bug here.
-diff --git a/src/share/vm/interpreter/interpreterRuntime.hpp b/src/share/vm/interpreter/interpreterRuntime.hpp
-index 6d5f13a..50fa219 100644
---- a/src/share/vm/interpreter/interpreterRuntime.hpp
-+++ b/src/share/vm/interpreter/interpreterRuntime.hpp
+diff -r 882f6c762ac5 src/share/vm/interpreter/interpreterRuntime.hpp
+--- a/src/share/vm/interpreter/interpreterRuntime.hpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/interpreter/interpreterRuntime.hpp Thu Jul 09 23:18:17 2015 -0700
@@ -141,6 +141,9 @@
static void post_method_entry(JavaThread *thread);
static void post_method_exit (JavaThread *thread);
@@ -1899,10 +1884,9 @@ index 6d5f13a..50fa219 100644
// Native signature handlers
static void prepare_native_call(JavaThread* thread, methodOopDesc* method);
-diff --git a/src/share/vm/interpreter/linkResolver.cpp b/src/share/vm/interpreter/linkResolver.cpp
-index 1676add..ccd5241 100644
---- a/src/share/vm/interpreter/linkResolver.cpp
-+++ b/src/share/vm/interpreter/linkResolver.cpp
+diff -r 882f6c762ac5 src/share/vm/interpreter/linkResolver.cpp
+--- a/src/share/vm/interpreter/linkResolver.cpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/interpreter/linkResolver.cpp Thu Jul 09 23:18:17 2015 -0700
@@ -153,8 +153,8 @@
// Klass resolution
@@ -2224,10 +2208,9 @@ index 1676add..ccd5241 100644
// check if method exists
if (selected_method.is_null()) {
ResourceMark rm(THREAD);
-diff --git a/src/share/vm/interpreter/linkResolver.hpp b/src/share/vm/interpreter/linkResolver.hpp
-index dfd74f9..cf6e44a 100644
---- a/src/share/vm/interpreter/linkResolver.hpp
-+++ b/src/share/vm/interpreter/linkResolver.hpp
+diff -r 882f6c762ac5 src/share/vm/interpreter/linkResolver.hpp
+--- a/src/share/vm/interpreter/linkResolver.hpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/interpreter/linkResolver.hpp Thu Jul 09 23:18:17 2015 -0700
@@ -110,7 +110,11 @@
// It does all necessary link-time checks & throws exceptions if necessary.
@@ -2250,10 +2233,9 @@ index dfd74f9..cf6e44a 100644
static void runtime_resolve_interface_method (CallInfo& result, methodHandle resolved_method, KlassHandle resolved_klass, Handle recv, KlassHandle recv_klass, bool check_null_and_abstract, TRAPS);
static void check_field_accessability (KlassHandle ref_klass, KlassHandle resolved_klass, KlassHandle sel_klass, fieldDescriptor& fd, TRAPS);
-diff --git a/src/share/vm/interpreter/templateTable.hpp b/src/share/vm/interpreter/templateTable.hpp
-index 17e9f26..e77500f 100644
---- a/src/share/vm/interpreter/templateTable.hpp
-+++ b/src/share/vm/interpreter/templateTable.hpp
+diff -r 882f6c762ac5 src/share/vm/interpreter/templateTable.hpp
+--- a/src/share/vm/interpreter/templateTable.hpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/interpreter/templateTable.hpp Thu Jul 09 23:18:17 2015 -0700
@@ -329,8 +329,8 @@
static void shouldnotreachhere();
@@ -2265,10 +2247,9 @@ index 17e9f26..e77500f 100644
static void jvmti_post_fast_field_mod();
// debugging of TemplateGenerator
-diff --git a/src/share/vm/memory/genMarkSweep.cpp b/src/share/vm/memory/genMarkSweep.cpp
-index 76e18d8..6af7c14 100644
---- a/src/share/vm/memory/genMarkSweep.cpp
-+++ b/src/share/vm/memory/genMarkSweep.cpp
+diff -r 882f6c762ac5 src/share/vm/memory/genMarkSweep.cpp
+--- a/src/share/vm/memory/genMarkSweep.cpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/memory/genMarkSweep.cpp Thu Jul 09 23:18:17 2015 -0700
@@ -421,6 +421,7 @@
// in the same order in phase2, phase3 and phase4. We don't quite do that
// here (perm_gen first rather than last), so we tell the validate code
@@ -2292,10 +2273,9 @@ index 76e18d8..6af7c14 100644
+
pg->post_compact(); // Shared spaces verification.
}
-diff --git a/src/share/vm/memory/permGen.cpp b/src/share/vm/memory/permGen.cpp
-index 350f583..59faad1 100644
---- a/src/share/vm/memory/permGen.cpp
-+++ b/src/share/vm/memory/permGen.cpp
+diff -r 882f6c762ac5 src/share/vm/memory/permGen.cpp
+--- a/src/share/vm/memory/permGen.cpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/memory/permGen.cpp Thu Jul 09 23:18:17 2015 -0700
@@ -57,7 +57,12 @@
for (;;) {
@@ -2310,10 +2290,9 @@ index 350f583..59faad1 100644
if ((obj = gen->allocate(size, false)) != NULL) {
return obj;
}
-diff --git a/src/share/vm/memory/space.cpp b/src/share/vm/memory/space.cpp
-index f97bc34..9b20d08 100644
---- a/src/share/vm/memory/space.cpp
-+++ b/src/share/vm/memory/space.cpp
+diff -r 882f6c762ac5 src/share/vm/memory/space.cpp
+--- a/src/share/vm/memory/space.cpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/memory/space.cpp Thu Jul 09 23:18:17 2015 -0700
@@ -378,6 +378,31 @@
_compaction_top = bottom();
}
@@ -2881,10 +2860,9 @@ index f97bc34..9b20d08 100644
}
void Space::print_short() const { print_short_on(tty); }
-diff --git a/src/share/vm/memory/space.hpp b/src/share/vm/memory/space.hpp
-index ef2f2c6..b54d470 100644
---- a/src/share/vm/memory/space.hpp
-+++ b/src/share/vm/memory/space.hpp
+diff -r 882f6c762ac5 src/share/vm/memory/space.hpp
+--- a/src/share/vm/memory/space.hpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/memory/space.hpp Thu Jul 09 23:18:17 2015 -0700
@@ -445,6 +445,9 @@
// indicates when the next such action should be taken.
virtual void prepare_for_compaction(CompactPoint* cp);
@@ -2906,10 +2884,9 @@ index ef2f2c6..b54d470 100644
// Return a size with adjusments as required of the space.
virtual size_t adjust_object_size_v(size_t size) const { return size; }
-diff --git a/src/share/vm/memory/universe.cpp b/src/share/vm/memory/universe.cpp
-index 4030d9d..da9a186 100644
---- a/src/share/vm/memory/universe.cpp
-+++ b/src/share/vm/memory/universe.cpp
+diff -r 882f6c762ac5 src/share/vm/memory/universe.cpp
+--- a/src/share/vm/memory/universe.cpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/memory/universe.cpp Thu Jul 09 23:18:17 2015 -0700
@@ -100,6 +100,8 @@
#include "gc_implementation/parallelScavenge/parallelScavengeHeap.hpp"
#endif
@@ -2958,10 +2935,9 @@ index 4030d9d..da9a186 100644
void Universe::oops_do(OopClosure* f, bool do_all) {
f->do_oop((oop*) &_int_mirror);
-diff --git a/src/share/vm/memory/universe.hpp b/src/share/vm/memory/universe.hpp
-index 50fcb62..afb6b33 100644
---- a/src/share/vm/memory/universe.hpp
-+++ b/src/share/vm/memory/universe.hpp
+diff -r 882f6c762ac5 src/share/vm/memory/universe.hpp
+--- a/src/share/vm/memory/universe.hpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/memory/universe.hpp Thu Jul 09 23:18:17 2015 -0700
@@ -127,6 +127,8 @@
friend class SystemDictionary;
friend class VMStructs;
@@ -3007,10 +2983,9 @@ index 50fcb62..afb6b33 100644
static void verify(VerifyOption option, const char* prefix, bool silent = VerifySilently);
static void verify(const char* prefix, bool silent = VerifySilently) {
verify(VerifyOption_Default, prefix, silent);
-diff --git a/src/share/vm/oops/arrayKlass.cpp b/src/share/vm/oops/arrayKlass.cpp
-index 16142b8..921e546 100644
---- a/src/share/vm/oops/arrayKlass.cpp
-+++ b/src/share/vm/oops/arrayKlass.cpp
+diff -r 882f6c762ac5 src/share/vm/oops/arrayKlass.cpp
+--- a/src/share/vm/oops/arrayKlass.cpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/oops/arrayKlass.cpp Thu Jul 09 23:18:17 2015 -0700
@@ -136,9 +136,9 @@
bool arrayKlass::compute_is_subtype_of(klassOop k) {
@@ -3024,10 +2999,9 @@ index 16142b8..921e546 100644
}
-diff --git a/src/share/vm/oops/constMethodKlass.cpp b/src/share/vm/oops/constMethodKlass.cpp
-index e74811f..be3fe7d 100644
---- a/src/share/vm/oops/constMethodKlass.cpp
-+++ b/src/share/vm/oops/constMethodKlass.cpp
+diff -r 882f6c762ac5 src/share/vm/oops/constMethodKlass.cpp
+--- a/src/share/vm/oops/constMethodKlass.cpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/oops/constMethodKlass.cpp Thu Jul 09 23:18:17 2015 -0700
@@ -102,6 +102,7 @@
constMethodOop cm = constMethodOop(obj);
MarkSweep::mark_and_push(cm->adr_constants());
@@ -3069,10 +3043,9 @@ index e74811f..be3fe7d 100644
// Get size before changing pointers.
// Don't call size() or oop_size() since that is a virtual call.
int size = cm->object_size();
-diff --git a/src/share/vm/oops/constMethodOop.hpp b/src/share/vm/oops/constMethodOop.hpp
-index 549192b..5cea5c4 100644
---- a/src/share/vm/oops/constMethodOop.hpp
-+++ b/src/share/vm/oops/constMethodOop.hpp
+diff -r 882f6c762ac5 src/share/vm/oops/constMethodOop.hpp
+--- a/src/share/vm/oops/constMethodOop.hpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/oops/constMethodOop.hpp Thu Jul 09 23:18:17 2015 -0700
@@ -129,7 +129,7 @@
public:
@@ -3082,21 +3055,20 @@ index 549192b..5cea5c4 100644
private:
//
-@@ -140,6 +140,9 @@
-
+@@ -141,6 +141,9 @@
// Raw stackmap data for the method
typeArrayOop _stackmap_data;
-+
+
+ // (tw) Table mapping code sections for method forward points.
+ typeArrayOop _code_section_table;
-
++
//
// End of the oop block.
-@@ -194,6 +197,28 @@
- oop_store_without_check((oop*)&_stackmap_data, (oop)sd);
+ //
+@@ -195,6 +198,28 @@
}
bool has_stackmap_table() const { return _stackmap_data != NULL; }
-+
+
+ // code section table
+ typeArrayOop code_section_table() const { return _code_section_table; }
+ void set_code_section_table(typeArrayOop e) { oop_store_without_check((oop*) &_code_section_table, (oop) e); }
@@ -3118,9 +3090,10 @@ index 549192b..5cea5c4 100644
+ int code_section_length_at(int index) const {
+ return _code_section_table->short_at(index * ValuesPerCodeSectionEntry + 2);
+ }
-
++
void init_fingerprint() {
const uint64_t initval = CONST64(0x8000000000000000);
+ _fingerprint = initval;
@@ -301,6 +326,7 @@
// Garbage collection support
oop* adr_constants() const { return (oop*)&_constants; }
@@ -3129,10 +3102,9 @@ index 549192b..5cea5c4 100644
bool is_conc_safe() { return _is_conc_safe; }
void set_is_conc_safe(bool v) { _is_conc_safe = v; }
-diff --git a/src/share/vm/oops/cpCacheOop.cpp b/src/share/vm/oops/cpCacheOop.cpp
-index ad62921..f39f202 100644
---- a/src/share/vm/oops/cpCacheOop.cpp
-+++ b/src/share/vm/oops/cpCacheOop.cpp
+diff -r 882f6c762ac5 src/share/vm/oops/cpCacheOop.cpp
+--- a/src/share/vm/oops/cpCacheOop.cpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/oops/cpCacheOop.cpp Thu Jul 09 23:18:17 2015 -0700
@@ -37,9 +37,15 @@
// Implememtation of ConstantPoolCacheEntry
@@ -3232,7 +3204,7 @@ index ad62921..f39f202 100644
return true;
}
-@@ -548,82 +546,25 @@
+@@ -548,84 +546,27 @@
return false;
}
@@ -3271,8 +3243,8 @@ index ad62921..f39f202 100644
}
return false;
--}
--
+ }
+
-// a constant pool cache entry should never contain old or obsolete methods
-bool ConstantPoolCacheEntry::check_no_old_or_obsolete_entries() {
- if (is_vfinal()) {
@@ -3322,9 +3294,11 @@ index ad62921..f39f202 100644
-
- // the method is in the interesting class so the entry is interesting
- return true;
- }
-
+-}
+-
void ConstantPoolCacheEntry::print(outputStream* st, int index) const {
+ // print separator
+ if (index == 0) st->print_cr(" -------------");
@@ -663,60 +604,18 @@
}
}
@@ -3370,7 +3344,7 @@ index ad62921..f39f202 100644
+ // (tw) TODO: Update only field offsets and modify only constant pool entries that
+ // point to changed fields
+ entry_at(i)->initialize_entry(entry_at(i)->constant_pool_index());
-+ } else if(entry_at(i)->is_method_entry()) {
++ } else if(entry_at(i)->is_method_entry() && !entry_at(i)->is_secondary_entry()) {
+ entry_at(i)->adjust_method_entry(NULL, NULL);
}
}
@@ -3394,10 +3368,9 @@ index ad62921..f39f202 100644
- }
- }
-}
-diff --git a/src/share/vm/oops/cpCacheOop.hpp b/src/share/vm/oops/cpCacheOop.hpp
-index b9107cd..653ab13 100644
---- a/src/share/vm/oops/cpCacheOop.hpp
-+++ b/src/share/vm/oops/cpCacheOop.hpp
+diff -r 882f6c762ac5 src/share/vm/oops/cpCacheOop.hpp
+--- a/src/share/vm/oops/cpCacheOop.hpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/oops/cpCacheOop.hpp Thu Jul 09 23:18:17 2015 -0700
@@ -136,7 +136,8 @@
void set_bytecode_2(Bytecodes::Code code);
void set_f1(oop f1) {
@@ -3416,15 +3389,15 @@ index b9107cd..653ab13 100644
is_vfinal_shift = 20,
is_volatile_shift = 21,
is_final_shift = 22,
-@@ -206,6 +208,8 @@
- // Initialization
+@@ -207,6 +209,8 @@
void initialize_entry(int original_index); // initialize primary entry
void initialize_secondary_entry(int main_index); // initialize secondary entry
-+
-+ void copy_from(ConstantPoolCacheEntry *other);
++ void copy_from(ConstantPoolCacheEntry *other);
++
void set_field( // sets entry to resolved field state
Bytecodes::Code get_code, // the bytecode used for reading the field
+ Bytecodes::Code put_code, // the bytecode used for writing the field
@@ -368,10 +372,7 @@
// trace_name_printed is set to true if the current call has
// printed the klass name so that other routines in the adjust_*
@@ -3457,10 +3430,9 @@ index b9107cd..653ab13 100644
};
#endif // SHARE_VM_OOPS_CPCACHEOOP_HPP
-diff --git a/src/share/vm/oops/instanceKlass.cpp b/src/share/vm/oops/instanceKlass.cpp
-index a775b02..749458f 100644
---- a/src/share/vm/oops/instanceKlass.cpp
-+++ b/src/share/vm/oops/instanceKlass.cpp
+diff -r 882f6c762ac5 src/share/vm/oops/instanceKlass.cpp
+--- a/src/share/vm/oops/instanceKlass.cpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/oops/instanceKlass.cpp Thu Jul 09 23:18:17 2015 -0700
@@ -250,12 +250,118 @@
}
@@ -3834,10 +3806,9 @@ index a775b02..749458f 100644
st->cr();
st->print(BULLET"fake entry resolved_constructor: ");
methodOop ctor = java_lang_Class::resolved_constructor(obj);
-diff --git a/src/share/vm/oops/instanceKlass.hpp b/src/share/vm/oops/instanceKlass.hpp
-index bb613b0..71f0083 100644
---- a/src/share/vm/oops/instanceKlass.hpp
-+++ b/src/share/vm/oops/instanceKlass.hpp
+diff -r 882f6c762ac5 src/share/vm/oops/instanceKlass.hpp
+--- a/src/share/vm/oops/instanceKlass.hpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/oops/instanceKlass.hpp Thu Jul 09 23:18:17 2015 -0700
@@ -102,6 +102,22 @@
virtual void do_field(fieldDescriptor* fd) = 0;
};
@@ -3910,10 +3881,9 @@ index bb613b0..71f0083 100644
void methods_do(void f(methodOop method));
void array_klasses_do(void f(klassOop k));
-diff --git a/src/share/vm/oops/instanceKlassKlass.cpp b/src/share/vm/oops/instanceKlassKlass.cpp
-index 8e7dc12..63d6dc4 100644
---- a/src/share/vm/oops/instanceKlassKlass.cpp
-+++ b/src/share/vm/oops/instanceKlassKlass.cpp
+diff -r 882f6c762ac5 src/share/vm/oops/instanceKlassKlass.cpp
+--- a/src/share/vm/oops/instanceKlassKlass.cpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/oops/instanceKlassKlass.cpp Thu Jul 09 23:18:17 2015 -0700
@@ -480,6 +480,28 @@
instanceKlass* ik = instanceKlass::cast(klassOop(obj));
klassKlass::oop_print_on(obj, st);
@@ -3952,10 +3922,9 @@ index 8e7dc12..63d6dc4 100644
sib = sib->next_sibling();
}
-diff --git a/src/share/vm/oops/instanceRefKlass.cpp b/src/share/vm/oops/instanceRefKlass.cpp
-index 7db4f03..1171487 100644
---- a/src/share/vm/oops/instanceRefKlass.cpp
-+++ b/src/share/vm/oops/instanceRefKlass.cpp
+diff -r 882f6c762ac5 src/share/vm/oops/instanceRefKlass.cpp
+--- a/src/share/vm/oops/instanceRefKlass.cpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/oops/instanceRefKlass.cpp Thu Jul 09 23:18:17 2015 -0700
@@ -455,10 +455,13 @@
instanceKlass* ik = instanceKlass::cast(k);
@@ -3974,10 +3943,9 @@ index 7db4f03..1171487 100644
assert(ik->nonstatic_oop_map_count() == 1, "just checking");
OopMapBlock* map = ik->start_of_nonstatic_oop_maps();
-diff --git a/src/share/vm/oops/klass.cpp b/src/share/vm/oops/klass.cpp
-index ff33181..28537e2 100644
---- a/src/share/vm/oops/klass.cpp
-+++ b/src/share/vm/oops/klass.cpp
+diff -r 882f6c762ac5 src/share/vm/oops/klass.cpp
+--- a/src/share/vm/oops/klass.cpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/oops/klass.cpp Thu Jul 09 23:18:17 2015 -0700
@@ -55,6 +55,26 @@
return false;
}
@@ -4031,10 +3999,9 @@ index ff33181..28537e2 100644
assert(super() == NULL || super() == SystemDictionary::Object_klass(),
"initialize this only once to a non-trivial value");
set_super(k);
-diff --git a/src/share/vm/oops/klass.hpp b/src/share/vm/oops/klass.hpp
-index a449e87..52364ba 100644
---- a/src/share/vm/oops/klass.hpp
-+++ b/src/share/vm/oops/klass.hpp
+diff -r 882f6c762ac5 src/share/vm/oops/klass.hpp
+--- a/src/share/vm/oops/klass.hpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/oops/klass.hpp Thu Jul 09 23:18:17 2015 -0700
@@ -171,6 +171,7 @@
void* operator new(size_t ignored, KlassHandle& klass, int size, TRAPS);
};
@@ -4094,11 +4061,10 @@ index a449e87..52364ba 100644
// First subclass (NULL if none); _subklass->next_sibling() is next one
klassOop _subklass;
// Sibling link (or NULL); links all subklasses of a klass
-@@ -253,6 +291,19 @@
-
+@@ -254,6 +292,19 @@
jint _modifier_flags; // Processed access flags, for use by Class.getModifiers.
AccessFlags _access_flags; // Access flags. The class/interface distinction is stored here.
-+
+
+ // (tw) Non-oop fields for enhanced class redefinition
+ jint _revision_number; // The revision number for redefined classes
+ jint _redefinition_index; // Index of this class when performing the redefinition
@@ -4111,14 +4077,14 @@ index a449e87..52364ba 100644
+ char _field_redefinition_policy;
+ char _static_field_redefinition_policy;
+ bool _is_redefining;
-
++
#ifndef PRODUCT
int _verify_count; // to avoid redundant verifies
-@@ -301,6 +352,99 @@
-
+ #endif
+@@ -302,6 +353,99 @@
klassOop secondary_super_cache() const { return _secondary_super_cache; }
void set_secondary_super_cache(klassOop k) { oop_store_without_check((oop*) &_secondary_super_cache, (oop) k); }
-+
+
+ // BEGIN class redefinition utilities
+
+ // double links between new and old version of a class
@@ -4211,9 +4177,10 @@ index a449e87..52364ba 100644
+ }
+
+ // END class redefinition utilities
-
++
objArrayOop secondary_supers() const { return _secondary_supers; }
void set_secondary_supers(objArrayOop k) { oop_store_without_check((oop*) &_secondary_supers, (oop) k); }
+
@@ -362,6 +506,8 @@
void set_next_sibling(klassOop s);
@@ -4240,10 +4207,9 @@ index a449e87..52364ba 100644
#endif // SHARE_VM_OOPS_KLASS_HPP
-diff --git a/src/share/vm/oops/klassKlass.cpp b/src/share/vm/oops/klassKlass.cpp
-index 06809d5..9c08f32 100644
---- a/src/share/vm/oops/klassKlass.cpp
-+++ b/src/share/vm/oops/klassKlass.cpp
+diff -r 882f6c762ac5 src/share/vm/oops/klassKlass.cpp
+--- a/src/share/vm/oops/klassKlass.cpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/oops/klassKlass.cpp Thu Jul 09 23:18:17 2015 -0700
@@ -68,6 +68,8 @@
Klass* k = Klass::cast(klassOop(obj));
// If we are alive it is valid to keep our superclass and subtype caches alive
@@ -4300,10 +4266,9 @@ index 06809d5..9c08f32 100644
for (juint i = 0; i < Klass::primary_super_limit(); i++)
MarkSweep::adjust_pointer(k->adr_primary_supers()+i);
MarkSweep::adjust_pointer(k->adr_secondary_super_cache());
-diff --git a/src/share/vm/oops/klassOop.hpp b/src/share/vm/oops/klassOop.hpp
-index f212fc5..9731a9c 100644
---- a/src/share/vm/oops/klassOop.hpp
-+++ b/src/share/vm/oops/klassOop.hpp
+diff -r 882f6c762ac5 src/share/vm/oops/klassOop.hpp
+--- a/src/share/vm/oops/klassOop.hpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/oops/klassOop.hpp Thu Jul 09 23:18:17 2015 -0700
@@ -41,8 +41,10 @@
// returns the Klass part containing dispatching behavior
Klass* klass_part() const { return (Klass*)((address)this + sizeof(klassOopDesc)); }
@@ -4316,10 +4281,9 @@ index f212fc5..9731a9c 100644
private:
// These have no implementation since klassOop should never be accessed in this fashion
-diff --git a/src/share/vm/oops/klassVtable.cpp b/src/share/vm/oops/klassVtable.cpp
-index ff22444..8d39611 100644
---- a/src/share/vm/oops/klassVtable.cpp
-+++ b/src/share/vm/oops/klassVtable.cpp
+diff -r 882f6c762ac5 src/share/vm/oops/klassVtable.cpp
+--- a/src/share/vm/oops/klassVtable.cpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/oops/klassVtable.cpp Thu Jul 09 23:18:17 2015 -0700
@@ -97,7 +97,8 @@
vtable_length = Universe::base_vtable_size();
}
@@ -4342,12 +4306,10 @@ index ff22444..8d39611 100644
}
int klassVtable::index_of(methodOop m, int len) const {
-@@ -676,20 +677,6 @@
- }
- }
+@@ -678,20 +679,6 @@
return true;
--}
--
+ }
+
-void klassVtable::dump_vtable() {
- tty->print_cr("vtable dump --");
- for (int i = 0; i < length(); i++) {
@@ -4360,9 +4322,11 @@ index ff22444..8d39611 100644
- tty->cr();
- }
- }
- }
-
+-}
+-
// CDS/RedefineClasses support - clear vtables so they can be reinitialized
+ void klassVtable::clear_vtable() {
+ for (int i = 0; i < _length; i++) table()[i].clear();
@@ -1262,6 +1249,7 @@
void klassVtable::verify_against(outputStream* st, klassVtable* vt, int index) {
@@ -4438,10 +4402,9 @@ index ff22444..8d39611 100644
int klassItable::_total_classes; // Total no. of classes with itables
long klassItable::_total_size; // Total no. of bytes used for itables
-diff --git a/src/share/vm/oops/klassVtable.hpp b/src/share/vm/oops/klassVtable.hpp
-index 405b0c7..0c8d2f7 100644
---- a/src/share/vm/oops/klassVtable.hpp
-+++ b/src/share/vm/oops/klassVtable.hpp
+diff -r 882f6c762ac5 src/share/vm/oops/klassVtable.hpp
+--- a/src/share/vm/oops/klassVtable.hpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/oops/klassVtable.hpp Thu Jul 09 23:18:17 2015 -0700
@@ -100,6 +100,7 @@
int methods_length, bool * trace_name_printed);
bool check_no_old_or_obsolete_entries();
@@ -4450,10 +4413,9 @@ index 405b0c7..0c8d2f7 100644
// Garbage collection
void oop_follow_contents();
-diff --git a/src/share/vm/oops/methodKlass.cpp b/src/share/vm/oops/methodKlass.cpp
-index 75d0b09..f1b7d2f 100644
---- a/src/share/vm/oops/methodKlass.cpp
-+++ b/src/share/vm/oops/methodKlass.cpp
+diff -r 882f6c762ac5 src/share/vm/oops/methodKlass.cpp
+--- a/src/share/vm/oops/methodKlass.cpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/oops/methodKlass.cpp Thu Jul 09 23:18:17 2015 -0700
@@ -93,6 +93,10 @@
m->set_adapter_entry(NULL);
m->clear_code(); // from_c/from_i get set to c2i/i2i
@@ -4548,10 +4510,9 @@ index 75d0b09..f1b7d2f 100644
st->print (" - constants: "INTPTR_FORMAT" ", (address)m->constants());
m->constants()->print_value_on(st); st->cr();
st->print (" - access: 0x%x ", m->access_flags().as_int()); m->access_flags().print_on(st); st->cr();
-diff --git a/src/share/vm/oops/methodOop.cpp b/src/share/vm/oops/methodOop.cpp
-index 4f59d3a..5cdf147 100644
---- a/src/share/vm/oops/methodOop.cpp
-+++ b/src/share/vm/oops/methodOop.cpp
+diff -r 882f6c762ac5 src/share/vm/oops/methodOop.cpp
+--- a/src/share/vm/oops/methodOop.cpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/oops/methodOop.cpp Thu Jul 09 23:18:17 2015 -0700
@@ -328,6 +328,70 @@
}
@@ -4633,10 +4594,9 @@ index 4f59d3a..5cdf147 100644
newm->constMethod()->set_code_size(new_code_length);
newm->constMethod()->set_constMethod_size(new_const_method_size);
newm->set_method_size(new_method_size);
-diff --git a/src/share/vm/oops/methodOop.hpp b/src/share/vm/oops/methodOop.hpp
-index 486e106..11e52bb 100644
---- a/src/share/vm/oops/methodOop.hpp
-+++ b/src/share/vm/oops/methodOop.hpp
+diff -r 882f6c762ac5 src/share/vm/oops/methodOop.hpp
+--- a/src/share/vm/oops/methodOop.hpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/oops/methodOop.hpp Thu Jul 09 23:18:17 2015 -0700
@@ -114,6 +114,11 @@
AccessFlags _access_flags; // Access flags
int _vtable_index; // vtable index of this method (see VtableIndexFlag)
@@ -4649,11 +4609,10 @@ index 486e106..11e52bb 100644
#ifdef CC_INTERP
int _result_index; // C++ interpreter needs for converting results to/from stack
#endif
-@@ -174,6 +179,32 @@
- Symbol* name() const { return constants()->symbol_at(name_index()); }
+@@ -175,6 +180,32 @@
int name_index() const { return constMethod()->name_index(); }
void set_name_index(int index) { constMethod()->set_name_index(index); }
-+
+
+ methodOop forward_method() const {return _forward_method; }
+ void set_forward_method(methodOop m) { _forward_method = m; }
+ bool has_forward_method() const { return forward_method() != NULL; }
@@ -4679,9 +4638,10 @@ index 486e106..11e52bb 100644
+ return old_version()->oldest_version();
+ }
+ }
-
++
// signature
Symbol* signature() const { return constants()->symbol_at(signature_index()); }
+ int signature_index() const { return constMethod()->signature_index(); }
@@ -670,6 +701,10 @@
// Inline cache support
void cleanup_inline_caches();
@@ -4703,10 +4663,9 @@ index 486e106..11e52bb 100644
oop* adr_method_data() const { return (oop*)&_method_data; }
};
-diff --git a/src/share/vm/oops/oop.hpp b/src/share/vm/oops/oop.hpp
-index 5982c88..4873fca 100644
---- a/src/share/vm/oops/oop.hpp
-+++ b/src/share/vm/oops/oop.hpp
+diff -r 882f6c762ac5 src/share/vm/oops/oop.hpp
+--- a/src/share/vm/oops/oop.hpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/oops/oop.hpp Thu Jul 09 23:18:17 2015 -0700
@@ -95,6 +95,7 @@
narrowOop* compressed_klass_addr();
@@ -4723,10 +4682,9 @@ index 5982c88..4873fca 100644
bool is_thread() const;
bool is_method() const;
bool is_constMethod() const;
-diff --git a/src/share/vm/oops/oop.inline.hpp b/src/share/vm/oops/oop.inline.hpp
-index f4eb2f7..0acb346 100644
---- a/src/share/vm/oops/oop.inline.hpp
-+++ b/src/share/vm/oops/oop.inline.hpp
+diff -r 882f6c762ac5 src/share/vm/oops/oop.inline.hpp
+--- a/src/share/vm/oops/oop.inline.hpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/oops/oop.inline.hpp Thu Jul 09 23:18:17 2015 -0700
@@ -123,6 +123,14 @@
}
}
@@ -4750,10 +4708,9 @@ index f4eb2f7..0acb346 100644
inline bool oopDesc::is_thread() const { return blueprint()->oop_is_thread(); }
inline bool oopDesc::is_method() const { return blueprint()->oop_is_method(); }
inline bool oopDesc::is_constMethod() const { return blueprint()->oop_is_constMethod(); }
-diff --git a/src/share/vm/prims/jni.cpp b/src/share/vm/prims/jni.cpp
-index 50e1263..701ed74 100644
---- a/src/share/vm/prims/jni.cpp
-+++ b/src/share/vm/prims/jni.cpp
+diff -r 882f6c762ac5 src/share/vm/prims/jni.cpp
+--- a/src/share/vm/prims/jni.cpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/prims/jni.cpp Thu Jul 09 23:18:17 2015 -0700
@@ -406,7 +406,7 @@
}
}
@@ -4763,10 +4720,9 @@ index 50e1263..701ed74 100644
CHECK_NULL);
if (TraceClassResolution && k != NULL) {
-diff --git a/src/share/vm/prims/jvm.cpp b/src/share/vm/prims/jvm.cpp
-index ccd09f6..ef3f024 100644
---- a/src/share/vm/prims/jvm.cpp
-+++ b/src/share/vm/prims/jvm.cpp
+diff -r 882f6c762ac5 src/share/vm/prims/jvm.cpp
+--- a/src/share/vm/prims/jvm.cpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/prims/jvm.cpp Thu Jul 09 23:18:17 2015 -0700
@@ -932,7 +932,7 @@
Handle protection_domain (THREAD, JNIHandles::resolve(pd));
klassOop k = SystemDictionary::resolve_from_stream(class_name, class_loader,
@@ -4776,10 +4732,9 @@ index ccd09f6..ef3f024 100644
CHECK_NULL);
if (TraceClassResolution && k != NULL) {
-diff --git a/src/share/vm/prims/jvmtiEnv.cpp b/src/share/vm/prims/jvmtiEnv.cpp
-index 4ac6b82..30b8e84 100644
---- a/src/share/vm/prims/jvmtiEnv.cpp
-+++ b/src/share/vm/prims/jvmtiEnv.cpp
+diff -r 882f6c762ac5 src/share/vm/prims/jvmtiEnv.cpp
+--- a/src/share/vm/prims/jvmtiEnv.cpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/prims/jvmtiEnv.cpp Thu Jul 09 23:18:17 2015 -0700
@@ -290,7 +290,10 @@
class_definitions[index].klass = jcls;
}
@@ -4807,10 +4762,9 @@ index 4ac6b82..30b8e84 100644
return (op.check_error());
} /* end RedefineClasses */
-diff --git a/src/share/vm/prims/jvmtiExport.cpp b/src/share/vm/prims/jvmtiExport.cpp
-index ec8ede3..2bd5983 100644
---- a/src/share/vm/prims/jvmtiExport.cpp
-+++ b/src/share/vm/prims/jvmtiExport.cpp
+diff -r 882f6c762ac5 src/share/vm/prims/jvmtiExport.cpp
+--- a/src/share/vm/prims/jvmtiExport.cpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/prims/jvmtiExport.cpp Thu Jul 09 23:18:17 2015 -0700
@@ -2296,7 +2296,7 @@
// iterate over any code blob descriptors collected and post a
// DYNAMIC_CODE_GENERATED event to the profiler.
@@ -4820,10 +4774,9 @@ index ec8ede3..2bd5983 100644
// iterate over any code blob descriptors that we collected
if (_code_blobs != NULL) {
for (int i=0; i<_code_blobs->length(); i++) {
-diff --git a/src/share/vm/prims/jvmtiImpl.cpp b/src/share/vm/prims/jvmtiImpl.cpp
-index d3fa140..f4f8b57 100644
---- a/src/share/vm/prims/jvmtiImpl.cpp
-+++ b/src/share/vm/prims/jvmtiImpl.cpp
+diff -r 882f6c762ac5 src/share/vm/prims/jvmtiImpl.cpp
+--- a/src/share/vm/prims/jvmtiImpl.cpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/prims/jvmtiImpl.cpp Thu Jul 09 23:18:17 2015 -0700
@@ -286,6 +286,8 @@
void JvmtiBreakpoint::each_method_version_do(method_action meth_act) {
((methodOopDesc*)_method->*meth_act)(_bci);
@@ -4833,10 +4786,9 @@ index d3fa140..f4f8b57 100644
// add/remove breakpoint to/from versions of the method that
// are EMCP. Directly or transitively obsolete methods are
// not saved in the PreviousVersionInfo.
-diff --git a/src/share/vm/prims/jvmtiRedefineClasses.cpp b/src/share/vm/prims/jvmtiRedefineClasses.cpp
-index 606be1c..ef4f380 100644
---- a/src/share/vm/prims/jvmtiRedefineClasses.cpp
-+++ b/src/share/vm/prims/jvmtiRedefineClasses.cpp
+diff -r 882f6c762ac5 src/share/vm/prims/jvmtiRedefineClasses.cpp
+--- a/src/share/vm/prims/jvmtiRedefineClasses.cpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/prims/jvmtiRedefineClasses.cpp Thu Jul 09 23:18:17 2015 -0700
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2003, 2014, Oracle and/or its affiliates. All rights reserved.
@@ -4848,10 +4800,10 @@ index 606be1c..ef4f380 100644
#include "interpreter/rewriter.hpp"
#include "memory/gcLocker.hpp"
#include "memory/universe.inline.hpp"
--#include "oops/fieldStreams.hpp"
+#include "memory/cardTableRS.hpp"
- #include "oops/klassVtable.hpp"
-+#include "oops/fieldStreams.hpp"
++#include "oops/klassVtable.hpp"
+ #include "oops/fieldStreams.hpp"
+-#include "oops/klassVtable.hpp"
#include "prims/jvmtiImpl.hpp"
#include "prims/jvmtiRedefineClasses.hpp"
+#include "prims/jvmtiClassFileReconstituter.hpp"
@@ -4878,12 +4830,13 @@ index 606be1c..ef4f380 100644
int VM_RedefineClasses::_added_methods_length = 0;
klassOop VM_RedefineClasses::_the_class_oop = NULL;
-+// Holds the revision number of the current class redefinition
-+int VM_RedefineClasses::_revision_number = -1;
-
+-
-VM_RedefineClasses::VM_RedefineClasses(jint class_count,
- const jvmtiClassDefinition *class_defs,
- JvmtiClassLoadKind class_load_kind) {
++// Holds the revision number of the current class redefinition
++int VM_RedefineClasses::_revision_number = -1;
++
+VM_RedefineClasses::VM_RedefineClasses(jint class_count, const jvmtiClassDefinition *class_defs, JvmtiClassLoadKind class_load_kind)
+ : VM_GC_Operation(Universe::heap()->total_full_collections(), GCCause::_jvmti_force_gc) {
+ RC_TIMER_START(_timer_total);
@@ -4895,19 +4848,12 @@ index 606be1c..ef4f380 100644
+ _result = JVMTI_ERROR_NONE;
}
--bool VM_RedefineClasses::doit_prologue() {
-- if (_class_count == 0) {
-- _res = JVMTI_ERROR_NONE;
-- return false;
+VM_RedefineClasses::~VM_RedefineClasses() {
+ {
+ MonitorLockerEx ml(RedefinitionSync_lock);
+ Threads::set_wait_at_instrumentation_entry(false);
+ ml.notify_all();
- }
-- if (_class_defs == NULL) {
-- _res = JVMTI_ERROR_NULL_POINTER;
-- return false;
++ }
+
+ unlock_threads();
+ RC_TIMER_STOP(_timer_total);
@@ -4923,19 +4869,7 @@ index 606be1c..ef4f380 100644
+ tty->print_cr("Timing Epilogue: %d", _timer_vm_op_epilogue.milliseconds());
+ tty->print_cr("------------------------------------------------------------------");
+ tty->print_cr("Total Time: %d", _timer_total.milliseconds());
- }
-- for (int i = 0; i < _class_count; i++) {
-- if (_class_defs[i].klass == NULL) {
-- _res = JVMTI_ERROR_INVALID_CLASS;
-- return false;
-- }
-- if (_class_defs[i].class_byte_count == 0) {
-- _res = JVMTI_ERROR_INVALID_CLASS_FORMAT;
-- return false;
-- }
-- if (_class_defs[i].class_bytes == NULL) {
-- _res = JVMTI_ERROR_NULL_POINTER;
-- return false;
++ }
+}
+
+// Searches for all affected classes and performs a sorting such that a supertype is always before a subtype.
@@ -4961,12 +4895,9 @@ index 606be1c..ef4f380 100644
+ for (int i=0; i<affected_classes.length(); i++) {
+ RC_TRACE(0x00000001, ("%s",
+ affected_classes.at(i)->name()->as_C_string()));
- }
- }
-
-- // Start timer after all the sanity checks; not quite accurate, but
-- // better than adding a bunch of stop() calls.
-- RC_TIMER_START(_timer_vm_op_prologue);
++ }
++ }
++
+ // Add the array of affected classes and the array of redefined classes to get a list of all classes that need a redefinition
+ all_affected_klasses->appendAll(&klasses_to_redefine);
+ all_affected_klasses->appendAll(&affected_classes);
@@ -5037,7 +4968,9 @@ index 606be1c..ef4f380 100644
+}
+
+// Prologue of the VM operation, called on the Java thread in parallel to normal program execution
-+bool VM_RedefineClasses::doit_prologue() {
+ bool VM_RedefineClasses::doit_prologue() {
+- if (_class_count == 0) {
+- _res = JVMTI_ERROR_NONE;
+
+ _revision_number++;
+ RC_TRACE(0x00000001, ("Redefinition with revision number %d started!", _revision_number));
@@ -5047,16 +4980,13 @@ index 606be1c..ef4f380 100644
+
+ if (!check_arguments()) {
+ RC_TIMER_STOP(_timer_prologue);
-+ return false;
-+ }
-
- // We first load new class versions in the prologue, because somewhere down the
- // call chain it is required that the current thread is a Java thread.
-- _res = load_new_class_versions(Thread::current());
-- if (_res != JVMTI_ERROR_NONE) {
-- // Free os::malloc allocated memory in load_new_class_version.
-- os::free(_scratch_classes);
-- RC_TIMER_STOP(_timer_vm_op_prologue);
+ return false;
+ }
+- if (_class_defs == NULL) {
+- _res = JVMTI_ERROR_NULL_POINTER;
++
++ // We first load new class versions in the prologue, because somewhere down the
++ // call chain it is required that the current thread is a Java thread.
+ _new_classes = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<instanceKlassHandle>(5, true);
+ _result = load_new_class_versions(Thread::current());
+
@@ -5068,20 +4998,108 @@ index 606be1c..ef4f380 100644
+ RC_TIMER_STOP(_timer_prologue);
return false;
}
-
-- RC_TIMER_STOP(_timer_vm_op_prologue);
++
+ RC_TRACE(0x00000001, ("nearly finished"));
+ VM_GC_Operation::doit_prologue();
+ RC_TIMER_STOP(_timer_prologue);
+ RC_TRACE(0x00000001, ("doit_prologue finished!"));
- return true;
- }
-
--void VM_RedefineClasses::doit() {
-- Thread *thread = Thread::current();
++ return true;
++}
++
+// Checks basic properties of the arguments of the redefinition command.
+bool VM_RedefineClasses::check_arguments() {
++
++ if (_class_count == 0) RC_ABORT(JVMTI_ERROR_NONE);
++ if (_class_defs == NULL) RC_ABORT(JVMTI_ERROR_NULL_POINTER);
+ for (int i = 0; i < _class_count; i++) {
+- if (_class_defs[i].klass == NULL) {
+- _res = JVMTI_ERROR_INVALID_CLASS;
+- return false;
++ if (_class_defs[i].klass == NULL) RC_ABORT(JVMTI_ERROR_INVALID_CLASS);
++ if (_class_defs[i].class_byte_count == 0) RC_ABORT(JVMTI_ERROR_INVALID_CLASS_FORMAT);
++ if (_class_defs[i].class_bytes == NULL) RC_ABORT(JVMTI_ERROR_NULL_POINTER);
++ }
++
++ return true;
++}
++
++jvmtiError VM_RedefineClasses::check_exception() const {
++ Thread* THREAD = Thread::current();
++ if (HAS_PENDING_EXCEPTION) {
++
++ Symbol* ex_name = PENDING_EXCEPTION->klass()->klass_part()->name();
++ RC_TRACE(0x00000001, ("parse_stream exception: '%s'",
++ ex_name->as_C_string()));
++ if (TraceRedefineClasses >= 1) {
++ java_lang_Throwable::print(PENDING_EXCEPTION, tty);
++ tty->print_cr("");
+ }
+- if (_class_defs[i].class_byte_count == 0) {
+- _res = JVMTI_ERROR_INVALID_CLASS_FORMAT;
+- return false;
+- }
+- if (_class_defs[i].class_bytes == NULL) {
+- _res = JVMTI_ERROR_NULL_POINTER;
+- return false;
++ CLEAR_PENDING_EXCEPTION;
++
++ if (ex_name == vmSymbols::java_lang_UnsupportedClassVersionError()) {
++ return JVMTI_ERROR_UNSUPPORTED_VERSION;
++ } else if (ex_name == vmSymbols::java_lang_ClassFormatError()) {
++ return JVMTI_ERROR_INVALID_CLASS_FORMAT;
++ } else if (ex_name == vmSymbols::java_lang_ClassCircularityError()) {
++ return JVMTI_ERROR_CIRCULAR_CLASS_DEFINITION;
++ } else if (ex_name == vmSymbols::java_lang_NoClassDefFoundError()) {
++ // The message will be "XXX (wrong name: YYY)"
++ return JVMTI_ERROR_NAMES_DONT_MATCH;
++ } else if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
++ return JVMTI_ERROR_OUT_OF_MEMORY;
++ } else {
++ // Just in case more exceptions can be thrown..
++ return JVMTI_ERROR_FAILS_VERIFICATION;
+ }
+ }
+
+- // Start timer after all the sanity checks; not quite accurate, but
+- // better than adding a bunch of stop() calls.
+- RC_TIMER_START(_timer_vm_op_prologue);
+-
+- // We first load new class versions in the prologue, because somewhere down the
+- // call chain it is required that the current thread is a Java thread.
+- _res = load_new_class_versions(Thread::current());
+- if (_res != JVMTI_ERROR_NONE) {
+- // Free os::malloc allocated memory in load_new_class_version.
+- os::free(_scratch_classes);
+- RC_TIMER_STOP(_timer_vm_op_prologue);
+- return false;
++ return JVMTI_ERROR_NONE;
++}
++
++// Loads all new class versions and stores the instanceKlass handles in an array.
++jvmtiError VM_RedefineClasses::load_new_class_versions(TRAPS) {
++
++ ResourceMark rm(THREAD);
++
++ RC_TRACE(0x00000001, ("==================================================================="));
++ RC_TRACE(0x00000001, ("load new class versions (%d)",
++ _class_count));
++
++ // Retrieve an array of all classes that need to be redefined
++ GrowableArray<instanceKlassHandle> all_affected_klasses;
++ jvmtiError err = find_sorted_affected_classes(&all_affected_klasses);
++ if (err != JVMTI_ERROR_NONE) {
++ RC_TRACE(0x00000001, ("Error finding sorted affected classes: %d",
++ (int)err));
++ return err;
+ }
+- RC_TIMER_STOP(_timer_vm_op_prologue);
+- return true;
+-}
+-
+-void VM_RedefineClasses::doit() {
+- Thread *thread = Thread::current();
+-
- if (UseSharedSpaces) {
- // Sharing is enabled so we remap the shared readonly space to
- // shared readwrite, private just in case we need to redefine
@@ -5092,20 +5110,43 @@ index 606be1c..ef4f380 100644
- ("failed to remap shared readonly space to readwrite, private"));
- _res = JVMTI_ERROR_INTERNAL;
- return;
-- }
++
++ JvmtiThreadState *state = JvmtiThreadState::state_for(JavaThread::current());
++
++ _max_redefinition_flags = Klass::NoRedefinition;
++ jvmtiError result = JVMTI_ERROR_NONE;
++
++ for (int i=0; i<all_affected_klasses.length(); i++) {
++ RC_TRACE(0x00000002, ("Processing affected class %d of %d",
++ i+1, all_affected_klasses.length()));
++
++ instanceKlassHandle the_class = all_affected_klasses.at(i);
++ RC_TRACE(0x00000002, ("name=%s",
++ the_class->name()->as_C_string()));
++
++ the_class->link_class(THREAD);
++ result = check_exception();
++ if (result != JVMTI_ERROR_NONE) break;
++
++ // Find new class bytes
++ const unsigned char* class_bytes;
++ jint class_byte_count;
++ jvmtiError error;
++ jboolean not_changed;
++ if ((error = find_class_bytes(the_class, &class_bytes, &class_byte_count, &not_changed)) != JVMTI_ERROR_NONE) {
++ RC_TRACE(0x00000001, ("Error finding class bytes: %d",
++ (int)error));
++ result = error;
++ break;
+ }
- }
-
-+ if (_class_count == 0) RC_ABORT(JVMTI_ERROR_NONE);
-+ if (_class_defs == NULL) RC_ABORT(JVMTI_ERROR_NULL_POINTER);
- for (int i = 0; i < _class_count; i++) {
+- for (int i = 0; i < _class_count; i++) {
- redefine_single_class(_class_defs[i].klass, _scratch_classes[i], thread);
-+ if (_class_defs[i].klass == NULL) RC_ABORT(JVMTI_ERROR_INVALID_CLASS);
-+ if (_class_defs[i].class_byte_count == 0) RC_ABORT(JVMTI_ERROR_INVALID_CLASS_FORMAT);
-+ if (_class_defs[i].class_bytes == NULL) RC_ABORT(JVMTI_ERROR_NULL_POINTER);
- }
+- }
- // Disable any dependent concurrent compilations
- SystemDictionary::notice_modification();
-
+-
- // Set flag indicating that some invariants are no longer true.
- // See jvmtiExport.hpp for detailed explanation.
- JvmtiExport::set_has_redefined_a_class();
@@ -5114,7 +5155,82 @@ index 606be1c..ef4f380 100644
-// always called for non-product bits.
-#ifdef PRODUCT
- if (RC_TRACE_ENABLED(0x00004000)) {
--#endif
++ assert(class_bytes != NULL && class_byte_count != 0, "Class bytes defined at this point!");
++
++
++ // Set redefined class handle in JvmtiThreadState class.
++ // This redefined class is sent to agent event handler for class file
++ // load hook event.
++ state->set_class_being_redefined(&the_class, _class_load_kind);
++
++ RC_TRACE(0x00000002, ("Before resolving from stream"));
++
++ RC_TIMER_STOP(_timer_prologue);
++ RC_TIMER_START(_timer_class_loading);
++
++
++ // Parse the stream.
++ Handle the_class_loader(THREAD, the_class->class_loader());
++ Handle protection_domain(THREAD, the_class->protection_domain());
++ Symbol* the_class_sym = the_class->name();
++ ClassFileStream st((u1*) class_bytes, class_byte_count, (char *)"__VM_RedefineClasses__");
++ instanceKlassHandle new_class(THREAD, SystemDictionary::resolve_from_stream(the_class_sym,
++ the_class_loader,
++ protection_domain,
++ &st,
++ true,
++ the_class,
++ THREAD));
++
++ not_changed = false;
++
++ RC_TIMER_STOP(_timer_class_loading);
++ RC_TIMER_START(_timer_prologue);
++
++ RC_TRACE(0x00000002, ("After resolving class from stream!"));
++ // Clear class_being_redefined just to be sure.
++ state->clear_class_being_redefined();
++
++ result = check_exception();
++ if (result != JVMTI_ERROR_NONE) break;
++
++#ifdef ASSERT
++
++ assert(new_class() != NULL, "Class could not be loaded!");
++ assert(new_class() != the_class(), "must be different");
++ assert(new_class->new_version() == NULL && new_class->old_version() != NULL, "");
++
++
++ objArrayOop k_interfaces = new_class->local_interfaces();
++ for (int j=0; j<k_interfaces->length(); j++) {
++ assert(((klassOop)k_interfaces->obj_at(j))->klass_part()->is_newest_version(), "just checking");
++ }
++
++ if (!THREAD->is_Compiler_thread()) {
++
++ RC_TRACE(0x00000002, ("name=%s loader="INTPTR_FORMAT" protection_domain="INTPTR_FORMAT" ",
++ the_class->name()->as_C_string(),
++ (address)(the_class->class_loader()),
++ (address)(the_class->protection_domain())));
++ // If we are on the compiler thread, we must not try to resolve a class.
++ klassOop systemLookup = SystemDictionary::resolve_or_null(the_class->name(), the_class->class_loader(), the_class->protection_domain(), THREAD);
++
++ if (systemLookup != NULL) {
++ assert(systemLookup == new_class->old_version(), "Old class must be in system dictionary!");
++
++
++ Klass *subklass = new_class()->klass_part()->subklass();
++ while (subklass != NULL) {
++ assert(subklass->new_version() == NULL, "Most recent version of class!");
++ subklass = subklass->next_sibling();
++ }
++ } else {
++ // This can happen for reflection generated classes.. ?
++ CLEAR_PENDING_EXCEPTION;
++ }
++ }
++
+ #endif
- RC_TRACE_WITH_THREAD(0x00004000, thread, ("calling check_class"));
- SystemDictionary::classes_do(check_class, thread);
-#ifdef PRODUCT
@@ -5153,9 +5269,9 @@ index 606be1c..ef4f380 100644
- if (the_class_oop == NULL || !Klass::cast(the_class_oop)->oop_is_instance()) {
- return false;
- }
- return true;
- }
-
+- return true;
+-}
+-
-// Append the current entry at scratch_i in scratch_cp to *merge_cp_p
-// where the end of *merge_cp_p is specified by *merge_cp_length_p. For
-// direct CP entries, there is just the current entry to append. For
@@ -5170,21 +5286,10 @@ index 606be1c..ef4f380 100644
-void VM_RedefineClasses::append_entry(constantPoolHandle scratch_cp,
- int scratch_i, constantPoolHandle *merge_cp_p, int *merge_cp_length_p,
- TRAPS) {
-+jvmtiError VM_RedefineClasses::check_exception() const {
-+ Thread* THREAD = Thread::current();
-+ if (HAS_PENDING_EXCEPTION) {
-
+-
- // append is different depending on entry tag type
- switch (scratch_cp->tag_at(scratch_i).value()) {
-+ Symbol* ex_name = PENDING_EXCEPTION->klass()->klass_part()->name();
-+ RC_TRACE(0x00000001, ("parse_stream exception: '%s'",
-+ ex_name->as_C_string()));
-+ if (TraceRedefineClasses >= 1) {
-+ java_lang_Throwable::print(PENDING_EXCEPTION, tty);
-+ tty->print_cr("");
-+ }
-+ CLEAR_PENDING_EXCEPTION;
-
+-
- // The old verifier is implemented outside the VM. It loads classes,
- // but does not resolve constant pool entries directly so we never
- // see Class entries here with the old verifier. Similarly the old
@@ -5205,7 +5310,14 @@ index 606be1c..ef4f380 100644
- // The new entry in *merge_cp_p is at a different index than
- // the new entry in scratch_cp so we need to map the index values.
- map_index(scratch_cp, scratch_i, *merge_cp_length_p);
-- }
++
++ if (RC_TRACE_ENABLED(0x00000001)) {
++ if (new_class->layout_helper() != the_class->layout_helper()) {
++ RC_TRACE(0x00000001, ("Instance size change for class %s: new=%d old=%d",
++ new_class->name()->as_C_string(),
++ new_class->layout_helper(),
++ the_class->layout_helper()));
+ }
- (*merge_cp_length_p)++;
- } break;
-
@@ -5317,25 +5429,124 @@ index 606be1c..ef4f380 100644
- break;
- default:
- guarantee(false, "bad switch");
-- break;
-- }
++ }
++
++ // Set the new version of the class
++ new_class->set_revision_number(_revision_number);
++ new_class->set_redefinition_index(i);
++ the_class->set_new_version(new_class());
++ _new_classes->append(new_class);
++
++ assert(new_class->new_version() == NULL, "");
++
++ int redefinition_flags = Klass::NoRedefinition;
++
++ if (not_changed) {
++ redefinition_flags = Klass::NoRedefinition;
++ } else if (AllowAdvancedClassRedefinition) {
++ redefinition_flags = calculate_redefinition_flags(new_class);
++ } else {
++ jvmtiError allowed = check_redefinition_allowed(new_class);
++ if (allowed != JVMTI_ERROR_NONE) {
++ RC_TRACE(0x00000001, ("Error redefinition not allowed!"));
++ result = allowed;
+ break;
+ }
-
- if (klass_ref_i != new_klass_ref_i) {
- RC_TRACE(0x00080000, ("%s entry@%d class_index changed: %d to %d",
- entry_name, *merge_cp_length_p, klass_ref_i, new_klass_ref_i));
-- }
++ redefinition_flags = Klass::ModifyClass;
++ }
++
++ if (new_class->super() != NULL) {
++ redefinition_flags = redefinition_flags | new_class->super()->klass_part()->redefinition_flags();
++ }
++
++ for (int j=0; j<new_class->local_interfaces()->length(); j++) {
++ redefinition_flags = redefinition_flags | ((klassOop)new_class->local_interfaces()->obj_at(j))->klass_part()->redefinition_flags();
++ }
++
++ new_class->set_redefinition_flags(redefinition_flags);
++
++ _max_redefinition_flags = _max_redefinition_flags | redefinition_flags;
++
++ if ((redefinition_flags & Klass::ModifyInstances) != 0) {
++ // TODO: Check if watch access flags of static fields are updated correctly.
++ calculate_instance_update_information(_new_classes->at(i)());
++ } else {
++ assert(new_class->layout_helper() >> 1 == new_class->old_version()->klass_part()->layout_helper() >> 1, "must be equal");
++ assert(new_class->fields()->length() == ((instanceKlass*)new_class->old_version()->klass_part())->fields()->length(), "must be equal");
++
++ fieldDescriptor fd_new;
++ fieldDescriptor fd_old;
++ for (JavaFieldStream fs(new_class); !fs.done(); fs.next()) {
++ fd_new.initialize(new_class(), fs.index());
++ fd_old.initialize(new_class->old_version(), fs.index());
++ transfer_special_access_flags(&fd_old, &fd_new);
+ }
- if (name_and_type_ref_i != new_name_and_type_ref_i) {
- RC_TRACE(0x00080000,
- ("%s entry@%d name_and_type_index changed: %d to %d",
- entry_name, *merge_cp_length_p, name_and_type_ref_i,
- new_name_and_type_ref_i));
-- }
++ }
++
++ if (RC_TRACE_ENABLED(0x00000008)) {
++ if (new_class->super() != NULL) {
++ RC_TRACE(0x00000008, ("Super class is %s",
++ new_class->super()->klass_part()->name()->as_C_string()));
+ }
-
- if (scratch_i != *merge_cp_length_p) {
- // The new entry in *merge_cp_p is at a different index than
- // the new entry in scratch_cp so we need to map the index values.
- map_index(scratch_cp, scratch_i, *merge_cp_length_p);
-- }
++ }
++
++#ifdef ASSERT
++ assert(new_class->super() == NULL || new_class->super()->klass_part()->new_version() == NULL, "Super klass must be newest version!");
++
++ the_class->vtable()->verify(tty);
++ new_class->vtable()->verify(tty);
++#endif
++
++ RC_TRACE(0x00000002, ("Verification done!"));
++
++ if (i == all_affected_klasses.length() - 1) {
++
++ // This was the last class processed => check if additional classes have been loaded in the meantime
++
++ RC_TIMER_STOP(_timer_prologue);
++ lock_threads();
++ RC_TIMER_START(_timer_prologue);
++
++ for (int j=0; j<all_affected_klasses.length(); j++) {
++
++ klassOop initial_klass = all_affected_klasses.at(j)();
++ Klass *initial_subklass = initial_klass->klass_part()->subklass();
++ Klass *cur_klass = initial_subklass;
++ while(cur_klass != NULL) {
++
++ if(cur_klass->oop_is_instance() && cur_klass->is_newest_version()) {
++ instanceKlassHandle handle(THREAD, cur_klass->as_klassOop());
++ if (!all_affected_klasses.contains(handle)) {
++
++ int k = i + 1;
++ for (; k<all_affected_klasses.length(); k++) {
++ if (all_affected_klasses.at(k)->is_subtype_of(cur_klass->as_klassOop())) {
++ break;
++ }
++ }
++ all_affected_klasses.insert_before(k, handle);
++ RC_TRACE(0x00000002, ("Adding newly loaded class to affected classes: %s",
++ cur_klass->name()->as_C_string()));
++ }
++ }
++
++ cur_klass = cur_klass->next_sibling();
++ }
+ }
- (*merge_cp_length_p)++;
- } break;
-
@@ -5453,29 +5664,16 @@ index 606be1c..ef4f380 100644
- // Found a matching entry somewhere else in *merge_cp_p so just need a mapping entry.
- new_ref_i = found_i;
- map_index(scratch_cp, ref_i, found_i);
-+ if (ex_name == vmSymbols::java_lang_UnsupportedClassVersionError()) {
-+ return JVMTI_ERROR_UNSUPPORTED_VERSION;
-+ } else if (ex_name == vmSymbols::java_lang_ClassFormatError()) {
-+ return JVMTI_ERROR_INVALID_CLASS_FORMAT;
-+ } else if (ex_name == vmSymbols::java_lang_ClassCircularityError()) {
-+ return JVMTI_ERROR_CIRCULAR_CLASS_DEFINITION;
-+ } else if (ex_name == vmSymbols::java_lang_NoClassDefFoundError()) {
-+ // The message will be "XXX (wrong name: YYY)"
-+ return JVMTI_ERROR_NAMES_DONT_MATCH;
-+ } else if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
-+ return JVMTI_ERROR_OUT_OF_MEMORY;
- } else {
+- } else {
- // no match found so we have to append this entry to *merge_cp_p
- append_entry(scratch_cp, ref_i, merge_cp_p, merge_cp_length_p, THREAD);
- // The above call to append_entry() can only append one entry
- // so the post call query of *merge_cp_length_p is only for
- // the sake of consistency.
- new_ref_i = *merge_cp_length_p - 1;
-+ // Just in case more exceptions can be thrown..
-+ return JVMTI_ERROR_FAILS_VERIFICATION;
- }
- }
-
+- }
+- }
+-
- return new_ref_i;
-} // end find_or_append_indirect_entry()
-
@@ -5569,9 +5767,16 @@ index 606be1c..ef4f380 100644
- RC_TRACE_WITH_THREAD(0x00040000, THREAD,
- ("operands_index_map[%d]: old=%d new=%d", count, i, value));
- count++;
-- }
-- }
-- }
++
++ int new_count = all_affected_klasses.length() - 1 - i;
++ if (new_count != 0) {
++
++ unlock_threads();
++ RC_TRACE(0x00000001, ("Found new number of affected classes: %d",
++ new_count));
+ }
+ }
+ }
- // Clean-up
- _operands_index_map_p = NULL;
- _operands_cur_length = 0;
@@ -5593,257 +5798,6 @@ index 606be1c..ef4f380 100644
- save = scratch_class->get_method_default_annotations_of(i);
- scratch_class->set_method_default_annotations_of(i, scratch_class->get_method_default_annotations_of(j));
- scratch_class->set_method_default_annotations_of(j, save);
-+ return JVMTI_ERROR_NONE;
- }
-
-+// Loads all new class versions and stores the instanceKlass handles in an array.
-+jvmtiError VM_RedefineClasses::load_new_class_versions(TRAPS) {
-
--jvmtiError VM_RedefineClasses::compare_and_normalize_class_versions(
-- instanceKlassHandle the_class,
-- instanceKlassHandle scratch_class) {
-+ ResourceMark rm(THREAD);
-+
-+ RC_TRACE(0x00000001, ("==================================================================="));
-+ RC_TRACE(0x00000001, ("load new class versions (%d)",
-+ _class_count));
-+
-+ // Retrieve an array of all classes that need to be redefined
-+ GrowableArray<instanceKlassHandle> all_affected_klasses;
-+ jvmtiError err = find_sorted_affected_classes(&all_affected_klasses);
-+ if (err != JVMTI_ERROR_NONE) {
-+ RC_TRACE(0x00000001, ("Error finding sorted affected classes: %d",
-+ (int)err));
-+ return err;
-+ }
-+
-+
-+ JvmtiThreadState *state = JvmtiThreadState::state_for(JavaThread::current());
-+
-+ _max_redefinition_flags = Klass::NoRedefinition;
-+ jvmtiError result = JVMTI_ERROR_NONE;
-+
-+ for (int i=0; i<all_affected_klasses.length(); i++) {
-+ RC_TRACE(0x00000002, ("Processing affected class %d of %d",
-+ i+1, all_affected_klasses.length()));
-+
-+ instanceKlassHandle the_class = all_affected_klasses.at(i);
-+ RC_TRACE(0x00000002, ("name=%s",
-+ the_class->name()->as_C_string()));
-+
-+ the_class->link_class(THREAD);
-+ result = check_exception();
-+ if (result != JVMTI_ERROR_NONE) break;
-+
-+ // Find new class bytes
-+ const unsigned char* class_bytes;
-+ jint class_byte_count;
-+ jvmtiError error;
-+ jboolean not_changed;
-+ if ((error = find_class_bytes(the_class, &class_bytes, &class_byte_count, &not_changed)) != JVMTI_ERROR_NONE) {
-+ RC_TRACE(0x00000001, ("Error finding class bytes: %d",
-+ (int)error));
-+ result = error;
-+ break;
-+ }
-+ assert(class_bytes != NULL && class_byte_count != 0, "Class bytes defined at this point!");
-+
-+
-+ // Set redefined class handle in JvmtiThreadState class.
-+ // This redefined class is sent to agent event handler for class file
-+ // load hook event.
-+ state->set_class_being_redefined(&the_class, _class_load_kind);
-+
-+ RC_TRACE(0x00000002, ("Before resolving from stream"));
-+
-+ RC_TIMER_STOP(_timer_prologue);
-+ RC_TIMER_START(_timer_class_loading);
-+
-+
-+ // Parse the stream.
-+ Handle the_class_loader(THREAD, the_class->class_loader());
-+ Handle protection_domain(THREAD, the_class->protection_domain());
-+ Symbol* the_class_sym = the_class->name();
-+ ClassFileStream st((u1*) class_bytes, class_byte_count, (char *)"__VM_RedefineClasses__");
-+ instanceKlassHandle new_class(THREAD, SystemDictionary::resolve_from_stream(the_class_sym,
-+ the_class_loader,
-+ protection_domain,
-+ &st,
-+ true,
-+ the_class,
-+ THREAD));
-+
-+ not_changed = false;
-+
-+ RC_TIMER_STOP(_timer_class_loading);
-+ RC_TIMER_START(_timer_prologue);
-+
-+ RC_TRACE(0x00000002, ("After resolving class from stream!"));
-+ // Clear class_being_redefined just to be sure.
-+ state->clear_class_being_redefined();
-+
-+ result = check_exception();
-+ if (result != JVMTI_ERROR_NONE) break;
-+
-+#ifdef ASSERT
-+
-+ assert(new_class() != NULL, "Class could not be loaded!");
-+ assert(new_class() != the_class(), "must be different");
-+ assert(new_class->new_version() == NULL && new_class->old_version() != NULL, "");
-+
-+
-+ objArrayOop k_interfaces = new_class->local_interfaces();
-+ for (int j=0; j<k_interfaces->length(); j++) {
-+ assert(((klassOop)k_interfaces->obj_at(j))->klass_part()->is_newest_version(), "just checking");
-+ }
-+
-+ if (!THREAD->is_Compiler_thread()) {
-+
-+ RC_TRACE(0x00000002, ("name=%s loader="INTPTR_FORMAT" protection_domain="INTPTR_FORMAT" ",
-+ the_class->name()->as_C_string(),
-+ (address)(the_class->class_loader()),
-+ (address)(the_class->protection_domain())));
-+ // If we are on the compiler thread, we must not try to resolve a class.
-+ klassOop systemLookup = SystemDictionary::resolve_or_null(the_class->name(), the_class->class_loader(), the_class->protection_domain(), THREAD);
-+
-+ if (systemLookup != NULL) {
-+ assert(systemLookup == new_class->old_version(), "Old class must be in system dictionary!");
-+
-+
-+ Klass *subklass = new_class()->klass_part()->subklass();
-+ while (subklass != NULL) {
-+ assert(subklass->new_version() == NULL, "Most recent version of class!");
-+ subklass = subklass->next_sibling();
-+ }
-+ } else {
-+ // This can happen for reflection generated classes.. ?
-+ CLEAR_PENDING_EXCEPTION;
-+ }
-+ }
-+
-+#endif
-+
-+ if (RC_TRACE_ENABLED(0x00000001)) {
-+ if (new_class->layout_helper() != the_class->layout_helper()) {
-+ RC_TRACE(0x00000001, ("Instance size change for class %s: new=%d old=%d",
-+ new_class->name()->as_C_string(),
-+ new_class->layout_helper(),
-+ the_class->layout_helper()));
-+ }
-+ }
-+
-+ // Set the new version of the class
-+ new_class->set_revision_number(_revision_number);
-+ new_class->set_redefinition_index(i);
-+ the_class->set_new_version(new_class());
-+ _new_classes->append(new_class);
-+
-+ assert(new_class->new_version() == NULL, "");
-+
-+ int redefinition_flags = Klass::NoRedefinition;
-+
-+ if (not_changed) {
-+ redefinition_flags = Klass::NoRedefinition;
-+ } else if (AllowAdvancedClassRedefinition) {
-+ redefinition_flags = calculate_redefinition_flags(new_class);
-+ } else {
-+ jvmtiError allowed = check_redefinition_allowed(new_class);
-+ if (allowed != JVMTI_ERROR_NONE) {
-+ RC_TRACE(0x00000001, ("Error redefinition not allowed!"));
-+ result = allowed;
-+ break;
-+ }
-+ redefinition_flags = Klass::ModifyClass;
-+ }
-+
-+ if (new_class->super() != NULL) {
-+ redefinition_flags = redefinition_flags | new_class->super()->klass_part()->redefinition_flags();
-+ }
-+
-+ for (int j=0; j<new_class->local_interfaces()->length(); j++) {
-+ redefinition_flags = redefinition_flags | ((klassOop)new_class->local_interfaces()->obj_at(j))->klass_part()->redefinition_flags();
-+ }
-+
-+ new_class->set_redefinition_flags(redefinition_flags);
-+
-+ _max_redefinition_flags = _max_redefinition_flags | redefinition_flags;
-+
-+ if ((redefinition_flags & Klass::ModifyInstances) != 0) {
-+ // TODO: Check if watch access flags of static fields are updated correctly.
-+ calculate_instance_update_information(_new_classes->at(i)());
-+ } else {
-+ assert(new_class->layout_helper() >> 1 == new_class->old_version()->klass_part()->layout_helper() >> 1, "must be equal");
-+ assert(new_class->fields()->length() == ((instanceKlass*)new_class->old_version()->klass_part())->fields()->length(), "must be equal");
-+
-+ fieldDescriptor fd_new;
-+ fieldDescriptor fd_old;
-+ for (JavaFieldStream fs(new_class); !fs.done(); fs.next()) {
-+ fd_new.initialize(new_class(), fs.index());
-+ fd_old.initialize(new_class->old_version(), fs.index());
-+ transfer_special_access_flags(&fd_old, &fd_new);
-+ }
-+ }
-+
-+ if (RC_TRACE_ENABLED(0x00000008)) {
-+ if (new_class->super() != NULL) {
-+ RC_TRACE(0x00000008, ("Super class is %s",
-+ new_class->super()->klass_part()->name()->as_C_string()));
-+ }
-+ }
-+
-+#ifdef ASSERT
-+ assert(new_class->super() == NULL || new_class->super()->klass_part()->new_version() == NULL, "Super klass must be newest version!");
-+
-+ the_class->vtable()->verify(tty);
-+ new_class->vtable()->verify(tty);
-+#endif
-+
-+ RC_TRACE(0x00000002, ("Verification done!"));
-+
-+ if (i == all_affected_klasses.length() - 1) {
-+
-+ // This was the last class processed => check if additional classes have been loaded in the meantime
-+
-+ RC_TIMER_STOP(_timer_prologue);
-+ lock_threads();
-+ RC_TIMER_START(_timer_prologue);
-+
-+ for (int j=0; j<all_affected_klasses.length(); j++) {
-+
-+ klassOop initial_klass = all_affected_klasses.at(j)();
-+ Klass *initial_subklass = initial_klass->klass_part()->subklass();
-+ Klass *cur_klass = initial_subklass;
-+ while(cur_klass != NULL) {
-+
-+ if(cur_klass->oop_is_instance() && cur_klass->is_newest_version()) {
-+ instanceKlassHandle handle(THREAD, cur_klass->as_klassOop());
-+ if (!all_affected_klasses.contains(handle)) {
-+
-+ int k = i + 1;
-+ for (; k<all_affected_klasses.length(); k++) {
-+ if (all_affected_klasses.at(k)->is_subtype_of(cur_klass->as_klassOop())) {
-+ break;
-+ }
-+ }
-+ all_affected_klasses.insert_before(k, handle);
-+ RC_TRACE(0x00000002, ("Adding newly loaded class to affected classes: %s",
-+ cur_klass->name()->as_C_string()));
-+ }
-+ }
-+
-+ cur_klass = cur_klass->next_sibling();
-+ }
-+ }
-+
-+ int new_count = all_affected_klasses.length() - 1 - i;
-+ if (new_count != 0) {
-+
-+ unlock_threads();
-+ RC_TRACE(0x00000001, ("Found new number of affected classes: %d",
-+ new_count));
-+ }
-+ }
-+ }
+
+ if (result != JVMTI_ERROR_NONE) {
+ rollback();
@@ -5896,8 +5850,12 @@ index 606be1c..ef4f380 100644
+
+ RC_TRACE(0x00000001, ("Finished verification!"));
+ return JVMTI_ERROR_NONE;
-+}
-+
+ }
+
+-
+-jvmtiError VM_RedefineClasses::compare_and_normalize_class_versions(
+- instanceKlassHandle the_class,
+- instanceKlassHandle scratch_class) {
+void VM_RedefineClasses::lock_threads() {
+
+ RC_TIMER_START(_timer_wait_for_locks);
@@ -6070,12 +6028,11 @@ index 606be1c..ef4f380 100644
++oi; // advance to next old method
break;
default:
-@@ -833,2120 +880,2265 @@
+@@ -833,2121 +880,2266 @@
return JVMTI_ERROR_NONE;
}
-+int VM_RedefineClasses::calculate_redefinition_flags(instanceKlassHandle new_class) {
-
+-
-// Find new constant pool index value for old constant pool index value
-// by seaching the index map. Returns zero (0) if there is no mapped
-// value for the old constant pool index.
@@ -6083,6 +6040,8 @@ index 606be1c..ef4f380 100644
- if (_index_map_count == 0) {
- // map is empty so nothing can be found
- return 0;
++int VM_RedefineClasses::calculate_redefinition_flags(instanceKlassHandle new_class) {
++
+ int result = Klass::NoRedefinition;
+
+
@@ -6104,29 +6063,51 @@ index 606be1c..ef4f380 100644
- // not happen in regular constant pool merging use, but it can
- // happen if a corrupt annotation is processed.
- return 0;
-- }
+ int i;
-
-- int value = _index_map_p->at(old_index);
-- if (value == -1) {
-- // the old_index is not mapped
-- return 0;
-- }
++
+ //////////////////////////////////////////////////////////////////////////////////////////////////////////
+ // Check superclasses
+ assert(new_class->super() == NULL || new_class->super()->klass_part()->is_newest_version(), "");
+ if (the_class->super() != new_class->super()) {
+ // Super class changed
-
-- return value;
--} // end find_new_index()
++
+ klassOop cur_klass = the_class->super();
+ while (cur_klass != NULL) {
+ if (!new_class->is_subclass_of(cur_klass->klass_part()->newest_version())) {
+ RC_TRACE(0x00000002, ("Removed super class %s",
+ cur_klass->klass_part()->name()->as_C_string()));
+ result = result | Klass::RemoveSuperType | Klass::ModifyInstances | Klass::ModifyClass;
++
++ if (!cur_klass->klass_part()->has_subtype_changed()) {
++ RC_TRACE(0x00000002, ("Subtype changed of class %s",
++ cur_klass->klass_part()->name()->as_C_string()));
++ cur_klass->klass_part()->set_subtype_changed(true);
++ }
++ }
++
++ cur_klass = cur_klass->klass_part()->super();
++ }
++
++ cur_klass = new_class->super();
++ while (cur_klass != NULL) {
++ if (!the_class->is_subclass_of(cur_klass->klass_part()->old_version())) {
++ RC_TRACE(0x00000002, ("Added super class %s",
++ cur_klass->klass_part()->name()->as_C_string()));
++ result = result | Klass::ModifyClass | Klass::ModifyInstances;
++ }
++ cur_klass = cur_klass->klass_part()->super();
++ }
+ }
+- int value = _index_map_p->at(old_index);
+- if (value == -1) {
+- // the old_index is not mapped
+- return 0;
+- }
+-
+- return value;
+-} // end find_new_index()
+-
-
-// Find new bootstrap specifier index value for old bootstrap specifier index
-// value by seaching the index map. Returns zero (-1) if there is no mapped
@@ -6280,349 +6261,9 @@ index 606be1c..ef4f380 100644
- return JVMTI_ERROR_OUT_OF_MEMORY;
- } else { // Just in case more exceptions can be thrown..
- return JVMTI_ERROR_FAILS_VERIFICATION;
-- }
-- }
--
-- // Ensure class is linked before redefine
-- if (!the_class->is_linked()) {
-- the_class->link_class(THREAD);
-- if (HAS_PENDING_EXCEPTION) {
-- Symbol* ex_name = PENDING_EXCEPTION->klass()->klass_part()->name();
-- // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
-- RC_TRACE_WITH_THREAD(0x00000002, THREAD, ("link_class exception: '%s'",
-- ex_name->as_C_string()));
-- CLEAR_PENDING_EXCEPTION;
-- if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
-- return JVMTI_ERROR_OUT_OF_MEMORY;
-- } else {
-- return JVMTI_ERROR_INTERNAL;
-+ if (!cur_klass->klass_part()->has_subtype_changed()) {
-+ RC_TRACE(0x00000002, ("Subtype changed of class %s",
-+ cur_klass->klass_part()->name()->as_C_string()));
-+ cur_klass->klass_part()->set_subtype_changed(true);
- }
- }
-+
-+ cur_klass = cur_klass->klass_part()->super();
- }
-
-- // Do the validity checks in compare_and_normalize_class_versions()
-- // before verifying the byte codes. By doing these checks first, we
-- // limit the number of functions that require redirection from
-- // the_class to scratch_class. In particular, we don't have to
-- // modify JNI GetSuperclass() and thus won't change its performance.
-- jvmtiError res = compare_and_normalize_class_versions(the_class,
-- scratch_class);
-- if (res != JVMTI_ERROR_NONE) {
-- return res;
-- }
--
-- // verify what the caller passed us
-- {
-- // The bug 6214132 caused the verification to fail.
-- // Information about the_class and scratch_class is temporarily
-- // recorded into jvmtiThreadState. This data is used to redirect
-- // the_class to scratch_class in the JVM_* functions called by the
-- // verifier. Please, refer to jvmtiThreadState.hpp for the detailed
-- // description.
-- RedefineVerifyMark rvm(&the_class, &scratch_class, state);
-- Verifier::verify(
-- scratch_class, Verifier::ThrowException, true, THREAD);
-- }
--
-- if (HAS_PENDING_EXCEPTION) {
-- Symbol* ex_name = PENDING_EXCEPTION->klass()->klass_part()->name();
-- // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
-- RC_TRACE_WITH_THREAD(0x00000002, THREAD,
-- ("verify_byte_codes exception: '%s'", ex_name->as_C_string()));
-- CLEAR_PENDING_EXCEPTION;
-- if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
-- return JVMTI_ERROR_OUT_OF_MEMORY;
-- } else {
-- // tell the caller the bytecodes are bad
-- return JVMTI_ERROR_FAILS_VERIFICATION;
-+ cur_klass = new_class->super();
-+ while (cur_klass != NULL) {
-+ if (!the_class->is_subclass_of(cur_klass->klass_part()->old_version())) {
-+ RC_TRACE(0x00000002, ("Added super class %s",
-+ cur_klass->klass_part()->name()->as_C_string()));
-+ result = result | Klass::ModifyClass | Klass::ModifyInstances;
- }
-- }
--
-- res = merge_cp_and_rewrite(the_class, scratch_class, THREAD);
-- if (res != JVMTI_ERROR_NONE) {
-- return res;
-- }
--
-- if (VerifyMergedCPBytecodes) {
-- // verify what we have done during constant pool merging
-- {
-- RedefineVerifyMark rvm(&the_class, &scratch_class, state);
-- Verifier::verify(scratch_class, Verifier::ThrowException, true, THREAD);
-- }
--
-- if (HAS_PENDING_EXCEPTION) {
-- Symbol* ex_name = PENDING_EXCEPTION->klass()->klass_part()->name();
-- // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
-- RC_TRACE_WITH_THREAD(0x00000002, THREAD,
-- ("verify_byte_codes post merge-CP exception: '%s'",
-- ex_name->as_C_string()));
-- CLEAR_PENDING_EXCEPTION;
-- if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
-- return JVMTI_ERROR_OUT_OF_MEMORY;
-- } else {
-- // tell the caller that constant pool merging screwed up
-- return JVMTI_ERROR_INTERNAL;
-- }
-- }
-- }
--
-- Rewriter::rewrite(scratch_class, THREAD);
-- if (!HAS_PENDING_EXCEPTION) {
-- Rewriter::relocate_and_link(scratch_class, THREAD);
-- }
-- if (HAS_PENDING_EXCEPTION) {
-- Symbol* ex_name = PENDING_EXCEPTION->klass()->klass_part()->name();
-- CLEAR_PENDING_EXCEPTION;
-- if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
-- return JVMTI_ERROR_OUT_OF_MEMORY;
-- } else {
-- return JVMTI_ERROR_INTERNAL;
-- }
-- }
--
-- _scratch_classes[i] = scratch_class;
--
-- // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
-- RC_TRACE_WITH_THREAD(0x00000001, THREAD,
-- ("loaded name=%s (avail_mem=" UINT64_FORMAT "K)",
-- the_class->external_name(), os::available_memory() >> 10));
-- }
--
-- return JVMTI_ERROR_NONE;
--}
--
--
--// Map old_index to new_index as needed. scratch_cp is only needed
--// for RC_TRACE() calls.
--void VM_RedefineClasses::map_index(constantPoolHandle scratch_cp,
-- int old_index, int new_index) {
-- if (find_new_index(old_index) != 0) {
-- // old_index is already mapped
-- return;
-- }
--
-- if (old_index == new_index) {
-- // no mapping is needed
-- return;
-- }
--
-- _index_map_p->at_put(old_index, new_index);
-- _index_map_count++;
--
-- RC_TRACE(0x00040000, ("mapped tag %d at index %d to %d",
-- scratch_cp->tag_at(old_index).value(), old_index, new_index));
--} // end map_index()
--
--
--// Map old_index to new_index as needed.
--void VM_RedefineClasses::map_operand_index(int old_index, int new_index) {
-- if (find_new_operand_index(old_index) != -1) {
-- // old_index is already mapped
-- return;
-- }
--
-- if (old_index == new_index) {
-- // no mapping is needed
-- return;
-- }
--
-- _operands_index_map_p->at_put(old_index, new_index);
-- _operands_index_map_count++;
--
-- RC_TRACE(0x00040000, ("mapped bootstrap specifier at index %d to %d", old_index, new_index));
--} // end map_index()
--
--
--// Merge old_cp and scratch_cp and return the results of the merge via
--// merge_cp_p. The number of entries in *merge_cp_p is returned via
--// merge_cp_length_p. The entries in old_cp occupy the same locations
--// in *merge_cp_p. Also creates a map of indices from entries in
--// scratch_cp to the corresponding entry in *merge_cp_p. Index map
--// entries are only created for entries in scratch_cp that occupy a
--// different location in *merged_cp_p.
--bool VM_RedefineClasses::merge_constant_pools(constantPoolHandle old_cp,
-- constantPoolHandle scratch_cp, constantPoolHandle *merge_cp_p,
-- int *merge_cp_length_p, TRAPS) {
--
-- if (merge_cp_p == NULL) {
-- assert(false, "caller must provide scatch constantPool");
-- return false; // robustness
-- }
-- if (merge_cp_length_p == NULL) {
-- assert(false, "caller must provide scatch CP length");
-- return false; // robustness
-- }
-- // Worst case we need old_cp->length() + scratch_cp()->length(),
-- // but the caller might be smart so make sure we have at least
-- // the minimum.
-- if ((*merge_cp_p)->length() < old_cp->length()) {
-- assert(false, "merge area too small");
-- return false; // robustness
-- }
--
-- RC_TRACE_WITH_THREAD(0x00010000, THREAD,
-- ("old_cp_len=%d, scratch_cp_len=%d", old_cp->length(),
-- scratch_cp->length()));
--
-- {
-- // Pass 0:
-- // The old_cp is copied to *merge_cp_p; this means that any code
-- // using old_cp does not have to change. This work looks like a
-- // perfect fit for constantPoolOop::copy_cp_to(), but we need to
-- // handle one special case:
-- // - revert JVM_CONSTANT_Class to JVM_CONSTANT_UnresolvedClass
-- // This will make verification happy.
--
-- int old_i; // index into old_cp
--
-- // index zero (0) is not used in constantPools
-- for (old_i = 1; old_i < old_cp->length(); old_i++) {
-- // leave debugging crumb
-- jbyte old_tag = old_cp->tag_at(old_i).value();
-- switch (old_tag) {
-- case JVM_CONSTANT_Class:
-- case JVM_CONSTANT_UnresolvedClass:
-- // revert the copy to JVM_CONSTANT_UnresolvedClass
-- // May be resolving while calling this so do the same for
-- // JVM_CONSTANT_UnresolvedClass (klass_name_at() deals with transition)
-- (*merge_cp_p)->unresolved_klass_at_put(old_i,
-- old_cp->klass_name_at(old_i));
-- break;
--
-- case JVM_CONSTANT_Double:
-- case JVM_CONSTANT_Long:
-- // just copy the entry to *merge_cp_p, but double and long take
-- // two constant pool entries
-- constantPoolOopDesc::copy_entry_to(old_cp, old_i, *merge_cp_p, old_i, CHECK_0);
-- old_i++;
-- break;
--
-- default:
-- // just copy the entry to *merge_cp_p
-- constantPoolOopDesc::copy_entry_to(old_cp, old_i, *merge_cp_p, old_i, CHECK_0);
-- break;
-- }
-- } // end for each old_cp entry
--
-- constantPoolOopDesc::copy_operands(old_cp, *merge_cp_p, CHECK_0);
-- (*merge_cp_p)->extend_operands(scratch_cp, CHECK_0);
--
-- // We don't need to sanity check that *merge_cp_length_p is within
-- // *merge_cp_p bounds since we have the minimum on-entry check above.
-- (*merge_cp_length_p) = old_i;
-- }
--
-- // merge_cp_len should be the same as old_cp->length() at this point
-- // so this trace message is really a "warm-and-breathing" message.
-- RC_TRACE_WITH_THREAD(0x00020000, THREAD,
-- ("after pass 0: merge_cp_len=%d", *merge_cp_length_p));
--
-- int scratch_i; // index into scratch_cp
-- {
-- // Pass 1a:
-- // Compare scratch_cp entries to the old_cp entries that we have
-- // already copied to *merge_cp_p. In this pass, we are eliminating
-- // exact duplicates (matching entry at same index) so we only
-- // compare entries in the common indice range.
-- int increment = 1;
-- int pass1a_length = MIN2(old_cp->length(), scratch_cp->length());
-- for (scratch_i = 1; scratch_i < pass1a_length; scratch_i += increment) {
-- switch (scratch_cp->tag_at(scratch_i).value()) {
-- case JVM_CONSTANT_Double:
-- case JVM_CONSTANT_Long:
-- // double and long take two constant pool entries
-- increment = 2;
-- break;
--
-- default:
-- increment = 1;
-- break;
-- }
--
-- bool match = scratch_cp->compare_entry_to(scratch_i, *merge_cp_p,
-- scratch_i, CHECK_0);
-- if (match) {
-- // found a match at the same index so nothing more to do
-- continue;
-- } else if (is_unresolved_class_mismatch(scratch_cp, scratch_i,
-- *merge_cp_p, scratch_i)) {
-- // The mismatch in compare_entry_to() above is because of a
-- // resolved versus unresolved class entry at the same index
-- // with the same string value. Since Pass 0 reverted any
-- // class entries to unresolved class entries in *merge_cp_p,
-- // we go with the unresolved class entry.
-- continue;
-- } else if (is_unresolved_string_mismatch(scratch_cp, scratch_i,
-- *merge_cp_p, scratch_i)) {
-- // The mismatch in compare_entry_to() above is because of a
-- // resolved versus unresolved string entry at the same index
-- // with the same string value. We can live with whichever
-- // happens to be at scratch_i in *merge_cp_p.
-- continue;
-- }
--
-- int found_i = scratch_cp->find_matching_entry(scratch_i, *merge_cp_p,
-- CHECK_0);
-- if (found_i != 0) {
-- guarantee(found_i != scratch_i,
-- "compare_entry_to() and find_matching_entry() do not agree");
--
-- // Found a matching entry somewhere else in *merge_cp_p so
-- // just need a mapping entry.
-- map_index(scratch_cp, scratch_i, found_i);
-- continue;
-- }
--
-- // The find_matching_entry() call above could fail to find a match
-- // due to a resolved versus unresolved class or string entry situation
-- // like we solved above with the is_unresolved_*_mismatch() calls.
-- // However, we would have to call is_unresolved_*_mismatch() over
-- // all of *merge_cp_p (potentially) and that doesn't seem to be
-- // worth the time.
--
-- // No match found so we have to append this entry and any unique
-- // referenced entries to *merge_cp_p.
-- append_entry(scratch_cp, scratch_i, merge_cp_p, merge_cp_length_p,
-- CHECK_0);
-+ cur_klass = cur_klass->klass_part()->super();
- }
- }
-
-- RC_TRACE_WITH_THREAD(0x00020000, THREAD,
-- ("after pass 1a: merge_cp_len=%d, scratch_i=%d, index_map_len=%d",
-- *merge_cp_length_p, scratch_i, _index_map_count));
+ //////////////////////////////////////////////////////////////////////////////////////////////////////////
+ // Check interfaces
-
-- if (scratch_i < scratch_cp->length()) {
-- // Pass 1b:
-- // old_cp is smaller than scratch_cp so there are entries in
-- // scratch_cp that we have not yet processed. We take care of
-- // those now.
-- int increment = 1;
-- for (; scratch_i < scratch_cp->length(); scratch_i += increment) {
-- switch (scratch_cp->tag_at(scratch_i).value()) {
-- case JVM_CONSTANT_Double:
-- case JVM_CONSTANT_Long:
-- // double and long take two constant pool entries
-- increment = 2;
-- break;
--
-- default:
-- increment = 1;
-- break;
++
+ // Interfaces removed?
+ objArrayOop old_interfaces = the_class->transitive_interfaces();
+ for (i = 0; i<old_interfaces->length(); i++) {
@@ -6637,89 +6278,21 @@ index 606be1c..ef4f380 100644
+ old_interface->name()->as_C_string()));
+ old_interface->set_subtype_changed(true);
}
--
-- int found_i =
-- scratch_cp->find_matching_entry(scratch_i, *merge_cp_p, CHECK_0);
-- if (found_i != 0) {
-- // Found a matching entry somewhere else in *merge_cp_p so
-- // just need a mapping entry.
-- map_index(scratch_cp, scratch_i, found_i);
-- continue;
-- }
--
-- // No match found so we have to append this entry and any unique
-- // referenced entries to *merge_cp_p.
-- append_entry(scratch_cp, scratch_i, merge_cp_p, merge_cp_length_p,
-- CHECK_0);
}
-
-- RC_TRACE_WITH_THREAD(0x00020000, THREAD,
-- ("after pass 1b: merge_cp_len=%d, scratch_i=%d, index_map_len=%d",
-- *merge_cp_length_p, scratch_i, _index_map_count));
-- }
-- finalize_operands_merge(*merge_cp_p, THREAD);
--
-- return true;
--} // end merge_constant_pools()
--
--
--// Merge constant pools between the_class and scratch_class and
--// potentially rewrite bytecodes in scratch_class to use the merged
--// constant pool.
--jvmtiError VM_RedefineClasses::merge_cp_and_rewrite(
-- instanceKlassHandle the_class, instanceKlassHandle scratch_class,
-- TRAPS) {
-- // worst case merged constant pool length is old and new combined
-- int merge_cp_length = the_class->constants()->length()
-- + scratch_class->constants()->length();
--
-- constantPoolHandle old_cp(THREAD, the_class->constants());
-- constantPoolHandle scratch_cp(THREAD, scratch_class->constants());
--
-- // Constant pools are not easily reused so we allocate a new one
-- // each time.
-- // merge_cp is created unsafe for concurrent GC processing. It
-- // should be marked safe before discarding it. Even though
-- // garbage, if it crosses a card boundary, it may be scanned
-- // in order to find the start of the first complete object on the card.
-- constantPoolHandle merge_cp(THREAD,
-- oopFactory::new_constantPool(merge_cp_length,
-- oopDesc::IsUnsafeConc,
-- THREAD));
-- int orig_length = old_cp->orig_length();
-- if (orig_length == 0) {
-- // This old_cp is an actual original constant pool. We save
-- // the original length in the merged constant pool so that
-- // merge_constant_pools() can be more efficient. If a constant
-- // pool has a non-zero orig_length() value, then that constant
-- // pool was created by a merge operation in RedefineClasses.
-- merge_cp->set_orig_length(old_cp->length());
-- } else {
-- // This old_cp is a merged constant pool from a previous
-- // RedefineClasses() calls so just copy the orig_length()
-- // value.
-- merge_cp->set_orig_length(old_cp->orig_length());
- }
-
-- ResourceMark rm(THREAD);
-- _index_map_count = 0;
-- _index_map_p = new intArray(scratch_cp->length(), -1);
--
-- _operands_cur_length = constantPoolOopDesc::operand_array_length(old_cp->operands());
-- _operands_index_map_count = 0;
-- _operands_index_map_p = new intArray(
-- constantPoolOopDesc::operand_array_length(scratch_cp->operands()), -1);
--
-- // reference to the cp holder is needed for copy_operands()
-- merge_cp->set_pool_holder(scratch_class());
-- bool result = merge_constant_pools(old_cp, scratch_cp, &merge_cp,
-- &merge_cp_length, THREAD);
-- merge_cp->set_pool_holder(NULL);
--
-- if (!result) {
-- // The merge can fail due to memory allocation failure or due
-- // to robustness checks.
-- return JVMTI_ERROR_INTERNAL;
+- // Ensure class is linked before redefine
+- if (!the_class->is_linked()) {
+- the_class->link_class(THREAD);
+- if (HAS_PENDING_EXCEPTION) {
+- Symbol* ex_name = PENDING_EXCEPTION->klass()->klass_part()->name();
+- // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
+- RC_TRACE_WITH_THREAD(0x00000002, THREAD, ("link_class exception: '%s'",
+- ex_name->as_C_string()));
+- CLEAR_PENDING_EXCEPTION;
+- if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
+- return JVMTI_ERROR_OUT_OF_MEMORY;
++ }
++
+ // Interfaces added?
+ objArrayOop new_interfaces = new_class->transitive_interfaces();
+ for (i = 0; i<new_interfaces->length(); i++) {
@@ -6728,28 +6301,16 @@ index 606be1c..ef4f380 100644
+ RC_TRACE(0x00000002, ("Added interface %s",
+ ((klassOop)new_interfaces->obj_at(i))->klass_part()->name()->as_C_string()));
+ }
- }
-
-- RC_TRACE_WITH_THREAD(0x00010000, THREAD,
-- ("merge_cp_len=%d, index_map_len=%d", merge_cp_length, _index_map_count));
-
-- if (_index_map_count == 0) {
-- // there is nothing to map between the new and merged constant pools
++ }
++
++
+ // Check whether class modifiers are the same.
+ jushort old_flags = (jushort) the_class->access_flags().get_flags();
+ jushort new_flags = (jushort) new_class->access_flags().get_flags();
+ if (old_flags != new_flags) {
+ // TODO (tw): Can this have any effects?
+ }
-
-- if (old_cp->length() == scratch_cp->length()) {
-- // The old and new constant pools are the same length and the
-- // index map is empty. This means that the three constant pools
-- // are equivalent (but not the same). Unfortunately, the new
-- // constant pool has not gone through link resolution nor have
-- // the new class bytecodes gone through constant pool cache
-- // rewriting so we can't use the old constant pool with the new
-- // class.
++
+ // Check if the number, names, types and order of fields declared in these classes
+ // are the same.
+ JavaFieldStream old_fs(the_class);
@@ -6774,22 +6335,11 @@ index 606be1c..ef4f380 100644
+ result = result | Klass::ModifyInstances;
+ }
+ }
-
-- merge_cp()->set_is_conc_safe(true);
-- merge_cp = constantPoolHandle(); // toss the merged constant pool
-- } else if (old_cp->length() < scratch_cp->length()) {
-- // The old constant pool has fewer entries than the new constant
-- // pool and the index map is empty. This means the new constant
-- // pool is a superset of the old constant pool. However, the old
-- // class bytecodes have already gone through constant pool cache
-- // rewriting so we can't use the new constant pool with the old
-- // class.
++
+ if (!old_fs.done() || !new_fs.done()) {
+ result = result | Klass::ModifyInstances;
+ }
-
-- merge_cp()->set_is_conc_safe(true);
-- merge_cp = constantPoolHandle(); // toss the merged constant pool
++
+ // Do a parallel walk through the old and new methods. Detect
+ // cases where they match (exist in both), have been added in
+ // the new methods, or have been deleted (exist only in the
@@ -6830,11 +6380,7 @@ index 606be1c..ef4f380 100644
+ // Old method, at the end, is deleted
+ k_old_method = (methodOop) k_old_methods->obj_at(oi);
+ method_was = deleted;
- } else {
-- // The old constant pool has more entries than the new constant
-- // pool and the index map is empty. This means that both the old
-- // and merged constant pools are supersets of the new constant
-- // pool.
++ } else {
+ // There are more methods in both the old and new lists
+ k_old_method = (methodOop) k_old_methods->obj_at(oi);
+ k_new_method = (methodOop) k_new_methods->obj_at(ni);
@@ -6843,7 +6389,8 @@ index 606be1c..ef4f380 100644
+ // or deleted
+ if (k_old_method->name()->fast_compare(k_new_method->name()) > 0) {
+ method_was = added;
-+ } else {
+ } else {
+- return JVMTI_ERROR_INTERNAL;
+ method_was = deleted;
+ }
+ } else if (k_old_method->signature() == k_new_method->signature()) {
@@ -6869,27 +6416,7 @@ index 606be1c..ef4f380 100644
+ break;
+ }
+ }
-
-- // Replace the new constant pool with a shrunken copy of the
-- // merged constant pool; the previous new constant pool will
-- // get GCed.
-- set_new_constant_pool(scratch_class, merge_cp, merge_cp_length,
-- THREAD);
-- // drop local ref to the merged constant pool
-- merge_cp()->set_is_conc_safe(true);
-- merge_cp = constantPoolHandle();
-- }
-- } else {
-- if (RC_TRACE_ENABLED(0x00040000)) {
-- // don't want to loop unless we are tracing
-- int count = 0;
-- for (int i = 1; i < _index_map_p->length(); i++) {
-- int value = _index_map_p->at(i);
--
-- if (value != -1) {
-- RC_TRACE_WITH_THREAD(0x00040000, THREAD,
-- ("index_map[%d]: old=%d new=%d", count, i, value));
-- count++;
++
+ if (nj >= n_new_methods) {
+ // reached the end without a match; so method was deleted
+ method_was = deleted;
@@ -6897,10 +6424,15 @@ index 606be1c..ef4f380 100644
}
}
-- // We have entries mapped between the new and merged constant pools
-- // so we have to rewrite some constant pool references.
-- if (!rewrite_cp_refs(scratch_class, THREAD)) {
-- return JVMTI_ERROR_INTERNAL;
+- // Do the validity checks in compare_and_normalize_class_versions()
+- // before verifying the byte codes. By doing these checks first, we
+- // limit the number of functions that require redirection from
+- // the_class to scratch_class. In particular, we don't have to
+- // modify JNI GetSuperclass() and thus won't change its performance.
+- jvmtiError res = compare_and_normalize_class_versions(the_class,
+- scratch_class);
+- if (res != JVMTI_ERROR_NONE) {
+- return res;
+ switch (method_was) {
+ case matched:
+ // methods match, be sure modifiers do too
@@ -6911,87 +6443,30 @@ index 606be1c..ef4f380 100644
+ result = result | Klass::ModifyClass;
}
-
-- // Replace the new constant pool with a shrunken copy of the
-- // merged constant pool so now the rewritten bytecodes have
-- // valid references; the previous new constant pool will get
-- // GCed.
-- set_new_constant_pool(scratch_class, merge_cp, merge_cp_length,
-- THREAD);
-- merge_cp()->set_is_conc_safe(true);
-- }
-- assert(old_cp()->is_conc_safe(), "Just checking");
-- assert(scratch_cp()->is_conc_safe(), "Just checking");
--
-- return JVMTI_ERROR_NONE;
--} // end merge_cp_and_rewrite()
--
--
--// Rewrite constant pool references in klass scratch_class.
--bool VM_RedefineClasses::rewrite_cp_refs(instanceKlassHandle scratch_class,
-- TRAPS) {
--
-- // rewrite constant pool references in the methods:
-- if (!rewrite_cp_refs_in_methods(scratch_class, THREAD)) {
-- // propagate failure back to caller
-- return false;
-- }
--
-- // rewrite constant pool references in the class_annotations:
-- if (!rewrite_cp_refs_in_class_annotations(scratch_class, THREAD)) {
-- // propagate failure back to caller
-- return false;
-- }
--
-- // rewrite constant pool references in the fields_annotations:
-- if (!rewrite_cp_refs_in_fields_annotations(scratch_class, THREAD)) {
-- // propagate failure back to caller
-- return false;
-- }
--
-- // rewrite constant pool references in the methods_annotations:
-- if (!rewrite_cp_refs_in_methods_annotations(scratch_class, THREAD)) {
-- // propagate failure back to caller
-- return false;
-- }
--
-- // rewrite constant pool references in the methods_parameter_annotations:
-- if (!rewrite_cp_refs_in_methods_parameter_annotations(scratch_class,
-- THREAD)) {
-- // propagate failure back to caller
-- return false;
-- }
--
-- // rewrite constant pool references in the methods_default_annotations:
-- if (!rewrite_cp_refs_in_methods_default_annotations(scratch_class,
-- THREAD)) {
-- // propagate failure back to caller
-- return false;
-- }
--
-- return true;
--} // end rewrite_cp_refs()
--
--
--// Rewrite constant pool references in the methods.
--bool VM_RedefineClasses::rewrite_cp_refs_in_methods(
-- instanceKlassHandle scratch_class, TRAPS) {
--
-- objArrayHandle methods(THREAD, scratch_class->methods());
--
-- if (methods.is_null() || methods->length() == 0) {
-- // no methods so nothing to do
-- return true;
-- }
+- // verify what the caller passed us
+ {
+- // The bug 6214132 caused the verification to fail.
+- // Information about the_class and scratch_class is temporarily
+- // recorded into jvmtiThreadState. This data is used to redirect
+- // the_class to scratch_class in the JVM_* functions called by the
+- // verifier. Please, refer to jvmtiThreadState.hpp for the detailed
+- // description.
+- RedefineVerifyMark rvm(&the_class, &scratch_class, state);
+- Verifier::verify(
+- scratch_class, Verifier::ThrowException, true, THREAD);
+- }
-
-- // rewrite constant pool references in the methods:
-- for (int i = methods->length() - 1; i >= 0; i--) {
-- methodHandle method(THREAD, (methodOop)methods->obj_at(i));
-- methodHandle new_method;
-- rewrite_cp_refs_in_method(method, &new_method, CHECK_false);
-- if (!new_method.is_null()) {
-- // the method has been replaced so save the new method version
-- methods->obj_at_put(i, new_method());
-+ {
+- if (HAS_PENDING_EXCEPTION) {
+- Symbol* ex_name = PENDING_EXCEPTION->klass()->klass_part()->name();
+- // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
+- RC_TRACE_WITH_THREAD(0x00000002, THREAD,
+- ("verify_byte_codes exception: '%s'", ex_name->as_C_string()));
+- CLEAR_PENDING_EXCEPTION;
+- if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
+- return JVMTI_ERROR_OUT_OF_MEMORY;
+- } else {
+- // tell the caller the bytecodes are bad
+- return JVMTI_ERROR_FAILS_VERIFICATION;
+ u2 new_num = k_new_method->method_idnum();
+ u2 old_num = k_old_method->method_idnum();
+ if (new_num != old_num) {
@@ -7005,8 +6480,12 @@ index 606be1c..ef4f380 100644
+ new_num,
+ old_num));
+ // swap_all_method_annotations(old_num, new_num, new_class);
-+ }
-+ }
+ }
+ }
+-
+- res = merge_cp_and_rewrite(the_class, scratch_class, THREAD);
+- if (res != JVMTI_ERROR_NONE) {
+- return res;
+ RC_TRACE(0x00008000, ("Method matched: new: %s [%d] == old: %s [%d]",
+ k_new_method->name_and_sig_as_C_string(), ni,
+ k_old_method->name_and_sig_as_C_string(), oi));
@@ -7023,13 +6502,32 @@ index 606be1c..ef4f380 100644
+ ) {
+ // new methods must be private
+ result = result | Klass::ModifyClass;
-+ }
+ }
+-
+- if (VerifyMergedCPBytecodes) {
+- // verify what we have done during constant pool merging
+- {
+- RedefineVerifyMark rvm(&the_class, &scratch_class, state);
+- Verifier::verify(scratch_class, Verifier::ThrowException, true, THREAD);
+ {
+ u2 num = the_class->next_method_idnum();
+ if (num == constMethodOopDesc::UNSET_IDNUM) {
+ // cannot add any more methods
+ result = result | Klass::ModifyClass;
-+ }
+ }
+-
+- if (HAS_PENDING_EXCEPTION) {
+- Symbol* ex_name = PENDING_EXCEPTION->klass()->klass_part()->name();
+- // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
+- RC_TRACE_WITH_THREAD(0x00000002, THREAD,
+- ("verify_byte_codes post merge-CP exception: '%s'",
+- ex_name->as_C_string()));
+- CLEAR_PENDING_EXCEPTION;
+- if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
+- return JVMTI_ERROR_OUT_OF_MEMORY;
+- } else {
+- // tell the caller that constant pool merging screwed up
+- return JVMTI_ERROR_INTERNAL;
+ u2 new_num = k_new_method->method_idnum();
+ methodOop idnum_owner = new_class->method_with_idnum(num);
+ if (idnum_owner != NULL) {
@@ -7059,10 +6557,9 @@ index 606be1c..ef4f380 100644
+ break;
+ default:
+ ShouldNotReachHere();
- }
- }
-
-- return true;
++ }
++ }
++
+ if (new_class()->size() != new_class->old_version()->size()) {
+ result |= Klass::ModifyClassSize;
+ }
@@ -7078,72 +6575,28 @@ index 606be1c..ef4f380 100644
+
+ // (tw) Check method bodies to be able to return NoChange?
+ return result;
- }
-
++}
++
+void VM_RedefineClasses::calculate_instance_update_information(klassOop new_version) {
-
--// Rewrite constant pool references in the specific method. This code
--// was adapted from Rewriter::rewrite_method().
--void VM_RedefineClasses::rewrite_cp_refs_in_method(methodHandle method,
-- methodHandle *new_method_p, TRAPS) {
++
+ class UpdateFieldsEvolutionClosure : public FieldEvolutionClosure {
-
-- *new_method_p = methodHandle(); // default is no new method
++
+ private:
-
-- // We cache a pointer to the bytecodes here in code_base. If GC
-- // moves the methodOop, then the bytecodes will also move which
-- // will likely cause a crash. We create a No_Safepoint_Verifier
-- // object to detect whether we pass a possible safepoint in this
-- // code block.
-- No_Safepoint_Verifier nsv;
++
+ GrowableArray<int> info;
+ int curPosition;
+ bool copy_backwards;
-
-- // Bytecodes and their length
-- address code_base = method->code_base();
-- int code_length = method->code_size();
++
+ public:
-
-- int bc_length;
-- for (int bci = 0; bci < code_length; bci += bc_length) {
-- address bcp = code_base + bci;
-- Bytecodes::Code c = (Bytecodes::Code)(*bcp);
--
-- bc_length = Bytecodes::length_for(c);
-- if (bc_length == 0) {
-- // More complicated bytecodes report a length of zero so
-- // we have to try again a slightly different way.
-- bc_length = Bytecodes::length_at(method(), bcp);
++
+ bool does_copy_backwards() {
+ return copy_backwards;
- }
-
-- assert(bc_length != 0, "impossible bytecode length");
++ }
++
+ UpdateFieldsEvolutionClosure(klassOop klass) {
-
-- switch (c) {
-- case Bytecodes::_ldc:
-- {
-- int cp_index = *(bcp + 1);
-- int new_index = find_new_index(cp_index);
++
+ int base_offset = instanceOopDesc::base_offset_in_bytes();
-
-- if (StressLdcRewrite && new_index == 0) {
-- // If we are stressing ldc -> ldc_w rewriting, then we
-- // always need a new_index value.
-- new_index = cp_index;
-- }
-- if (new_index != 0) {
-- // the original index is mapped so we have more work to do
-- if (!StressLdcRewrite && new_index <= max_jubyte) {
-- // The new value can still use ldc instead of ldc_w
-- // unless we are trying to stress ldc -> ldc_w rewriting
-- RC_TRACE_WITH_THREAD(0x00080000, THREAD,
-- ("%s@" INTPTR_FORMAT " old=%d, new=%d", Bytecodes::name(c),
-- bcp, cp_index, new_index));
-- *(bcp + 1) = new_index;
++
+ if (klass->klass_part()->newest_version() == SystemDictionary::Reference_klass()->klass_part()->newest_version()) {
+ base_offset += java_lang_ref_Reference::number_of_fake_oop_fields*size_of_type(T_OBJECT);
+ }
@@ -7221,36 +6674,15 @@ index 606be1c..ef4f380 100644
+ case T_ARRAY:
+ if (UseCompressedOops) {
+ size = sizeof(narrowOop);
- } else {
-- RC_TRACE_WITH_THREAD(0x00080000, THREAD,
-- ("%s->ldc_w@" INTPTR_FORMAT " old=%d, new=%d",
-- Bytecodes::name(c), bcp, cp_index, new_index));
-- // the new value needs ldc_w instead of ldc
-- u_char inst_buffer[4]; // max instruction size is 4 bytes
-- bcp = (address)inst_buffer;
-- // construct new instruction sequence
-- *bcp = Bytecodes::_ldc_w;
-- bcp++;
-- // Rewriter::rewrite_method() does not rewrite ldc -> ldc_w.
-- // See comment below for difference between put_Java_u2()
-- // and put_native_u2().
-- Bytes::put_Java_u2(bcp, new_index);
++ } else {
+ size = (sizeof(oop));
+ }
+ break;
-
-- Relocator rc(method, NULL /* no RelocatorListener needed */);
-- methodHandle m;
-- {
-- Pause_No_Safepoint_Verifier pnsv(&nsv);
++
+ default:
+ ShouldNotReachHere();
+ }
-
-- // ldc is 2 bytes and ldc_w is 3 bytes
-- m = rc.insert_space_at(bci, 3, inst_buffer, THREAD);
-- if (m.is_null() || HAS_PENDING_EXCEPTION) {
-- guarantee(false, "insert_space_at() failed");
++
+ assert(size > 0, "");
+ return size;
+
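The size_of_type() helper being added in this hunk sizes a field slot from its basic type, with references taking sizeof(narrowOop) or sizeof(oop) depending on UseCompressedOops. The same sizing rule as a stand-alone sketch, with hypothetical names rather than the real VM types:

    #include <cstddef>
    #include <cstdint>

    // Hypothetical mirror of the basic types that matter for instance field layout.
    enum class FieldType { Boolean, Byte, Char, Short, Int, Float, Long, Double, Reference };

    // References are 32-bit compressed handles or raw pointers, depending on the VM flag.
    std::size_t field_slot_size(FieldType t, bool use_compressed_oops) {
        switch (t) {
            case FieldType::Boolean:
            case FieldType::Byte:    return 1;
            case FieldType::Char:
            case FieldType::Short:   return 2;
            case FieldType::Int:
            case FieldType::Float:   return 4;
            case FieldType::Long:
            case FieldType::Double:  return 8;
            case FieldType::Reference:
                return use_compressed_oops ? sizeof(std::uint32_t) : sizeof(void*);
        }
        return 0; // unreachable for valid input
    }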
@@ -7432,60 +6864,10 @@ index 606be1c..ef4f380 100644
+ element->blueprint()->name()->as_C_string()));
+ _result = false;
+ break;
- }
- }
--
-- // return the new method so that the caller can update
-- // the containing class
-- *new_method_p = method = m;
-- // switch our bytecode processing loop from the old method
-- // to the new method
-- code_base = method->code_base();
-- code_length = method->code_size();
-- bcp = code_base + bci;
-- c = (Bytecodes::Code)(*bcp);
-- bc_length = Bytecodes::length_for(c);
-- assert(bc_length != 0, "sanity check");
-- } // end we need ldc_w instead of ldc
-- } // end if there is a mapped index
-- } break;
--
-- // these bytecodes have a two-byte constant pool index
-- case Bytecodes::_anewarray : // fall through
-- case Bytecodes::_checkcast : // fall through
-- case Bytecodes::_getfield : // fall through
-- case Bytecodes::_getstatic : // fall through
-- case Bytecodes::_instanceof : // fall through
-- case Bytecodes::_invokedynamic : // fall through
-- case Bytecodes::_invokeinterface: // fall through
-- case Bytecodes::_invokespecial : // fall through
-- case Bytecodes::_invokestatic : // fall through
-- case Bytecodes::_invokevirtual : // fall through
-- case Bytecodes::_ldc_w : // fall through
-- case Bytecodes::_ldc2_w : // fall through
-- case Bytecodes::_multianewarray : // fall through
-- case Bytecodes::_new : // fall through
-- case Bytecodes::_putfield : // fall through
-- case Bytecodes::_putstatic :
-- {
-- address p = bcp + 1;
-- int cp_index = Bytes::get_Java_u2(p);
-- int new_index = find_new_index(cp_index);
-- if (new_index != 0) {
-- // the original index is mapped so update w/ new value
-- RC_TRACE_WITH_THREAD(0x00080000, THREAD,
-- ("%s@" INTPTR_FORMAT " old=%d, new=%d", Bytecodes::name(c),
-- bcp, cp_index, new_index));
-- // Rewriter::rewrite_method() uses put_native_u2() in this
-- // situation because it is reusing the constant pool index
-- // location for a native index into the constantPoolCache.
-- // Since we are updating the constant pool index prior to
-- // verification and constantPoolCache initialization, we
-- // need to keep the new index in Java byte order.
-- Bytes::put_Java_u2(p, new_index);
++ }
++ }
+ }
- }
-- } break;
++ }
+
+ } else {
+ Pair<int, klassOop> *cur = obj->klass()->klass_part()->type_check_information();
@@ -7495,12 +6877,391 @@ index 606be1c..ef4f380 100644
+ check_field(obj, (*cur).left(), (*cur).right());
+ cur++;
+ }
-+ }
-+ }
+ }
+ }
}
-- } // end for each bytecode
--} // end rewrite_cp_refs_in_method()
+- Rewriter::rewrite(scratch_class, THREAD);
+- if (!HAS_PENDING_EXCEPTION) {
+- Rewriter::relocate_and_link(scratch_class, THREAD);
+- }
+- if (HAS_PENDING_EXCEPTION) {
+- Symbol* ex_name = PENDING_EXCEPTION->klass()->klass_part()->name();
+- CLEAR_PENDING_EXCEPTION;
+- if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
+- return JVMTI_ERROR_OUT_OF_MEMORY;
+- } else {
+- return JVMTI_ERROR_INTERNAL;
+- }
+- }
+-
+- _scratch_classes[i] = scratch_class;
+-
+- // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
+- RC_TRACE_WITH_THREAD(0x00000001, THREAD,
+- ("loaded name=%s (avail_mem=" UINT64_FORMAT "K)",
+- the_class->external_name(), os::available_memory() >> 10));
+- }
+-
+- return JVMTI_ERROR_NONE;
+-}
+-
+-
+-// Map old_index to new_index as needed. scratch_cp is only needed
+-// for RC_TRACE() calls.
+-void VM_RedefineClasses::map_index(constantPoolHandle scratch_cp,
+- int old_index, int new_index) {
+- if (find_new_index(old_index) != 0) {
+- // old_index is already mapped
+- return;
+- }
+-
+- if (old_index == new_index) {
+- // no mapping is needed
+- return;
+- }
+-
+- _index_map_p->at_put(old_index, new_index);
+- _index_map_count++;
+-
+- RC_TRACE(0x00040000, ("mapped tag %d at index %d to %d",
+- scratch_cp->tag_at(old_index).value(), old_index, new_index));
+-} // end map_index()
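The map_index() code dropped above keeps a plain old-index-to-new-index table for constant-pool entries that land at a different slot in the merged pool, with 0 meaning "no remapping needed". A minimal stand-alone sketch of that bookkeeping, using hypothetical names instead of the HotSpot types:

    #include <cstdio>
    #include <vector>

    // Hypothetical sketch: per-entry old->new index table; 0 means "not remapped".
    struct IndexMap {
        std::vector<int> to_new; // indexed by the entry's original index
        int count;

        explicit IndexMap(int len) : to_new(len, 0), count(0) {}

        void map(int old_i, int new_i) {
            if (old_i == new_i || to_new[old_i] != 0) return; // identity, or already mapped
            to_new[old_i] = new_i;
            ++count;
        }
        int find(int old_i) const { return to_new[old_i]; } // 0 == use the same index
    };

    int main() {
        IndexMap m(10);
        m.map(3, 7); // entry 3 ended up at slot 7 of the merged pool
        std::printf("3 -> %d, 4 -> %d\n", m.find(3), m.find(4)); // prints "3 -> 7, 4 -> 0"
        return 0;
    }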
+-
+-
+-// Map old_index to new_index as needed.
+-void VM_RedefineClasses::map_operand_index(int old_index, int new_index) {
+- if (find_new_operand_index(old_index) != -1) {
+- // old_index is already mapped
+- return;
+- }
+-
+- if (old_index == new_index) {
+- // no mapping is needed
+- return;
+- }
+-
+- _operands_index_map_p->at_put(old_index, new_index);
+- _operands_index_map_count++;
+-
+- RC_TRACE(0x00040000, ("mapped bootstrap specifier at index %d to %d", old_index, new_index));
+-} // end map_index()
+-
+-
+-// Merge old_cp and scratch_cp and return the results of the merge via
+-// merge_cp_p. The number of entries in *merge_cp_p is returned via
+-// merge_cp_length_p. The entries in old_cp occupy the same locations
+-// in *merge_cp_p. Also creates a map of indices from entries in
+-// scratch_cp to the corresponding entry in *merge_cp_p. Index map
+-// entries are only created for entries in scratch_cp that occupy a
+-// different location in *merged_cp_p.
+-bool VM_RedefineClasses::merge_constant_pools(constantPoolHandle old_cp,
+- constantPoolHandle scratch_cp, constantPoolHandle *merge_cp_p,
+- int *merge_cp_length_p, TRAPS) {
+-
+- if (merge_cp_p == NULL) {
+- assert(false, "caller must provide scatch constantPool");
+- return false; // robustness
+- }
+- if (merge_cp_length_p == NULL) {
+- assert(false, "caller must provide scatch CP length");
+- return false; // robustness
+- }
+- // Worst case we need old_cp->length() + scratch_cp()->length(),
+- // but the caller might be smart so make sure we have at least
+- // the minimum.
+- if ((*merge_cp_p)->length() < old_cp->length()) {
+- assert(false, "merge area too small");
+- return false; // robustness
+- }
+-
+- RC_TRACE_WITH_THREAD(0x00010000, THREAD,
+- ("old_cp_len=%d, scratch_cp_len=%d", old_cp->length(),
+- scratch_cp->length()));
+-
+- {
+- // Pass 0:
+- // The old_cp is copied to *merge_cp_p; this means that any code
+- // using old_cp does not have to change. This work looks like a
+- // perfect fit for constantPoolOop::copy_cp_to(), but we need to
+- // handle one special case:
+- // - revert JVM_CONSTANT_Class to JVM_CONSTANT_UnresolvedClass
+- // This will make verification happy.
+-
+- int old_i; // index into old_cp
+-
+- // index zero (0) is not used in constantPools
+- for (old_i = 1; old_i < old_cp->length(); old_i++) {
+- // leave debugging crumb
+- jbyte old_tag = old_cp->tag_at(old_i).value();
+- switch (old_tag) {
+- case JVM_CONSTANT_Class:
+- case JVM_CONSTANT_UnresolvedClass:
+- // revert the copy to JVM_CONSTANT_UnresolvedClass
+- // May be resolving while calling this so do the same for
+- // JVM_CONSTANT_UnresolvedClass (klass_name_at() deals with transition)
+- (*merge_cp_p)->unresolved_klass_at_put(old_i,
+- old_cp->klass_name_at(old_i));
+- break;
+-
+- case JVM_CONSTANT_Double:
+- case JVM_CONSTANT_Long:
+- // just copy the entry to *merge_cp_p, but double and long take
+- // two constant pool entries
+- constantPoolOopDesc::copy_entry_to(old_cp, old_i, *merge_cp_p, old_i, CHECK_0);
+- old_i++;
+- break;
+-
+- default:
+- // just copy the entry to *merge_cp_p
+- constantPoolOopDesc::copy_entry_to(old_cp, old_i, *merge_cp_p, old_i, CHECK_0);
+- break;
+- }
+- } // end for each old_cp entry
+-
+- constantPoolOopDesc::copy_operands(old_cp, *merge_cp_p, CHECK_0);
+- (*merge_cp_p)->extend_operands(scratch_cp, CHECK_0);
+-
+- // We don't need to sanity check that *merge_cp_length_p is within
+- // *merge_cp_p bounds since we have the minimum on-entry check above.
+- (*merge_cp_length_p) = old_i;
+- }
+-
+- // merge_cp_len should be the same as old_cp->length() at this point
+- // so this trace message is really a "warm-and-breathing" message.
+- RC_TRACE_WITH_THREAD(0x00020000, THREAD,
+- ("after pass 0: merge_cp_len=%d", *merge_cp_length_p));
+-
+- int scratch_i; // index into scratch_cp
+- {
+- // Pass 1a:
+- // Compare scratch_cp entries to the old_cp entries that we have
+- // already copied to *merge_cp_p. In this pass, we are eliminating
+- // exact duplicates (matching entry at same index) so we only
+- // compare entries in the common indice range.
+- int increment = 1;
+- int pass1a_length = MIN2(old_cp->length(), scratch_cp->length());
+- for (scratch_i = 1; scratch_i < pass1a_length; scratch_i += increment) {
+- switch (scratch_cp->tag_at(scratch_i).value()) {
+- case JVM_CONSTANT_Double:
+- case JVM_CONSTANT_Long:
+- // double and long take two constant pool entries
+- increment = 2;
+- break;
+-
+- default:
+- increment = 1;
+- break;
+- }
+-
+- bool match = scratch_cp->compare_entry_to(scratch_i, *merge_cp_p,
+- scratch_i, CHECK_0);
+- if (match) {
+- // found a match at the same index so nothing more to do
+- continue;
+- } else if (is_unresolved_class_mismatch(scratch_cp, scratch_i,
+- *merge_cp_p, scratch_i)) {
+- // The mismatch in compare_entry_to() above is because of a
+- // resolved versus unresolved class entry at the same index
+- // with the same string value. Since Pass 0 reverted any
+- // class entries to unresolved class entries in *merge_cp_p,
+- // we go with the unresolved class entry.
+- continue;
+- } else if (is_unresolved_string_mismatch(scratch_cp, scratch_i,
+- *merge_cp_p, scratch_i)) {
+- // The mismatch in compare_entry_to() above is because of a
+- // resolved versus unresolved string entry at the same index
+- // with the same string value. We can live with whichever
+- // happens to be at scratch_i in *merge_cp_p.
+- continue;
+- }
+-
+- int found_i = scratch_cp->find_matching_entry(scratch_i, *merge_cp_p,
+- CHECK_0);
+- if (found_i != 0) {
+- guarantee(found_i != scratch_i,
+- "compare_entry_to() and find_matching_entry() do not agree");
+-
+- // Found a matching entry somewhere else in *merge_cp_p so
+- // just need a mapping entry.
+- map_index(scratch_cp, scratch_i, found_i);
+- continue;
+- }
+-
+- // The find_matching_entry() call above could fail to find a match
+- // due to a resolved versus unresolved class or string entry situation
+- // like we solved above with the is_unresolved_*_mismatch() calls.
+- // However, we would have to call is_unresolved_*_mismatch() over
+- // all of *merge_cp_p (potentially) and that doesn't seem to be
+- // worth the time.
+-
+- // No match found so we have to append this entry and any unique
+- // referenced entries to *merge_cp_p.
+- append_entry(scratch_cp, scratch_i, merge_cp_p, merge_cp_length_p,
+- CHECK_0);
+- }
+- }
+-
+- RC_TRACE_WITH_THREAD(0x00020000, THREAD,
+- ("after pass 1a: merge_cp_len=%d, scratch_i=%d, index_map_len=%d",
+- *merge_cp_length_p, scratch_i, _index_map_count));
+-
+- if (scratch_i < scratch_cp->length()) {
+- // Pass 1b:
+- // old_cp is smaller than scratch_cp so there are entries in
+- // scratch_cp that we have not yet processed. We take care of
+- // those now.
+- int increment = 1;
+- for (; scratch_i < scratch_cp->length(); scratch_i += increment) {
+- switch (scratch_cp->tag_at(scratch_i).value()) {
+- case JVM_CONSTANT_Double:
+- case JVM_CONSTANT_Long:
+- // double and long take two constant pool entries
+- increment = 2;
+- break;
+-
+- default:
+- increment = 1;
+- break;
+- }
+-
+- int found_i =
+- scratch_cp->find_matching_entry(scratch_i, *merge_cp_p, CHECK_0);
+- if (found_i != 0) {
+- // Found a matching entry somewhere else in *merge_cp_p so
+- // just need a mapping entry.
+- map_index(scratch_cp, scratch_i, found_i);
+- continue;
+- }
+-
+- // No match found so we have to append this entry and any unique
+- // referenced entries to *merge_cp_p.
+- append_entry(scratch_cp, scratch_i, merge_cp_p, merge_cp_length_p,
+- CHECK_0);
+- }
+-
+- RC_TRACE_WITH_THREAD(0x00020000, THREAD,
+- ("after pass 1b: merge_cp_len=%d, scratch_i=%d, index_map_len=%d",
+- *merge_cp_length_p, scratch_i, _index_map_count));
+- }
+- finalize_operands_merge(*merge_cp_p, THREAD);
+-
+- return true;
+-} // end merge_constant_pools()
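The merge strategy spelled out in the comments above (Pass 0 copies old_cp verbatim so existing code keeps its indices; Passes 1a/1b then either reuse a matching entry or append a new one and record an index mapping) can be illustrated with a much-simplified, hypothetical sketch over plain strings instead of real constant-pool tags:

    #include <string>
    #include <unordered_map>
    #include <vector>

    // Simplified sketch: merge "scratch" entries into a copy of "old", returning an
    // index map that says where each scratch entry ended up (-1 == same index).
    std::vector<int> merge_pools(const std::vector<std::string>& old_cp,
                                 const std::vector<std::string>& scratch_cp,
                                 std::vector<std::string>& merged) {
        merged = old_cp;                                    // "Pass 0": old entries keep their indices
        std::unordered_map<std::string, int> where;
        for (int i = 0; i < (int)merged.size(); ++i) where[merged[i]] = i;

        std::vector<int> index_map(scratch_cp.size(), -1);
        for (int i = 0; i < (int)scratch_cp.size(); ++i) { // "Pass 1": reuse or append
            auto it = where.find(scratch_cp[i]);
            if (it != where.end()) {
                if (it->second != i) index_map[i] = it->second;   // matching entry elsewhere
            } else {
                merged.push_back(scratch_cp[i]);                  // genuinely new entry
                where[scratch_cp[i]] = (int)merged.size() - 1;
                index_map[i] = (int)merged.size() - 1;
            }
        }
        return index_map;                                   // later drives bytecode rewriting
    }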
+-
+-
+-// Merge constant pools between the_class and scratch_class and
+-// potentially rewrite bytecodes in scratch_class to use the merged
+-// constant pool.
+-jvmtiError VM_RedefineClasses::merge_cp_and_rewrite(
+- instanceKlassHandle the_class, instanceKlassHandle scratch_class,
+- TRAPS) {
+- // worst case merged constant pool length is old and new combined
+- int merge_cp_length = the_class->constants()->length()
+- + scratch_class->constants()->length();
+-
+- constantPoolHandle old_cp(THREAD, the_class->constants());
+- constantPoolHandle scratch_cp(THREAD, scratch_class->constants());
+-
+- // Constant pools are not easily reused so we allocate a new one
+- // each time.
+- // merge_cp is created unsafe for concurrent GC processing. It
+- // should be marked safe before discarding it. Even though
+- // garbage, if it crosses a card boundary, it may be scanned
+- // in order to find the start of the first complete object on the card.
+- constantPoolHandle merge_cp(THREAD,
+- oopFactory::new_constantPool(merge_cp_length,
+- oopDesc::IsUnsafeConc,
+- THREAD));
+- int orig_length = old_cp->orig_length();
+- if (orig_length == 0) {
+- // This old_cp is an actual original constant pool. We save
+- // the original length in the merged constant pool so that
+- // merge_constant_pools() can be more efficient. If a constant
+- // pool has a non-zero orig_length() value, then that constant
+- // pool was created by a merge operation in RedefineClasses.
+- merge_cp->set_orig_length(old_cp->length());
+- } else {
+- // This old_cp is a merged constant pool from a previous
+- // RedefineClasses() calls so just copy the orig_length()
+- // value.
+- merge_cp->set_orig_length(old_cp->orig_length());
+- }
+-
+- ResourceMark rm(THREAD);
+- _index_map_count = 0;
+- _index_map_p = new intArray(scratch_cp->length(), -1);
+-
+- _operands_cur_length = constantPoolOopDesc::operand_array_length(old_cp->operands());
+- _operands_index_map_count = 0;
+- _operands_index_map_p = new intArray(
+- constantPoolOopDesc::operand_array_length(scratch_cp->operands()), -1);
+-
+- // reference to the cp holder is needed for copy_operands()
+- merge_cp->set_pool_holder(scratch_class());
+- bool result = merge_constant_pools(old_cp, scratch_cp, &merge_cp,
+- &merge_cp_length, THREAD);
+- merge_cp->set_pool_holder(NULL);
+-
+- if (!result) {
+- // The merge can fail due to memory allocation failure or due
+- // to robustness checks.
+- return JVMTI_ERROR_INTERNAL;
+- }
+-
+- RC_TRACE_WITH_THREAD(0x00010000, THREAD,
+- ("merge_cp_len=%d, index_map_len=%d", merge_cp_length, _index_map_count));
+-
+- if (_index_map_count == 0) {
+- // there is nothing to map between the new and merged constant pools
+-
+- if (old_cp->length() == scratch_cp->length()) {
+- // The old and new constant pools are the same length and the
+- // index map is empty. This means that the three constant pools
+- // are equivalent (but not the same). Unfortunately, the new
+- // constant pool has not gone through link resolution nor have
+- // the new class bytecodes gone through constant pool cache
+- // rewriting so we can't use the old constant pool with the new
+- // class.
+-
+- merge_cp()->set_is_conc_safe(true);
+- merge_cp = constantPoolHandle(); // toss the merged constant pool
+- } else if (old_cp->length() < scratch_cp->length()) {
+- // The old constant pool has fewer entries than the new constant
+- // pool and the index map is empty. This means the new constant
+- // pool is a superset of the old constant pool. However, the old
+- // class bytecodes have already gone through constant pool cache
+- // rewriting so we can't use the new constant pool with the old
+- // class.
+-
+- merge_cp()->set_is_conc_safe(true);
+- merge_cp = constantPoolHandle(); // toss the merged constant pool
+- } else {
+- // The old constant pool has more entries than the new constant
+- // pool and the index map is empty. This means that both the old
+- // and merged constant pools are supersets of the new constant
+- // pool.
+-
+- // Replace the new constant pool with a shrunken copy of the
+- // merged constant pool; the previous new constant pool will
+- // get GCed.
+- set_new_constant_pool(scratch_class, merge_cp, merge_cp_length,
+- THREAD);
+- // drop local ref to the merged constant pool
+- merge_cp()->set_is_conc_safe(true);
+- merge_cp = constantPoolHandle();
+- }
+- } else {
+- if (RC_TRACE_ENABLED(0x00040000)) {
+- // don't want to loop unless we are tracing
+- int count = 0;
+- for (int i = 1; i < _index_map_p->length(); i++) {
+- int value = _index_map_p->at(i);
+-
+- if (value != -1) {
+- RC_TRACE_WITH_THREAD(0x00040000, THREAD,
+- ("index_map[%d]: old=%d new=%d", count, i, value));
+- count++;
+ void check_field(oop obj, int offset, klassOop static_type) {
+ oop field_value = obj->obj_field(offset);
+ if (field_value != NULL) {
@@ -7520,21 +7281,18 @@ index 606be1c..ef4f380 100644
+ offset,
+ field_value->klass()->klass_part()->name()->as_C_string()));
+ _result = false;
-+ }
-+ }
-+ }
+ }
+ }
+ }
+-
+- // We have entries mapped between the new and merged constant pools
+- // so we have to rewrite some constant pool references.
+- if (!rewrite_cp_refs(scratch_class, THREAD)) {
+- return JVMTI_ERROR_INTERNAL;
+ };
-
--// Rewrite constant pool references in the class_annotations field.
--bool VM_RedefineClasses::rewrite_cp_refs_in_class_annotations(
-- instanceKlassHandle scratch_class, TRAPS) {
++
+ CheckFieldTypesClosure myObjectClosure;
-
-- typeArrayHandle class_annotations(THREAD,
-- scratch_class->class_annotations());
-- if (class_annotations.is_null() || class_annotations->length() == 0) {
-- // no class_annotations so nothing to do
-- return true;
++
+ // make sure that heap is parsable (fills TLABs with filler objects)
+ Universe::heap()->ensure_parsability(false); // no need to retire TLABs
+
@@ -7550,187 +7308,27 @@ index 606be1c..ef4f380 100644
+ CompactingPermGenGen* gen = (CompactingPermGenGen*)gch->perm_gen();
+ gen->ro_space()->object_iterate(&myObjectClosure);
+ gen->rw_space()->object_iterate(&myObjectClosure);
- }
-
-- RC_TRACE_WITH_THREAD(0x02000000, THREAD,
-- ("class_annotations length=%d", class_annotations->length()));
--
-- int byte_i = 0; // byte index into class_annotations
-- return rewrite_cp_refs_in_annotations_typeArray(class_annotations, byte_i,
-- THREAD);
++ }
++
+ return myObjectClosure.result();
- }
-
--
--// Rewrite constant pool references in an annotations typeArray. This
--// "structure" is adapted from the RuntimeVisibleAnnotations_attribute
--// that is described in section 4.8.15 of the 2nd-edition of the VM spec:
--//
--// annotations_typeArray {
--// u2 num_annotations;
--// annotation annotations[num_annotations];
--// }
--//
--bool VM_RedefineClasses::rewrite_cp_refs_in_annotations_typeArray(
-- typeArrayHandle annotations_typeArray, int &byte_i_ref, TRAPS) {
--
-- if ((byte_i_ref + 2) > annotations_typeArray->length()) {
-- // not enough room for num_annotations field
-- RC_TRACE_WITH_THREAD(0x02000000, THREAD,
-- ("length() is too small for num_annotations field"));
-- return false;
++}
++
+void VM_RedefineClasses::clear_type_check_information(klassOop k) {
+ if (k->klass_part()->is_redefining()) {
+ k = k->klass_part()->old_version();
- }
-
-- u2 num_annotations = Bytes::get_Java_u2((address)
-- annotations_typeArray->byte_at_addr(byte_i_ref));
-- byte_i_ref += 2;
--
-- RC_TRACE_WITH_THREAD(0x02000000, THREAD,
-- ("num_annotations=%d", num_annotations));
--
-- int calc_num_annotations = 0;
-- for (; calc_num_annotations < num_annotations; calc_num_annotations++) {
-- if (!rewrite_cp_refs_in_annotation_struct(annotations_typeArray,
-- byte_i_ref, THREAD)) {
-- RC_TRACE_WITH_THREAD(0x02000000, THREAD,
-- ("bad annotation_struct at %d", calc_num_annotations));
-- // propagate failure back to caller
-- return false;
-- }
-- }
-- assert(num_annotations == calc_num_annotations, "sanity check");
--
-- return true;
--} // end rewrite_cp_refs_in_annotations_typeArray()
--
--
--// Rewrite constant pool references in the annotation struct portion of
--// an annotations_typeArray. This "structure" is from section 4.8.15 of
--// the 2nd-edition of the VM spec:
--//
--// struct annotation {
--// u2 type_index;
--// u2 num_element_value_pairs;
--// {
--// u2 element_name_index;
--// element_value value;
--// } element_value_pairs[num_element_value_pairs];
--// }
--//
--bool VM_RedefineClasses::rewrite_cp_refs_in_annotation_struct(
-- typeArrayHandle annotations_typeArray, int &byte_i_ref, TRAPS) {
-- if ((byte_i_ref + 2 + 2) > annotations_typeArray->length()) {
-- // not enough room for smallest annotation_struct
-- RC_TRACE_WITH_THREAD(0x02000000, THREAD,
-- ("length() is too small for annotation_struct"));
-- return false;
-- }
--
-- u2 type_index = rewrite_cp_ref_in_annotation_data(annotations_typeArray,
-- byte_i_ref, "mapped old type_index=%d", THREAD);
--
-- u2 num_element_value_pairs = Bytes::get_Java_u2((address)
-- annotations_typeArray->byte_at_addr(
-- byte_i_ref));
-- byte_i_ref += 2;
--
-- RC_TRACE_WITH_THREAD(0x02000000, THREAD,
-- ("type_index=%d num_element_value_pairs=%d", type_index,
-- num_element_value_pairs));
--
-- int calc_num_element_value_pairs = 0;
-- for (; calc_num_element_value_pairs < num_element_value_pairs;
-- calc_num_element_value_pairs++) {
-- if ((byte_i_ref + 2) > annotations_typeArray->length()) {
-- // not enough room for another element_name_index, let alone
-- // the rest of another component
-- RC_TRACE_WITH_THREAD(0x02000000, THREAD,
-- ("length() is too small for element_name_index"));
-- return false;
-- }
--
-- u2 element_name_index = rewrite_cp_ref_in_annotation_data(
-- annotations_typeArray, byte_i_ref,
-- "mapped old element_name_index=%d", THREAD);
--
-- RC_TRACE_WITH_THREAD(0x02000000, THREAD,
-- ("element_name_index=%d", element_name_index));
--
-- if (!rewrite_cp_refs_in_element_value(annotations_typeArray,
-- byte_i_ref, THREAD)) {
-- RC_TRACE_WITH_THREAD(0x02000000, THREAD,
-- ("bad element_value at %d", calc_num_element_value_pairs));
-- // propagate failure back to caller
-- return false;
-- }
-- } // end for each component
-- assert(num_element_value_pairs == calc_num_element_value_pairs,
-- "sanity check");
--
-- return true;
--} // end rewrite_cp_refs_in_annotation_struct()
--
--
--// Rewrite a constant pool reference at the current position in
--// annotations_typeArray if needed. Returns the original constant
--// pool reference if a rewrite was not needed or the new constant
--// pool reference if a rewrite was needed.
--u2 VM_RedefineClasses::rewrite_cp_ref_in_annotation_data(
-- typeArrayHandle annotations_typeArray, int &byte_i_ref,
-- const char * trace_mesg, TRAPS) {
--
-- address cp_index_addr = (address)
-- annotations_typeArray->byte_at_addr(byte_i_ref);
-- u2 old_cp_index = Bytes::get_Java_u2(cp_index_addr);
-- u2 new_cp_index = find_new_index(old_cp_index);
-- if (new_cp_index != 0) {
-- RC_TRACE_WITH_THREAD(0x02000000, THREAD, (trace_mesg, old_cp_index));
-- Bytes::put_Java_u2(cp_index_addr, new_cp_index);
-- old_cp_index = new_cp_index;
-- }
-- byte_i_ref += 2;
-- return old_cp_index;
++ }
++
+ // We found an instance klass!
+ instanceKlass *cur_instance_klass = instanceKlass::cast(k);
+ cur_instance_klass->clear_type_check_information();
- }
-
++}
++
+void VM_RedefineClasses::update_active_methods() {
-
--// Rewrite constant pool references in the element_value portion of an
--// annotations_typeArray. This "structure" is from section 4.8.15.1 of
--// the 2nd-edition of the VM spec:
--//
--// struct element_value {
--// u1 tag;
--// union {
--// u2 const_value_index;
--// {
--// u2 type_name_index;
--// u2 const_name_index;
--// } enum_const_value;
--// u2 class_info_index;
--// annotation annotation_value;
--// struct {
--// u2 num_values;
--// element_value values[num_values];
--// } array_value;
--// } value;
--// }
--//
--bool VM_RedefineClasses::rewrite_cp_refs_in_element_value(
-- typeArrayHandle annotations_typeArray, int &byte_i_ref, TRAPS) {
++
+ RC_TRACE(0x00000002, ("Updating active methods"));
+ JavaThread *java_thread = Threads::first();
+ while (java_thread != NULL) {
-
-- if ((byte_i_ref + 1) > annotations_typeArray->length()) {
-- // not enough room for a tag let alone the rest of an element_value
-- RC_TRACE_WITH_THREAD(0x02000000, THREAD,
-- ("length() is too small for a tag"));
++
+ int stack_depth = 0;
+ if (java_thread->has_last_Java_frame()) {
+
@@ -7801,11 +7399,32 @@ index 606be1c..ef4f380 100644
+ }
+ vf = vf->sender();
+ }
-+ }
-+
+ }
+
+- // Replace the new constant pool with a shrunken copy of the
+- // merged constant pool so now the rewritten bytecodes have
+- // valid references; the previous new constant pool will get
+- // GCed.
+- set_new_constant_pool(scratch_class, merge_cp, merge_cp_length,
+- THREAD);
+- merge_cp()->set_is_conc_safe(true);
+ // Advance to next thread
+ java_thread = java_thread->next();
-+ }
+ }
+- assert(old_cp()->is_conc_safe(), "Just checking");
+- assert(scratch_cp()->is_conc_safe(), "Just checking");
+-
+- return JVMTI_ERROR_NONE;
+-} // end merge_cp_and_rewrite()
+-
+-
+-// Rewrite constant pool references in klass scratch_class.
+-bool VM_RedefineClasses::rewrite_cp_refs(instanceKlassHandle scratch_class,
+- TRAPS) {
+-
+- // rewrite constant pool references in the methods:
+- if (!rewrite_cp_refs_in_methods(scratch_class, THREAD)) {
+- // propagate failure back to caller
+}
+
+void VM_RedefineClasses::method_forwarding() {
@@ -8255,49 +7874,34 @@ index 606be1c..ef4f380 100644
return false;
}
-- u1 tag = annotations_typeArray->byte_at(byte_i_ref);
-- byte_i_ref++;
-- RC_TRACE_WITH_THREAD(0x02000000, THREAD, ("tag='%c'", tag));
+- // rewrite constant pool references in the class_annotations:
+- if (!rewrite_cp_refs_in_class_annotations(scratch_class, THREAD)) {
+- // propagate failure back to caller
+ result = check_method_stacks();
+ if (!result) {
+ RC_TRACE(0x00000001, ("Aborting redefinition because of wrong value on the stack"));
+ Universe::set_verify_in_progress(false);
-+ return false;
-+ }
+ return false;
+ }
-- switch (tag) {
-- // These BaseType tag values are from Table 4.2 in VM spec:
-- case 'B': // byte
-- case 'C': // char
-- case 'D': // double
-- case 'F': // float
-- case 'I': // int
-- case 'J': // long
-- case 'S': // short
-- case 'Z': // boolean
+- // rewrite constant pool references in the fields_annotations:
+- if (!rewrite_cp_refs_in_fields_annotations(scratch_class, THREAD)) {
+- // propagate failure back to caller
+ result = check_loaded_methods();
+ if (!result) {
+ RC_TRACE(0x00000001, ("Aborting redefinition because of wrong loaded method"));
+ Universe::set_verify_in_progress(false);
-+ return false;
-+ }
+ return false;
+ }
-- // The remaining tag values are from Table 4.8 in the 2nd-edition of
-- // the VM spec:
-- case 's':
-- {
-- // For the above tag values (including the BaseType values),
-- // value.const_value_index is right union field.
+- // rewrite constant pool references in the methods_annotations:
+- if (!rewrite_cp_refs_in_methods_annotations(scratch_class, THREAD)) {
+- // propagate failure back to caller
+ RC_TRACE(0x00000001, ("Verification passed => hierarchy change is valid!"));
+ Universe::set_verify_in_progress(false);
+ return true;
+}
-
-- if ((byte_i_ref + 2) > annotations_typeArray->length()) {
-- // not enough room for a const_value_index
-- RC_TRACE_WITH_THREAD(0x02000000, THREAD,
-- ("length() is too small for a const_value_index"));
-- return false;
++
+void VM_RedefineClasses::rollback() {
+ RC_TRACE(0x00000001, ("Rolling back redefinition!"));
+ SystemDictionary::rollback_redefinition();
@@ -8326,7 +7930,7 @@ index 606be1c..ef4f380 100644
+ if (klass->new_version() != NULL && klass->new_version()->klass_part()->is_redefining()) {
+ obj = klass->klass_part()->new_version();
+ oopDesc::encode_store_heap_oop_not_null(p, obj);
- }
++ }
+ } else if (obj->blueprint()->newest_version() == SystemDictionary::Class_klass()->klass_part()->newest_version()) {
+ // update references to java.lang.Class to point to newest version. Only update references to non-primitive
+ // java.lang.Class instances.
@@ -8338,11 +7942,821 @@ index 606be1c..ef4f380 100644
+ obj = klass_oop->java_mirror();
+ }
+ oopDesc::encode_store_heap_oop_not_null(p, obj);
++
++
++ // FIXME: DCEVM: better implementation?
++ // Starting from JDK 7, the java_mirror can be kept in the regular heap. Therefore, it is possible
++ // that the new java_mirror is in the young generation whereas p is in the tenured generation. In that
++ // case we need to run the write barrier to make sure the card table is properly updated. This allows
++ // the JVM to detect the reference in the tenured generation during a young generation GC.
++ if (Universe::heap()->is_in_reserved(p)) {
++ if (GenCollectedHeap::heap()->is_in_young(obj)) {
++ GenRemSet* rs = GenCollectedHeap::heap()->rem_set();
++ assert(rs->rs_kind() == GenRemSet::CardTable, "Wrong rem set kind.");
++ CardTableRS* _rs = (CardTableRS*)rs;
++ _rs->inline_write_ref_field_gc(p, obj);
++ }
++ }
++ }
++ }
++ }
++}
++
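The FIXME block above dirties the card for p by hand when the relocated java_mirror lives in the young generation. A card-marking post-write barrier boils down to the following sketch; the names are illustrative, not the CardTableRS API, though the 512-byte card size and the 0 dirty value do match HotSpot's defaults:

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    // Sketch of a card-marking post-write barrier: after storing a reference that points
    // from an old-generation object into the young generation, dirty the card covering
    // the updated field so the next young collection rescans that slice of the old gen.
    struct CardTable {
        static constexpr std::size_t kCardShift = 9;   // 512-byte cards, HotSpot's default granularity
        static constexpr std::uint8_t kDirty = 0;      // HotSpot marks dirty cards with 0
        char* heap_base;
        std::vector<std::uint8_t> cards;

        CardTable(char* base, std::size_t heap_bytes)
            : heap_base(base), cards(heap_bytes >> kCardShift, 0xff) {} // 0xff == clean

        void post_write_barrier(void* field_addr) {
            cards[(static_cast<char*>(field_addr) - heap_base) >> kCardShift] = kDirty;
        }
    };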
++void VM_RedefineClasses::swap_marks(oop first, oop second) {
++ markOop first_mark = first->mark();
++ markOop second_mark = second->mark();
++ first->set_mark(second_mark);
++ second->set_mark(first_mark);
++}
++
++void VM_RedefineClasses::doit() {
++ Thread *thread = Thread::current();
++
++ RC_TRACE(0x00000001, ("Entering doit!"));
++
++
++ if ((_max_redefinition_flags & Klass::RemoveSuperType) != 0) {
++
++ RC_TIMER_START(_timer_check_type);
++
++ if (!check_type_consistency()) {
++ // (tw) TODO: Rollback the class redefinition
++ rollback();
++ RC_TRACE(0x00000001, ("Detected type inconsistency!"));
++ _result = JVMTI_ERROR_UNSUPPORTED_REDEFINITION_HIERARCHY_CHANGED;
++ RC_TIMER_STOP(_timer_check_type);
++ return;
++ }
++
++ RC_TIMER_STOP(_timer_check_type);
++
++ } else {
++ RC_TRACE(0x00000001, ("No type narrowing => skipping check for type inconsistency"));
++ }
++
++ if (UseMethodForwardPoints) {
++ RC_TRACE(0x00000001, ("Check stack for forwarding methods to new version"));
++ method_forwarding();
++ }
++
++ if (UseSharedSpaces) {
++ // Sharing is enabled so we remap the shared readonly space to
++ // shared readwrite, private just in case we need to redefine
++ // a shared class. We do the remap during the doit() phase of
++ // the safepoint to be safer.
++ if (!CompactingPermGenGen::remap_shared_readonly_as_readwrite()) {
++ RC_TRACE(0x00000001, ("failed to remap shared readonly space to readwrite, private"));
++ _result = JVMTI_ERROR_INTERNAL;
++ return;
++ }
++ }
++
++ RC_TIMER_START(_timer_prepare_redefinition);
++ for (int i = 0; i < _new_classes->length(); i++) {
++ redefine_single_class(_new_classes->at(i), thread);
++ }
++
++ // Deoptimize all compiled code that depends on this class
++ flush_dependent_code(instanceKlassHandle(Thread::current(), (klassOop)NULL), Thread::current());
++
++ // Adjust constantpool caches and vtables for all classes
++ // that reference methods of the evolved class.
++ SystemDictionary::classes_do(adjust_cpool_cache, Thread::current());
++
++ RC_TIMER_STOP(_timer_prepare_redefinition);
++ RC_TIMER_START(_timer_redefinition);
++
++ class ChangePointersOopClosure : public OopClosure {
++ virtual void do_oop(oop* o) {
++ do_oop_work(o);
++ }
++
++ virtual void do_oop(narrowOop* o) {
++ do_oop_work(o);
++ }
++ };
++
++ class ChangePointersObjectClosure : public ObjectClosure {
++
++ private:
++
++ OopClosure *_closure;
++ bool _needs_instance_update;
++ GrowableArray<oop> *_updated_oops;
++
++ public:
++ ChangePointersObjectClosure(OopClosure *closure) : _closure(closure), _needs_instance_update(false), _updated_oops(NULL) {}
++
++ bool needs_instance_update() {
++ return _needs_instance_update;
++ }
++
++ GrowableArray<oop> *updated_oops() { return _updated_oops; }
++
++ virtual void do_object(oop obj) {
++ if (!obj->is_instanceKlass()) {
++ obj->oop_iterate(_closure);
++
++ if (obj->blueprint()->is_redefining()) {
++
++ if (obj->blueprint()->check_redefinition_flag(Klass::HasInstanceTransformer)) {
++ if (_updated_oops == NULL) {
++ _updated_oops = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<oop>(100, true);
++ }
++ _updated_oops->append(obj);
++ }
++
++ if(obj->blueprint()->update_information() != NULL || obj->is_perm()) {
++
++ assert(obj->blueprint()->old_version() != NULL, "must have old version");
++ obj->set_klass_no_check(obj->blueprint()->old_version());
++
++ if (obj->size() != obj->size_given_klass(obj->blueprint()->new_version()->klass_part()) || obj->is_perm()) {
++ // We need an instance update => set back to old klass
++ _needs_instance_update = true;
++
++ } else {
++ MarkSweep::update_fields(obj, obj);
++ assert(obj->blueprint()->is_redefining(), "update fields resets the klass");
++ }
++ }
++ }
++
++ } else {
++ instanceKlass *klass = instanceKlass::cast((klassOop)obj);
++ if (klass->is_redefining()) {
++ // DCEVM: We need to restore the constant pool owner which was updated by do_oop_work
++ instanceKlass* old_klass = instanceKlass::cast(klass->old_version());
++ old_klass->constants()->set_pool_holder(klass->old_version());
++
++ // Initialize the new class! Special static initialization that does not execute the
++ // static initializer but copies static field values from the old class if the name
++ // and signature of a static field match.
++ klass->initialize_redefined_class();
++ }
++ // idubrov: FIXME: we probably don't need that since oops will be visited in the regular way...
++ // idubrov: need to check if there is a test to verify that fields referencing the class being updated
++ // idubrov: will get the new version of that class
++ //klass->iterate_static_fields(_closure);
++ }
++ }
++ };
++
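The object closure defined above decides, per heap object of a redefined class, whether its fields can be remapped in place (instance size unchanged) or whether a resizing full GC has to move it. A much-simplified, hypothetical sketch of that decision:

    #include <cstddef>
    #include <vector>

    // Hypothetical stand-ins for the real VM types.
    struct KlassInfo { bool redefined; std::size_t old_size; std::size_t new_size; };
    struct HeapObj   { KlassInfo* klass; };

    // Returns true if at least one instance needs a resizing full GC; instances whose
    // size is unchanged could have their fields remapped to the new offsets in place.
    bool scan_heap(const std::vector<HeapObj*>& heap) {
        bool needs_full_gc = false;
        for (HeapObj* obj : heap) {
            if (!obj->klass->redefined) continue;
            if (obj->klass->old_size == obj->klass->new_size) {
                // in-place update: copy each old field value to its new offset
            } else {
                needs_full_gc = true;  // layout grew or shrank: let the collector move it
            }
        }
        return needs_full_gc;
    }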
++ ChangePointersOopClosure oopClosure;
++ ChangePointersObjectClosure objectClosure(&oopClosure);
++
++ {
++ SharedHeap::heap()->gc_prologue(true);
++ Universe::root_oops_do(&oopClosure);
++ Universe::heap()->object_iterate(&objectClosure);
++ SharedHeap::heap()->gc_epilogue(false);
++ }
++
++ // Swap marks so that old and new class versions keep the same identity hash codes
++ for (int i=0; i<_new_classes->length(); i++) {
++ swap_marks(_new_classes->at(i)(), _new_classes->at(i)->old_version());
++ swap_marks(_new_classes->at(i)->java_mirror(), _new_classes->at(i)->old_version()->java_mirror());
++ }
++
++ _updated_oops = objectClosure.updated_oops();
++
++ if (objectClosure.needs_instance_update()){
++
++ // Do a full garbage collection to update the instance sizes accordingly
++ RC_TRACE(0x00000001, ("Before performing full GC!"));
++ Universe::set_redefining_gc_run(true);
++ JvmtiGCMarker jgcm;
++ notify_gc_begin(true);
++ Universe::heap()->collect_as_vm_thread(GCCause::_heap_inspection);
++ notify_gc_end();
++ Universe::set_redefining_gc_run(false);
++ RC_TRACE(0x00000001, ("GC done!"));
++ }
++
++
++ if (RC_TRACE_ENABLED(0x00000001)) {
++ if (_updated_oops != NULL) {
++ RC_TRACE(0x00000001, ("%d object(s) updated!", _updated_oops->length()));
++ } else {
++ RC_TRACE(0x00000001, ("No objects updated!"));
++ }
++ }
++
++ // Unmark klassOops as "redefining"
++ for (int i=0; i<_new_classes->length(); i++) {
++ klassOop cur = _new_classes->at(i)();
++ _new_classes->at(i)->set_redefining(false);
++ _new_classes->at(i)->clear_update_information();
++ _new_classes->at(i)->update_supers_to_newest_version();
++
++ if (((instanceKlass *)cur->klass_part()->old_version()->klass_part())->array_klasses() != NULL) {
++ update_array_classes_to_newest_version(((instanceKlass *)cur->klass_part()->old_version()->klass_part())->array_klasses());
++
++ // Transfer the array classes, otherwise we might get cast exceptions when casting array types.
++ ((instanceKlass*)cur->klass_part())->set_array_klasses(((instanceKlass*)cur->klass_part()->old_version()->klass_part())->array_klasses());
++
++ oop new_mirror = _new_classes->at(i)->java_mirror();
++ oop old_mirror = _new_classes->at(i)->old_version()->java_mirror();
++ java_lang_Class::set_array_klass(new_mirror, java_lang_Class::array_klass(old_mirror));
++ }
++ }
++
++ for (int i=T_BOOLEAN; i<=T_LONG; i++) {
++ update_array_classes_to_newest_version(Universe::typeArrayKlassObj((BasicType)i));
++ }
++
++ // Disable any dependent concurrent compilations
++ SystemDictionary::notice_modification();
++
++ // Set flag indicating that some invariants are no longer true.
++ // See jvmtiExport.hpp for detailed explanation.
++ JvmtiExport::set_has_redefined_a_class();
++
++ // Clean up caches in the compiler interface and compiler threads
++ CompileBroker::cleanup_after_redefinition();
++
++#ifdef ASSERT
++
++ // Universe::verify();
++ // JNIHandles::verify();
++
++ SystemDictionary::classes_do(check_class, thread);
++#endif
++
++ update_active_methods();
++ RC_TIMER_STOP(_timer_redefinition);
++
++}
++
++void VM_RedefineClasses::update_array_classes_to_newest_version(klassOop smallest_dimension) {
++
++ arrayKlass *curArrayKlass = arrayKlass::cast(smallest_dimension);
++ assert(curArrayKlass->lower_dimension() == NULL, "argument must be smallest dimension");
++
++
++ while (curArrayKlass != NULL) {
++ klassOop higher_dimension = curArrayKlass->higher_dimension();
++ klassOop lower_dimension = curArrayKlass->lower_dimension();
++ curArrayKlass->update_supers_to_newest_version();
++
++ curArrayKlass = NULL;
++ if (higher_dimension != NULL) {
++ curArrayKlass = arrayKlass::cast(higher_dimension);
++ }
++ }
++
++}
++
++void VM_RedefineClasses::doit_epilogue() {
++
++ RC_TIMER_START(_timer_vm_op_epilogue);
++
++ unlock_threads();
++
++ ResourceMark mark;
++
++ VM_GC_Operation::doit_epilogue();
++ RC_TRACE(0x00000001, ("GC Operation epilogue finished! "));
++
++ GrowableArray<methodHandle> instanceTransformerMethods;
++
++ // Call static transformers
++ for (int i=0; i<_new_classes->length(); i++) {
++
++ instanceKlassHandle klass = _new_classes->at(i);
++
++ // Transfer init state
++ if (klass->old_version() != NULL) {
++ instanceKlass::ClassState state = instanceKlass::cast(klass->old_version())->init_state();
++ if (state > instanceKlass::linked) {
++ klass->initialize(Thread::current());
++ }
++ }
++
++ // Find instance transformer method
++
++ if (klass->check_redefinition_flag(Klass::HasInstanceTransformer)) {
++
++ RC_TRACE(0x00008000, ("Call instance transformer of %s instance", klass->name()->as_C_string()));
++ klassOop cur_klass = klass();
++ while (cur_klass != NULL) {
++ methodOop method = ((instanceKlass*)cur_klass->klass_part())->find_method(vmSymbols::transformer_name(), vmSymbols::void_method_signature());
++ if (method != NULL) {
++ methodHandle instanceTransformerMethod(method);
++ instanceTransformerMethods.append(instanceTransformerMethod);
++ break;
++ } else {
++ cur_klass = cur_klass->klass_part()->super();
++ }
++ }
++ assert(cur_klass != NULL, "must have instance transformer method");
++ } else {
++ instanceTransformerMethods.append(methodHandle(Thread::current(), NULL));
++ }
++ }
++
++
++ // Call instance transformers
++ if (_updated_oops != NULL) {
++
++ for (int i=0; i<_updated_oops->length(); i++) {
++ assert(_updated_oops->at(i) != NULL, "must not be null!");
++ Handle cur(_updated_oops->at(i));
++ instanceKlassHandle klass(cur->klass());
++
++ if (klass->check_redefinition_flag(Klass::HasInstanceTransformer)) {
++
++ methodHandle method = instanceTransformerMethods.at(klass->redefinition_index());
++
++ RC_TRACE(0x00008000, ("executing transformer method"));
++
++ Thread *__the_thread__ = Thread::current();
++ JavaValue result(T_VOID);
++ JavaCallArguments args(cur);
++ JavaCalls::call(&result,
++ method,
++ &args,
++ THREAD);
++
++ // TODO: What to do with an exception here?
++ if (HAS_PENDING_EXCEPTION) {
++ Symbol* ex_name = PENDING_EXCEPTION->klass()->klass_part()->name();
++ RC_TRACE(0x00000002, ("exception when executing transformer: '%s'",
++ ex_name->as_C_string()));
++ CLEAR_PENDING_EXCEPTION;
++ }
++ }
++ }
++
++ delete _updated_oops;
++ _updated_oops = NULL;
++ }
++
++ // Free the array of scratch classes
++ delete _new_classes;
++ _new_classes = NULL;
++ RC_TRACE(0x00000001, ("Redefinition finished!"));
++
++ RC_TIMER_STOP(_timer_vm_op_epilogue);
++}
++
++bool VM_RedefineClasses::is_modifiable_class(oop klass_mirror) {
++ // classes for primitives cannot be redefined
++ if (java_lang_Class::is_primitive(klass_mirror)) {
+ return false;
+ }
+-
+- // rewrite constant pool references in the methods_parameter_annotations:
+- if (!rewrite_cp_refs_in_methods_parameter_annotations(scratch_class,
+- THREAD)) {
+- // propagate failure back to caller
++ klassOop the_class_oop = java_lang_Class::as_klassOop(klass_mirror);
++ // classes for arrays cannot be redefined
++ if (the_class_oop == NULL || !Klass::cast(the_class_oop)->oop_is_instance()) {
+ return false;
+ }
+-
+- // rewrite constant pool references in the methods_default_annotations:
+- if (!rewrite_cp_refs_in_methods_default_annotations(scratch_class,
+- THREAD)) {
+- // propagate failure back to caller
+- return false;
++ return true;
++}
++
++#ifdef ASSERT
++
++void VM_RedefineClasses::verify_classes(klassOop k_oop_latest, oop initiating_loader, TRAPS) {
++ klassOop k_oop = k_oop_latest;
++ while (k_oop != NULL) {
++
++ instanceKlassHandle k_handle(THREAD, k_oop);
++ Verifier::verify(k_handle, Verifier::ThrowException, true, true, THREAD);
++ k_oop = k_oop->klass_part()->old_version();
+ }
+-
+- return true;
+-} // end rewrite_cp_refs()
+-
+-
+-// Rewrite constant pool references in the methods.
+-bool VM_RedefineClasses::rewrite_cp_refs_in_methods(
+- instanceKlassHandle scratch_class, TRAPS) {
+-
+- objArrayHandle methods(THREAD, scratch_class->methods());
+-
+- if (methods.is_null() || methods->length() == 0) {
+- // no methods so nothing to do
+- return true;
+- }
+-
+- // rewrite constant pool references in the methods:
+- for (int i = methods->length() - 1; i >= 0; i--) {
+- methodHandle method(THREAD, (methodOop)methods->obj_at(i));
+- methodHandle new_method;
+- rewrite_cp_refs_in_method(method, &new_method, CHECK_false);
+- if (!new_method.is_null()) {
+- // the method has been replaced so save the new method version
+- methods->obj_at_put(i, new_method());
++}
++
++#endif
++
++// Rewrite fast bytecodes back to their slower equivalents. This undoes the rewriting done in templateTable_xxx.cpp.
++// The reason is that once we zero the cpool caches, all entries need to be re-resolved. Fast bytecodes do not
++// do that; they assume that the cache entry is already resolved.
++static void unpatch_bytecode(methodOop method) {
++ RawBytecodeStream bcs(method);
++ Bytecodes::Code code;
++ Bytecodes::Code java_code;
++ while (!bcs.is_last_bytecode()) {
++ code = bcs.raw_next();
++ address bcp = bcs.bcp();
++
++ if (code == Bytecodes::_breakpoint) {
++ int bci = method->bci_from(bcp);
++ code = method->orig_bytecode_at(bci);
++ java_code = Bytecodes::java_code(code);
++ if (code != java_code &&
++ (java_code == Bytecodes::_getfield ||
++ java_code == Bytecodes::_putfield ||
++ java_code == Bytecodes::_aload_0)) {
++ // Let breakpoint table handling unpatch bytecode
++ method->set_orig_bytecode_at(bci, java_code);
++ }
++ } else {
++ java_code = Bytecodes::java_code(code);
++ if (code != java_code &&
++ (java_code == Bytecodes::_getfield ||
++ java_code == Bytecodes::_putfield ||
++ java_code == Bytecodes::_aload_0)) {
++ *bcp = java_code;
++ }
++ }
++
++ // Additionally, we need to unpatch bytecode at bcp+1 for fast_xaccess (which would be fast field access)
++ if (code == Bytecodes::_fast_iaccess_0 || code == Bytecodes::_fast_aaccess_0 || code == Bytecodes::_fast_faccess_0) {
++ Bytecodes::Code code2 = Bytecodes::code_or_bp_at(bcp + 1);
++ assert(code2 == Bytecodes::_fast_igetfield ||
++ code2 == Bytecodes::_fast_agetfield ||
++ code2 == Bytecodes::_fast_fgetfield, "");
++ *(bcp + 1) = Bytecodes::java_code(code2);
+ }
+ }
+-
+- return true;
+ }
+-
+-// Rewrite constant pool references in the specific method. This code
+-// was adapted from Rewriter::rewrite_method().
+-void VM_RedefineClasses::rewrite_cp_refs_in_method(methodHandle method,
+- methodHandle *new_method_p, TRAPS) {
+-
+- *new_method_p = methodHandle(); // default is no new method
+-
+- // We cache a pointer to the bytecodes here in code_base. If GC
+- // moves the methodOop, then the bytecodes will also move which
+- // will likely cause a crash. We create a No_Safepoint_Verifier
+- // object to detect whether we pass a possible safepoint in this
+- // code block.
+- No_Safepoint_Verifier nsv;
+-
+- // Bytecodes and their length
+- address code_base = method->code_base();
+- int code_length = method->code_size();
+-
+- int bc_length;
+- for (int bci = 0; bci < code_length; bci += bc_length) {
+- address bcp = code_base + bci;
+- Bytecodes::Code c = (Bytecodes::Code)(*bcp);
+-
+- bc_length = Bytecodes::length_for(c);
+- if (bc_length == 0) {
+- // More complicated bytecodes report a length of zero so
+- // we have to try again a slightly different way.
+- bc_length = Bytecodes::length_at(method(), bcp);
+- }
+-
+- assert(bc_length != 0, "impossible bytecode length");
+-
+- switch (c) {
+- case Bytecodes::_ldc:
+- {
+- int cp_index = *(bcp + 1);
+- int new_index = find_new_index(cp_index);
+-
+- if (StressLdcRewrite && new_index == 0) {
+- // If we are stressing ldc -> ldc_w rewriting, then we
+- // always need a new_index value.
+- new_index = cp_index;
+- }
+- if (new_index != 0) {
+- // the original index is mapped so we have more work to do
+- if (!StressLdcRewrite && new_index <= max_jubyte) {
+- // The new value can still use ldc instead of ldc_w
+- // unless we are trying to stress ldc -> ldc_w rewriting
+- RC_TRACE_WITH_THREAD(0x00080000, THREAD,
+- ("%s@" INTPTR_FORMAT " old=%d, new=%d", Bytecodes::name(c),
+- bcp, cp_index, new_index));
+- *(bcp + 1) = new_index;
+- } else {
+- RC_TRACE_WITH_THREAD(0x00080000, THREAD,
+- ("%s->ldc_w@" INTPTR_FORMAT " old=%d, new=%d",
+- Bytecodes::name(c), bcp, cp_index, new_index));
+- // the new value needs ldc_w instead of ldc
+- u_char inst_buffer[4]; // max instruction size is 4 bytes
+- bcp = (address)inst_buffer;
+- // construct new instruction sequence
+- *bcp = Bytecodes::_ldc_w;
+- bcp++;
+- // Rewriter::rewrite_method() does not rewrite ldc -> ldc_w.
+- // See comment below for difference between put_Java_u2()
+- // and put_native_u2().
+- Bytes::put_Java_u2(bcp, new_index);
+-
+- Relocator rc(method, NULL /* no RelocatorListener needed */);
+- methodHandle m;
+- {
+- Pause_No_Safepoint_Verifier pnsv(&nsv);
+-
+- // ldc is 2 bytes and ldc_w is 3 bytes
+- m = rc.insert_space_at(bci, 3, inst_buffer, THREAD);
+- if (m.is_null() || HAS_PENDING_EXCEPTION) {
+- guarantee(false, "insert_space_at() failed");
+- }
+- }
+-
+- // return the new method so that the caller can update
+- // the containing class
+- *new_method_p = method = m;
+- // switch our bytecode processing loop from the old method
+- // to the new method
+- code_base = method->code_base();
+- code_length = method->code_size();
+- bcp = code_base + bci;
+- c = (Bytecodes::Code)(*bcp);
+- bc_length = Bytecodes::length_for(c);
+- assert(bc_length != 0, "sanity check");
+- } // end we need ldc_w instead of ldc
+- } // end if there is a mapped index
+- } break;
+-
+- // these bytecodes have a two-byte constant pool index
+- case Bytecodes::_anewarray : // fall through
+- case Bytecodes::_checkcast : // fall through
+- case Bytecodes::_getfield : // fall through
+- case Bytecodes::_getstatic : // fall through
+- case Bytecodes::_instanceof : // fall through
+- case Bytecodes::_invokedynamic : // fall through
+- case Bytecodes::_invokeinterface: // fall through
+- case Bytecodes::_invokespecial : // fall through
+- case Bytecodes::_invokestatic : // fall through
+- case Bytecodes::_invokevirtual : // fall through
+- case Bytecodes::_ldc_w : // fall through
+- case Bytecodes::_ldc2_w : // fall through
+- case Bytecodes::_multianewarray : // fall through
+- case Bytecodes::_new : // fall through
+- case Bytecodes::_putfield : // fall through
+- case Bytecodes::_putstatic :
+- {
+- address p = bcp + 1;
+- int cp_index = Bytes::get_Java_u2(p);
+- int new_index = find_new_index(cp_index);
+- if (new_index != 0) {
+- // the original index is mapped so update w/ new value
+- RC_TRACE_WITH_THREAD(0x00080000, THREAD,
+- ("%s@" INTPTR_FORMAT " old=%d, new=%d", Bytecodes::name(c),
+- bcp, cp_index, new_index));
+- // Rewriter::rewrite_method() uses put_native_u2() in this
+- // situation because it is reusing the constant pool index
+- // location for a native index into the constantPoolCache.
+- // Since we are updating the constant pool index prior to
+- // verification and constantPoolCache initialization, we
+- // need to keep the new index in Java byte order.
+- Bytes::put_Java_u2(p, new_index);
+- }
+- } break;
+- }
+- } // end for each bytecode
+-} // end rewrite_cp_refs_in_method()
+-
+-
+-// Rewrite constant pool references in the class_annotations field.
+-bool VM_RedefineClasses::rewrite_cp_refs_in_class_annotations(
+- instanceKlassHandle scratch_class, TRAPS) {
+-
+- typeArrayHandle class_annotations(THREAD,
+- scratch_class->class_annotations());
+- if (class_annotations.is_null() || class_annotations->length() == 0) {
+- // no class_annotations so nothing to do
+- return true;
+- }
+-
+- RC_TRACE_WITH_THREAD(0x02000000, THREAD,
+- ("class_annotations length=%d", class_annotations->length()));
+-
+- int byte_i = 0; // byte index into class_annotations
+- return rewrite_cp_refs_in_annotations_typeArray(class_annotations, byte_i,
+- THREAD);
+-}
+-
+-
+-// Rewrite constant pool references in an annotations typeArray. This
+-// "structure" is adapted from the RuntimeVisibleAnnotations_attribute
+-// that is described in section 4.8.15 of the 2nd-edition of the VM spec:
+-//
+-// annotations_typeArray {
+-// u2 num_annotations;
+-// annotation annotations[num_annotations];
+-// }
+-//
+-bool VM_RedefineClasses::rewrite_cp_refs_in_annotations_typeArray(
+- typeArrayHandle annotations_typeArray, int &byte_i_ref, TRAPS) {
+-
+- if ((byte_i_ref + 2) > annotations_typeArray->length()) {
+- // not enough room for num_annotations field
+- RC_TRACE_WITH_THREAD(0x02000000, THREAD,
+- ("length() is too small for num_annotations field"));
+- return false;
+- }
+-
+- u2 num_annotations = Bytes::get_Java_u2((address)
+- annotations_typeArray->byte_at_addr(byte_i_ref));
+- byte_i_ref += 2;
+-
+- RC_TRACE_WITH_THREAD(0x02000000, THREAD,
+- ("num_annotations=%d", num_annotations));
+-
+- int calc_num_annotations = 0;
+- for (; calc_num_annotations < num_annotations; calc_num_annotations++) {
+- if (!rewrite_cp_refs_in_annotation_struct(annotations_typeArray,
+- byte_i_ref, THREAD)) {
+- RC_TRACE_WITH_THREAD(0x02000000, THREAD,
+- ("bad annotation_struct at %d", calc_num_annotations));
+- // propagate failure back to caller
+- return false;
+- }
+- }
+- assert(num_annotations == calc_num_annotations, "sanity check");
+-
+- return true;
+-} // end rewrite_cp_refs_in_annotations_typeArray()
+-
+-
+-// Rewrite constant pool references in the annotation struct portion of
+-// an annotations_typeArray. This "structure" is from section 4.8.15 of
+-// the 2nd-edition of the VM spec:
+-//
+-// struct annotation {
+-// u2 type_index;
+-// u2 num_element_value_pairs;
+-// {
+-// u2 element_name_index;
+-// element_value value;
+-// } element_value_pairs[num_element_value_pairs];
+-// }
+-//
+-bool VM_RedefineClasses::rewrite_cp_refs_in_annotation_struct(
+- typeArrayHandle annotations_typeArray, int &byte_i_ref, TRAPS) {
+- if ((byte_i_ref + 2 + 2) > annotations_typeArray->length()) {
+- // not enough room for smallest annotation_struct
+- RC_TRACE_WITH_THREAD(0x02000000, THREAD,
+- ("length() is too small for annotation_struct"));
+- return false;
+- }
+-
+- u2 type_index = rewrite_cp_ref_in_annotation_data(annotations_typeArray,
+- byte_i_ref, "mapped old type_index=%d", THREAD);
+-
+- u2 num_element_value_pairs = Bytes::get_Java_u2((address)
+- annotations_typeArray->byte_at_addr(
+- byte_i_ref));
+- byte_i_ref += 2;
+-
+- RC_TRACE_WITH_THREAD(0x02000000, THREAD,
+- ("type_index=%d num_element_value_pairs=%d", type_index,
+- num_element_value_pairs));
+-
+- int calc_num_element_value_pairs = 0;
+- for (; calc_num_element_value_pairs < num_element_value_pairs;
+- calc_num_element_value_pairs++) {
+- if ((byte_i_ref + 2) > annotations_typeArray->length()) {
+- // not enough room for another element_name_index, let alone
+- // the rest of another component
+- RC_TRACE_WITH_THREAD(0x02000000, THREAD,
+- ("length() is too small for element_name_index"));
+- return false;
+- }
+-
+- u2 element_name_index = rewrite_cp_ref_in_annotation_data(
+- annotations_typeArray, byte_i_ref,
+- "mapped old element_name_index=%d", THREAD);
+-
+- RC_TRACE_WITH_THREAD(0x02000000, THREAD,
+- ("element_name_index=%d", element_name_index));
+-
+- if (!rewrite_cp_refs_in_element_value(annotations_typeArray,
+- byte_i_ref, THREAD)) {
+- RC_TRACE_WITH_THREAD(0x02000000, THREAD,
+- ("bad element_value at %d", calc_num_element_value_pairs));
+- // propagate failure back to caller
+- return false;
+- }
+- } // end for each component
+- assert(num_element_value_pairs == calc_num_element_value_pairs,
+- "sanity check");
+-
+- return true;
+-} // end rewrite_cp_refs_in_annotation_struct()
+-
+-
+-// Rewrite a constant pool reference at the current position in
+-// annotations_typeArray if needed. Returns the original constant
+-// pool reference if a rewrite was not needed or the new constant
+-// pool reference if a rewrite was needed.
+-u2 VM_RedefineClasses::rewrite_cp_ref_in_annotation_data(
+- typeArrayHandle annotations_typeArray, int &byte_i_ref,
+- const char * trace_mesg, TRAPS) {
+-
+- address cp_index_addr = (address)
+- annotations_typeArray->byte_at_addr(byte_i_ref);
+- u2 old_cp_index = Bytes::get_Java_u2(cp_index_addr);
+- u2 new_cp_index = find_new_index(old_cp_index);
+- if (new_cp_index != 0) {
+- RC_TRACE_WITH_THREAD(0x02000000, THREAD, (trace_mesg, old_cp_index));
+- Bytes::put_Java_u2(cp_index_addr, new_cp_index);
+- old_cp_index = new_cp_index;
+- }
+- byte_i_ref += 2;
+- return old_cp_index;
+-}
+-
+-
+-// Rewrite constant pool references in the element_value portion of an
+-// annotations_typeArray. This "structure" is from section 4.8.15.1 of
+-// the 2nd-edition of the VM spec:
+-//
+-// struct element_value {
+-// u1 tag;
+-// union {
+-// u2 const_value_index;
+-// {
+-// u2 type_name_index;
+-// u2 const_name_index;
+-// } enum_const_value;
+-// u2 class_info_index;
+-// annotation annotation_value;
+-// struct {
+-// u2 num_values;
+-// element_value values[num_values];
+-// } array_value;
+-// } value;
+-// }
+-//
+-bool VM_RedefineClasses::rewrite_cp_refs_in_element_value(
+- typeArrayHandle annotations_typeArray, int &byte_i_ref, TRAPS) {
+-
+- if ((byte_i_ref + 1) > annotations_typeArray->length()) {
+- // not enough room for a tag let alone the rest of an element_value
+- RC_TRACE_WITH_THREAD(0x02000000, THREAD,
+- ("length() is too small for a tag"));
+- return false;
+- }
+-
+- u1 tag = annotations_typeArray->byte_at(byte_i_ref);
+- byte_i_ref++;
+- RC_TRACE_WITH_THREAD(0x02000000, THREAD, ("tag='%c'", tag));
+-
+- switch (tag) {
+- // These BaseType tag values are from Table 4.2 in VM spec:
+- case 'B': // byte
+- case 'C': // char
+- case 'D': // double
+- case 'F': // float
+- case 'I': // int
+- case 'J': // long
+- case 'S': // short
+- case 'Z': // boolean
+-
+- // The remaining tag values are from Table 4.8 in the 2nd-edition of
+- // the VM spec:
+- case 's':
+- {
+- // For the above tag values (including the BaseType values),
+- // value.const_value_index is right union field.
+-
+- if ((byte_i_ref + 2) > annotations_typeArray->length()) {
+- // not enough room for a const_value_index
+- RC_TRACE_WITH_THREAD(0x02000000, THREAD,
+- ("length() is too small for a const_value_index"));
+- return false;
+- }
+-
- u2 const_value_index = rewrite_cp_ref_in_annotation_data(
- annotations_typeArray, byte_i_ref,
- "mapped old const_value_index=%d", THREAD);
-
+-
- RC_TRACE_WITH_THREAD(0x02000000, THREAD,
- ("const_value_index=%d", const_value_index));
- } break;
@@ -8424,20 +8838,8 @@ index 606be1c..ef4f380 100644
- ("bad nested element_value at %d", calc_num_values));
- // propagate failure back to caller
- return false;
-+ // FIXME: DCEVM: better implementation?
-+ // Starting from JDK 7, java_mirror can be kept in the regular heap. Therefore, it is possible
-+ // that the new java_mirror is in the young generation whereas p is in the tenured generation. In that
-+ // case we need to run the write barrier to make sure the card table is properly updated. This will
-+ // allow the JVM to properly detect the reference in the tenured generation during young generation GC.
-+ if (Universe::heap()->is_in_reserved(p)) {
-+ if (GenCollectedHeap::heap()->is_in_young(obj)) {
-+ GenRemSet* rs = GenCollectedHeap::heap()->rem_set();
-+ assert(rs->rs_kind() == GenRemSet::CardTable, "Wrong rem set kind.");
-+ CardTableRS* _rs = (CardTableRS*)rs;
-+ _rs->inline_write_ref_field_gc(p, obj);
-+ }
- }
- }
+- }
+- }
- assert(num_values == calc_num_values, "sanity check");
- } break;
-
@@ -8480,40 +8882,28 @@ index 606be1c..ef4f380 100644
- ("bad field_annotations at %d", i));
- // propagate failure back to caller
- return false;
- }
- }
-+}
-
+- }
+- }
+-
- return true;
-} // end rewrite_cp_refs_in_fields_annotations()
-+void VM_RedefineClasses::swap_marks(oop first, oop second) {
-+ markOop first_mark = first->mark();
-+ markOop second_mark = second->mark();
-+ first->set_mark(second_mark);
-+ second->set_mark(first_mark);
-+}
-
-+void VM_RedefineClasses::doit() {
-+ Thread *thread = Thread::current();
-
+-
+-
-// Rewrite constant pool references in a methods_annotations field.
-bool VM_RedefineClasses::rewrite_cp_refs_in_methods_annotations(
- instanceKlassHandle scratch_class, TRAPS) {
-+ RC_TRACE(0x00000001, ("Entering doit!"));
-
+-
- objArrayHandle methods_annotations(THREAD,
- scratch_class->methods_annotations());
-
+-
- if (methods_annotations.is_null() || methods_annotations->length() == 0) {
- // no methods_annotations so nothing to do
- return true;
- }
-+ if ((_max_redefinition_flags & Klass::RemoveSuperType) != 0) {
-
+-
- RC_TRACE_WITH_THREAD(0x02000000, THREAD,
- ("methods_annotations length=%d", methods_annotations->length()));
-+ RC_TIMER_START(_timer_check_type);
-
+-
- for (int i = 0; i < methods_annotations->length(); i++) {
- typeArrayHandle method_annotations(THREAD,
- (typeArrayOop)methods_annotations->obj_at(i));
@@ -9119,513 +9509,17 @@ index 606be1c..ef4f380 100644
-
-
-// Unevolving classes may point to methods of the_class directly
--// from their constant pool caches, itables, and/or vtables. We
--// use the SystemDictionary::classes_do() facility and this helper
--// to fix up these pointers.
--//
--// Note: We currently don't support updating the vtable in
--// arrayKlassOops. See Open Issues in jvmtiRedefineClasses.hpp.
--void VM_RedefineClasses::adjust_cpool_cache_and_vtable(klassOop k_oop,
-- oop initiating_loader, TRAPS) {
-- Klass *k = k_oop->klass_part();
-- if (k->oop_is_instance()) {
-- HandleMark hm(THREAD);
-- instanceKlass *ik = (instanceKlass *) k;
--
-- // HotSpot specific optimization! HotSpot does not currently
-- // support delegation from the bootstrap class loader to a
-- // user-defined class loader. This means that if the bootstrap
-- // class loader is the initiating class loader, then it will also
-- // be the defining class loader. This also means that classes
-- // loaded by the bootstrap class loader cannot refer to classes
-- // loaded by a user-defined class loader. Note: a user-defined
-- // class loader can delegate to the bootstrap class loader.
-- //
-- // If the current class being redefined has a user-defined class
-- // loader as its defining class loader, then we can skip all
-- // classes loaded by the bootstrap class loader.
-- bool is_user_defined =
-- instanceKlass::cast(_the_class_oop)->class_loader() != NULL;
-- if (is_user_defined && ik->class_loader() == NULL) {
-+ if (!check_type_consistency()) {
-+ // (tw) TODO: Rollback the class redefinition
-+ rollback();
-+ RC_TRACE(0x00000001, ("Detected type inconsistency!"));
-+ _result = JVMTI_ERROR_UNSUPPORTED_REDEFINITION_HIERARCHY_CHANGED;
-+ RC_TIMER_STOP(_timer_check_type);
- return;
- }
-
-- // This is a very busy routine. We don't want too much tracing
-- // printed out.
-- bool trace_name_printed = false;
-+ RC_TIMER_STOP(_timer_check_type);
-
-- // Very noisy: only enable this call if you are trying to determine
-- // that a specific class gets found by this routine.
-- // RC_TRACE macro has an embedded ResourceMark
-- // RC_TRACE_WITH_THREAD(0x00100000, THREAD,
-- // ("adjust check: name=%s", ik->external_name()));
-- // trace_name_printed = true;
-+ } else {
-+ RC_TRACE(0x00000001, ("No type narrowing => skipping check for type inconsistency"));
-+ }
-
-- // Fix the vtable embedded in the_class and subclasses of the_class,
-- // if one exists. We discard scratch_class and we don't keep an
-- // instanceKlass around to hold obsolete methods so we don't have
-- // any other instanceKlass embedded vtables to update. The vtable
-- // holds the methodOops for virtual (but not final) methods.
-- if (ik->vtable_length() > 0 && ik->is_subtype_of(_the_class_oop)) {
-- // ik->vtable() creates a wrapper object; rm cleans it up
-- ResourceMark rm(THREAD);
-- ik->vtable()->adjust_method_entries(_matching_old_methods,
-- _matching_new_methods,
-- _matching_methods_length,
-- &trace_name_printed);
-+ if (UseMethodForwardPoints) {
-+ RC_TRACE(0x00000001, ("Check stack for forwarding methods to new version"));
-+ method_forwarding();
-+ }
-+
-+ if (UseSharedSpaces) {
-+ // Sharing is enabled so we remap the shared readonly space to
-+ // shared readwrite, private just in case we need to redefine
-+ // a shared class. We do the remap during the doit() phase of
-+ // the safepoint to be safer.
-+ if (!CompactingPermGenGen::remap_shared_readonly_as_readwrite()) {
-+ RC_TRACE(0x00000001, ("failed to remap shared readonly space to readwrite, private"));
-+ _result = JVMTI_ERROR_INTERNAL;
-+ return;
-+ }
-+ }
-+
-+ RC_TIMER_START(_timer_prepare_redefinition);
-+ for (int i = 0; i < _new_classes->length(); i++) {
-+ redefine_single_class(_new_classes->at(i), thread);
-+ }
-+
-+ // Deoptimize all compiled code that depends on this class
-+ flush_dependent_code(instanceKlassHandle(Thread::current(), (klassOop)NULL), Thread::current());
-+
-+ // Adjust constantpool caches and vtables for all classes
-+ // that reference methods of the evolved class.
-+ SystemDictionary::classes_do(adjust_cpool_cache, Thread::current());
-+
-+ RC_TIMER_STOP(_timer_prepare_redefinition);
-+ RC_TIMER_START(_timer_redefinition);
-+
-+ class ChangePointersOopClosure : public OopClosure {
-+ virtual void do_oop(oop* o) {
-+ do_oop_work(o);
-+ }
-+
-+ virtual void do_oop(narrowOop* o) {
-+ do_oop_work(o);
-+ }
-+ };
-+
-+ class ChangePointersObjectClosure : public ObjectClosure {
-+
-+ private:
-+
-+ OopClosure *_closure;
-+ bool _needs_instance_update;
-+ GrowableArray<oop> *_updated_oops;
-+
-+ public:
-+ ChangePointersObjectClosure(OopClosure *closure) : _closure(closure), _needs_instance_update(false), _updated_oops(NULL) {}
-+
-+ bool needs_instance_update() {
-+ return _needs_instance_update;
-+ }
-+
-+ GrowableArray<oop> *updated_oops() { return _updated_oops; }
-+
-+ virtual void do_object(oop obj) {
-+ if (!obj->is_instanceKlass()) {
-+ obj->oop_iterate(_closure);
-+
-+ if (obj->blueprint()->is_redefining()) {
-+
-+ if (obj->blueprint()->check_redefinition_flag(Klass::HasInstanceTransformer)) {
-+ if (_updated_oops == NULL) {
-+ _updated_oops = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<oop>(100, true);
-+ }
-+ _updated_oops->append(obj);
-+ }
-+
-+ if(obj->blueprint()->update_information() != NULL || obj->is_perm()) {
-+
-+ assert(obj->blueprint()->old_version() != NULL, "must have old version");
-+ obj->set_klass_no_check(obj->blueprint()->old_version());
-+
-+ if (obj->size() != obj->size_given_klass(obj->blueprint()->new_version()->klass_part()) || obj->is_perm()) {
-+ // We need an instance update => set back to old klass
-+ _needs_instance_update = true;
-+
-+ } else {
-+ MarkSweep::update_fields(obj, obj);
-+ assert(obj->blueprint()->is_redefining(), "update fields resets the klass");
-+ }
-+ }
-+ }
-+
-+ } else {
-+ instanceKlass *klass = instanceKlass::cast((klassOop)obj);
-+ if (klass->is_redefining()) {
-+ // DCEVM: We need to restore the constant pool owner, which was updated by do_oop_work
-+ instanceKlass* old_klass = instanceKlass::cast(klass->old_version());
-+ old_klass->constants()->set_pool_holder(klass->old_version());
-+
-+ // Initialize the new class! Special static initialization that does not execute the
-+ // static constructor but copies static field values from the old class if name
-+ // and signature of a static field match.
-+ klass->initialize_redefined_class();
-+ }
-+ // idubrov: FIXME: we probably don't need this since oops will be visited in the regular way...
-+ // idubrov: need to check whether there is a test verifying that fields referencing the class being updated
-+ // idubrov: will get the new version of that class
-+ //klass->iterate_static_fields(_closure);
-+ }
-+ }
-+ };
-+
-+ ChangePointersOopClosure oopClosure;
-+ ChangePointersObjectClosure objectClosure(&oopClosure);
-+
-+ {
-+ SharedHeap::heap()->gc_prologue(true);
-+ Universe::root_oops_do(&oopClosure);
-+ Universe::heap()->object_iterate(&objectClosure);
-+ SharedHeap::heap()->gc_epilogue(false);
- }
-
-- // If the current class has an itable and we are either redefining an
-- // interface or if the current class is a subclass of the_class, then
-- // we potentially have to fix the itable. If we are redefining an
-- // interface, then we have to call adjust_method_entries() for
-- // every instanceKlass that has an itable since there isn't a
-- // subclass relationship between an interface and an instanceKlass.
-- if (ik->itable_length() > 0 && (Klass::cast(_the_class_oop)->is_interface()
-- || ik->is_subclass_of(_the_class_oop))) {
-- // ik->itable() creates a wrapper object; rm cleans it up
-- ResourceMark rm(THREAD);
-- ik->itable()->adjust_method_entries(_matching_old_methods,
-- _matching_new_methods,
-- _matching_methods_length,
-- &trace_name_printed);
-+ // Swap marks to have same hashcodes
-+ for (int i=0; i<_new_classes->length(); i++) {
-+ swap_marks(_new_classes->at(i)(), _new_classes->at(i)->old_version());
-+ swap_marks(_new_classes->at(i)->java_mirror(), _new_classes->at(i)->old_version()->java_mirror());
- }
-
-- // The constant pools in other classes (other_cp) can refer to
-- // methods in the_class. We have to update method information in
-- // other_cp's cache. If other_cp has a previous version, then we
-- // have to repeat the process for each previous version. The
-- // constant pool cache holds the methodOops for non-virtual
-- // methods and for virtual, final methods.
-- //
-- // Special case: if the current class is the_class, then new_cp
-- // has already been attached to the_class and old_cp has already
-- // been added as a previous version. The new_cp doesn't have any
-- // cached references to old methods so it doesn't need to be
-- // updated. We can simply start with the previous version(s) in
-- // that case.
-- constantPoolHandle other_cp;
-- constantPoolCacheOop cp_cache;
-+ _updated_oops = objectClosure.updated_oops();
-
-- if (k_oop != _the_class_oop) {
-- // this klass' constant pool cache may need adjustment
-- other_cp = constantPoolHandle(ik->constants());
-- cp_cache = other_cp->cache();
-- if (cp_cache != NULL) {
-- cp_cache->adjust_method_entries(_matching_old_methods,
-- _matching_new_methods,
-- _matching_methods_length,
-- &trace_name_printed);
-+ if (objectClosure.needs_instance_update()){
-+
-+ // Do a full garbage collection to update the instance sizes accordingly
-+ RC_TRACE(0x00000001, ("Before performing full GC!"));
-+ Universe::set_redefining_gc_run(true);
-+ JvmtiGCMarker jgcm;
-+ notify_gc_begin(true);
-+ Universe::heap()->collect_as_vm_thread(GCCause::_heap_inspection);
-+ notify_gc_end();
-+ Universe::set_redefining_gc_run(false);
-+ RC_TRACE(0x00000001, ("GC done!"));
-+ }
-+
-+
-+ if (RC_TRACE_ENABLED(0x00000001)) {
-+ if (_updated_oops != NULL) {
-+ RC_TRACE(0x00000001, ("%d object(s) updated!", _updated_oops->length()));
-+ } else {
-+ RC_TRACE(0x00000001, ("No objects updated!"));
-+ }
-+ }
-+
-+ // Unmark klassOops as "redefining"
-+ for (int i=0; i<_new_classes->length(); i++) {
-+ klassOop cur = _new_classes->at(i)();
-+ _new_classes->at(i)->set_redefining(false);
-+ _new_classes->at(i)->clear_update_information();
-+ _new_classes->at(i)->update_supers_to_newest_version();
-+
-+ if (((instanceKlass *)cur->klass_part()->old_version()->klass_part())->array_klasses() != NULL) {
-+ update_array_classes_to_newest_version(((instanceKlass *)cur->klass_part()->old_version()->klass_part())->array_klasses());
-+
-+ // Transfer the array classes, otherwise we might get cast exceptions when casting array types.
-+ ((instanceKlass*)cur->klass_part())->set_array_klasses(((instanceKlass*)cur->klass_part()->old_version()->klass_part())->array_klasses());
-+
-+ oop new_mirror = _new_classes->at(i)->java_mirror();
-+ oop old_mirror = _new_classes->at(i)->old_version()->java_mirror();
-+ java_lang_Class::set_array_klass(new_mirror, java_lang_Class::array_klass(old_mirror));
-+ }
-+ }
-+
-+ for (int i=T_BOOLEAN; i<=T_LONG; i++) {
-+ update_array_classes_to_newest_version(Universe::typeArrayKlassObj((BasicType)i));
-+ }
-+
-+ // Disable any dependent concurrent compilations
-+ SystemDictionary::notice_modification();
-+
-+ // Set flag indicating that some invariants are no longer true.
-+ // See jvmtiExport.hpp for detailed explanation.
-+ JvmtiExport::set_has_redefined_a_class();
-+
-+ // Clean up caches in the compiler interface and compiler threads
-+ CompileBroker::cleanup_after_redefinition();
-+
-+#ifdef ASSERT
-+
-+ // Universe::verify();
-+ // JNIHandles::verify();
-+
-+ SystemDictionary::classes_do(check_class, thread);
-+#endif
-+
-+ update_active_methods();
-+ RC_TIMER_STOP(_timer_redefinition);
-+
-+}
-+
-+void VM_RedefineClasses::update_array_classes_to_newest_version(klassOop smallest_dimension) {
-+
-+ arrayKlass *curArrayKlass = arrayKlass::cast(smallest_dimension);
-+ assert(curArrayKlass->lower_dimension() == NULL, "argument must be smallest dimension");
-+
-+
-+ while (curArrayKlass != NULL) {
-+ klassOop higher_dimension = curArrayKlass->higher_dimension();
-+ klassOop lower_dimension = curArrayKlass->lower_dimension();
-+ curArrayKlass->update_supers_to_newest_version();
-+
-+ curArrayKlass = NULL;
-+ if (higher_dimension != NULL) {
-+ curArrayKlass = arrayKlass::cast(higher_dimension);
-+ }
-+ }
-+
-+}
-+
-+void VM_RedefineClasses::doit_epilogue() {
-+
-+ RC_TIMER_START(_timer_vm_op_epilogue);
-+
-+ unlock_threads();
-+
-+ ResourceMark mark;
-+
-+ VM_GC_Operation::doit_epilogue();
-+ RC_TRACE(0x00000001, ("GC Operation epilogue finished! "));
-+
-+ GrowableArray<methodHandle> instanceTransformerMethods;
-+
-+ // Call static transformers
-+ for (int i=0; i<_new_classes->length(); i++) {
-+
-+ instanceKlassHandle klass = _new_classes->at(i);
-+
-+ // Transfer init state
-+ if (klass->old_version() != NULL) {
-+ instanceKlass::ClassState state = instanceKlass::cast(klass->old_version())->init_state();
-+ if (state > instanceKlass::linked) {
-+ klass->initialize(Thread::current());
- }
- }
-- {
-- ResourceMark rm(THREAD);
-- // PreviousVersionInfo objects returned via PreviousVersionWalker
-- // contain a GrowableArray of handles. We have to clean up the
-- // GrowableArray _after_ the PreviousVersionWalker destructor
-- // has destroyed the handles.
-- {
-- // the previous versions' constant pool caches may need adjustment
-- PreviousVersionWalker pvw(ik);
-- for (PreviousVersionInfo * pv_info = pvw.next_previous_version();
-- pv_info != NULL; pv_info = pvw.next_previous_version()) {
-- other_cp = pv_info->prev_constant_pool_handle();
-- cp_cache = other_cp->cache();
-- if (cp_cache != NULL) {
-- cp_cache->adjust_method_entries(_matching_old_methods,
-- _matching_new_methods,
-- _matching_methods_length,
-- &trace_name_printed);
-- }
-+
-+ // Find instance transformer method
-+
-+ if (klass->check_redefinition_flag(Klass::HasInstanceTransformer)) {
-+
-+ RC_TRACE(0x00008000, ("Call instance transformer of %s instance", klass->name()->as_C_string()));
-+ klassOop cur_klass = klass();
-+ while (cur_klass != NULL) {
-+ methodOop method = ((instanceKlass*)cur_klass->klass_part())->find_method(vmSymbols::transformer_name(), vmSymbols::void_method_signature());
-+ if (method != NULL) {
-+ methodHandle instanceTransformerMethod(method);
-+ instanceTransformerMethods.append(instanceTransformerMethod);
-+ break;
-+ } else {
-+ cur_klass = cur_klass->klass_part()->super();
- }
-- } // pvw is cleaned up
-- } // rm is cleaned up
-+ }
-+ assert(cur_klass != NULL, "must have instance transformer method");
-+ } else {
-+ instanceTransformerMethods.append(methodHandle(Thread::current(), NULL));
-+ }
-+ }
-+
-+
-+ // Call instance transformers
-+ if (_updated_oops != NULL) {
-+
-+ for (int i=0; i<_updated_oops->length(); i++) {
-+ assert(_updated_oops->at(i) != NULL, "must not be null!");
-+ Handle cur(_updated_oops->at(i));
-+ instanceKlassHandle klass(cur->klass());
-+
-+ if (klass->check_redefinition_flag(Klass::HasInstanceTransformer)) {
-+
-+ methodHandle method = instanceTransformerMethods.at(klass->redefinition_index());
-+
-+ RC_TRACE(0x00008000, ("executing transformer method"));
-+
-+ Thread *__the_thread__ = Thread::current();
-+ JavaValue result(T_VOID);
-+ JavaCallArguments args(cur);
-+ JavaCalls::call(&result,
-+ method,
-+ &args,
-+ THREAD);
-+
-+ // TODO: What to do with an exception here?
-+ if (HAS_PENDING_EXCEPTION) {
-+ Symbol* ex_name = PENDING_EXCEPTION->klass()->klass_part()->name();
-+ RC_TRACE(0x00000002, ("exception when executing transformer: '%s'",
-+ ex_name->as_C_string()));
-+ CLEAR_PENDING_EXCEPTION;
-+ }
-+ }
-+ }
-+
-+ delete _updated_oops;
-+ _updated_oops = NULL;
-+ }
-+
-+ // Free the array of scratch classes
-+ delete _new_classes;
-+ _new_classes = NULL;
-+ RC_TRACE(0x00000001, ("Redefinition finished!"));
-+
-+ RC_TIMER_STOP(_timer_vm_op_epilogue);
-+}
-+
-+bool VM_RedefineClasses::is_modifiable_class(oop klass_mirror) {
-+ // classes for primitives cannot be redefined
-+ if (java_lang_Class::is_primitive(klass_mirror)) {
-+ return false;
-+ }
-+ klassOop the_class_oop = java_lang_Class::as_klassOop(klass_mirror);
-+ // classes for arrays cannot be redefined
-+ if (the_class_oop == NULL || !Klass::cast(the_class_oop)->oop_is_instance()) {
-+ return false;
-+ }
-+ return true;
-+}
-+
-+#ifdef ASSERT
-+
-+void VM_RedefineClasses::verify_classes(klassOop k_oop_latest, oop initiating_loader, TRAPS) {
-+ klassOop k_oop = k_oop_latest;
-+ while (k_oop != NULL) {
-+
-+ instanceKlassHandle k_handle(THREAD, k_oop);
-+ Verifier::verify(k_handle, Verifier::ThrowException, true, true, THREAD);
-+ k_oop = k_oop->klass_part()->old_version();
-+ }
-+}
-+
-+#endif
-+
-+// Rewrite fast bytecodes back to their slower equivalents. This undoes the rewriting done in templateTable_xxx.cpp.
-+// The reason is that once we zero the cpool caches, all entries need to be re-resolved. Fast bytecodes do not
-+// do that; they assume that the cache entry is already resolved.
-+static void unpatch_bytecode(methodOop method) {
-+ RawBytecodeStream bcs(method);
-+ Bytecodes::Code code;
-+ Bytecodes::Code java_code;
-+ while (!bcs.is_last_bytecode()) {
-+ code = bcs.raw_next();
-+ address bcp = bcs.bcp();
-+
-+ if (code == Bytecodes::_breakpoint) {
-+ int bci = method->bci_from(bcp);
-+ code = method->orig_bytecode_at(bci);
-+ java_code = Bytecodes::java_code(code);
-+ if (code != java_code &&
-+ (java_code == Bytecodes::_getfield ||
-+ java_code == Bytecodes::_putfield ||
-+ java_code == Bytecodes::_aload_0)) {
-+ // Let breakpoint table handling unpatch bytecode
-+ method->set_orig_bytecode_at(bci, java_code);
-+ }
-+ } else {
-+ java_code = Bytecodes::java_code(code);
-+ if (code != java_code &&
-+ (java_code == Bytecodes::_getfield ||
-+ java_code == Bytecodes::_putfield ||
-+ java_code == Bytecodes::_aload_0)) {
-+ *bcp = java_code;
-+ }
-+ }
-+
-+ // Additionally, we need to unpatch bytecode at bcp+1 for fast_xaccess (which would be fast field access)
-+ if (code == Bytecodes::_fast_iaccess_0 || code == Bytecodes::_fast_aaccess_0 || code == Bytecodes::_fast_faccess_0) {
-+ Bytecodes::Code code2 = Bytecodes::code_or_bp_at(bcp + 1);
-+ assert(code2 == Bytecodes::_fast_igetfield ||
-+ code2 == Bytecodes::_fast_agetfield ||
-+ code2 == Bytecodes::_fast_fgetfield, "");
-+ *(bcp + 1) = Bytecodes::java_code(code2);
-+ }
-+ }
-+}
-+
+// Unevolving classes may point to old methods directly
-+// from their constant pool caches, itables, and/or vtables. We
-+// use the SystemDictionary::classes_do() facility and this helper
+ // from their constant pool caches, itables, and/or vtables. We
+ // use the SystemDictionary::classes_do() facility and this helper
+-// to fix up these pointers.
+// to fix up these pointers. In addition, field offsets and vtable indices
+// in the constant pool cache entries are fixed up.
-+//
-+// Note: We currently don't support updating the vtable in
-+// arrayKlassOops. See Open Issues in jvmtiRedefineClasses.hpp.
+ //
+ // Note: We currently don't support updating the vtable in
+ // arrayKlassOops. See Open Issues in jvmtiRedefineClasses.hpp.
+-void VM_RedefineClasses::adjust_cpool_cache_and_vtable(klassOop k_oop,
+- oop initiating_loader, TRAPS) {
+void VM_RedefineClasses::adjust_cpool_cache(klassOop k_oop_latest, oop initiating_loader, TRAPS) {
+ klassOop k_oop = k_oop_latest;
+ while (k_oop != NULL) {
@@ -9676,24 +9570,22 @@ index 606be1c..ef4f380 100644
+ }
+ }
+ k_oop = k_oop->klass_part()->old_version();
- }
- }
-
- void VM_RedefineClasses::update_jmethod_ids() {
- for (int j = 0; j < _matching_methods_length; ++j) {
-- methodOop old_method = _matching_old_methods[j];
++ }
++}
++
++void VM_RedefineClasses::update_jmethod_ids() {
++ for (int j = 0; j < _matching_methods_length; ++j) {
+ methodOop old_method = (methodOop)_old_methods->obj_at(_matching_old_methods[j]);
+ RC_TRACE(0x00008000, ("matching method %s", old_method->name_and_sig_as_C_string()));
+
- jmethodID jmid = old_method->find_jmethod_id_or_null();
++ jmethodID jmid = old_method->find_jmethod_id_or_null();
+ if (old_method->new_version() != NULL && jmid == NULL) {
+ // (tw) Have to create jmethodID in this case
+ jmid = old_method->jmethod_id();
+ }
+
- if (jmid != NULL) {
- // There is a jmethodID, change it to point to the new method
-- methodHandle new_method_h(_matching_new_methods[j]);
++ if (jmid != NULL) {
++ // There is a jmethodID, change it to point to the new method
+ methodHandle new_method_h((methodOop)_new_methods->obj_at(_matching_new_methods[j]));
+ if (old_method->new_version() == NULL) {
+ methodHandle old_method_h((methodOop)_old_methods->obj_at(_matching_old_methods[j]));
@@ -9707,35 +9599,18 @@ index 606be1c..ef4f380 100644
+ //RC_TRACE(0x00008000, ("Changed jmethodID for new method assigned to %d / result=%d", jmid, result);
+
+ }
- JNIHandles::change_method_associated_with_jmethod_id(jmid, new_method_h);
-- assert(JNIHandles::resolve_jmethod_id(jmid) == _matching_new_methods[j],
-- "should be replaced");
++ JNIHandles::change_method_associated_with_jmethod_id(jmid, new_method_h);
+ //RC_TRACE(0x00008000, ("changing method associated with jmethod id %d to %s", (int)jmid, new_method_h->name()->as_C_string());
+ assert(JNIHandles::resolve_jmethod_id(jmid) == (methodOop)_new_methods->obj_at(_matching_new_methods[j]), "should be replaced");
+ jmethodID mid = ((methodOop)_new_methods->obj_at(_matching_new_methods[j]))->jmethod_id();
+ assert(JNIHandles::resolve_non_null((jobject)mid) == new_method_h(), "must match!");
+
+ //RC_TRACE(0x00008000, ("jmethodID new method: %d jmethodID old method: %d", new_method_h->jmethod_id(), old_method->jmethod_id());
- }
- }
- }
-
--void VM_RedefineClasses::check_methods_and_mark_as_obsolete(
-- BitMap *emcp_methods, int * emcp_method_count_p) {
-- *emcp_method_count_p = 0;
-- int obsolete_count = 0;
-- int old_index = 0;
-- for (int j = 0; j < _matching_methods_length; ++j, ++old_index) {
-- methodOop old_method = _matching_old_methods[j];
-- methodOop new_method = _matching_new_methods[j];
-- methodOop old_array_method;
-
-- // Maintain an old_index into the _old_methods array by skipping
-- // deleted methods
-- while ((old_array_method = (methodOop) _old_methods->obj_at(old_index))
-- != old_method) {
-- ++old_index;
-- }
++ }
++ }
++}
++
++
+// Deoptimize all compiled code that depends on this class.
+//
+// If the can_redefine_classes capability is obtained in the onload
@@ -9753,100 +9628,20 @@ index 606be1c..ef4f380 100644
+//
+void VM_RedefineClasses::flush_dependent_code(instanceKlassHandle k_h, TRAPS) {
+ assert_locked_or_safepoint(Compile_lock);
-
-- if (MethodComparator::methods_EMCP(old_method, new_method)) {
-- // The EMCP definition from JSR-163 requires the bytecodes to be
-- // the same with the exception of constant pool indices which may
-- // differ. However, the constants referred to by those indices
-- // must be the same.
-- //
-- // We use methods_EMCP() for comparison since constant pool
-- // merging can remove duplicate constant pool entries that were
-- // present in the old method and removed from the rewritten new
-- // method. A faster binary comparison function would consider the
-- // old and new methods to be different when they are actually
-- // EMCP.
-- //
-- // The old and new methods are EMCP and you would think that we
-- // could get rid of one of them here and now and save some space.
-- // However, the concept of EMCP only considers the bytecodes and
-- // the constant pool entries in the comparison. Other things,
-- // e.g., the line number table (LNT) or the local variable table
-- // (LVT) don't count in the comparison. So the new (and EMCP)
-- // method can have a new LNT that we need so we can't just
-- // overwrite the new method with the old method.
-- //
-- // When this routine is called, we have already attached the new
-- // methods to the_class so the old methods are effectively
-- // overwritten. However, if an old method is still executing,
-- // then the old method cannot be collected until sometime after
-- // the old method call has returned. So the overwriting of old
-- // methods by new methods will save us space except for those
-- // (hopefully few) old methods that are still executing.
-- //
-- // A method refers to a constMethodOop and this presents another
-- // possible avenue to space savings. The constMethodOop in the
-- // new method contains possibly new attributes (LNT, LVT, etc).
-- // At first glance, it seems possible to save space by replacing
-- // the constMethodOop in the old method with the constMethodOop
-- // from the new method. The old and new methods would share the
-- // same constMethodOop and we would save the space occupied by
-- // the old constMethodOop. However, the constMethodOop contains
-- // a back reference to the containing method. Sharing the
-- // constMethodOop between two methods could lead to confusion in
-- // the code that uses the back reference. This would lead to
-- // brittle code that could be broken in non-obvious ways now or
-- // in the future.
-- //
-- // Another possibility is to copy the constMethodOop from the new
-- // method to the old method and then overwrite the new method with
-- // the old method. Since the constMethodOop contains the bytecodes
-- // for the method embedded in the oop, this option would change
-- // the bytecodes out from under any threads executing the old
-- // method and make the thread's bcp invalid. Since EMCP requires
-- // that the bytecodes be the same modulo constant pool indices, it
-- // is straight forward to compute the correct new bcp in the new
-- // constMethodOop from the old bcp in the old constMethodOop. The
-- // time consuming part would be searching all the frames in all
-- // of the threads to find all of the calls to the old method.
-- //
-- // It looks like we will have to live with the limited savings
-- // that we get from effectively overwriting the old methods
-- // when the new methods are attached to the_class.
++
+ // All dependencies have been recorded from startup or this is a second or
+ // subsequent use of RedefineClasses
-
-- // track which methods are EMCP for add_previous_version() call
-- emcp_methods->set_bit(old_index);
-- (*emcp_method_count_p)++;
++
+ // For now deopt all
+ // (tw) TODO: Improve the dependency system such that we can safely deopt only a subset of the methods
+ if (0 && JvmtiExport::all_dependencies_are_recorded()) {
+ Universe::flush_evol_dependents_on(k_h);
+ } else {
+ CodeCache::mark_all_nmethods_for_deoptimization();
-
-- // An EMCP method is _not_ obsolete. An obsolete method has a
-- // different jmethodID than the current method. An EMCP method
-- // has the same jmethodID as the current method. Having the
-- // same jmethodID for all EMCP versions of a method allows for
-- // a consistent view of the EMCP methods regardless of which
-- // EMCP method you happen to have in hand. For example, a
-- // breakpoint set in one EMCP method will work for all EMCP
-- // versions of the method including the current one.
-- } else {
-- // mark obsolete methods as such
-- old_method->set_is_obsolete();
-- obsolete_count++;
++
+ ResourceMark rm(THREAD);
+ DeoptimizationMarker dm;
-
-- // obsolete methods need a unique idnum
-- u2 num = instanceKlass::cast(_the_class_oop)->next_method_idnum();
-- if (num != constMethodOopDesc::UNSET_IDNUM) {
--// u2 old_num = old_method->method_idnum();
-- old_method->set_method_idnum(num);
--// TO DO: attach obsolete annotations to obsolete method's new idnum
++
+ // Deoptimize all activations depending on marked nmethods
+ Deoptimization::deoptimize_dependents();
+
@@ -9877,7 +9672,7 @@ index 606be1c..ef4f380 100644
+ if (oj >= _old_methods->length()) {
+ if (nj >= _new_methods->length()) {
+ break; // we've looked at everything, done
- }
++ }
+ // New method at the end
+ new_method = (methodOop) _new_methods->obj_at(nj);
+ _added_methods[_added_methods_length++] = nj;
@@ -10094,40 +9889,16 @@ index 606be1c..ef4f380 100644
+ old_method->set_is_old();
+ old_method->set_is_obsolete();
+ ++obsolete_count;
- // With tracing we try not to "yack" too much. The position of
- // this trace assumes there are fewer obsolete methods than
- // EMCP methods.
-- RC_TRACE(0x00000100, ("mark %s(%s) as obsolete",
++ // With tracing we try not to "yack" too much. The position of
++ // this trace assumes there are fewer obsolete methods than
++ // EMCP methods.
+ RC_TRACE(0x00008000, ("mark deleted %s(%s) as obsolete",
- old_method->name()->as_C_string(),
- old_method->signature()->as_C_string()));
- }
-- old_method->set_is_old();
-- }
-- for (int i = 0; i < _deleted_methods_length; ++i) {
-- methodOop old_method = _deleted_methods[i];
--
-- assert(old_method->vtable_index() < 0,
-- "cannot delete methods with vtable entries");;
--
-- // Mark all deleted methods as old and obsolete
-- old_method->set_is_old();
-- old_method->set_is_obsolete();
-- ++obsolete_count;
-- // With tracing we try not to "yack" too much. The position of
-- // this trace assumes there are fewer obsolete methods than
-- // EMCP methods.
-- RC_TRACE(0x00000100, ("mark deleted %s(%s) as obsolete",
-- old_method->name()->as_C_string(),
-- old_method->signature()->as_C_string()));
-- }
-- assert((*emcp_method_count_p + obsolete_count) == _old_methods->length(),
-- "sanity check");
-- RC_TRACE(0x00000100, ("EMCP_cnt=%d, obsolete_cnt=%d", *emcp_method_count_p,
-- obsolete_count));
++ old_method->name()->as_C_string(),
++ old_method->signature()->as_C_string()));
++ }
+ //assert((*emcp_method_count_p + obsolete_count) == _old_methods->length(), "sanity check");
+ RC_TRACE(0x00008000, ("EMCP_cnt=%d, obsolete_cnt=%d !", *emcp_method_count_p, obsolete_count));
- }
++}
+
+// Increment the classRedefinedCount field in the specific instanceKlass
+// and in all direct and indirect subclasses.
@@ -10141,27 +9912,162 @@ index 606be1c..ef4f380 100644
+
+#ifndef PRODUCT
+void VM_RedefineClasses::check_class(klassOop k_oop, TRAPS) {
-+ Klass *k = k_oop->klass_part();
-+ if (k->oop_is_instance()) {
-+ HandleMark hm(THREAD);
-+ instanceKlass *ik = (instanceKlass *) k;
+ Klass *k = k_oop->klass_part();
+ if (k->oop_is_instance()) {
+ HandleMark hm(THREAD);
+ instanceKlass *ik = (instanceKlass *) k;
+-
+- // HotSpot specific optimization! HotSpot does not currently
+- // support delegation from the bootstrap class loader to a
+- // user-defined class loader. This means that if the bootstrap
+- // class loader is the initiating class loader, then it will also
+- // be the defining class loader. This also means that classes
+- // loaded by the bootstrap class loader cannot refer to classes
+- // loaded by a user-defined class loader. Note: a user-defined
+- // class loader can delegate to the bootstrap class loader.
+- //
+- // If the current class being redefined has a user-defined class
+- // loader as its defining class loader, then we can skip all
+- // classes loaded by the bootstrap class loader.
+- bool is_user_defined =
+- instanceKlass::cast(_the_class_oop)->class_loader() != NULL;
+- if (is_user_defined && ik->class_loader() == NULL) {
+- return;
+- }
+-
+- // This is a very busy routine. We don't want too much tracing
+- // printed out.
+- bool trace_name_printed = false;
+-
+- // Very noisy: only enable this call if you are trying to determine
+- // that a specific class gets found by this routine.
+- // RC_TRACE macro has an embedded ResourceMark
+- // RC_TRACE_WITH_THREAD(0x00100000, THREAD,
+- // ("adjust check: name=%s", ik->external_name()));
+- // trace_name_printed = true;
+-
+- // Fix the vtable embedded in the_class and subclasses of the_class,
+- // if one exists. We discard scratch_class and we don't keep an
+- // instanceKlass around to hold obsolete methods so we don't have
+- // any other instanceKlass embedded vtables to update. The vtable
+- // holds the methodOops for virtual (but not final) methods.
+- if (ik->vtable_length() > 0 && ik->is_subtype_of(_the_class_oop)) {
+- // ik->vtable() creates a wrapper object; rm cleans it up
+ assert(ik->is_newest_version(), "must be latest version in system dictionary");
+
+ if (ik->vtable_length() > 0) {
-+ ResourceMark rm(THREAD);
+ ResourceMark rm(THREAD);
+- ik->vtable()->adjust_method_entries(_matching_old_methods,
+- _matching_new_methods,
+- _matching_methods_length,
+- &trace_name_printed);
+- }
+-
+- // If the current class has an itable and we are either redefining an
+- // interface or if the current class is a subclass of the_class, then
+- // we potentially have to fix the itable. If we are redefining an
+- // interface, then we have to call adjust_method_entries() for
+- // every instanceKlass that has an itable since there isn't a
+- // subclass relationship between an interface and an instanceKlass.
+- if (ik->itable_length() > 0 && (Klass::cast(_the_class_oop)->is_interface()
+- || ik->is_subclass_of(_the_class_oop))) {
+- // ik->itable() creates a wrapper object; rm cleans it up
+- ResourceMark rm(THREAD);
+- ik->itable()->adjust_method_entries(_matching_old_methods,
+- _matching_new_methods,
+- _matching_methods_length,
+- &trace_name_printed);
+- }
+-
+- // The constant pools in other classes (other_cp) can refer to
+- // methods in the_class. We have to update method information in
+- // other_cp's cache. If other_cp has a previous version, then we
+- // have to repeat the process for each previous version. The
+- // constant pool cache holds the methodOops for non-virtual
+- // methods and for virtual, final methods.
+- //
+- // Special case: if the current class is the_class, then new_cp
+- // has already been attached to the_class and old_cp has already
+- // been added as a previous version. The new_cp doesn't have any
+- // cached references to old methods so it doesn't need to be
+- // updated. We can simply start with the previous version(s) in
+- // that case.
+- constantPoolHandle other_cp;
+- constantPoolCacheOop cp_cache;
+-
+- if (k_oop != _the_class_oop) {
+- // this klass' constant pool cache may need adjustment
+- other_cp = constantPoolHandle(ik->constants());
+- cp_cache = other_cp->cache();
+- if (cp_cache != NULL) {
+- cp_cache->adjust_method_entries(_matching_old_methods,
+- _matching_new_methods,
+- _matching_methods_length,
+- &trace_name_printed);
+ if (!ik->vtable()->check_no_old_entries()) {
+ RC_TRACE(0x00000001, ("size of class: %d\n",
+ k_oop->size()));
+ RC_TRACE(0x00000001, ("klassVtable::check_no_old_entries failure -- OLD method found -- class: %s",
+ ik->signature_name()));
+ assert(false, "OLD method found");
-+ }
+ }
+- }
+- {
+- ResourceMark rm(THREAD);
+- // PreviousVersionInfo objects returned via PreviousVersionWalker
+- // contain a GrowableArray of handles. We have to clean up the
+- // GrowableArray _after_ the PreviousVersionWalker destructor
+- // has destroyed the handles.
+- {
+- // the previous versions' constant pool caches may need adjustment
+- PreviousVersionWalker pvw(ik);
+- for (PreviousVersionInfo * pv_info = pvw.next_previous_version();
+- pv_info != NULL; pv_info = pvw.next_previous_version()) {
+- other_cp = pv_info->prev_constant_pool_handle();
+- cp_cache = other_cp->cache();
+- if (cp_cache != NULL) {
+- cp_cache->adjust_method_entries(_matching_old_methods,
+- _matching_new_methods,
+- _matching_methods_length,
+- &trace_name_printed);
+- }
+- }
+- } // pvw is cleaned up
+- } // rm is cleaned up
+- }
+-}
+-
+-void VM_RedefineClasses::update_jmethod_ids() {
+- for (int j = 0; j < _matching_methods_length; ++j) {
+- methodOop old_method = _matching_old_methods[j];
+- jmethodID jmid = old_method->find_jmethod_id_or_null();
+- if (jmid != NULL) {
+- // There is a jmethodID, change it to point to the new method
+- methodHandle new_method_h(_matching_new_methods[j]);
+- JNIHandles::change_method_associated_with_jmethod_id(jmid, new_method_h);
+- assert(JNIHandles::resolve_jmethod_id(jmid) == _matching_new_methods[j],
+- "should be replaced");
+
+ ik->vtable()->verify(tty, true);
-+ }
-+ }
-+}
-+
+ }
+ }
+ }
+
+-void VM_RedefineClasses::check_methods_and_mark_as_obsolete(
+- BitMap *emcp_methods, int * emcp_method_count_p) {
+- *emcp_method_count_p = 0;
+- int obsolete_count = 0;
+- int old_index = 0;
+- for (int j = 0; j < _matching_methods_length; ++j, ++old_index) {
+- methodOop old_method = _matching_old_methods[j];
+- methodOop new_method = _matching_new_methods[j];
+- methodOop old_array_method;
+-
+- // Maintain an old_index into the _old_methods array by skipping
+- // deleted methods
+- while ((old_array_method = (methodOop) _old_methods->obj_at(old_index))
+- != old_method) {
+- ++old_index;
+#endif
+
+VM_RedefineClasses::FindAffectedKlassesClosure::FindAffectedKlassesClosure( GrowableArray<instanceKlassHandle> *original_klasses, GrowableArray<instanceKlassHandle> *result )
@@ -10184,7 +10090,91 @@ index 606be1c..ef4f380 100644
+ RC_TRACE(0x00008000, ("Found affected class: %s", klass->klass_part()->name()->as_C_string()));
+ _result->append(klass);
+ break;
-+ }
+ }
+-
+- if (MethodComparator::methods_EMCP(old_method, new_method)) {
+- // The EMCP definition from JSR-163 requires the bytecodes to be
+- // the same with the exception of constant pool indices which may
+- // differ. However, the constants referred to by those indices
+- // must be the same.
+- //
+- // We use methods_EMCP() for comparison since constant pool
+- // merging can remove duplicate constant pool entries that were
+- // present in the old method and removed from the rewritten new
+- // method. A faster binary comparison function would consider the
+- // old and new methods to be different when they are actually
+- // EMCP.
+- //
+- // The old and new methods are EMCP and you would think that we
+- // could get rid of one of them here and now and save some space.
+- // However, the concept of EMCP only considers the bytecodes and
+- // the constant pool entries in the comparison. Other things,
+- // e.g., the line number table (LNT) or the local variable table
+- // (LVT) don't count in the comparison. So the new (and EMCP)
+- // method can have a new LNT that we need so we can't just
+- // overwrite the new method with the old method.
+- //
+- // When this routine is called, we have already attached the new
+- // methods to the_class so the old methods are effectively
+- // overwritten. However, if an old method is still executing,
+- // then the old method cannot be collected until sometime after
+- // the old method call has returned. So the overwriting of old
+- // methods by new methods will save us space except for those
+- // (hopefully few) old methods that are still executing.
+- //
+- // A method refers to a constMethodOop and this presents another
+- // possible avenue to space savings. The constMethodOop in the
+- // new method contains possibly new attributes (LNT, LVT, etc).
+- // At first glance, it seems possible to save space by replacing
+- // the constMethodOop in the old method with the constMethodOop
+- // from the new method. The old and new methods would share the
+- // same constMethodOop and we would save the space occupied by
+- // the old constMethodOop. However, the constMethodOop contains
+- // a back reference to the containing method. Sharing the
+- // constMethodOop between two methods could lead to confusion in
+- // the code that uses the back reference. This would lead to
+- // brittle code that could be broken in non-obvious ways now or
+- // in the future.
+- //
+- // Another possibility is to copy the constMethodOop from the new
+- // method to the old method and then overwrite the new method with
+- // the old method. Since the constMethodOop contains the bytecodes
+- // for the method embedded in the oop, this option would change
+- // the bytecodes out from under any threads executing the old
+- // method and make the thread's bcp invalid. Since EMCP requires
+- // that the bytecodes be the same modulo constant pool indices, it
+- // is straight forward to compute the correct new bcp in the new
+- // constMethodOop from the old bcp in the old constMethodOop. The
+- // time consuming part would be searching all the frames in all
+- // of the threads to find all of the calls to the old method.
+- //
+- // It looks like we will have to live with the limited savings
+- // that we get from effectively overwriting the old methods
+- // when the new methods are attached to the_class.
+-
+- // track which methods are EMCP for add_previous_version() call
+- emcp_methods->set_bit(old_index);
+- (*emcp_method_count_p)++;
+-
+- // An EMCP method is _not_ obsolete. An obsolete method has a
+- // different jmethodID than the current method. An EMCP method
+- // has the same jmethodID as the current method. Having the
+- // same jmethodID for all EMCP versions of a method allows for
+- // a consistent view of the EMCP methods regardless of which
+- // EMCP method you happen to have in hand. For example, a
+- // breakpoint set in one EMCP method will work for all EMCP
+- // versions of the method including the current one.
+- } else {
+- // mark obsolete methods as such
+- old_method->set_is_obsolete();
+- obsolete_count++;
+-
+- // obsolete methods need a unique idnum
+- u2 num = instanceKlass::cast(_the_class_oop)->next_method_idnum();
+- if (num != constMethodOopDesc::UNSET_IDNUM) {
+-// u2 old_num = old_method->method_idnum();
+- old_method->set_method_idnum(num);
+-// TO DO: attach obsolete annotations to obsolete method's new idnum
+ }
+}
+
@@ -10222,9 +10212,32 @@ index 606be1c..ef4f380 100644
+ links->append(Pair<klassOop, klassOop>(curOop, the_class()));
+ break;
+ }
-+ }
-+ }
-+ }
+ }
+- // With tracing we try not to "yack" too much. The position of
+- // this trace assumes there are fewer obsolete methods than
+- // EMCP methods.
+- RC_TRACE(0x00000100, ("mark %s(%s) as obsolete",
+- old_method->name()->as_C_string(),
+- old_method->signature()->as_C_string()));
+ }
+- old_method->set_is_old();
+ }
+- for (int i = 0; i < _deleted_methods_length; ++i) {
+- methodOop old_method = _deleted_methods[i];
+-
+- assert(old_method->vtable_index() < 0,
+- "cannot delete methods with vtable entries");;
+-
+- // Mark all deleted methods as old and obsolete
+- old_method->set_is_old();
+- old_method->set_is_obsolete();
+- ++obsolete_count;
+- // With tracing we try not to "yack" too much. The position of
+- // this trace assumes there are fewer obsolete methods than
+- // EMCP methods.
+- RC_TRACE(0x00000100, ("mark deleted %s(%s) as obsolete",
+- old_method->name()->as_C_string(),
+- old_method->signature()->as_C_string()));
+
+
+ RC_TRACE(0x00000001, ("Identified links between classes! "));
@@ -10245,7 +10258,11 @@ index 606be1c..ef4f380 100644
+ links->append(Pair<klassOop, klassOop>(interfaceKlass, klass()));
+ }
+ }
-+ }
+ }
+- assert((*emcp_method_count_p + obsolete_count) == _old_methods->length(),
+- "sanity check");
+- RC_TRACE(0x00000100, ("EMCP_cnt=%d, obsolete_cnt=%d", *emcp_method_count_p,
+- obsolete_count));
+
+ if (RC_TRACE_ENABLED(0x00000002)) {
+ RC_TRACE(0x00000002, ("Identified links: "));
@@ -10295,8 +10312,8 @@ index 606be1c..ef4f380 100644
+ }
+
+ return JVMTI_ERROR_NONE;
-+}
-+
+ }
+
+void VM_RedefineClasses::oops_do(OopClosure *closure) {
+
+ if (_updated_oops != NULL) {
@@ -10322,9 +10339,10 @@ index 606be1c..ef4f380 100644
+ fi->set_access_flags(fd->access_flags().as_short());
+}
+
-
++
// This internal class transfers the native function registration from old methods
// to new methods. It is designed to handle both the simple case of unchanged
+ // native methods and the complex cases of native method prefixes being added and/or
@@ -2969,7 +3161,7 @@
// Same, caused by prefix removal only 3_2_1_m -> 3_2_m
//
@@ -10363,13 +10381,7 @@ index 606be1c..ef4f380 100644
+ // Wahoo, we found a (possibly prefixed) version of the method, return it.
+ return method;
}
-+ if (depth < prefix_count) {
-+ // Try applying further prefixes (other than this one).
-+ method = search_prefix_name_space(depth+1, name_str, name_len, signature);
-+ if (method != NULL) {
-+ return method; // found
-+ }
-
+-
- // Try adding this prefix to the method name and see if it matches
- // another method name.
- char* prefix = prefixes[depth];
@@ -10384,6 +10396,13 @@ index 606be1c..ef4f380 100644
- // If found along this branch, it was prefixed, mark as such
- method->set_is_prefixed_native();
- return method; // found
++ if (depth < prefix_count) {
++ // Try applying further prefixes (other than this one).
++ method = search_prefix_name_space(depth+1, name_str, name_len, signature);
++ if (method != NULL) {
++ return method; // found
++ }
++
+ // Try adding this prefix to the method name and see if it matches
+ // another method name.
+ char* prefix = prefixes[depth];
@@ -10452,7 +10471,11 @@ index 606be1c..ef4f380 100644
- TransferNativeFunctionRegistration transfer(the_class);
- transfer.transfer_registrations(_deleted_methods, _deleted_methods_length);
- transfer.transfer_registrations(_matching_old_methods, _matching_methods_length);
--}
++void VM_RedefineClasses::transfer_old_native_function_registrations(instanceKlassHandle old_klass) {
++ TransferNativeFunctionRegistration transfer(old_klass);
++ transfer.transfer_registrations(old_klass, _deleted_methods, _deleted_methods_length);
++ transfer.transfer_registrations(old_klass, _matching_old_methods, _matching_methods_length);
+ }
-
-// Deoptimize all compiled code that depends on this class.
-//
@@ -10988,15 +11011,10 @@ index 606be1c..ef4f380 100644
- m->print_name(tty);
- tty->cr();
- }
-+void VM_RedefineClasses::transfer_old_native_function_registrations(instanceKlassHandle old_klass) {
-+ TransferNativeFunctionRegistration transfer(old_klass);
-+ transfer.transfer_registrations(old_klass, _deleted_methods, _deleted_methods_length);
-+ transfer.transfer_registrations(old_klass, _matching_old_methods, _matching_methods_length);
- }
-diff --git a/src/share/vm/prims/jvmtiRedefineClasses.hpp b/src/share/vm/prims/jvmtiRedefineClasses.hpp
-index bfac62c..d7ba79f 100644
---- a/src/share/vm/prims/jvmtiRedefineClasses.hpp
-+++ b/src/share/vm/prims/jvmtiRedefineClasses.hpp
+-}
+diff -r 882f6c762ac5 src/share/vm/prims/jvmtiRedefineClasses.hpp
+--- a/src/share/vm/prims/jvmtiRedefineClasses.hpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/prims/jvmtiRedefineClasses.hpp Thu Jul 09 23:18:17 2015 -0700
@@ -1,26 +1,29 @@
/*
- * Copyright (c) 2003, 2013, Oracle and/or its affiliates. All rights reserved.
@@ -11049,7 +11067,7 @@ index bfac62c..d7ba79f 100644
#ifndef SHARE_VM_PRIMS_JVMTIREDEFINECLASSES_HPP
#define SHARE_VM_PRIMS_JVMTIREDEFINECLASSES_HPP
-@@ -32,330 +35,27 @@
+@@ -32,331 +35,28 @@
#include "oops/objArrayOop.hpp"
#include "prims/jvmtiRedefineClassesTrace.hpp"
#include "runtime/vm_operations.hpp"
@@ -11385,12 +11403,13 @@ index bfac62c..d7ba79f 100644
static int _deleted_methods_length;
static int _added_methods_length;
static klassOop _the_class_oop;
-+
-+ static int _revision_number;
++ static int _revision_number;
++
// The instance fields are used to pass information from
// doit_prologue() to doit() and doit_epilogue().
-@@ -370,42 +70,28 @@
+ jint _class_count;
+@@ -370,43 +70,29 @@
// _index_map_p contains any entries.
int _index_map_count;
intArray * _index_map_p;
@@ -11435,7 +11454,7 @@ index bfac62c..d7ba79f 100644
+ jvmtiError find_sorted_affected_classes(GrowableArray<instanceKlassHandle> *all_affected_klasses);
+ jvmtiError find_class_bytes(instanceKlassHandle the_class, const unsigned char **class_bytes, jint *class_byte_count, jboolean *not_changed);
jvmtiError load_new_class_versions(TRAPS);
--
+
- // Verify that the caller provided class definition(s) that meet
- // the restrictions of RedefineClasses. Normalize the order of
- // overloaded methods as needed.
@@ -11446,9 +11465,10 @@ index bfac62c..d7ba79f 100644
- // Used by compare_and_normalize_class_versions() when normalizing
- // overloaded methods or changing idnum as when adding or deleting methods.
- void swap_all_method_annotations(int i, int j, instanceKlassHandle scratch_class);
-
+-
// Figure out which new methods match old methods in name and signature,
// which methods have been added, and which are no longer present
+ void compute_added_deleted_matching_methods();
@@ -414,103 +100,99 @@
// Change jmethodIDs to point to the new methods
void update_jmethod_ids();
@@ -11625,10 +11645,9 @@ index bfac62c..d7ba79f 100644
};
#endif // SHARE_VM_PRIMS_JVMTIREDEFINECLASSES_HPP
-diff --git a/src/share/vm/prims/methodComparator.cpp b/src/share/vm/prims/methodComparator.cpp
-index 60eaf97..07bb6e3 100644
---- a/src/share/vm/prims/methodComparator.cpp
-+++ b/src/share/vm/prims/methodComparator.cpp
+diff -r 882f6c762ac5 src/share/vm/prims/methodComparator.cpp
+--- a/src/share/vm/prims/methodComparator.cpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/prims/methodComparator.cpp Thu Jul 09 23:18:17 2015 -0700
@@ -65,6 +65,7 @@
if (! args_same(c_old, c_new))
return false;
@@ -11637,10 +11656,9 @@ index 60eaf97..07bb6e3 100644
return true;
}
-diff --git a/src/share/vm/prims/nativeLookup.cpp b/src/share/vm/prims/nativeLookup.cpp
-index 41fc42d..53b3e0c 100644
---- a/src/share/vm/prims/nativeLookup.cpp
-+++ b/src/share/vm/prims/nativeLookup.cpp
+diff -r 882f6c762ac5 src/share/vm/prims/nativeLookup.cpp
+--- a/src/share/vm/prims/nativeLookup.cpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/prims/nativeLookup.cpp Thu Jul 09 23:18:17 2015 -0700
@@ -35,6 +35,7 @@
#include "oops/symbol.hpp"
#include "prims/jvm_misc.hpp"
@@ -11649,14 +11667,14 @@ index 41fc42d..53b3e0c 100644
#include "runtime/arguments.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/javaCalls.hpp"
-@@ -52,7 +53,6 @@
- #ifdef TARGET_OS_FAMILY_bsd
+@@ -53,7 +54,6 @@
# include "os_bsd.inline.hpp"
#endif
--
+-
static void mangle_name_on(outputStream* st, Symbol* name, int begin, int end) {
char* bytes = (char*)name->bytes() + begin;
+ char* end_bytes = (char*)name->bytes() + end;
@@ -138,6 +138,40 @@
{ CC"Java_sun_hotspot_WhiteBox_registerNatives", NULL, FN_PTR(JVM_RegisterWhiteBoxMethods) },
};
@@ -11708,10 +11726,9 @@ index 41fc42d..53b3e0c 100644
// Otherwise call static method findNative in ClassLoader
KlassHandle klass (THREAD, SystemDictionary::ClassLoader_klass());
-diff --git a/src/share/vm/runtime/arguments.cpp b/src/share/vm/runtime/arguments.cpp
-index 0496367..62e9d55 100644
---- a/src/share/vm/runtime/arguments.cpp
-+++ b/src/share/vm/runtime/arguments.cpp
+diff -r 882f6c762ac5 src/share/vm/runtime/arguments.cpp
+--- a/src/share/vm/runtime/arguments.cpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/runtime/arguments.cpp Thu Jul 09 23:18:17 2015 -0700
@@ -1874,6 +1874,15 @@
status = false;
}
@@ -11728,10 +11745,9 @@ index 0496367..62e9d55 100644
return status;
}
-diff --git a/src/share/vm/runtime/deoptimization.cpp b/src/share/vm/runtime/deoptimization.cpp
-index 2b767d4..e9b6f47 100644
---- a/src/share/vm/runtime/deoptimization.cpp
-+++ b/src/share/vm/runtime/deoptimization.cpp
+diff -r 882f6c762ac5 src/share/vm/runtime/deoptimization.cpp
+--- a/src/share/vm/runtime/deoptimization.cpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/runtime/deoptimization.cpp Thu Jul 09 23:18:17 2015 -0700
@@ -599,6 +599,38 @@
// Cleanup thread deopt data
cleanup_deopt_info(thread, array);
@@ -11771,10 +11787,9 @@ index 2b767d4..e9b6f47 100644
#ifndef PRODUCT
if (VerifyStack) {
ResourceMark res_mark;
-diff --git a/src/share/vm/runtime/frame.cpp b/src/share/vm/runtime/frame.cpp
-index aacd835..3772b3f 100644
---- a/src/share/vm/runtime/frame.cpp
-+++ b/src/share/vm/runtime/frame.cpp
+diff -r 882f6c762ac5 src/share/vm/runtime/frame.cpp
+--- a/src/share/vm/runtime/frame.cpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/runtime/frame.cpp Thu Jul 09 23:18:17 2015 -0700
@@ -408,6 +408,12 @@
*interpreter_frame_method_addr() = method;
}
@@ -11823,10 +11838,9 @@ index aacd835..3772b3f 100644
}
}
}
-diff --git a/src/share/vm/runtime/frame.hpp b/src/share/vm/runtime/frame.hpp
-index 9c7bb72..04a6595 100644
---- a/src/share/vm/runtime/frame.hpp
-+++ b/src/share/vm/runtime/frame.hpp
+diff -r 882f6c762ac5 src/share/vm/runtime/frame.hpp
+--- a/src/share/vm/runtime/frame.hpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/runtime/frame.hpp Thu Jul 09 23:18:17 2015 -0700
@@ -346,6 +346,7 @@
// Method & constant pool cache
methodOop interpreter_frame_method() const;
@@ -11835,10 +11849,9 @@ index 9c7bb72..04a6595 100644
methodOop* interpreter_frame_method_addr() const;
constantPoolCacheOop* interpreter_frame_cache_addr() const;
#ifdef PPC
-diff --git a/src/share/vm/runtime/globals.hpp b/src/share/vm/runtime/globals.hpp
-index 0731ca0..cbd6eca 100644
---- a/src/share/vm/runtime/globals.hpp
-+++ b/src/share/vm/runtime/globals.hpp
+diff -r 882f6c762ac5 src/share/vm/runtime/globals.hpp
+--- a/src/share/vm/runtime/globals.hpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/runtime/globals.hpp Thu Jul 09 23:18:17 2015 -0700
@@ -1229,9 +1229,23 @@
product(bool, StressLdcRewrite, false, \
"Force ldc -> ldc_w rewrite during RedefineClasses") \
@@ -11863,10 +11876,9 @@ index 0731ca0..cbd6eca 100644
develop(bool, StressMethodComparator, false, \
"run the MethodComparator on all loaded methods") \
\
-diff --git a/src/share/vm/runtime/interfaceSupport.hpp b/src/share/vm/runtime/interfaceSupport.hpp
-index 2875ee0..61fd8fe 100644
---- a/src/share/vm/runtime/interfaceSupport.hpp
-+++ b/src/share/vm/runtime/interfaceSupport.hpp
+diff -r 882f6c762ac5 src/share/vm/runtime/interfaceSupport.hpp
+--- a/src/share/vm/runtime/interfaceSupport.hpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/runtime/interfaceSupport.hpp Thu Jul 09 23:18:17 2015 -0700
@@ -296,7 +296,7 @@
ThreadToNativeFromVM(JavaThread *thread) : ThreadStateTransition(thread) {
// We are leaving the VM at this point and going directly to native code.
@@ -11876,10 +11888,9 @@ index 2875ee0..61fd8fe 100644
thread->frame_anchor()->make_walkable(thread);
trans_and_fence(_thread_in_vm, _thread_in_native);
// Check for pending. async. exceptions or suspends.
-diff --git a/src/share/vm/runtime/javaCalls.cpp b/src/share/vm/runtime/javaCalls.cpp
-index edbba98..4a27925 100644
---- a/src/share/vm/runtime/javaCalls.cpp
-+++ b/src/share/vm/runtime/javaCalls.cpp
+diff -r 882f6c762ac5 src/share/vm/runtime/javaCalls.cpp
+--- a/src/share/vm/runtime/javaCalls.cpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/runtime/javaCalls.cpp Thu Jul 09 23:18:17 2015 -0700
@@ -60,7 +60,7 @@
bool clear_pending_exception = true;
@@ -11889,10 +11900,9 @@ index edbba98..4a27925 100644
guarantee(!thread->is_Compiler_thread(), "cannot make java calls from the compiler");
_result = result;
-diff --git a/src/share/vm/runtime/jniHandles.cpp b/src/share/vm/runtime/jniHandles.cpp
-index 3cbcaca..30839d7 100644
---- a/src/share/vm/runtime/jniHandles.cpp
-+++ b/src/share/vm/runtime/jniHandles.cpp
+diff -r 882f6c762ac5 src/share/vm/runtime/jniHandles.cpp
+--- a/src/share/vm/runtime/jniHandles.cpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/runtime/jniHandles.cpp Thu Jul 09 23:18:17 2015 -0700
@@ -112,6 +112,10 @@
}
@@ -11904,10 +11914,9 @@ index 3cbcaca..30839d7 100644
return (jmethodID) make_weak_global(mh);
}
-diff --git a/src/share/vm/runtime/mutex.cpp b/src/share/vm/runtime/mutex.cpp
-index 2095237..c541434 100644
---- a/src/share/vm/runtime/mutex.cpp
-+++ b/src/share/vm/runtime/mutex.cpp
+diff -r 882f6c762ac5 src/share/vm/runtime/mutex.cpp
+--- a/src/share/vm/runtime/mutex.cpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/runtime/mutex.cpp Thu Jul 09 23:18:17 2015 -0700
@@ -1227,7 +1227,7 @@
// in increasing rank order (modulo any native ranks)
for (tmp = locks; tmp != NULL; tmp = tmp->next()) {
@@ -11934,10 +11943,9 @@ index 2095237..c541434 100644
locks != NULL && locks->rank() <= this->rank() &&
!SafepointSynchronize::is_at_safepoint() &&
this != Interrupt_lock &&
-diff --git a/src/share/vm/runtime/mutex.hpp b/src/share/vm/runtime/mutex.hpp
-index 7d2cd82..11eb32e 100644
---- a/src/share/vm/runtime/mutex.hpp
-+++ b/src/share/vm/runtime/mutex.hpp
+diff -r 882f6c762ac5 src/share/vm/runtime/mutex.hpp
+--- a/src/share/vm/runtime/mutex.hpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/runtime/mutex.hpp Thu Jul 09 23:18:17 2015 -0700
@@ -109,7 +109,8 @@
barrier = safepoint + 1,
nonleaf = barrier + 1,
@@ -11948,10 +11956,9 @@ index 7d2cd82..11eb32e 100644
};
// The WaitSet and EntryList linked lists are composed of ParkEvents.
-diff --git a/src/share/vm/runtime/mutexLocker.cpp b/src/share/vm/runtime/mutexLocker.cpp
-index 77fddef..9d453d4 100644
---- a/src/share/vm/runtime/mutexLocker.cpp
-+++ b/src/share/vm/runtime/mutexLocker.cpp
+diff -r 882f6c762ac5 src/share/vm/runtime/mutexLocker.cpp
+--- a/src/share/vm/runtime/mutexLocker.cpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/runtime/mutexLocker.cpp Thu Jul 09 23:18:17 2015 -0700
@@ -49,6 +49,7 @@
// Consider using GCC's __read_mostly.
@@ -11984,10 +11991,9 @@ index 77fddef..9d453d4 100644
#ifdef INCLUDE_TRACE
def(JfrMsg_lock , Monitor, leaf, true);
-diff --git a/src/share/vm/runtime/mutexLocker.hpp b/src/share/vm/runtime/mutexLocker.hpp
-index a6549b1..089ed11 100644
---- a/src/share/vm/runtime/mutexLocker.hpp
-+++ b/src/share/vm/runtime/mutexLocker.hpp
+diff -r 882f6c762ac5 src/share/vm/runtime/mutexLocker.hpp
+--- a/src/share/vm/runtime/mutexLocker.hpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/runtime/mutexLocker.hpp Thu Jul 09 23:18:17 2015 -0700
@@ -43,6 +43,8 @@
// Mutexes used in the VM.
@@ -11997,10 +12003,9 @@ index a6549b1..089ed11 100644
extern Monitor* SystemDictionary_lock; // a lock on the system dictonary
extern Mutex* PackageTable_lock; // a lock on the class loader package table
extern Mutex* CompiledIC_lock; // a lock used to guard compiled IC patching and access
-diff --git a/src/share/vm/runtime/reflection.cpp b/src/share/vm/runtime/reflection.cpp
-index 1665d93..6baabba 100644
---- a/src/share/vm/runtime/reflection.cpp
-+++ b/src/share/vm/runtime/reflection.cpp
+diff -r 882f6c762ac5 src/share/vm/runtime/reflection.cpp
+--- a/src/share/vm/runtime/reflection.cpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/runtime/reflection.cpp Thu Jul 09 23:18:17 2015 -0700
@@ -468,7 +468,8 @@
// sun/reflect/MagicAccessorImpl subclasses to succeed trivially.
if ( JDK_Version::is_gte_jdk14x_version()
@@ -12034,10 +12039,9 @@ index 1665d93..6baabba 100644
return true;
}
-diff --git a/src/share/vm/runtime/sharedRuntime.cpp b/src/share/vm/runtime/sharedRuntime.cpp
-index 709d783..e0e19b1 100644
---- a/src/share/vm/runtime/sharedRuntime.cpp
-+++ b/src/share/vm/runtime/sharedRuntime.cpp
+diff -r 882f6c762ac5 src/share/vm/runtime/sharedRuntime.cpp
+--- a/src/share/vm/runtime/sharedRuntime.cpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/runtime/sharedRuntime.cpp Thu Jul 09 23:18:17 2015 -0700
@@ -1137,7 +1137,20 @@
if (JvmtiExport::can_hotswap_or_post_breakpoint()) {
int retry_count = 0;
@@ -12060,10 +12064,9 @@ index 709d783..e0e19b1 100644
// If has a pending exception then there is no need to re-try to
// resolve this method.
// If the method has been redefined, we need to try again.
-diff --git a/src/share/vm/runtime/thread.cpp b/src/share/vm/runtime/thread.cpp
-index f3acf4d..ea61cae 100644
---- a/src/share/vm/runtime/thread.cpp
-+++ b/src/share/vm/runtime/thread.cpp
+diff -r 882f6c762ac5 src/share/vm/runtime/thread.cpp
+--- a/src/share/vm/runtime/thread.cpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/runtime/thread.cpp Thu Jul 09 23:18:17 2015 -0700
@@ -216,6 +216,8 @@
set_self_raw_id(0);
set_lgrp_id(-1);
@@ -12081,22 +12084,22 @@ index f3acf4d..ea61cae 100644
#ifdef ASSERT
_visited_for_critical_count = false;
-@@ -880,6 +883,15 @@
- bool Thread::owns_locks_but_compiled_lock() const {
- for(Monitor *cur = _owned_locks; cur; cur = cur->next()) {
- if (cur != Compile_lock) return true;
-+ }
-+ return false;
-+}
-+
+@@ -884,6 +887,15 @@
+ return false;
+ }
+
+bool Thread::owns_locks_but_redefine_classes_lock() const {
+ for(Monitor *cur = _owned_locks; cur; cur = cur->next()) {
+ if (cur != RedefineClasses_lock && cur->rank() != Mutex::redefine_classes) {
+ return true;
+ }
- }
- return false;
- }
++ }
++ return false;
++}
++
+
+ #endif
+
@@ -1637,7 +1649,7 @@
ThreadStateTransition::transition_and_fence(this, _thread_new, _thread_in_vm);
@@ -12131,11 +12134,10 @@ index f3acf4d..ea61cae 100644
// All JavaThreads
#define ALL_JAVA_THREADS(X) for (JavaThread* X = _thread_list; X; X = X->next())
-diff --git a/src/share/vm/runtime/thread.hpp b/src/share/vm/runtime/thread.hpp
-index 774bd27..4ca4502 100644
---- a/src/share/vm/runtime/thread.hpp
-+++ b/src/share/vm/runtime/thread.hpp
-@@ -203,10 +203,13 @@
+diff -r 882f6c762ac5 src/share/vm/runtime/thread.hpp
+--- a/src/share/vm/runtime/thread.hpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/runtime/thread.hpp Thu Jul 09 23:18:17 2015 -0700
+@@ -203,11 +203,14 @@
void enter_signal_handler() { _num_nested_signal++; }
void leave_signal_handler() { _num_nested_signal--; }
bool is_inside_signal_handler() const { return _num_nested_signal > 0; }
@@ -12144,11 +12146,12 @@ index 774bd27..4ca4502 100644
private:
// Debug tracing
static void trace(const char* msg, const Thread* const thread) PRODUCT_RETURN;
-+
-+ Mutex* _redefine_classes_mutex;
++ Mutex* _redefine_classes_mutex;
++
// Active_handles points to a block of handles
JNIHandleBlock* _active_handles;
+
@@ -530,10 +533,15 @@
uintptr_t _self_raw_id; // used by get_thread (mutable)
int _lgrp_id;
@@ -12217,10 +12220,9 @@ index 774bd27..4ca4502 100644
// Initializes the vm and creates the vm thread
static jint create_vm(JavaVMInitArgs* args, bool* canTryAgain);
static void convert_vm_init_libraries_to_agents();
-diff --git a/src/share/vm/runtime/vframe.cpp b/src/share/vm/runtime/vframe.cpp
-index 09e324f..d47ffef 100644
---- a/src/share/vm/runtime/vframe.cpp
-+++ b/src/share/vm/runtime/vframe.cpp
+diff -r 882f6c762ac5 src/share/vm/runtime/vframe.cpp
+--- a/src/share/vm/runtime/vframe.cpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/runtime/vframe.cpp Thu Jul 09 23:18:17 2015 -0700
@@ -253,6 +253,46 @@
return fr().interpreter_frame_method();
}
@@ -12268,10 +12270,9 @@ index 09e324f..d47ffef 100644
StackValueCollection* interpretedVFrame::locals() const {
int length = method()->max_locals();
-diff --git a/src/share/vm/runtime/vframe.hpp b/src/share/vm/runtime/vframe.hpp
-index badfea5..edbc5c7 100644
---- a/src/share/vm/runtime/vframe.hpp
-+++ b/src/share/vm/runtime/vframe.hpp
+diff -r 882f6c762ac5 src/share/vm/runtime/vframe.hpp
+--- a/src/share/vm/runtime/vframe.hpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/runtime/vframe.hpp Thu Jul 09 23:18:17 2015 -0700
@@ -163,6 +163,7 @@
StackValueCollection* locals() const;
StackValueCollection* expressions() const;
@@ -12280,10 +12281,9 @@ index badfea5..edbc5c7 100644
void set_locals(StackValueCollection* values) const;
-diff --git a/src/share/vm/runtime/vmThread.cpp b/src/share/vm/runtime/vmThread.cpp
-index 0a3e0da..3d294e1 100644
---- a/src/share/vm/runtime/vmThread.cpp
-+++ b/src/share/vm/runtime/vmThread.cpp
+diff -r 882f6c762ac5 src/share/vm/runtime/vmThread.cpp
+--- a/src/share/vm/runtime/vmThread.cpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/runtime/vmThread.cpp Thu Jul 09 23:18:17 2015 -0700
@@ -694,6 +694,10 @@
void VMThread::oops_do(OopClosure* f, CodeBlobClosure* cf) {
Thread::oops_do(f, cf);
@@ -12295,10 +12295,9 @@ index 0a3e0da..3d294e1 100644
}
//------------------------------------------------------------------------------------------------------------------
-diff --git a/src/share/vm/utilities/exceptions.cpp b/src/share/vm/utilities/exceptions.cpp
-index 03f254d..c9e0efc 100644
---- a/src/share/vm/utilities/exceptions.cpp
-+++ b/src/share/vm/utilities/exceptions.cpp
+diff -r 882f6c762ac5 src/share/vm/utilities/exceptions.cpp
+--- a/src/share/vm/utilities/exceptions.cpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/utilities/exceptions.cpp Thu Jul 09 23:18:17 2015 -0700
@@ -254,6 +254,8 @@
assert(thread->is_Java_thread(), "can only be called by a Java thread");
assert(!thread->has_pending_exception(), "already has exception");
@@ -12334,10 +12333,9 @@ index 03f254d..c9e0efc 100644
return h_exception;
}
-diff --git a/src/share/vm/utilities/growableArray.hpp b/src/share/vm/utilities/growableArray.hpp
-index ea92a80..05e940e 100644
---- a/src/share/vm/utilities/growableArray.hpp
-+++ b/src/share/vm/utilities/growableArray.hpp
+diff -r 882f6c762ac5 src/share/vm/utilities/growableArray.hpp
+--- a/src/share/vm/utilities/growableArray.hpp Thu Jul 09 23:10:04 2015 -0700
++++ b/src/share/vm/utilities/growableArray.hpp Thu Jul 09 23:18:17 2015 -0700
@@ -145,6 +145,33 @@
assert(on_stack(), "fast ResourceObj path only");
return (void*)resource_allocate_bytes(thread, elementSize * _max);
@@ -12371,4 +12369,4 @@ index ea92a80..05e940e 100644
+ }
};
- template<class E> class GrowableArray : public GenericGrowableArray {
\ No newline at end of file
+ template<class E> class GrowableArray : public GenericGrowableArray {
diff --git a/hotspot/.hg/patches/series b/hotspot/.hg/patches/series
index c15467c0..10f843cb 100644
--- a/hotspot/.hg/patches/series
+++ b/hotspot/.hg/patches/series
@@ -35,8 +35,9 @@ light-jdk7u79-b02.patch #+light-jdk7u79-b02
light-jdk7u60-deopt-cp.patch #+light-jdk7u60-b09 #+light-jdk7u71-b01 #+light-jdk7u79-b02
full-jdk7u60-b09.patch #+full-jdk7u60-b09
full-jdk7u71-b01.patch #+full-jdk7u71-b01
-full-jdk7u79-b02.patch #+full-jdk7u79-b02
-full-jdk7u60-deopt-cp.patch #+full-jdk7u60-b09 #+full-jdk7u71-b01 #+full-jdk7u79-b02
+full-jdk7u79-b15.patch #+full-jdk7u79-b15
+full-jdk7u60-deopt-cp.patch #+full-jdk7u60-b09 #+full-jdk7u71-b01 #+full-jdk7u79-b15
+full-jdk7u79-b15-method-handles.patch #+full-jdk7u79-b15
light-jdk8u5-b13.patch #+light-jdk8u5-b13