diff --git a/src/cpu/x86/vm/interp_masm_x86_32.cpp b/src/cpu/x86/vm/interp_masm_x86_32.cpp
index b0ebcfd..6366d68 100644
--- a/src/cpu/x86/vm/interp_masm_x86_32.cpp
+++ b/src/cpu/x86/vm/interp_masm_x86_32.cpp
@@ -1364,7 +1364,7 @@ void InterpreterMacroAssembler::notify_method_entry() {
   }
 
   // RedefineClasses() tracing support for obsolete method entry
-  if (RC_TRACE_IN_RANGE(0x00001000, 0x00002000)) {
+  IF_TRACE_RC4 {
     get_thread(rcx);
     get_method(rbx);
     call_VM_leaf(
diff --git a/src/cpu/x86/vm/interp_masm_x86_64.cpp b/src/cpu/x86/vm/interp_masm_x86_64.cpp
index 2790c2a..c315b18 100644
--- a/src/cpu/x86/vm/interp_masm_x86_64.cpp
+++ b/src/cpu/x86/vm/interp_masm_x86_64.cpp
@@ -1427,7 +1427,7 @@ void InterpreterMacroAssembler::notify_method_entry() {
   }
 
   // RedefineClasses() tracing support for obsolete method entry
-  if (RC_TRACE_IN_RANGE(0x00001000, 0x00002000)) {
+  IF_TRACE_RC4 {
     get_method(c_rarg1);
     call_VM_leaf(
       CAST_FROM_FN_PTR(address, SharedRuntime::rc_trace_method_entry),
diff --git a/src/cpu/x86/vm/sharedRuntime_x86_32.cpp b/src/cpu/x86/vm/sharedRuntime_x86_32.cpp
index 16958cd..09d6300 100644
--- a/src/cpu/x86/vm/sharedRuntime_x86_32.cpp
+++ b/src/cpu/x86/vm/sharedRuntime_x86_32.cpp
@@ -1976,7 +1976,7 @@ nmethod* SharedRuntime::generate_native_wrapper(MacroAssembler* masm,
   }
 
   // RedefineClasses() tracing support for obsolete method entry
-  if (RC_TRACE_IN_RANGE(0x00001000, 0x00002000)) {
+  IF_TRACE_RC4 {
     __ movoop(rax, JNIHandles::make_local(method()));
     __ call_VM_leaf(
          CAST_FROM_FN_PTR(address, SharedRuntime::rc_trace_method_entry),
diff --git a/src/cpu/x86/vm/sharedRuntime_x86_64.cpp b/src/cpu/x86/vm/sharedRuntime_x86_64.cpp
index 7dc4e62..86c8c95 100644
--- a/src/cpu/x86/vm/sharedRuntime_x86_64.cpp
+++ b/src/cpu/x86/vm/sharedRuntime_x86_64.cpp
@@ -2235,7 +2235,7 @@ nmethod* SharedRuntime::generate_native_wrapper(MacroAssembler* masm,
   }
 
   // RedefineClasses() tracing support for obsolete method entry
-  if (RC_TRACE_IN_RANGE(0x00001000, 0x00002000)) {
+  IF_TRACE_RC4 {
     // protect the args we've loaded
     save_args(masm, total_c_args, c_arg, out_regs);
     __ movoop(c_rarg1, JNIHandles::make_local(method()));
diff --git a/src/share/vm/c1/c1_Compilation.hpp b/src/share/vm/c1/c1_Compilation.hpp
index 9a8ca61..83e6f54 100644
--- a/src/share/vm/c1/c1_Compilation.hpp
+++ b/src/share/vm/c1/c1_Compilation.hpp
@@ -242,8 +242,9 @@ class Compilation: public StackObj {
 #define BAILOUT(msg)               { bailout(msg); return;              }
 #define BAILOUT_(msg, res)         { bailout(msg); return res;          }
 
-#define CHECK_BAILOUT()            { if (bailed_out()) return;          }
-#define CHECK_BAILOUT_(res)        { if (bailed_out()) return res;      }
+// (tw) Also checks a thread-local flag that another thread can set to trigger a compiler bailout.
+#define CHECK_BAILOUT()            { if (((CompilerThread *)Thread::current())->should_bailout()) bailout("Aborted externally"); if (bailed_out()) return;          }
+#define CHECK_BAILOUT_(res)        { if (((CompilerThread *)Thread::current())->should_bailout()) bailout("Aborted externally"); if (bailed_out()) return res;      }
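+// A minimal usage sketch (hypothetical, not part of this change): a thread driving a
+// class redefinition could abort an in-flight compile roughly like
+//   compiler_thread->set_should_bailout(true);    // observed at the next CHECK_BAILOUT() poll
+//   compiler_thread->compilation_mutex()->lock();  // wait until the compile has finished bailing out
+//   compiler_thread->compilation_mutex()->unlock();
+// (names taken from the CompilerThread additions elsewhere in this patch).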
 
 
 class InstructionMark: public StackObj {
diff --git a/src/share/vm/ci/ciObjectFactory.cpp b/src/share/vm/ci/ciObjectFactory.cpp
index e0ab96b..db8e551 100644
--- a/src/share/vm/ci/ciObjectFactory.cpp
+++ b/src/share/vm/ci/ciObjectFactory.cpp
@@ -764,3 +764,26 @@ void ciObjectFactory::print() {
              _unloaded_instances->length(),
              _unloaded_klasses->length());
 }
+
+int ciObjectFactory::compare_ciobjects(ciObject** a, ciObject** b) {
+  oop oop1 = (*a)->get_oop();
+  oop oop2 = (*b)->get_oop();
+  return ((oop1 > oop2) ? 1 : ((oop1 == oop2) ? 0 : -1));
+}
+
+// (DCEVM) Re-sorting the shared ciObject array after class redefinition
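+// The factory keeps this array sorted by oop address (see the verification loop
+// below), and class redefinition can change which oops the shared ciObjects wrap,
+// so the order presumably has to be re-established before further lookups.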
+void ciObjectFactory::resort_shared_ci_objects() {
+  _shared_ci_objects->sort(ciObjectFactory::compare_ciobjects);
+
+#ifdef ASSERT
+  if (CIObjectFactoryVerify) {
+    oop last = NULL;
+    for (int j = 0; j < _shared_ci_objects->length(); j++) {
+      oop o = _shared_ci_objects->at(j)->get_oop();
+      assert(last < o, "out of order");
+      last = o;
+    }
+  }
+#endif // ASSERT
+}
+
diff --git a/src/share/vm/ci/ciObjectFactory.hpp b/src/share/vm/ci/ciObjectFactory.hpp
index 26cc2c3..d99d3d6 100644
--- a/src/share/vm/ci/ciObjectFactory.hpp
+++ b/src/share/vm/ci/ciObjectFactory.hpp
@@ -88,6 +88,7 @@ private:
 
   ciInstance* get_unloaded_instance(ciInstanceKlass* klass);
 
+  static int compare_ciobjects(ciObject** a, ciObject** b);
 public:
   static bool is_initialized() { return _initialized; }
 
@@ -137,6 +138,8 @@ public:
 
   void print_contents();
   void print();
+
+  static void resort_shared_ci_objects();
 };
 
 #endif // SHARE_VM_CI_CIOBJECTFACTORY_HPP
diff --git a/src/share/vm/classfile/classFileParser.cpp b/src/share/vm/classfile/classFileParser.cpp
index 8d57bb1..8b7d5eb 100644
--- a/src/share/vm/classfile/classFileParser.cpp
+++ b/src/share/vm/classfile/classFileParser.cpp
@@ -795,6 +795,7 @@ objArrayHandle ClassFileParser::parse_interfaces(constantPoolHandle cp,
                                                  Handle class_loader,
                                                  Handle protection_domain,
                                                  Symbol* class_name,
+                                                 KlassHandle old_klass,
                                                  TRAPS) {
   ClassFileStream* cfs = stream();
   assert(length > 0, "only called for length>0");
@@ -813,6 +814,9 @@ objArrayHandle ClassFileParser::parse_interfaces(constantPoolHandle cp,
       interface_index, CHECK_(nullHandle));
     if (cp->tag_at(interface_index).is_klass()) {
       interf = KlassHandle(THREAD, cp->resolved_klass_at(interface_index));
+      if (!old_klass.is_null() && !interf->is_newest_version()) {
+        interf = KlassHandle(THREAD, interf->newest_version());
+      }
     } else {
       Symbol*  unresolved_klass  = cp->klass_name_at(interface_index);
 
@@ -825,6 +829,9 @@ objArrayHandle ClassFileParser::parse_interfaces(constantPoolHandle cp,
       klassOop k = SystemDictionary::resolve_super_or_fail(class_name,
                     unresolved_klass, class_loader, protection_domain,
                     false, CHECK_(nullHandle));
+      if (!old_klass.is_null()) {
+        k = k->klass_part()->newest_version();
+      }
       interf = KlassHandle(THREAD, k);
     }
 
@@ -2921,8 +2928,10 @@ typeArrayHandle ClassFileParser::assemble_annotations(u1* runtime_visible_annota
 instanceKlassHandle ClassFileParser::parseClassFile(Symbol* name,
                                                     Handle class_loader,
                                                     Handle protection_domain,
+                                                    KlassHandle old_klass,
                                                     KlassHandle host_klass,
                                                     GrowableArray<Handle>* cp_patches,
+                                                    GrowableArray<Symbol*>* parsed_super_symbols,
                                                     TempNewSymbol& parsed_name,
                                                     bool verify,
                                                     TRAPS) {
@@ -2948,7 +2957,7 @@ instanceKlassHandle ClassFileParser::parseClassFile(Symbol* name,
 
   init_parsed_class_attributes();
 
-  if (JvmtiExport::should_post_class_file_load_hook()) {
+  if (parsed_super_symbols == NULL && JvmtiExport::should_post_class_file_load_hook()) {
     // Get the cached class file bytes (if any) from the class that
     // is being redefined or retransformed. We use jvmti_thread_state()
     // instead of JvmtiThreadState::state_for(jt) so we don't allocate
@@ -2971,10 +2980,13 @@ instanceKlassHandle ClassFileParser::parseClassFile(Symbol* name,
     unsigned char* ptr = cfs->buffer();
     unsigned char* end_ptr = cfs->buffer() + cfs->length();
 
+    bool pretend_new_universe = Thread::current()->pretend_new_universe();
+    Thread::current()->set_pretend_new_universe(false);
     JvmtiExport::post_class_file_load_hook(name, class_loader, protection_domain,
                                            &ptr, &end_ptr,
                                            &cached_class_file_bytes,
                                            &cached_class_file_length);
+    Thread::current()->set_pretend_new_universe(pretend_new_universe);
 
     if (ptr != cfs->buffer()) {
       // JVMTI agent has modified class file data.
@@ -3090,6 +3102,30 @@ instanceKlassHandle ClassFileParser::parseClassFile(Symbol* name,
                        CHECK_(nullHandle));
   }
 
+  // (tw) Do not parse the full class file; only collect the super-type symbols and return.
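+  // Callers that pass a non-NULL parsed_super_symbols list only want the names of
+  // the super class and the directly implemented interfaces (presumably so the
+  // redefinition code can order classes before parsing them fully); no instanceKlass
+  // is created on this path.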
+  if (parsed_super_symbols != NULL) {
+    u2 super_class_index = cfs->get_u2_fast();
+
+    if (super_class_index != 0) {
+      parsed_super_symbols->append(cp->klass_name_at(super_class_index));
+    }
+
+    // Interfaces
+    u2 itfs_len = cfs->get_u2_fast();
+    objArrayHandle local_interfaces;
+    if (itfs_len == 0) {
+      local_interfaces = objArrayHandle(THREAD, Universe::the_empty_system_obj_array());
+    } else {
+      local_interfaces = parse_interfaces(cp, itfs_len, class_loader, protection_domain, _class_name, old_klass, CHECK_NULL);
+    }
+
+    for (int i=0; i<local_interfaces->length(); i++) {
+      oop o = local_interfaces->obj_at(i);
+      parsed_super_symbols->append(((klassOop)o)->klass_part()->name());
+    }
+    return NULL;
+  }
+
   klassOop preserve_this_klass;   // for storing result across HandleMark
 
   // release all handles when parsing is done
@@ -3130,7 +3166,11 @@ instanceKlassHandle ClassFileParser::parseClassFile(Symbol* name,
       // However, make sure it is not an array type.
       bool is_array = false;
       if (cp->tag_at(super_class_index).is_klass()) {
-        super_klass = instanceKlassHandle(THREAD, cp->resolved_klass_at(super_class_index));
+        klassOop resolved_klass = cp->resolved_klass_at(super_class_index);
+        if (!old_klass.is_null()) {
+          resolved_klass = resolved_klass->klass_part()->newest_version();
+        }
+        super_klass = instanceKlassHandle(THREAD, resolved_klass);
         if (_need_verify)
           is_array = super_klass->oop_is_array();
       } else if (_need_verify) {
@@ -3148,7 +3188,7 @@ instanceKlassHandle ClassFileParser::parseClassFile(Symbol* name,
     if (itfs_len == 0) {
       local_interfaces = objArrayHandle(THREAD, Universe::the_empty_system_obj_array());
     } else {
-      local_interfaces = parse_interfaces(cp, itfs_len, class_loader, protection_domain, _class_name, CHECK_(nullHandle));
+      local_interfaces = parse_interfaces(cp, itfs_len, class_loader, protection_domain, _class_name, old_klass, CHECK_(nullHandle));
     }
 
     u2 java_fields_count = 0;
@@ -3202,7 +3242,9 @@ instanceKlassHandle ClassFileParser::parseClassFile(Symbol* name,
                                                            protection_domain,
                                                            true,
                                                            CHECK_(nullHandle));
-
+      if (!old_klass.is_null()) {
+        k = k->klass_part()->newest_version();
+      }
       KlassHandle kh (THREAD, k);
       super_klass = instanceKlassHandle(THREAD, kh());
     }
@@ -3591,6 +3633,19 @@ instanceKlassHandle ClassFileParser::parseClassFile(Symbol* name,
       rt = REF_NONE;
     } else {
       rt = super_klass->reference_type();
+
+      // (tw) With class redefinition, the special java.lang.ref classes can also be loaded here, so set the reference type by name as well.
+      if (name == vmSymbols::java_lang_ref_Reference()) {
+        rt = REF_OTHER;
+      } else if (name == vmSymbols::java_lang_ref_SoftReference()) {
+        rt = REF_SOFT;
+      } else if (name == vmSymbols::java_lang_ref_WeakReference()) {
+        rt = REF_WEAK;
+      } else if (name == vmSymbols::java_lang_ref_FinalReference()) {
+        rt = REF_FINAL;
+      } else if (name == vmSymbols::java_lang_ref_PhantomReference()) {
+        rt = REF_PHANTOM;
+      }
     }
 
     // We can now create the basic klassOop for this klass
@@ -3599,6 +3654,7 @@ instanceKlassHandle ClassFileParser::parseClassFile(Symbol* name,
                                                 total_oop_map_count,
                                                 access_flags,
                                                 rt, host_klass,
+                                                old_klass,
                                                 CHECK_(nullHandle));
     instanceKlassHandle this_klass (THREAD, ik);
 
@@ -3691,7 +3747,7 @@ instanceKlassHandle ClassFileParser::parseClassFile(Symbol* name,
     fill_oop_maps(this_klass, nonstatic_oop_map_count, nonstatic_oop_offsets, nonstatic_oop_counts);
 
     // Fill in has_finalizer, has_vanilla_constructor, and layout_helper
-    set_precomputed_flags(this_klass);
+    set_precomputed_flags(this_klass, old_klass);
 
     // reinitialize modifiers, using the InnerClasses attribute
     int computed_modifiers = this_klass->compute_modifier_flags(CHECK_(nullHandle));
@@ -3711,6 +3767,10 @@ instanceKlassHandle ClassFileParser::parseClassFile(Symbol* name,
       check_illegal_static_method(this_klass, CHECK_(nullHandle));
     }
 
+    if (rt == REF_OTHER) {
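+      // (assumption) update_nonstatic_oop_maps adjusts java.lang.ref.Reference's oop
+      // maps so the GC handles its special reference fields via the reference
+      // processor rather than the regular maps; this used to happen once at bootstrap
+      // and is now redone whenever Reference is parsed (see the matching change in
+      // SystemDictionary::initialize_preloaded_classes).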
+      instanceRefKlass::update_nonstatic_oop_maps(ik);
+    }
+
     // Allocate mirror and initialize static fields
     java_lang_Class::create_mirror(this_klass, CHECK_(nullHandle));
 
@@ -3856,7 +3916,7 @@ void ClassFileParser::fill_oop_maps(instanceKlassHandle k,
 }
 
 
-void ClassFileParser::set_precomputed_flags(instanceKlassHandle k) {
+void ClassFileParser::set_precomputed_flags(instanceKlassHandle k, KlassHandle old_klass) {
   klassOop super = k->super();
 
   // Check if this klass has an empty finalize method (i.e. one with return bytecode only),
@@ -3864,7 +3924,9 @@ void ClassFileParser::set_precomputed_flags(instanceKlassHandle k) {
   if (!_has_empty_finalizer) {
     if (_has_finalizer ||
         (super != NULL && super->klass_part()->has_finalizer())) {
-      k->set_has_finalizer();
+      if (old_klass.is_null() || old_klass->has_finalizer()) {
+        k->set_has_finalizer();
+      }
     }
   }
 
@@ -3880,7 +3942,7 @@ void ClassFileParser::set_precomputed_flags(instanceKlassHandle k) {
 
   // Check if this klass supports the java.lang.Cloneable interface
   if (SystemDictionary::Cloneable_klass_loaded()) {
-    if (k->is_subtype_of(SystemDictionary::Cloneable_klass())) {
+    if (k->is_subtype_of(SystemDictionary::Cloneable_klass()) || k->is_subtype_of(SystemDictionary::Cloneable_klass()->klass_part()->newest_version())) {
       k->set_is_cloneable();
     }
   }
diff --git a/src/share/vm/classfile/classFileParser.hpp b/src/share/vm/classfile/classFileParser.hpp
index 314ec5e..5fca1da 100644
--- a/src/share/vm/classfile/classFileParser.hpp
+++ b/src/share/vm/classfile/classFileParser.hpp
@@ -151,6 +151,7 @@ class ClassFileParser VALUE_OBJ_CLASS_SPEC {
                                   Handle class_loader,
                                   Handle protection_domain,
                                   Symbol* class_name,
+                                  KlassHandle old_klass,
                                   TRAPS);
 
   // Field parsing
@@ -237,7 +238,7 @@ class ClassFileParser VALUE_OBJ_CLASS_SPEC {
                      unsigned int nonstatic_oop_map_count,
                      int* nonstatic_oop_offsets,
                      unsigned int* nonstatic_oop_counts);
-  void set_precomputed_flags(instanceKlassHandle k);
+  void set_precomputed_flags(instanceKlassHandle k, KlassHandle old_klass);
   objArrayHandle compute_transitive_interfaces(instanceKlassHandle super,
                                                objArrayHandle local_ifs, TRAPS);
 
@@ -349,17 +350,20 @@ class ClassFileParser VALUE_OBJ_CLASS_SPEC {
   instanceKlassHandle parseClassFile(Symbol* name,
                                      Handle class_loader,
                                      Handle protection_domain,
+                                     KlassHandle old_klass,
                                      TempNewSymbol& parsed_name,
                                      bool verify,
                                      TRAPS) {
     KlassHandle no_host_klass;
-    return parseClassFile(name, class_loader, protection_domain, no_host_klass, NULL, parsed_name, verify, THREAD);
+    return parseClassFile(name, class_loader, protection_domain, old_klass, no_host_klass, NULL, NULL, parsed_name, verify, THREAD);
   }
   instanceKlassHandle parseClassFile(Symbol* name,
                                      Handle class_loader,
                                      Handle protection_domain,
+                                     KlassHandle old_klass,
                                      KlassHandle host_klass,
                                      GrowableArray<Handle>* cp_patches,
+                                     GrowableArray<Symbol*>* parsed_super_symbols,
                                      TempNewSymbol& parsed_name,
                                      bool verify,
                                      TRAPS);
diff --git a/src/share/vm/classfile/classLoader.cpp b/src/share/vm/classfile/classLoader.cpp
index a2e61a4..450e19f 100644
--- a/src/share/vm/classfile/classLoader.cpp
+++ b/src/share/vm/classfile/classLoader.cpp
@@ -915,6 +915,7 @@ instanceKlassHandle ClassLoader::load_classfile(Symbol* h_name, TRAPS) {
     instanceKlassHandle result = parser.parseClassFile(h_name,
                                                        class_loader,
                                                        protection_domain,
+                                                       KlassHandle(),
                                                        parsed_name,
                                                        false,
                                                        CHECK_(h));
diff --git a/src/share/vm/classfile/dictionary.cpp b/src/share/vm/classfile/dictionary.cpp
index 78e76cc..d167c98 100644
--- a/src/share/vm/classfile/dictionary.cpp
+++ b/src/share/vm/classfile/dictionary.cpp
@@ -144,87 +144,10 @@ bool Dictionary::do_unloading(BoolObjectClosure* is_alive) {
       probe = *p;
       klassOop e = probe->klass();
       oop class_loader = probe->loader();
-
       instanceKlass* ik = instanceKlass::cast(e);
-      if (ik->previous_versions() != NULL) {
-        // This klass has previous versions so see what we can cleanup
-        // while it is safe to do so.
-
-        int gc_count = 0;    // leave debugging breadcrumbs
-        int live_count = 0;
-
-        // RC_TRACE macro has an embedded ResourceMark
-        RC_TRACE(0x00000200, ("unload: %s: previous version length=%d",
-          ik->external_name(), ik->previous_versions()->length()));
-
-        for (int i = ik->previous_versions()->length() - 1; i >= 0; i--) {
-          // check the previous versions array for GC'ed weak refs
-          PreviousVersionNode * pv_node = ik->previous_versions()->at(i);
-          jobject cp_ref = pv_node->prev_constant_pool();
-          assert(cp_ref != NULL, "cp ref was unexpectedly cleared");
-          if (cp_ref == NULL) {
-            delete pv_node;
-            ik->previous_versions()->remove_at(i);
-            // Since we are traversing the array backwards, we don't have to
-            // do anything special with the index.
-            continue;  // robustness
-          }
-
-          constantPoolOop pvcp = (constantPoolOop)JNIHandles::resolve(cp_ref);
-          if (pvcp == NULL) {
-            // this entry has been GC'ed so remove it
-            delete pv_node;
-            ik->previous_versions()->remove_at(i);
-            // Since we are traversing the array backwards, we don't have to
-            // do anything special with the index.
-            gc_count++;
-            continue;
-          } else {
-            RC_TRACE(0x00000200, ("unload: previous version @%d is alive", i));
-            if (is_alive->do_object_b(pvcp)) {
-              live_count++;
-            } else {
-              guarantee(false, "sanity check");
-            }
-          }
-
-          GrowableArray<jweak>* method_refs = pv_node->prev_EMCP_methods();
-          if (method_refs != NULL) {
-            RC_TRACE(0x00000200, ("unload: previous methods length=%d",
-              method_refs->length()));
-            for (int j = method_refs->length() - 1; j >= 0; j--) {
-              jweak method_ref = method_refs->at(j);
-              assert(method_ref != NULL, "weak method ref was unexpectedly cleared");
-              if (method_ref == NULL) {
-                method_refs->remove_at(j);
-                // Since we are traversing the array backwards, we don't have to
-                // do anything special with the index.
-                continue;  // robustness
-              }
-
-              methodOop method = (methodOop)JNIHandles::resolve(method_ref);
-              if (method == NULL) {
-                // this method entry has been GC'ed so remove it
-                JNIHandles::destroy_weak_global(method_ref);
-                method_refs->remove_at(j);
-              } else {
-                // RC_TRACE macro has an embedded ResourceMark
-                RC_TRACE(0x00000200,
-                  ("unload: %s(%s): prev method @%d in version @%d is alive",
-                  method->name()->as_C_string(),
-                  method->signature()->as_C_string(), j, i));
-              }
-            }
-          }
-        }
-        assert(ik->previous_versions()->length() == live_count, "sanity check");
-        RC_TRACE(0x00000200,
-          ("unload: previous version stats: live=%d, GC'ed=%d", live_count,
-          gc_count));
-      }
-
+      
       // Non-unloadable classes were handled in always_strong_oops_do
-      if (!is_strongly_reachable(class_loader, e)) {
+      if (!ik->is_redefining() && !is_strongly_reachable(class_loader, e)) {
         // Entry was not visited in phase1 (negated test from phase1)
         assert(class_loader != NULL, "unloading entry with null class loader");
         oop k_def_class_loader = ik->class_loader();
@@ -326,6 +249,7 @@ void Dictionary::classes_do(void f(klassOop)) {
   }
 }
 
+
 // Added for initialize_itable_for_klass to handle exceptions
 //   Just the classes from defining class loaders
 void Dictionary::classes_do(void f(klassOop, TRAPS), TRAPS) {
@@ -433,6 +357,33 @@ void Dictionary::add_klass(Symbol* class_name, Handle class_loader,
   add_entry(index, entry);
 }
 
+// (tw) Updates the klass entry to point to the new klassOop. Necessary only for class redefinition.
+bool Dictionary::update_klass(int index, unsigned int hash, Symbol* name, Handle loader, KlassHandle k, KlassHandle old_class) {
+
+  // There are several entries for the same class in the dictionary: one extra entry
+  // for each class loader that initiated loading through delegation (typically the
+  // parents of the defining loader), so scan the whole table and update every entry
+  // that still points to the old class.
+  bool found = false;
+  for (int index = 0; index < table_size(); index++) {
+    for (DictionaryEntry* entry = bucket(index); entry != NULL; entry = entry->next()) {
+      if (entry->klass() == old_class()) {
+        entry->set_literal(k());
+        found = true;
+      }
+    }
+  }
+
+  return found;
+}
+
+// (tw) Undo previous updates to the system dictionary
+void Dictionary::rollback_redefinition() {
+  for (int index = 0; index < table_size(); index++) {
+    for (DictionaryEntry* entry = bucket(index); entry != NULL; entry = entry->next()) {
+      if (entry->klass()->klass_part()->is_redefining()) {
+        entry->set_literal(entry->klass()->klass_part()->old_version());
+      }
+    }
+  }
+}
 
 // This routine does not lock the system dictionary.
 //
@@ -459,12 +410,21 @@ DictionaryEntry* Dictionary::get_entry(int index, unsigned int hash,
   return NULL;
 }
 
+klassOop Dictionary::intercept_for_version(klassOop k) {
+  if (k == NULL) return k;
+
+  if (k->klass_part()->is_redefining() && !Thread::current()->pretend_new_universe()) {
+    return k->klass_part()->old_version();
+  }
+
+  return k;
+}
 
 klassOop Dictionary::find(int index, unsigned int hash, Symbol* name,
                           Handle loader, Handle protection_domain, TRAPS) {
   DictionaryEntry* entry = get_entry(index, hash, name, loader);
   if (entry != NULL && entry->is_valid_protection_domain(protection_domain)) {
-    return entry->klass();
+    return intercept_for_version(entry->klass());
   } else {
     return NULL;
   }
@@ -477,7 +437,7 @@ klassOop Dictionary::find_class(int index, unsigned int hash,
   assert (index == index_for(name, loader), "incorrect index?");
 
   DictionaryEntry* entry = get_entry(index, hash, name, loader);
-  return (entry != NULL) ? entry->klass() : (klassOop)NULL;
+  return intercept_for_version((entry != NULL) ? entry->klass() : (klassOop)NULL);
 }
 
 
@@ -489,7 +449,7 @@ klassOop Dictionary::find_shared_class(int index, unsigned int hash,
   assert (index == index_for(name, Handle()), "incorrect index?");
 
   DictionaryEntry* entry = get_entry(index, hash, name, Handle());
-  return (entry != NULL) ? entry->klass() : (klassOop)NULL;
+  return intercept_for_version((entry != NULL) ? entry->klass() : (klassOop)NULL);
 }
 
 
diff --git a/src/share/vm/classfile/dictionary.hpp b/src/share/vm/classfile/dictionary.hpp
index bd33760..ea1fe3c 100644
--- a/src/share/vm/classfile/dictionary.hpp
+++ b/src/share/vm/classfile/dictionary.hpp
@@ -73,6 +73,10 @@ public:
 
   void add_klass(Symbol* class_name, Handle class_loader,KlassHandle obj);
 
+  bool update_klass(int index, unsigned int hash, Symbol* name, Handle loader, KlassHandle k, KlassHandle old_class);
+
+  void rollback_redefinition();
+
   klassOop find_class(int index, unsigned int hash,
                       Symbol* name, Handle loader);
 
@@ -105,6 +109,7 @@ public:
   bool do_unloading(BoolObjectClosure* is_alive);
 
   // Protection domains
+  static klassOop intercept_for_version(klassOop k);
   klassOop find(int index, unsigned int hash, Symbol* name,
                 Handle loader, Handle protection_domain, TRAPS);
   bool is_valid_protection_domain(int index, unsigned int hash,
diff --git a/src/share/vm/classfile/javaClasses.cpp b/src/share/vm/classfile/javaClasses.cpp
index f8b10b3..c417a29 100644
--- a/src/share/vm/classfile/javaClasses.cpp
+++ b/src/share/vm/classfile/javaClasses.cpp
@@ -621,6 +621,10 @@ klassOop java_lang_Class::as_klassOop(oop java_class) {
   assert(java_lang_Class::is_instance(java_class), "must be a Class object");
   klassOop k = klassOop(java_class->obj_field(_klass_offset));
   assert(k == NULL || k->is_klass(), "type check");
+  // Necessary to make the old verifier work.
+  if (k != NULL && Thread::current()->pretend_new_universe()) {
+    k = k->klass_part()->newest_version();
+  }
   return k;
 }
 
@@ -1541,6 +1545,7 @@ void java_lang_Throwable::fill_in_stack_trace(Handle throwable, methodHandle met
         skip_throwableInit_check = true;
       }
     }
+    method = method->newest_version();
     if (method->is_hidden()) {
       if (skip_hidden)  continue;
     }
diff --git a/src/share/vm/classfile/javaClasses.hpp b/src/share/vm/classfile/javaClasses.hpp
index b741cfa..5412831 100644
--- a/src/share/vm/classfile/javaClasses.hpp
+++ b/src/share/vm/classfile/javaClasses.hpp
@@ -213,6 +213,7 @@ class java_lang_String : AllStatic {
 
 class java_lang_Class : AllStatic {
   friend class VMStructs;
+  friend class VM_RedefineClasses;
 
  private:
   // The fake offsets are added by the class loader when java.lang.Class is loaded
@@ -248,7 +249,7 @@ class java_lang_Class : AllStatic {
   static void print_signature(oop java_class, outputStream *st);
   // Testing
   static bool is_instance(oop obj) {
-    return obj != NULL && obj->klass() == SystemDictionary::Class_klass();
+    return obj != NULL && (obj->klass()->klass_part()->newest_version() == SystemDictionary::Class_klass()->klass_part()->newest_version());
   }
   static bool is_primitive(oop java_class);
   static BasicType primitive_type(oop java_class);
diff --git a/src/share/vm/classfile/loaderConstraints.cpp b/src/share/vm/classfile/loaderConstraints.cpp
index 8650cd9..965cce2 100644
--- a/src/share/vm/classfile/loaderConstraints.cpp
+++ b/src/share/vm/classfile/loaderConstraints.cpp
@@ -449,7 +449,7 @@ void LoaderConstraintTable::verify(Dictionary* dictionary,
         if (k != NULL) {
           // We found the class in the system dictionary, so we should
           // make sure that the klassOop matches what we already have.
-          guarantee(k == probe->klass(), "klass should be in dictionary");
+          guarantee(k == probe->klass()->klass_part()->newest_version(), "klass should be in dictionary");
         } else {
           // If we don't find the class in the system dictionary, it
           // has to be in the placeholders table.
diff --git a/src/share/vm/classfile/systemDictionary.cpp b/src/share/vm/classfile/systemDictionary.cpp
index 899153a..3f64268 100644
--- a/src/share/vm/classfile/systemDictionary.cpp
+++ b/src/share/vm/classfile/systemDictionary.cpp
@@ -157,6 +157,7 @@ klassOop SystemDictionary::resolve_or_fail(Symbol* class_name, Handle class_load
     // can return a null klass
     klass = handle_resolution_exception(class_name, class_loader, protection_domain, throw_error, k_h, THREAD);
   }
+  assert(klass == NULL || klass->klass_part()->is_newest_version() || klass->klass_part()->newest_version()->klass_part()->is_redefining(), "must be");
   return klass;
 }
 
@@ -199,7 +200,7 @@ klassOop SystemDictionary::resolve_or_fail(Symbol* class_name,
 // Forwards to resolve_instance_class_or_null
 
 klassOop SystemDictionary::resolve_or_null(Symbol* class_name, Handle class_loader, Handle protection_domain, TRAPS) {
-  assert(!THREAD->is_Compiler_thread(),
+  assert(!THREAD->is_Compiler_thread() || JvmtiThreadState::state_for(JavaThread::current())->get_class_being_redefined() != NULL,
          err_msg("can not load classes with compiler thread: class=%s, classloader=%s",
                  class_name->as_C_string(),
                  class_loader.is_null() ? "null" : class_loader->klass()->klass_part()->name()->as_C_string()));
@@ -961,8 +962,10 @@ klassOop SystemDictionary::parse_stream(Symbol* class_name,
   instanceKlassHandle k = ClassFileParser(st).parseClassFile(class_name,
                                                              class_loader,
                                                              protection_domain,
+                                                             KlassHandle(),
                                                              host_klass,
                                                              cp_patches,
+                                                             NULL,
                                                              parsed_name,
                                                              true,
                                                              THREAD);
@@ -1022,7 +1025,15 @@ klassOop SystemDictionary::resolve_from_stream(Symbol* class_name,
                                                Handle protection_domain,
                                                ClassFileStream* st,
                                                bool verify,
+                                               KlassHandle old_class,
                                                TRAPS) {
+
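+  // Class definition must not race with an ongoing class redefinition, so take the
+  // redefinition lock unless this thread already holds it (presumably the case when
+  // the redefinition itself (re)loads a class and ends up here).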
+  bool redefine_classes_locked = false;
+  if (!Thread::current()->redefine_classes_mutex()->owned_by_self()) {
+    Thread::current()->redefine_classes_mutex()->lock();
+    redefine_classes_locked = true;
+  }
+  
   // Classloaders that support parallelism, e.g. bootstrap classloader,
   // or all classloaders with UnsyncloadClass do not acquire lock here
   bool DoObjectLock = true;
@@ -1050,9 +1061,14 @@ klassOop SystemDictionary::resolve_from_stream(Symbol* class_name,
   instanceKlassHandle k = ClassFileParser(st).parseClassFile(class_name,
                                                              class_loader,
                                                              protection_domain,
+                                                             old_class,
                                                              parsed_name,
                                                              verify,
                                                              THREAD);
+  if (!old_class.is_null() && !k.is_null()) {
+    k->set_redefining(true);
+    k->set_old_version(old_class());
+  }
 
   const char* pkg = "java/";
   if (!HAS_PENDING_EXCEPTION &&
@@ -1087,13 +1103,18 @@ klassOop SystemDictionary::resolve_from_stream(Symbol* class_name,
     // Add class just loaded
     // If a class loader supports parallel classloading handle parallel define requests
     // find_or_define_instance_class may return a different instanceKlass
-    if (is_parallelCapable(class_loader)) {
+    // (tw) TODO: the parallel path does not work for class redefinition; check whether this is a problem.
+    if (is_parallelCapable(class_loader) && old_class.is_null()) {
       k = find_or_define_instance_class(class_name, class_loader, k, THREAD);
     } else {
-      define_instance_class(k, THREAD);
+      define_instance_class(k, old_class, THREAD);
     }
   }
 
+  if (redefine_classes_locked) {
+    Thread::current()->redefine_classes_mutex()->unlock();
+  }
+
   // If parsing the class file or define_instance_class failed, we
   // need to remove the placeholder added on our behalf. But we
   // must make sure parsed_name is valid first (it won't be if we had
@@ -1122,7 +1143,7 @@ klassOop SystemDictionary::resolve_from_stream(Symbol* class_name,
       MutexLocker mu(SystemDictionary_lock, THREAD);
 
       klassOop check = find_class(parsed_name, class_loader);
-      assert(check == k(), "should be present in the dictionary");
+      assert((check == k() && !k->is_redefining()) || (k->is_redefining() && check == k->old_version()), "should be present in the dictionary");
 
       klassOop check2 = find_class(h_name, h_loader);
       assert(check == check2, "name inconsistancy in SystemDictionary");
@@ -1349,7 +1370,11 @@ instanceKlassHandle SystemDictionary::load_instance_class(Symbol* class_name, Ha
   }
 }
 
-void SystemDictionary::define_instance_class(instanceKlassHandle k, TRAPS) {
+void SystemDictionary::rollback_redefinition() {
+  dictionary()->rollback_redefinition();
+}
+
+void SystemDictionary::define_instance_class(instanceKlassHandle k, KlassHandle old_class, TRAPS) {
 
   Handle class_loader_h(THREAD, k->class_loader());
 
@@ -1376,13 +1401,23 @@ void SystemDictionary::define_instance_class(instanceKlassHandle k, TRAPS) {
   Symbol*  name_h = k->name();
   unsigned int d_hash = dictionary()->compute_hash(name_h, class_loader_h);
   int d_index = dictionary()->hash_to_index(d_hash);
-  check_constraints(d_index, d_hash, k, class_loader_h, true, CHECK);
+
+  // (tw) Update version of the klassOop in the system dictionary
+  // TODO: Check for thread safety!
+  if (!old_class.is_null()) {
+    bool ok = dictionary()->update_klass(d_index, d_hash, name_h, class_loader_h, k, old_class);
+    assert (ok, "must have found old class and updated!");
+  }
+  check_constraints(d_index, d_hash, k, class_loader_h, old_class.is_null(), CHECK);
+
+  if (!old_class.is_null() && TraceRedefineClasses >= 3) {
+    tty->print_cr("Class has been updated!");
+  }
 
   // Register class just loaded with class loader (placed in Vector)
   // Note we do this before updating the dictionary, as this can
   // fail with an OutOfMemoryError (if it does, we will *not* put this
   // class in the dictionary and will not update the class hierarchy).
-  if (k->class_loader() != NULL) {
+  // (tw) Only register if not redefining a class.
+  if (k->class_loader() != NULL && old_class.is_null()) {
     methodHandle m(THREAD, Universe::loader_addClass_method());
     JavaValue result(T_VOID);
     JavaCallArguments args(class_loader_h);
@@ -1408,8 +1443,9 @@ void SystemDictionary::define_instance_class(instanceKlassHandle k, TRAPS) {
   }
   k->eager_initialize(THREAD);
 
+  // (tw) Only notify jvmti if not redefining a class.
   // notify jvmti
-  if (JvmtiExport::should_post_class_load()) {
+  if (JvmtiExport::should_post_class_load() && old_class.is_null()) {
       assert(THREAD->is_Java_thread(), "thread->is_Java_thread()");
       JvmtiExport::post_class_load((JavaThread *) THREAD, k());
 
@@ -1482,7 +1518,7 @@ instanceKlassHandle SystemDictionary::find_or_define_instance_class(Symbol* clas
     }
   }
 
-  define_instance_class(k, THREAD);
+  define_instance_class(k, KlassHandle(), THREAD);
 
   Handle linkage_exception = Handle(); // null handle
 
@@ -1612,6 +1648,14 @@ void SystemDictionary::add_to_hierarchy(instanceKlassHandle k, TRAPS) {
   Universe::flush_dependents_on(k);
 }
 
+// (tw) Remove from hierarchy - Undo add_to_hierarchy.
+void SystemDictionary::remove_from_hierarchy(instanceKlassHandle k) {
+  assert(k.not_null(), "just checking");
+
+  k->remove_from_sibling_list();
+
+  // TODO (tw): Remove from interfaces.
+}
 
 // ----------------------------------------------------------------------------
 // GC support
@@ -1869,9 +1913,12 @@ void SystemDictionary::initialize_preloaded_classes(TRAPS) {
 
   // Preload ref klasses and set reference types
   instanceKlass::cast(WK_KLASS(Reference_klass))->set_reference_type(REF_OTHER);
-  instanceRefKlass::update_nonstatic_oop_maps(WK_KLASS(Reference_klass));
+
+  // (tw) This is now done in parseClassFile in order to support class redefinition
+  // instanceRefKlass::update_nonstatic_oop_maps(WK_KLASS(Reference_klass));
 
   initialize_wk_klasses_through(WK_KLASS_ENUM_NAME(PhantomReference_klass), scan, CHECK);
+  // TODO(tw): Check whether the following calls are also unnecessary now.
   instanceKlass::cast(WK_KLASS(SoftReference_klass))->set_reference_type(REF_SOFT);
   instanceKlass::cast(WK_KLASS(WeakReference_klass))->set_reference_type(REF_WEAK);
   instanceKlass::cast(WK_KLASS(FinalReference_klass))->set_reference_type(REF_FINAL);
@@ -1955,7 +2002,7 @@ void SystemDictionary::check_constraints(int d_index, unsigned int d_hash,
       // also holds array classes
 
       assert(check->klass_part()->oop_is_instance(), "noninstance in systemdictionary");
-      if ((defining == true) || (k() != check)) {
+      if ((defining == true) && ((k() != check) && k->old_version() != check)) {
         linkage_error = "loader (instance of  %s): attempted  duplicate class "
           "definition for name: \"%s\"";
       } else {
@@ -2602,8 +2649,10 @@ void SystemDictionary::verify_obj_klass_present(Handle obj,
       name = find_placeholder(class_name, class_loader);
     }
   }
-  guarantee(probe != NULL || name != NULL,
-            "Loaded klasses should be in SystemDictionary");
+  // (tw) Relaxed assertion to allow different class versions. Also allow redefining classes to lie around (because of rollback).
+  guarantee(probe != NULL &&
+            (!probe->is_klass() || (!((klassOop)(obj()))->klass_part()->is_redefining()) || ((klassOop)probe)->klass_part()->is_same_or_older_version((klassOop)(obj()))) || ((klassOop)(obj()))->klass_part()->is_redefining(),
+                     "Loaded klasses should be in SystemDictionary");
 }
 
 // utility function for posting class load event
diff --git a/src/share/vm/classfile/systemDictionary.hpp b/src/share/vm/classfile/systemDictionary.hpp
index adf82e5..00cf392 100644
--- a/src/share/vm/classfile/systemDictionary.hpp
+++ b/src/share/vm/classfile/systemDictionary.hpp
@@ -268,7 +268,7 @@ public:
   // Resolve from stream (called by jni_DefineClass and JVM_DefineClass)
   static klassOop resolve_from_stream(Symbol* class_name, Handle class_loader,
                                       Handle protection_domain,
-                                      ClassFileStream* st, bool verify, TRAPS);
+                                      ClassFileStream* st, bool verify, KlassHandle old_class, TRAPS);
 
   // Lookup an already loaded class. If not found NULL is returned.
   static klassOop find(Symbol* class_name, Handle class_loader, Handle protection_domain, TRAPS);
@@ -343,6 +343,8 @@ public:
   // System loader lock
   static oop system_loader_lock()           { return _system_loader_lock_obj; }
 
+  // Remove link to hierarchy
+  static void remove_from_hierarchy(instanceKlassHandle k);
 private:
   //    Traverses preloaded oops: various system classes.  These are
   //    guaranteed to be in the perm gen.
@@ -415,6 +417,8 @@ public:
     initialize_wk_klasses_until((WKID) limit, start_id, THREAD);
   }
 
+  static void rollback_redefinition();
+
 public:
   #define WK_KLASS_DECLARE(name, symbol, option) \
     static klassOop name() { return check_klass_##option(_well_known_klasses[WK_KLASS_ENUM_NAME(name)]); }
@@ -596,7 +600,7 @@ private:
   // after waiting, but before reentering SystemDictionary_lock
   // to preserve lock order semantics.
   static void double_lock_wait(Handle lockObject, TRAPS);
-  static void define_instance_class(instanceKlassHandle k, TRAPS);
+  static void define_instance_class(instanceKlassHandle k, KlassHandle old_class, TRAPS);
   static instanceKlassHandle find_or_define_instance_class(Symbol* class_name,
                                                 Handle class_loader,
                                                 instanceKlassHandle k, TRAPS);
diff --git a/src/share/vm/classfile/verifier.cpp b/src/share/vm/classfile/verifier.cpp
index da188bb..097c50c 100644
--- a/src/share/vm/classfile/verifier.cpp
+++ b/src/share/vm/classfile/verifier.cpp
@@ -106,7 +106,7 @@ bool Verifier::relax_verify_for(oop loader) {
   return !need_verify;
 }
 
-bool Verifier::verify(instanceKlassHandle klass, Verifier::Mode mode, bool should_verify_class, TRAPS) {
+bool Verifier::verify(instanceKlassHandle klass, Verifier::Mode mode, bool should_verify_class, bool may_use_old_verifier, TRAPS) {
   HandleMark hm;
   ResourceMark rm(THREAD);
 
@@ -117,6 +117,7 @@ bool Verifier::verify(instanceKlassHandle klass, Verifier::Mode mode, bool shoul
 
   const char* klassName = klass->external_name();
   bool can_failover = FailOverToOldVerifier &&
+      may_use_old_verifier &&
       klass->major_version() < NOFAILOVER_MAJOR_VERSION;
 
   // If the class should be verified, first see if we can use the split
@@ -138,6 +139,7 @@ bool Verifier::verify(instanceKlassHandle klass, Verifier::Mode mode, bool shoul
           tty->print_cr(
             "Fail over class verification to old verifier for: %s", klassName);
         }
+        assert(may_use_old_verifier, "");
         exception_name = inference_verify(
           klass, message_buffer, message_buffer_len, THREAD);
       }
@@ -145,6 +147,7 @@ bool Verifier::verify(instanceKlassHandle klass, Verifier::Mode mode, bool shoul
         exception_message = split_verifier.exception_message();
       }
     } else {
+      assert(may_use_old_verifier, "");
       exception_name = inference_verify(
           klass, message_buffer, message_buffer_len, THREAD);
     }
@@ -210,7 +213,7 @@ bool Verifier::is_eligible_for_verification(instanceKlassHandle klass, bool shou
     // NOTE: this is called too early in the bootstrapping process to be
     // guarded by Universe::is_gte_jdk14x_version()/UseNewReflection.
     (refl_magic_klass == NULL ||
-     !klass->is_subtype_of(refl_magic_klass) ||
+     !(klass->is_subtype_of(refl_magic_klass) || klass->is_subtype_of(refl_magic_klass->klass_part()->newest_version())) ||
      VerifyReflectionBytecodes)
   );
 }
@@ -517,7 +520,7 @@ void ErrorContext::stackmap_details(outputStream* ss, methodOop method) const {
 
 ClassVerifier::ClassVerifier(
     instanceKlassHandle klass, TRAPS)
-    : _thread(THREAD), _exception_type(NULL), _message(NULL), _klass(klass) {
+    : _thread(THREAD), _exception_type(NULL), _message(NULL), _klass(klass->newest_version()), _klass_to_verify(klass) {
   _this_type = VerificationType::reference_type(klass->name());
   // Create list to hold symbols in reference area.
   _symbols = new GrowableArray<Symbol*>(100, 0, NULL);
@@ -547,7 +550,7 @@ void ClassVerifier::verify_class(TRAPS) {
       _klass->external_name());
   }
 
-  objArrayHandle methods(THREAD, _klass->methods());
+  objArrayHandle methods(THREAD, _klass_to_verify->methods());
   int num_methods = methods->length();
 
   for (int index = 0; index < num_methods; index++) {
@@ -2444,7 +2447,8 @@ void ClassVerifier::verify_invoke_instructions(
         VerificationType stack_object_type =
           current_frame->pop_stack(ref_class_type, CHECK_VERIFY(this));
         if (current_type() != stack_object_type) {
-          assert(cp->cache() == NULL, "not rewritten yet");
+          // (tw) TODO: Check if relaxing the following assertion is correct. For class redefinition we might call the verifier twice.
+          //assert(cp->cache() == NULL, "not rewritten yet");
           Symbol* ref_class_name =
             cp->klass_name_at(cp->klass_ref_index_at(index));
           // See the comments in verify_field_instructions() for
diff --git a/src/share/vm/classfile/verifier.hpp b/src/share/vm/classfile/verifier.hpp
index 4457f4a..b1b96f2 100644
--- a/src/share/vm/classfile/verifier.hpp
+++ b/src/share/vm/classfile/verifier.hpp
@@ -47,7 +47,7 @@ class Verifier : AllStatic {
    * Otherwise, no exception is thrown and the return indicates the
    * error.
    */
-  static bool verify(instanceKlassHandle klass, Mode mode, bool should_verify_class, TRAPS);
+  static bool verify(instanceKlassHandle klass, Mode mode, bool should_verify_class, bool may_use_old_verifier, TRAPS);
 
   // Return false if the class is loaded by the bootstrap loader,
   // or if defineClass was called requesting skipping verification
@@ -256,7 +256,10 @@ class ClassVerifier : public StackObj {
 
   ErrorContext _error_context;  // contains information about an error
 
+public:
   void verify_method(methodHandle method, TRAPS);
+
+private:
   char* generate_code_data(methodHandle m, u4 code_length, TRAPS);
   void verify_exception_handler_table(u4 code_length, char* code_data,
                                       int& min, int& max, TRAPS);
@@ -329,6 +332,7 @@ class ClassVerifier : public StackObj {
 
   VerificationType object_type() const;
 
+  instanceKlassHandle _klass_to_verify;
   instanceKlassHandle _klass;  // the class being verified
   methodHandle        _method; // current method being verified
   VerificationType    _this_type; // the verification type of the current class
diff --git a/src/share/vm/code/nmethod.cpp b/src/share/vm/code/nmethod.cpp
index 21c9413..59f5f7e 100644
--- a/src/share/vm/code/nmethod.cpp
+++ b/src/share/vm/code/nmethod.cpp
@@ -2074,15 +2074,14 @@ bool nmethod::is_evol_dependent_on(klassOop dependee) {
       methodOop method = deps.method_argument(0);
       for (int j = 0; j < dependee_methods->length(); j++) {
         if ((methodOop) dependee_methods->obj_at(j) == method) {
-          // RC_TRACE macro has an embedded ResourceMark
-          RC_TRACE(0x01000000,
-            ("Found evol dependency of nmethod %s.%s(%s) compile_id=%d on method %s.%s(%s)",
+          ResourceMark rm(Thread::current());
+          TRACE_RC3("Found evol dependency of nmethod %s.%s(%s) compile_id=%d on method %s.%s(%s)",
             _method->method_holder()->klass_part()->external_name(),
             _method->name()->as_C_string(),
             _method->signature()->as_C_string(), compile_id(),
             method->method_holder()->klass_part()->external_name(),
             method->name()->as_C_string(),
-            method->signature()->as_C_string()));
+            method->signature()->as_C_string());
           if (TraceDependencies || LogCompilation)
             deps.log_dependency(dependee);
           return true;
diff --git a/src/share/vm/compiler/compileBroker.cpp b/src/share/vm/compiler/compileBroker.cpp
index 0feca89..1c4b014 100644
--- a/src/share/vm/compiler/compileBroker.cpp
+++ b/src/share/vm/compiler/compileBroker.cpp
@@ -1592,6 +1592,9 @@ void CompileBroker::compiler_thread_loop() {
 
       // Never compile a method if breakpoints are present in it
       if (method()->number_of_breakpoints() == 0) {
+        // (tw) Obtain a compilation lock. Class redefinition requires that there is no compilation in parallel.
+        thread->compilation_mutex()->lock();
+        thread->set_should_bailout(false);
         // Compile the method.
         if ((UseCompiler || AlwaysCompileLoopMethods) && CompileBroker::should_compile_new_jobs()) {
 #ifdef COMPILER1
@@ -1615,6 +1618,7 @@ void CompileBroker::compiler_thread_loop() {
           // After compilation is disabled, remove remaining methods from queue
           method->clear_queued_for_compilation();
         }
+        thread->compilation_mutex()->unlock();
       }
     }
   }
@@ -1780,7 +1784,11 @@ void CompileBroker::invoke_compiler_on_method(CompileTask* task) {
       //assert(false, "compiler should always document failure");
       // The compiler elected, without comment, not to register a result.
       // Do not attempt further compilations of this method.
-      ci_env.record_method_not_compilable("compile failed", !TieredCompilation);
+      if (((CompilerThread *)Thread::current())->should_bailout()) {
+        ci_env.record_failure("compile externally aborted");
+      } else {
+        ci_env.record_method_not_compilable("compile failed");
+      }
     }
 
     // Copy this bit to the enclosing block:
diff --git a/src/share/vm/gc_implementation/concurrentMarkSweep/compactibleFreeListSpace.cpp b/src/share/vm/gc_implementation/concurrentMarkSweep/compactibleFreeListSpace.cpp
index b0c9ec8..7feadf9 100644
--- a/src/share/vm/gc_implementation/concurrentMarkSweep/compactibleFreeListSpace.cpp
+++ b/src/share/vm/gc_implementation/concurrentMarkSweep/compactibleFreeListSpace.cpp
@@ -162,6 +162,13 @@ CompactibleFreeListSpace::CompactibleFreeListSpace(BlockOffsetSharedArray* bs,
   }
 }
 
+
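+// Presumably a hook for the redefinition-aware variant of SCAN_AND_FORWARD: this
+// space passes `false` for the new flag below, so objects never change size here
+// and this path should be unreachable.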
+HeapWord* CompactibleFreeListSpace::forward_compact_top(size_t size,
+                                    CompactPoint* cp, HeapWord* compact_top) {
+  ShouldNotReachHere();
+  return NULL;
+}
+
 // Like CompactibleSpace forward() but always calls cross_threshold() to
 // update the block offset table.  Removed initialize_threshold call because
 // CFLS does not use a block offset array for contiguous spaces.
@@ -2118,7 +2125,7 @@ bool CompactibleFreeListSpace::should_concurrent_collect() const {
 // Support for compaction
 
 void CompactibleFreeListSpace::prepare_for_compaction(CompactPoint* cp) {
-  SCAN_AND_FORWARD(cp,end,block_is_obj,block_size);
+  SCAN_AND_FORWARD(cp,end,block_is_obj,block_size,false);
   // prepare_for_compaction() uses the space between live objects
   // so that later phase can skip dead space quickly.  So verification
   // of the free lists doesn't work after.
@@ -2139,7 +2146,7 @@ void CompactibleFreeListSpace::adjust_pointers() {
 }
 
 void CompactibleFreeListSpace::compact() {
-  SCAN_AND_COMPACT(obj_size);
+  SCAN_AND_COMPACT(obj_size, false);
 }
 
 // fragmentation_metric = 1 - [sum of (fbs**2) / (sum of fbs)**2]
diff --git a/src/share/vm/gc_implementation/concurrentMarkSweep/compactibleFreeListSpace.hpp b/src/share/vm/gc_implementation/concurrentMarkSweep/compactibleFreeListSpace.hpp
index 3b7bb9a..de7e54b 100644
--- a/src/share/vm/gc_implementation/concurrentMarkSweep/compactibleFreeListSpace.hpp
+++ b/src/share/vm/gc_implementation/concurrentMarkSweep/compactibleFreeListSpace.hpp
@@ -149,6 +149,7 @@ class CompactibleFreeListSpace: public CompactibleSpace {
 
   // Support for compacting cms
   HeapWord* cross_threshold(HeapWord* start, HeapWord* end);
+  HeapWord* forward_compact_top(size_t size, CompactPoint* cp, HeapWord* compact_top);
   HeapWord* forward(oop q, size_t size, CompactPoint* cp, HeapWord* compact_top);
 
   // Initialization helpers.
diff --git a/src/share/vm/gc_implementation/shared/markSweep.cpp b/src/share/vm/gc_implementation/shared/markSweep.cpp
index 29841d8..d1386c7 100644
--- a/src/share/vm/gc_implementation/shared/markSweep.cpp
+++ b/src/share/vm/gc_implementation/shared/markSweep.cpp
@@ -32,6 +32,8 @@
 #include "oops/objArrayKlass.inline.hpp"
 #include "oops/oop.inline.hpp"
 
+GrowableArray<oop>*     MarkSweep::_rescued_oops       = NULL;
+
 Stack<oop, mtGC>              MarkSweep::_marking_stack;
 Stack<DataLayout*, mtGC>      MarkSweep::_revisit_mdo_stack;
 Stack<Klass*, mtGC>           MarkSweep::_revisit_klass_stack;
@@ -357,3 +359,97 @@ void MarkSweep::trace(const char* msg) {
 }
 
 #endif
+
+// (tw) Copy the rescued objects to their destination address after compaction.
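+// The rescued objects were saved into resource-allocated buffers (hence the
+// FREE_RESOURCE_ARRAY below), presumably during the forwarding phase when an
+// instance of a redefined class could not simply be compacted in place.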
+void MarkSweep::copy_rescued_objects_back() {
+
+  if (_rescued_oops != NULL) {
+
+    for (int i=0; i<_rescued_oops->length(); i++) {
+      oop rescued_obj = _rescued_oops->at(i);
+
+      int size = rescued_obj->size();
+      oop new_obj = rescued_obj->forwardee();
+
+      assert(rescued_obj->blueprint()->new_version() != NULL, "just checking");
+
+      if (rescued_obj->blueprint()->new_version()->klass_part()->update_information() != NULL) {
+        MarkSweep::update_fields(rescued_obj, new_obj);
+      } else {
+        rescued_obj->set_klass_no_check(rescued_obj->blueprint()->new_version());
+        Copy::aligned_disjoint_words((HeapWord*)rescued_obj, (HeapWord*)new_obj, size);
+      }
+
+      FREE_RESOURCE_ARRAY(HeapWord, rescued_obj, size);
+
+      new_obj->init_mark();
+      assert(new_obj->is_oop(), "must be a valid oop");
+    }
+    _rescued_oops->clear();
+    _rescued_oops = NULL;
+  }
+}
+
+// (tw) Update instances of a class whose fields changed.
+void MarkSweep::update_fields(oop q, oop new_location) {
+
+  assert(q->blueprint()->new_version() != NULL, "class of old object must have new version");
+
+  klassOop old_klass_oop = q->klass();
+  klassOop new_klass_oop = q->blueprint()->new_version();
+
+  instanceKlass *old_klass = instanceKlass::cast(old_klass_oop);
+  instanceKlass *new_klass = instanceKlass::cast(new_klass_oop);
+
+  int size = q->size_given_klass(old_klass);
+  int new_size = q->size_given_klass(new_klass);
+  
+  oop tmp_obj = q;
+
+  // If the copy runs backwards and the old and new memory ranges overlap, save the object to a temporary buffer first.
+  if (new_klass_oop->klass_part()->is_copying_backwards()) {
+    if (((HeapWord *)q >= (HeapWord *)new_location && (HeapWord *)q < (HeapWord *)new_location + new_size) || 
+        ((HeapWord *)new_location >= (HeapWord *)q && (HeapWord *)new_location < (HeapWord *)q + size)) {
+       tmp_obj = (oop)resource_allocate_bytes(size * HeapWordSize);
+       Copy::aligned_disjoint_words((HeapWord*)q, (HeapWord*)tmp_obj, size);
+    }
+  }
+
+  tmp_obj->set_klass_no_check(new_klass_oop);
+  int *cur = new_klass_oop->klass_part()->update_information();
+  assert(cur != NULL, "just checking");
+  MarkSweep::update_fields(new_location, tmp_obj, cur);
+  
+  if (tmp_obj != q) {
+    FREE_RESOURCE_ARRAY(HeapWord, tmp_obj, size);
+  }
+}
+ 
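+// Applies a redefined class's update_information to copy an object from its old
+// field layout (tmp_obj) into its new layout at new_location. The information is a
+// zero-terminated int stream: a positive value is a byte count to copy, followed by
+// the source offset in the old object; a negative value is a byte count to zero-fill
+// (presumably fields that are new in this class version).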
+void MarkSweep::update_fields(oop new_location, oop tmp_obj, int *cur) {
+  assert(cur != NULL, "just checking");
+  char* to = (char*)new_location;
+  while (*cur != 0) {
+    int size = *cur;
+    if (size > 0) {
+      cur++;
+      int offset = *cur;
+      HeapWord* from = (HeapWord*)(((char *)tmp_obj) + offset);
+      if (size == HeapWordSize) {
+        *((HeapWord*)to) = *from;
+      } else if (size == HeapWordSize * 2) {
+        *((HeapWord*)to) = *from;
+        *(((HeapWord*)to) + 1) = *(from + 1);
+      } else {
+        Copy::conjoint_jbytes(from, to, size);
+      }
+      to += size;
+      cur++;
+    } else {
+      assert(size < 0, "");
+      int skip = -*cur;
+      Copy::fill_to_bytes(to, skip, 0);
+      to += skip;
+      cur++;
+    }
+  }
+}
diff --git a/src/share/vm/gc_implementation/shared/markSweep.hpp b/src/share/vm/gc_implementation/shared/markSweep.hpp
index eb8252c..40118db 100644
--- a/src/share/vm/gc_implementation/shared/markSweep.hpp
+++ b/src/share/vm/gc_implementation/shared/markSweep.hpp
@@ -117,8 +117,12 @@ class MarkSweep : AllStatic {
   friend class AdjustPointerClosure;
   friend class KeepAliveClosure;
   friend class VM_MarkSweep;
+  friend class GenMarkSweep;
   friend void marksweep_init();
 
+public:
+  static GrowableArray<oop>*             _rescued_oops;
+
   //
   // Vars
   //
@@ -208,6 +212,9 @@ class MarkSweep : AllStatic {
   template <class T> static inline void mark_and_push(T* p);
   static inline void push_objarray(oop obj, size_t index);
 
+  static void copy_rescued_objects_back();
+  static void update_fields(oop q, oop new_location);
+  static void update_fields(oop new_location, oop tmp_obj, int *cur);
   static void follow_stack();   // Empty marking stack.
 
   static void preserve_mark(oop p, markOop mark);
diff --git a/src/share/vm/interpreter/interpreterRuntime.cpp b/src/share/vm/interpreter/interpreterRuntime.cpp
index 32c0bdb..448d673 100644
--- a/src/share/vm/interpreter/interpreterRuntime.cpp
+++ b/src/share/vm/interpreter/interpreterRuntime.cpp
@@ -402,7 +402,7 @@ IRT_ENTRY(address, InterpreterRuntime::exception_handler_for_exception(JavaThrea
     assert(h_exception.not_null(), "NULL exceptions should be handled by athrow");
     assert(h_exception->is_oop(), "just checking");
     // Check that exception is a subclass of Throwable, otherwise we have a VerifyError
-    if (!(h_exception->is_a(SystemDictionary::Throwable_klass()))) {
+    if (!(h_exception->is_a(SystemDictionary::Throwable_klass()->klass_part()->newest_version())) && !(h_exception->is_a(SystemDictionary::Throwable_klass()))) {
       if (ExitVMOnVerifyError) vm_exit(-1);
       ShouldNotReachHere();
     }
diff --git a/src/share/vm/interpreter/linkResolver.cpp b/src/share/vm/interpreter/linkResolver.cpp
index b17f405..1c96783 100644
--- a/src/share/vm/interpreter/linkResolver.cpp
+++ b/src/share/vm/interpreter/linkResolver.cpp
@@ -153,8 +153,8 @@ void CallInfo::set_common(KlassHandle resolved_klass, KlassHandle selected_klass
 // Klass resolution
 
 void LinkResolver::check_klass_accessability(KlassHandle ref_klass, KlassHandle sel_klass, TRAPS) {
-  if (!Reflection::verify_class_access(ref_klass->as_klassOop(),
-                                       sel_klass->as_klassOop(),
+  if (!Reflection::verify_class_access(ref_klass->as_klassOop()->klass_part()->newest_version(),
+                                       sel_klass->as_klassOop()->klass_part()->newest_version(),
                                        true)) {
     ResourceMark rm(THREAD);
     Exceptions::fthrow(
@@ -338,7 +338,7 @@ void LinkResolver::check_method_accessability(KlassHandle ref_klass,
   // We'll check for the method name first, as that's most likely
   // to be false (so we'll short-circuit out of these tests).
   if (sel_method->name() == vmSymbols::clone_name() &&
-      sel_klass() == SystemDictionary::Object_klass() &&
+      sel_klass()->klass_part()->newest_version() == SystemDictionary::Object_klass()->klass_part()->newest_version() &&
       resolved_klass->oop_is_array()) {
     // We need to change "protected" to "public".
     assert(flags.is_protected(), "clone not protected?");
@@ -634,7 +634,7 @@ void LinkResolver::resolve_field(FieldAccessInfo& result, constantPoolHandle poo
   }
 
   // Final fields can only be accessed from its own class.
-  if (is_put && fd.access_flags().is_final() && sel_klass() != pool->pool_holder()) {
+  if (is_put && fd.access_flags().is_final() && sel_klass() != pool->pool_holder()->klass_part()->active_version() && sel_klass() != pool->pool_holder()) {
     THROW(vmSymbols::java_lang_IllegalAccessError());
   }
 
@@ -839,7 +839,7 @@ void LinkResolver::resolve_virtual_call(CallInfo& result, Handle recv, KlassHand
                                         bool check_access, bool check_null_and_abstract, TRAPS) {
   methodHandle resolved_method;
   linktime_resolve_virtual_method(resolved_method, resolved_klass, method_name, method_signature, current_klass, check_access, CHECK);
-  runtime_resolve_virtual_method(result, resolved_method, resolved_klass, recv, receiver_klass, check_null_and_abstract, CHECK);
+  runtime_resolve_virtual_method(result, resolved_method, resolved_klass, recv, receiver_klass, current_klass, check_null_and_abstract, CHECK);
 }
 
 // throws linktime exceptions
@@ -869,6 +869,7 @@ void LinkResolver::runtime_resolve_virtual_method(CallInfo& result,
                                                   KlassHandle resolved_klass,
                                                   Handle recv,
                                                   KlassHandle recv_klass,
+                                                  KlassHandle current_klass,
                                                   bool check_null_and_abstract,
                                                   TRAPS) {
 
@@ -917,6 +918,9 @@ void LinkResolver::runtime_resolve_virtual_method(CallInfo& result,
       // recv_klass might be an arrayKlassOop but all vtables start at
       // the same place. The cast is to avoid virtual call and assertion.
       instanceKlass* inst = (instanceKlass*)recv_klass()->klass_part();
+
+      // (tw) Check that the receiver is a subtype of the holder of the resolved method.
+      assert(inst->is_subtype_of(resolved_method->method_holder()), "receiver and resolved method holder are inconsistent");
       selected_method = methodHandle(THREAD, inst->method_at_vtable(vtable_index));
     }
   }
diff --git a/src/share/vm/interpreter/linkResolver.hpp b/src/share/vm/interpreter/linkResolver.hpp
index dfd74f9..6ca1b54 100644
--- a/src/share/vm/interpreter/linkResolver.hpp
+++ b/src/share/vm/interpreter/linkResolver.hpp
@@ -110,7 +110,8 @@ class CallInfo: public LinkInfo {
 // It does all necessary link-time checks & throws exceptions if necessary.
 
 class LinkResolver: AllStatic {
- private:
+private:
+  static void lookup_method                     (methodHandle& result, KlassHandle resolved_klass, Symbol* name, Symbol* signature, bool is_interface, KlassHandle current_klass, TRAPS);
   static void lookup_method_in_klasses          (methodHandle& result, KlassHandle klass, Symbol* name, Symbol* signature, TRAPS);
   static void lookup_instance_method_in_klasses (methodHandle& result, KlassHandle klass, Symbol* name, Symbol* signature, TRAPS);
   static void lookup_method_in_interfaces       (methodHandle& result, KlassHandle klass, Symbol* name, Symbol* signature, TRAPS);
@@ -133,7 +134,7 @@ class LinkResolver: AllStatic {
   static void linktime_resolve_interface_method (methodHandle& resolved_method, KlassHandle resolved_klass, Symbol* method_name, Symbol* method_signature, KlassHandle current_klass, bool check_access, TRAPS);
 
   static void runtime_resolve_special_method    (CallInfo& result, methodHandle resolved_method, KlassHandle resolved_klass, KlassHandle current_klass, bool check_access, TRAPS);
-  static void runtime_resolve_virtual_method    (CallInfo& result, methodHandle resolved_method, KlassHandle resolved_klass, Handle recv, KlassHandle recv_klass, bool check_null_and_abstract, TRAPS);
+  static void runtime_resolve_virtual_method    (CallInfo& result, methodHandle resolved_method, KlassHandle resolved_klass, Handle recv, KlassHandle recv_klass, KlassHandle current_klass, bool check_null_and_abstract, TRAPS);
   static void runtime_resolve_interface_method  (CallInfo& result, methodHandle resolved_method, KlassHandle resolved_klass, Handle recv, KlassHandle recv_klass, bool check_null_and_abstract, TRAPS);
 
   static void check_field_accessability   (KlassHandle ref_klass, KlassHandle resolved_klass, KlassHandle sel_klass, fieldDescriptor& fd, TRAPS);
diff --git a/src/share/vm/interpreter/oopMapCache.cpp b/src/share/vm/interpreter/oopMapCache.cpp
index 01d5753..6816b3a 100644
--- a/src/share/vm/interpreter/oopMapCache.cpp
+++ b/src/share/vm/interpreter/oopMapCache.cpp
@@ -536,9 +536,9 @@ void OopMapCache::flush_obsolete_entries() {
     if (!_array[i].is_empty() && _array[i].method()->is_old()) {
       // Cache entry is occupied by an old redefined method and we don't want
       // to pin it down so flush the entry.
-      RC_TRACE(0x08000000, ("flush: %s(%s): cached entry @%d",
+      TRACE_RC3("flush: %s(%s): cached entry @%d",
         _array[i].method()->name()->as_C_string(),
-        _array[i].method()->signature()->as_C_string(), i));
+        _array[i].method()->signature()->as_C_string(), i);
 
       _array[i].flush();
     }
diff --git a/src/share/vm/memory/genMarkSweep.cpp b/src/share/vm/memory/genMarkSweep.cpp
index 76e18d8..6af7c14 100644
--- a/src/share/vm/memory/genMarkSweep.cpp
+++ b/src/share/vm/memory/genMarkSweep.cpp
@@ -421,6 +421,7 @@ void GenMarkSweep::mark_sweep_phase4() {
   // in the same order in phase2, phase3 and phase4. We don't quite do that
   // here (perm_gen first rather than last), so we tell the validate code
   // to use a higher index (saved from phase2) when verifying perm_gen.
+  assert(_rescued_oops == NULL, "must be empty before processing");
   GenCollectedHeap* gch = GenCollectedHeap::heap();
   Generation* pg = gch->perm_gen();
 
@@ -433,10 +434,14 @@ void GenMarkSweep::mark_sweep_phase4() {
 
   VALIDATE_MARK_SWEEP_ONLY(reset_live_oop_tracking(false));
 
+  MarkSweep::copy_rescued_objects_back();
+
   GenCompactClosure blk;
   gch->generation_iterate(&blk, true);
 
   VALIDATE_MARK_SWEEP_ONLY(compaction_complete());
 
+  MarkSweep::copy_rescued_objects_back();
+
   pg->post_compact(); // Shared spaces verification.
 }
diff --git a/src/share/vm/memory/iterator.hpp b/src/share/vm/memory/iterator.hpp
index b5f8e0e..856cfce 100644
--- a/src/share/vm/memory/iterator.hpp
+++ b/src/share/vm/memory/iterator.hpp
@@ -101,6 +101,12 @@ class OopClosure : public Closure {
 #endif
 };
 
+class OopClosureNoHeader : public OopClosure {
+public:
+  // Returns false: this closure is never invoked on the header klass field.
+  bool do_header() { return false; } // Note that this is non-virtual.
+};
+
 // ObjectClosure is used for iterating through an object space
 
 class ObjectClosure : public Closure {
diff --git a/src/share/vm/memory/oopFactory.cpp b/src/share/vm/memory/oopFactory.cpp
index def88cc..016d7eb 100644
--- a/src/share/vm/memory/oopFactory.cpp
+++ b/src/share/vm/memory/oopFactory.cpp
@@ -129,11 +129,11 @@ klassOop oopFactory::new_instanceKlass(Symbol* name, int vtable_len, int itable_
                                        unsigned int nonstatic_oop_map_count,
                                        AccessFlags access_flags,
                                        ReferenceType rt,
-                                       KlassHandle host_klass, TRAPS) {
+                                       KlassHandle host_klass, KlassHandle old_klass, TRAPS) {
   instanceKlassKlass* ikk = instanceKlassKlass::cast(Universe::instanceKlassKlassObj());
   return ikk->allocate_instance_klass(name, vtable_len, itable_len,
                                       static_field_size, nonstatic_oop_map_count,
-                                      access_flags, rt, host_klass, CHECK_NULL);
+                                      access_flags, rt, host_klass, old_klass, CHECK_NULL);
 }
 
 
diff --git a/src/share/vm/memory/oopFactory.hpp b/src/share/vm/memory/oopFactory.hpp
index e7e22d4..ce39ada 100644
--- a/src/share/vm/memory/oopFactory.hpp
+++ b/src/share/vm/memory/oopFactory.hpp
@@ -80,7 +80,7 @@ class oopFactory: AllStatic {
                                            unsigned int nonstatic_oop_map_count,
                                            AccessFlags access_flags,
                                            ReferenceType rt,
-                                           KlassHandle host_klass, TRAPS);
+                                           KlassHandle host_klass, KlassHandle old_klass, TRAPS);
 
   // Methods
 private:
diff --git a/src/share/vm/memory/space.cpp b/src/share/vm/memory/space.cpp
index f97bc34..c8563b2 100644
--- a/src/share/vm/memory/space.cpp
+++ b/src/share/vm/memory/space.cpp
@@ -378,9 +378,8 @@ void CompactibleSpace::clear(bool mangle_space) {
   _compaction_top = bottom();
 }
 
-HeapWord* CompactibleSpace::forward(oop q, size_t size,
-                                    CompactPoint* cp, HeapWord* compact_top) {
-  // q is alive
+// (tw) Calculates the compact_top that will be used for placing the next object of the given size on the heap.
+HeapWord* CompactibleSpace::forward_compact_top(size_t size, CompactPoint* cp, HeapWord* compact_top) {
   // First check if we should switch compaction space
   assert(this == cp->space, "'this' should be current compaction space.");
   size_t compaction_max_size = pointer_delta(end(), compact_top);
@@ -400,8 +399,15 @@ HeapWord* CompactibleSpace::forward(oop q, size_t size,
     compaction_max_size = pointer_delta(cp->space->end(), compact_top);
   }
 
+  return compact_top;
+}
+
+HeapWord* CompactibleSpace::forward(oop q, size_t size,
+                                    CompactPoint* cp, HeapWord* compact_top) {
+  compact_top = forward_compact_top(size, cp, compact_top);
+
   // store the forwarding pointer into the mark word
-  if ((HeapWord*)q != compact_top) {
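+  // (DCEVM) Objects whose size changes need a forwarding pointer even if they do not move.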
+  if ((HeapWord*)q != compact_top || (size_t)q->size() != size) {
     q->forward_to(oop(compact_top));
     assert(q->is_gc_marked(), "encoding the pointer should preserve the mark");
   } else {
@@ -423,6 +429,58 @@ HeapWord* CompactibleSpace::forward(oop q, size_t size,
   return compact_top;
 }
 
+// Compute the forwarding addresses, but set aside (rescue) objects whose new
+// position could overlap other objects.
+HeapWord* CompactibleSpace::forward_with_rescue(oop q, size_t size,
+                                                CompactPoint* cp, HeapWord* compact_top) {
+  size_t forward_size = size;
+
+  // (DCEVM) There is a new version of the class of q => different size
+  if (oop(q)->blueprint()->new_version() != NULL && oop(q)->blueprint()->new_version()->klass_part()->update_information() != NULL) {
+
+    size_t new_size = oop(q)->size_given_klass(oop(q)->blueprint()->new_version()->klass_part());
+    assert(size != new_size || oop(q)->is_perm(), "instances without changed size have to be updated prior to GC run");
+    forward_size = new_size;
+  }
+
+  compact_top = forward_compact_top(forward_size, cp, compact_top);
+
+  if (must_rescue(oop(q), oop(compact_top))) {
+    if (MarkSweep::_rescued_oops == NULL) {
+      MarkSweep::_rescued_oops = new GrowableArray<oop>(128);
+    }
+    MarkSweep::_rescued_oops->append(oop(q));
+    return compact_top;
+  }
+
+  return forward(q, forward_size, cp, compact_top);
+}
+
+// Compute the forwarding addresses for the objects that need to be rescued.
+HeapWord* CompactibleSpace::forward_rescued(CompactPoint* cp, HeapWord* compact_top) {
+  // TODO: empty the _rescued_oops after ALL spaces are compacted!
+  if (MarkSweep::_rescued_oops != NULL) {
+    for (int i=0; i<MarkSweep::_rescued_oops->length(); i++) {
+      oop q = MarkSweep::_rescued_oops->at(i);
+
+      /* size_t size = oop(q)->size();  changing this for cms for perm gen */
+      size_t size = block_size((HeapWord*)q);
+
+      // (tw) There is a new version of the class of q => different size
+      if (oop(q)->blueprint()->new_version() != NULL) {
+        size_t new_size = oop(q)->size_given_klass(oop(q)->blueprint()->new_version()->klass_part());
+        assert(size != new_size || oop(q)->is_perm(), "instances without changed size have to be updated prior to GC run");
+        size = new_size;
+      }
+
+      compact_top = cp->space->forward(oop(q), size, cp, compact_top);
+      assert(compact_top <= end(), "must not write over end of space!");
+    }
+    MarkSweep::_rescued_oops->clear();
+    MarkSweep::_rescued_oops = NULL;
+  }
+  return compact_top;
+}
 
 bool CompactibleSpace::insert_deadspace(size_t& allowed_deadspace_words,
                                         HeapWord* q, size_t deadlength) {
@@ -444,12 +502,17 @@ bool CompactibleSpace::insert_deadspace(size_t& allowed_deadspace_words,
 #define adjust_obj_size(s) s
 
 void CompactibleSpace::prepare_for_compaction(CompactPoint* cp) {
-  SCAN_AND_FORWARD(cp, end, block_is_obj, block_size);
+  SCAN_AND_FORWARD(cp, end, block_is_obj, block_size, false);
 }
 
 // Faster object search.
 void ContiguousSpace::prepare_for_compaction(CompactPoint* cp) {
-  SCAN_AND_FORWARD(cp, top, block_is_always_obj, obj_size);
+  if (!Universe::is_redefining_gc_run()) {
+    SCAN_AND_FORWARD(cp, top, block_is_always_obj, obj_size, false);
+  } else {
+    // Redefinition run
+    SCAN_AND_FORWARD(cp, top, block_is_always_obj, obj_size, true);
+  }
 }
 
 void Space::adjust_pointers() {
@@ -490,6 +553,111 @@ void Space::adjust_pointers() {
   assert(q == t, "just checking");
 }
 
+
+#ifdef ASSERT
+
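+// (DCEVM) Debug-only helper: returns an index identifying the compaction space
+// that contains obj, or -1 for the permanent generation. Index 0 is the first
+// compaction space of the oldest generation.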
+int CompactibleSpace::space_index(oop obj) {
+  GenCollectedHeap* heap = GenCollectedHeap::heap();
+
+  if (heap->is_in_permanent(obj)) {
+    return -1;
+  }
+
+  int index = 0;
+  for (int i = heap->n_gens() - 1; i >= 0; i--) {
+    Generation* gen = heap->get_gen(i);
+    CompactibleSpace* space = gen->first_compaction_space();
+    while (space != NULL) {
+      if (space->is_in_reserved(obj)) {
+        return index;
+      }
+      space = space->next_compaction_space();
+      index++;
+    }
+  }
+
+  tty->print_cr("could not compute space_index for %08xh", obj);
+  index = 0;
+  for (int i = heap->n_gens() - 1; i >= 0; i--) {
+    Generation* gen = heap->get_gen(i);
+    tty->print_cr("  generation %s: %08xh - %08xh", gen->name(), gen->reserved().start(), gen->reserved().end());
+
+    CompactibleSpace* space = gen->first_compaction_space();
+    while (space != NULL) {
+      tty->print_cr("    %2d space %08xh - %08xh", index, space->bottom(), space->end());
+      space = space->next_compaction_space();
+      index++;
+    }
+  }
+
+  ShouldNotReachHere();
+  return 0;
+}
+#endif
+
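+// (DCEVM) Returns true if old_obj must be copied into a side buffer before
+// compaction, either because its (possibly resized) copy at new_obj could
+// overwrite objects that have not been moved yet, or to preserve the perm gen
+// invariant obj->klass() <= obj.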
+bool CompactibleSpace::must_rescue(oop old_obj, oop new_obj) {
+  // Only objects of redefined classes may need to be rescued.
+  if (oop(old_obj)->blueprint()->new_version() == NULL) return false;
+
+  if (old_obj->is_perm()) {
+    // This object is in perm gen: Always rescue to satisfy invariant obj->klass() <= obj.
+    return true;
+  }
+
+  int new_size = old_obj->size_given_klass(oop(old_obj)->blueprint()->new_version()->klass_part());
+  int original_size = old_obj->size();
+  
+  Generation* tenured_gen = GenCollectedHeap::heap()->get_gen(1);
+  bool old_in_tenured = tenured_gen->is_in_reserved(old_obj);
+  bool new_in_tenured = tenured_gen->is_in_reserved(new_obj);
+  if (old_in_tenured == new_in_tenured) {
+    // Rescue if object may overlap with a higher memory address.
+    bool overlap = (old_obj + original_size < new_obj + new_size);
+    if (old_in_tenured) {
+      // Old and new addresses are in the same space, so just compare the addresses.
+      // Must rescue if object moves towards the top of the space.
+      assert(space_index(old_obj) == space_index(new_obj), "old_obj and new_obj must be in same space");
+    } else {
+      // In the new generation, eden is located before the from space, so a
+      // simple pointer comparison is sufficient.
+      assert(GenCollectedHeap::heap()->get_gen(0)->is_in_reserved(old_obj), "old_obj must be in DefNewGeneration");
+      assert(GenCollectedHeap::heap()->get_gen(0)->is_in_reserved(new_obj), "new_obj must be in DefNewGeneration");
+      assert(overlap == (space_index(old_obj) < space_index(new_obj)), "slow and fast computation must yield same result");
+    }
+    return overlap;
+
+  } else {
+    assert(space_index(old_obj) != space_index(new_obj), "old_obj and new_obj must be in different spaces");
+    if (tenured_gen->is_in_reserved(new_obj)) {
+      // Must never rescue when moving from the new into the old generation.
+      assert(GenCollectedHeap::heap()->get_gen(0)->is_in_reserved(old_obj), "old_obj must be in DefNewGeneration");
+      assert(space_index(old_obj) > space_index(new_obj), "must be");
+      return false;
+
+    } else /* if (tenured_gen->is_in_reserved(old_obj)) */ {
+      // Must always rescue when moving from the old into the new generation.
+      assert(GenCollectedHeap::heap()->get_gen(0)->is_in_reserved(new_obj), "new_obj must be in DefNewGeneration");
+      assert(space_index(old_obj) < space_index(new_obj), "must be");
+      return true;
+    }
+  }
+}
+
+oop CompactibleSpace::rescue(oop old_obj) {
+  assert(must_rescue(old_obj, old_obj->forwardee()), "do not call otherwise");
+
+  int size = old_obj->size();
+  oop rescued_obj = (oop)resource_allocate_bytes(size * HeapWordSize);
+  Copy::aligned_disjoint_words((HeapWord*)old_obj, (HeapWord*)rescued_obj, size);
+
+  if (MarkSweep::_rescued_oops == NULL) {
+    MarkSweep::_rescued_oops = new GrowableArray<oop>(128);
+  }
+
+  MarkSweep::_rescued_oops->append(rescued_obj);
+  return rescued_obj;
+}
+
 void CompactibleSpace::adjust_pointers() {
   // Check first is there is any work to do.
   if (used() == 0) {
@@ -500,7 +668,13 @@ void CompactibleSpace::adjust_pointers() {
 }
 
 void CompactibleSpace::compact() {
-  SCAN_AND_COMPACT(obj_size);
+
+  if (!Universe::is_redefining_gc_run()) {
+    SCAN_AND_COMPACT(obj_size, false);
+  } else {
+    // Redefinition run
+    SCAN_AND_COMPACT(obj_size, true);
+  }
 }
 
 void Space::print_short() const { print_short_on(tty); }
diff --git a/src/share/vm/memory/space.hpp b/src/share/vm/memory/space.hpp
index ef2f2c6..ff95a8b 100644
--- a/src/share/vm/memory/space.hpp
+++ b/src/share/vm/memory/space.hpp
@@ -445,6 +445,9 @@ public:
   // indicates when the next such action should be taken.
   virtual void prepare_for_compaction(CompactPoint* cp);
   // MarkSweep support phase3
+  DEBUG_ONLY(int space_index(oop obj));
+  bool must_rescue(oop old_obj, oop new_obj);
+  oop rescue(oop old_obj);
   virtual void adjust_pointers();
   // MarkSweep support phase4
   virtual void compact();
@@ -474,6 +477,15 @@ public:
   // accordingly".
   virtual HeapWord* forward(oop q, size_t size, CompactPoint* cp,
                     HeapWord* compact_top);
+  // (DCEVM) Same as forward, but can rescue objects. Invoked only during
+  // redefinition runs.
+  HeapWord* forward_with_rescue(oop q, size_t size, CompactPoint* cp,
+                                        HeapWord* compact_top);
+
+  HeapWord* forward_rescued(CompactPoint* cp, HeapWord* compact_top);
+
+  // (tw) Compute new compact top without actually forwarding the object.
+  virtual HeapWord* forward_compact_top(size_t size, CompactPoint* cp, HeapWord* compact_top);
 
   // Return a size with adjusments as required of the space.
   virtual size_t adjust_object_size_v(size_t size) const { return size; }
@@ -504,7 +516,7 @@ protected:
                         size_t word_len);
 };
 
-#define SCAN_AND_FORWARD(cp,scan_limit,block_is_obj,block_size) {            \
+#define SCAN_AND_FORWARD(cp,scan_limit,block_is_obj,block_size,redefinition_run) {            \
   /* Compute the new addresses for the live objects and store it in the mark \
    * Used by universe::mark_sweep_phase2()                                   \
    */                                                                        \
@@ -564,7 +576,17 @@ protected:
       Prefetch::write(q, interval);                                          \
       /* size_t size = oop(q)->size();  changing this for cms for perm gen */\
       size_t size = block_size(q);                                           \
-      compact_top = cp->space->forward(oop(q), size, cp, compact_top);       \
+      if (redefinition_run) {                                                \
+        compact_top = cp->space->forward_with_rescue(oop(q), size,           \
+                                                     cp, compact_top);       \
+        if (q < first_dead && oop(q)->is_gc_marked()) {                      \
+          /* Was moved (otherwise, forward would reset mark),                \
+             set first_dead to here */                                       \
+          first_dead = q;                                                    \
+        }                                                                    \
+      } else {                                                               \
+        compact_top = cp->space->forward(oop(q), size, cp, compact_top);     \
+      }                                                                      \
       q += size;                                                             \
       end_of_live = q;                                                       \
     } else {                                                                 \
@@ -613,6 +635,8 @@ protected:
     }                                                                        \
   }                                                                          \
                                                                              \
+  if (redefinition_run) { compact_top = forward_rescued(cp, compact_top); }  \
+                                                                             \
   assert(q == t, "just checking");                                           \
   if (liveRange != NULL) {                                                   \
     liveRange->set_end(q);                                                   \
@@ -665,13 +689,8 @@ protected:
       q += size;                                                                \
     }                                                                           \
                                                                                 \
-    if (_first_dead == t) {                                                     \
-      q = t;                                                                    \
-    } else {                                                                    \
-      /* $$$ This is funky.  Using this to read the previously written          \
-       * LiveRange.  See also use below. */                                     \
-      q = (HeapWord*)oop(_first_dead)->mark()->decode_pointer();                \
-    }                                                                           \
+    /* (DCEVM) first_dead can be a live object if we move/rescue resized objects */ \
+    q = _first_dead;                                                            \
   }                                                                             \
                                                                                 \
   const intx interval = PrefetchScanIntervalInBytes;                            \
@@ -702,7 +721,7 @@ protected:
   assert(q == t, "just checking");                                              \
 }
 
-#define SCAN_AND_COMPACT(obj_size) {                                            \
+#define SCAN_AND_COMPACT(obj_size, redefinition_run) {                          \
   /* Copy all live objects to their new location                                \
    * Used by MarkSweep::mark_sweep_phase4() */                                  \
                                                                                 \
@@ -728,12 +747,8 @@ protected:
     }                                                                           \
     )  /* debug_only */                                                         \
                                                                                 \
-    if (_first_dead == t) {                                                     \
-      q = t;                                                                    \
-    } else {                                                                    \
-      /* $$$ Funky */                                                           \
-      q = (HeapWord*) oop(_first_dead)->mark()->decode_pointer();               \
-    }                                                                           \
+    /* (DCEVM) first_dead can be a live object if we move/rescue resized objects */ \
+    q = _first_dead;                                                            \
   }                                                                             \
                                                                                 \
   const intx scan_interval = PrefetchScanIntervalInBytes;                       \
@@ -752,13 +767,36 @@ protected:
       size_t size = obj_size(q);                                                \
       HeapWord* compaction_top = (HeapWord*)oop(q)->forwardee();                \
                                                                                 \
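+      /* (DCEVM) Rescue the object into a side buffer if copying it to its new  \
+         location now could overwrite an object that has not been moved yet. */ \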
+      if (redefinition_run && must_rescue(oop(q), oop(q)->forwardee())) {       \
+        oop dest_obj = rescue(oop(q));                                          \
+        debug_only(Copy::fill_to_words(q, size, 0));                            \
+        q += size;                                                              \
+        continue;                                                               \
+      }                                                                         \
+                                                                                \
       /* prefetch beyond compaction_top */                                      \
       Prefetch::write(compaction_top, copy_interval);                           \
                                                                                 \
       /* copy object and reinit its mark */                                     \
       VALIDATE_MARK_SWEEP_ONLY(MarkSweep::live_oop_moved_to(q, size,            \
                                                             compaction_top));   \
-      assert(q != compaction_top, "everything in this pass should be moving");  \
+      assert(q != compaction_top || oop(q)->blueprint()->new_version() != NULL, \
+             "everything in this pass should be moving");                       \
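+      /* (DCEVM) Objects of redefined classes are converted to the new class    \
+         layout while being copied to their new location. */                    \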
+      if (redefinition_run && oop(q)->blueprint()->new_version() != NULL) {     \
+        klassOop new_version = oop(q)->blueprint()->new_version();              \
+        if (new_version->klass_part()->update_information() == NULL) {          \
+          Copy::aligned_conjoint_words(q, compaction_top, size);                \
+          oop(compaction_top)->set_klass_no_check(new_version);                 \
+        } else {                                                                \
+          MarkSweep::update_fields(oop(q), oop(compaction_top));                \
+        }                                                                       \
+        oop(compaction_top)->init_mark();                                       \
+        assert(oop(compaction_top)->klass() != NULL, "should have a class");    \
+                                                                                \
+        debug_only(prev_q = q);                                                 \
+        q += size;                                                              \
+        continue;                                                               \
+      }                                                                         \
       Copy::aligned_conjoint_words(q, compaction_top, size);                    \
       oop(compaction_top)->init_mark();                                         \
       assert(oop(compaction_top)->klass() != NULL, "should have a class");      \
diff --git a/src/share/vm/memory/specialized_oop_closures.hpp b/src/share/vm/memory/specialized_oop_closures.hpp
index 4d7c50b..671787e 100644
--- a/src/share/vm/memory/specialized_oop_closures.hpp
+++ b/src/share/vm/memory/specialized_oop_closures.hpp
@@ -37,6 +37,7 @@
 
 // Forward declarations.
 class OopClosure;
+class OopClosureNoHeader;
 class OopsInGenClosure;
 // DefNew
 class ScanClosure;
@@ -74,6 +75,7 @@ class CMSInnerParMarkAndPushClosure;
 #endif
 
 #define SPECIALIZED_OOP_OOP_ITERATE_CLOSURES_S(f)       \
+  f(OopClosureNoHeader,_v)                              \
   f(ScanClosure,_nv)                                    \
   f(FastScanClosure,_nv)                                \
   f(FilteringClosure,_nv)
@@ -132,6 +134,7 @@ class CMSInnerParMarkAndPushClosure;
 
 #define ALL_PAR_OOP_ITERATE_CLOSURES(f)                \
   f(OopClosure,_v)                                     \
+  f(OopClosureNoHeader,_v)                             \
   SPECIALIZED_PAR_OOP_ITERATE_CLOSURES(f)
 #endif // SERIALGC
 
diff --git a/src/share/vm/memory/universe.cpp b/src/share/vm/memory/universe.cpp
index 8ce17d9..fe37993 100644
--- a/src/share/vm/memory/universe.cpp
+++ b/src/share/vm/memory/universe.cpp
@@ -100,6 +100,8 @@
 #include "gc_implementation/parallelScavenge/parallelScavengeHeap.hpp"
 #endif
 
+bool Universe::_is_redefining_gc_run = false;
+
 // Known objects
 klassOop Universe::_boolArrayKlassObj                 = NULL;
 klassOop Universe::_byteArrayKlassObj                 = NULL;
@@ -204,6 +206,42 @@ void Universe::system_classes_do(void f(klassOop)) {
   f(systemObjArrayKlassObj());
 }
 
+// (tw) This method should iterate over all root pointers that are not contained within heap objects.
+void Universe::root_oops_do(OopClosure *oopClosure) {
+
+  class AlwaysTrueClosure: public BoolObjectClosure {
+  public:
+    void do_object(oop p) { ShouldNotReachHere(); }
+    bool do_object_b(oop p) { return true; }
+  };
+  AlwaysTrueClosure always_true;
+
+  Universe::oops_do(oopClosure);
+//  ReferenceProcessor::oops_do(oopClosure); (tw) check why no longer there
+  JNIHandles::oops_do(oopClosure);   // Global (strong) JNI handles
+  Threads::oops_do(oopClosure, NULL);
+  ObjectSynchronizer::oops_do(oopClosure);
+  FlatProfiler::oops_do(oopClosure);
+  JvmtiExport::oops_do(oopClosure);
+
+  // Now adjust pointers in remaining weak roots.  (All of which should
+  // have been cleared if they pointed to non-surviving objects.)
+  // Global (weak) JNI handles
+  JNIHandles::weak_oops_do(&always_true, oopClosure);
+
+  CodeCache::oops_do(oopClosure);
+  StringTable::oops_do(oopClosure);
+  
+  // (tw) TODO: Check if this is correct?
+  //CodeCache::scavenge_root_nmethods_oops_do(oopClosure);
+  //Management::oops_do(oopClosure);
+  //ref_processor()->weak_oops_do(&oopClosure);
+  //PSScavenge::reference_processor()->weak_oops_do(&oopClosure);
+
+  // SO_AllClasses
+  SystemDictionary::oops_do(oopClosure);
+}
+
 void Universe::oops_do(OopClosure* f, bool do_all) {
 
   f->do_oop((oop*) &_int_mirror);
@@ -1590,10 +1628,9 @@ void ActiveMethodOopsCache::add_previous_version(const methodOop method) {
   }
 
   // RC_TRACE macro has an embedded ResourceMark
-  RC_TRACE(0x00000100,
-    ("add: %s(%s): adding prev version ref for cached method @%d",
+  TRACE_RC2("add: %s(%s): adding prev version ref for cached method @%d",
     method->name()->as_C_string(), method->signature()->as_C_string(),
-    _prev_methods->length()));
+    _prev_methods->length());
 
   methodHandle method_h(method);
   jweak method_ref = JNIHandles::make_weak_global(method_h);
@@ -1620,9 +1657,8 @@ void ActiveMethodOopsCache::add_previous_version(const methodOop method) {
       JNIHandles::destroy_weak_global(method_ref);
       _prev_methods->remove_at(i);
     } else {
-      // RC_TRACE macro has an embedded ResourceMark
-      RC_TRACE(0x00000400, ("add: %s(%s): previous cached method @%d is alive",
-        m->name()->as_C_string(), m->signature()->as_C_string(), i));
+      TRACE_RC2("add: %s(%s): previous cached method @%d is alive",
+        m->name()->as_C_string(), m->signature()->as_C_string(), i);
     }
   }
 } // end add_previous_version()
diff --git a/src/share/vm/memory/universe.hpp b/src/share/vm/memory/universe.hpp
index da21a8b..676675e 100644
--- a/src/share/vm/memory/universe.hpp
+++ b/src/share/vm/memory/universe.hpp
@@ -127,6 +127,8 @@ class Universe: AllStatic {
   friend class SystemDictionary;
   friend class VMStructs;
   friend class CompactingPermGenGen;
+  friend class Space;
+  friend class ContiguousSpace;
   friend class VM_PopulateDumpSharedSpace;
 
   friend jint  universe_init();
@@ -258,7 +260,18 @@ class Universe: AllStatic {
 
   static void compute_verify_oop_data();
 
+  static bool _is_redefining_gc_run;
+
  public:
+
+  static bool is_redefining_gc_run() {
+    return _is_redefining_gc_run;
+  }
+
+  static void set_redefining_gc_run(bool b) {
+    _is_redefining_gc_run = b;
+  }
+
   // Known classes in the VM
   static klassOop boolArrayKlassObj()                 { return _boolArrayKlassObj;   }
   static klassOop byteArrayKlassObj()                 { return _byteArrayKlassObj;   }
@@ -403,6 +416,8 @@ class Universe: AllStatic {
 
   // Iteration
 
+  static void root_oops_do(OopClosure *f);
+
   // Apply "f" to the addresses of all the direct heap pointers maintained
   // as static fields of "Universe".
   static void oops_do(OopClosure* f, bool do_all = false);
@@ -419,6 +434,7 @@ class Universe: AllStatic {
 
   // Debugging
   static bool verify_in_progress() { return _verify_in_progress; }
+  static void set_verify_in_progress(bool b) { _verify_in_progress = b; }
   static void verify(bool silent, VerifyOption option);
   static void verify(bool silent) {
     verify(silent, VerifyOption_Default /* option */);
diff --git a/src/share/vm/oops/cpCacheOop.cpp b/src/share/vm/oops/cpCacheOop.cpp
index ad62921..1cd422c 100644
--- a/src/share/vm/oops/cpCacheOop.cpp
+++ b/src/share/vm/oops/cpCacheOop.cpp
@@ -40,6 +40,11 @@
 void ConstantPoolCacheEntry::initialize_entry(int index) {
   assert(0 < index && index < 0x10000, "sanity check");
   _indices = index;
+// (DCEVM) Should store sentinel values here instead, so that the JVM fails fast if these invalid entries are accessed.
+//#ifdef ASSERT
+  _f1 = NULL;
+  _f2 = 0;
+//#endif
   assert(constant_pool_index() == index, "");
 }
 
@@ -162,7 +167,8 @@ void ConstantPoolCacheEntry::set_method(Bytecodes::Code invoke_code,
                                         int vtable_index) {
   assert(!is_secondary_entry(), "");
   assert(method->interpreter_entry() != NULL, "should have been set at this point");
-  assert(!method->is_obsolete(),  "attempt to write obsolete method to cpCache");
+  // (tw) No longer valid assert
+  //assert(!method->is_obsolete(),  "attempt to write obsolete method to cpCache");
 
   int byte_no = -1;
   bool change_to_virtual = false;
@@ -516,116 +522,6 @@ void ConstantPoolCacheEntry::update_pointers() {
 }
 #endif // SERIALGC
 
-// RedefineClasses() API support:
-// If this constantPoolCacheEntry refers to old_method then update it
-// to refer to new_method.
-bool ConstantPoolCacheEntry::adjust_method_entry(methodOop old_method,
-       methodOop new_method, bool * trace_name_printed) {
-
-  if (is_vfinal()) {
-    // virtual and final so _f2 contains method ptr instead of vtable index
-    if (f2_as_vfinal_method() == old_method) {
-      // match old_method so need an update
-      // NOTE: can't use set_f2_as_vfinal_method as it asserts on different values
-      _f2 = (intptr_t)new_method;
-      if (RC_TRACE_IN_RANGE(0x00100000, 0x00400000)) {
-        if (!(*trace_name_printed)) {
-          // RC_TRACE_MESG macro has an embedded ResourceMark
-          RC_TRACE_MESG(("adjust: name=%s",
-            Klass::cast(old_method->method_holder())->external_name()));
-          *trace_name_printed = true;
-        }
-        // RC_TRACE macro has an embedded ResourceMark
-        RC_TRACE(0x00400000, ("cpc vf-entry update: %s(%s)",
-          new_method->name()->as_C_string(),
-          new_method->signature()->as_C_string()));
-      }
-
-      return true;
-    }
-
-    // f1() is not used with virtual entries so bail out
-    return false;
-  }
-
-  if ((oop)_f1 == NULL) {
-    // NULL f1() means this is a virtual entry so bail out
-    // We are assuming that the vtable index does not need change.
-    return false;
-  }
-
-  if ((oop)_f1 == old_method) {
-    _f1 = new_method;
-    if (RC_TRACE_IN_RANGE(0x00100000, 0x00400000)) {
-      if (!(*trace_name_printed)) {
-        // RC_TRACE_MESG macro has an embedded ResourceMark
-        RC_TRACE_MESG(("adjust: name=%s",
-          Klass::cast(old_method->method_holder())->external_name()));
-        *trace_name_printed = true;
-      }
-      // RC_TRACE macro has an embedded ResourceMark
-      RC_TRACE(0x00400000, ("cpc entry update: %s(%s)",
-        new_method->name()->as_C_string(),
-        new_method->signature()->as_C_string()));
-    }
-
-    return true;
-  }
-
-  return false;
-}
-
-// a constant pool cache entry should never contain old or obsolete methods
-bool ConstantPoolCacheEntry::check_no_old_or_obsolete_entries() {
-  if (is_vfinal()) {
-    // virtual and final so _f2 contains method ptr instead of vtable index
-    methodOop m = (methodOop)_f2;
-    // Return false if _f2 refers to an old or an obsolete method.
-    // _f2 == NULL || !m->is_method() are just as unexpected here.
-    return (m != NULL && m->is_method() && !m->is_old() && !m->is_obsolete());
-  } else if ((oop)_f1 == NULL || !((oop)_f1)->is_method()) {
-    // _f1 == NULL || !_f1->is_method() are OK here
-    return true;
-  }
-
-  methodOop m = (methodOop)_f1;
-  // return false if _f1 refers to an old or an obsolete method
-  return (!m->is_old() && !m->is_obsolete());
-}
-
-bool ConstantPoolCacheEntry::is_interesting_method_entry(klassOop k) {
-  if (!is_method_entry()) {
-    // not a method entry so not interesting by default
-    return false;
-  }
-
-  methodOop m = NULL;
-  if (is_vfinal()) {
-    // virtual and final so _f2 contains method ptr instead of vtable index
-    m = f2_as_vfinal_method();
-  } else if (is_f1_null()) {
-    // NULL _f1 means this is a virtual entry so also not interesting
-    return false;
-  } else {
-    oop f1 = _f1;  // _f1 is volatile
-    if (!f1->is_method()) {
-      // _f1 can also contain a klassOop for an interface
-      return false;
-    }
-    m = f1_as_method();
-  }
-
-  assert(m != NULL && m->is_method(), "sanity check");
-  if (m == NULL || !m->is_method() || (k != NULL && m->method_holder() != k)) {
-    // robustness for above sanity checks or method is not in
-    // the interesting class
-    return false;
-  }
-
-  // the method is in the interesting class so the entry is interesting
-  return true;
-}
-
 void ConstantPoolCacheEntry::print(outputStream* st, int index) const {
   // print separator
   if (index == 0) st->print_cr("                 -------------");
@@ -663,60 +559,10 @@ void constantPoolCacheOopDesc::initialize(intArray& inverse_index_map) {
   }
 }
 
-// RedefineClasses() API support:
-// If any entry of this constantPoolCache points to any of
-// old_methods, replace it with the corresponding new_method.
-void constantPoolCacheOopDesc::adjust_method_entries(methodOop* old_methods, methodOop* new_methods,
-                                                     int methods_length, bool * trace_name_printed) {
-
-  if (methods_length == 0) {
-    // nothing to do if there are no methods
-    return;
-  }
-
-  // get shorthand for the interesting class
-  klassOop old_holder = old_methods[0]->method_holder();
+void constantPoolCacheOopDesc::adjust_entries() {
 
   for (int i = 0; i < length(); i++) {
-    if (!entry_at(i)->is_interesting_method_entry(old_holder)) {
-      // skip uninteresting methods
-      continue;
-    }
-
-    // The constantPoolCache contains entries for several different
-    // things, but we only care about methods. In fact, we only care
-    // about methods in the same class as the one that contains the
-    // old_methods. At this point, we have an interesting entry.
-
-    for (int j = 0; j < methods_length; j++) {
-      methodOop old_method = old_methods[j];
-      methodOop new_method = new_methods[j];
-
-      if (entry_at(i)->adjust_method_entry(old_method, new_method,
-          trace_name_printed)) {
-        // current old_method matched this entry and we updated it so
-        // break out and get to the next interesting entry if there one
-        break;
-      }
-    }
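+    // (DCEVM) Reset every entry to its initial, unresolved state so that stale
+    // references to methods and fields of old class versions are dropped.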
+    entry_at(i)->initialize_entry(entry_at(i)->constant_pool_index());
   }
 }
 
-// the constant pool cache should never contain old or obsolete methods
-bool constantPoolCacheOopDesc::check_no_old_or_obsolete_entries() {
-  for (int i = 1; i < length(); i++) {
-    if (entry_at(i)->is_interesting_method_entry(NULL) &&
-        !entry_at(i)->check_no_old_or_obsolete_entries()) {
-      return false;
-    }
-  }
-  return true;
-}
-
-void constantPoolCacheOopDesc::dump_cache() {
-  for (int i = 1; i < length(); i++) {
-    if (entry_at(i)->is_interesting_method_entry(NULL)) {
-      entry_at(i)->print(tty, i);
-    }
-  }
-}
diff --git a/src/share/vm/oops/cpCacheOop.hpp b/src/share/vm/oops/cpCacheOop.hpp
index ef26775..a270d0d 100644
--- a/src/share/vm/oops/cpCacheOop.hpp
+++ b/src/share/vm/oops/cpCacheOop.hpp
@@ -355,17 +355,6 @@ class ConstantPoolCacheEntry VALUE_OBJ_CLASS_SPEC {
 
   void update_pointers();
 
-  // RedefineClasses() API support:
-  // If this constantPoolCacheEntry refers to old_method then update it
-  // to refer to new_method.
-  // trace_name_printed is set to true if the current call has
-  // printed the klass name so that other routines in the adjust_*
-  // group don't print the klass name.
-  bool adjust_method_entry(methodOop old_method, methodOop new_method,
-         bool * trace_name_printed);
-  bool check_no_old_or_obsolete_entries();
-  bool is_interesting_method_entry(klassOop k);
-
   // Debugging & Printing
   void print (outputStream* st, int index) const;
   void verify(outputStream* st) const;
@@ -485,16 +474,8 @@ class constantPoolCacheOopDesc: public oopDesc {
     return (base_offset() + ConstantPoolCacheEntry::size_in_bytes() * index);
   }
 
-  // RedefineClasses() API support:
-  // If any entry of this constantPoolCache points to any of
-  // old_methods, replace it with the corresponding new_method.
-  // trace_name_printed is set to true if the current call has
-  // printed the klass name so that other routines in the adjust_*
-  // group don't print the klass name.
-  void adjust_method_entries(methodOop* old_methods, methodOop* new_methods,
-                             int methods_length, bool * trace_name_printed);
-  bool check_no_old_or_obsolete_entries();
-  void dump_cache();
+  // (tw) Clear references to methods and fields from this cache.
+  void adjust_entries();
 };
 
 #endif // SHARE_VM_OOPS_CPCACHEOOP_HPP
diff --git a/src/share/vm/oops/instanceKlass.cpp b/src/share/vm/oops/instanceKlass.cpp
index cd3dce0..666ffdf 100644
--- a/src/share/vm/oops/instanceKlass.cpp
+++ b/src/share/vm/oops/instanceKlass.cpp
@@ -255,7 +255,7 @@ bool instanceKlass::verify_code(
   // 1) Verify the bytecodes
   Verifier::Mode mode =
     throw_verifyerror ? Verifier::ThrowException : Verifier::NoException;
-  return Verifier::verify(this_oop, mode, this_oop->should_verify_class(), CHECK_false);
+  return Verifier::verify(this_oop, mode, this_oop->should_verify_class(), true, CHECK_false);
 }
 
 
@@ -362,7 +362,13 @@ bool instanceKlass::link_class_impl(
                                    jt->get_thread_stat()->perf_recursion_counts_addr(),
                                    jt->get_thread_stat()->perf_timers_addr(),
                                    PerfClassTraceTime::CLASS_VERIFY);
+          if (this_oop->is_redefining()) {
+            Thread::current()->set_pretend_new_universe(true);
+          }
           bool verify_ok = verify_code(this_oop, throw_verifyerror, THREAD);
+          if (this_oop->is_redefining()) {
+            Thread::current()->set_pretend_new_universe(false);
+          }
           if (!verify_ok) {
             return false;
           }
@@ -400,7 +406,8 @@ bool instanceKlass::link_class_impl(
       }
 #endif
       this_oop->set_init_state(linked);
-      if (JvmtiExport::should_post_class_prepare()) {
+      // (tw) Must check for old version in order to prevent infinite loops.
+      if (JvmtiExport::should_post_class_prepare() && this_oop->old_version() == NULL /* JVMTI deadlock otherwise */) {
         Thread *thread = THREAD;
         assert(thread->is_Java_thread(), "thread->is_Java_thread()");
         JvmtiExport::post_class_prepare((JavaThread *) thread, this_oop());
@@ -454,7 +461,9 @@ void instanceKlass::initialize_impl(instanceKlassHandle this_oop, TRAPS) {
     // If we were to use wait() instead of waitInterruptibly() then
     // we might end up throwing IE from link/symbol resolution sites
     // that aren't expected to throw.  This would wreak havoc.  See 6320309.
-    while(this_oop->is_being_initialized() && !this_oop->is_reentrant_initialization(self)) {
+    // (tw) Wait also for the old class version to be fully initialized.
+    while((this_oop->is_being_initialized() && !this_oop->is_reentrant_initialization(self))
+          || (this_oop->old_version() != NULL && ((instanceKlass*)this_oop->old_version()->klass_part())->is_being_initialized())) {
         wait = true;
       ol.waitUninterruptibly(CHECK);
     }
@@ -673,6 +682,18 @@ bool instanceKlass::implements_interface(klassOop k) const {
   return false;
 }
 
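+// (DCEVM) Same as implements_interface, but compares the newest versions of the
+// involved classes so that the check also holds across class redefinitions.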
+bool instanceKlass::implements_interface_any_version(klassOop k) const {
+  k = k->klass_part()->newest_version();
+  if (this->newest_version() == k) return true;
+  assert(Klass::cast(k)->is_interface(), "should be an interface class");
+  for (int i = 0; i < transitive_interfaces()->length(); i++) {
+    if (((klassOop)transitive_interfaces()->obj_at(i))->klass_part()->newest_version() == k) {
+      return true;
+    }
+  }
+  return false;
+}
+
 objArrayOop instanceKlass::allocate_objArray(int n, int length, TRAPS) {
   if (length < 0) THROW_0(vmSymbols::java_lang_NegativeArraySizeException());
   if (length > arrayOopDesc::max_array_length(T_OBJECT)) {
@@ -949,6 +970,18 @@ void instanceKlass::methods_do(void f(methodOop method)) {
   }
 }
 
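+// (DCEVM) Store the field update information computed during class redefinition
+// in a C-heap array owned by this klass.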
+void instanceKlass::store_update_information(GrowableArray<int> &values) {
+  int *arr = NEW_C_HEAP_ARRAY(int, values.length(), mtClass);
+  for (int i=0; i<values.length(); i++) {
+    arr[i] = values.at(i);
+  }
+  set_update_information(arr);
+}
+
+void instanceKlass::clear_update_information() {
+  FREE_C_HEAP_ARRAY(int, update_information(), mtClass);
+  set_update_information(NULL);
+}
 
 void instanceKlass::do_local_static_fields(FieldClosure* cl) {
   for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
@@ -1368,6 +1401,20 @@ jmethodID instanceKlass::jmethod_id_or_null(methodOop method) {
   return id;
 }
 
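+// (DCEVM) Overwrite the cached jmethodID for the given method, if a cache entry
+// for it already exists. Returns false if there is nothing to update.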
+bool instanceKlass::update_jmethod_id(methodOop method, jmethodID newMethodID) {
+  size_t idnum = (size_t)method->method_idnum();
+  jmethodID* jmeths = methods_jmethod_ids_acquire();
+  size_t length;                                // length assigned as debugging crumb
+  jmethodID id = NULL;
+  if (jmeths != NULL &&                         // If there is a cache
+    (length = (size_t)jmeths[0]) > idnum) {   // and if it is long enough,
+      jmeths[idnum+1] = newMethodID;                       // Set the id (may be NULL)
+      return true;
+  }
+
+  return false;
+}
+
 
 // Cache an itable index
 void instanceKlass::set_cached_itable_index(size_t idnum, int index) {
@@ -1527,6 +1574,13 @@ void instanceKlass::remove_dependent_nmethod(nmethod* nm) {
     last = b;
     b = b->next();
   }
+
+  // (tw) Hack: the dependency may have been registered on the old version of this klassOop,
+  // so forward the removal to it.
+  if (this->old_version() != NULL) {
+    ((instanceKlass *)this->old_version()->klass_part())->remove_dependent_nmethod(nm);
+    return;
+  }
+
 #ifdef ASSERT
   tty->print_cr("### %s can't find dependent nmethod:", this->external_name());
   nm->print();
@@ -1922,16 +1976,6 @@ void instanceKlass::release_C_heap_structures() {
     assert(breakpoints() == 0x0, "should have cleared breakpoints");
   }
 
-  // deallocate information about previous versions
-  if (_previous_versions != NULL) {
-    for (int i = _previous_versions->length() - 1; i >= 0; i--) {
-      PreviousVersionNode * pv_node = _previous_versions->at(i);
-      delete pv_node;
-    }
-    delete _previous_versions;
-    _previous_versions = NULL;
-  }
-
   // deallocate the cached class file
   if (_cached_class_file_bytes != NULL) {
     os::free(_cached_class_file_bytes, mtClass);
@@ -2545,275 +2589,10 @@ void instanceKlass::set_init_state(ClassState state) {
 }
 #endif
 
-
-// RedefineClasses() support for previous versions:
-
-// Add an information node that contains weak references to the
-// interesting parts of the previous version of the_class.
-// This is also where we clean out any unused weak references.
-// Note that while we delete nodes from the _previous_versions
-// array, we never delete the array itself until the klass is
-// unloaded. The has_been_redefined() query depends on that fact.
-//
-void instanceKlass::add_previous_version(instanceKlassHandle ikh,
-       BitMap* emcp_methods, int emcp_method_count) {
-  assert(Thread::current()->is_VM_thread(),
-         "only VMThread can add previous versions");
-
-  if (_previous_versions == NULL) {
-    // This is the first previous version so make some space.
-    // Start with 2 elements under the assumption that the class
-    // won't be redefined much.
-    _previous_versions =  new (ResourceObj::C_HEAP, mtClass)
-                            GrowableArray<PreviousVersionNode *>(2, true);
-  }
-
-  // RC_TRACE macro has an embedded ResourceMark
-  RC_TRACE(0x00000100, ("adding previous version ref for %s @%d, EMCP_cnt=%d",
-    ikh->external_name(), _previous_versions->length(), emcp_method_count));
-  constantPoolHandle cp_h(ikh->constants());
-  jobject cp_ref;
-  if (cp_h->is_shared()) {
-    // a shared ConstantPool requires a regular reference; a weak
-    // reference would be collectible
-    cp_ref = JNIHandles::make_global(cp_h);
-  } else {
-    cp_ref = JNIHandles::make_weak_global(cp_h);
-  }
-  PreviousVersionNode * pv_node = NULL;
-  objArrayOop old_methods = ikh->methods();
-
-  if (emcp_method_count == 0) {
-    // non-shared ConstantPool gets a weak reference
-    pv_node = new PreviousVersionNode(cp_ref, !cp_h->is_shared(), NULL);
-    RC_TRACE(0x00000400,
-      ("add: all methods are obsolete; flushing any EMCP weak refs"));
-  } else {
-    int local_count = 0;
-    GrowableArray<jweak>* method_refs = new (ResourceObj::C_HEAP, mtClass)
-      GrowableArray<jweak>(emcp_method_count, true);
-    for (int i = 0; i < old_methods->length(); i++) {
-      if (emcp_methods->at(i)) {
-        // this old method is EMCP so save a weak ref
-        methodOop old_method = (methodOop) old_methods->obj_at(i);
-        methodHandle old_method_h(old_method);
-        jweak method_ref = JNIHandles::make_weak_global(old_method_h);
-        method_refs->append(method_ref);
-        if (++local_count >= emcp_method_count) {
-          // no more EMCP methods so bail out now
-          break;
-        }
-      }
-    }
-    // non-shared ConstantPool gets a weak reference
-    pv_node = new PreviousVersionNode(cp_ref, !cp_h->is_shared(), method_refs);
-  }
-
-  _previous_versions->append(pv_node);
-
-  // Using weak references allows the interesting parts of previous
-  // classes to be GC'ed when they are no longer needed. Since the
-  // caller is the VMThread and we are at a safepoint, this is a good
-  // time to clear out unused weak references.
-
-  RC_TRACE(0x00000400, ("add: previous version length=%d",
-    _previous_versions->length()));
-
-  // skip the last entry since we just added it
-  for (int i = _previous_versions->length() - 2; i >= 0; i--) {
-    // check the previous versions array for a GC'ed weak refs
-    pv_node = _previous_versions->at(i);
-    cp_ref = pv_node->prev_constant_pool();
-    assert(cp_ref != NULL, "cp ref was unexpectedly cleared");
-    if (cp_ref == NULL) {
-      delete pv_node;
-      _previous_versions->remove_at(i);
-      // Since we are traversing the array backwards, we don't have to
-      // do anything special with the index.
-      continue;  // robustness
-    }
-
-    constantPoolOop cp = (constantPoolOop)JNIHandles::resolve(cp_ref);
-    if (cp == NULL) {
-      // this entry has been GC'ed so remove it
-      delete pv_node;
-      _previous_versions->remove_at(i);
-      // Since we are traversing the array backwards, we don't have to
-      // do anything special with the index.
-      continue;
-    } else {
-      RC_TRACE(0x00000400, ("add: previous version @%d is alive", i));
-    }
-
-    GrowableArray<jweak>* method_refs = pv_node->prev_EMCP_methods();
-    if (method_refs != NULL) {
-      RC_TRACE(0x00000400, ("add: previous methods length=%d",
-        method_refs->length()));
-      for (int j = method_refs->length() - 1; j >= 0; j--) {
-        jweak method_ref = method_refs->at(j);
-        assert(method_ref != NULL, "weak method ref was unexpectedly cleared");
-        if (method_ref == NULL) {
-          method_refs->remove_at(j);
-          // Since we are traversing the array backwards, we don't have to
-          // do anything special with the index.
-          continue;  // robustness
-        }
-
-        methodOop method = (methodOop)JNIHandles::resolve(method_ref);
-        if (method == NULL || emcp_method_count == 0) {
-          // This method entry has been GC'ed or the current
-          // RedefineClasses() call has made all methods obsolete
-          // so remove it.
-          JNIHandles::destroy_weak_global(method_ref);
-          method_refs->remove_at(j);
-        } else {
-          // RC_TRACE macro has an embedded ResourceMark
-          RC_TRACE(0x00000400,
-            ("add: %s(%s): previous method @%d in version @%d is alive",
-            method->name()->as_C_string(), method->signature()->as_C_string(),
-            j, i));
-        }
-      }
-    }
-  }
-
-  int obsolete_method_count = old_methods->length() - emcp_method_count;
-
-  if (emcp_method_count != 0 && obsolete_method_count != 0 &&
-      _previous_versions->length() > 1) {
-    // We have a mix of obsolete and EMCP methods. If there is more
-    // than the previous version that we just added, then we have to
-    // clear out any matching EMCP method entries the hard way.
-    int local_count = 0;
-    for (int i = 0; i < old_methods->length(); i++) {
-      if (!emcp_methods->at(i)) {
-        // only obsolete methods are interesting
-        methodOop old_method = (methodOop) old_methods->obj_at(i);
-        Symbol* m_name = old_method->name();
-        Symbol* m_signature = old_method->signature();
-
-        // skip the last entry since we just added it
-        for (int j = _previous_versions->length() - 2; j >= 0; j--) {
-          // check the previous versions array for a GC'ed weak refs
-          pv_node = _previous_versions->at(j);
-          cp_ref = pv_node->prev_constant_pool();
-          assert(cp_ref != NULL, "cp ref was unexpectedly cleared");
-          if (cp_ref == NULL) {
-            delete pv_node;
-            _previous_versions->remove_at(j);
-            // Since we are traversing the array backwards, we don't have to
-            // do anything special with the index.
-            continue;  // robustness
-          }
-
-          constantPoolOop cp = (constantPoolOop)JNIHandles::resolve(cp_ref);
-          if (cp == NULL) {
-            // this entry has been GC'ed so remove it
-            delete pv_node;
-            _previous_versions->remove_at(j);
-            // Since we are traversing the array backwards, we don't have to
-            // do anything special with the index.
-            continue;
-          }
-
-          GrowableArray<jweak>* method_refs = pv_node->prev_EMCP_methods();
-          if (method_refs == NULL) {
-            // We have run into a PreviousVersion generation where
-            // all methods were made obsolete during that generation's
-            // RedefineClasses() operation. At the time of that
-            // operation, all EMCP methods were flushed so we don't
-            // have to go back any further.
-            //
-            // A NULL method_refs is different than an empty method_refs.
-            // We cannot infer any optimizations about older generations
-            // from an empty method_refs for the current generation.
-            break;
-          }
-
-          for (int k = method_refs->length() - 1; k >= 0; k--) {
-            jweak method_ref = method_refs->at(k);
-            assert(method_ref != NULL,
-              "weak method ref was unexpectedly cleared");
-            if (method_ref == NULL) {
-              method_refs->remove_at(k);
-              // Since we are traversing the array backwards, we don't
-              // have to do anything special with the index.
-              continue;  // robustness
-            }
-
-            methodOop method = (methodOop)JNIHandles::resolve(method_ref);
-            if (method == NULL) {
-              // this method entry has been GC'ed so skip it
-              JNIHandles::destroy_weak_global(method_ref);
-              method_refs->remove_at(k);
-              continue;
-            }
-
-            if (method->name() == m_name &&
-                method->signature() == m_signature) {
-              // The current RedefineClasses() call has made all EMCP
-              // versions of this method obsolete so mark it as obsolete
-              // and remove the weak ref.
-              RC_TRACE(0x00000400,
-                ("add: %s(%s): flush obsolete method @%d in version @%d",
-                m_name->as_C_string(), m_signature->as_C_string(), k, j));
-
-              method->set_is_obsolete();
-              JNIHandles::destroy_weak_global(method_ref);
-              method_refs->remove_at(k);
-              break;
-            }
-          }
-
-          // The previous loop may not find a matching EMCP method, but
-          // that doesn't mean that we can optimize and not go any
-          // further back in the PreviousVersion generations. The EMCP
-          // method for this generation could have already been GC'ed,
-          // but there still may be an older EMCP method that has not
-          // been GC'ed.
-        }
-
-        if (++local_count >= obsolete_method_count) {
-          // no more obsolete methods so bail out now
-          break;
-        }
-      }
-    }
-  }
-} // end add_previous_version()
-
-
 // Determine if instanceKlass has a previous version.
 bool instanceKlass::has_previous_version() const {
-  if (_previous_versions == NULL) {
-    // no previous versions array so answer is easy
-    return false;
-  }
-
-  for (int i = _previous_versions->length() - 1; i >= 0; i--) {
-    // Check the previous versions array for an info node that hasn't
-    // been GC'ed
-    PreviousVersionNode * pv_node = _previous_versions->at(i);
-
-    jobject cp_ref = pv_node->prev_constant_pool();
-    assert(cp_ref != NULL, "cp reference was unexpectedly cleared");
-    if (cp_ref == NULL) {
-      continue;  // robustness
-    }
-
-    constantPoolOop cp = (constantPoolOop)JNIHandles::resolve(cp_ref);
-    if (cp != NULL) {
-      // we have at least one previous version
-      return true;
-    }
-
-    // We don't have to check the method refs. If the constant pool has
-    // been GC'ed then so have the methods.
-  }
-
-  // all of the underlying nodes' info has been GC'ed
-  return false;
-} // end has_previous_version()
+  return _old_version != NULL;
+}
 
 methodOop instanceKlass::method_with_idnum(int idnum) {
   methodOop m = NULL;
@@ -2854,153 +2633,3 @@ void instanceKlass::set_methods_annotations_of(int idnum, typeArrayOop anno, obj
   } // if no array and idnum isn't included there is nothing to do
 }
 
-// Construct a PreviousVersionNode entry for the array hung off
-// the instanceKlass.
-PreviousVersionNode::PreviousVersionNode(jobject prev_constant_pool,
-  bool prev_cp_is_weak, GrowableArray<jweak>* prev_EMCP_methods) {
-
-  _prev_constant_pool = prev_constant_pool;
-  _prev_cp_is_weak = prev_cp_is_weak;
-  _prev_EMCP_methods = prev_EMCP_methods;
-}
-
-
-// Destroy a PreviousVersionNode
-PreviousVersionNode::~PreviousVersionNode() {
-  if (_prev_constant_pool != NULL) {
-    if (_prev_cp_is_weak) {
-      JNIHandles::destroy_weak_global(_prev_constant_pool);
-    } else {
-      JNIHandles::destroy_global(_prev_constant_pool);
-    }
-  }
-
-  if (_prev_EMCP_methods != NULL) {
-    for (int i = _prev_EMCP_methods->length() - 1; i >= 0; i--) {
-      jweak method_ref = _prev_EMCP_methods->at(i);
-      if (method_ref != NULL) {
-        JNIHandles::destroy_weak_global(method_ref);
-      }
-    }
-    delete _prev_EMCP_methods;
-  }
-}
-
-
-// Construct a PreviousVersionInfo entry
-PreviousVersionInfo::PreviousVersionInfo(PreviousVersionNode *pv_node) {
-  _prev_constant_pool_handle = constantPoolHandle();  // NULL handle
-  _prev_EMCP_method_handles = NULL;
-
-  jobject cp_ref = pv_node->prev_constant_pool();
-  assert(cp_ref != NULL, "constant pool ref was unexpectedly cleared");
-  if (cp_ref == NULL) {
-    return;  // robustness
-  }
-
-  constantPoolOop cp = (constantPoolOop)JNIHandles::resolve(cp_ref);
-  if (cp == NULL) {
-    // Weak reference has been GC'ed. Since the constant pool has been
-    // GC'ed, the methods have also been GC'ed.
-    return;
-  }
-
-  // make the constantPoolOop safe to return
-  _prev_constant_pool_handle = constantPoolHandle(cp);
-
-  GrowableArray<jweak>* method_refs = pv_node->prev_EMCP_methods();
-  if (method_refs == NULL) {
-    // the instanceKlass did not have any EMCP methods
-    return;
-  }
-
-  _prev_EMCP_method_handles = new GrowableArray<methodHandle>(10);
-
-  int n_methods = method_refs->length();
-  for (int i = 0; i < n_methods; i++) {
-    jweak method_ref = method_refs->at(i);
-    assert(method_ref != NULL, "weak method ref was unexpectedly cleared");
-    if (method_ref == NULL) {
-      continue;  // robustness
-    }
-
-    methodOop method = (methodOop)JNIHandles::resolve(method_ref);
-    if (method == NULL) {
-      // this entry has been GC'ed so skip it
-      continue;
-    }
-
-    // make the methodOop safe to return
-    _prev_EMCP_method_handles->append(methodHandle(method));
-  }
-}
-
-
-// Destroy a PreviousVersionInfo
-PreviousVersionInfo::~PreviousVersionInfo() {
-  // Since _prev_EMCP_method_handles is not C-heap allocated, we
-  // don't have to delete it.
-}
-
-
-// Construct a helper for walking the previous versions array
-PreviousVersionWalker::PreviousVersionWalker(instanceKlass *ik) {
-  _previous_versions = ik->previous_versions();
-  _current_index = 0;
-  // _hm needs no initialization
-  _current_p = NULL;
-}
-
-
-// Destroy a PreviousVersionWalker
-PreviousVersionWalker::~PreviousVersionWalker() {
-  // Delete the current info just in case the caller didn't walk to
-  // the end of the previous versions list. No harm if _current_p is
-  // already NULL.
-  delete _current_p;
-
-  // When _hm is destroyed, all the Handles returned in
-  // PreviousVersionInfo objects will be destroyed.
-  // Also, after this destructor is finished it will be
-  // safe to delete the GrowableArray allocated in the
-  // PreviousVersionInfo objects.
-}
-
-
-// Return the interesting information for the next previous version
-// of the klass. Returns NULL if there are no more previous versions.
-PreviousVersionInfo* PreviousVersionWalker::next_previous_version() {
-  if (_previous_versions == NULL) {
-    // no previous versions so nothing to return
-    return NULL;
-  }
-
-  delete _current_p;  // cleanup the previous info for the caller
-  _current_p = NULL;  // reset to NULL so we don't delete same object twice
-
-  int length = _previous_versions->length();
-
-  while (_current_index < length) {
-    PreviousVersionNode * pv_node = _previous_versions->at(_current_index++);
-    PreviousVersionInfo * pv_info = new (ResourceObj::C_HEAP, mtClass)
-                                          PreviousVersionInfo(pv_node);
-
-    constantPoolHandle cp_h = pv_info->prev_constant_pool_handle();
-    if (cp_h.is_null()) {
-      delete pv_info;
-
-      // The underlying node's info has been GC'ed so try the next one.
-      // We don't have to check the methods. If the constant pool has
-      // GC'ed then so have the methods.
-      continue;
-    }
-
-    // Found a node with non GC'ed info so return it. The caller will
-    // need to delete pv_info when they are done with it.
-    _current_p = pv_info;
-    return pv_info;
-  }
-
-  // all of the underlying nodes' info has been GC'ed
-  return NULL;
-} // end next_previous_version()
diff --git a/src/share/vm/oops/instanceKlass.hpp b/src/share/vm/oops/instanceKlass.hpp
index 8a849cb..53163b6 100644
--- a/src/share/vm/oops/instanceKlass.hpp
+++ b/src/share/vm/oops/instanceKlass.hpp
@@ -271,9 +271,6 @@ class instanceKlass: public Klass {
   nmethodBucket*  _dependencies;         // list of dependent nmethods
   nmethod*        _osr_nmethods_head;    // Head of list of on-stack replacement nmethods for this class
   BreakpointInfo* _breakpoints;          // bpt lists, managed by methodOop
-  // Array of interesting part(s) of the previous version(s) of this
-  // instanceKlass. See PreviousVersionWalker below.
-  GrowableArray<PreviousVersionNode *>* _previous_versions;
   // JVMTI fields can be moved to their own structure - see 6315920
   unsigned char * _cached_class_file_bytes;       // JVMTI: cached class file, before retransformable agent modified it in CFLH
   jint            _cached_class_file_len;         // JVMTI: length of above
@@ -571,20 +568,11 @@ class instanceKlass: public Klass {
     _nonstatic_oop_map_size = words;
   }
 
-  // RedefineClasses() support for previous versions:
-  void add_previous_version(instanceKlassHandle ikh, BitMap *emcp_methods,
-         int emcp_method_count);
   // If the _previous_versions array is non-NULL, then this klass
   // has been redefined at least once even if we aren't currently
   // tracking a previous version.
-  bool has_been_redefined() const { return _previous_versions != NULL; }
+  bool has_been_redefined() const { return _old_version != NULL; }
   bool has_previous_version() const;
-  void init_previous_versions() {
-    _previous_versions = NULL;
-  }
-  GrowableArray<PreviousVersionNode *>* previous_versions() const {
-    return _previous_versions;
-  }
 
   // JVMTI: Support for caching a class file before it is modified by an agent that can do retransformation
   void set_cached_class_file(unsigned char *class_file_bytes,
@@ -629,6 +617,7 @@ class instanceKlass: public Klass {
   static void get_jmethod_id_length_value(jmethodID* cache, size_t idnum,
                 size_t *length_p, jmethodID* id_p);
   jmethodID jmethod_id_or_null(methodOop method);
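+  // (class redefinition) Replace the jmethodID cached for the given method with newMethodID.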
+  bool update_jmethod_id(methodOop method, jmethodID newMethodID);
 
   // cached itable index support
   void set_cached_itable_index(size_t idnum, int index);
@@ -711,6 +700,7 @@ class instanceKlass: public Klass {
 
   // subclass/subinterface checks
   bool implements_interface(klassOop k) const;
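+  // Like implements_interface(), but matches any (old or new) version of the interface k.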
+  bool implements_interface_any_version(klassOop k) const;
 
   // Access to the implementor of an interface.
   klassOop implementor() const
@@ -760,6 +750,9 @@ class instanceKlass: public Klass {
   void do_local_static_fields(FieldClosure* cl);
   void do_nonstatic_fields(FieldClosure* cl); // including inherited fields
   void do_local_static_fields(void f(fieldDescriptor*, TRAPS), TRAPS);
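+  // Class redefinition support: record / clear the field update information for this klass.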
+  void store_update_information(GrowableArray<int> &values);
+  void clear_update_information();
+
 
   void methods_do(void f(methodOop method));
   void array_klasses_do(void f(klassOop k));
@@ -895,7 +888,6 @@ class instanceKlass: public Klass {
   ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceKlass_OOP_OOP_ITERATE_BACKWARDS_DECL)
 #endif // !SERIALGC
 
-private:
   // initialization state
 #ifdef ASSERT
   void set_init_state(ClassState state);
@@ -1057,106 +1049,6 @@ class JNIid: public CHeapObj<mtClass> {
   void verify(klassOop holder);
 };
 
-
-// If breakpoints are more numerous than just JVMTI breakpoints,
-// consider compressing this data structure.
-// It is currently a simple linked list defined in methodOop.hpp.
-
-class BreakpointInfo;
-
-
-// A collection point for interesting information about the previous
-// version(s) of an instanceKlass. This class uses weak references to
-// the information so that the information may be collected as needed
-// by the system. If the information is shared, then a regular
-// reference must be used because a weak reference would be seen as
-// collectible. A GrowableArray of PreviousVersionNodes is attached
-// to the instanceKlass as needed. See PreviousVersionWalker below.
-class PreviousVersionNode : public CHeapObj<mtClass> {
- private:
-  // A shared ConstantPool is never collected so we'll always have
-  // a reference to it so we can update items in the cache. We'll
-  // have a weak reference to a non-shared ConstantPool until all
-  // of the methods (EMCP or obsolete) have been collected; the
-  // non-shared ConstantPool becomes collectible at that point.
-  jobject _prev_constant_pool;  // regular or weak reference
-  bool    _prev_cp_is_weak;     // true if not a shared ConstantPool
-
-  // If the previous version of the instanceKlass doesn't have any
-  // EMCP methods, then _prev_EMCP_methods will be NULL. If all the
-  // EMCP methods have been collected, then _prev_EMCP_methods can
-  // have a length of zero.
-  GrowableArray<jweak>* _prev_EMCP_methods;
-
-public:
-  PreviousVersionNode(jobject prev_constant_pool, bool prev_cp_is_weak,
-    GrowableArray<jweak>* prev_EMCP_methods);
-  ~PreviousVersionNode();
-  jobject prev_constant_pool() const {
-    return _prev_constant_pool;
-  }
-  GrowableArray<jweak>* prev_EMCP_methods() const {
-    return _prev_EMCP_methods;
-  }
-};
-
-
-// A Handle-ized version of PreviousVersionNode.
-class PreviousVersionInfo : public ResourceObj {
- private:
-  constantPoolHandle   _prev_constant_pool_handle;
-  // If the previous version of the instanceKlass doesn't have any
-  // EMCP methods, then _prev_EMCP_methods will be NULL. Since the
-  // methods cannot be collected while we hold a handle,
-  // _prev_EMCP_methods should never have a length of zero.
-  GrowableArray<methodHandle>* _prev_EMCP_method_handles;
-
-public:
-  PreviousVersionInfo(PreviousVersionNode *pv_node);
-  ~PreviousVersionInfo();
-  constantPoolHandle prev_constant_pool_handle() const {
-    return _prev_constant_pool_handle;
-  }
-  GrowableArray<methodHandle>* prev_EMCP_method_handles() const {
-    return _prev_EMCP_method_handles;
-  }
-};
-
-
-// Helper object for walking previous versions. This helper cleans up
-// the Handles that it allocates when the helper object is destroyed.
-// The PreviousVersionInfo object returned by next_previous_version()
-// is only valid until a subsequent call to next_previous_version() or
-// the helper object is destroyed.
-class PreviousVersionWalker : public StackObj {
- private:
-  GrowableArray<PreviousVersionNode *>* _previous_versions;
-  int                                   _current_index;
-  // Fields for cleaning up when we are done walking the previous versions:
-  // A HandleMark for the PreviousVersionInfo handles:
-  HandleMark                            _hm;
-
-  // It would be nice to have a ResourceMark field in this helper also,
-  // but the ResourceMark code says to be careful to delete handles held
-  // in GrowableArrays _before_ deleting the GrowableArray. Since we
-  // can't guarantee the order in which the fields are destroyed, we
-  // have to let the creator of the PreviousVersionWalker object do
-  // the right thing. Also, adding a ResourceMark here causes an
-  // include loop.
-
-  // A pointer to the current info object so we can handle the deletes.
-  PreviousVersionInfo *                 _current_p;
-
- public:
-  PreviousVersionWalker(instanceKlass *ik);
-  ~PreviousVersionWalker();
-
-  // Return the interesting information for the next previous version
-  // of the klass. Returns NULL if there are no more previous versions.
-  PreviousVersionInfo* next_previous_version();
-};
-
-
 //
 // nmethodBucket is used to record dependent nmethods for
 // deoptimization.  nmethod dependencies are actually <klass, method>
diff --git a/src/share/vm/oops/instanceKlassKlass.cpp b/src/share/vm/oops/instanceKlassKlass.cpp
index 8e7dc12..5b9b266 100644
--- a/src/share/vm/oops/instanceKlassKlass.cpp
+++ b/src/share/vm/oops/instanceKlassKlass.cpp
@@ -358,7 +358,7 @@ instanceKlassKlass::allocate_instance_klass(Symbol* name, int vtable_len, int it
                                             unsigned nonstatic_oop_map_count,
                                             AccessFlags access_flags,
                                             ReferenceType rt,
-                                            KlassHandle host_klass, TRAPS) {
+                                            KlassHandle host_klass, KlassHandle old_klass, TRAPS) {
 
   const int nonstatic_oop_map_size =
     instanceKlass::nonstatic_oop_map_size(nonstatic_oop_map_count);
@@ -435,7 +435,6 @@ instanceKlassKlass::allocate_instance_klass(Symbol* name, int vtable_len, int it
     ik->set_jni_ids(NULL);
     ik->set_osr_nmethods_head(NULL);
     ik->set_breakpoints(NULL);
-    ik->init_previous_versions();
     ik->set_generic_signature(NULL);
     ik->release_set_methods_jmethod_ids(NULL);
     ik->release_set_methods_cached_itable_indices(NULL);
@@ -480,6 +479,28 @@ void instanceKlassKlass::oop_print_on(oop obj, outputStream* st) {
   instanceKlass* ik = instanceKlass::cast(klassOop(obj));
   klassKlass::oop_print_on(obj, st);
 
+  // (tw) Print this class's revision number together with the revision numbers of its newer/newest and older/oldest versions.
+
+  st->print(BULLET"revision:          %d", ik->revision_number());                    
+  
+  if (ik->new_version() != NULL) {
+    st->print(" (newer=%d)", ik->new_version()->klass_part()->revision_number());
+  }
+
+  if (ik->newest_version() != ik->new_version() && ik->newest_version() != obj) {
+    st->print(" (newest=%d)", ik->newest_version()->klass_part()->revision_number());
+  }
+
+  if (ik->old_version() != NULL) {
+    st->print(" (old=%d)", ik->old_version()->klass_part()->revision_number());
+  }
+
+  if (ik->oldest_version() != ik->old_version() && ik->oldest_version() != obj) {
+    st->print(" (oldest=%d)", ik->oldest_version()->klass_part()->revision_number());
+  }
+
+  st->cr();
+  
   st->print(BULLET"instance size:     %d", ik->size_helper());                        st->cr();
   st->print(BULLET"klass size:        %d", ik->object_size());                        st->cr();
   st->print(BULLET"access:            "); ik->access_flags().print_on(st);            st->cr();
@@ -537,26 +558,6 @@ void instanceKlassKlass::oop_print_on(oop obj, outputStream* st) {
     st->cr();
   }
 
-  {
-    ResourceMark rm;
-    // PreviousVersionInfo objects returned via PreviousVersionWalker
-    // contain a GrowableArray of handles. We have to clean up the
-    // GrowableArray _after_ the PreviousVersionWalker destructor
-    // has destroyed the handles.
-    {
-      bool have_pv = false;
-      PreviousVersionWalker pvw(ik);
-      for (PreviousVersionInfo * pv_info = pvw.next_previous_version();
-           pv_info != NULL; pv_info = pvw.next_previous_version()) {
-        if (!have_pv)
-          st->print(BULLET"previous version:  ");
-        have_pv = true;
-        pv_info->prev_constant_pool_handle()()->print_value_on(st);
-      }
-      if (have_pv)  st->cr();
-    } // pvw is cleaned up
-  } // rm is cleaned up
-
   if (ik->generic_signature() != NULL) {
     st->print(BULLET"generic signature: ");
     ik->generic_signature()->print_value_on(st);
@@ -663,7 +664,7 @@ void instanceKlassKlass::oop_verify_on(oop obj, outputStream* st) {
       }
       guarantee(sib->as_klassOop()->is_klass(), "should be klass");
       guarantee(sib->as_klassOop()->is_perm(),  "should be in permspace");
-      guarantee(sib->super() == super, "siblings should have same superklass");
+      guarantee(sib->super() == super || super->klass_part()->newest_version() == SystemDictionary::Object_klass(), "siblings should have same superklass");
       sib = sib->next_sibling();
     }
 
diff --git a/src/share/vm/oops/instanceKlassKlass.hpp b/src/share/vm/oops/instanceKlassKlass.hpp
index df674a9..45d0b66 100644
--- a/src/share/vm/oops/instanceKlassKlass.hpp
+++ b/src/share/vm/oops/instanceKlassKlass.hpp
@@ -50,6 +50,7 @@ class instanceKlassKlass : public klassKlass {
                                    AccessFlags access_flags,
                                    ReferenceType rt,
                                    KlassHandle host_klass,
+                                   KlassHandle old_klass,
                                    TRAPS);
 
   // Casting from klassOop
diff --git a/src/share/vm/oops/instanceMirrorKlass.cpp b/src/share/vm/oops/instanceMirrorKlass.cpp
index e0dd7d7..a7eec08 100644
--- a/src/share/vm/oops/instanceMirrorKlass.cpp
+++ b/src/share/vm/oops/instanceMirrorKlass.cpp
@@ -156,6 +156,13 @@ void instanceMirrorKlass::oop_follow_contents(oop obj) {
     assert_is_in_closed_subset)
 }
 
+void instanceMirrorKlass::oop_fields_iterate(oop obj, OopClosure* blk) {
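+  // Visit only the static oop fields embedded in this java.lang.Class mirror.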
+  InstanceMirrorKlass_OOP_ITERATE(
+    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj),
+    blk->do_oop(p),
+    assert_is_in_closed_subset)
+}
+
 #ifndef SERIALGC
 void instanceMirrorKlass::oop_follow_contents(ParCompactionManager* cm,
                                               oop obj) {
diff --git a/src/share/vm/oops/instanceMirrorKlass.hpp b/src/share/vm/oops/instanceMirrorKlass.hpp
index 2b8b2f4..31969c7 100644
--- a/src/share/vm/oops/instanceMirrorKlass.hpp
+++ b/src/share/vm/oops/instanceMirrorKlass.hpp
@@ -79,6 +79,9 @@ class instanceMirrorKlass: public instanceKlass {
   DEFINE_ALLOCATE_PERMANENT(instanceMirrorKlass);
   instanceOop allocate_instance(KlassHandle k, TRAPS);
 
+  // Class redefinition, iterate static fields
+  static void oop_fields_iterate(oop obj, OopClosure* blk);
+
   // Garbage collection
   int  oop_adjust_pointers(oop obj);
   void oop_follow_contents(oop obj);
diff --git a/src/share/vm/oops/instanceRefKlass.cpp b/src/share/vm/oops/instanceRefKlass.cpp
index 7db4f03..1171487 100644
--- a/src/share/vm/oops/instanceRefKlass.cpp
+++ b/src/share/vm/oops/instanceRefKlass.cpp
@@ -455,10 +455,13 @@ void instanceRefKlass::update_nonstatic_oop_maps(klassOop k) {
   instanceKlass* ik = instanceKlass::cast(k);
 
   // Check that we have the right class
-  debug_only(static bool first_time = true);
-  assert(k == SystemDictionary::Reference_klass() && first_time,
-         "Invalid update of maps");
-  debug_only(first_time = false);
+
+  // (tw) These asserts are no longer valid once classes can be redefined.
+  // debug_only(static bool first_time = true);
+  // assert(k == SystemDictionary::Reference_klass() && first_time,
+  //        "Invalid update of maps");
+  // debug_only(first_time = false);
   assert(ik->nonstatic_oop_map_count() == 1, "just checking");
 
   OopMapBlock* map = ik->start_of_nonstatic_oop_maps();
diff --git a/src/share/vm/oops/klass.cpp b/src/share/vm/oops/klass.cpp
index 596d5ad..a928777 100644
--- a/src/share/vm/oops/klass.cpp
+++ b/src/share/vm/oops/klass.cpp
@@ -161,6 +161,13 @@ klassOop Klass::base_create_klass_oop(KlassHandle& klass, int size,
   kl->set_alloc_size(0);
   TRACE_INIT_ID(kl);
 
+  kl->set_redefinition_flags(Klass::NoRedefinition);
+  kl->set_redefining(false);
+  kl->set_new_version(NULL);
+  kl->set_old_version(NULL);
+  kl->set_redefinition_index(-1);
+  kl->set_revision_number(-1);
+
   kl->set_prototype_header(markOopDesc::prototype());
   kl->set_biased_lock_revocation_count(0);
   kl->set_last_biased_lock_bulk_revocation_time(0);
@@ -232,7 +239,7 @@ void Klass::initialize_supers(klassOop k, TRAPS) {
     set_super(NULL);
     oop_store_without_check((oop*) &_primary_supers[0], (oop) this->as_klassOop());
     assert(super_depth() == 0, "Object must already be initialized properly");
-  } else if (k != super() || k == SystemDictionary::Object_klass()) {
+  } else if (k != super() || k->klass_part()->super() == NULL) {
     assert(super() == NULL || super() == SystemDictionary::Object_klass(),
            "initialize this only once to a non-trivial value");
     set_super(k);
@@ -385,7 +392,7 @@ void Klass::append_to_sibling_list() {
 void Klass::remove_from_sibling_list() {
   // remove receiver from sibling list
   instanceKlass* super = superklass();
-  assert(super != NULL || as_klassOop() == SystemDictionary::Object_klass(), "should have super");
+  assert(super != NULL || as_klassOop()->klass_part()->newest_version() == SystemDictionary::Object_klass()->klass_part()->newest_version(), "should have super");
   if (super == NULL) return;        // special case: class Object
   if (super->subklass() == this) {
     // first subklass
diff --git a/src/share/vm/oops/klass.hpp b/src/share/vm/oops/klass.hpp
index bcbd4e7..bf242d9 100644
--- a/src/share/vm/oops/klass.hpp
+++ b/src/share/vm/oops/klass.hpp
@@ -170,6 +170,7 @@ class Klass_vtbl {
   void* operator new(size_t ignored, KlassHandle& klass, int size, TRAPS);
 };
 
+template<class L, class R> class Pair;
 
 class Klass : public Klass_vtbl {
   friend class VMStructs;
@@ -222,6 +223,31 @@ class Klass : public Klass_vtbl {
   oop* oop_block_beg() const { return adr_secondary_super_cache(); }
   oop* oop_block_end() const { return adr_next_sibling() + 1; }
 
+  // (tw) Flags describing what kind of change a class redefinition (code evolution) makes.
+  enum RedefinitionFlags {
+
+    // This class is not redefined at all!
+    NoRedefinition,
+
+    // There are changes to the class meta data.
+    ModifyClass = 1,
+
+    // The size of the class meta data changes.
+    ModifyClassSize = ModifyClass << 1,
+
+    // There are changes to the instance format.
+    ModifyInstances = ModifyClassSize << 1,
+
+    // The size of instances changes.
+    ModifyInstanceSize = ModifyInstances << 1,
+
+    // A super type of this class is removed.
+    RemoveSuperType = ModifyInstanceSize << 1,
+
+    // This class has been marked as an affected class.
+    MarkedAsAffected = RemoveSuperType << 1
+  };
+
  protected:
   //
   // The oop block.  All oop fields must be declared here and only oop fields
@@ -241,6 +267,10 @@ class Klass : public Klass_vtbl {
   oop       _java_mirror;
   // Superclass
   klassOop  _super;
+  // Old class
+  klassOop _old_version;
+  // New class
+  klassOop _new_version;
   // First subclass (NULL if none); _subklass->next_sibling() is next one
   klassOop _subklass;
   // Sibling link (or NULL); links all subklasses of a klass
@@ -253,6 +283,16 @@ class Klass : public Klass_vtbl {
   jint        _modifier_flags;  // Processed access flags, for use by Class.getModifiers.
   AccessFlags _access_flags;    // Access flags. The class/interface distinction is stored here.
 
+  // (tw) Non-oop fields for enhanced class redefinition
+  jint                  _revision_number;        // The revision number for redefined classes
+  jint                  _redefinition_index;     // Index of this class when performing the redefinition
+  bool                  _subtype_changed;
+  int                   _redefinition_flags;     // Level of class redefinition
+  bool                  _is_copying_backwards;   // Must instance fields be copied back-to-front because the copied regions could overlap?
+  bool                  _original_field_offsets_changed; // Did the original field offsets of this class change during class redefinition?
+  int *                 _update_information;     // Update information computed during class redefinition
+  bool                  _is_redefining;
+
 #ifndef PRODUCT
   int           _verify_count;  // to avoid redundant verifies
 #endif
@@ -301,6 +341,75 @@ class Klass : public Klass_vtbl {
   klassOop secondary_super_cache() const     { return _secondary_super_cache; }
   void set_secondary_super_cache(klassOop k) { oop_store_without_check((oop*) &_secondary_super_cache, (oop) k); }
 
+  // BEGIN class redefinition utilities
+
+  // double links between new and old version of a class
+  klassOop old_version() const                         { return _old_version; }
+  void set_old_version(klassOop klass)                 { assert(_old_version == NULL || klass == NULL, "Can only be set once!"); _old_version = klass; }
+  klassOop new_version() const                         { return _new_version; }
+  void set_new_version(klassOop klass)                 { assert(_new_version == NULL || klass == NULL, "Can only be set once!"); _new_version = klass; }
+
+  // A subtype of this class is no longer a subtype
+  bool has_subtype_changed() const                     { return _subtype_changed; }
+  void set_subtype_changed(bool b)                     { assert(is_newest_version() || new_version()->klass_part()->is_newest_version(), "must be newest or second newest version");
+                                                         _subtype_changed = b; }
+  // state of being redefined
+  int redefinition_index() const                       { return _redefinition_index; }
+  void set_redefinition_index(int index)               { _redefinition_index = index; }
+  void set_redefining(bool b)                          { _is_redefining = b; }
+  bool is_redefining() const                           { return _is_redefining; }
+  int redefinition_flags() const                       { return _redefinition_flags; }
+  bool check_redefinition_flag(int flags) const        { return (_redefinition_flags & flags) != 0; }
+  void set_redefinition_flags(int flags)               { _redefinition_flags = flags; }
+  void set_redefinition_flag(int flag)                 { _redefinition_flags |= flag; }
+  void clear_redefinition_flag(int flag)                 { _redefinition_flags &= ~flag; }
+  bool is_copying_backwards() const                    { return _is_copying_backwards; }
+  void set_copying_backwards(bool b)                   { _is_copying_backwards = b; }
+
+  // update information
+  int *update_information() const                      { return _update_information; }
+  void set_update_information(int *info)               { _update_information = info; }
+
+  bool is_same_or_older_version(klassOop klass) const {
+    if (Klass::cast(klass) == this) { return true; }
+    else if (_old_version == NULL) { return false; }
+    else { return _old_version->klass_part()->is_same_or_older_version(klass); }
+  }
+
+  // Revision number for redefined classes, -1 for originally loaded classes
+  jint revision_number() const {
+    return _revision_number;
+  }
+
+  bool was_redefined() const {
+    return _revision_number != -1;
+  }
+
+  void set_revision_number(jint number) {
+    _revision_number = number;
+  }
+
+  klassOop oldest_version() const {
+    if (_old_version == NULL) { return this->as_klassOop(); }
+    else { return _old_version->klass_part()->oldest_version(); };
+  }
+
+  klassOop newest_version() const {
+    if (_new_version == NULL) { return this->as_klassOop(); }
+    else { return _new_version->klass_part()->newest_version(); };
+  }
+
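+  // The newest version of this class that is already fully installed (skips versions still being redefined).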
+  klassOop active_version() const {
+    if (_new_version == NULL || _new_version->klass_part()->is_redefining()) { assert(!this->is_redefining(), "just checking"); return this->as_klassOop(); }
+    else { return _new_version->klass_part()->active_version(); };
+  }
+
+  bool is_newest_version() const {
+    return _new_version == NULL;
+  }
+
+  // END class redefinition utilities
+
   objArrayOop secondary_supers() const { return _secondary_supers; }
   void set_secondary_supers(objArrayOop k) { oop_store_without_check((oop*) &_secondary_supers, (oop) k); }
 
@@ -361,6 +470,8 @@ class Klass : public Klass_vtbl {
   void     set_next_sibling(klassOop s);
 
   oop* adr_super()           const { return (oop*)&_super;             }
+  oop* adr_old_version()           const { return (oop*)&_old_version; }
+  oop* adr_new_version()           const { return (oop*)&_new_version; }
   oop* adr_primary_supers()  const { return (oop*)&_primary_supers[0]; }
   oop* adr_secondary_super_cache() const { return (oop*)&_secondary_super_cache; }
   oop* adr_secondary_supers()const { return (oop*)&_secondary_supers;  }
diff --git a/src/share/vm/oops/klassKlass.cpp b/src/share/vm/oops/klassKlass.cpp
index 06809d5..1050eda 100644
--- a/src/share/vm/oops/klassKlass.cpp
+++ b/src/share/vm/oops/klassKlass.cpp
@@ -68,6 +68,8 @@ void klassKlass::oop_follow_contents(oop obj) {
   Klass* k = Klass::cast(klassOop(obj));
   // If we are alive it is valid to keep our superclass and subtype caches alive
   MarkSweep::mark_and_push(k->adr_super());
+  MarkSweep::mark_and_push(k->adr_old_version());
+  MarkSweep::mark_and_push(k->adr_new_version());
   for (juint i = 0; i < Klass::primary_super_limit(); i++)
     MarkSweep::mark_and_push(k->adr_primary_supers()+i);
   MarkSweep::mark_and_push(k->adr_secondary_super_cache());
@@ -87,6 +89,8 @@ void klassKlass::oop_follow_contents(ParCompactionManager* cm,
   Klass* k = Klass::cast(klassOop(obj));
   // If we are alive it is valid to keep our superclass and subtype caches alive
   PSParallelCompact::mark_and_push(cm, k->adr_super());
+  PSParallelCompact::mark_and_push(cm, k->adr_old_version());
+  PSParallelCompact::mark_and_push(cm, k->adr_new_version());
   for (juint i = 0; i < Klass::primary_super_limit(); i++)
     PSParallelCompact::mark_and_push(cm, k->adr_primary_supers()+i);
   PSParallelCompact::mark_and_push(cm, k->adr_secondary_super_cache());
@@ -106,6 +110,8 @@ int klassKlass::oop_oop_iterate(oop obj, OopClosure* blk) {
   int size = oop_size(obj);
   Klass* k = Klass::cast(klassOop(obj));
   blk->do_oop(k->adr_super());
+  blk->do_oop(k->adr_old_version());
+  blk->do_oop(k->adr_new_version());
   for (juint i = 0; i < Klass::primary_super_limit(); i++)
     blk->do_oop(k->adr_primary_supers()+i);
   blk->do_oop(k->adr_secondary_super_cache());
@@ -134,6 +140,10 @@ int klassKlass::oop_oop_iterate_m(oop obj, OopClosure* blk, MemRegion mr) {
   oop* adr;
   adr = k->adr_super();
   if (mr.contains(adr)) blk->do_oop(adr);
+  adr = k->adr_old_version();
+  if (mr.contains(adr)) blk->do_oop(adr);
+  adr = k->adr_new_version();
+  if (mr.contains(adr)) blk->do_oop(adr);
   for (juint i = 0; i < Klass::primary_super_limit(); i++) {
     adr = k->adr_primary_supers()+i;
     if (mr.contains(adr)) blk->do_oop(adr);
@@ -167,6 +177,8 @@ int klassKlass::oop_adjust_pointers(oop obj) {
   Klass* k = Klass::cast(klassOop(obj));
 
   MarkSweep::adjust_pointer(k->adr_super());
+  MarkSweep::adjust_pointer(k->adr_new_version());
+  MarkSweep::adjust_pointer(k->adr_old_version());
   for (juint i = 0; i < Klass::primary_super_limit(); i++)
     MarkSweep::adjust_pointer(k->adr_primary_supers()+i);
   MarkSweep::adjust_pointer(k->adr_secondary_super_cache());
diff --git a/src/share/vm/oops/klassVtable.cpp b/src/share/vm/oops/klassVtable.cpp
index 94e2e04..a683a4b 100644
--- a/src/share/vm/oops/klassVtable.cpp
+++ b/src/share/vm/oops/klassVtable.cpp
@@ -628,17 +628,13 @@ void klassVtable::adjust_method_entries(methodOop* old_methods, methodOop* new_m
       if (unchecked_method_at(index) == old_method) {
         put_method_at(new_method, index);
 
-        if (RC_TRACE_IN_RANGE(0x00100000, 0x00400000)) {
+        IF_TRACE_RC4 {
           if (!(*trace_name_printed)) {
-            // RC_TRACE_MESG macro has an embedded ResourceMark
-            RC_TRACE_MESG(("adjust: name=%s",
-                           Klass::cast(old_method->method_holder())->external_name()));
+            TRACE_RC4("adjust: name=%s", Klass::cast(old_method->method_holder())->external_name());
             *trace_name_printed = true;
           }
-          // RC_TRACE macro has an embedded ResourceMark
-          RC_TRACE(0x00100000, ("vtable method update: %s(%s)",
-                                new_method->name()->as_C_string(),
-                                new_method->signature()->as_C_string()));
+          TRACE_RC4("vtable method update: %s(%s)", new_method->name()->as_C_string(),
+            new_method->signature()->as_C_string());
         }
         // cannot 'break' here; see for-loop comment above.
       }
@@ -1008,17 +1004,13 @@ void klassItable::adjust_method_entries(methodOop* old_methods, methodOop* new_m
       if (ime->method() == old_method) {
         ime->initialize(new_method);
 
-        if (RC_TRACE_IN_RANGE(0x00100000, 0x00400000)) {
+        IF_TRACE_RC4 {
           if (!(*trace_name_printed)) {
-            // RC_TRACE_MESG macro has an embedded ResourceMark
-            RC_TRACE_MESG(("adjust: name=%s",
-              Klass::cast(old_method->method_holder())->external_name()));
+            TRACE_RC4("adjust: name=%s", Klass::cast(old_method->method_holder())->external_name());
             *trace_name_printed = true;
           }
-          // RC_TRACE macro has an embedded ResourceMark
-          RC_TRACE(0x00200000, ("itable method update: %s(%s)",
-            new_method->name()->as_C_string(),
-            new_method->signature()->as_C_string()));
+          TRACE_RC4("itable method update: %s(%s)", new_method->name()->as_C_string(),
+            new_method->signature()->as_C_string());
         }
         // cannot 'break' here; see for-loop comment above.
       }
@@ -1241,6 +1233,7 @@ void klassVtable::verify(outputStream* st, bool forced) {
 
 void klassVtable::verify_against(outputStream* st, klassVtable* vt, int index) {
   vtableEntry* vte = &vt->table()[index];
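+  // Nothing to compare if either entry has no method set (possible for old class versions).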
+  if (vte->method() == NULL || table()[index].method() == NULL) return;
   if (vte->method()->name()      != table()[index].method()->name() ||
       vte->method()->signature() != table()[index].method()->signature()) {
     fatal("mismatched name/signature of vtable entries");
@@ -1260,6 +1253,8 @@ void klassVtable::print() {
 
 void vtableEntry::verify(klassVtable* vt, outputStream* st) {
   NOT_PRODUCT(FlagSetting fs(IgnoreLockingAssertions, true));
+  // (tw) TODO: Check: the assert below may not hold for old class versions, so guard against a NULL method.
+  if (method() != NULL) {
   assert(method() != NULL, "must have set method");
   method()->verify();
   // we sub_type, because it could be a miranda method
@@ -1267,7 +1262,13 @@ void vtableEntry::verify(klassVtable* vt, outputStream* st) {
 #ifndef PRODUCT
     print();
 #endif
-    fatal(err_msg("vtableEntry " PTR_FORMAT ": method is from subclass", this));
+      klassOop first_klass = vt->klass()();
+      klassOop second_klass = method()->method_holder();
+      // (tw) The fatal below does not hold for old versions of classes.
+      if (first_klass->klass_part()->is_newest_version()) {
+        // fatal(err_msg("vtableEntry " PTR_FORMAT ": method is from subclass", this));
+      }
+    }
   }
 }
 
@@ -1275,8 +1276,8 @@ void vtableEntry::verify(klassVtable* vt, outputStream* st) {
 
 void vtableEntry::print() {
   ResourceMark rm;
-  tty->print("vtableEntry %s: ", method()->name()->as_C_string());
   if (Verbose) {
+    tty->print("vtableEntry %s: ", (method() == NULL) ? "null" : method()->name()->as_C_string());
     tty->print("m %#lx ", (address)method());
   }
 }
diff --git a/src/share/vm/oops/methodKlass.cpp b/src/share/vm/oops/methodKlass.cpp
index 75d0b09..c4be146 100644
--- a/src/share/vm/oops/methodKlass.cpp
+++ b/src/share/vm/oops/methodKlass.cpp
@@ -93,6 +93,9 @@ methodOop methodKlass::allocate(constMethodHandle xconst,
   m->set_adapter_entry(NULL);
   m->clear_code(); // from_c/from_i get set to c2i/i2i
 
+  m->set_new_version(NULL);
+  m->set_old_version(NULL);
+
   if (access_flags.is_native()) {
     m->clear_native_function();
     m->set_signature_handler(NULL);
@@ -122,6 +125,8 @@ void methodKlass::oop_follow_contents(oop obj) {
   // Performance tweak: We skip iterating over the klass pointer since we
   // know that Universe::methodKlassObj never moves.
   MarkSweep::mark_and_push(m->adr_constMethod());
+  MarkSweep::mark_and_push(m->adr_new_version());
+  MarkSweep::mark_and_push(m->adr_old_version());
   if (m->method_data() != NULL) {
     MarkSweep::mark_and_push(m->adr_method_data());
   }
@@ -135,6 +140,8 @@ void methodKlass::oop_follow_contents(ParCompactionManager* cm,
   // Performance tweak: We skip iterating over the klass pointer since we
   // know that Universe::methodKlassObj never moves.
   PSParallelCompact::mark_and_push(cm, m->adr_constMethod());
+  PSParallelCompact::mark_and_push(cm, m->adr_new_version());
+  PSParallelCompact::mark_and_push(cm, m->adr_old_version());
 #ifdef COMPILER2
   if (m->method_data() != NULL) {
     PSParallelCompact::mark_and_push(cm, m->adr_method_data());
@@ -152,6 +159,8 @@ int methodKlass::oop_oop_iterate(oop obj, OopClosure* blk) {
   // Performance tweak: We skip iterating over the klass pointer since we
   // know that Universe::methodKlassObj never moves
   blk->do_oop(m->adr_constMethod());
+  blk->do_oop(m->adr_new_version());
+  blk->do_oop(m->adr_old_version());
   if (m->method_data() != NULL) {
     blk->do_oop(m->adr_method_data());
   }
@@ -170,6 +179,10 @@ int methodKlass::oop_oop_iterate_m(oop obj, OopClosure* blk, MemRegion mr) {
   oop* adr;
   adr = m->adr_constMethod();
   if (mr.contains(adr)) blk->do_oop(adr);
+  adr = m->adr_new_version();
+  if (mr.contains(adr)) blk->do_oop(adr);
+  adr = m->adr_old_version();
+  if (mr.contains(adr)) blk->do_oop(adr);
   if (m->method_data() != NULL) {
     adr = m->adr_method_data();
     if (mr.contains(adr)) blk->do_oop(adr);
@@ -187,6 +200,8 @@ int methodKlass::oop_adjust_pointers(oop obj) {
   // Performance tweak: We skip iterating over the klass pointer since we
   // know that Universe::methodKlassObj never moves.
   MarkSweep::adjust_pointer(m->adr_constMethod());
+  MarkSweep::adjust_pointer(m->adr_new_version());
+  MarkSweep::adjust_pointer(m->adr_old_version());
   if (m->method_data() != NULL) {
     MarkSweep::adjust_pointer(m->adr_method_data());
   }
@@ -202,6 +217,8 @@ int methodKlass::oop_update_pointers(ParCompactionManager* cm, oop obj) {
   assert(obj->is_method(), "should be method");
   methodOop m = methodOop(obj);
   PSParallelCompact::adjust_pointer(m->adr_constMethod());
+  PSParallelCompact::adjust_pointer(m->adr_new_version());
+  PSParallelCompact::adjust_pointer(m->adr_old_version());
 #ifdef COMPILER2
   if (m->method_data() != NULL) {
     PSParallelCompact::adjust_pointer(m->adr_method_data());
@@ -222,7 +239,18 @@ void methodKlass::oop_print_on(oop obj, outputStream* st) {
   methodOop m = methodOop(obj);
   // get the effect of PrintOopAddress, always, for methods:
   st->print_cr(" - this oop:          "INTPTR_FORMAT, (intptr_t)m);
-  st->print   (" - method holder:     ");    m->method_holder()->print_value_on(st); st->cr();
+  st->print   (" - method holder:     ");    m->method_holder()->print_value_on(st);
+
+  if (m->method_holder()->klass_part()->new_version() != NULL) {
+    st->print(" (old)");
+  }
+  st->cr();
+
+  st->print_cr(" - is obsolete:       %d",   (int)(m->is_obsolete()));
+  st->print_cr(" - is old:            %d",   (int)(m->is_old()));
+  st->print_cr(" - new version:       "INTPTR_FORMAT,   (address)m->new_version());
+  st->print_cr(" - old version:       "INTPTR_FORMAT,   (address)m->old_version());
+  st->print_cr(" - holder revision:   %d", m->method_holder()->klass_part()->revision_number());
   st->print   (" - constants:         "INTPTR_FORMAT" ", (address)m->constants());
   m->constants()->print_value_on(st); st->cr();
   st->print   (" - access:            0x%x  ", m->access_flags().as_int()); m->access_flags().print_on(st); st->cr();
diff --git a/src/share/vm/oops/methodOop.cpp b/src/share/vm/oops/methodOop.cpp
index 4f59d3a..32cb4cf 100644
--- a/src/share/vm/oops/methodOop.cpp
+++ b/src/share/vm/oops/methodOop.cpp
@@ -1061,6 +1061,8 @@ methodHandle methodOopDesc::clone_with_new_data(methodHandle m, u_char* new_code
 
   // Reset correct method/const method, method size, and parameter info
   newm->set_constMethod(newcm);
+  newm->set_new_version(newm->new_version());
+  newm->set_old_version(newm->old_version());
   newm->constMethod()->set_code_size(new_code_length);
   newm->constMethod()->set_constMethod_size(new_const_method_size);
   newm->set_method_size(new_method_size);
diff --git a/src/share/vm/oops/methodOop.hpp b/src/share/vm/oops/methodOop.hpp
index 486e106..e35d5ed 100644
--- a/src/share/vm/oops/methodOop.hpp
+++ b/src/share/vm/oops/methodOop.hpp
@@ -114,6 +114,10 @@ class methodOopDesc : public oopDesc {
   AccessFlags       _access_flags;               // Access flags
   int               _vtable_index;               // vtable index of this method (see VtableIndexFlag)
                                                  // note: can have vtables with >2**16 elements (because of inheritance)
+  // (tw) Links to the newer and older version of this method created by class redefinition (NULL if none)
+  methodOop         _new_version;
+  methodOop         _old_version;
+
 #ifdef CC_INTERP
   int               _result_index;               // C++ interpreter needs for converting results to/from stack
 #endif
@@ -175,6 +179,29 @@ class methodOopDesc : public oopDesc {
   int name_index() const                         { return constMethod()->name_index();         }
   void set_name_index(int index)                 { constMethod()->set_name_index(index);       }
 
+  methodOop new_version() const { return _new_version; }
+  void set_new_version(methodOop m) { _new_version = m; }
+  methodOop newest_version() { if (_new_version == NULL) return this; else return new_version()->newest_version(); }
+
+  methodOop old_version() const { return _old_version; }
+  void set_old_version(methodOop m) {
+    if (m == NULL) {
+      _old_version = NULL;
+      return;
+    }
+
+    assert(_old_version == NULL, "may only be set once");
+    assert(this->code_size() == m->code_size(), "must have same code length");
+    _old_version = m;
+  }
+
+  methodOop oldest_version() const {
+    if (_old_version == NULL) return (methodOop)this;
+    else return old_version()->oldest_version();
+  }
+
   // signature
   Symbol* signature() const                      { return constants()->symbol_at(signature_index()); }
   int signature_index() const                    { return constMethod()->signature_index();         }
@@ -734,6 +761,8 @@ class methodOopDesc : public oopDesc {
 
   // Garbage collection support
   oop*  adr_constMethod() const                  { return (oop*)&_constMethod;     }
+  oop*  adr_new_version() const                  { return (oop*)&_new_version;     }
+  oop*  adr_old_version() const                  { return (oop*)&_old_version;     }
   oop*  adr_method_data() const                  { return (oop*)&_method_data;     }
 };
 
diff --git a/src/share/vm/oops/oop.hpp b/src/share/vm/oops/oop.hpp
index 5982c88..4873fca 100644
--- a/src/share/vm/oops/oop.hpp
+++ b/src/share/vm/oops/oop.hpp
@@ -95,6 +95,7 @@ class oopDesc {
   narrowOop* compressed_klass_addr();
 
   void set_klass(klassOop k);
+  void set_klass_no_check(klassOop k);
 
   // For klass field compression
   int klass_gap() const;
@@ -135,6 +136,7 @@ class oopDesc {
   bool is_array()              const;
   bool is_objArray()           const;
   bool is_klass()              const;
+  bool is_instanceKlass()      const;
   bool is_thread()             const;
   bool is_method()             const;
   bool is_constMethod()        const;
diff --git a/src/share/vm/oops/oop.inline.hpp b/src/share/vm/oops/oop.inline.hpp
index f4eb2f7..0acb346 100644
--- a/src/share/vm/oops/oop.inline.hpp
+++ b/src/share/vm/oops/oop.inline.hpp
@@ -123,6 +123,14 @@ inline void oopDesc::set_klass(klassOop k) {
   }
 }
 
+inline void oopDesc::set_klass_no_check(klassOop k) {
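+  // Same as set_klass(), but skips the validity assertions; used by the class redefinition code.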
+  if (UseCompressedOops) {
+    oop_store_without_check(compressed_klass_addr(), (oop)k);
+  } else {
+    oop_store_without_check(klass_addr(), (oop) k);
+  }
+}
+
 inline int oopDesc::klass_gap() const {
   return *(int*)(((intptr_t)this) + klass_gap_offset_in_bytes());
 }
@@ -156,6 +164,7 @@ inline bool oopDesc::is_objArray()           const { return blueprint()->oop_is_
 inline bool oopDesc::is_typeArray()          const { return blueprint()->oop_is_typeArray(); }
 inline bool oopDesc::is_javaArray()          const { return blueprint()->oop_is_javaArray(); }
 inline bool oopDesc::is_klass()              const { return blueprint()->oop_is_klass(); }
+inline bool oopDesc::is_instanceKlass()      const { return blueprint()->oop_is_instanceKlass(); }
 inline bool oopDesc::is_thread()             const { return blueprint()->oop_is_thread(); }
 inline bool oopDesc::is_method()             const { return blueprint()->oop_is_method(); }
 inline bool oopDesc::is_constMethod()        const { return blueprint()->oop_is_constMethod(); }
diff --git a/src/share/vm/prims/jni.cpp b/src/share/vm/prims/jni.cpp
index 2123991..6cbd78c 100644
--- a/src/share/vm/prims/jni.cpp
+++ b/src/share/vm/prims/jni.cpp
@@ -406,7 +406,7 @@ JNI_ENTRY(jclass, jni_DefineClass(JNIEnv *env, const char *name, jobject loaderR
     }
   }
   klassOop k = SystemDictionary::resolve_from_stream(class_name, class_loader,
-                                                     Handle(), &st, true,
+                                                     Handle(), &st, true, KlassHandle(),
                                                      CHECK_NULL);
 
   if (TraceClassResolution && k != NULL) {
diff --git a/src/share/vm/prims/jvm.cpp b/src/share/vm/prims/jvm.cpp
index 7dcd968..d59052f 100644
--- a/src/share/vm/prims/jvm.cpp
+++ b/src/share/vm/prims/jvm.cpp
@@ -872,7 +872,7 @@ static jclass jvm_define_class_common(JNIEnv *env, const char *name,
   Handle protection_domain (THREAD, JNIHandles::resolve(pd));
   klassOop k = SystemDictionary::resolve_from_stream(class_name, class_loader,
                                                      protection_domain, &st,
-                                                     verify != 0,
+                                                     verify != 0, KlassHandle(),
                                                      CHECK_NULL);
 
   if (TraceClassResolution && k != NULL) {
diff --git a/src/share/vm/prims/jvm_misc.hpp b/src/share/vm/prims/jvm_misc.hpp
index 2b46e36..549e949 100644
--- a/src/share/vm/prims/jvm_misc.hpp
+++ b/src/share/vm/prims/jvm_misc.hpp
@@ -84,6 +84,7 @@ extern "C" {
     (JNIEnv *env, jobject obj, jfieldID fieldID);
 }
 
+// TODO(tw): Check if we need to "unquicken" because of class redefinition.
 void    quicken_jni_functions();
 address jni_GetBooleanField_addr();
 address jni_GetByteField_addr();
diff --git a/src/share/vm/prims/jvmtiEnv.cpp b/src/share/vm/prims/jvmtiEnv.cpp
index 4ac6b82..30b8e84 100644
--- a/src/share/vm/prims/jvmtiEnv.cpp
+++ b/src/share/vm/prims/jvmtiEnv.cpp
@@ -290,7 +290,10 @@ JvmtiEnv::RetransformClasses(jint class_count, const jclass* classes) {
     class_definitions[index].klass              = jcls;
   }
   VM_RedefineClasses op(class_count, class_definitions, jvmti_class_load_kind_retransform);
-  VMThread::execute(&op);
+  {
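+    // Only one class redefinition operation may be in progress at a time.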
+    MutexLocker sd_mutex(RedefineClasses_lock);
+    VMThread::execute(&op);
+  }
   return (op.check_error());
 } /* end RetransformClasses */
 
@@ -299,9 +302,12 @@ JvmtiEnv::RetransformClasses(jint class_count, const jclass* classes) {
 // class_definitions - pre-checked for NULL
 jvmtiError
 JvmtiEnv::RedefineClasses(jint class_count, const jvmtiClassDefinition* class_definitions) {
-//TODO: add locking
+
   VM_RedefineClasses op(class_count, class_definitions, jvmti_class_load_kind_redefine);
-  VMThread::execute(&op);
+  {
+    MutexLocker sd_mutex(RedefineClasses_lock);
+    VMThread::execute(&op);
+  }
   return (op.check_error());
 } /* end RedefineClasses */
 
diff --git a/src/share/vm/prims/jvmtiExport.cpp b/src/share/vm/prims/jvmtiExport.cpp
index ec8ede3..2bd5983 100644
--- a/src/share/vm/prims/jvmtiExport.cpp
+++ b/src/share/vm/prims/jvmtiExport.cpp
@@ -2296,7 +2296,7 @@ JvmtiDynamicCodeEventCollector::JvmtiDynamicCodeEventCollector() : _code_blobs(N
 // iterate over any code blob descriptors collected and post a
 // DYNAMIC_CODE_GENERATED event to the profiler.
 JvmtiDynamicCodeEventCollector::~JvmtiDynamicCodeEventCollector() {
-  assert(!JavaThread::current()->owns_locks(), "all locks must be released to post deferred events");
+  assert(!JavaThread::current()->owns_locks_but_redefine_classes_lock(), "all locks must be released to post deferred events");
  // iterate over any code blob descriptors that we collected
  if (_code_blobs != NULL) {
    for (int i=0; i<_code_blobs->length(); i++) {
diff --git a/src/share/vm/prims/jvmtiImpl.cpp b/src/share/vm/prims/jvmtiImpl.cpp
index d3fa140..31a8a19 100644
--- a/src/share/vm/prims/jvmtiImpl.cpp
+++ b/src/share/vm/prims/jvmtiImpl.cpp
@@ -284,60 +284,11 @@ address JvmtiBreakpoint::getBcp() {
 }
 
 void JvmtiBreakpoint::each_method_version_do(method_action meth_act) {
-  ((methodOopDesc*)_method->*meth_act)(_bci);
-
-  // add/remove breakpoint to/from versions of the method that
-  // are EMCP. Directly or transitively obsolete methods are
-  // not saved in the PreviousVersionInfo.
-  Thread *thread = Thread::current();
-  instanceKlassHandle ikh = instanceKlassHandle(thread, _method->method_holder());
-  Symbol* m_name = _method->name();
-  Symbol* m_signature = _method->signature();
-
-  {
-    ResourceMark rm(thread);
-    // PreviousVersionInfo objects returned via PreviousVersionWalker
-    // contain a GrowableArray of handles. We have to clean up the
-    // GrowableArray _after_ the PreviousVersionWalker destructor
-    // has destroyed the handles.
-    {
-      // search previous versions if they exist
-      PreviousVersionWalker pvw((instanceKlass *)ikh()->klass_part());
-      for (PreviousVersionInfo * pv_info = pvw.next_previous_version();
-           pv_info != NULL; pv_info = pvw.next_previous_version()) {
-        GrowableArray<methodHandle>* methods =
-          pv_info->prev_EMCP_method_handles();
-
-        if (methods == NULL) {
-          // We have run into a PreviousVersion generation where
-          // all methods were made obsolete during that generation's
-          // RedefineClasses() operation. At the time of that
-          // operation, all EMCP methods were flushed so we don't
-          // have to go back any further.
-          //
-          // A NULL methods array is different than an empty methods
-          // array. We cannot infer any optimizations about older
-          // generations from an empty methods array for the current
-          // generation.
-          break;
-        }
-
-        for (int i = methods->length() - 1; i >= 0; i--) {
-          methodHandle method = methods->at(i);
-          if (method->name() == m_name && method->signature() == m_signature) {
-            RC_TRACE(0x00000800, ("%sing breakpoint in %s(%s)",
-              meth_act == &methodOopDesc::set_breakpoint ? "sett" : "clear",
-              method->name()->as_C_string(),
-              method->signature()->as_C_string()));
-            assert(!method->is_obsolete(), "only EMCP methods here");
-
-            ((methodOopDesc*)method()->*meth_act)(_bci);
-            break;
-          }
-        }
-      }
-    } // pvw is cleaned up
-  } // rm is cleaned up
+  methodOop method = _method;
+  while (method != NULL) {
+    ((methodOopDesc*)method->*meth_act)(_bci);
+    method = method->old_version();
+  }
 }
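The rewritten each_method_version_do above simply walks the old_version() chain and applies the given method_action to every version of the method. Because the ->* member-pointer call syntax is easy to misread, here is a self-contained plain-C++ sketch of the same dispatch pattern (the Method type and each_version_do are illustrative stand-ins, not HotSpot names):

    #include <cstddef>

    struct Method {
      Method* old_version;
      void set_breakpoint(int bci)   { /* set a breakpoint at bci */ }
      void clear_breakpoint(int bci) { /* clear the breakpoint at bci */ }
    };

    typedef void (Method::*method_action)(int bci);

    // Walk the version chain and invoke the member-function pointer on each version.
    void each_version_do(Method* newest, method_action act, int bci) {
      for (Method* m = newest; m != NULL; m = m->old_version) {
        (m->*act)(bci);   // same shape as ((methodOopDesc*)method->*meth_act)(_bci)
      }
    }

    // usage: each_version_do(m, &Method::set_breakpoint, bci);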
 
 void JvmtiBreakpoint::set() {
diff --git a/src/share/vm/prims/jvmtiRedefineClasses.cpp b/src/share/vm/prims/jvmtiRedefineClasses.cpp
index eb52388..432e15a 100644
--- a/src/share/vm/prims/jvmtiRedefineClasses.cpp
+++ b/src/share/vm/prims/jvmtiRedefineClasses.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2003, 2013, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2003, 2011, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -38,490 +38,669 @@
 #include "runtime/deoptimization.hpp"
 #include "runtime/relocator.hpp"
 #include "utilities/bitMap.inline.hpp"
+#include "prims/jvmtiClassFileReconstituter.hpp"
+#include "compiler/compileBroker.hpp"
+#include "oops/instanceMirrorKlass.hpp"
 
 
 objArrayOop VM_RedefineClasses::_old_methods = NULL;
 objArrayOop VM_RedefineClasses::_new_methods = NULL;
-methodOop*  VM_RedefineClasses::_matching_old_methods = NULL;
-methodOop*  VM_RedefineClasses::_matching_new_methods = NULL;
-methodOop*  VM_RedefineClasses::_deleted_methods      = NULL;
-methodOop*  VM_RedefineClasses::_added_methods        = NULL;
+int*        VM_RedefineClasses::_matching_old_methods = NULL;
+int*        VM_RedefineClasses::_matching_new_methods = NULL;
+int*        VM_RedefineClasses::_deleted_methods      = NULL;
+int*        VM_RedefineClasses::_added_methods        = NULL;
 int         VM_RedefineClasses::_matching_methods_length = 0;
 int         VM_RedefineClasses::_deleted_methods_length  = 0;
 int         VM_RedefineClasses::_added_methods_length    = 0;
-klassOop    VM_RedefineClasses::_the_class_oop = NULL;
+GrowableArray<instanceKlassHandle>* VM_RedefineClasses::_affected_klasses = NULL;
 
 
-VM_RedefineClasses::VM_RedefineClasses(jint class_count,
-                                       const jvmtiClassDefinition *class_defs,
-                                       JvmtiClassLoadKind class_load_kind) {
+// Holds the revision number of the current class redefinition
+int    VM_RedefineClasses::_revision_number = -1;
+
+VM_RedefineClasses::VM_RedefineClasses(jint class_count, const jvmtiClassDefinition *class_defs, JvmtiClassLoadKind class_load_kind)
+   : VM_GC_Operation(Universe::heap()->total_full_collections(), GCCause::_heap_inspection) {
+  RC_TIMER_START(_timer_total);
   _class_count = class_count;
   _class_defs = class_defs;
   _class_load_kind = class_load_kind;
-  _res = JVMTI_ERROR_NONE;
+  _result = JVMTI_ERROR_NONE;
 }
 
-bool VM_RedefineClasses::doit_prologue() {
-  if (_class_count == 0) {
-    _res = JVMTI_ERROR_NONE;
-    return false;
+VM_RedefineClasses::~VM_RedefineClasses() {
+  unlock_threads();
+  RC_TIMER_STOP(_timer_total);
+
+  if (TimeRedefineClasses) {
+    tty->print_cr("");
+    tty->print_cr("Timing Prologue:             %d", _timer_prologue.milliseconds());
+    tty->print_cr("Timing Class Loading:        %d", _timer_class_loading.milliseconds());
+    tty->print_cr("Timing Waiting for Lock:     %d", _timer_wait_for_locks.milliseconds());
+    tty->print_cr("Timing Class Linking:        %d", _timer_class_linking.milliseconds());
+    tty->print_cr("Timing Prepare Redefinition: %d", _timer_prepare_redefinition.milliseconds());
+    tty->print_cr("Timing Heap Iteration:       %d", _timer_heap_iteration.milliseconds());
+    tty->print_cr("Timing Redefinition GC:      %d", _timer_redefinition.milliseconds());
+    tty->print_cr("Timing Epilogue:             %d", _timer_vm_op_epilogue.milliseconds());
+    tty->print_cr("------------------------------------------------------------------");
+    tty->print_cr("Total Time:                  %d", _timer_total.milliseconds());
+    tty->print_cr("");
   }
-  if (_class_defs == NULL) {
-    _res = JVMTI_ERROR_NULL_POINTER;
-    return false;
+}
+
+void VM_RedefineClasses::swap_all_method_annotations(int i, int j, instanceKlassHandle scratch_class) {
+  typeArrayOop save;
+
+  save = scratch_class->get_method_annotations_of(i);
+  scratch_class->set_method_annotations_of(i, scratch_class->get_method_annotations_of(j));
+  scratch_class->set_method_annotations_of(j, save);
+
+  save = scratch_class->get_method_parameter_annotations_of(i);
+  scratch_class->set_method_parameter_annotations_of(i, scratch_class->get_method_parameter_annotations_of(j));
+  scratch_class->set_method_parameter_annotations_of(j, save);
+
+  save = scratch_class->get_method_default_annotations_of(i);
+  scratch_class->set_method_default_annotations_of(i, scratch_class->get_method_default_annotations_of(j));
+  scratch_class->set_method_default_annotations_of(j, save);
+}
+
+void VM_RedefineClasses::add_affected_klasses( klassOop klass )
+{
+  assert(!_affected_klasses->contains(klass), "must not occur more than once!");
+  assert(klass->klass_part()->new_version() == NULL, "Only last version is valid entry in system dictionary");
+
+  Klass* k = klass->klass_part();
+
+  if (k->check_redefinition_flag(Klass::MarkedAsAffected)) {
+    _affected_klasses->append(klass);
+    return;
   }
-  for (int i = 0; i < _class_count; i++) {
-    if (_class_defs[i].klass == NULL) {
-      _res = JVMTI_ERROR_INVALID_CLASS;
-      return false;
-    }
-    if (_class_defs[i].class_byte_count == 0) {
-      _res = JVMTI_ERROR_INVALID_CLASS_FORMAT;
-      return false;
+
+  for (juint i = 0; i < k->super_depth(); i++) {
+    klassOop primary_oop = k->primary_super_of_depth(i);
+    // super_depth returns "8" for interfaces, but they don't have primaries other than Object.
+    if (primary_oop == NULL) break;
+    Klass* primary = Klass::cast(primary_oop);
+    if (primary->check_redefinition_flag(Klass::MarkedAsAffected)) {
+      TRACE_RC3("Found affected class: %s", k->name()->as_C_string());
+      k->set_redefinition_flag(Klass::MarkedAsAffected);
+      _affected_klasses->append(klass);
+      return;
     }
-    if (_class_defs[i].class_bytes == NULL) {
-      _res = JVMTI_ERROR_NULL_POINTER;
-      return false;
+  }
+
+  // Check secondary supers
+  int cnt = k->secondary_supers()->length();
+  for (int i = 0; i < cnt; i++) {
+    Klass* secondary = Klass::cast((klassOop) k->secondary_supers()->obj_at(i));
+    if (secondary->check_redefinition_flag(Klass::MarkedAsAffected)) {
+      TRACE_RC3("Found affected class: %s", k->name()->as_C_string());
+      k->set_redefinition_flag(Klass::MarkedAsAffected);
+      _affected_klasses->append(klass);
+      return;
     }
   }
+}
 
-  // Start timer after all the sanity checks; not quite accurate, but
-  // better than adding a bunch of stop() calls.
-  RC_TIMER_START(_timer_vm_op_prologue);
 
-  // We first load new class versions in the prologue, because somewhere down the
-  // call chain it is required that the current thread is a Java thread.
-  _res = load_new_class_versions(Thread::current());
-  if (_res != JVMTI_ERROR_NONE) {
-    // Free os::malloc allocated memory in load_new_class_version.
-    os::free(_scratch_classes);
-    RC_TIMER_STOP(_timer_vm_op_prologue);
-    return false;
+// Searches for all affected classes and sorts them so that a supertype always precedes its subtypes.
+jvmtiError VM_RedefineClasses::find_sorted_affected_classes() {
+
+  assert(_affected_klasses, "");
+  for (int i = 0; i < _class_count; i++) {
+    oop mirror = JNIHandles::resolve_non_null(_class_defs[i].klass);
+    instanceKlassHandle klass_handle(Thread::current(), java_lang_Class::as_klassOop(mirror));
+    klass_handle->set_redefinition_flag(Klass::MarkedAsAffected);
+    assert(klass_handle->new_version() == NULL, "Must be new class");
   }
 
-  RC_TIMER_STOP(_timer_vm_op_prologue);
-  return true;
+  // Find classes not directly redefined, but affected by a redefinition (because one of its supertypes is redefined)
+  SystemDictionary::classes_do(VM_RedefineClasses::add_affected_klasses);
+  TRACE_RC1("%d classes affected", _affected_klasses->length());
+
+  // Sort the affected klasses such that a supertype is always on a smaller array index than its subtype.
+  jvmtiError result = do_topological_class_sorting(_class_defs, _class_count, Thread::current());
+  IF_TRACE_RC2 {
+    TRACE_RC2("Redefine order: ");  
+    for (int i = 0; i < _affected_klasses->length(); i++) {
+      TRACE_RC2("%s", _affected_klasses->at(i)->name()->as_C_string());
+    }
+  }
+
+  return result;
 }
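The invariant established here (a supertype always ends up at a smaller index than its subtypes) is what lets the later passes process parents before children. do_topological_class_sorting itself is not part of this hunk; the following self-contained snippet is only a simplified illustration of one way to obtain such an order for plain single inheritance (the real sort must also account for interfaces, hence a genuine topological sort):

    #include <cstddef>
    #include <vector>
    #include <algorithm>

    struct Node { Node* super; };      // stand-in for a class and its superclass link

    static int depth(const Node* n) {
      int d = 0;
      for (const Node* s = n->super; s != NULL; s = s->super) d++;
      return d;
    }

    static bool supertype_first(const Node* a, const Node* b) {
      return depth(a) < depth(b);      // a proper supertype is always shallower than its subtypes
    }

    void sort_supertypes_first(std::vector<Node*>& klasses) {
      std::sort(klasses.begin(), klasses.end(), supertype_first);
    }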
 
-void VM_RedefineClasses::doit() {
-  Thread *thread = Thread::current();
+// Searches for the class bytes of the given class and returns them as a byte array.
+jvmtiError VM_RedefineClasses::find_class_bytes(instanceKlassHandle the_class, const unsigned char **class_bytes, jint *class_byte_count, jboolean *not_changed) {
 
-  if (UseSharedSpaces) {
-    // Sharing is enabled so we remap the shared readonly space to
-    // shared readwrite, private just in case we need to redefine
-    // a shared class. We do the remap during the doit() phase of
-    // the safepoint to be safer.
-    if (!CompactingPermGenGen::remap_shared_readonly_as_readwrite()) {
-      RC_TRACE_WITH_THREAD(0x00000001, thread,
-        ("failed to remap shared readonly space to readwrite, private"));
-      _res = JVMTI_ERROR_INTERNAL;
-      return;
+  *not_changed = false;
+
+  // Search for the index in the redefinition array that corresponds to the current class
+  int j;
+  for (j=0; j<_class_count; j++) {
+    oop mirror = JNIHandles::resolve_non_null(_class_defs[j].klass);
+    klassOop the_class_oop = java_lang_Class::as_klassOop(mirror);
+    if (the_class_oop == the_class()) {
+      break;
     }
   }
 
-  for (int i = 0; i < _class_count; i++) {
-    redefine_single_class(_class_defs[i].klass, _scratch_classes[i], thread);
-  }
-  // Disable any dependent concurrent compilations
-  SystemDictionary::notice_modification();
+  if (j == _class_count) {
 
-  // Set flag indicating that some invariants are no longer true.
-  // See jvmtiExport.hpp for detailed explanation.
-  JvmtiExport::set_has_redefined_a_class();
+    *not_changed = true;
 
-// check_class() is optionally called for product bits, but is
-// always called for non-product bits.
-#ifdef PRODUCT
-  if (RC_TRACE_ENABLED(0x00004000)) {
-#endif
-    RC_TRACE_WITH_THREAD(0x00004000, thread, ("calling check_class"));
-    SystemDictionary::classes_do(check_class, thread);
-#ifdef PRODUCT
+    // Redefine with same bytecodes. This is a class that is only indirectly affected by redefinition,
+    // so the user did not specify a different bytecode for that class.
+
+    if (the_class->get_cached_class_file_bytes() == NULL) {
+      // not cached, we need to reconstitute the class file from VM representation
+      constantPoolHandle  constants(Thread::current(), the_class->constants());
+      ObjectLocker ol(constants, Thread::current());    // lock constant pool while we query it
+
+      JvmtiClassFileReconstituter reconstituter(the_class);
+      if (reconstituter.get_error() != JVMTI_ERROR_NONE) {
+        return reconstituter.get_error();
+      }
+
+      *class_byte_count = (jint)reconstituter.class_file_size();
+      *class_bytes      = (unsigned char*)reconstituter.class_file_bytes();
+
+      TRACE_RC3("Reconstituted class bytes");
+
+    } else {
+
+      // it is cached, get it from the cache
+      *class_byte_count = the_class->get_cached_class_file_len();
+      *class_bytes      = the_class->get_cached_class_file_bytes();
+
+
+      TRACE_RC3("Retrieved cached class bytes");
+    }
+
+  } else {
+
+    // Redefine with bytecodes at index j
+    *class_bytes = _class_defs[j].class_bytes;
+    *class_byte_count = _class_defs[j].class_byte_count;
   }
-#endif
+
+  return JVMTI_ERROR_NONE;
 }
 
-void VM_RedefineClasses::doit_epilogue() {
-  // Free os::malloc allocated memory.
-  // The memory allocated in redefine will be free'ed in next VM operation.
-  os::free(_scratch_classes);
-
-  if (RC_TRACE_ENABLED(0x00000004)) {
-    // Used to have separate timers for "doit" and "all", but the timer
-    // overhead skewed the measurements.
-    jlong doit_time = _timer_rsc_phase1.milliseconds() +
-                      _timer_rsc_phase2.milliseconds();
-    jlong all_time = _timer_vm_op_prologue.milliseconds() + doit_time;
-
-    RC_TRACE(0x00000004, ("vm_op: all=" UINT64_FORMAT
-      "  prologue=" UINT64_FORMAT "  doit=" UINT64_FORMAT, all_time,
-      _timer_vm_op_prologue.milliseconds(), doit_time));
-    RC_TRACE(0x00000004,
-      ("redefine_single_class: phase1=" UINT64_FORMAT "  phase2=" UINT64_FORMAT,
-       _timer_rsc_phase1.milliseconds(), _timer_rsc_phase2.milliseconds()));
+// Prologue of the VM operation, called on the Java thread in parallel to normal program execution
+bool VM_RedefineClasses::doit_prologue() {
+
+  _revision_number++;
+  TRACE_RC1("Redefinition with revision number %d started!", _revision_number);
+  lock_threads();
+
+  assert(Thread::current()->is_Java_thread(), "must be Java thread");
+  RC_TIMER_START(_timer_prologue);
+
+  if (!check_arguments()) {
+    RC_TIMER_STOP(_timer_prologue);
+    return false;
   }
-}
 
-bool VM_RedefineClasses::is_modifiable_class(oop klass_mirror) {
-  // classes for primitives cannot be redefined
-  if (java_lang_Class::is_primitive(klass_mirror)) {
+  // We first load new class versions in the prologue, because somewhere down the
+  // call chain it is required that the current thread is a Java thread.
+  _new_classes = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<instanceKlassHandle>(5, true);
+
+  assert(_affected_klasses == NULL, "");
+  _affected_klasses = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<instanceKlassHandle>(_class_count, true);
+
+
+  _result = load_new_class_versions(Thread::current());
+
+  TRACE_RC1("Loaded new class versions!");
+  if (_result != JVMTI_ERROR_NONE) {
+    TRACE_RC1("error occured: %d!", _result);
+    delete _new_classes;
+    _new_classes = NULL;
+    delete _affected_klasses;
+    _affected_klasses = NULL;
+    RC_TIMER_STOP(_timer_prologue);
     return false;
   }
-  klassOop the_class_oop = java_lang_Class::as_klassOop(klass_mirror);
-  // classes for arrays cannot be redefined
-  if (the_class_oop == NULL || !Klass::cast(the_class_oop)->oop_is_instance()) {
+
+  TRACE_RC2("nearly finished");
+  VM_GC_Operation::doit_prologue();
+  RC_TIMER_STOP(_timer_prologue);
+  TRACE_RC2("doit_prologue finished!");
+  return true;
+}
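For orientation, this prologue plugs into the standard HotSpot VM-operation protocol (existing VM behaviour, not something added by this patch); roughly:

    // requesting JavaThread:  op.doit_prologue()  -- runs here; returning false cancels the operation
    // VM thread (safepoint):  op.doit()           -- performs the actual redefinition
    // requesting JavaThread:  op.doit_epilogue()  -- cleanup after the safepoint
    //
    // VMThread::execute(&op) drives all three steps, which is why the class loading and
    // argument checks above have to run on the requesting Java thread, not the VM thread.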
+
+// Checks basic properties of the arguments of the redefinition command.
+jvmtiError VM_RedefineClasses::check_arguments_error() {
+  if (_class_defs == NULL) return JVMTI_ERROR_NULL_POINTER;
+  for (int i = 0; i < _class_count; i++) {
+    if (_class_defs[i].klass == NULL) return JVMTI_ERROR_INVALID_CLASS;
+    if (_class_defs[i].class_byte_count == 0) return JVMTI_ERROR_INVALID_CLASS_FORMAT;
+    if (_class_defs[i].class_bytes == NULL) return JVMTI_ERROR_NULL_POINTER;
+  }
+  return JVMTI_ERROR_NONE;
+}
+
+// Returns false and sets a result error code if the redefinition should be aborted.
+bool VM_RedefineClasses::check_arguments() {
+  jvmtiError error = check_arguments_error();
+  if (error != JVMTI_ERROR_NONE || _class_count == 0) {
+    _result = error;
     return false;
   }
   return true;
 }
 
-// Append the current entry at scratch_i in scratch_cp to *merge_cp_p
-// where the end of *merge_cp_p is specified by *merge_cp_length_p. For
-// direct CP entries, there is just the current entry to append. For
-// indirect and double-indirect CP entries, there are zero or more
-// referenced CP entries along with the current entry to append.
-// Indirect and double-indirect CP entries are handled by recursive
-// calls to append_entry() as needed. The referenced CP entries are
-// always appended to *merge_cp_p before the referee CP entry. These
-// referenced CP entries may already exist in *merge_cp_p in which case
-// there is nothing extra to append and only the current entry is
-// appended.
-void VM_RedefineClasses::append_entry(constantPoolHandle scratch_cp,
-       int scratch_i, constantPoolHandle *merge_cp_p, int *merge_cp_length_p,
-       TRAPS) {
-
-  // append is different depending on entry tag type
-  switch (scratch_cp->tag_at(scratch_i).value()) {
-
-    // The old verifier is implemented outside the VM. It loads classes,
-    // but does not resolve constant pool entries directly so we never
-    // see Class entries here with the old verifier. Similarly the old
-    // verifier does not like Class entries in the input constant pool.
-    // The split-verifier is implemented in the VM so it can optionally
-    // and directly resolve constant pool entries to load classes. The
-    // split-verifier can accept either Class entries or UnresolvedClass
-    // entries in the input constant pool. We revert the appended copy
-    // back to UnresolvedClass so that either verifier will be happy
-    // with the constant pool entry.
-    case JVM_CONSTANT_Class:
-    {
-      // revert the copy to JVM_CONSTANT_UnresolvedClass
-      (*merge_cp_p)->unresolved_klass_at_put(*merge_cp_length_p,
-        scratch_cp->klass_name_at(scratch_i));
-
-      if (scratch_i != *merge_cp_length_p) {
-        // The new entry in *merge_cp_p is at a different index than
-        // the new entry in scratch_cp so we need to map the index values.
-        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
-      }
-      (*merge_cp_length_p)++;
-    } break;
+jvmtiError VM_RedefineClasses::check_exception() const {
+  Thread* THREAD = Thread::current();
+  if (HAS_PENDING_EXCEPTION) {
+
+    Symbol* ex_name = PENDING_EXCEPTION->klass()->klass_part()->name();
+    TRACE_RC1("parse_stream exception: '%s'", ex_name->as_C_string());      
+    if (TraceRedefineClasses >= 1) {
+      java_lang_Throwable::print(PENDING_EXCEPTION, tty);
+      tty->print_cr("");
+    }
+    CLEAR_PENDING_EXCEPTION;
+
+    if (ex_name == vmSymbols::java_lang_UnsupportedClassVersionError()) {
+      return JVMTI_ERROR_UNSUPPORTED_VERSION;
+    } else if (ex_name == vmSymbols::java_lang_ClassFormatError()) {
+      return JVMTI_ERROR_INVALID_CLASS_FORMAT;
+    } else if (ex_name == vmSymbols::java_lang_ClassCircularityError()) {
+      return JVMTI_ERROR_CIRCULAR_CLASS_DEFINITION;
+    } else if (ex_name == vmSymbols::java_lang_NoClassDefFoundError()) {
+      // The message will be "XXX (wrong name: YYY)"
+      return JVMTI_ERROR_NAMES_DONT_MATCH;
+    } else if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
+      return JVMTI_ERROR_OUT_OF_MEMORY;
+    } else {
+      // Just in case more exceptions can be thrown.
+      return JVMTI_ERROR_FAILS_VERIFICATION;
+    }
+  }
 
-    // these are direct CP entries so they can be directly appended,
-    // but double and long take two constant pool entries
-    case JVM_CONSTANT_Double:  // fall through
-    case JVM_CONSTANT_Long:
-    {
-      constantPoolOopDesc::copy_entry_to(scratch_cp, scratch_i, *merge_cp_p, *merge_cp_length_p,
-        THREAD);
+  return JVMTI_ERROR_NONE;
+}
 
-      if (scratch_i != *merge_cp_length_p) {
-        // The new entry in *merge_cp_p is at a different index than
-        // the new entry in scratch_cp so we need to map the index values.
-        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
-      }
-      (*merge_cp_length_p) += 2;
-    } break;
-
-    // these are direct CP entries so they can be directly appended
-    case JVM_CONSTANT_Float:   // fall through
-    case JVM_CONSTANT_Integer: // fall through
-    case JVM_CONSTANT_Utf8:    // fall through
-
-    // This was an indirect CP entry, but it has been changed into
-    // an interned string so this entry can be directly appended.
-    case JVM_CONSTANT_String:      // fall through
-
-    // These were indirect CP entries, but they have been changed into
-    // Symbol*s so these entries can be directly appended.
-    case JVM_CONSTANT_UnresolvedClass:  // fall through
-    case JVM_CONSTANT_UnresolvedString:
-    {
-      constantPoolOopDesc::copy_entry_to(scratch_cp, scratch_i, *merge_cp_p, *merge_cp_length_p,
-        THREAD);
+// Loads all new class versions and stores the instanceKlass handles in an array.
+jvmtiError VM_RedefineClasses::load_new_class_versions(TRAPS) {
 
-      if (scratch_i != *merge_cp_length_p) {
-        // The new entry in *merge_cp_p is at a different index than
-        // the new entry in scratch_cp so we need to map the index values.
-        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
-      }
-      (*merge_cp_length_p)++;
-    } break;
+  ResourceMark rm(THREAD);
 
-    // this is an indirect CP entry so it needs special handling
-    case JVM_CONSTANT_NameAndType:
-    {
-      int name_ref_i = scratch_cp->name_ref_index_at(scratch_i);
-      int new_name_ref_i = 0;
-      bool match = (name_ref_i < *merge_cp_length_p) &&
-        scratch_cp->compare_entry_to(name_ref_i, *merge_cp_p, name_ref_i,
-          THREAD);
-      if (!match) {
-        // forward reference in *merge_cp_p or not a direct match
-
-        int found_i = scratch_cp->find_matching_entry(name_ref_i, *merge_cp_p,
-          THREAD);
-        if (found_i != 0) {
-          guarantee(found_i != name_ref_i,
-            "compare_entry_to() and find_matching_entry() do not agree");
-
-          // Found a matching entry somewhere else in *merge_cp_p so
-          // just need a mapping entry.
-          new_name_ref_i = found_i;
-          map_index(scratch_cp, name_ref_i, found_i);
-        } else {
-          // no match found so we have to append this entry to *merge_cp_p
-          append_entry(scratch_cp, name_ref_i, merge_cp_p, merge_cp_length_p,
-            THREAD);
-          // The above call to append_entry() can only append one entry
-          // so the post call query of *merge_cp_length_p is only for
-          // the sake of consistency.
-          new_name_ref_i = *merge_cp_length_p - 1;
+  TRACE_RC1("===================================================================");
+  TRACE_RC1("redefinition started by thread \"%s\"", THREAD->name());
+  TRACE_RC1("load new class versions (%d)", _class_count);
+
+  // Retrieve an array of all classes that need to be redefined
+  jvmtiError err = find_sorted_affected_classes();
+  if (err != JVMTI_ERROR_NONE) {
+    TRACE_RC1("Error finding sorted affected classes: %d", (int)err);
+    return err;
+  }
+
+
+  JvmtiThreadState *state = JvmtiThreadState::state_for(JavaThread::current());
+
+  _max_redefinition_flags = Klass::NoRedefinition;
+  jvmtiError result = JVMTI_ERROR_NONE;
+
+  for (int i=0; i<_affected_klasses->length(); i++) {
+    TRACE_RC2("Processing affected class %d of %d", i+1, _affected_klasses->length());
+
+    instanceKlassHandle the_class = _affected_klasses->at(i);
+    TRACE_RC2("name=%s", the_class->name()->as_C_string());
+
+    the_class->link_class(THREAD);
+    result = check_exception();
+    if (result != JVMTI_ERROR_NONE) break;
+
+    // Find new class bytes
+    const unsigned char* class_bytes;
+    jint class_byte_count;
+    jvmtiError error;
+    jboolean not_changed;
+    if ((error = find_class_bytes(the_class, &class_bytes, &class_byte_count, &not_changed)) != JVMTI_ERROR_NONE) {
+      TRACE_RC1("Error finding class bytes: %d", (int)error);
+      result = error;
+      break;
+    }
+    assert(class_bytes != NULL && class_byte_count != 0, "Class bytes defined at this point!");
+
+
+    // Set redefined class handle in JvmtiThreadState class.
+    // This redefined class is sent to agent event handler for class file
+    // load hook event.
+    state->set_class_being_redefined(&the_class, _class_load_kind);
+
+    TRACE_RC2("Before resolving from stream");
+
+    RC_TIMER_STOP(_timer_prologue);
+    RC_TIMER_START(_timer_class_loading);
+
+
+    // Parse the stream.
+    Handle the_class_loader(THREAD, the_class->class_loader());
+    Handle protection_domain(THREAD, the_class->protection_domain());
+    ClassFileStream st((u1*) class_bytes, class_byte_count, (char *)"__VM_RedefineClasses__");
+    instanceKlassHandle new_class(THREAD, SystemDictionary::resolve_from_stream(the_class->name(),
+      the_class_loader,
+      protection_domain,
+      &st,
+      true,
+      the_class,
+      THREAD));
+
+    RC_TIMER_STOP(_timer_class_loading);
+    RC_TIMER_START(_timer_prologue);
+
+    TRACE_RC2("After resolving class from stream!");
+    // Clear class_being_redefined just to be sure.
+    state->clear_class_being_redefined();
+
+    result = check_exception();
+    if (result != JVMTI_ERROR_NONE) break;
+
+    not_changed = false;
+
+#ifdef ASSERT
+
+    assert(new_class() != NULL, "Class could not be loaded!");
+    assert(new_class() != the_class(), "must be different");
+    assert(new_class->new_version() == NULL && new_class->old_version() != NULL, "");
+
+
+    objArrayOop k_interfaces = new_class->local_interfaces();
+    for (int j=0; j<k_interfaces->length(); j++) {
+      assert(((klassOop)k_interfaces->obj_at(j))->klass_part()->is_newest_version(), "just checking");
+    }
+
+    if (!THREAD->is_Compiler_thread()) {
+
+      TRACE_RC2("name=%s loader="INTPTR_FORMAT" protection_domain="INTPTR_FORMAT, the_class->name()->as_C_string(), the_class->class_loader(), the_class->protection_domain());
+      // If we are on the compiler thread, we must not try to resolve a class.
+      klassOop systemLookup = SystemDictionary::resolve_or_null(the_class->name(), the_class->class_loader(), the_class->protection_domain(), THREAD);
+      
+      if (systemLookup != NULL) {
+        assert(systemLookup == new_class->old_version(), "Old class must be in system dictionary!");
+      
+
+        Klass *subklass = new_class()->klass_part()->subklass();
+        while (subklass != NULL) {
+          assert(subklass->new_version() == NULL, "Most recent version of class!");
+          subklass = subklass->next_sibling();
         }
+      } else {
+        // This can happen for reflection-generated classes (not entirely clear why).
+        CLEAR_PENDING_EXCEPTION;
       }
+    }
 
-      int signature_ref_i = scratch_cp->signature_ref_index_at(scratch_i);
-      int new_signature_ref_i = 0;
-      match = (signature_ref_i < *merge_cp_length_p) &&
-        scratch_cp->compare_entry_to(signature_ref_i, *merge_cp_p,
-          signature_ref_i, THREAD);
-      if (!match) {
-        // forward reference in *merge_cp_p or not a direct match
-
-        int found_i = scratch_cp->find_matching_entry(signature_ref_i,
-          *merge_cp_p, THREAD);
-        if (found_i != 0) {
-          guarantee(found_i != signature_ref_i,
-            "compare_entry_to() and find_matching_entry() do not agree");
-
-          // Found a matching entry somewhere else in *merge_cp_p so
-          // just need a mapping entry.
-          new_signature_ref_i = found_i;
-          map_index(scratch_cp, signature_ref_i, found_i);
-        } else {
-          // no match found so we have to append this entry to *merge_cp_p
-          append_entry(scratch_cp, signature_ref_i, merge_cp_p,
-            merge_cp_length_p, THREAD);
-          // The above call to append_entry() can only append one entry
-          // so the post call query of *merge_cp_length_p is only for
-          // the sake of consistency.
-          new_signature_ref_i = *merge_cp_length_p - 1;
-        }
+#endif
+
+    IF_TRACE_RC1 {
+      if (new_class->layout_helper() != the_class->layout_helper()) {
+        TRACE_RC1("Instance size change for class %s: new=%d old=%d", new_class->name()->as_C_string(), new_class->layout_helper(), the_class->layout_helper());
       }
+    }
 
-      // If the referenced entries already exist in *merge_cp_p, then
-      // both new_name_ref_i and new_signature_ref_i will both be 0.
-      // In that case, all we are appending is the current entry.
-      if (new_name_ref_i == 0) {
-        new_name_ref_i = name_ref_i;
-      } else {
-        RC_TRACE(0x00080000,
-          ("NameAndType entry@%d name_ref_index change: %d to %d",
-          *merge_cp_length_p, name_ref_i, new_name_ref_i));
+    // Set the new version of the class
+    new_class->set_revision_number(_revision_number);
+    new_class->set_redefinition_index(i);
+    the_class->set_new_version(new_class());
+    _new_classes->append(new_class);
+
+    assert(new_class->new_version() == NULL, "");
+
+    int redefinition_flags = Klass::NoRedefinition;
+
+    if (not_changed) {
+      redefinition_flags = Klass::NoRedefinition;
+    } else if (AllowAdvancedClassRedefinition) {
+      redefinition_flags = calculate_redefinition_flags(new_class);
+      if (redefinition_flags >= Klass::RemoveSuperType) {
+        TRACE_RC1("Remove super type is not allowed");
+        result = JVMTI_ERROR_UNSUPPORTED_REDEFINITION_HIERARCHY_CHANGED;
+        break;
       }
-      if (new_signature_ref_i == 0) {
-        new_signature_ref_i = signature_ref_i;
-      } else {
-        RC_TRACE(0x00080000,
-          ("NameAndType entry@%d signature_ref_index change: %d to %d",
-          *merge_cp_length_p, signature_ref_i, new_signature_ref_i));
+    } else {
+      jvmtiError allowed = check_redefinition_allowed(new_class);
+      if (allowed != JVMTI_ERROR_NONE) {
+        TRACE_RC1("Error redefinition not allowed!");
+        result = allowed;
+        break;
       }
+      redefinition_flags = Klass::ModifyClass;
+    }
 
-      (*merge_cp_p)->name_and_type_at_put(*merge_cp_length_p,
-        new_name_ref_i, new_signature_ref_i);
-      if (scratch_i != *merge_cp_length_p) {
-        // The new entry in *merge_cp_p is at a different index than
-        // the new entry in scratch_cp so we need to map the index values.
-        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
+    if (new_class->super() != NULL) {
+      redefinition_flags = redefinition_flags | new_class->super()->klass_part()->redefinition_flags();
+    }
+
+    for (int j=0; j<new_class->local_interfaces()->length(); j++) {
+      redefinition_flags = redefinition_flags | ((klassOop)new_class->local_interfaces()->obj_at(j))->klass_part()->redefinition_flags();
+    }
+
+    new_class->set_redefinition_flags(redefinition_flags);
+
+    _max_redefinition_flags = _max_redefinition_flags | redefinition_flags;
+
+    if ((redefinition_flags & Klass::ModifyInstances) != 0) {
+      // TODO: Check if watch access flags of static fields are updated correctly.
+      calculate_instance_update_information(_new_classes->at(i)());
+    } else {
+      // Fields were not changed, transfer special flags only
+      assert(new_class->layout_helper() >> 1 == new_class->old_version()->klass_part()->layout_helper() >> 1, "must be equal");
+      assert(new_class->fields()->length() == ((instanceKlass*)new_class->old_version()->klass_part())->fields()->length(), "must be equal");
+      
+      JavaFieldStream old_fs(the_class);
+      JavaFieldStream new_fs(new_class);
+      for (; !old_fs.done() && !new_fs.done(); old_fs.next(), new_fs.next()) {
+        AccessFlags flags = new_fs.access_flags();
+        flags.set_is_field_modification_watched(old_fs.access_flags().is_field_modification_watched());
+        flags.set_is_field_access_watched(old_fs.access_flags().is_field_access_watched());
+        new_fs.set_access_flags(flags);
       }
-      (*merge_cp_length_p)++;
-    } break;
+    }
 
-    // this is a double-indirect CP entry so it needs special handling
-    case JVM_CONSTANT_Fieldref:           // fall through
-    case JVM_CONSTANT_InterfaceMethodref: // fall through
-    case JVM_CONSTANT_Methodref:
-    {
-      int klass_ref_i = scratch_cp->uncached_klass_ref_index_at(scratch_i);
-      int new_klass_ref_i = 0;
-      bool match = (klass_ref_i < *merge_cp_length_p) &&
-        scratch_cp->compare_entry_to(klass_ref_i, *merge_cp_p, klass_ref_i,
-          THREAD);
-      if (!match) {
-        // forward reference in *merge_cp_p or not a direct match
-
-        int found_i = scratch_cp->find_matching_entry(klass_ref_i, *merge_cp_p,
-          THREAD);
-        if (found_i != 0) {
-          guarantee(found_i != klass_ref_i,
-            "compare_entry_to() and find_matching_entry() do not agree");
-
-          // Found a matching entry somewhere else in *merge_cp_p so
-          // just need a mapping entry.
-          new_klass_ref_i = found_i;
-          map_index(scratch_cp, klass_ref_i, found_i);
-        } else {
-          // no match found so we have to append this entry to *merge_cp_p
-          append_entry(scratch_cp, klass_ref_i, merge_cp_p, merge_cp_length_p,
-            THREAD);
-          // The above call to append_entry() can only append one entry
-          // so the post call query of *merge_cp_length_p is only for
-          // the sake of consistency. Without the optimization where we
-          // use JVM_CONSTANT_UnresolvedClass, then up to two entries
-          // could be appended.
-          new_klass_ref_i = *merge_cp_length_p - 1;
-        }
+    IF_TRACE_RC3 {
+      if (new_class->super() != NULL) {
+        TRACE_RC3("Super class is %s", new_class->super()->klass_part()->name()->as_C_string());
       }
+    }
 
-      int name_and_type_ref_i =
-        scratch_cp->uncached_name_and_type_ref_index_at(scratch_i);
-      int new_name_and_type_ref_i = 0;
-      match = (name_and_type_ref_i < *merge_cp_length_p) &&
-        scratch_cp->compare_entry_to(name_and_type_ref_i, *merge_cp_p,
-          name_and_type_ref_i, THREAD);
-      if (!match) {
-        // forward reference in *merge_cp_p or not a direct match
-
-        int found_i = scratch_cp->find_matching_entry(name_and_type_ref_i,
-          *merge_cp_p, THREAD);
-        if (found_i != 0) {
-          guarantee(found_i != name_and_type_ref_i,
-            "compare_entry_to() and find_matching_entry() do not agree");
-
-          // Found a matching entry somewhere else in *merge_cp_p so
-          // just need a mapping entry.
-          new_name_and_type_ref_i = found_i;
-          map_index(scratch_cp, name_and_type_ref_i, found_i);
-        } else {
-          // no match found so we have to append this entry to *merge_cp_p
-          append_entry(scratch_cp, name_and_type_ref_i, merge_cp_p,
-            merge_cp_length_p, THREAD);
-          // The above call to append_entry() can append more than
-          // one entry so the post call query of *merge_cp_length_p
-          // is required in order to get the right index for the
-          // JVM_CONSTANT_NameAndType entry.
-          new_name_and_type_ref_i = *merge_cp_length_p - 1;
+#ifdef ASSERT
+    assert(new_class->super() == NULL || new_class->super()->klass_part()->new_version() == NULL, "Super klass must be newest version!");
+
+    the_class->vtable()->verify(tty);
+    new_class->vtable()->verify(tty);
+#endif
+
+    TRACE_RC2("Verification done!");
+
+    if (i == _affected_klasses->length() - 1) {
+
+      // This was the last class processed => check if additional classes have been loaded in the meantime
+
+      for (int j=0; j<_affected_klasses->length(); j++) {
+
+        klassOop initial_klass = _affected_klasses->at(j)();
+        Klass *initial_subklass = initial_klass->klass_part()->subklass();
+        Klass *cur_klass = initial_subklass;
+        while(cur_klass != NULL) {
+
+          if(cur_klass->oop_is_instance() && cur_klass->is_newest_version() && !cur_klass->is_redefining()) {
+            instanceKlassHandle handle(THREAD, cur_klass->as_klassOop());
+            if (!_affected_klasses->contains(handle)) {
+
+              int k = i + 1;
+              for (; k<_affected_klasses->length(); k++) {
+                if (_affected_klasses->at(k)->is_subtype_of(cur_klass->as_klassOop())) {
+                  break;
+                }
+              }
+              _affected_klasses->insert_before(k, handle);
+              TRACE_RC2("Adding newly loaded class to affected classes: %s", cur_klass->name()->as_C_string());
+            }
+          }
+
+          cur_klass = cur_klass->next_sibling();
         }
       }
 
-      // If the referenced entries already exist in *merge_cp_p, then
-      // both new_klass_ref_i and new_name_and_type_ref_i will both be
-      // 0. In that case, all we are appending is the current entry.
-      if (new_klass_ref_i == 0) {
-        new_klass_ref_i = klass_ref_i;
-      }
-      if (new_name_and_type_ref_i == 0) {
-        new_name_and_type_ref_i = name_and_type_ref_i;
-      }
+      int new_count = _affected_klasses->length() - 1 - i;
+      if (new_count != 0) {
 
-      const char *entry_name;
-      switch (scratch_cp->tag_at(scratch_i).value()) {
-      case JVM_CONSTANT_Fieldref:
-        entry_name = "Fieldref";
-        (*merge_cp_p)->field_at_put(*merge_cp_length_p, new_klass_ref_i,
-          new_name_and_type_ref_i);
-        break;
-      case JVM_CONSTANT_InterfaceMethodref:
-        entry_name = "IFMethodref";
-        (*merge_cp_p)->interface_method_at_put(*merge_cp_length_p,
-          new_klass_ref_i, new_name_and_type_ref_i);
-        break;
-      case JVM_CONSTANT_Methodref:
-        entry_name = "Methodref";
-        (*merge_cp_p)->method_at_put(*merge_cp_length_p, new_klass_ref_i,
-          new_name_and_type_ref_i);
-        break;
-      default:
-        guarantee(false, "bad switch");
-        break;
+        TRACE_RC1("Found new number of affected classes: %d", new_count);
       }
+    }
+  }
 
-      if (klass_ref_i != new_klass_ref_i) {
-        RC_TRACE(0x00080000, ("%s entry@%d class_index changed: %d to %d",
-          entry_name, *merge_cp_length_p, klass_ref_i, new_klass_ref_i));
-      }
-      if (name_and_type_ref_i != new_name_and_type_ref_i) {
-        RC_TRACE(0x00080000,
-          ("%s entry@%d name_and_type_index changed: %d to %d",
-          entry_name, *merge_cp_length_p, name_and_type_ref_i,
-          new_name_and_type_ref_i));
-      }
+  if (result != JVMTI_ERROR_NONE) {
+    rollback();
+    return result;
+  }
 
-      if (scratch_i != *merge_cp_length_p) {
-        // The new entry in *merge_cp_p is at a different index than
-        // the new entry in scratch_cp so we need to map the index values.
-        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
-      }
-      (*merge_cp_length_p)++;
-    } break;
+  RC_TIMER_STOP(_timer_prologue);
+  RC_TIMER_START(_timer_class_linking);
+  // Link and verify new classes _after_ all classes have been updated in the system dictionary!
+  for (int i=0; i<_affected_klasses->length(); i++) {
+    instanceKlassHandle the_class = _affected_klasses->at(i);
+    instanceKlassHandle new_class(the_class->new_version());
 
-    // At this stage, Class or UnresolvedClass could be here, but not
-    // ClassIndex
-    case JVM_CONSTANT_ClassIndex: // fall through
+    TRACE_RC2("Linking class %d/%d %s", i, _affected_klasses->length(), the_class->name()->as_C_string());
+    new_class->link_class(THREAD);
 
-    // Invalid is used as the tag for the second constant pool entry
-    // occupied by JVM_CONSTANT_Double or JVM_CONSTANT_Long. It should
-    // not be seen by itself.
-    case JVM_CONSTANT_Invalid: // fall through
+    result = check_exception();
+    if (result != JVMTI_ERROR_NONE) break;
+  }
+  RC_TIMER_STOP(_timer_class_linking);
+  RC_TIMER_START(_timer_prologue);
 
-    // At this stage, String or UnresolvedString could be here, but not
-    // StringIndex
-    case JVM_CONSTANT_StringIndex: // fall through
+  if (result != JVMTI_ERROR_NONE) {
+    rollback();
+    return result;
+  }
 
-    // At this stage JVM_CONSTANT_UnresolvedClassInError should not be
-    // here
-    case JVM_CONSTANT_UnresolvedClassInError: // fall through
+  TRACE_RC2("All classes loaded!");
 
-    default:
-    {
-      // leave a breadcrumb
-      jbyte bad_value = scratch_cp->tag_at(scratch_i).value();
-      ShouldNotReachHere();
-    } break;
-  } // end switch tag value
-} // end append_entry()
+#ifdef ASSERT
+  for (int i=0; i<_affected_klasses->length(); i++) {
+    instanceKlassHandle the_class = _affected_klasses->at(i);
+    assert(the_class->new_version() != NULL, "Must have been redefined");
+    instanceKlassHandle new_version = instanceKlassHandle(THREAD, the_class->new_version());
+    assert(new_version->new_version() == NULL, "Must be newest version");
 
+    if (!(new_version->super() == NULL || new_version->super()->klass_part()->new_version() == NULL)) {
+      new_version()->print();
+      new_version->super()->print();
+    }
+    assert(new_version->super() == NULL || new_version->super()->klass_part()->new_version() == NULL, "Super class must be newest version");
+  }
 
-void VM_RedefineClasses::swap_all_method_annotations(int i, int j, instanceKlassHandle scratch_class) {
-  typeArrayOop save;
+  SystemDictionary::classes_do(check_class, THREAD);
 
-  save = scratch_class->get_method_annotations_of(i);
-  scratch_class->set_method_annotations_of(i, scratch_class->get_method_annotations_of(j));
-  scratch_class->set_method_annotations_of(j, save);
+#endif
 
-  save = scratch_class->get_method_parameter_annotations_of(i);
-  scratch_class->set_method_parameter_annotations_of(i, scratch_class->get_method_parameter_annotations_of(j));
-  scratch_class->set_method_parameter_annotations_of(j, save);
+  TRACE_RC1("Finished verification!");
+  return JVMTI_ERROR_NONE;
+}
 
-  save = scratch_class->get_method_default_annotations_of(i);
-  scratch_class->set_method_default_annotations_of(i, scratch_class->get_method_default_annotations_of(j));
-  scratch_class->set_method_default_annotations_of(j, save);
+void VM_RedefineClasses::lock_threads() {
+
+  RC_TIMER_START(_timer_wait_for_locks);
+
+
+  JavaThread *javaThread = Threads::first();
+  while (javaThread != NULL) {
+    if (javaThread->is_Compiler_thread() && javaThread != Thread::current()) {
+      CompilerThread *compilerThread = (CompilerThread *)javaThread;
+      compilerThread->set_should_bailout(true);
+    }
+    javaThread = javaThread->next();
+  }
+
+  int cnt = 0;
+  javaThread = Threads::first();
+  while (javaThread != NULL) {
+    if (javaThread->is_Compiler_thread() && javaThread != Thread::current()) {
+      CompilerThread *compilerThread = (CompilerThread *)javaThread;
+      compilerThread->compilation_mutex()->lock();
+      cnt++;
+    }
+    javaThread = javaThread->next();
+  }
+
+  TRACE_RC2("Locked %d compiler threads", cnt);
+
+  cnt = 0;
+  javaThread = Threads::first();
+  while (javaThread != NULL) {
+    if (javaThread != Thread::current()) {
+      javaThread->redefine_classes_mutex()->lock();
+      cnt++;
+    }
+    javaThread = javaThread->next();
+  }
+
+
+  TRACE_RC2("Locked %d threads", cnt);
+
+  RC_TIMER_STOP(_timer_wait_for_locks);
+}
+
+void VM_RedefineClasses::unlock_threads() {
+
+  int cnt = 0;
+  JavaThread *javaThread = Threads::first();
+  Thread *thread = Thread::current();
+  while (javaThread != NULL) {
+    if (javaThread->is_Compiler_thread() && javaThread != Thread::current()) {
+      CompilerThread *compilerThread = (CompilerThread *)javaThread;
+      if (compilerThread->compilation_mutex()->owned_by_self()) {
+        compilerThread->compilation_mutex()->unlock();
+        cnt++;
+      }
+    }
+    javaThread = javaThread->next();
+  }
+
+  TRACE_RC2("Unlocked %d compiler threads", cnt);
+
+  cnt = 0;
+  javaThread = Threads::first();
+  while (javaThread != NULL) {
+    if (javaThread != Thread::current()) {
+      if (javaThread->redefine_classes_mutex()->owned_by_self()) {
+        javaThread->redefine_classes_mutex()->unlock();
+        cnt++;
+      }
+    }
+    javaThread = javaThread->next();
+  }
+
+  TRACE_RC2("Unlocked %d threads", cnt);
 }
 
+jvmtiError VM_RedefineClasses::check_redefinition_allowed(instanceKlassHandle scratch_class) {
+
+  // Compatibility mode => check for unsupported modifications
+
+  assert(scratch_class->old_version() != NULL, "must have old version");
+  instanceKlassHandle the_class(scratch_class->old_version());
 
-jvmtiError VM_RedefineClasses::compare_and_normalize_class_versions(
-             instanceKlassHandle the_class,
-             instanceKlassHandle scratch_class) {
   int i;
 
   // Check superclasses, or rather their names, since superclasses themselves can be
   // requested to replace.
   // Check for NULL superclass first since this might be java.lang.Object
   if (the_class->super() != scratch_class->super() &&
-      (the_class->super() == NULL || scratch_class->super() == NULL ||
-       Klass::cast(the_class->super())->name() !=
-       Klass::cast(scratch_class->super())->name())) {
-    return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_HIERARCHY_CHANGED;
+    (the_class->super() == NULL || scratch_class->super() == NULL ||
+    Klass::cast(the_class->super())->name() !=
+    Klass::cast(scratch_class->super())->name())) {
+      return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_HIERARCHY_CHANGED;
   }
 
   // Check if the number, names and order of directly implemented interfaces are the same.
@@ -539,8 +718,8 @@ jvmtiError VM_RedefineClasses::compare_and_normalize_class_versions(
   }
   for (i = 0; i < n_intfs; i++) {
     if (Klass::cast((klassOop) k_interfaces->obj_at(i))->name() !=
-        Klass::cast((klassOop) k_new_interfaces->obj_at(i))->name()) {
-      return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_HIERARCHY_CHANGED;
+      Klass::cast((klassOop) k_new_interfaces->obj_at(i))->name()) {
+        return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_HIERARCHY_CHANGED;
     }
   }
 
@@ -578,14 +757,283 @@ jvmtiError VM_RedefineClasses::compare_and_normalize_class_versions(
     Symbol* name_sym2 = scratch_class->constants()->symbol_at(new_fs.name_index());
     Symbol* sig_sym2 = scratch_class->constants()->symbol_at(new_fs.signature_index());
     if (name_sym1 != name_sym2 || sig_sym1 != sig_sym2) {
-      return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_SCHEMA_CHANGED;
+      return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_SCHEMA_CHANGED;
+    }
+  }
+
+  // If both streams aren't done then we have a differing number of
+  // fields.
+  if (!old_fs.done() || !new_fs.done()) {
+    return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_SCHEMA_CHANGED;
+  }
+
+  // Do a parallel walk through the old and new methods. Detect
+  // cases where they match (exist in both), have been added in
+  // the new methods, or have been deleted (exist only in the
+  // old methods).  The class file parser places methods in order
+  // by method name, but does not order overloaded methods by
+  // signature.  In order to determine what fate befell the methods,
+  // this code places the overloaded new methods that have matching
+  // old methods in the same order as the old methods and places
+  // new overloaded methods at the end of overloaded methods of
+  // that name. The code for this order normalization is adapted
+  // from the algorithm used in instanceKlass::find_method().
+  // Since we are swapping out of order entries as we find them,
+  // we only have to search forward through the overloaded methods.
+  // Methods which are added and have the same name as an existing
+  // method (but different signature) will be put at the end of
+  // the methods with that name, and the name mismatch code will
+  // handle them.
+  objArrayHandle k_old_methods(the_class->methods());
+  objArrayHandle k_new_methods(scratch_class->methods());
+  int n_old_methods = k_old_methods->length();
+  int n_new_methods = k_new_methods->length();
+
+  int ni = 0;
+  int oi = 0;
+  while (true) {
+    methodOop k_old_method;
+    methodOop k_new_method;
+    enum { matched, added, deleted, undetermined } method_was = undetermined;
+
+    if (oi >= n_old_methods) {
+      if (ni >= n_new_methods) {
+        break; // we've looked at everything, done
+      }
+      // New method at the end
+      k_new_method = (methodOop) k_new_methods->obj_at(ni);
+      method_was = added;
+    } else if (ni >= n_new_methods) {
+      // Old method, at the end, is deleted
+      k_old_method = (methodOop) k_old_methods->obj_at(oi);
+      method_was = deleted;
+    } else {
+      // There are more methods in both the old and new lists
+      k_old_method = (methodOop) k_old_methods->obj_at(oi);
+      k_new_method = (methodOop) k_new_methods->obj_at(ni);
+      if (k_old_method->name() != k_new_method->name()) {
+        // Methods are sorted by method name, so a mismatch means added
+        // or deleted
+        if (k_old_method->name()->fast_compare(k_new_method->name()) > 0) {
+          method_was = added;
+        } else {
+          method_was = deleted;
+        }
+      } else if (k_old_method->signature() == k_new_method->signature()) {
+        // Both the name and signature match
+        method_was = matched;
+      } else {
+        // The name matches, but the signature doesn't, which means we have to
+        // search forward through the new overloaded methods.
+        int nj;  // outside the loop for post-loop check
+        for (nj = ni + 1; nj < n_new_methods; nj++) {
+          methodOop m = (methodOop)k_new_methods->obj_at(nj);
+          if (k_old_method->name() != m->name()) {
+            // reached another method name so no more overloaded methods
+            method_was = deleted;
+            break;
+          }
+          if (k_old_method->signature() == m->signature()) {
+            // found a match so swap the methods
+            k_new_methods->obj_at_put(ni, m);
+            k_new_methods->obj_at_put(nj, k_new_method);
+            k_new_method = m;
+            method_was = matched;
+            break;
+          }
+        }
+
+        if (nj >= n_new_methods) {
+          // reached the end without a match; so method was deleted
+          method_was = deleted;
+        }
+      }
+    }
+
+    switch (method_was) {
+    case matched:
+      // methods match, be sure modifiers do too
+      old_flags = (jushort) k_old_method->access_flags().get_flags();
+      new_flags = (jushort) k_new_method->access_flags().get_flags();
+      if ((old_flags ^ new_flags) & ~(JVM_ACC_NATIVE)) {
+        return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_MODIFIERS_CHANGED;
+      }
+      {
+        u2 new_num = k_new_method->method_idnum();
+        u2 old_num = k_old_method->method_idnum();
+        if (new_num != old_num) {
+          methodOop idnum_owner = scratch_class->method_with_idnum(old_num);
+          if (idnum_owner != NULL) {
+            // There is already a method assigned this idnum -- switch them
+            idnum_owner->set_method_idnum(new_num);
+          }
+          k_new_method->set_method_idnum(old_num);
+        }
+      }
+      // advance to next pair of methods
+      ++oi;
+      ++ni;
+      break;
+    case added:
+      // method added, see if it is OK
+      new_flags = (jushort) k_new_method->access_flags().get_flags();
+      if ((new_flags & JVM_ACC_PRIVATE) == 0
+        // hack: private should be treated as final, but alas
+        || (new_flags & (JVM_ACC_FINAL|JVM_ACC_STATIC)) == 0
+        ) {
+          // new methods must be private
+          return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_ADDED;
+      }
+      {
+        u2 num = the_class->next_method_idnum();
+        if (num == constMethodOopDesc::UNSET_IDNUM) {
+          // cannot add any more methods
+          return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_ADDED;
+        }
+        u2 new_num = k_new_method->method_idnum();
+        methodOop idnum_owner = scratch_class->method_with_idnum(num);
+        if (idnum_owner != NULL) {
+          // There is already a method assigned this idnum -- switch them
+          idnum_owner->set_method_idnum(new_num);
+        }
+        k_new_method->set_method_idnum(num);
+      }
+      ++ni; // advance to next new method
+      break;
+    case deleted:
+      // method deleted, see if it is OK
+      old_flags = (jushort) k_old_method->access_flags().get_flags();
+      if ((old_flags & JVM_ACC_PRIVATE) == 0
+        // hack: private should be treated as final, but alas
+        || (old_flags & (JVM_ACC_FINAL|JVM_ACC_STATIC)) == 0
+        ) {
+          // deleted methods must be private
+          return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_DELETED;
+      }
+      ++oi; // advance to next old method
+      break;
+    default:
+      ShouldNotReachHere();
+    }
+  }
+
+  return JVMTI_ERROR_NONE;
+}
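To make the order normalization in the method walk above concrete, a small hypothetical example (names and signatures invented for illustration):

    // old methods (sorted by name):  bar()V   foo(I)V   foo(J)V
    // new methods (sorted by name):  bar()V   foo(J)V   foo(I)V   foo(D)V
    //
    // bar()V matches directly. For foo(I)V the signatures differ at the current index,
    // so the walk searches forward in the new list, finds foo(I)V and swaps it with
    // foo(J)V; both foo overloads then match pairwise and keep their old method_idnum.
    // foo(D)V has no old counterpart, stays at the end of its name group and is
    // classified as 'added', which the checks above only permit for methods that are
    // private and also static or final.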
+
+int VM_RedefineClasses::calculate_redefinition_flags(instanceKlassHandle new_class) {
+
+  int result = Klass::NoRedefinition;
+
+
+
+  TRACE_RC2("Comparing different class versions of class %s", new_class->name()->as_C_string());
+
+  assert(new_class->old_version() != NULL, "must have old version");
+  instanceKlassHandle the_class(new_class->old_version());
+
+  // Check whether class is in the error init state.
+  if (the_class->is_in_error_state()) {
+    // TBD #5057930: special error code is needed in 1.6
+    //result = Klass::union_redefinition_level(result, Klass::Invalid);
+  }
+
+  int i;
+
+  //////////////////////////////////////////////////////////////////////////////////////////////////////////
+  // Check superclasses
+  assert(new_class->super() == NULL || new_class->super()->klass_part()->is_newest_version(), "");
+  if (the_class->super() != new_class->super()) {
+    // Super class changed
+
+    klassOop cur_klass = the_class->super();
+    while (cur_klass != NULL) {
+      if (!new_class->is_subclass_of(cur_klass->klass_part()->newest_version())) {
+        TRACE_RC2("Removed super class %s", cur_klass->klass_part()->name()->as_C_string());
+        result = result | Klass::RemoveSuperType | Klass::ModifyInstances | Klass::ModifyClass;
+
+        if (!cur_klass->klass_part()->has_subtype_changed()) {
+          TRACE_RC2("Subtype changed of class %s", cur_klass->klass_part()->name()->as_C_string());
+          cur_klass->klass_part()->set_subtype_changed(true);
+        }
+      }
+
+      cur_klass = cur_klass->klass_part()->super();
+    }
+
+    cur_klass = new_class->super();
+    while (cur_klass != NULL) {
+      if (!the_class->is_subclass_of(cur_klass->klass_part()->old_version())) {
+        TRACE_RC2("Added super class %s", cur_klass->klass_part()->name()->as_C_string());
+        result = result | Klass::ModifyClass | Klass::ModifyInstances;
+      }
+      cur_klass = cur_klass->klass_part()->super();
+    }
+  }
+
+  //////////////////////////////////////////////////////////////////////////////////////////////////////////
+  // Check interfaces
+
+  // Interfaces removed?
+  objArrayOop old_interfaces = the_class->transitive_interfaces();
+  for (i = 0; i<old_interfaces->length(); i++) {
+    instanceKlassHandle old_interface((klassOop)old_interfaces->obj_at(i));
+    if (!new_class->implements_interface_any_version(old_interface())) {
+      result = result | Klass::RemoveSuperType | Klass::ModifyClass;
+      TRACE_RC2("Removed interface %s", old_interface->name()->as_C_string());
+      
+      if (!old_interface->has_subtype_changed()) {
+        TRACE_RC2("Subtype changed of interface %s", old_interface->name()->as_C_string());
+        old_interface->set_subtype_changed(true);
+      }
+    }
+  }
+
+  // Interfaces added?
+  objArrayOop new_interfaces = new_class->transitive_interfaces();
+  for (i = 0; i<new_interfaces->length(); i++) {
+    if (!the_class->implements_interface_any_version((klassOop)new_interfaces->obj_at(i))) {
+      result = result | Klass::ModifyClass;
+      TRACE_RC2("Added interface %s", ((klassOop)new_interfaces->obj_at(i))->klass_part()->name()->as_C_string());
+    }
+  }
+
+
+  // Check whether class modifiers are the same.
+  jushort old_flags = (jushort) the_class->access_flags().get_flags();
+  jushort new_flags = (jushort) new_class->access_flags().get_flags();
+  if (old_flags != new_flags) {
+    // TODO (tw): Can this have any effects?
+  }
+  
+  // Check if the number, names, types and order of fields declared in these classes
+  // are the same.
+  JavaFieldStream old_fs(the_class);
+  JavaFieldStream new_fs(new_class);
+  for (; !old_fs.done() && !new_fs.done(); old_fs.next(), new_fs.next()) {
+    // access
+    old_flags = old_fs.access_flags().as_short();
+    new_flags = new_fs.access_flags().as_short();
+    if ((old_flags ^ new_flags) & JVM_RECOGNIZED_FIELD_MODIFIERS) {
+      // TODO (tw) can this have any effect?
+    }
+    // offset
+    if (old_fs.offset() != new_fs.offset()) {
+      result = result | Klass::ModifyInstances;
+    }
+    // name and signature
+    Symbol* name_sym1 = the_class->constants()->symbol_at(old_fs.name_index());
+    Symbol* sig_sym1 = the_class->constants()->symbol_at(old_fs.signature_index());
+    Symbol* name_sym2 = new_class->constants()->symbol_at(new_fs.name_index());
+    Symbol* sig_sym2 = new_class->constants()->symbol_at(new_fs.signature_index());
+    if (name_sym1 != name_sym2 || sig_sym1 != sig_sym2) {
+      result = result | Klass::ModifyInstances;
     }
   }
 
   // If both streams aren't done then we have a differing number of
   // fields.
   if (!old_fs.done() || !new_fs.done()) {
-    return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_SCHEMA_CHANGED;
+      result = result | Klass::ModifyInstances;
   }
 
   // Do a parallel walk through the old and new methods. Detect
@@ -606,7 +1054,7 @@ jvmtiError VM_RedefineClasses::compare_and_normalize_class_versions(
   // the methods with that name, and the name mismatch code will
   // handle them.
   objArrayHandle k_old_methods(the_class->methods());
-  objArrayHandle k_new_methods(scratch_class->methods());
+  objArrayHandle k_new_methods(new_class->methods());
   int n_old_methods = k_old_methods->length();
   int n_new_methods = k_new_methods->length();
 
@@ -672,2278 +1120,701 @@ jvmtiError VM_RedefineClasses::compare_and_normalize_class_versions(
     }
 
     switch (method_was) {
-    case matched:
-      // methods match, be sure modifiers do too
-      old_flags = (jushort) k_old_method->access_flags().get_flags();
-      new_flags = (jushort) k_new_method->access_flags().get_flags();
-      if ((old_flags ^ new_flags) & ~(JVM_ACC_NATIVE)) {
-        return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_MODIFIERS_CHANGED;
-      }
-      {
-        u2 new_num = k_new_method->method_idnum();
-        u2 old_num = k_old_method->method_idnum();
-        if (new_num != old_num) {
-          methodOop idnum_owner = scratch_class->method_with_idnum(old_num);
-          if (idnum_owner != NULL) {
-            // There is already a method assigned this idnum -- switch them
-            idnum_owner->set_method_idnum(new_num);
-          }
-          k_new_method->set_method_idnum(old_num);
-          swap_all_method_annotations(old_num, new_num, scratch_class);
-        }
-      }
-      RC_TRACE(0x00008000, ("Method matched: new: %s [%d] == old: %s [%d]",
-                            k_new_method->name_and_sig_as_C_string(), ni,
-                            k_old_method->name_and_sig_as_C_string(), oi));
-      // advance to next pair of methods
-      ++oi;
-      ++ni;
-      break;
-    case added:
-      // method added, see if it is OK
-      new_flags = (jushort) k_new_method->access_flags().get_flags();
-      if ((new_flags & JVM_ACC_PRIVATE) == 0
-           // hack: private should be treated as final, but alas
-          || (new_flags & (JVM_ACC_FINAL|JVM_ACC_STATIC)) == 0
-         ) {
-        // new methods must be private
-        return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_ADDED;
-      }
-      {
-        u2 num = the_class->next_method_idnum();
-        if (num == constMethodOopDesc::UNSET_IDNUM) {
-          // cannot add any more methods
-          return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_ADDED;
-        }
-        u2 new_num = k_new_method->method_idnum();
-        methodOop idnum_owner = scratch_class->method_with_idnum(num);
+  case matched:
+    // methods match, be sure modifiers do too
+    old_flags = (jushort) k_old_method->access_flags().get_flags();
+    new_flags = (jushort) k_new_method->access_flags().get_flags();
+    if ((old_flags ^ new_flags) & ~(JVM_ACC_NATIVE)) {
+      // (tw) Can this have any effect? Probably yes, on vtables.
+      result = result | Klass::ModifyClass;
+    }
+    {
+      u2 new_num = k_new_method->method_idnum();
+      u2 old_num = k_old_method->method_idnum();
+      if (new_num != old_num) {
+        methodOop idnum_owner = new_class->method_with_idnum(old_num);
         if (idnum_owner != NULL) {
           // There is already a method assigned this idnum -- switch them
           idnum_owner->set_method_idnum(new_num);
         }
-        k_new_method->set_method_idnum(num);
-        swap_all_method_annotations(new_num, num, scratch_class);
-      }
-      RC_TRACE(0x00008000, ("Method added: new: %s [%d]",
-                            k_new_method->name_and_sig_as_C_string(), ni));
-      ++ni; // advance to next new method
-      break;
-    case deleted:
-      // method deleted, see if it is OK
-      old_flags = (jushort) k_old_method->access_flags().get_flags();
-      if ((old_flags & JVM_ACC_PRIVATE) == 0
-           // hack: private should be treated as final, but alas
-          || (old_flags & (JVM_ACC_FINAL|JVM_ACC_STATIC)) == 0
-         ) {
-        // deleted methods must be private
-        return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_DELETED;
-      }
-      RC_TRACE(0x00008000, ("Method deleted: old: %s [%d]",
-                            k_old_method->name_and_sig_as_C_string(), oi));
-      ++oi; // advance to next old method
-      break;
-    default:
-      ShouldNotReachHere();
-    }
-  }
-
-  return JVMTI_ERROR_NONE;
-}
-
-
-// Find new constant pool index value for old constant pool index value
-// by seaching the index map. Returns zero (0) if there is no mapped
-// value for the old constant pool index.
-int VM_RedefineClasses::find_new_index(int old_index) {
-  if (_index_map_count == 0) {
-    // map is empty so nothing can be found
-    return 0;
-  }
-
-  if (old_index < 1 || old_index >= _index_map_p->length()) {
-    // The old_index is out of range so it is not mapped. This should
-    // not happen in regular constant pool merging use, but it can
-    // happen if a corrupt annotation is processed.
-    return 0;
-  }
-
-  int value = _index_map_p->at(old_index);
-  if (value == -1) {
-    // the old_index is not mapped
-    return 0;
-  }
-
-  return value;
-} // end find_new_index()
-
-
-// Returns true if the current mismatch is due to a resolved/unresolved
-// class pair. Otherwise, returns false.
-bool VM_RedefineClasses::is_unresolved_class_mismatch(constantPoolHandle cp1,
-       int index1, constantPoolHandle cp2, int index2) {
-
-  jbyte t1 = cp1->tag_at(index1).value();
-  if (t1 != JVM_CONSTANT_Class && t1 != JVM_CONSTANT_UnresolvedClass) {
-    return false;  // wrong entry type; not our special case
-  }
-
-  jbyte t2 = cp2->tag_at(index2).value();
-  if (t2 != JVM_CONSTANT_Class && t2 != JVM_CONSTANT_UnresolvedClass) {
-    return false;  // wrong entry type; not our special case
-  }
-
-  if (t1 == t2) {
-    return false;  // not a mismatch; not our special case
-  }
-
-  char *s1 = cp1->klass_name_at(index1)->as_C_string();
-  char *s2 = cp2->klass_name_at(index2)->as_C_string();
-  if (strcmp(s1, s2) != 0) {
-    return false;  // strings don't match; not our special case
-  }
-
-  return true;  // made it through the gauntlet; this is our special case
-} // end is_unresolved_class_mismatch()
-
-
-// Returns true if the current mismatch is due to a resolved/unresolved
-// string pair. Otherwise, returns false.
-bool VM_RedefineClasses::is_unresolved_string_mismatch(constantPoolHandle cp1,
-       int index1, constantPoolHandle cp2, int index2) {
-
-  jbyte t1 = cp1->tag_at(index1).value();
-  if (t1 != JVM_CONSTANT_String && t1 != JVM_CONSTANT_UnresolvedString) {
-    return false;  // wrong entry type; not our special case
-  }
-
-  jbyte t2 = cp2->tag_at(index2).value();
-  if (t2 != JVM_CONSTANT_String && t2 != JVM_CONSTANT_UnresolvedString) {
-    return false;  // wrong entry type; not our special case
-  }
-
-  if (t1 == t2) {
-    return false;  // not a mismatch; not our special case
-  }
-
-  char *s1 = cp1->string_at_noresolve(index1);
-  char *s2 = cp2->string_at_noresolve(index2);
-  if (strcmp(s1, s2) != 0) {
-    return false;  // strings don't match; not our special case
-  }
-
-  return true;  // made it through the gauntlet; this is our special case
-} // end is_unresolved_string_mismatch()
-
-
-jvmtiError VM_RedefineClasses::load_new_class_versions(TRAPS) {
-  // For consistency allocate memory using os::malloc wrapper.
-  _scratch_classes = (instanceKlassHandle *)
-    os::malloc(sizeof(instanceKlassHandle) * _class_count, mtInternal);
-  if (_scratch_classes == NULL) {
-    return JVMTI_ERROR_OUT_OF_MEMORY;
-  }
-
-  ResourceMark rm(THREAD);
-
-  JvmtiThreadState *state = JvmtiThreadState::state_for(JavaThread::current());
-  // state can only be NULL if the current thread is exiting which
-  // should not happen since we're trying to do a RedefineClasses
-  guarantee(state != NULL, "exiting thread calling load_new_class_versions");
-  for (int i = 0; i < _class_count; i++) {
-    oop mirror = JNIHandles::resolve_non_null(_class_defs[i].klass);
-    // classes for primitives cannot be redefined
-    if (!is_modifiable_class(mirror)) {
-      return JVMTI_ERROR_UNMODIFIABLE_CLASS;
-    }
-    klassOop the_class_oop = java_lang_Class::as_klassOop(mirror);
-    instanceKlassHandle the_class = instanceKlassHandle(THREAD, the_class_oop);
-    Symbol*  the_class_sym = the_class->name();
-
-    // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
-    RC_TRACE_WITH_THREAD(0x00000001, THREAD,
-      ("loading name=%s kind=%d (avail_mem=" UINT64_FORMAT "K)",
-      the_class->external_name(), _class_load_kind,
-      os::available_memory() >> 10));
-
-    ClassFileStream st((u1*) _class_defs[i].class_bytes,
-      _class_defs[i].class_byte_count, (char *)"__VM_RedefineClasses__");
-
-    // Parse the stream.
-    Handle the_class_loader(THREAD, the_class->class_loader());
-    Handle protection_domain(THREAD, the_class->protection_domain());
-    // Set redefined class handle in JvmtiThreadState class.
-    // This redefined class is sent to agent event handler for class file
-    // load hook event.
-    state->set_class_being_redefined(&the_class, _class_load_kind);
-
-    klassOop k = SystemDictionary::parse_stream(the_class_sym,
-                                                the_class_loader,
-                                                protection_domain,
-                                                &st,
-                                                THREAD);
-    // Clear class_being_redefined just to be sure.
-    state->clear_class_being_redefined();
-
-    // TODO: if this is retransform, and nothing changed we can skip it
-
-    instanceKlassHandle scratch_class (THREAD, k);
-
-    if (HAS_PENDING_EXCEPTION) {
-      Symbol* ex_name = PENDING_EXCEPTION->klass()->klass_part()->name();
-      // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
-      RC_TRACE_WITH_THREAD(0x00000002, THREAD, ("parse_stream exception: '%s'",
-        ex_name->as_C_string()));
-      CLEAR_PENDING_EXCEPTION;
-
-      if (ex_name == vmSymbols::java_lang_UnsupportedClassVersionError()) {
-        return JVMTI_ERROR_UNSUPPORTED_VERSION;
-      } else if (ex_name == vmSymbols::java_lang_ClassFormatError()) {
-        return JVMTI_ERROR_INVALID_CLASS_FORMAT;
-      } else if (ex_name == vmSymbols::java_lang_ClassCircularityError()) {
-        return JVMTI_ERROR_CIRCULAR_CLASS_DEFINITION;
-      } else if (ex_name == vmSymbols::java_lang_NoClassDefFoundError()) {
-        // The message will be "XXX (wrong name: YYY)"
-        return JVMTI_ERROR_NAMES_DONT_MATCH;
-      } else if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
-        return JVMTI_ERROR_OUT_OF_MEMORY;
-      } else {  // Just in case more exceptions can be thrown..
-        return JVMTI_ERROR_FAILS_VERIFICATION;
-      }
-    }
-
-    // Ensure class is linked before redefine
-    if (!the_class->is_linked()) {
-      the_class->link_class(THREAD);
-      if (HAS_PENDING_EXCEPTION) {
-        Symbol* ex_name = PENDING_EXCEPTION->klass()->klass_part()->name();
-        // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
-        RC_TRACE_WITH_THREAD(0x00000002, THREAD, ("link_class exception: '%s'",
-          ex_name->as_C_string()));
-        CLEAR_PENDING_EXCEPTION;
-        if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
-          return JVMTI_ERROR_OUT_OF_MEMORY;
-        } else {
-          return JVMTI_ERROR_INTERNAL;
-        }
+        k_new_method->set_method_idnum(old_num);
+        TRACE_RC2("swapping idnum of new and old method %d / %d!", new_num, old_num);        
+        swap_all_method_annotations(old_num, new_num, new_class);
       }
     }
-
-    // Do the validity checks in compare_and_normalize_class_versions()
-    // before verifying the byte codes. By doing these checks first, we
-    // limit the number of functions that require redirection from
-    // the_class to scratch_class. In particular, we don't have to
-    // modify JNI GetSuperclass() and thus won't change its performance.
-    jvmtiError res = compare_and_normalize_class_versions(the_class,
-                       scratch_class);
-    if (res != JVMTI_ERROR_NONE) {
-      return res;
+    TRACE_RC3("Method matched: new: %s [%d] == old: %s [%d]",
+      k_new_method->name_and_sig_as_C_string(), ni,
+      k_old_method->name_and_sig_as_C_string(), oi);
+    // advance to next pair of methods
+    ++oi;
+    ++ni;
+    break;
+  case added:
+    // method added, see if it is OK
+    new_flags = (jushort) k_new_method->access_flags().get_flags();
+    if ((new_flags & JVM_ACC_PRIVATE) == 0
+      // hack: private should be treated as final, but alas
+      || (new_flags & (JVM_ACC_FINAL|JVM_ACC_STATIC)) == 0
+      ) {
+        // added method is not both private and final/static, so the class structure (e.g. vtable) may change
+        result = result | Klass::ModifyClass;
     }
-
-    // verify what the caller passed us
     {
-      // The bug 6214132 caused the verification to fail.
-      // Information about the_class and scratch_class is temporarily
-      // recorded into jvmtiThreadState. This data is used to redirect
-      // the_class to scratch_class in the JVM_* functions called by the
-      // verifier. Please, refer to jvmtiThreadState.hpp for the detailed
-      // description.
-      RedefineVerifyMark rvm(&the_class, &scratch_class, state);
-      Verifier::verify(
-        scratch_class, Verifier::ThrowException, true, THREAD);
-    }
-
-    if (HAS_PENDING_EXCEPTION) {
-      Symbol* ex_name = PENDING_EXCEPTION->klass()->klass_part()->name();
-      // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
-      RC_TRACE_WITH_THREAD(0x00000002, THREAD,
-        ("verify_byte_codes exception: '%s'", ex_name->as_C_string()));
-      CLEAR_PENDING_EXCEPTION;
-      if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
-        return JVMTI_ERROR_OUT_OF_MEMORY;
-      } else {
-        // tell the caller the bytecodes are bad
-        return JVMTI_ERROR_FAILS_VERIFICATION;
-      }
-    }
-
-    res = merge_cp_and_rewrite(the_class, scratch_class, THREAD);
-    if (res != JVMTI_ERROR_NONE) {
-      return res;
-    }
-
-    if (VerifyMergedCPBytecodes) {
-      // verify what we have done during constant pool merging
-      {
-        RedefineVerifyMark rvm(&the_class, &scratch_class, state);
-        Verifier::verify(scratch_class, Verifier::ThrowException, true, THREAD);
-      }
-
-      if (HAS_PENDING_EXCEPTION) {
-        Symbol* ex_name = PENDING_EXCEPTION->klass()->klass_part()->name();
-        // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
-        RC_TRACE_WITH_THREAD(0x00000002, THREAD,
-          ("verify_byte_codes post merge-CP exception: '%s'",
-          ex_name->as_C_string()));
-        CLEAR_PENDING_EXCEPTION;
-        if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
-          return JVMTI_ERROR_OUT_OF_MEMORY;
-        } else {
-          // tell the caller that constant pool merging screwed up
-          return JVMTI_ERROR_INTERNAL;
-        }
-      }
-    }
-
-    Rewriter::rewrite(scratch_class, THREAD);
-    if (!HAS_PENDING_EXCEPTION) {
-      Rewriter::relocate_and_link(scratch_class, THREAD);
-    }
-    if (HAS_PENDING_EXCEPTION) {
-      Symbol* ex_name = PENDING_EXCEPTION->klass()->klass_part()->name();
-      CLEAR_PENDING_EXCEPTION;
-      if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
-        return JVMTI_ERROR_OUT_OF_MEMORY;
-      } else {
-        return JVMTI_ERROR_INTERNAL;
-      }
-    }
-
-    _scratch_classes[i] = scratch_class;
-
-    // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
-    RC_TRACE_WITH_THREAD(0x00000001, THREAD,
-      ("loaded name=%s (avail_mem=" UINT64_FORMAT "K)",
-      the_class->external_name(), os::available_memory() >> 10));
-  }
-
-  return JVMTI_ERROR_NONE;
-}
-
-
-// Map old_index to new_index as needed. scratch_cp is only needed
-// for RC_TRACE() calls.
-void VM_RedefineClasses::map_index(constantPoolHandle scratch_cp,
-       int old_index, int new_index) {
-  if (find_new_index(old_index) != 0) {
-    // old_index is already mapped
-    return;
-  }
-
-  if (old_index == new_index) {
-    // no mapping is needed
-    return;
-  }
-
-  _index_map_p->at_put(old_index, new_index);
-  _index_map_count++;
-
-  RC_TRACE(0x00040000, ("mapped tag %d at index %d to %d",
-    scratch_cp->tag_at(old_index).value(), old_index, new_index));
-} // end map_index()
-
-
-// Merge old_cp and scratch_cp and return the results of the merge via
-// merge_cp_p. The number of entries in *merge_cp_p is returned via
-// merge_cp_length_p. The entries in old_cp occupy the same locations
-// in *merge_cp_p. Also creates a map of indices from entries in
-// scratch_cp to the corresponding entry in *merge_cp_p. Index map
-// entries are only created for entries in scratch_cp that occupy a
-// different location in *merged_cp_p.
-bool VM_RedefineClasses::merge_constant_pools(constantPoolHandle old_cp,
-       constantPoolHandle scratch_cp, constantPoolHandle *merge_cp_p,
-       int *merge_cp_length_p, TRAPS) {
-
-  if (merge_cp_p == NULL) {
-    assert(false, "caller must provide scatch constantPool");
-    return false; // robustness
-  }
-  if (merge_cp_length_p == NULL) {
-    assert(false, "caller must provide scatch CP length");
-    return false; // robustness
-  }
-  // Worst case we need old_cp->length() + scratch_cp()->length(),
-  // but the caller might be smart so make sure we have at least
-  // the minimum.
-  if ((*merge_cp_p)->length() < old_cp->length()) {
-    assert(false, "merge area too small");
-    return false; // robustness
-  }
-
-  RC_TRACE_WITH_THREAD(0x00010000, THREAD,
-    ("old_cp_len=%d, scratch_cp_len=%d", old_cp->length(),
-    scratch_cp->length()));
-
-  {
-    // Pass 0:
-    // The old_cp is copied to *merge_cp_p; this means that any code
-    // using old_cp does not have to change. This work looks like a
-    // perfect fit for constantPoolOop::copy_cp_to(), but we need to
-    // handle one special case:
-    // - revert JVM_CONSTANT_Class to JVM_CONSTANT_UnresolvedClass
-    // This will make verification happy.
-
-    int old_i;  // index into old_cp
-
-    // index zero (0) is not used in constantPools
-    for (old_i = 1; old_i < old_cp->length(); old_i++) {
-      // leave debugging crumb
-      jbyte old_tag = old_cp->tag_at(old_i).value();
-      switch (old_tag) {
-      case JVM_CONSTANT_Class:
-      case JVM_CONSTANT_UnresolvedClass:
-        // revert the copy to JVM_CONSTANT_UnresolvedClass
-        // May be resolving while calling this so do the same for
-        // JVM_CONSTANT_UnresolvedClass (klass_name_at() deals with transition)
-        (*merge_cp_p)->unresolved_klass_at_put(old_i,
-          old_cp->klass_name_at(old_i));
-        break;
-
-      case JVM_CONSTANT_Double:
-      case JVM_CONSTANT_Long:
-        // just copy the entry to *merge_cp_p, but double and long take
-        // two constant pool entries
-        constantPoolOopDesc::copy_entry_to(old_cp, old_i, *merge_cp_p, old_i, CHECK_0);
-        old_i++;
-        break;
-
-      default:
-        // just copy the entry to *merge_cp_p
-        constantPoolOopDesc::copy_entry_to(old_cp, old_i, *merge_cp_p, old_i, CHECK_0);
-        break;
-      }
-    } // end for each old_cp entry
-
-    // We don't need to sanity check that *merge_cp_length_p is within
-    // *merge_cp_p bounds since we have the minimum on-entry check above.
-    (*merge_cp_length_p) = old_i;
-  }
-
-  // merge_cp_len should be the same as old_cp->length() at this point
-  // so this trace message is really a "warm-and-breathing" message.
-  RC_TRACE_WITH_THREAD(0x00020000, THREAD,
-    ("after pass 0: merge_cp_len=%d", *merge_cp_length_p));
-
-  int scratch_i;  // index into scratch_cp
-  {
-    // Pass 1a:
-    // Compare scratch_cp entries to the old_cp entries that we have
-    // already copied to *merge_cp_p. In this pass, we are eliminating
-    // exact duplicates (matching entry at same index) so we only
-    // compare entries in the common indice range.
-    int increment = 1;
-    int pass1a_length = MIN2(old_cp->length(), scratch_cp->length());
-    for (scratch_i = 1; scratch_i < pass1a_length; scratch_i += increment) {
-      switch (scratch_cp->tag_at(scratch_i).value()) {
-      case JVM_CONSTANT_Double:
-      case JVM_CONSTANT_Long:
-        // double and long take two constant pool entries
-        increment = 2;
-        break;
-
-      default:
-        increment = 1;
-        break;
-      }
-
-      bool match = scratch_cp->compare_entry_to(scratch_i, *merge_cp_p,
-        scratch_i, CHECK_0);
-      if (match) {
-        // found a match at the same index so nothing more to do
-        continue;
-      } else if (is_unresolved_class_mismatch(scratch_cp, scratch_i,
-                                              *merge_cp_p, scratch_i)) {
-        // The mismatch in compare_entry_to() above is because of a
-        // resolved versus unresolved class entry at the same index
-        // with the same string value. Since Pass 0 reverted any
-        // class entries to unresolved class entries in *merge_cp_p,
-        // we go with the unresolved class entry.
-        continue;
-      } else if (is_unresolved_string_mismatch(scratch_cp, scratch_i,
-                                               *merge_cp_p, scratch_i)) {
-        // The mismatch in compare_entry_to() above is because of a
-        // resolved versus unresolved string entry at the same index
-        // with the same string value. We can live with whichever
-        // happens to be at scratch_i in *merge_cp_p.
-        continue;
-      }
-
-      int found_i = scratch_cp->find_matching_entry(scratch_i, *merge_cp_p,
-        CHECK_0);
-      if (found_i != 0) {
-        guarantee(found_i != scratch_i,
-          "compare_entry_to() and find_matching_entry() do not agree");
-
-        // Found a matching entry somewhere else in *merge_cp_p so
-        // just need a mapping entry.
-        map_index(scratch_cp, scratch_i, found_i);
-        continue;
-      }
-
-      // The find_matching_entry() call above could fail to find a match
-      // due to a resolved versus unresolved class or string entry situation
-      // like we solved above with the is_unresolved_*_mismatch() calls.
-      // However, we would have to call is_unresolved_*_mismatch() over
-      // all of *merge_cp_p (potentially) and that doesn't seem to be
-      // worth the time.
-
-      // No match found so we have to append this entry and any unique
-      // referenced entries to *merge_cp_p.
-      append_entry(scratch_cp, scratch_i, merge_cp_p, merge_cp_length_p,
-        CHECK_0);
-    }
-  }
-
-  RC_TRACE_WITH_THREAD(0x00020000, THREAD,
-    ("after pass 1a: merge_cp_len=%d, scratch_i=%d, index_map_len=%d",
-    *merge_cp_length_p, scratch_i, _index_map_count));
-
-  if (scratch_i < scratch_cp->length()) {
-    // Pass 1b:
-    // old_cp is smaller than scratch_cp so there are entries in
-    // scratch_cp that we have not yet processed. We take care of
-    // those now.
-    int increment = 1;
-    for (; scratch_i < scratch_cp->length(); scratch_i += increment) {
-      switch (scratch_cp->tag_at(scratch_i).value()) {
-      case JVM_CONSTANT_Double:
-      case JVM_CONSTANT_Long:
-        // double and long take two constant pool entries
-        increment = 2;
-        break;
-
-      default:
-        increment = 1;
-        break;
-      }
-
-      int found_i =
-        scratch_cp->find_matching_entry(scratch_i, *merge_cp_p, CHECK_0);
-      if (found_i != 0) {
-        // Found a matching entry somewhere else in *merge_cp_p so
-        // just need a mapping entry.
-        map_index(scratch_cp, scratch_i, found_i);
-        continue;
-      }
-
-      // No match found so we have to append this entry and any unique
-      // referenced entries to *merge_cp_p.
-      append_entry(scratch_cp, scratch_i, merge_cp_p, merge_cp_length_p,
-        CHECK_0);
-    }
-
-    RC_TRACE_WITH_THREAD(0x00020000, THREAD,
-      ("after pass 1b: merge_cp_len=%d, scratch_i=%d, index_map_len=%d",
-      *merge_cp_length_p, scratch_i, _index_map_count));
-  }
-
-  return true;
-} // end merge_constant_pools()
-
-
-// Merge constant pools between the_class and scratch_class and
-// potentially rewrite bytecodes in scratch_class to use the merged
-// constant pool.
-jvmtiError VM_RedefineClasses::merge_cp_and_rewrite(
-             instanceKlassHandle the_class, instanceKlassHandle scratch_class,
-             TRAPS) {
-  // worst case merged constant pool length is old and new combined
-  int merge_cp_length = the_class->constants()->length()
-        + scratch_class->constants()->length();
-
-  constantPoolHandle old_cp(THREAD, the_class->constants());
-  constantPoolHandle scratch_cp(THREAD, scratch_class->constants());
-
-  // Constant pools are not easily reused so we allocate a new one
-  // each time.
-  // merge_cp is created unsafe for concurrent GC processing.  It
-  // should be marked safe before discarding it. Even though
-  // garbage,  if it crosses a card boundary, it may be scanned
-  // in order to find the start of the first complete object on the card.
-  constantPoolHandle merge_cp(THREAD,
-    oopFactory::new_constantPool(merge_cp_length,
-                                 oopDesc::IsUnsafeConc,
-                                 THREAD));
-  int orig_length = old_cp->orig_length();
-  if (orig_length == 0) {
-    // This old_cp is an actual original constant pool. We save
-    // the original length in the merged constant pool so that
-    // merge_constant_pools() can be more efficient. If a constant
-    // pool has a non-zero orig_length() value, then that constant
-    // pool was created by a merge operation in RedefineClasses.
-    merge_cp->set_orig_length(old_cp->length());
-  } else {
-    // This old_cp is a merged constant pool from a previous
-    // RedefineClasses() calls so just copy the orig_length()
-    // value.
-    merge_cp->set_orig_length(old_cp->orig_length());
-  }
-
-  ResourceMark rm(THREAD);
-  _index_map_count = 0;
-  _index_map_p = new intArray(scratch_cp->length(), -1);
-
-  bool result = merge_constant_pools(old_cp, scratch_cp, &merge_cp,
-                  &merge_cp_length, THREAD);
-  if (!result) {
-    // The merge can fail due to memory allocation failure or due
-    // to robustness checks.
-    return JVMTI_ERROR_INTERNAL;
-  }
-
-  RC_TRACE_WITH_THREAD(0x00010000, THREAD,
-    ("merge_cp_len=%d, index_map_len=%d", merge_cp_length, _index_map_count));
-
-  if (_index_map_count == 0) {
-    // there is nothing to map between the new and merged constant pools
-
-    if (old_cp->length() == scratch_cp->length()) {
-      // The old and new constant pools are the same length and the
-      // index map is empty. This means that the three constant pools
-      // are equivalent (but not the same). Unfortunately, the new
-      // constant pool has not gone through link resolution nor have
-      // the new class bytecodes gone through constant pool cache
-      // rewriting so we can't use the old constant pool with the new
-      // class.
-
-      merge_cp()->set_is_conc_safe(true);
-      merge_cp = constantPoolHandle();  // toss the merged constant pool
-    } else if (old_cp->length() < scratch_cp->length()) {
-      // The old constant pool has fewer entries than the new constant
-      // pool and the index map is empty. This means the new constant
-      // pool is a superset of the old constant pool. However, the old
-      // class bytecodes have already gone through constant pool cache
-      // rewriting so we can't use the new constant pool with the old
-      // class.
-
-      merge_cp()->set_is_conc_safe(true);
-      merge_cp = constantPoolHandle();  // toss the merged constant pool
-    } else {
-      // The old constant pool has more entries than the new constant
-      // pool and the index map is empty. This means that both the old
-      // and merged constant pools are supersets of the new constant
-      // pool.
-
-      // Replace the new constant pool with a shrunken copy of the
-      // merged constant pool; the previous new constant pool will
-      // get GCed.
-      set_new_constant_pool(scratch_class, merge_cp, merge_cp_length, true,
-        THREAD);
-      // drop local ref to the merged constant pool
-      merge_cp()->set_is_conc_safe(true);
-      merge_cp = constantPoolHandle();
-    }
-  } else {
-    if (RC_TRACE_ENABLED(0x00040000)) {
-      // don't want to loop unless we are tracing
-      int count = 0;
-      for (int i = 1; i < _index_map_p->length(); i++) {
-        int value = _index_map_p->at(i);
-
-        if (value != -1) {
-          RC_TRACE_WITH_THREAD(0x00040000, THREAD,
-            ("index_map[%d]: old=%d new=%d", count, i, value));
-          count++;
-        }
-      }
-    }
-
-    // We have entries mapped between the new and merged constant pools
-    // so we have to rewrite some constant pool references.
-    if (!rewrite_cp_refs(scratch_class, THREAD)) {
-      return JVMTI_ERROR_INTERNAL;
-    }
-
-    // Replace the new constant pool with a shrunken copy of the
-    // merged constant pool so now the rewritten bytecodes have
-    // valid references; the previous new constant pool will get
-    // GCed.
-    set_new_constant_pool(scratch_class, merge_cp, merge_cp_length, true,
-      THREAD);
-    merge_cp()->set_is_conc_safe(true);
-  }
-  assert(old_cp()->is_conc_safe(), "Just checking");
-  assert(scratch_cp()->is_conc_safe(), "Just checking");
-
-  return JVMTI_ERROR_NONE;
-} // end merge_cp_and_rewrite()
-
-
-// Rewrite constant pool references in klass scratch_class.
-bool VM_RedefineClasses::rewrite_cp_refs(instanceKlassHandle scratch_class,
-       TRAPS) {
-
-  // rewrite constant pool references in the methods:
-  if (!rewrite_cp_refs_in_methods(scratch_class, THREAD)) {
-    // propagate failure back to caller
-    return false;
-  }
-
-  // rewrite constant pool references in the class_annotations:
-  if (!rewrite_cp_refs_in_class_annotations(scratch_class, THREAD)) {
-    // propagate failure back to caller
-    return false;
-  }
-
-  // rewrite constant pool references in the fields_annotations:
-  if (!rewrite_cp_refs_in_fields_annotations(scratch_class, THREAD)) {
-    // propagate failure back to caller
-    return false;
-  }
-
-  // rewrite constant pool references in the methods_annotations:
-  if (!rewrite_cp_refs_in_methods_annotations(scratch_class, THREAD)) {
-    // propagate failure back to caller
-    return false;
-  }
-
-  // rewrite constant pool references in the methods_parameter_annotations:
-  if (!rewrite_cp_refs_in_methods_parameter_annotations(scratch_class,
-         THREAD)) {
-    // propagate failure back to caller
-    return false;
-  }
-
-  // rewrite constant pool references in the methods_default_annotations:
-  if (!rewrite_cp_refs_in_methods_default_annotations(scratch_class,
-         THREAD)) {
-    // propagate failure back to caller
-    return false;
-  }
-
-  return true;
-} // end rewrite_cp_refs()
-
-
-// Rewrite constant pool references in the methods.
-bool VM_RedefineClasses::rewrite_cp_refs_in_methods(
-       instanceKlassHandle scratch_class, TRAPS) {
-
-  objArrayHandle methods(THREAD, scratch_class->methods());
-
-  if (methods.is_null() || methods->length() == 0) {
-    // no methods so nothing to do
-    return true;
-  }
-
-  // rewrite constant pool references in the methods:
-  for (int i = methods->length() - 1; i >= 0; i--) {
-    methodHandle method(THREAD, (methodOop)methods->obj_at(i));
-    methodHandle new_method;
-    rewrite_cp_refs_in_method(method, &new_method, CHECK_false);
-    if (!new_method.is_null()) {
-      // the method has been replaced so save the new method version
-      methods->obj_at_put(i, new_method());
-    }
-  }
-
-  return true;
-}
-
-
-// Rewrite constant pool references in the specific method. This code
-// was adapted from Rewriter::rewrite_method().
-void VM_RedefineClasses::rewrite_cp_refs_in_method(methodHandle method,
-       methodHandle *new_method_p, TRAPS) {
-
-  *new_method_p = methodHandle();  // default is no new method
-
-  // We cache a pointer to the bytecodes here in code_base. If GC
-  // moves the methodOop, then the bytecodes will also move which
-  // will likely cause a crash. We create a No_Safepoint_Verifier
-  // object to detect whether we pass a possible safepoint in this
-  // code block.
-  No_Safepoint_Verifier nsv;
-
-  // Bytecodes and their length
-  address code_base = method->code_base();
-  int code_length = method->code_size();
-
-  int bc_length;
-  for (int bci = 0; bci < code_length; bci += bc_length) {
-    address bcp = code_base + bci;
-    Bytecodes::Code c = (Bytecodes::Code)(*bcp);
-
-    bc_length = Bytecodes::length_for(c);
-    if (bc_length == 0) {
-      // More complicated bytecodes report a length of zero so
-      // we have to try again a slightly different way.
-      bc_length = Bytecodes::length_at(method(), bcp);
-    }
-
-    assert(bc_length != 0, "impossible bytecode length");
-
-    switch (c) {
-      case Bytecodes::_ldc:
-      {
-        int cp_index = *(bcp + 1);
-        int new_index = find_new_index(cp_index);
-
-        if (StressLdcRewrite && new_index == 0) {
-          // If we are stressing ldc -> ldc_w rewriting, then we
-          // always need a new_index value.
-          new_index = cp_index;
-        }
-        if (new_index != 0) {
-          // the original index is mapped so we have more work to do
-          if (!StressLdcRewrite && new_index <= max_jubyte) {
-            // The new value can still use ldc instead of ldc_w
-            // unless we are trying to stress ldc -> ldc_w rewriting
-            RC_TRACE_WITH_THREAD(0x00080000, THREAD,
-              ("%s@" INTPTR_FORMAT " old=%d, new=%d", Bytecodes::name(c),
-              bcp, cp_index, new_index));
-            *(bcp + 1) = new_index;
-          } else {
-            RC_TRACE_WITH_THREAD(0x00080000, THREAD,
-              ("%s->ldc_w@" INTPTR_FORMAT " old=%d, new=%d",
-              Bytecodes::name(c), bcp, cp_index, new_index));
-            // the new value needs ldc_w instead of ldc
-            u_char inst_buffer[4]; // max instruction size is 4 bytes
-            bcp = (address)inst_buffer;
-            // construct new instruction sequence
-            *bcp = Bytecodes::_ldc_w;
-            bcp++;
-            // Rewriter::rewrite_method() does not rewrite ldc -> ldc_w.
-            // See comment below for difference between put_Java_u2()
-            // and put_native_u2().
-            Bytes::put_Java_u2(bcp, new_index);
-
-            Relocator rc(method, NULL /* no RelocatorListener needed */);
-            methodHandle m;
-            {
-              Pause_No_Safepoint_Verifier pnsv(&nsv);
-
-              // ldc is 2 bytes and ldc_w is 3 bytes
-              m = rc.insert_space_at(bci, 3, inst_buffer, THREAD);
-              if (m.is_null() || HAS_PENDING_EXCEPTION) {
-                guarantee(false, "insert_space_at() failed");
-              }
-            }
-
-            // return the new method so that the caller can update
-            // the containing class
-            *new_method_p = method = m;
-            // switch our bytecode processing loop from the old method
-            // to the new method
-            code_base = method->code_base();
-            code_length = method->code_size();
-            bcp = code_base + bci;
-            c = (Bytecodes::Code)(*bcp);
-            bc_length = Bytecodes::length_for(c);
-            assert(bc_length != 0, "sanity check");
-          } // end we need ldc_w instead of ldc
-        } // end if there is a mapped index
-      } break;
-
-      // these bytecodes have a two-byte constant pool index
-      case Bytecodes::_anewarray      : // fall through
-      case Bytecodes::_checkcast      : // fall through
-      case Bytecodes::_getfield       : // fall through
-      case Bytecodes::_getstatic      : // fall through
-      case Bytecodes::_instanceof     : // fall through
-      case Bytecodes::_invokeinterface: // fall through
-      case Bytecodes::_invokespecial  : // fall through
-      case Bytecodes::_invokestatic   : // fall through
-      case Bytecodes::_invokevirtual  : // fall through
-      case Bytecodes::_ldc_w          : // fall through
-      case Bytecodes::_ldc2_w         : // fall through
-      case Bytecodes::_multianewarray : // fall through
-      case Bytecodes::_new            : // fall through
-      case Bytecodes::_putfield       : // fall through
-      case Bytecodes::_putstatic      :
-      {
-        address p = bcp + 1;
-        int cp_index = Bytes::get_Java_u2(p);
-        int new_index = find_new_index(cp_index);
-        if (new_index != 0) {
-          // the original index is mapped so update w/ new value
-          RC_TRACE_WITH_THREAD(0x00080000, THREAD,
-            ("%s@" INTPTR_FORMAT " old=%d, new=%d", Bytecodes::name(c),
-            bcp, cp_index, new_index));
-          // Rewriter::rewrite_method() uses put_native_u2() in this
-          // situation because it is reusing the constant pool index
-          // location for a native index into the constantPoolCache.
-          // Since we are updating the constant pool index prior to
-          // verification and constantPoolCache initialization, we
-          // need to keep the new index in Java byte order.
-          Bytes::put_Java_u2(p, new_index);
-        }
-      } break;
+      u2 num = the_class->next_method_idnum();
+      if (num == constMethodOopDesc::UNSET_IDNUM) {
+        // cannot add any more methods
+        result = result | Klass::ModifyClass;
+      }
+      u2 new_num = k_new_method->method_idnum();
+      methodOop idnum_owner = new_class->method_with_idnum(num);
+      if (idnum_owner != NULL) {
+        // There is already a method assigned this idnum -- switch them
+        idnum_owner->set_method_idnum(new_num);
+      }
+      k_new_method->set_method_idnum(num);
+      swap_all_method_annotations(new_num, num, new_class);
     }
-  } // end for each bytecode
-} // end rewrite_cp_refs_in_method()
-
-
-// Rewrite constant pool references in the class_annotations field.
-bool VM_RedefineClasses::rewrite_cp_refs_in_class_annotations(
-       instanceKlassHandle scratch_class, TRAPS) {
+    TRACE_RC1("Method added: new: %s [%d]",
+      k_new_method->name_and_sig_as_C_string(), ni);
+    ++ni; // advance to next new method
+    break;
+  case deleted:
+    // method deleted, see if it is OK
+    old_flags = (jushort) k_old_method->access_flags().get_flags();
+    if ((old_flags & JVM_ACC_PRIVATE) == 0
+      // hack: private should be treated as final, but alas
+      || (old_flags & (JVM_ACC_FINAL|JVM_ACC_STATIC)) == 0
+      ) {
+        // deleted method is not both private and final/static, so the class structure (e.g. vtable) may change
+        result = result | Klass::ModifyClass;
+    }
+    TRACE_RC1("Method deleted: old: %s [%d]",
+      k_old_method->name_and_sig_as_C_string(), oi);
+    ++oi; // advance to next old method
+    break;
+  default:
+    ShouldNotReachHere();
+    }
+  }
 
-  typeArrayHandle class_annotations(THREAD,
-    scratch_class->class_annotations());
-  if (class_annotations.is_null() || class_annotations->length() == 0) {
-    // no class_annotations so nothing to do
-    return true;
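+  // Has the size of the klass object or of its instances changed?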
+  if (new_class()->size() != new_class->old_version()->size()) {
+    result |= Klass::ModifyClassSize;
   }
 
-  RC_TRACE_WITH_THREAD(0x02000000, THREAD,
-    ("class_annotations length=%d", class_annotations->length()));
+  if (new_class->size_helper() != ((instanceKlass*)(new_class->old_version()->klass_part()))->size_helper()) {
+    result |= Klass::ModifyInstanceSize;
+  }
 
-  int byte_i = 0;  // byte index into class_annotations
-  return rewrite_cp_refs_in_annotations_typeArray(class_annotations, byte_i,
-           THREAD);
+  // (tw) Check method bodies to be able to return NoChange?
+  return result;
 }
 
+void VM_RedefineClasses::calculate_instance_update_information(klassOop new_version) {
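+  // The computed "update information" is a flat int array describing how to convert
+  // an instance of the old class version into an instance of the new version:
+  //   a positive value n followed by an offset o  => copy n bytes from old offset o
+  //   a negative value -n                         => n bytes with no counterpart in the old instance (new field / gap)
+  //   0                                           => end marker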
 
-// Rewrite constant pool references in an annotations typeArray. This
-// "structure" is adapted from the RuntimeVisibleAnnotations_attribute
-// that is described in section 4.8.15 of the 2nd-edition of the VM spec:
-//
-// annotations_typeArray {
-//   u2 num_annotations;
-//   annotation annotations[num_annotations];
-// }
-//
-bool VM_RedefineClasses::rewrite_cp_refs_in_annotations_typeArray(
-       typeArrayHandle annotations_typeArray, int &byte_i_ref, TRAPS) {
-
-  if ((byte_i_ref + 2) > annotations_typeArray->length()) {
-    // not enough room for num_annotations field
-    RC_TRACE_WITH_THREAD(0x02000000, THREAD,
-      ("length() is too small for num_annotations field"));
-    return false;
-  }
+  class CalculateFieldUpdates : public FieldClosure {
 
-  u2 num_annotations = Bytes::get_Java_u2((address)
-                         annotations_typeArray->byte_at_addr(byte_i_ref));
-  byte_i_ref += 2;
+  private:
+    instanceKlass* _old_ik;
+    GrowableArray<int> _update_info;
+    int _position;
+    bool _copy_backwards;
 
-  RC_TRACE_WITH_THREAD(0x02000000, THREAD,
-    ("num_annotations=%d", num_annotations));
+  public:
 
-  int calc_num_annotations = 0;
-  for (; calc_num_annotations < num_annotations; calc_num_annotations++) {
-    if (!rewrite_cp_refs_in_annotation_struct(annotations_typeArray,
-           byte_i_ref, THREAD)) {
-      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
-        ("bad annotation_struct at %d", calc_num_annotations));
-      // propagate failure back to caller
-      return false;
+    bool does_copy_backwards() {
+      return _copy_backwards;
     }
-  }
-  assert(num_annotations == calc_num_annotations, "sanity check");
 
-  return true;
-} // end rewrite_cp_refs_in_annotations_typeArray()
+    CalculateFieldUpdates(instanceKlass* old_ik) :
+        _old_ik(old_ik), _position(instanceOopDesc::base_offset_in_bytes()), _copy_backwards(false) {
+      _update_info.append(_position);
+      _update_info.append(0);
+    }
 
+    GrowableArray<int> &finish() {
+      _update_info.append(0);
+      return _update_info;
+    }
 
-// Rewrite constant pool references in the annotation struct portion of
-// an annotations_typeArray. This "structure" is from section 4.8.15 of
-// the 2nd-edition of the VM spec:
-//
-// struct annotation {
-//   u2 type_index;
-//   u2 num_element_value_pairs;
-//   {
-//     u2 element_name_index;
-//     element_value value;
-//   } element_value_pairs[num_element_value_pairs];
-// }
-//
-bool VM_RedefineClasses::rewrite_cp_refs_in_annotation_struct(
-       typeArrayHandle annotations_typeArray, int &byte_i_ref, TRAPS) {
-  if ((byte_i_ref + 2 + 2) > annotations_typeArray->length()) {
-    // not enough room for smallest annotation_struct
-    RC_TRACE_WITH_THREAD(0x02000000, THREAD,
-      ("length() is too small for annotation_struct"));
-    return false;
-  }
+    void do_field(fieldDescriptor* fd) {
+      int alignment = fd->offset() - _position;
+      if (alignment > 0) {
+        // This field was aligned, so we need to make sure that we fill the gap
+        fill(alignment);
+      }
 
-  u2 type_index = rewrite_cp_ref_in_annotation_data(annotations_typeArray,
-                    byte_i_ref, "mapped old type_index=%d", THREAD);
+      assert(_position == fd->offset(), "must be correct offset!");
 
-  u2 num_element_value_pairs = Bytes::get_Java_u2((address)
-                                 annotations_typeArray->byte_at_addr(
-                                 byte_i_ref));
-  byte_i_ref += 2;
+      fieldDescriptor old_fd;
+      if (_old_ik->find_field(fd->name(), fd->signature(), false, &old_fd) != NULL) {
+        // Found field in the old class, copy
+        copy(old_fd.offset(), type2aelembytes(fd->field_type()));
 
-  RC_TRACE_WITH_THREAD(0x02000000, THREAD,
-    ("type_index=%d  num_element_value_pairs=%d", type_index,
-    num_element_value_pairs));
+        if (old_fd.offset() < fd->offset()) {
+          _copy_backwards = true;
+        }
 
-  int calc_num_element_value_pairs = 0;
-  for (; calc_num_element_value_pairs < num_element_value_pairs;
-       calc_num_element_value_pairs++) {
-    if ((byte_i_ref + 2) > annotations_typeArray->length()) {
-      // not enough room for another element_name_index, let alone
-      // the rest of another component
-      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
-        ("length() is too small for element_name_index"));
-      return false;
+        // Transfer special flags
+        fd->set_is_field_modification_watched(old_fd.is_field_modification_watched());
+        fd->set_is_field_access_watched(old_fd.is_field_access_watched());
+      } else {
+        // New field, fill
+        fill(type2aelembytes(fd->field_type()));
+      }
     }
 
-    u2 element_name_index = rewrite_cp_ref_in_annotation_data(
-                              annotations_typeArray, byte_i_ref,
-                              "mapped old element_name_index=%d", THREAD);
+  private:
 
-    RC_TRACE_WITH_THREAD(0x02000000, THREAD,
-      ("element_name_index=%d", element_name_index));
-
-    if (!rewrite_cp_refs_in_element_value(annotations_typeArray,
-           byte_i_ref, THREAD)) {
-      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
-        ("bad element_value at %d", calc_num_element_value_pairs));
-      // propagate failure back to caller
-      return false;
+    void fill(int size) {
+      if (_update_info.length() > 0 && _update_info.at(_update_info.length() - 1) < 0) {
+        (*_update_info.adr_at(_update_info.length() - 1)) -= size;
+      } else {
+        _update_info.append(-size);
+      }
+      _position += size;
     }
-  } // end for each component
-  assert(num_element_value_pairs == calc_num_element_value_pairs,
-    "sanity check");
-
-  return true;
-} // end rewrite_cp_refs_in_annotation_struct()
-
-
-// Rewrite a constant pool reference at the current position in
-// annotations_typeArray if needed. Returns the original constant
-// pool reference if a rewrite was not needed or the new constant
-// pool reference if a rewrite was needed.
-u2 VM_RedefineClasses::rewrite_cp_ref_in_annotation_data(
-     typeArrayHandle annotations_typeArray, int &byte_i_ref,
-     const char * trace_mesg, TRAPS) {
-
-  address cp_index_addr = (address)
-    annotations_typeArray->byte_at_addr(byte_i_ref);
-  u2 old_cp_index = Bytes::get_Java_u2(cp_index_addr);
-  u2 new_cp_index = find_new_index(old_cp_index);
-  if (new_cp_index != 0) {
-    RC_TRACE_WITH_THREAD(0x02000000, THREAD, (trace_mesg, old_cp_index));
-    Bytes::put_Java_u2(cp_index_addr, new_cp_index);
-    old_cp_index = new_cp_index;
-  }
-  byte_i_ref += 2;
-  return old_cp_index;
-}
-
-
-// Rewrite constant pool references in the element_value portion of an
-// annotations_typeArray. This "structure" is from section 4.8.15.1 of
-// the 2nd-edition of the VM spec:
-//
-// struct element_value {
-//   u1 tag;
-//   union {
-//     u2 const_value_index;
-//     {
-//       u2 type_name_index;
-//       u2 const_name_index;
-//     } enum_const_value;
-//     u2 class_info_index;
-//     annotation annotation_value;
-//     struct {
-//       u2 num_values;
-//       element_value values[num_values];
-//     } array_value;
-//   } value;
-// }
-//
-bool VM_RedefineClasses::rewrite_cp_refs_in_element_value(
-       typeArrayHandle annotations_typeArray, int &byte_i_ref, TRAPS) {
 
-  if ((byte_i_ref + 1) > annotations_typeArray->length()) {
-    // not enough room for a tag let alone the rest of an element_value
-    RC_TRACE_WITH_THREAD(0x02000000, THREAD,
-      ("length() is too small for a tag"));
-    return false;
-  }
+    void copy(int offset, int size) {
+      int prev_end = -1;
+      if (_update_info.length() > 0 && _update_info.at(_update_info.length() - 1) > 0) {
+        prev_end = _update_info.at(_update_info.length() - 2) + _update_info.at(_update_info.length() - 1);
+      }
 
-  u1 tag = annotations_typeArray->byte_at(byte_i_ref);
-  byte_i_ref++;
-  RC_TRACE_WITH_THREAD(0x02000000, THREAD, ("tag='%c'", tag));
-
-  switch (tag) {
-    // These BaseType tag values are from Table 4.2 in VM spec:
-    case 'B':  // byte
-    case 'C':  // char
-    case 'D':  // double
-    case 'F':  // float
-    case 'I':  // int
-    case 'J':  // long
-    case 'S':  // short
-    case 'Z':  // boolean
-
-    // The remaining tag values are from Table 4.8 in the 2nd-edition of
-    // the VM spec:
-    case 's':
-    {
-      // For the above tag values (including the BaseType values),
-      // value.const_value_index is right union field.
-
-      if ((byte_i_ref + 2) > annotations_typeArray->length()) {
-        // not enough room for a const_value_index
-        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
-          ("length() is too small for a const_value_index"));
-        return false;
+      if (prev_end == offset) {
+        (*_update_info.adr_at(_update_info.length() - 2)) += size;
+      } else {
+        _update_info.append(size);
+        _update_info.append(offset);
       }
 
-      u2 const_value_index = rewrite_cp_ref_in_annotation_data(
-                               annotations_typeArray, byte_i_ref,
-                               "mapped old const_value_index=%d", THREAD);
+      _position += size;
+    }
+  };
 
-      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
-        ("const_value_index=%d", const_value_index));
-    } break;
+  instanceKlass* ik = instanceKlass::cast(new_version);
+  instanceKlass* old_ik = instanceKlass::cast(new_version->klass_part()->old_version());
+  CalculateFieldUpdates cl(old_ik);
+  ik->do_nonstatic_fields(&cl);
 
-    case 'e':
-    {
-      // for the above tag value, value.enum_const_value is right union field
+  GrowableArray<int> result = cl.finish();
+  ik->store_update_information(result);
+  ik->set_copying_backwards(cl.does_copy_backwards());
 
-      if ((byte_i_ref + 4) > annotations_typeArray->length()) {
-        // not enough room for a enum_const_value
-        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
-          ("length() is too small for a enum_const_value"));
-        return false;
+  IF_TRACE_RC2 {
+    TRACE_RC2("Instance update information for %s:", new_version->klass_part()->name()->as_C_string());
+    if (cl.does_copy_backwards()) {
+      TRACE_RC2("\tDoes copy backwards!");
+    }
+    for (int i=0; i<result.length(); i++) {
+      int curNum = result.at(i);
+      if (curNum < 0) {
+        TRACE_RC2("\t%d CLEAN", curNum);
+      } else if (curNum > 0) {
+        TRACE_RC2("\t%d COPY from %d", curNum, result.at(i + 1));
+        i++;
+      } else {
+        TRACE_RC2("\tEND");
       }
+    }
+  }
+}
 
-      u2 type_name_index = rewrite_cp_ref_in_annotation_data(
-                             annotations_typeArray, byte_i_ref,
-                             "mapped old type_name_index=%d", THREAD);
-
-      u2 const_name_index = rewrite_cp_ref_in_annotation_data(
-                              annotations_typeArray, byte_i_ref,
-                              "mapped old const_name_index=%d", THREAD);
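+// Undoes a failed redefinition: rolls back the system dictionary changes, removes
+// the new class versions from the hierarchy and unlinks them from their old versions.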
+void VM_RedefineClasses::rollback() {
+  TRACE_RC1("Rolling back redefinition!");
+  SystemDictionary::rollback_redefinition();
 
-      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
-        ("type_name_index=%d  const_name_index=%d", type_name_index,
-        const_name_index));
-    } break;
+  TRACE_RC1("After rolling back system dictionary!");
+  for (int i=0; i<_new_classes->length(); i++) {
+    SystemDictionary::remove_from_hierarchy(_new_classes->at(i));
+  }
 
-    case 'c':
-    {
-      // for the above tag value, value.class_info_index is right union field
+  for (int i=0; i<_new_classes->length(); i++) {
+    instanceKlassHandle new_class = _new_classes->at(i);
+    new_class->set_redefining(false);
+    new_class->old_version()->klass_part()->set_new_version(NULL);
+    new_class->set_old_version(NULL);
+  }
 
-      if ((byte_i_ref + 2) > annotations_typeArray->length()) {
-        // not enough room for a class_info_index
-        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
-          ("length() is too small for a class_info_index"));
-        return false;
-      }
+}
 
-      u2 class_info_index = rewrite_cp_ref_in_annotation_data(
-                              annotations_typeArray, byte_i_ref,
-                              "mapped old class_info_index=%d", THREAD);
-
-      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
-        ("class_info_index=%d", class_info_index));
-    } break;
-
-    case '@':
-      // For the above tag value, value.attr_value is the right union
-      // field. This is a nested annotation.
-      if (!rewrite_cp_refs_in_annotation_struct(annotations_typeArray,
-             byte_i_ref, THREAD)) {
-        // propagate failure back to caller
-        return false;
-      }
-      break;
+void VM_RedefineClasses::swap_marks(oop first, oop second) {
+  markOop first_mark = first->mark();
+  markOop second_mark = second->mark();
+  first->set_mark(second_mark);
+  second->set_mark(first_mark);
+}
 
-    case '[':
-    {
-      if ((byte_i_ref + 2) > annotations_typeArray->length()) {
-        // not enough room for a num_values field
-        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
-          ("length() is too small for a num_values field"));
-        return false;
-      }
 
-      // For the above tag value, value.array_value is the right union
-      // field. This is an array of nested element_value.
-      u2 num_values = Bytes::get_Java_u2((address)
-                        annotations_typeArray->byte_at_addr(byte_i_ref));
-      byte_i_ref += 2;
-      RC_TRACE_WITH_THREAD(0x02000000, THREAD, ("num_values=%d", num_values));
-
-      int calc_num_values = 0;
-      for (; calc_num_values < num_values; calc_num_values++) {
-        if (!rewrite_cp_refs_in_element_value(
-               annotations_typeArray, byte_i_ref, THREAD)) {
-          RC_TRACE_WITH_THREAD(0x02000000, THREAD,
-            ("bad nested element_value at %d", calc_num_values));
-          // propagate failure back to caller
-          return false;
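+// Copies the values of static fields from the java mirror of the old class
+// version to the mirror of the new version, forwarding references to mirrors
+// of redefined classes to their newest version.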
+class FieldCopier : public FieldClosure {
+  public:
+  void do_field(fieldDescriptor* fd) {
+    instanceKlass* cur = instanceKlass::cast(fd->field_holder());
+    oop cur_oop = cur->java_mirror();
+
+    instanceKlass* old = instanceKlass::cast(cur->old_version());
+    oop old_oop = old->java_mirror();
+
+    fieldDescriptor result;
+    bool found = old->find_local_field(fd->name(), fd->signature(), &result);
+    if (found && result.is_static()) {
+      TRACE_RC3("Copying static field value for field %s old_offset=%d new_offset=%d",
+        fd->name()->as_C_string(), result.offset(), fd->offset());
+      memcpy(cur_oop->obj_field_addr<HeapWord>(fd->offset()),
+             old_oop->obj_field_addr<HeapWord>(result.offset()),
+             type2aelembytes(fd->field_type()));
+
+      // Static fields may have references to java.lang.Class
+      if (fd->field_type() == T_OBJECT) {
+         oop oop = cur_oop->obj_field(fd->offset());
+         if (oop != NULL && oop->is_instanceMirror()) {
+            klassOop klass = java_lang_Class::as_klassOop(oop);
+            if (klass != NULL && klass->klass_part()->oop_is_instance()) {
+              assert(oop == instanceKlass::cast(klass)->java_mirror(), "just checking");
+              if (klass->klass_part()->new_version() != NULL) {
+                oop = instanceKlass::cast(klass->klass_part()->new_version())->java_mirror();
+
+                cur_oop->obj_field_put(fd->offset(), oop);
+              }
+            }
         }
       }
-      assert(num_values == calc_num_values, "sanity check");
-    } break;
-
-    default:
-      RC_TRACE_WITH_THREAD(0x02000000, THREAD, ("bad tag=0x%x", tag));
-      return false;
-  } // end decode tag field
-
-  return true;
-} // end rewrite_cp_refs_in_element_value()
-
-
-// Rewrite constant pool references in a fields_annotations field.
-bool VM_RedefineClasses::rewrite_cp_refs_in_fields_annotations(
-       instanceKlassHandle scratch_class, TRAPS) {
-
-  objArrayHandle fields_annotations(THREAD,
-    scratch_class->fields_annotations());
+    }
+  }
+};
 
-  if (fields_annotations.is_null() || fields_annotations->length() == 0) {
-    // no fields_annotations so nothing to do
-    return true;
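+// Adds an nmethod to the scavenge root list if it is not already on it.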
+void VM_RedefineClasses::mark_as_scavengable(nmethod* nm) {
+  if (!nm->on_scavenge_root_list()) {
+    CodeCache::add_scavenge_root_nmethod(nm);
   }
+}
 
-  RC_TRACE_WITH_THREAD(0x02000000, THREAD,
-    ("fields_annotations length=%d", fields_annotations->length()));
+struct StoreBarrier {
+  template <class T> static void oop_store(T* p, oop v) { ::oop_store(p, v); }
+};
 
-  for (int i = 0; i < fields_annotations->length(); i++) {
-    typeArrayHandle field_annotations(THREAD,
-      (typeArrayOop)fields_annotations->obj_at(i));
-    if (field_annotations.is_null() || field_annotations->length() == 0) {
-      // this field does not have any annotations so skip it
-      continue;
-    }
+struct StoreNoBarrier {
+  template <class T> static void oop_store(T* p, oop v) { oopDesc::encode_store_heap_oop_not_null(p, v); }
+};
 
-    int byte_i = 0;  // byte index into field_annotations
-    if (!rewrite_cp_refs_in_annotations_typeArray(field_annotations, byte_i,
-           THREAD)) {
-      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
-        ("bad field_annotations at %d", i));
-      // propagate failure back to caller
-      return false;
+template <class S>
+class ChangePointersOopClosure : public OopClosureNoHeader {
+  // Forward pointers to instanceKlass and mirror class to new versions
+  template <class T>
+  inline void do_oop_work(T* p) {
+    oop oop = oopDesc::load_decode_heap_oop(p);
+    if (oop == NULL) {
+      return;
+    }
+    if (oop->is_instanceKlass()) {
+      klassOop klass = (klassOop) oop;
+      if (klass->klass_part()->new_version() != NULL) {
+        oop = klass->klass_part()->new_version();
+        S::oop_store(p, oop);
+      }
+    } else if (oop->is_instanceMirror()) {
+      klassOop klass = java_lang_Class::as_klassOop(oop);
+      if (klass != NULL && klass->klass_part()->oop_is_instance()) {
+        assert(oop == instanceKlass::cast(klass)->java_mirror(), "just checking");
+        if (klass->klass_part()->new_version() != NULL) {
+          oop = instanceKlass::cast(klass->klass_part()->new_version())->java_mirror();
+          S::oop_store(p, oop);
+        }
+      }
     }
   }
 
-  return true;
-} // end rewrite_cp_refs_in_fields_annotations()
-
-
-// Rewrite constant pool references in a methods_annotations field.
-bool VM_RedefineClasses::rewrite_cp_refs_in_methods_annotations(
-       instanceKlassHandle scratch_class, TRAPS) {
-
-  objArrayHandle methods_annotations(THREAD,
-    scratch_class->methods_annotations());
+  virtual void do_oop(oop* o) {
+    do_oop_work(o);
+  }
 
-  if (methods_annotations.is_null() || methods_annotations->length() == 0) {
-    // no methods_annotations so nothing to do
-    return true;
+  virtual void do_oop(narrowOop* o) {
+    do_oop_work(o);
   }
+};
 
-  RC_TRACE_WITH_THREAD(0x02000000, THREAD,
-    ("methods_annotations length=%d", methods_annotations->length()));
+void VM_RedefineClasses::doit() {
+  Thread *thread = Thread::current();
+  
+  TRACE_RC1("Entering doit!");
 
-  for (int i = 0; i < methods_annotations->length(); i++) {
-    typeArrayHandle method_annotations(THREAD,
-      (typeArrayOop)methods_annotations->obj_at(i));
-    if (method_annotations.is_null() || method_annotations->length() == 0) {
-      // this method does not have any annotations so skip it
-      continue;
-    }
+  assert((_max_redefinition_flags & Klass::RemoveSuperType) == 0, "removing super types not allowed");
 
-    int byte_i = 0;  // byte index into method_annotations
-    if (!rewrite_cp_refs_in_annotations_typeArray(method_annotations, byte_i,
-           THREAD)) {
-      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
-        ("bad method_annotations at %d", i));
-      // propagate failure back to caller
-      return false;
+  if (UseSharedSpaces) {
+    // Sharing is enabled so we remap the shared readonly space to
+    // shared readwrite, private just in case we need to redefine
+    // a shared class. We do the remap during the doit() phase of
+    // the safepoint to be safer.
+    if (!CompactingPermGenGen::remap_shared_readonly_as_readwrite()) {
+      TRACE_RC1("failed to remap shared readonly space to readwrite, private");
+      _result = JVMTI_ERROR_INTERNAL;
+      return;
     }
   }
+  
+  RC_TIMER_START(_timer_prepare_redefinition);
+  for (int i = 0; i < _new_classes->length(); i++) {
+    redefine_single_class(_new_classes->at(i), thread);
+  }
+  
+  // Deoptimize all compiled code that depends on this class
+  flush_dependent_code(instanceKlassHandle(Thread::current(), (klassOop)NULL), Thread::current());
 
-  return true;
-} // end rewrite_cp_refs_in_methods_annotations()
+  // Adjust constantpool caches and vtables for all classes
+  // that reference methods of the evolved class.
+  SystemDictionary::classes_do(adjust_cpool_cache, Thread::current());
 
+  RC_TIMER_STOP(_timer_prepare_redefinition);
+  RC_TIMER_START(_timer_heap_iteration);
 
-// Rewrite constant pool references in a methods_parameter_annotations
-// field. This "structure" is adapted from the
-// RuntimeVisibleParameterAnnotations_attribute described in section
-// 4.8.17 of the 2nd-edition of the VM spec:
-//
-// methods_parameter_annotations_typeArray {
-//   u1 num_parameters;
-//   {
-//     u2 num_annotations;
-//     annotation annotations[num_annotations];
-//   } parameter_annotations[num_parameters];
-// }
-//
-bool VM_RedefineClasses::rewrite_cp_refs_in_methods_parameter_annotations(
-       instanceKlassHandle scratch_class, TRAPS) {
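+    // Object closure applied to every heap object: forwards contained oops through the
+    // given oop closure and, where possible, converts instances of redefined classes to
+    // their new version in place.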
+    class ChangePointersObjectClosure : public ObjectClosure {
 
-  objArrayHandle methods_parameter_annotations(THREAD,
-    scratch_class->methods_parameter_annotations());
+    private:
 
-  if (methods_parameter_annotations.is_null()
-      || methods_parameter_annotations->length() == 0) {
-    // no methods_parameter_annotations so nothing to do
-    return true;
-  }
+      OopClosureNoHeader *_closure;
+      bool _needs_instance_update;
+      oop _tmp_obj;
+      int _tmp_obj_size;
 
-  RC_TRACE_WITH_THREAD(0x02000000, THREAD,
-    ("methods_parameter_annotations length=%d",
-    methods_parameter_annotations->length()));
+    public:
+      ChangePointersObjectClosure(OopClosureNoHeader *closure) : _closure(closure), _needs_instance_update(false), _tmp_obj(NULL), _tmp_obj_size(0) {}
 
-  for (int i = 0; i < methods_parameter_annotations->length(); i++) {
-    typeArrayHandle method_parameter_annotations(THREAD,
-      (typeArrayOop)methods_parameter_annotations->obj_at(i));
-    if (method_parameter_annotations.is_null()
-        || method_parameter_annotations->length() == 0) {
-      // this method does not have any parameter annotations so skip it
-      continue;
-    }
+      bool needs_instance_update() {
+        return _needs_instance_update;
+      }
 
-    if (method_parameter_annotations->length() < 1) {
-      // not enough room for a num_parameters field
-      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
-        ("length() is too small for a num_parameters field at %d", i));
-      return false;
-    }
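+      // Copy the object into a resource-allocated scratch buffer. Used when field contents
+      // move backwards during the in-place update, so the original values stay available as
+      // the copy source.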
+      void copy_to_tmp(oop o) {
+        int size = o->size();
+        if (_tmp_obj_size < size) {
+          _tmp_obj_size = size;
+          _tmp_obj = (oop)resource_allocate_bytes(size * HeapWordSize);
+        }
+        Copy::aligned_disjoint_words((HeapWord*)o, (HeapWord*)_tmp_obj, size);
+      }
 
-    int byte_i = 0;  // byte index into method_parameter_annotations
+      virtual void do_object(oop obj) {
+        if (obj->is_instanceKlass()) return;
+        if (obj->is_instanceMirror()) {
+          // Static fields may have references to old java.lang.Class instances that need to be
+          // updated, but we do not want to touch the other oops in the java.lang.Class instance.
+          // The call below appears to cause a SIGSEGV, so it is disabled for now:
+          //instanceMirrorKlass::oop_fields_iterate(obj, _closure);
+        } else {
+          obj->oop_iterate(_closure);
+        }
 
-    u1 num_parameters = method_parameter_annotations->byte_at(byte_i);
-    byte_i++;
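+        // If the object's class has a newer version, try to convert the instance in place;
+        // if the new layout does not fit (or the object is in the perm gen), flag that a
+        // full GC with instance updates is needed.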
+        if (obj->blueprint()->new_version() != NULL) {
+          Klass* new_klass = obj->blueprint()->new_version()->klass_part();
+          if (obj->is_perm()) {
+            _needs_instance_update = true;
+          } else if (new_klass->update_information() != NULL) {
+            int size_diff = obj->size() - obj->size_given_klass(new_klass);
+
+            // Either the new size is bigger or the gap is too small to be filled
+            if (size_diff < 0 || (size_diff > 0 && (size_t) size_diff < CollectedHeap::min_fill_size())) {
+              // We need an instance update => set back to old klass
+              _needs_instance_update = true;
+            } else {
+              oop src = obj;
+              if (new_klass->is_copying_backwards()) {
+                copy_to_tmp(obj);
+                src = _tmp_obj;
+              }
+              src->set_klass_no_check(obj->blueprint()->new_version());
+              MarkSweep::update_fields(obj, src, new_klass->update_information());
 
-    RC_TRACE_WITH_THREAD(0x02000000, THREAD,
-      ("num_parameters=%d", num_parameters));
+              if (size_diff > 0) {
+                HeapWord* dead_space = ((HeapWord *)obj) + obj->size();
+                CollectedHeap::fill_with_object(dead_space, size_diff);
+              }
+            }
+          } else {
+            obj->set_klass_no_check(obj->blueprint()->new_version());
+          }
+        }
+      }
+    };
+    
+    ChangePointersOopClosure<StoreNoBarrier> oopClosureNoBarrier;
+    ChangePointersOopClosure<StoreBarrier> oopClosure;
+    ChangePointersObjectClosure objectClosure(&oopClosure);
 
-    int calc_num_parameters = 0;
-    for (; calc_num_parameters < num_parameters; calc_num_parameters++) {
-      if (!rewrite_cp_refs_in_annotations_typeArray(
-             method_parameter_annotations, byte_i, THREAD)) {
-        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
-          ("bad method_parameter_annotations at %d", calc_num_parameters));
-        // propagate failure back to caller
-        return false;
+    {
+      // Since we may update oops inside an nmethod's code blob to point to java.lang.Class instances of the
+      // new generation, we need to make sure such references are properly recognized by the GC. If
+      // ScavengeRootsInCode is true, this means marking such nmethods as "scavengable".
+      // For now, mark every nmethod as scavengable that is not scavengable already.
+      if (ScavengeRootsInCode) {
+        CodeCache::nmethods_do(mark_as_scavengable);
       }
+
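+      // Walk all heap objects (with write barriers) and then the root oops (no barrier
+      // needed) so that every reference to an old class version gets forwarded.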
+      SharedHeap::heap()->gc_prologue(true);
+      Universe::heap()->object_iterate(&objectClosure);
+      Universe::root_oops_do(&oopClosureNoBarrier);
+      SharedHeap::heap()->gc_epilogue(false);
     }
-    assert(num_parameters == calc_num_parameters, "sanity check");
-  }
 
-  return true;
-} // end rewrite_cp_refs_in_methods_parameter_annotations()
 
+    for (int i=0; i<_new_classes->length(); i++) {
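+      // Transfer identity hash codes, array classes, static field values and the
+      // initialization state from the old class version to the new one, and fix up
+      // the old version's constant pool holder.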
+      klassOop cur_oop = _new_classes->at(i)();
+      instanceKlass* cur = instanceKlass::cast(cur_oop);
+      klassOop old_oop = cur->old_version();
+      instanceKlass* old = instanceKlass::cast(old_oop);
 
-// Rewrite constant pool references in a methods_default_annotations
-// field. This "structure" is adapted from the AnnotationDefault_attribute
-// that is described in section 4.8.19 of the 2nd-edition of the VM spec:
-//
-// methods_default_annotations_typeArray {
-//   element_value default_value;
-// }
-//
-bool VM_RedefineClasses::rewrite_cp_refs_in_methods_default_annotations(
-       instanceKlassHandle scratch_class, TRAPS) {
+      // Swap the mark words so the new class version keeps the identity hash code of the old one
+      swap_marks(cur_oop, old_oop);
+      swap_marks(cur->java_mirror(), old->java_mirror());
 
-  objArrayHandle methods_default_annotations(THREAD,
-    scratch_class->methods_default_annotations());
+      // Revert the pool holder for the old version of the klass (it was updated by one of our closures!)
+      old->constants()->set_pool_holder(old_oop);
 
-  if (methods_default_annotations.is_null()
-      || methods_default_annotations->length() == 0) {
-    // no methods_default_annotations so nothing to do
-    return true;
-  }
 
-  RC_TRACE_WITH_THREAD(0x02000000, THREAD,
-    ("methods_default_annotations length=%d",
-    methods_default_annotations->length()));
+      if (old->array_klasses() != NULL) {
+        // Transfer the array classes, otherwise we might get cast exceptions when casting array types.
+        assert(cur->array_klasses() == NULL, "just checking");
+        cur->set_array_klasses(old->array_klasses());
+      }
 
-  for (int i = 0; i < methods_default_annotations->length(); i++) {
-    typeArrayHandle method_default_annotations(THREAD,
-      (typeArrayOop)methods_default_annotations->obj_at(i));
-    if (method_default_annotations.is_null()
-        || method_default_annotations->length() == 0) {
-      // this method does not have any default annotations so skip it
-      continue;
+      // Initialize the new class! This is a special static initialization that does not run the
+      // static initializer but copies the static field values from the old class whenever the
+      // name and signature of a static field match.
+      FieldCopier copier;
+      cur->do_local_static_fields(&copier); // TODO (tw): What about internal static fields??
+      old->set_java_mirror(cur->java_mirror());
+
+      // Transfer init state
+      instanceKlass::ClassState state = old->init_state();
+      if (state > instanceKlass::linked) {
+        cur->set_init_state(state);
+      }
     }
 
-    int byte_i = 0;  // byte index into method_default_annotations
+  RC_TIMER_STOP(_timer_heap_iteration);
+  RC_TIMER_START(_timer_redefinition);
+  if (objectClosure.needs_instance_update()) {
 
-    if (!rewrite_cp_refs_in_element_value(
-           method_default_annotations, byte_i, THREAD)) {
-      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
-        ("bad default element_value at %d", i));
-      // propagate failure back to caller
-      return false;
-    }
+    // Do a full garbage collection to update the instance sizes accordingly
+    TRACE_RC1("Before performing full GC!");
+    Universe::set_redefining_gc_run(true);
+    notify_gc_begin(true);
+    Universe::heap()->collect_as_vm_thread(GCCause::_heap_inspection);
+    notify_gc_end();
+    Universe::set_redefining_gc_run(false);
+    TRACE_RC1("GC done!");
   }
 
-  return true;
-} // end rewrite_cp_refs_in_methods_default_annotations()
+  // Unmark klassOops as "redefining"
+  for (int i=0; i<_new_classes->length(); i++) {
+    klassOop cur_klass = _new_classes->at(i)();
+    instanceKlass* cur = (instanceKlass*)cur_klass->klass_part();
+    cur->set_redefining(false);
+    cur->clear_update_information();
+  }
 
+  // Disable any dependent concurrent compilations
+  SystemDictionary::notice_modification();
 
-// Rewrite constant pool references in the method's stackmap table.
-// These "structures" are adapted from the StackMapTable_attribute that
-// is described in section 4.8.4 of the 6.0 version of the VM spec
-// (dated 2005.10.26):
-// file:///net/quincunx.sfbay/export/gbracha/ClassFile-Java6.pdf
-//
-// stack_map {
-//   u2 number_of_entries;
-//   stack_map_frame entries[number_of_entries];
-// }
-//
-void VM_RedefineClasses::rewrite_cp_refs_in_stack_map_table(
-       methodHandle method, TRAPS) {
+  // Set flag indicating that some invariants are no longer true.
+  // See jvmtiExport.hpp for detailed explanation.
+  JvmtiExport::set_has_redefined_a_class();
 
-  if (!method->has_stackmap_table()) {
-    return;
-  }
+  // Clean up caches in the compiler interface and compiler threads
+  ciObjectFactory::resort_shared_ci_objects();
 
-  typeArrayOop stackmap_data = method->stackmap_data();
-  address stackmap_p = (address)stackmap_data->byte_at_addr(0);
-  address stackmap_end = stackmap_p + stackmap_data->length();
-
-  assert(stackmap_p + 2 <= stackmap_end, "no room for number_of_entries");
-  u2 number_of_entries = Bytes::get_Java_u2(stackmap_p);
-  stackmap_p += 2;
-
-  RC_TRACE_WITH_THREAD(0x04000000, THREAD,
-    ("number_of_entries=%u", number_of_entries));
-
-  // walk through each stack_map_frame
-  u2 calc_number_of_entries = 0;
-  for (; calc_number_of_entries < number_of_entries; calc_number_of_entries++) {
-    // The stack_map_frame structure is a u1 frame_type followed by
-    // 0 or more bytes of data:
-    //
-    // union stack_map_frame {
-    //   same_frame;
-    //   same_locals_1_stack_item_frame;
-    //   same_locals_1_stack_item_frame_extended;
-    //   chop_frame;
-    //   same_frame_extended;
-    //   append_frame;
-    //   full_frame;
-    // }
-
-    assert(stackmap_p + 1 <= stackmap_end, "no room for frame_type");
-    // The Linux compiler does not like frame_type to be u1 or u2. It
-    // issues the following warning for the first if-statement below:
-    //
-    // "warning: comparison is always true due to limited range of data type"
-    //
-    u4 frame_type = *stackmap_p;
-    stackmap_p++;
-
-    // same_frame {
-    //   u1 frame_type = SAME; /* 0-63 */
-    // }
-    if (frame_type >= 0 && frame_type <= 63) {
-      // nothing more to do for same_frame
-    }
-
-    // same_locals_1_stack_item_frame {
-    //   u1 frame_type = SAME_LOCALS_1_STACK_ITEM; /* 64-127 */
-    //   verification_type_info stack[1];
-    // }
-    else if (frame_type >= 64 && frame_type <= 127) {
-      rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
-        calc_number_of_entries, frame_type, THREAD);
-    }
-
-    // reserved for future use
-    else if (frame_type >= 128 && frame_type <= 246) {
-      // nothing more to do for reserved frame_types
-    }
-
-    // same_locals_1_stack_item_frame_extended {
-    //   u1 frame_type = SAME_LOCALS_1_STACK_ITEM_EXTENDED; /* 247 */
-    //   u2 offset_delta;
-    //   verification_type_info stack[1];
-    // }
-    else if (frame_type == 247) {
-      stackmap_p += 2;
-      rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
-        calc_number_of_entries, frame_type, THREAD);
-    }
-
-    // chop_frame {
-    //   u1 frame_type = CHOP; /* 248-250 */
-    //   u2 offset_delta;
-    // }
-    else if (frame_type >= 248 && frame_type <= 250) {
-      stackmap_p += 2;
-    }
-
-    // same_frame_extended {
-    //   u1 frame_type = SAME_FRAME_EXTENDED; /* 251*/
-    //   u2 offset_delta;
-    // }
-    else if (frame_type == 251) {
-      stackmap_p += 2;
-    }
-
-    // append_frame {
-    //   u1 frame_type = APPEND; /* 252-254 */
-    //   u2 offset_delta;
-    //   verification_type_info locals[frame_type - 251];
-    // }
-    else if (frame_type >= 252 && frame_type <= 254) {
-      assert(stackmap_p + 2 <= stackmap_end,
-        "no room for offset_delta");
-      stackmap_p += 2;
-      u1 len = frame_type - 251;
-      for (u1 i = 0; i < len; i++) {
-        rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
-          calc_number_of_entries, frame_type, THREAD);
-      }
-    }
+#ifdef ASSERT
 
-    // full_frame {
-    //   u1 frame_type = FULL_FRAME; /* 255 */
-    //   u2 offset_delta;
-    //   u2 number_of_locals;
-    //   verification_type_info locals[number_of_locals];
-    //   u2 number_of_stack_items;
-    //   verification_type_info stack[number_of_stack_items];
-    // }
-    else if (frame_type == 255) {
-      assert(stackmap_p + 2 + 2 <= stackmap_end,
-        "no room for smallest full_frame");
-      stackmap_p += 2;
-
-      u2 number_of_locals = Bytes::get_Java_u2(stackmap_p);
-      stackmap_p += 2;
-
-      for (u2 locals_i = 0; locals_i < number_of_locals; locals_i++) {
-        rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
-          calc_number_of_entries, frame_type, THREAD);
-      }
+  // Universe::verify();
+  // JNIHandles::verify();
 
-      // Use the largest size for the number_of_stack_items, but only get
-      // the right number of bytes.
-      u2 number_of_stack_items = Bytes::get_Java_u2(stackmap_p);
-      stackmap_p += 2;
+  SystemDictionary::classes_do(check_class, thread);
+#endif
 
-      for (u2 stack_i = 0; stack_i < number_of_stack_items; stack_i++) {
-        rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
-          calc_number_of_entries, frame_type, THREAD);
-      }
-    }
-  } // end while there is a stack_map_frame
-  assert(number_of_entries == calc_number_of_entries, "sanity check");
-} // end rewrite_cp_refs_in_stack_map_table()
+  RC_TIMER_STOP(_timer_redefinition);
 
+  if (TraceRedefineClasses > 0) {
+    tty->flush();
+  }
+}
 
-// Rewrite constant pool references in the verification type info
-// portion of the method's stackmap table. These "structures" are
-// adapted from the StackMapTable_attribute that is described in
-// section 4.8.4 of the 6.0 version of the VM spec (dated 2005.10.26):
-// file:///net/quincunx.sfbay/export/gbracha/ClassFile-Java6.pdf
-//
-// The verification_type_info structure is a u1 tag followed by 0 or
-// more bytes of data:
-//
-// union verification_type_info {
-//   Top_variable_info;
-//   Integer_variable_info;
-//   Float_variable_info;
-//   Long_variable_info;
-//   Double_variable_info;
-//   Null_variable_info;
-//   UninitializedThis_variable_info;
-//   Object_variable_info;
-//   Uninitialized_variable_info;
-// }
-//
-void VM_RedefineClasses::rewrite_cp_refs_in_verification_type_info(
-       address& stackmap_p_ref, address stackmap_end, u2 frame_i,
-       u1 frame_type, TRAPS) {
-
-  assert(stackmap_p_ref + 1 <= stackmap_end, "no room for tag");
-  u1 tag = *stackmap_p_ref;
-  stackmap_p_ref++;
-
-  switch (tag) {
-  // Top_variable_info {
-  //   u1 tag = ITEM_Top; /* 0 */
-  // }
-  // verificationType.hpp has zero as ITEM_Bogus instead of ITEM_Top
-  case 0:  // fall through
-
-  // Integer_variable_info {
-  //   u1 tag = ITEM_Integer; /* 1 */
-  // }
-  case ITEM_Integer:  // fall through
-
-  // Float_variable_info {
-  //   u1 tag = ITEM_Float; /* 2 */
-  // }
-  case ITEM_Float:  // fall through
-
-  // Double_variable_info {
-  //   u1 tag = ITEM_Double; /* 3 */
-  // }
-  case ITEM_Double:  // fall through
-
-  // Long_variable_info {
-  //   u1 tag = ITEM_Long; /* 4 */
-  // }
-  case ITEM_Long:  // fall through
-
-  // Null_variable_info {
-  //   u1 tag = ITEM_Null; /* 5 */
-  // }
-  case ITEM_Null:  // fall through
-
-  // UninitializedThis_variable_info {
-  //   u1 tag = ITEM_UninitializedThis; /* 6 */
-  // }
-  case ITEM_UninitializedThis:
-    // nothing more to do for the above tag types
-    break;
+void VM_RedefineClasses::doit_epilogue() {
 
-  // Object_variable_info {
-  //   u1 tag = ITEM_Object; /* 7 */
-  //   u2 cpool_index;
-  // }
-  case ITEM_Object:
-  {
-    assert(stackmap_p_ref + 2 <= stackmap_end, "no room for cpool_index");
-    u2 cpool_index = Bytes::get_Java_u2(stackmap_p_ref);
-    u2 new_cp_index = find_new_index(cpool_index);
-    if (new_cp_index != 0) {
-      RC_TRACE_WITH_THREAD(0x04000000, THREAD,
-        ("mapped old cpool_index=%d", cpool_index));
-      Bytes::put_Java_u2(stackmap_p_ref, new_cp_index);
-      cpool_index = new_cp_index;
-    }
-    stackmap_p_ref += 2;
-
-    RC_TRACE_WITH_THREAD(0x04000000, THREAD,
-      ("frame_i=%u, frame_type=%u, cpool_index=%d", frame_i,
-      frame_type, cpool_index));
-  } break;
-
-  // Uninitialized_variable_info {
-  //   u1 tag = ITEM_Uninitialized; /* 8 */
-  //   u2 offset;
-  // }
-  case ITEM_Uninitialized:
-    assert(stackmap_p_ref + 2 <= stackmap_end, "no room for offset");
-    stackmap_p_ref += 2;
-    break;
+  RC_TIMER_START(_timer_vm_op_epilogue);
 
-  default:
-    RC_TRACE_WITH_THREAD(0x04000000, THREAD,
-      ("frame_i=%u, frame_type=%u, bad tag=0x%x", frame_i, frame_type, tag));
-    ShouldNotReachHere();
-    break;
-  } // end switch (tag)
-} // end rewrite_cp_refs_in_verification_type_info()
-
-
-// Change the constant pool associated with klass scratch_class to
-// scratch_cp. If shrink is true, then scratch_cp_length elements
-// are copied from scratch_cp to a smaller constant pool and the
-// smaller constant pool is associated with scratch_class.
-void VM_RedefineClasses::set_new_constant_pool(
-       instanceKlassHandle scratch_class, constantPoolHandle scratch_cp,
-       int scratch_cp_length, bool shrink, TRAPS) {
-  assert(!shrink || scratch_cp->length() >= scratch_cp_length, "sanity check");
-
-  if (shrink) {
-    // scratch_cp is a merged constant pool and has enough space for a
-    // worst case merge situation. We want to associate the minimum
-    // sized constant pool with the klass to save space.
-    constantPoolHandle smaller_cp(THREAD,
-      oopFactory::new_constantPool(scratch_cp_length,
-                                   oopDesc::IsUnsafeConc,
-                                   THREAD));
-    // preserve orig_length() value in the smaller copy
-    int orig_length = scratch_cp->orig_length();
-    assert(orig_length != 0, "sanity check");
-    smaller_cp->set_orig_length(orig_length);
-    scratch_cp->copy_cp_to(1, scratch_cp_length - 1, smaller_cp, 1, THREAD);
-    scratch_cp = smaller_cp;
-    smaller_cp()->set_is_conc_safe(true);
-  }
-
-  // attach new constant pool to klass
-  scratch_cp->set_pool_holder(scratch_class());
-
-  // attach klass to new constant pool
-  scratch_class->set_constants(scratch_cp());
-
-  int i;  // for portability
-
-  // update each field in klass to use new constant pool indices as needed
-  for (JavaFieldStream fs(scratch_class); !fs.done(); fs.next()) {
-    jshort cur_index = fs.name_index();
-    jshort new_index = find_new_index(cur_index);
-    if (new_index != 0) {
-      RC_TRACE_WITH_THREAD(0x00080000, THREAD,
-        ("field-name_index change: %d to %d", cur_index, new_index));
-      fs.set_name_index(new_index);
-    }
-    cur_index = fs.signature_index();
-    new_index = find_new_index(cur_index);
-    if (new_index != 0) {
-      RC_TRACE_WITH_THREAD(0x00080000, THREAD,
-        ("field-signature_index change: %d to %d", cur_index, new_index));
-      fs.set_signature_index(new_index);
-    }
-    cur_index = fs.initval_index();
-    new_index = find_new_index(cur_index);
-    if (new_index != 0) {
-      RC_TRACE_WITH_THREAD(0x00080000, THREAD,
-        ("field-initval_index change: %d to %d", cur_index, new_index));
-      fs.set_initval_index(new_index);
-    }
-    cur_index = fs.generic_signature_index();
-    new_index = find_new_index(cur_index);
-    if (new_index != 0) {
-      RC_TRACE_WITH_THREAD(0x00080000, THREAD,
-        ("field-generic_signature change: %d to %d", cur_index, new_index));
-      fs.set_generic_signature_index(new_index);
-    }
-  } // end for each field
-
-  // Update constant pool indices in the inner classes info to use
-  // new constant indices as needed. The inner classes info is a
-  // quadruple:
-  // (inner_class_info, outer_class_info, inner_name, inner_access_flags)
-  InnerClassesIterator iter(scratch_class);
-  for (; !iter.done(); iter.next()) {
-    int cur_index = iter.inner_class_info_index();
-    if (cur_index == 0) {
-      continue;  // JVM spec. allows null inner class refs so skip it
-    }
-    int new_index = find_new_index(cur_index);
-    if (new_index != 0) {
-      RC_TRACE_WITH_THREAD(0x00080000, THREAD,
-        ("inner_class_info change: %d to %d", cur_index, new_index));
-      iter.set_inner_class_info_index(new_index);
-    }
-    cur_index = iter.outer_class_info_index();
-    new_index = find_new_index(cur_index);
-    if (new_index != 0) {
-      RC_TRACE_WITH_THREAD(0x00080000, THREAD,
-        ("outer_class_info change: %d to %d", cur_index, new_index));
-      iter.set_outer_class_info_index(new_index);
-    }
-    cur_index = iter.inner_name_index();
-    new_index = find_new_index(cur_index);
-    if (new_index != 0) {
-      RC_TRACE_WITH_THREAD(0x00080000, THREAD,
-        ("inner_name change: %d to %d", cur_index, new_index));
-      iter.set_inner_name_index(new_index);
-    }
-  } // end for each inner class
-
-  // Attach each method in klass to the new constant pool and update
-  // to use new constant pool indices as needed:
-  objArrayHandle methods(THREAD, scratch_class->methods());
-  for (i = methods->length() - 1; i >= 0; i--) {
-    methodHandle method(THREAD, (methodOop)methods->obj_at(i));
-    method->set_constants(scratch_cp());
-
-    int new_index = find_new_index(method->name_index());
-    if (new_index != 0) {
-      RC_TRACE_WITH_THREAD(0x00080000, THREAD,
-        ("method-name_index change: %d to %d", method->name_index(),
-        new_index));
-      method->set_name_index(new_index);
-    }
-    new_index = find_new_index(method->signature_index());
-    if (new_index != 0) {
-      RC_TRACE_WITH_THREAD(0x00080000, THREAD,
-        ("method-signature_index change: %d to %d",
-        method->signature_index(), new_index));
-      method->set_signature_index(new_index);
-    }
-    new_index = find_new_index(method->generic_signature_index());
-    if (new_index != 0) {
-      RC_TRACE_WITH_THREAD(0x00080000, THREAD,
-        ("method-generic_signature_index change: %d to %d",
-        method->generic_signature_index(), new_index));
-      method->set_generic_signature_index(new_index);
-    }
-
-    // Update constant pool indices in the method's checked exception
-    // table to use new constant indices as needed.
-    int cext_length = method->checked_exceptions_length();
-    if (cext_length > 0) {
-      CheckedExceptionElement * cext_table =
-        method->checked_exceptions_start();
-      for (int j = 0; j < cext_length; j++) {
-        int cur_index = cext_table[j].class_cp_index;
-        int new_index = find_new_index(cur_index);
-        if (new_index != 0) {
-          RC_TRACE_WITH_THREAD(0x00080000, THREAD,
-            ("cext-class_cp_index change: %d to %d", cur_index, new_index));
-          cext_table[j].class_cp_index = (u2)new_index;
-        }
-      } // end for each checked exception table entry
-    } // end if there are checked exception table entries
-
-    // Update each catch type index in the method's exception table
-    // to use new constant pool indices as needed. The exception table
-    // holds quadruple entries of the form:
-    //   (beg_bci, end_bci, handler_bci, klass_index)
-
-    ExceptionTable ex_table(method());
-    int ext_length = ex_table.length();
-
-    for (int j = 0; j < ext_length; j ++) {
-      int cur_index = ex_table.catch_type_index(j);
-      int new_index = find_new_index(cur_index);
-      if (new_index != 0) {
-        RC_TRACE_WITH_THREAD(0x00080000, THREAD,
-          ("ext-klass_index change: %d to %d", cur_index, new_index));
-        ex_table.set_catch_type_index(j, new_index);
-      }
-    } // end for each exception table entry
-
-    // Update constant pool indices in the method's local variable
-    // table to use new constant indices as needed. The local variable
-    // table hold sextuple entries of the form:
-    // (start_pc, length, name_index, descriptor_index, signature_index, slot)
-    int lvt_length = method->localvariable_table_length();
-    if (lvt_length > 0) {
-      LocalVariableTableElement * lv_table =
-        method->localvariable_table_start();
-      for (int j = 0; j < lvt_length; j++) {
-        int cur_index = lv_table[j].name_cp_index;
-        int new_index = find_new_index(cur_index);
-        if (new_index != 0) {
-          RC_TRACE_WITH_THREAD(0x00080000, THREAD,
-            ("lvt-name_cp_index change: %d to %d", cur_index, new_index));
-          lv_table[j].name_cp_index = (u2)new_index;
-        }
-        cur_index = lv_table[j].descriptor_cp_index;
-        new_index = find_new_index(cur_index);
-        if (new_index != 0) {
-          RC_TRACE_WITH_THREAD(0x00080000, THREAD,
-            ("lvt-descriptor_cp_index change: %d to %d", cur_index,
-            new_index));
-          lv_table[j].descriptor_cp_index = (u2)new_index;
-        }
-        cur_index = lv_table[j].signature_cp_index;
-        new_index = find_new_index(cur_index);
-        if (new_index != 0) {
-          RC_TRACE_WITH_THREAD(0x00080000, THREAD,
-            ("lvt-signature_cp_index change: %d to %d", cur_index, new_index));
-          lv_table[j].signature_cp_index = (u2)new_index;
-        }
-      } // end for each local variable table entry
-    } // end if there are local variable table entries
+  //unlock_threads();
 
-    rewrite_cp_refs_in_stack_map_table(method, THREAD);
-  } // end for each method
-  assert(scratch_cp()->is_conc_safe(), "Just checking");
-} // end set_new_constant_pool()
+  ResourceMark mark;
 
+  VM_GC_Operation::doit_epilogue();
+  TRACE_RC1("GC Operation epilogue finished! ");
 
-// Unevolving classes may point to methods of the_class directly
-// from their constant pool caches, itables, and/or vtables. We
-// use the SystemDictionary::classes_do() facility and this helper
-// to fix up these pointers.
-//
-// Note: We currently don't support updating the vtable in
-// arrayKlassOops. See Open Issues in jvmtiRedefineClasses.hpp.
-void VM_RedefineClasses::adjust_cpool_cache_and_vtable(klassOop k_oop,
-       oop initiating_loader, TRAPS) {
-  Klass *k = k_oop->klass_part();
-  if (k->oop_is_instance()) {
-    HandleMark hm(THREAD);
-    instanceKlass *ik = (instanceKlass *) k;
+  // Free the array of scratch classes
+  delete _new_classes;
+  _new_classes = NULL;
 
-    // HotSpot specific optimization! HotSpot does not currently
-    // support delegation from the bootstrap class loader to a
-    // user-defined class loader. This means that if the bootstrap
-    // class loader is the initiating class loader, then it will also
-    // be the defining class loader. This also means that classes
-    // loaded by the bootstrap class loader cannot refer to classes
-    // loaded by a user-defined class loader. Note: a user-defined
-    // class loader can delegate to the bootstrap class loader.
-    //
-    // If the current class being redefined has a user-defined class
-    // loader as its defining class loader, then we can skip all
-    // classes loaded by the bootstrap class loader.
-    bool is_user_defined =
-           instanceKlass::cast(_the_class_oop)->class_loader() != NULL;
-    if (is_user_defined && ik->class_loader() == NULL) {
-      return;
-    }
+  // Free the array of affected classes
+  delete _affected_klasses;
+  _affected_klasses = NULL;
 
-    // This is a very busy routine. We don't want too much tracing
-    // printed out.
-    bool trace_name_printed = false;
-
-    // Very noisy: only enable this call if you are trying to determine
-    // that a specific class gets found by this routine.
-    // RC_TRACE macro has an embedded ResourceMark
-    // RC_TRACE_WITH_THREAD(0x00100000, THREAD,
-    //   ("adjust check: name=%s", ik->external_name()));
-    // trace_name_printed = true;
-
-    // Fix the vtable embedded in the_class and subclasses of the_class,
-    // if one exists. We discard scratch_class and we don't keep an
-    // instanceKlass around to hold obsolete methods so we don't have
-    // any other instanceKlass embedded vtables to update. The vtable
-    // holds the methodOops for virtual (but not final) methods.
-    if (ik->vtable_length() > 0 && ik->is_subtype_of(_the_class_oop)) {
-      // ik->vtable() creates a wrapper object; rm cleans it up
-      ResourceMark rm(THREAD);
-      ik->vtable()->adjust_method_entries(_matching_old_methods,
-                                          _matching_new_methods,
-                                          _matching_methods_length,
-                                          &trace_name_printed);
-    }
-
-    // If the current class has an itable and we are either redefining an
-    // interface or if the current class is a subclass of the_class, then
-    // we potentially have to fix the itable. If we are redefining an
-    // interface, then we have to call adjust_method_entries() for
-    // every instanceKlass that has an itable since there isn't a
-    // subclass relationship between an interface and an instanceKlass.
-    if (ik->itable_length() > 0 && (Klass::cast(_the_class_oop)->is_interface()
-        || ik->is_subclass_of(_the_class_oop))) {
-      // ik->itable() creates a wrapper object; rm cleans it up
-      ResourceMark rm(THREAD);
-      ik->itable()->adjust_method_entries(_matching_old_methods,
-                                          _matching_new_methods,
-                                          _matching_methods_length,
-                                          &trace_name_printed);
-    }
-
-    // The constant pools in other classes (other_cp) can refer to
-    // methods in the_class. We have to update method information in
-    // other_cp's cache. If other_cp has a previous version, then we
-    // have to repeat the process for each previous version. The
-    // constant pool cache holds the methodOops for non-virtual
-    // methods and for virtual, final methods.
-    //
-    // Special case: if the current class is the_class, then new_cp
-    // has already been attached to the_class and old_cp has already
-    // been added as a previous version. The new_cp doesn't have any
-    // cached references to old methods so it doesn't need to be
-    // updated. We can simply start with the previous version(s) in
-    // that case.
-    constantPoolHandle other_cp;
-    constantPoolCacheOop cp_cache;
-
-    if (k_oop != _the_class_oop) {
-      // this klass' constant pool cache may need adjustment
-      other_cp = constantPoolHandle(ik->constants());
-      cp_cache = other_cp->cache();
-      if (cp_cache != NULL) {
-        cp_cache->adjust_method_entries(_matching_old_methods,
-                                        _matching_new_methods,
-                                        _matching_methods_length,
-                                        &trace_name_printed);
-      }
-    }
-    {
-      ResourceMark rm(THREAD);
-      // PreviousVersionInfo objects returned via PreviousVersionWalker
-      // contain a GrowableArray of handles. We have to clean up the
-      // GrowableArray _after_ the PreviousVersionWalker destructor
-      // has destroyed the handles.
-      {
-        // the previous versions' constant pool caches may need adjustment
-        PreviousVersionWalker pvw(ik);
-        for (PreviousVersionInfo * pv_info = pvw.next_previous_version();
-             pv_info != NULL; pv_info = pvw.next_previous_version()) {
-          other_cp = pv_info->prev_constant_pool_handle();
-          cp_cache = other_cp->cache();
-          if (cp_cache != NULL) {
-            cp_cache->adjust_method_entries(_matching_old_methods,
-                                            _matching_new_methods,
-                                            _matching_methods_length,
-                                            &trace_name_printed);
-          }
-        }
-      } // pvw is cleaned up
-    } // rm is cleaned up
-  }
-}
+  TRACE_RC1("Redefinition finished!");  
 
-void VM_RedefineClasses::update_jmethod_ids() {
-  for (int j = 0; j < _matching_methods_length; ++j) {
-    methodOop old_method = _matching_old_methods[j];
-    jmethodID jmid = old_method->find_jmethod_id_or_null();
-    if (jmid != NULL) {
-      // There is a jmethodID, change it to point to the new method
-      methodHandle new_method_h(_matching_new_methods[j]);
-      JNIHandles::change_method_associated_with_jmethod_id(jmid, new_method_h);
-      assert(JNIHandles::resolve_jmethod_id(jmid) == _matching_new_methods[j],
-             "should be replaced");
-    }
-  }
+  RC_TIMER_STOP(_timer_vm_op_epilogue);
 }
 
-void VM_RedefineClasses::check_methods_and_mark_as_obsolete(
-       BitMap *emcp_methods, int * emcp_method_count_p) {
-  *emcp_method_count_p = 0;
-  int obsolete_count = 0;
-  int old_index = 0;
-  for (int j = 0; j < _matching_methods_length; ++j, ++old_index) {
-    methodOop old_method = _matching_old_methods[j];
-    methodOop new_method = _matching_new_methods[j];
-    methodOop old_array_method;
-
-    // Maintain an old_index into the _old_methods array by skipping
-    // deleted methods
-    while ((old_array_method = (methodOop) _old_methods->obj_at(old_index))
-                                                            != old_method) {
-      ++old_index;
-    }
-
-    if (MethodComparator::methods_EMCP(old_method, new_method)) {
-      // The EMCP definition from JSR-163 requires the bytecodes to be
-      // the same with the exception of constant pool indices which may
-      // differ. However, the constants referred to by those indices
-      // must be the same.
-      //
-      // We use methods_EMCP() for comparison since constant pool
-      // merging can remove duplicate constant pool entries that were
-      // present in the old method and removed from the rewritten new
-      // method. A faster binary comparison function would consider the
-      // old and new methods to be different when they are actually
-      // EMCP.
-      //
-      // The old and new methods are EMCP and you would think that we
-      // could get rid of one of them here and now and save some space.
-      // However, the concept of EMCP only considers the bytecodes and
-      // the constant pool entries in the comparison. Other things,
-      // e.g., the line number table (LNT) or the local variable table
-      // (LVT) don't count in the comparison. So the new (and EMCP)
-      // method can have a new LNT that we need so we can't just
-      // overwrite the new method with the old method.
-      //
-      // When this routine is called, we have already attached the new
-      // methods to the_class so the old methods are effectively
-      // overwritten. However, if an old method is still executing,
-      // then the old method cannot be collected until sometime after
-      // the old method call has returned. So the overwriting of old
-      // methods by new methods will save us space except for those
-      // (hopefully few) old methods that are still executing.
-      //
-      // A method refers to a constMethodOop and this presents another
-      // possible avenue to space savings. The constMethodOop in the
-      // new method contains possibly new attributes (LNT, LVT, etc).
-      // At first glance, it seems possible to save space by replacing
-      // the constMethodOop in the old method with the constMethodOop
-      // from the new method. The old and new methods would share the
-      // same constMethodOop and we would save the space occupied by
-      // the old constMethodOop. However, the constMethodOop contains
-      // a back reference to the containing method. Sharing the
-      // constMethodOop between two methods could lead to confusion in
-      // the code that uses the back reference. This would lead to
-      // brittle code that could be broken in non-obvious ways now or
-      // in the future.
-      //
-      // Another possibility is to copy the constMethodOop from the new
-      // method to the old method and then overwrite the new method with
-      // the old method. Since the constMethodOop contains the bytecodes
-      // for the method embedded in the oop, this option would change
-      // the bytecodes out from under any threads executing the old
-      // method and make the thread's bcp invalid. Since EMCP requires
-      // that the bytecodes be the same modulo constant pool indices, it
-      // is straight forward to compute the correct new bcp in the new
-      // constMethodOop from the old bcp in the old constMethodOop. The
-      // time consuming part would be searching all the frames in all
-      // of the threads to find all of the calls to the old method.
-      //
-      // It looks like we will have to live with the limited savings
-      // that we get from effectively overwriting the old methods
-      // when the new methods are attached to the_class.
-
-      // track which methods are EMCP for add_previous_version() call
-      emcp_methods->set_bit(old_index);
-      (*emcp_method_count_p)++;
-
-      // An EMCP method is _not_ obsolete. An obsolete method has a
-      // different jmethodID than the current method. An EMCP method
-      // has the same jmethodID as the current method. Having the
-      // same jmethodID for all EMCP versions of a method allows for
-      // a consistent view of the EMCP methods regardless of which
-      // EMCP method you happen to have in hand. For example, a
-      // breakpoint set in one EMCP method will work for all EMCP
-      // versions of the method including the current one.
-    } else {
-      // mark obsolete methods as such
-      old_method->set_is_obsolete();
-      obsolete_count++;
-
-      // obsolete methods need a unique idnum
-      u2 num = instanceKlass::cast(_the_class_oop)->next_method_idnum();
-      if (num != constMethodOopDesc::UNSET_IDNUM) {
-//      u2 old_num = old_method->method_idnum();
-        old_method->set_method_idnum(num);
-// TO DO: attach obsolete annotations to obsolete method's new idnum
-      }
-      // With tracing we try not to "yack" too much. The position of
-      // this trace assumes there are fewer obsolete methods than
-      // EMCP methods.
-      RC_TRACE(0x00000100, ("mark %s(%s) as obsolete",
-        old_method->name()->as_C_string(),
-        old_method->signature()->as_C_string()));
-    }
-    old_method->set_is_old();
-  }
-  for (int i = 0; i < _deleted_methods_length; ++i) {
-    methodOop old_method = _deleted_methods[i];
-
-    assert(old_method->vtable_index() < 0,
-           "cannot delete methods with vtable entries");;
-
-    // Mark all deleted methods as old and obsolete
-    old_method->set_is_old();
-    old_method->set_is_obsolete();
-    ++obsolete_count;
-    // With tracing we try not to "yack" too much. The position of
-    // this trace assumes there are fewer obsolete methods than
-    // EMCP methods.
-    RC_TRACE(0x00000100, ("mark deleted %s(%s) as obsolete",
-                          old_method->name()->as_C_string(),
-                          old_method->signature()->as_C_string()));
-  }
-  assert((*emcp_method_count_p + obsolete_count) == _old_methods->length(),
-    "sanity check");
-  RC_TRACE(0x00000100, ("EMCP_cnt=%d, obsolete_cnt=%d", *emcp_method_count_p,
-    obsolete_count));
+bool VM_RedefineClasses::is_modifiable_class(oop klass_mirror) {
+  // classes for primitives cannot be redefined
+  if (java_lang_Class::is_primitive(klass_mirror)) {
+    return false;
+  }
+  klassOop the_class_oop = java_lang_Class::as_klassOop(klass_mirror);
+  // classes for arrays cannot be redefined
+  if (the_class_oop == NULL || !Klass::cast(the_class_oop)->oop_is_instance()) {
+    return false;
+  }
+  return true;
 }
 
-// This internal class transfers the native function registration from old methods
-// to new methods.  It is designed to handle both the simple case of unchanged
-// native methods and the complex cases of native method prefixes being added and/or
-// removed.
-// It expects only to be used during the VM_RedefineClasses op (a safepoint).
-//
-// This class is used after the new methods have been installed in "the_class".
-//
-// So, for example, the following must be handled.  Where 'm' is a method and
-// a number followed by an underscore is a prefix.
-//
-//                                      Old Name    New Name
-// Simple transfer to new method        m       ->  m
-// Add prefix                           m       ->  1_m
-// Remove prefix                        1_m     ->  m
-// Simultaneous add of prefixes         m       ->  3_2_1_m
-// Simultaneous removal of prefixes     3_2_1_m ->  m
-// Simultaneous add and remove          1_m     ->  2_m
-// Same, caused by prefix removal only  3_2_1_m ->  3_2_m
-//
-class TransferNativeFunctionRegistration {
- private:
-  instanceKlassHandle the_class;
-  int prefix_count;
-  char** prefixes;
-
-  // Recursively search the binary tree of possibly prefixed method names.
-  // Iteration could be used if all agents were well behaved. Full tree walk is
-  // more resilent to agents not cleaning up intermediate methods.
-  // Branch at each depth in the binary tree is:
-  //    (1) without the prefix.
-  //    (2) with the prefix.
-  // where 'prefix' is the prefix at that 'depth' (first prefix, second prefix,...)
-  methodOop search_prefix_name_space(int depth, char* name_str, size_t name_len,
-                                     Symbol* signature) {
-    TempNewSymbol name_symbol = SymbolTable::probe(name_str, (int)name_len);
-    if (name_symbol != NULL) {
-      methodOop method = Klass::cast(the_class())->lookup_method(name_symbol, signature);
-      if (method != NULL) {
-        // Even if prefixed, intermediate methods must exist.
-        if (method->is_native()) {
-          // Wahoo, we found a (possibly prefixed) version of the method, return it.
-          return method;
-        }
-        if (depth < prefix_count) {
-          // Try applying further prefixes (other than this one).
-          method = search_prefix_name_space(depth+1, name_str, name_len, signature);
-          if (method != NULL) {
-            return method; // found
-          }
-
-          // Try adding this prefix to the method name and see if it matches
-          // another method name.
-          char* prefix = prefixes[depth];
-          size_t prefix_len = strlen(prefix);
-          size_t trial_len = name_len + prefix_len;
-          char* trial_name_str = NEW_RESOURCE_ARRAY(char, trial_len + 1);
-          strcpy(trial_name_str, prefix);
-          strcat(trial_name_str, name_str);
-          method = search_prefix_name_space(depth+1, trial_name_str, trial_len,
-                                            signature);
-          if (method != NULL) {
-            // If found along this branch, it was prefixed, mark as such
-            method->set_is_prefixed_native();
-            return method; // found
-          }
-        }
-      }
-    }
-    return NULL;  // This whole branch bore nothing
+#ifdef ASSERT
+
+void VM_RedefineClasses::verify_classes(klassOop k_oop_latest, oop initiating_loader, TRAPS) {
+  klassOop k_oop = k_oop_latest;
+  while (k_oop != NULL) {
+
+    instanceKlassHandle k_handle(THREAD, k_oop);
+    Verifier::verify(k_handle, Verifier::ThrowException, true, true, THREAD);
+    k_oop = k_oop->klass_part()->old_version();
   }
+}
 
-  // Return the method name with old prefixes stripped away.
-  char* method_name_without_prefixes(methodOop method) {
-    Symbol* name = method->name();
-    char* name_str = name->as_utf8();
+#endif
 
-    // Old prefixing may be defunct, strip prefixes, if any.
-    for (int i = prefix_count-1; i >= 0; i--) {
-      char* prefix = prefixes[i];
-      size_t prefix_len = strlen(prefix);
-      if (strncmp(prefix, name_str, prefix_len) == 0) {
-        name_str += prefix_len;
+// Rewrite fast bytecodes back to their slower equivalents. This undoes the rewriting done in templateTable_xxx.cpp.
+// The reason is that once we zero the constant pool caches, all entries need to be re-resolved again. The fast
+// bytecodes skip that step because they assume the cache entry is already resolved.
+void VM_RedefineClasses::unpatch_bytecode(methodOop method) {
+  RawBytecodeStream bcs(method);
+  Bytecodes::Code code;
+  Bytecodes::Code java_code;
+  while (!bcs.is_last_bytecode()) {
+    code = bcs.raw_next();
+    address bcp = bcs.bcp();
+
+    if (code == Bytecodes::_breakpoint) {
+      int bci = method->bci_from(bcp);
+      code = method->orig_bytecode_at(bci);
+      java_code = Bytecodes::java_code(code);
+      if (code != java_code &&
+           (java_code == Bytecodes::_getfield ||
+            java_code == Bytecodes::_putfield ||
+            java_code == Bytecodes::_aload_0)) {
+        // Let breakpoint table handling unpatch bytecode
+        method->set_orig_bytecode_at(bci, java_code);
+      }
+    } else {
+      java_code = Bytecodes::java_code(code);
+      if (code != java_code &&
+           (java_code == Bytecodes::_getfield ||
+            java_code == Bytecodes::_putfield ||
+            java_code == Bytecodes::_aload_0)) {
+        *bcp = java_code;
       }
     }
-    return name_str;
-  }
 
-  // Strip any prefixes off the old native method, then try to find a
-  // (possibly prefixed) new native that matches it.
-  methodOop strip_and_search_for_new_native(methodOop method) {
-    ResourceMark rm;
-    char* name_str = method_name_without_prefixes(method);
-    return search_prefix_name_space(0, name_str, strlen(name_str),
-                                    method->signature());
+    // Additionally, we need to unpatch bytecode at bcp+1 for fast_xaccess (which would be fast field access)
+    if (code == Bytecodes::_fast_iaccess_0 || code == Bytecodes::_fast_aaccess_0 || code == Bytecodes::_fast_faccess_0) {
+      Bytecodes::Code code2 = Bytecodes::code_or_bp_at(bcp + 1);
+      assert(code2 == Bytecodes::_fast_igetfield ||
+             code2 == Bytecodes::_fast_agetfield ||
+             code2 == Bytecodes::_fast_fgetfield, "");
+      *(bcp + 1) = Bytecodes::java_code(code2);
+    }
   }
+}
 
- public:
+// Unevolving classes may point to old methods directly
+// from their constant pool caches, itables, and/or vtables. We
+// use the SystemDictionary::classes_do() facility and this helper
+// to fix up these pointers. Additionally, field offsets and vtable indices
+// in the constant pool cache entries are fixed.
+//
+// Note: We currently don't support updating the vtable in
+// arrayKlassOops. See Open Issues in jvmtiRedefineClasses.hpp.
+void VM_RedefineClasses::adjust_cpool_cache(klassOop k_oop_latest, oop initiating_loader, TRAPS) {
+  klassOop k_oop = k_oop_latest;
+  while (k_oop != NULL) {
+    Klass *k = k_oop->klass_part();
+    if (k->oop_is_instance()) {
+      HandleMark hm(THREAD);
+      instanceKlass *ik = (instanceKlass *) k;
 
-  // Construct a native method transfer processor for this class.
-  TransferNativeFunctionRegistration(instanceKlassHandle _the_class) {
-    assert(SafepointSynchronize::is_at_safepoint(), "sanity check");
+      constantPoolHandle other_cp;
+      constantPoolCacheOop cp_cache;
 
-    the_class = _the_class;
-    prefixes = JvmtiExport::get_all_native_method_prefixes(&prefix_count);
-  }
+      other_cp = constantPoolHandle(ik->constants());
 
-  // Attempt to transfer any of the old or deleted methods that are native
-  void transfer_registrations(methodOop* old_methods, int methods_length) {
-    for (int j = 0; j < methods_length; j++) {
-      methodOop old_method = old_methods[j];
+      for (int i=0; i<other_cp->length(); i++) {
+        if (other_cp->tag_at(i).is_klass()) {
+          klassOop klass = other_cp->klass_at(i, THREAD);
+          if (klass->klass_part()->new_version() != NULL) {
 
-      if (old_method->is_native() && old_method->has_native_function()) {
-        methodOop new_method = strip_and_search_for_new_native(old_method);
-        if (new_method != NULL) {
-          // Actually set the native function in the new method.
-          // Redefine does not send events (except CFLH), certainly not this
-          // behind the scenes re-registration.
-          new_method->set_native_function(old_method->native_function(),
-                              !methodOopDesc::native_bind_event_is_interesting);
+            // (tw) TODO: check why/if this is necessary
+            other_cp->klass_at_put(i, klass->klass_part()->new_version());
+          }
+          klass = other_cp->klass_at(i, THREAD);
+          assert(klass->klass_part()->new_version() == NULL, "Must be new klass!");
         }
       }
+
+      cp_cache = other_cp->cache();
+
+      if (cp_cache != NULL) {
+        cp_cache->adjust_entries();
+      }
+
+      // If bytecode rewriting is enabled, we also need to unpatch bytecode to force resolution of zeroed entries
+      if (RewriteBytecodes) {
+        ik->methods_do(unpatch_bytecode);
+      }
     }
+    k_oop = k_oop->klass_part()->old_version();
   }
-};
+}
 
-// Don't lose the association between a native method and its JNI function.
-void VM_RedefineClasses::transfer_old_native_function_registrations(instanceKlassHandle the_class) {
-  TransferNativeFunctionRegistration transfer(the_class);
-  transfer.transfer_registrations(_deleted_methods, _deleted_methods_length);
-  transfer.transfer_registrations(_matching_old_methods, _matching_methods_length);
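+// Re-point existing jmethodIDs from each matching old method to its new
+// counterpart so that outstanding JNI/JVMTI references resolve to the newest
+// method version. If the old method has a newer version but no jmethodID yet,
+// one is created first so the association can be updated.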
+void VM_RedefineClasses::update_jmethod_ids() {
+  for (int j = 0; j < _matching_methods_length; ++j) {
+    methodOop old_method = (methodOop)_old_methods->obj_at(_matching_old_methods[j]);
+    TRACE_RC3("matching method %s", old_method->name_and_sig_as_C_string());
+    
+    jmethodID jmid = old_method->find_jmethod_id_or_null();
+    if (old_method->new_version() != NULL && jmid == NULL) {
+       // (tw) Have to create jmethodID in this case
+       jmid = old_method->jmethod_id();
+    }
+
+    if (jmid != NULL) {
+      // There is a jmethodID, change it to point to the new method
+      methodHandle new_method_h((methodOop)_new_methods->obj_at(_matching_new_methods[j]));
+      if (old_method->new_version() == NULL) {
+        methodHandle old_method_h((methodOop)_old_methods->obj_at(_matching_old_methods[j]));
+        jmethodID new_jmethod_id = JNIHandles::make_jmethod_id(old_method_h);
+        bool result = instanceKlass::cast(old_method_h->method_holder())->update_jmethod_id(old_method_h(), new_jmethod_id);
+        //TRACE_RC3("Changed jmethodID for old method assigned to %d / result=%d", new_jmethod_id, result);
+        //TRACE_RC3("jmethodID new method: %d jmethodID old method: %d", new_method_h->jmethod_id(), old_method->jmethod_id());
+      } else {
+        jmethodID mid = new_method_h->jmethod_id();
+        bool result = instanceKlass::cast(new_method_h->method_holder())->update_jmethod_id(new_method_h(), jmid);
+        //TRACE_RC3("Changed jmethodID for new method assigned to %d / result=%d", jmid, result);
+      }
+      JNIHandles::change_method_associated_with_jmethod_id(jmid, new_method_h);
+      //TRACE_RC3("changing method associated with jmethod id %d to %s", (int)jmid, new_method_h->name()->as_C_string());
+      assert(JNIHandles::resolve_jmethod_id(jmid) == (methodOop)_new_methods->obj_at(_matching_new_methods[j]), "should be replaced");
+      jmethodID mid = ((methodOop)_new_methods->obj_at(_matching_new_methods[j]))->jmethod_id();
+      assert(JNIHandles::resolve_non_null((jobject)mid) == new_method_h(), "must match!");
+
+      //TRACE_RC3("jmethodID new method: %d jmethodID old method: %d", new_method_h->jmethod_id(), old_method->jmethod_id());
+    }
+  }
 }
 
+
 // Deoptimize all compiled code that depends on this class.
 //
 // If the can_redefine_classes capability is obtained in the onload
@@ -2964,7 +1835,10 @@ void VM_RedefineClasses::flush_dependent_code(instanceKlassHandle k_h, TRAPS) {
 
   // All dependencies have been recorded from startup or this is a second or
   // subsequent use of RedefineClasses
-  if (JvmtiExport::all_dependencies_are_recorded()) {
+
+  // For now deopt all
+  // (tw) TODO: Improve the dependency system such that we can safely deopt only a subset of the methods
+  if (0 && JvmtiExport::all_dependencies_are_recorded()) {
     Universe::flush_evol_dependents_on(k_h);
   } else {
     CodeCache::mark_all_nmethods_for_deoptimization();
@@ -2987,10 +1861,10 @@ void VM_RedefineClasses::compute_added_deleted_matching_methods() {
   methodOop old_method;
   methodOop new_method;
 
-  _matching_old_methods = NEW_RESOURCE_ARRAY(methodOop, _old_methods->length());
-  _matching_new_methods = NEW_RESOURCE_ARRAY(methodOop, _old_methods->length());
-  _added_methods        = NEW_RESOURCE_ARRAY(methodOop, _new_methods->length());
-  _deleted_methods      = NEW_RESOURCE_ARRAY(methodOop, _old_methods->length());
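+  // These arrays now hold indices into _old_methods / _new_methods rather
+  // than raw methodOops (see the corresponding field changes in the header).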
+  _matching_old_methods = NEW_RESOURCE_ARRAY(int, _old_methods->length());
+  _matching_new_methods = NEW_RESOURCE_ARRAY(int, _old_methods->length());
+  _added_methods        = NEW_RESOURCE_ARRAY(int, _new_methods->length());
+  _deleted_methods      = NEW_RESOURCE_ARRAY(int, _old_methods->length());
 
   _matching_methods_length = 0;
   _deleted_methods_length  = 0;
@@ -3005,36 +1879,36 @@ void VM_RedefineClasses::compute_added_deleted_matching_methods() {
       }
       // New method at the end
       new_method = (methodOop) _new_methods->obj_at(nj);
-      _added_methods[_added_methods_length++] = new_method;
+      _added_methods[_added_methods_length++] = nj;
       ++nj;
     } else if (nj >= _new_methods->length()) {
       // Old method, at the end, is deleted
       old_method = (methodOop) _old_methods->obj_at(oj);
-      _deleted_methods[_deleted_methods_length++] = old_method;
+      _deleted_methods[_deleted_methods_length++] = oj;
       ++oj;
     } else {
       old_method = (methodOop) _old_methods->obj_at(oj);
       new_method = (methodOop) _new_methods->obj_at(nj);
       if (old_method->name() == new_method->name()) {
         if (old_method->signature() == new_method->signature()) {
-          _matching_old_methods[_matching_methods_length  ] = old_method;
-          _matching_new_methods[_matching_methods_length++] = new_method;
+          _matching_old_methods[_matching_methods_length  ] = oj;
+          _matching_new_methods[_matching_methods_length++] = nj;
           ++nj;
           ++oj;
         } else {
           // added overloaded have already been moved to the end,
           // so this is a deleted overloaded method
-          _deleted_methods[_deleted_methods_length++] = old_method;
+          _deleted_methods[_deleted_methods_length++] = oj;
           ++oj;
         }
       } else { // names don't match
         if (old_method->name()->fast_compare(new_method->name()) > 0) {
           // new method
-          _added_methods[_added_methods_length++] = new_method;
+          _added_methods[_added_methods_length++] = nj;
           ++nj;
         } else {
           // deleted method
-          _deleted_methods[_deleted_methods_length++] = old_method;
+          _deleted_methods[_deleted_methods_length++] = oj;
           ++oj;
         }
       }
@@ -3042,6 +1916,7 @@ void VM_RedefineClasses::compute_added_deleted_matching_methods() {
   }
   assert(_matching_methods_length + _deleted_methods_length == _old_methods->length(), "sanity");
   assert(_matching_methods_length + _added_methods_length == _new_methods->length(), "sanity");
+  TRACE_RC3("Matching methods = %d / deleted methods = %d / added methods = %d", _matching_methods_length, _deleted_methods_length, _added_methods_length);
 }
 
 
@@ -3049,287 +1924,176 @@ void VM_RedefineClasses::compute_added_deleted_matching_methods() {
 // Install the redefinition of a class:
 //    - house keeping (flushing breakpoints and caches, deoptimizing
 //      dependent compiled code)
-//    - replacing parts in the_class with parts from scratch_class
-//    - adding a weak reference to track the obsolete but interesting
-//      parts of the_class
 //    - adjusting constant pool caches and vtables in other classes
-//      that refer to methods in the_class. These adjustments use the
-//      SystemDictionary::classes_do() facility which only allows
-//      a helper method to be specified. The interesting parameters
-//      that we would like to pass to the helper method are saved in
-//      static global fields in the VM operation.
-void VM_RedefineClasses::redefine_single_class(jclass the_jclass,
-       instanceKlassHandle scratch_class, TRAPS) {
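+//      that refer to methods in the old class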
+void VM_RedefineClasses::redefine_single_class(instanceKlassHandle the_new_class, TRAPS) {
+
+  ResourceMark rm(THREAD);
 
-  RC_TIMER_START(_timer_rsc_phase1);
+  assert(the_new_class->old_version() != NULL, "Must not be null");
+  assert(the_new_class->old_version()->klass_part()->new_version() == the_new_class(), "Must equal");
 
-  oop the_class_mirror = JNIHandles::resolve_non_null(the_jclass);
-  klassOop the_class_oop = java_lang_Class::as_klassOop(the_class_mirror);
-  instanceKlassHandle the_class = instanceKlassHandle(THREAD, the_class_oop);
+  instanceKlassHandle the_old_class = instanceKlassHandle(THREAD, the_new_class->old_version());
 
+#ifndef JVMTI_KERNEL
   // Remove all breakpoints in methods of this class
   JvmtiBreakpoints& jvmti_breakpoints = JvmtiCurrentBreakpoints::get_jvmti_breakpoints();
-  jvmti_breakpoints.clearall_in_class_at_safepoint(the_class_oop);
+  jvmti_breakpoints.clearall_in_class_at_safepoint(the_old_class());
+#endif // !JVMTI_KERNEL
 
-  if (the_class_oop == Universe::reflect_invoke_cache()->klass()) {
+  if (the_old_class() == Universe::reflect_invoke_cache()->klass()) {
     // We are redefining java.lang.reflect.Method. Method.invoke() is
     // cached and users of the cache care about each active version of
     // the method so we have to track this previous version.
     // Do this before methods get switched
     Universe::reflect_invoke_cache()->add_previous_version(
-      the_class->method_with_idnum(Universe::reflect_invoke_cache()->method_idnum()));
+      the_old_class->method_with_idnum(Universe::reflect_invoke_cache()->method_idnum()));
   }
 
-  // Deoptimize all compiled code that depends on this class
-  flush_dependent_code(the_class, THREAD);
-
-  _old_methods = the_class->methods();
-  _new_methods = scratch_class->methods();
-  _the_class_oop = the_class_oop;
+  _old_methods = the_old_class->methods();
+  _new_methods = the_new_class->methods();
   compute_added_deleted_matching_methods();
-  update_jmethod_ids();
-
-  // Attach new constant pool to the original klass. The original
-  // klass still refers to the old constant pool (for now).
-  scratch_class->constants()->set_pool_holder(the_class());
-
-#if 0
-  // In theory, with constant pool merging in place we should be able
-  // to save space by using the new, merged constant pool in place of
-  // the old constant pool(s). By "pool(s)" I mean the constant pool in
-  // the klass version we are replacing now and any constant pool(s) in
-  // previous versions of klass. Nice theory, doesn't work in practice.
-  // When this code is enabled, even simple programs throw NullPointer
-  // exceptions. I'm guessing that this is caused by some constant pool
-  // cache difference between the new, merged constant pool and the
-  // constant pool that was just being used by the klass. I'm keeping
-  // this code around to archive the idea, but the code has to remain
-  // disabled for now.
-
-  // Attach each old method to the new constant pool. This can be
-  // done here since we are past the bytecode verification and
-  // constant pool optimization phases.
-  for (int i = _old_methods->length() - 1; i >= 0; i--) {
-    methodOop method = (methodOop)_old_methods->obj_at(i);
-    method->set_constants(scratch_class->constants());
-  }
-
-  {
-    // walk all previous versions of the klass
-    instanceKlass *ik = (instanceKlass *)the_class()->klass_part();
-    PreviousVersionWalker pvw(ik);
-    instanceKlassHandle ikh;
-    do {
-      ikh = pvw.next_previous_version();
-      if (!ikh.is_null()) {
-        ik = ikh();
-
-        // attach previous version of klass to the new constant pool
-        ik->set_constants(scratch_class->constants());
-
-        // Attach each method in the previous version of klass to the
-        // new constant pool
-        objArrayOop prev_methods = ik->methods();
-        for (int i = prev_methods->length() - 1; i >= 0; i--) {
-          methodOop method = (methodOop)prev_methods->obj_at(i);
-          method->set_constants(scratch_class->constants());
-        }
-      }
-    } while (!ikh.is_null());
-  }
-#endif
-
-  // Replace methods and constantpool
-  the_class->set_methods(_new_methods);
-  scratch_class->set_methods(_old_methods);     // To prevent potential GCing of the old methods,
-                                          // and to be able to undo operation easily.
-
-  constantPoolOop old_constants = the_class->constants();
-  the_class->set_constants(scratch_class->constants());
-  scratch_class->set_constants(old_constants);  // See the previous comment.
-#if 0
-  // We are swapping the guts of "the new class" with the guts of "the
-  // class". Since the old constant pool has just been attached to "the
-  // new class", it seems logical to set the pool holder in the old
-  // constant pool also. However, doing this will change the observable
-  // class hierarchy for any old methods that are still executing. A
-  // method can query the identity of its "holder" and this query uses
-  // the method's constant pool link to find the holder. The change in
-  // holding class from "the class" to "the new class" can confuse
-  // things.
-  //
-  // Setting the old constant pool's holder will also cause
-  // verification done during vtable initialization below to fail.
-  // During vtable initialization, the vtable's class is verified to be
-  // a subtype of the method's holder. The vtable's class is "the
-  // class" and the method's holder is gotten from the constant pool
-  // link in the method itself. For "the class"'s directly implemented
-  // methods, the method holder is "the class" itself (as gotten from
-  // the new constant pool). The check works fine in this case. The
-  // check also works fine for methods inherited from super classes.
-  //
-  // Miranda methods are a little more complicated. A miranda method is
-  // provided by an interface when the class implementing the interface
-  // does not provide its own method.  These interfaces are implemented
-  // internally as an instanceKlass. These special instanceKlasses
-  // share the constant pool of the class that "implements" the
-  // interface. By sharing the constant pool, the method holder of a
-  // miranda method is the class that "implements" the interface. In a
-  // non-redefine situation, the subtype check works fine. However, if
-  // the old constant pool's pool holder is modified, then the check
-  // fails because there is no class hierarchy relationship between the
-  // vtable's class and "the new class".
-
-  old_constants->set_pool_holder(scratch_class());
-#endif
 
   // track which methods are EMCP for add_previous_version() call below
-  BitMap emcp_methods(_old_methods->length());
+  
+  // (tw) TODO: Check if we need the concept of EMCP?
+  BitMap emcp_methods(_old_methods->length());
   int emcp_method_count = 0;
   emcp_methods.clear();  // clears 0..(length() - 1)
+  
+  // We need to mark the old methods as old.
   check_methods_and_mark_as_obsolete(&emcp_methods, &emcp_method_count);
-  transfer_old_native_function_registrations(the_class);
-
-  // The class file bytes from before any retransformable agents mucked
-  // with them was cached on the scratch class, move to the_class.
-  // Note: we still want to do this if nothing needed caching since it
-  // should get cleared in the_class too.
-  if (the_class->get_cached_class_file_bytes() == 0) {
-    // the_class doesn't have a cache yet so copy it
-    the_class->set_cached_class_file(
-      scratch_class->get_cached_class_file_bytes(),
-      scratch_class->get_cached_class_file_len());
-  }
-#ifndef PRODUCT
-  else {
-    assert(the_class->get_cached_class_file_bytes() ==
-      scratch_class->get_cached_class_file_bytes(), "cache ptrs must match");
-    assert(the_class->get_cached_class_file_len() ==
-      scratch_class->get_cached_class_file_len(), "cache lens must match");
-  }
-#endif
+  update_jmethod_ids();
 
-  // Replace inner_classes
-  typeArrayOop old_inner_classes = the_class->inner_classes();
-  the_class->set_inner_classes(scratch_class->inner_classes());
-  scratch_class->set_inner_classes(old_inner_classes);
+  // TODO:
+  transfer_old_native_function_registrations(the_old_class);
 
-  // Initialize the vtable and interface table after
-  // methods have been rewritten
-  {
-    ResourceMark rm(THREAD);
-    // no exception should happen here since we explicitly
-    // do not check loader constraints.
-    // compare_and_normalize_class_versions has already checked:
-    //  - classloaders unchanged, signatures unchanged
-    //  - all instanceKlasses for redefined classes reused & contents updated
-    the_class->vtable()->initialize_vtable(false, THREAD);
-    the_class->itable()->initialize_itable(false, THREAD);
-    assert(!HAS_PENDING_EXCEPTION || (THREAD->pending_exception()->is_a(SystemDictionary::ThreadDeath_klass())), "redefine exception");
-  }
-
-  // Leave arrays of jmethodIDs and itable index cache unchanged
-
-  // Copy the "source file name" attribute from new class version
-  the_class->set_source_file_name(scratch_class->source_file_name());
-
-  // Copy the "source debug extension" attribute from new class version
-  the_class->set_source_debug_extension(
-    scratch_class->source_debug_extension(),
-    scratch_class->source_debug_extension() == NULL ? 0 :
-    (int)strlen(scratch_class->source_debug_extension()));
-
-  // Use of javac -g could be different in the old and the new
-  if (scratch_class->access_flags().has_localvariable_table() !=
-      the_class->access_flags().has_localvariable_table()) {
-
-    AccessFlags flags = the_class->access_flags();
-    if (scratch_class->access_flags().has_localvariable_table()) {
-      flags.set_has_localvariable_table();
-    } else {
-      flags.clear_has_localvariable_table();
-    }
-    the_class->set_access_flags(flags);
-  }
-
-  // Replace class annotation fields values
-  typeArrayOop old_class_annotations = the_class->class_annotations();
-  the_class->set_class_annotations(scratch_class->class_annotations());
-  scratch_class->set_class_annotations(old_class_annotations);
-
-  // Replace fields annotation fields values
-  objArrayOop old_fields_annotations = the_class->fields_annotations();
-  the_class->set_fields_annotations(scratch_class->fields_annotations());
-  scratch_class->set_fields_annotations(old_fields_annotations);
-
-  // Replace methods annotation fields values
-  objArrayOop old_methods_annotations = the_class->methods_annotations();
-  the_class->set_methods_annotations(scratch_class->methods_annotations());
-  scratch_class->set_methods_annotations(old_methods_annotations);
-
-  // Replace methods parameter annotation fields values
-  objArrayOop old_methods_parameter_annotations =
-    the_class->methods_parameter_annotations();
-  the_class->set_methods_parameter_annotations(
-    scratch_class->methods_parameter_annotations());
-  scratch_class->set_methods_parameter_annotations(old_methods_parameter_annotations);
-
-  // Replace methods default annotation fields values
-  objArrayOop old_methods_default_annotations =
-    the_class->methods_default_annotations();
-  the_class->set_methods_default_annotations(
-    scratch_class->methods_default_annotations());
-  scratch_class->set_methods_default_annotations(old_methods_default_annotations);
-
-  // Replace minor version number of class file
-  u2 old_minor_version = the_class->minor_version();
-  the_class->set_minor_version(scratch_class->minor_version());
-  scratch_class->set_minor_version(old_minor_version);
-
-  // Replace major version number of class file
-  u2 old_major_version = the_class->major_version();
-  the_class->set_major_version(scratch_class->major_version());
-  scratch_class->set_major_version(old_major_version);
-
-  // Replace CP indexes for class and name+type of enclosing method
-  u2 old_class_idx  = the_class->enclosing_method_class_index();
-  u2 old_method_idx = the_class->enclosing_method_method_index();
-  the_class->set_enclosing_method_indices(
-    scratch_class->enclosing_method_class_index(),
-    scratch_class->enclosing_method_method_index());
-  scratch_class->set_enclosing_method_indices(old_class_idx, old_method_idx);
-
-  // keep track of previous versions of this class
-  the_class->add_previous_version(scratch_class, &emcp_methods,
-    emcp_method_count);
-
-  RC_TIMER_STOP(_timer_rsc_phase1);
-  RC_TIMER_START(_timer_rsc_phase2);
 
-  // Adjust constantpool caches and vtables for all classes
-  // that reference methods of the evolved class.
-  SystemDictionary::classes_do(adjust_cpool_cache_and_vtable, THREAD);
 
-  if (the_class->oop_map_cache() != NULL) {
-    // Flush references to any obsolete methods from the oop map cache
-    // so that obsolete methods are not pinned.
-    the_class->oop_map_cache()->flush_obsolete_entries();
+#ifdef ASSERT
+
+//  klassOop systemLookup1 = SystemDictionary::resolve_or_null(the_old_class->name(), the_old_class->class_loader(), the_old_class->protection_domain(), THREAD);
+//  assert(systemLookup1 == the_new_class(), "New class must be in system dictionary!");
+
+  //JNIHandles::verify();
+
+//  klassOop systemLookup = SystemDictionary::resolve_or_null(the_old_class->name(), the_old_class->class_loader(), the_old_class->protection_domain(), THREAD);
+
+//  assert(systemLookup == the_new_class(), "New class must be in system dictionary!");
+  assert(the_new_class->old_version() != NULL, "Must not be null");
+  assert(the_new_class->old_version()->klass_part()->new_version() == the_new_class(), "Must equal");
+
+  for (int i=0; i<the_new_class->methods()->length(); i++) {
+    assert(((methodOop)the_new_class->methods()->obj_at(i))->method_holder() == the_new_class(), "method holder must match!");
   }
 
+  _old_methods->verify();
+  _new_methods->verify();
+
+  the_new_class->vtable()->verify(tty);
+  the_old_class->vtable()->verify(tty);
+
+#endif
+
   // increment the classRedefinedCount field in the_class and in any
   // direct and indirect subclasses of the_class
-  increment_class_counter((instanceKlass *)the_class()->klass_part(), THREAD);
+  increment_class_counter((instanceKlass *)the_old_class()->klass_part(), THREAD);
+
+}
+
+
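+// Compare each matching old/new method pair: pairs that are equivalent modulo
+// constant pool indices (EMCP) are linked via new_version()/old_version() and
+// have their breakpoints carried over to the new method; non-EMCP matching
+// methods and all deleted methods are additionally marked obsolete. Every old
+// method is marked as old.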
+void VM_RedefineClasses::check_methods_and_mark_as_obsolete(BitMap *emcp_methods, int * emcp_method_count_p) {
+    TRACE_RC3("Checking matching methods for EMCP");
+    *emcp_method_count_p = 0;
+    int obsolete_count = 0;
+    int old_index = 0;
+    for (int j = 0; j < _matching_methods_length; ++j, ++old_index) {
+      methodOop old_method = (methodOop)_old_methods->obj_at(_matching_old_methods[j]);
+      methodOop new_method = (methodOop)_new_methods->obj_at(_matching_new_methods[j]);
+      methodOop old_array_method;
+
+      // Maintain an old_index into the _old_methods array by skipping
+      // deleted methods
+      while ((old_array_method = (methodOop) _old_methods->obj_at(old_index))
+        != old_method) {
+          ++old_index;
+      }
 
-  // RC_TRACE macro has an embedded ResourceMark
-  RC_TRACE_WITH_THREAD(0x00000001, THREAD,
-    ("redefined name=%s, count=%d (avail_mem=" UINT64_FORMAT "K)",
-    the_class->external_name(),
-    java_lang_Class::classRedefinedCount(the_class_mirror),
-    os::available_memory() >> 10));
+      if (MethodComparator::methods_EMCP(old_method, new_method)) {
+        // The EMCP definition from JSR-163 requires the bytecodes to be
+        // the same with the exception of constant pool indices which may
+        // differ. However, the constants referred to by those indices
+        // must be the same.
+        //
+        // We use methods_EMCP() for comparison since constant pool
+        // merging can remove duplicate constant pool entries that were
+        // present in the old method and removed from the rewritten new
+        // method. A faster binary comparison function would consider the
+        // old and new methods to be different when they are actually
+        // EMCP.
+
+        // track which methods are EMCP for add_previous_version() call
+        emcp_methods->set_bit(old_index);
+        (*emcp_method_count_p)++;
+
+        // An EMCP method is _not_ obsolete. An obsolete method has a
+        // different jmethodID than the current method. An EMCP method
+        // has the same jmethodID as the current method. Having the
+        // same jmethodID for all EMCP versions of a method allows for
+        // a consistent view of the EMCP methods regardless of which
+        // EMCP method you happen to have in hand. For example, a
+        // breakpoint set in one EMCP method will work for all EMCP
+        // versions of the method including the current one.
+
+        old_method->set_new_version(new_method);
+        new_method->set_old_version(old_method);
+
+        TRACE_RC3("Found EMCP method %s", old_method->name_and_sig_as_C_string());
+
+        // Transfer breakpoints
+        instanceKlass *ik = instanceKlass::cast(old_method->method_holder());
+        for (BreakpointInfo* bp = ik->breakpoints(); bp != NULL; bp = bp->next()) {
+          TRACE_RC2("Checking breakpoint");
+          TRACE_RC2("%d / %d", bp->match(old_method), bp->match(new_method));
+          if (bp->match(old_method)) {
+            assert(bp->match(new_method), "if the old method matches, the new method must match too");
+            TRACE_RC2("Found a breakpoint in an old EMCP method");
+            new_method->set_breakpoint(bp->bci());
+          }
+        }
+      } else {
+        // mark obsolete methods as such
+        old_method->set_is_obsolete();
+        obsolete_count++;
+
+        // With tracing we try not to "yack" too much. The position of
+        // this trace assumes there are fewer obsolete methods than
+        // EMCP methods.
+        TRACE_RC3("mark %s(%s) as obsolete",
+          old_method->name()->as_C_string(),
+          old_method->signature()->as_C_string());
+      }
+      old_method->set_is_old();
+    }
+    for (int i = 0; i < _deleted_methods_length; ++i) {
+      methodOop old_method = (methodOop)_old_methods->obj_at(_deleted_methods[i]);
 
-  RC_TIMER_STOP(_timer_rsc_phase2);
-} // end redefine_single_class()
+      //assert(old_method->vtable_index() < 0,
+      //  "cannot delete methods with vtable entries");;
 
+      // Mark all deleted methods as old and obsolete
+      old_method->set_is_old();
+      old_method->set_is_obsolete();
+      ++obsolete_count;
+      // With tracing we try not to "yack" too much. The position of
+      // this trace assumes there are fewer obsolete methods than
+      // EMCP methods.
+      TRACE_RC3("mark deleted %s(%s) as obsolete",
+        old_method->name()->as_C_string(),
+        old_method->signature()->as_C_string());
+    }
+    //assert((*emcp_method_count_p + obsolete_count) == _old_methods->length(), "sanity check");
+    TRACE_RC3("EMCP_cnt=%d, obsolete_cnt=%d !", *emcp_method_count_p, obsolete_count);
+}
 
 // Increment the classRedefinedCount field in the specific instanceKlass
 // and in all direct and indirect subclasses.
@@ -3338,134 +2102,267 @@ void VM_RedefineClasses::increment_class_counter(instanceKlass *ik, TRAPS) {
   klassOop class_oop = java_lang_Class::as_klassOop(class_mirror);
   int new_count = java_lang_Class::classRedefinedCount(class_mirror) + 1;
   java_lang_Class::set_classRedefinedCount(class_mirror, new_count);
-
-  if (class_oop != _the_class_oop) {
-    // _the_class_oop count is printed at end of redefine_single_class()
-    RC_TRACE_WITH_THREAD(0x00000008, THREAD,
-      ("updated count in subclass=%s to %d", ik->external_name(), new_count));
-  }
-
-  for (Klass *subk = ik->subklass(); subk != NULL;
-       subk = subk->next_sibling()) {
-    if (subk->oop_is_instance()) {
-      // Only update instanceKlasses
-      instanceKlass *subik = (instanceKlass*)subk;
-      // recursively do subclasses of the current subclass
-      increment_class_counter(subik, THREAD);
-    }
-  }
+  TRACE_RC3("updated count for class=%s to %d", ik->external_name(), new_count);
 }
 
-void VM_RedefineClasses::check_class(klassOop k_oop,
-       oop initiating_loader, TRAPS) {
+#ifndef PRODUCT
+void VM_RedefineClasses::check_class(klassOop k_oop, TRAPS) {
   Klass *k = k_oop->klass_part();
   if (k->oop_is_instance()) {
     HandleMark hm(THREAD);
     instanceKlass *ik = (instanceKlass *) k;
-    bool no_old_methods = true;  // be optimistic
-    ResourceMark rm(THREAD);
+    assert(ik->is_newest_version(), "must be latest version in system dictionary");
+
+    if (ik->vtable_length() > 0) {
+      ResourceMark rm(THREAD);
+      if (!ik->vtable()->check_no_old_or_obsolete_entries()) {
+        TRACE_RC1("size of class: %d\n", k_oop->size());
+        TRACE_RC1("klassVtable::check_no_old_entries failure -- OLD method found -- class: %s", ik->signature_name());
+        assert(false, "OLD method found");
+      }
+
+      ik->vtable()->verify(tty, true);
+    }
+  }
+}
+
+#endif
 
-    // a vtable should never contain old or obsolete methods
-    if (ik->vtable_length() > 0 &&
-        !ik->vtable()->check_no_old_or_obsolete_entries()) {
-      if (RC_TRACE_ENABLED(0x00004000)) {
-        RC_TRACE_WITH_THREAD(0x00004000, THREAD,
-          ("klassVtable::check_no_old_or_obsolete_entries failure"
-           " -- OLD or OBSOLETE method found -- class: %s",
-           ik->signature_name()));
-        ik->vtable()->dump_vtable();
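+// Predicate for GrowableArray::find(): returns true if 'value' is the
+// right-hand side (the subtype) of a dependency link, i.e. the class still
+// has an incoming edge in the topological sort below.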
+static bool match_right(void* value, Pair<klassOop, klassOop> elem) {
+  return elem.right() == value;
+}
+
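+// Sort _affected_klasses so that every class comes after all of its affected
+// super types (classes and interfaces). Rough sketch on a hypothetical
+// hierarchy where B extends A and C extends B, all three being redefined:
+//   links = { (A,B), (B,C) }
+//   step 0: A has no incoming link -> A stays first, (A,B) is removed
+//   step 1: B has no incoming link -> B is placed next, (B,C) is removed
+//   step 2: C has no incoming link -> C is placed last
+// If at some step no class without incoming links remains, the definitions
+// are cyclic and JVMTI_ERROR_CIRCULAR_CLASS_DEFINITION is returned.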
+jvmtiError VM_RedefineClasses::do_topological_class_sorting(const jvmtiClassDefinition *class_defs, int class_count, TRAPS) {
+  GrowableArray< Pair<klassOop, klassOop> > links;
+
+  for (int i=0; i<class_count; i++) {
+
+    oop mirror = JNIHandles::resolve_non_null(class_defs[i].klass);
+    instanceKlassHandle the_class(THREAD, java_lang_Class::as_klassOop(mirror));
+    Handle the_class_loader(THREAD, the_class->class_loader());
+    Handle protection_domain(THREAD, the_class->protection_domain());
+
+    ClassFileStream st((u1*) class_defs[i].class_bytes,
+      class_defs[i].class_byte_count, (char *)"__VM_RedefineClasses__");
+    ClassFileParser cfp(&st);
+
+    GrowableArray<Symbol*> symbolArr;
+    TempNewSymbol parsed_name;
+    TRACE_RC2("Before find super symbols of class %s", the_class->name()->as_C_string());
+    cfp.parseClassFile(the_class->name(), the_class_loader, protection_domain, the_class, KlassHandle(), NULL, &symbolArr, parsed_name, false, THREAD);
+    
+    for (int j=0; j<symbolArr.length(); j++) {
+      Symbol* sym = symbolArr.at(j);
+      TRACE_RC3("Before adding link to super class %s", sym->as_C_string());
+      klassOop super_klass = SystemDictionary::resolve_or_null(sym, the_class_loader, protection_domain, THREAD);
+      if (super_klass != NULL) {
+        instanceKlassHandle the_super_class(THREAD, super_klass);
+        if (_affected_klasses->contains(the_super_class)) {
+          TRACE_RC2("Found class to link");
+          links.append(Pair<klassOop, klassOop>(super_klass, the_class()));
+        }
       }
-      no_old_methods = false;
-    }
-
-    // an itable should never contain old or obsolete methods
-    if (ik->itable_length() > 0 &&
-        !ik->itable()->check_no_old_or_obsolete_entries()) {
-      if (RC_TRACE_ENABLED(0x00004000)) {
-        RC_TRACE_WITH_THREAD(0x00004000, THREAD,
-          ("klassItable::check_no_old_or_obsolete_entries failure"
-           " -- OLD or OBSOLETE method found -- class: %s",
-           ik->signature_name()));
-        ik->itable()->dump_itable();
+    }
+
+    assert(the_class->check_redefinition_flag(Klass::MarkedAsAffected), "");
+    the_class->clear_redefinition_flag(Klass::MarkedAsAffected);
+  }
+
+
+  TRACE_RC1("Identified links between classes! ");
+
+  for (int i=0; i < _affected_klasses->length(); i++) {
+    instanceKlassHandle klass = _affected_klasses->at(i);
+
+    if (klass->check_redefinition_flag(Klass::MarkedAsAffected)) {
+      klass->clear_redefinition_flag(Klass::MarkedAsAffected);
+      klassOop superKlass = klass->super();
+      if (_affected_klasses->contains(superKlass)) {
+        links.append(Pair<klassOop, klassOop>(superKlass, klass()));
       }
-      no_old_methods = false;
-    }
-
-    // the constant pool cache should never contain old or obsolete methods
-    if (ik->constants() != NULL &&
-        ik->constants()->cache() != NULL &&
-        !ik->constants()->cache()->check_no_old_or_obsolete_entries()) {
-      if (RC_TRACE_ENABLED(0x00004000)) {
-        RC_TRACE_WITH_THREAD(0x00004000, THREAD,
-          ("cp-cache::check_no_old_or_obsolete_entries failure"
-           " -- OLD or OBSOLETE method found -- class: %s",
-           ik->signature_name()));
-        ik->constants()->cache()->dump_cache();
+
+      objArrayOop superInterfaces = klass->local_interfaces();
+      for (int j=0; j<superInterfaces->length(); j++) {
+        klassOop interfaceKlass = (klassOop)superInterfaces->obj_at(j);
+        if (_affected_klasses->contains(interfaceKlass)) {
+          links.append(Pair<klassOop, klassOop>(interfaceKlass, klass()));
+        }
       }
-      no_old_methods = false;
+    }
+  }
+
+  IF_TRACE_RC2 {
+    TRACE_RC2("Identified links: ");
+    for (int i=0; i<links.length(); i++) {
+      TRACE_RC2("%s to %s", links.at(i).left()->klass_part()->name()->as_C_string(),
+        links.at(i).right()->klass_part()->name()->as_C_string());
+    }
+  }
+
+  for (int i = 0; i < _affected_klasses->length(); i++) {
+    int j;
+    for (j = i; j < _affected_klasses->length(); j++) {
+      // Search for a node with no incoming edges
+      klassOop k_oop = _affected_klasses->at(j)();
+      int k = links.find(k_oop, match_right);
+      if (k == -1) break;
+    }
+    if (j == _affected_klasses->length()) {
+      return JVMTI_ERROR_CIRCULAR_CLASS_DEFINITION;
     }
 
-    if (!no_old_methods) {
-      if (RC_TRACE_ENABLED(0x00004000)) {
-        dump_methods();
+    // Remove all outgoing links from this node
+    klassOop k_oop = _affected_klasses->at(j)();
+    int k = 0;
+    while (k < links.length()) {
+      if (links.adr_at(k)->left() == k_oop) {
+        links.delete_at(k);
       } else {
-        tty->print_cr("INFO: use the '-XX:TraceRedefineClasses=16384' option "
-          "to see more info about the following guarantee() failure.");
+        k++;
       }
-      guarantee(false, "OLD and/or OBSOLETE method(s) found");
     }
+
+    // Swap node
+    instanceKlassHandle tmp = _affected_klasses->at(j);
+    _affected_klasses->at_put(j, _affected_klasses->at(i));
+    _affected_klasses->at_put(i, tmp);
   }
+
+  return JVMTI_ERROR_NONE;
 }
 
-void VM_RedefineClasses::dump_methods() {
-  int j;
-  RC_TRACE(0x00004000, ("_old_methods --"));
-  for (j = 0; j < _old_methods->length(); ++j) {
-    methodOop m = (methodOop) _old_methods->obj_at(j);
-    RC_TRACE_NO_CR(0x00004000, ("%4d  (%5d)  ", j, m->vtable_index()));
-    m->access_flags().print_on(tty);
-    tty->print(" --  ");
-    m->print_name(tty);
-    tty->cr();
-  }
-  RC_TRACE(0x00004000, ("_new_methods --"));
-  for (j = 0; j < _new_methods->length(); ++j) {
-    methodOop m = (methodOop) _new_methods->obj_at(j);
-    RC_TRACE_NO_CR(0x00004000, ("%4d  (%5d)  ", j, m->vtable_index()));
-    m->access_flags().print_on(tty);
-    tty->print(" --  ");
-    m->print_name(tty);
-    tty->cr();
-  }
-  RC_TRACE(0x00004000, ("_matching_(old/new)_methods --"));
-  for (j = 0; j < _matching_methods_length; ++j) {
-    methodOop m = _matching_old_methods[j];
-    RC_TRACE_NO_CR(0x00004000, ("%4d  (%5d)  ", j, m->vtable_index()));
-    m->access_flags().print_on(tty);
-    tty->print(" --  ");
-    m->print_name(tty);
-    tty->cr();
-    m = _matching_new_methods[j];
-    RC_TRACE_NO_CR(0x00004000, ("      (%5d)  ", m->vtable_index()));
-    m->access_flags().print_on(tty);
-    tty->cr();
-  }
-  RC_TRACE(0x00004000, ("_deleted_methods --"));
-  for (j = 0; j < _deleted_methods_length; ++j) {
-    methodOop m = _deleted_methods[j];
-    RC_TRACE_NO_CR(0x00004000, ("%4d  (%5d)  ", j, m->vtable_index()));
-    m->access_flags().print_on(tty);
-    tty->print(" --  ");
-    m->print_name(tty);
-    tty->cr();
-  }
-  RC_TRACE(0x00004000, ("_added_methods --"));
-  for (j = 0; j < _added_methods_length; ++j) {
-    methodOop m = _added_methods[j];
-    RC_TRACE_NO_CR(0x00004000, ("%4d  (%5d)  ", j, m->vtable_index()));
-    m->access_flags().print_on(tty);
-    tty->print(" --  ");
-    m->print_name(tty);
-    tty->cr();
+// This internal class transfers the native function registration from old methods
+// to new methods.  It is designed to handle both the simple case of unchanged
+// native methods and the complex cases of native method prefixes being added and/or
+// removed.
+// It expects only to be used during the VM_RedefineClasses op (a safepoint).
+//
+// This class is used after the new methods have been installed in "the_class".
+//
+// So, for example, the following must be handled.  Where 'm' is a method and
+// a number followed by an underscore is a prefix.
+//
+//                                      Old Name    New Name
+// Simple transfer to new method        m       ->  m
+// Add prefix                           m       ->  1_m
+// Remove prefix                        1_m     ->  m
+// Simultaneous add of prefixes         m       ->  3_2_1_m
+// Simultaneous removal of prefixes     3_2_1_m ->  m
+// Simultaneous add and remove          1_m     ->  2_m
+// Same, caused by prefix removal only  3_2_1_m ->  3_2_m
+//
+class TransferNativeFunctionRegistration {
+private:
+  instanceKlassHandle the_class;
+  int prefix_count;
+  char** prefixes;
+
+  // Recursively search the binary tree of possibly prefixed method names.
+  // Iteration could be used if all agents were well behaved. Full tree walk is
+  // more resilient to agents not cleaning up intermediate methods.
+  // Branch at each depth in the binary tree is:
+  //    (1) without the prefix.
+  //    (2) with the prefix.
+  // where 'prefix' is the prefix at that 'depth' (first prefix, second prefix,...)
+  methodOop search_prefix_name_space(int depth, char* name_str, size_t name_len,
+    Symbol* signature) {
+      Symbol* name_symbol = SymbolTable::probe(name_str, (int)name_len);
+      if (name_symbol != NULL) {
+        methodOop method = Klass::cast(the_class()->klass_part()->new_version())->lookup_method(name_symbol, signature);
+        if (method != NULL) {
+          // Even if prefixed, intermediate methods must exist.
+          if (method->is_native()) {
+            // Wahoo, we found a (possibly prefixed) version of the method, return it.
+            return method;
+          }
+          if (depth < prefix_count) {
+            // Try applying further prefixes (other than this one).
+            method = search_prefix_name_space(depth+1, name_str, name_len, signature);
+            if (method != NULL) {
+              return method; // found
+            }
+
+            // Try adding this prefix to the method name and see if it matches
+            // another method name.
+            char* prefix = prefixes[depth];
+            size_t prefix_len = strlen(prefix);
+            size_t trial_len = name_len + prefix_len;
+            char* trial_name_str = NEW_RESOURCE_ARRAY(char, trial_len + 1);
+            strcpy(trial_name_str, prefix);
+            strcat(trial_name_str, name_str);
+            method = search_prefix_name_space(depth+1, trial_name_str, trial_len,
+              signature);
+            if (method != NULL) {
+              // If found along this branch, it was prefixed, mark as such
+              method->set_is_prefixed_native();
+              return method; // found
+            }
+          }
+        }
+      }
+      return NULL;  // This whole branch bore nothing
+  }
+
+  // Return the method name with old prefixes stripped away.
+  char* method_name_without_prefixes(methodOop method) {
+    Symbol* name = method->name();
+    char* name_str = name->as_utf8();
+
+    // Old prefixing may be defunct, strip prefixes, if any.
+    for (int i = prefix_count-1; i >= 0; i--) {
+      char* prefix = prefixes[i];
+      size_t prefix_len = strlen(prefix);
+      if (strncmp(prefix, name_str, prefix_len) == 0) {
+        name_str += prefix_len;
+      }
+    }
+    return name_str;
+  }
+
+  // Strip any prefixes off the old native method, then try to find a
+  // (possibly prefixed) new native that matches it.
+  methodOop strip_and_search_for_new_native(methodOop method) {
+    ResourceMark rm;
+    char* name_str = method_name_without_prefixes(method);
+    return search_prefix_name_space(0, name_str, strlen(name_str),
+      method->signature());
+  }
+
+public:
+
+  // Construct a native method transfer processor for this class.
+  TransferNativeFunctionRegistration(instanceKlassHandle _the_class) {
+    assert(SafepointSynchronize::is_at_safepoint(), "sanity check");
+
+    the_class = _the_class;
+    prefixes = JvmtiExport::get_all_native_method_prefixes(&prefix_count);
+  }
+
+  // Attempt to transfer any of the old or deleted methods that are native
+  void transfer_registrations(instanceKlassHandle old_klass, int* old_methods, int methods_length) {
+    for (int j = 0; j < methods_length; j++) {
+      methodOop old_method = (methodOop)old_klass->methods()->obj_at(old_methods[j]);
+
+      if (old_method->is_native() && old_method->has_native_function()) {
+        methodOop new_method = strip_and_search_for_new_native(old_method);
+        if (new_method != NULL) {
+          // Actually set the native function in the new method.
+          // Redefine does not send events (except CFLH), certainly not this
+          // behind the scenes re-registration.
+          new_method->set_native_function(old_method->native_function(),
+            !methodOopDesc::native_bind_event_is_interesting);
+
+          TRACE_RC3("Transfering native function for method %s", old_method->name()->as_C_string());
+        }
+      }
+    }
   }
+};
+
+// Don't lose the association between a native method and its JNI function.
+void VM_RedefineClasses::transfer_old_native_function_registrations(instanceKlassHandle old_klass) {
+  TransferNativeFunctionRegistration transfer(old_klass);
+  transfer.transfer_registrations(old_klass, _deleted_methods, _deleted_methods_length);
+  transfer.transfer_registrations(old_klass, _matching_old_methods, _matching_methods_length);
 }
diff --git a/src/share/vm/prims/jvmtiRedefineClasses.hpp b/src/share/vm/prims/jvmtiRedefineClasses.hpp
index 671f2ae..8333cee 100644
--- a/src/share/vm/prims/jvmtiRedefineClasses.hpp
+++ b/src/share/vm/prims/jvmtiRedefineClasses.hpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2003, 2013, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2003, 2011, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -30,332 +30,29 @@
 #include "memory/resourceArea.hpp"
 #include "oops/objArrayKlass.hpp"
 #include "oops/objArrayOop.hpp"
+#include "oops/fieldStreams.hpp"
 #include "prims/jvmtiRedefineClassesTrace.hpp"
-#include "runtime/vm_operations.hpp"
-
-// Introduction:
-//
-// The RedefineClasses() API is used to change the definition of one or
-// more classes. While the API supports redefining more than one class
-// in a single call, in general, the API is discussed in the context of
-// changing the definition of a single current class to a single new
-// class. For clarity, the current class is will always be called
-// "the_class" and the new class will always be called "scratch_class".
-//
-// The name "the_class" is used because there is only one structure
-// that represents a specific class; redefinition does not replace the
-// structure, but instead replaces parts of the structure. The name
-// "scratch_class" is used because the structure that represents the
-// new definition of a specific class is simply used to carry around
-// the parts of the new definition until they are used to replace the
-// appropriate parts in the_class. Once redefinition of a class is
-// complete, scratch_class is thrown away.
-//
-//
-// Implementation Overview:
-//
-// The RedefineClasses() API is mostly a wrapper around the VM op that
-// does the real work. The work is split in varying degrees between
-// doit_prologue(), doit() and doit_epilogue().
-//
-// 1) doit_prologue() is called by the JavaThread on the way to a
-//    safepoint. It does parameter verification and loads scratch_class
-//    which involves:
-//    - parsing the incoming class definition using the_class' class
-//      loader and security context
-//    - linking scratch_class
-//    - merging constant pools and rewriting bytecodes as needed
-//      for the merged constant pool
-//    - verifying the bytecodes in scratch_class
-//    - setting up the constant pool cache and rewriting bytecodes
-//      as needed to use the cache
-//    - finally, scratch_class is compared to the_class to verify
-//      that it is a valid replacement class
-//    - if everything is good, then scratch_class is saved in an
-//      instance field in the VM operation for the doit() call
-//
-//    Note: A JavaThread must do the above work.
-//
-// 2) doit() is called by the VMThread during a safepoint. It installs
-//    the new class definition(s) which involves:
-//    - retrieving the scratch_class from the instance field in the
-//      VM operation
-//    - house keeping (flushing breakpoints and caches, deoptimizing
-//      dependent compiled code)
-//    - replacing parts in the_class with parts from scratch_class
-//    - adding weak reference(s) to track the obsolete but interesting
-//      parts of the_class
-//    - adjusting constant pool caches and vtables in other classes
-//      that refer to methods in the_class. These adjustments use the
-//      SystemDictionary::classes_do() facility which only allows
-//      a helper method to be specified. The interesting parameters
-//      that we would like to pass to the helper method are saved in
-//      static global fields in the VM operation.
-//    - telling the SystemDictionary to notice our changes
-//
-//    Note: the above work must be done by the VMThread to be safe.
-//
-// 3) doit_epilogue() is called by the JavaThread after the VM op
-//    is finished and the safepoint is done. It simply cleans up
-//    memory allocated in doit_prologue() and used in doit().
-//
-//
-// Constant Pool Details:
-//
-// When the_class is redefined, we cannot just replace the constant
-// pool in the_class with the constant pool from scratch_class because
-// that could confuse obsolete methods that may still be running.
-// Instead, the constant pool from the_class, old_cp, is merged with
-// the constant pool from scratch_class, scratch_cp. The resulting
-// constant pool, merge_cp, replaces old_cp in the_class.
-//
-// The key part of any merging algorithm is the entry comparison
-// function so we have to know the types of entries in a constant pool
-// in order to merge two of them together. Constant pools can contain
-// up to 12 different kinds of entries; the JVM_CONSTANT_Unicode entry
-// is not presently used so we only have to worry about the other 11
-// entry types. For the purposes of constant pool merging, it is
-// helpful to know that the 11 entry types fall into 3 different
-// subtypes: "direct", "indirect" and "double-indirect".
-//
-// Direct CP entries contain data and do not contain references to
-// other CP entries. The following are direct CP entries:
-//     JVM_CONSTANT_{Double,Float,Integer,Long,Utf8}
-//
-// Indirect CP entries contain 1 or 2 references to a direct CP entry
-// and no other data. The following are indirect CP entries:
-//     JVM_CONSTANT_{Class,NameAndType,String}
-//
-// Double-indirect CP entries contain two references to indirect CP
-// entries and no other data. The following are double-indirect CP
-// entries:
-//     JVM_CONSTANT_{Fieldref,InterfaceMethodref,Methodref}
-//
-// When comparing entries between two constant pools, the entry types
-// are compared first and if they match, then further comparisons are
-// made depending on the entry subtype. Comparing direct CP entries is
-// simply a matter of comparing the data associated with each entry.
-// Comparing both indirect and double-indirect CP entries requires
-// recursion.
-//
-// Fortunately, the recursive combinations are limited because indirect
-// CP entries can only refer to direct CP entries and double-indirect
-// CP entries can only refer to indirect CP entries. The following is
-// an example illustration of the deepest set of indirections needed to
-// access the data associated with a JVM_CONSTANT_Fieldref entry:
-//
-//     JVM_CONSTANT_Fieldref {
-//         class_index => JVM_CONSTANT_Class {
-//             name_index => JVM_CONSTANT_Utf8 {
-//                 <data-1>
-//             }
-//         }
-//         name_and_type_index => JVM_CONSTANT_NameAndType {
-//             name_index => JVM_CONSTANT_Utf8 {
-//                 <data-2>
-//             }
-//             descriptor_index => JVM_CONSTANT_Utf8 {
-//                 <data-3>
-//             }
-//         }
-//     }
-//
-// The above illustration is not a data structure definition for any
-// computer language. The curly braces ('{' and '}') are meant to
-// delimit the context of the "fields" in the CP entry types shown.
-// Each indirection from the JVM_CONSTANT_Fieldref entry is shown via
-// "=>", e.g., the class_index is used to indirectly reference a
-// JVM_CONSTANT_Class entry where the name_index is used to indirectly
-// reference a JVM_CONSTANT_Utf8 entry which contains the interesting
-// <data-1>. In order to understand a JVM_CONSTANT_Fieldref entry, we
-// have to do a total of 5 indirections just to get to the CP entries
-// that contain the interesting pieces of data and then we have to
-// fetch the three pieces of data. This means we have to do a total of
-// (5 + 3) * 2 == 16 dereferences to compare two JVM_CONSTANT_Fieldref
-// entries.
-//
-// Here is the indirection, data and dereference count for each entry
-// type:
-//
-//    JVM_CONSTANT_Class               1 indir, 1 data, 2 derefs
-//    JVM_CONSTANT_Double              0 indir, 1 data, 1 deref
-//    JVM_CONSTANT_Fieldref            2 indir, 3 data, 8 derefs
-//    JVM_CONSTANT_Float               0 indir, 1 data, 1 deref
-//    JVM_CONSTANT_Integer             0 indir, 1 data, 1 deref
-//    JVM_CONSTANT_InterfaceMethodref  2 indir, 3 data, 8 derefs
-//    JVM_CONSTANT_Long                0 indir, 1 data, 1 deref
-//    JVM_CONSTANT_Methodref           2 indir, 3 data, 8 derefs
-//    JVM_CONSTANT_NameAndType         1 indir, 2 data, 4 derefs
-//    JVM_CONSTANT_String              1 indir, 1 data, 2 derefs
-//    JVM_CONSTANT_Utf8                0 indir, 1 data, 1 deref
-//
-// So different subtypes of CP entries require different amounts of
-// work for a proper comparison.
-//
-// Now that we've talked about the different entry types and how to
-// compare them we need to get back to merging. This is not a merge in
-// the "sort -u" sense or even in the "sort" sense. When we merge two
-// constant pools, we copy all the entries from old_cp to merge_cp,
-// preserving entry order. Next we append all the unique entries from
-// scratch_cp to merge_cp and we track the index changes from the
-// location in scratch_cp to the possibly new location in merge_cp.
-// When we are done, any obsolete code that is still running that
-// uses old_cp should not be able to observe any difference if it
-// were to use merge_cp. As for the new code in scratch_class, it is
-// modified to use the appropriate index values in merge_cp before it
-// is used to replace the code in the_class.
-//
-// There is one small complication in copying the entries from old_cp
-// to merge_cp. Two of the CP entry types are special in that they are
-// lazily resolved. Before explaining the copying complication, we need
-// to digress into CP entry resolution.
-//
-// JVM_CONSTANT_Class and JVM_CONSTANT_String entries are present in
-// the class file, but are not stored in memory as such until they are
-// resolved. The entries are not resolved unless they are used because
-// resolution is expensive. During class file parsing the entries are
-// initially stored in memory as JVM_CONSTANT_ClassIndex and
-// JVM_CONSTANT_StringIndex entries. These special CP entry types
-// indicate that the JVM_CONSTANT_Class and JVM_CONSTANT_String entries
-// have been parsed, but the index values in the entries have not been
-// validated. After the entire constant pool has been parsed, the index
-// values can be validated and then the entries are converted into
-// JVM_CONSTANT_UnresolvedClass and JVM_CONSTANT_UnresolvedString
-// entries. During this conversion process, the UTF8 values that are
-// indirectly referenced by the JVM_CONSTANT_ClassIndex and
-// JVM_CONSTANT_StringIndex entries are changed into Symbol*s and the
-// entries are modified to refer to the Symbol*s. This optimization
-// eliminates one level of indirection for those two CP entry types and
-// gets the entries ready for verification. During class file parsing
-// it is also possible for JVM_CONSTANT_UnresolvedString entries to be
-// resolved into JVM_CONSTANT_String entries. Verification expects to
-// find JVM_CONSTANT_UnresolvedClass and either JVM_CONSTANT_String or
-// JVM_CONSTANT_UnresolvedString entries and not JVM_CONSTANT_Class
-// entries.
-//
-// Now we can get back to the copying complication. When we copy
-// entries from old_cp to merge_cp, we have to revert any
-// JVM_CONSTANT_Class entries to JVM_CONSTANT_UnresolvedClass entries
-// or verification will fail.
-//
-// It is important to explicitly state that the merging algorithm
-// effectively unresolves JVM_CONSTANT_Class entries that were in the
-// old_cp when they are changed into JVM_CONSTANT_UnresolvedClass
-// entries in the merge_cp. This is done both to make verification
-// happy and to avoid adding more brittleness between RedefineClasses
-// and the constant pool cache. By allowing the constant pool cache
-// implementation to (re)resolve JVM_CONSTANT_UnresolvedClass entries
-// into JVM_CONSTANT_Class entries, we avoid having to embed knowledge
-// about those algorithms in RedefineClasses.
-//
-// Appending unique entries from scratch_cp to merge_cp is straight
-// forward for direct CP entries and most indirect CP entries. For the
-// indirect CP entry type JVM_CONSTANT_NameAndType and for the double-
-// indirect CP entry types, the presence of more than one piece of
-// interesting data makes appending the entries more complicated.
-//
-// For the JVM_CONSTANT_{Double,Float,Integer,Long,Utf8} entry types,
-// the entry is simply copied from scratch_cp to the end of merge_cp.
-// If the index in scratch_cp is different than the destination index
-// in merge_cp, then the change in index value is tracked.
-//
-// Note: the above discussion for the direct CP entries also applies
-// to the JVM_CONSTANT_Unresolved{Class,String} entry types.
-//
-// For the JVM_CONSTANT_{Class,String} entry types, since there is only
-// one data element at the end of the recursion, we know that we have
-// either one or two unique entries. If the JVM_CONSTANT_Utf8 entry is
-// unique then it is appended to merge_cp before the current entry.
-// If the JVM_CONSTANT_Utf8 entry is not unique, then the current entry
-// is updated to refer to the duplicate entry in merge_cp before it is
-// appended to merge_cp. Again, any changes in index values are tracked
-// as needed.
-//
-// Note: the above discussion for JVM_CONSTANT_{Class,String} entry
-// types is theoretical. Since those entry types have already been
-// optimized into JVM_CONSTANT_Unresolved{Class,String} entry types,
-// they are handled as direct CP entries.
-//
-// For the JVM_CONSTANT_NameAndType entry type, since there are two
-// data elements at the end of the recursions, we know that we have
-// between one and three unique entries. Any unique JVM_CONSTANT_Utf8
-// entries are appended to merge_cp before the current entry. For any
-// JVM_CONSTANT_Utf8 entries that are not unique, the current entry is
-// updated to refer to the duplicate entry in merge_cp before it is
-// appended to merge_cp. Again, any changes in index values are tracked
-// as needed.
-//
-// For the JVM_CONSTANT_{Fieldref,InterfaceMethodref,Methodref} entry
-// types, since there are two indirect CP entries and three data
-// elements at the end of the recursions, we know that we have between
-// one and six unique entries. See the JVM_CONSTANT_Fieldref diagram
-// above for an example of all six entries. The uniqueness algorithm
-// for the JVM_CONSTANT_Class and JVM_CONSTANT_NameAndType entries is
-// covered above. Any unique entries are appended to merge_cp before
-// the current entry. For any entries that are not unique, the current
-// entry is updated to refer to the duplicate entry in merge_cp before
-// it is appended to merge_cp. Again, any changes in index values are
-// tracked as needed.
-//
-//
-// Other Details:
-//
-// Details for other parts of RedefineClasses need to be written.
-// This is a placeholder section.
-//
-//
-// Open Issues (in no particular order):
-//
-// - How do we serialize the RedefineClasses() API without deadlocking?
-//
-// - SystemDictionary::parse_stream() was called with a NULL protection
-//   domain since the initial version. This has been changed to pass
-//   the_class->protection_domain(). This change has been tested with
-//   all NSK tests and nothing broke, but what will adding it now break
-//   in ways that we don't test?
-//
-// - GenerateOopMap::rewrite_load_or_store() has a comment in its
-//   (indirect) use of the Relocator class that the max instruction
-//   size is 4 bytes. goto_w and jsr_w are 5 bytes and wide/iinc is
-//   6 bytes. Perhaps Relocator only needs a 4 byte buffer to do
-//   what it does to the bytecodes. More investigation is needed.
-//
-// - java.lang.Object methods can be called on arrays. This is
-//   implemented via the arrayKlassOop vtable which we don't
-//   update. For example, if we redefine java.lang.Object.toString(),
-//   then the new version of the method will not be called for array
-//   objects.
-//
-// - How do we know if redefine_single_class() and the guts of
-//   instanceKlass are out of sync? I don't think this can be
-//   automated, but we should probably order the work in
-//   redefine_single_class() to match the order of field
-//   definitions in instanceKlass. We also need to add some
-//   comments about keeping things in sync.
-//
-// - set_new_constant_pool() is huge and we should consider refactoring
-//   it into smaller chunks of work.
-//
-// - The exception table update code in set_new_constant_pool() defines
-//   const values that are also defined in a local context elsewhere.
-//   The same literal values are also used in elsewhere. We need to
-//   coordinate a cleanup of these constants with Runtime.
-//
-
-class VM_RedefineClasses: public VM_Operation {
+#include "gc_implementation/shared/vmGCOperations.hpp"
+
+// New version that allows arbitrary changes to already loaded classes.
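+// Runs as a VM_GC_Operation because redefinitions that change instance sizes
+// rely on the mark-and-compact GC modifications (cf. Arguments::check_gc_consistency).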
+class VM_RedefineClasses: public VM_GC_Operation {
  private:
+
   // These static fields are needed by SystemDictionary::classes_do()
   // facility and the adjust_cpool_cache_and_vtable() helper:
   static objArrayOop     _old_methods;
   static objArrayOop     _new_methods;
-  static methodOop*      _matching_old_methods;
-  static methodOop*      _matching_new_methods;
-  static methodOop*      _deleted_methods;
-  static methodOop*      _added_methods;
+  static int*            _matching_old_methods;
+  static int*            _matching_new_methods;
+  static int*            _deleted_methods;
+  static int*            _added_methods;
   static int             _matching_methods_length;
   static int             _deleted_methods_length;
   static int             _added_methods_length;
-  static klassOop        _the_class_oop;
+
+  static int             _revision_number;
+
+  static GrowableArray<instanceKlassHandle>* _affected_klasses;
 
   // The instance fields are used to pass information from
   // doit_prologue() to doit() and doit_epilogue().
@@ -366,40 +63,28 @@ class VM_RedefineClasses: public VM_Operation {
   // RetransformClasses.  Indicate which.
   JvmtiClassLoadKind          _class_load_kind;
 
-  // _index_map_count is just an optimization for knowing if
-  // _index_map_p contains any entries.
-  int                         _index_map_count;
-  intArray *                  _index_map_p;
-  // ptr to _class_count scratch_classes
-  instanceKlassHandle *       _scratch_classes;
-  jvmtiError                  _res;
+  GrowableArray<instanceKlassHandle>* _new_classes;
+  jvmtiError                  _result;
+  int                         _max_redefinition_flags;
 
   // Performance measurement support. These timers do not cover all
   // the work done for JVM/TI RedefineClasses() but they do cover
   // the heavy lifting.
-  elapsedTimer  _timer_rsc_phase1;
-  elapsedTimer  _timer_rsc_phase2;
-  elapsedTimer  _timer_vm_op_prologue;
-
-  // These routines are roughly in call order unless otherwise noted.
-
-  // Load the caller's new class definition(s) into _scratch_classes.
-  // Constant pool merging work is done here as needed. Also calls
-  // compare_and_normalize_class_versions() to verify the class
-  // definition(s).
+  elapsedTimer _timer_total;
+  elapsedTimer _timer_prologue;
+  elapsedTimer _timer_class_linking;
+  elapsedTimer _timer_class_loading;
+  elapsedTimer _timer_prepare_redefinition;
+  elapsedTimer _timer_wait_for_locks;
+  elapsedTimer _timer_heap_iteration;
+  elapsedTimer _timer_redefinition;
+  elapsedTimer _timer_vm_op_epilogue;
+
+  jvmtiError check_redefinition_allowed(instanceKlassHandle new_class);
+  jvmtiError find_sorted_affected_classes();
+  jvmtiError find_class_bytes(instanceKlassHandle the_class, const unsigned char **class_bytes, jint *class_byte_count, jboolean *not_changed);
   jvmtiError load_new_class_versions(TRAPS);
 
-  // Verify that the caller provided class definition(s) that meet
-  // the restrictions of RedefineClasses. Normalize the order of
-  // overloaded methods as needed.
-  jvmtiError compare_and_normalize_class_versions(
-    instanceKlassHandle the_class, instanceKlassHandle scratch_class);
-
-  // Swap annotations[i] with annotations[j]
-  // Used by compare_and_normalize_class_versions() when normalizing
-  // overloaded methods or changing idnum as when adding or deleting methods.
-  void swap_all_method_annotations(int i, int j, instanceKlassHandle scratch_class);
-
   // Figure out which new methods match old methods in name and signature,
   // which methods have been added, and which are no longer present
   void compute_added_deleted_matching_methods();
@@ -407,95 +92,71 @@ class VM_RedefineClasses: public VM_Operation {
   // Change jmethodIDs to point to the new methods
   void update_jmethod_ids();
 
-  // In addition to marking methods as obsolete, this routine
-  // records which methods are EMCP (Equivalent Module Constant
-  // Pool) in the emcp_methods BitMap and returns the number of
-  // EMCP methods via emcp_method_count_p. This information is
-  // used when information about the previous version of the_class
-  // is squirreled away.
-  void check_methods_and_mark_as_obsolete(BitMap *emcp_methods,
-         int * emcp_method_count_p);
-  void transfer_old_native_function_registrations(instanceKlassHandle the_class);
+  void swap_all_method_annotations(int i, int j, instanceKlassHandle scratch_class);
+
+  static void add_affected_klasses(klassOop obj);
 
-  // Unevolving classes may point to methods of the_class directly
-  // from their constant pool caches, itables, and/or vtables. We
-  // use the SystemDictionary::classes_do() facility and this helper
-  // to fix up these pointers.
-  static void adjust_cpool_cache_and_vtable(klassOop k_oop, oop loader, TRAPS);
+  static jvmtiError do_topological_class_sorting(const jvmtiClassDefinition *class_definitions, int class_count, TRAPS);
 
   // Install the redefinition of a class
-  void redefine_single_class(jclass the_jclass,
-    instanceKlassHandle scratch_class, TRAPS);
+  void redefine_single_class(instanceKlassHandle the_new_class, TRAPS);
 
   // Increment the classRedefinedCount field in the specific instanceKlass
   // and in all direct and indirect subclasses.
   void increment_class_counter(instanceKlass *ik, TRAPS);
 
-  // Support for constant pool merging (these routines are in alpha
-  // order):
-  void append_entry(constantPoolHandle scratch_cp, int scratch_i,
-    constantPoolHandle *merge_cp_p, int *merge_cp_length_p, TRAPS);
-  int find_new_index(int old_index);
-  bool is_unresolved_class_mismatch(constantPoolHandle cp1, int index1,
-    constantPoolHandle cp2, int index2);
-  bool is_unresolved_string_mismatch(constantPoolHandle cp1, int index1,
-    constantPoolHandle cp2, int index2);
-  void map_index(constantPoolHandle scratch_cp, int old_index, int new_index);
-  bool merge_constant_pools(constantPoolHandle old_cp,
-    constantPoolHandle scratch_cp, constantPoolHandle *merge_cp_p,
-    int *merge_cp_length_p, TRAPS);
-  jvmtiError merge_cp_and_rewrite(instanceKlassHandle the_class,
-    instanceKlassHandle scratch_class, TRAPS);
-  u2 rewrite_cp_ref_in_annotation_data(
-    typeArrayHandle annotations_typeArray, int &byte_i_ref,
-    const char * trace_mesg, TRAPS);
-  bool rewrite_cp_refs(instanceKlassHandle scratch_class, TRAPS);
-  bool rewrite_cp_refs_in_annotation_struct(
-    typeArrayHandle class_annotations, int &byte_i_ref, TRAPS);
-  bool rewrite_cp_refs_in_annotations_typeArray(
-    typeArrayHandle annotations_typeArray, int &byte_i_ref, TRAPS);
-  bool rewrite_cp_refs_in_class_annotations(
-    instanceKlassHandle scratch_class, TRAPS);
-  bool rewrite_cp_refs_in_element_value(
-    typeArrayHandle class_annotations, int &byte_i_ref, TRAPS);
-  bool rewrite_cp_refs_in_fields_annotations(
-    instanceKlassHandle scratch_class, TRAPS);
-  void rewrite_cp_refs_in_method(methodHandle method,
-    methodHandle * new_method_p, TRAPS);
-  bool rewrite_cp_refs_in_methods(instanceKlassHandle scratch_class, TRAPS);
-  bool rewrite_cp_refs_in_methods_annotations(
-    instanceKlassHandle scratch_class, TRAPS);
-  bool rewrite_cp_refs_in_methods_default_annotations(
-    instanceKlassHandle scratch_class, TRAPS);
-  bool rewrite_cp_refs_in_methods_parameter_annotations(
-    instanceKlassHandle scratch_class, TRAPS);
-  void rewrite_cp_refs_in_stack_map_table(methodHandle method, TRAPS);
-  void rewrite_cp_refs_in_verification_type_info(
-         address& stackmap_addr_ref, address stackmap_end, u2 frame_i,
-         u1 frame_size, TRAPS);
-  void set_new_constant_pool(instanceKlassHandle scratch_class,
-    constantPoolHandle scratch_cp, int scratch_cp_length, bool shrink, TRAPS);
 
   void flush_dependent_code(instanceKlassHandle k_h, TRAPS);
 
-  static void check_class(klassOop k_oop, oop initiating_loader, TRAPS);
-  static void dump_methods();
+  static void check_class(klassOop k_oop, /* oop initiating_loader, */ TRAPS) PRODUCT_RETURN;
+
+  static void adjust_cpool_cache(klassOop k_oop, oop initiating_loader, TRAPS);
+
+  static void unpatch_bytecode(methodOop method);
+
+#ifdef ASSERT
+  static void verify_classes(klassOop k_oop, oop initiating_loader, TRAPS);
+#endif
+
+  int calculate_redefinition_flags(instanceKlassHandle new_version);
+  void calculate_instance_update_information(klassOop new_version);
+  void check_methods_and_mark_as_obsolete(BitMap *emcp_methods, int * emcp_method_count_p);
+  static void mark_as_scavengable(nmethod* nm);
+  
+  bool check_arguments();
+  jvmtiError check_arguments_error();
 
  public:
-  VM_RedefineClasses(jint class_count,
-                     const jvmtiClassDefinition *class_defs,
-                     JvmtiClassLoadKind class_load_kind);
-  VMOp_Type type() const { return VMOp_RedefineClasses; }
+  VM_RedefineClasses(jint class_count, const jvmtiClassDefinition *class_defs, JvmtiClassLoadKind class_load_kind);
+  virtual ~VM_RedefineClasses();
+  
   bool doit_prologue();
   void doit();
   void doit_epilogue();
+  void rollback();
 
-  bool allow_nested_vm_operations() const        { return true; }
-  jvmtiError check_error()                       { return _res; }
+  jvmtiError check_exception() const;
+  VMOp_Type type() const                         { return VMOp_RedefineClasses; }
+  bool skip_operation() const                    { return false; }
+  bool allow_nested_vm_operations() const        { return true;  }
+  jvmtiError check_error()                       { return _result;  }
 
   // Modifiable test must be shared between IsModifiableClass query
   // and redefine implementation
   static bool is_modifiable_class(oop klass_mirror);
+
+  // Utility methods for transferring field access flags
+
+  static void transfer_special_access_flags(JavaFieldStream *from, JavaFieldStream *to);
+  static void transfer_special_access_flags(fieldDescriptor *from, fieldDescriptor *to);
+
+  void transfer_old_native_function_registrations(instanceKlassHandle the_class);
+
+  void lock_threads();
+  void unlock_threads();
+
+  static void swap_marks(oop first, oop second);
+
 };
 
 #endif // SHARE_VM_PRIMS_JVMTIREDEFINECLASSES_HPP
diff --git a/src/share/vm/prims/jvmtiRedefineClassesTrace.hpp b/src/share/vm/prims/jvmtiRedefineClassesTrace.hpp
index 878d300..9dbe748 100644
--- a/src/share/vm/prims/jvmtiRedefineClassesTrace.hpp
+++ b/src/share/vm/prims/jvmtiRedefineClassesTrace.hpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2003, 2013, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2003, 2011, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -22,114 +22,26 @@
  *
  */
 
-#ifndef SHARE_VM_PRIMS_JVMTIREDEFINECLASSESTRACE_HPP
-#define SHARE_VM_PRIMS_JVMTIREDEFINECLASSESTRACE_HPP
+#define IF_TRACE_RC1 if (TraceRedefineClasses >= 1) 
+#define IF_TRACE_RC2 if (TraceRedefineClasses >= 2) 
+#define IF_TRACE_RC3 if (TraceRedefineClasses >= 3) 
+#define IF_TRACE_RC4 if (TraceRedefineClasses >= 4) 
+#define IF_TRACE_RC5 if (TraceRedefineClasses >= 5) 
 
-// RedefineClasses tracing support via the TraceRedefineClasses
-// option. A bit is assigned to each group of trace messages.
-// Groups of messages are individually selectable. We have to use
-// decimal values on the command line since the command option
-// parsing logic doesn't like non-decimal numerics. The HEX values
-// are used in the actual RC_TRACE() calls for sanity. To achieve
-// the old cumulative behavior, pick the level after the one in
-// which you are interested and subtract one, e.g., 33554431 will
-// print every tracing message.
-//
-//    0x00000000 |          0 - default; no tracing messages
-//    0x00000001 |          1 - name each target class before loading, after
-//                              loading and after redefinition is completed
-//    0x00000002 |          2 - print info if parsing, linking or
-//                              verification throws an exception
-//    0x00000004 |          4 - print timer info for the VM operation
-//    0x00000008 |          8 - print subclass counter updates
-//    0x00000010 |         16 - unused
-//    0x00000020 |         32 - unused
-//    0x00000040 |         64 - unused
-//    0x00000080 |        128 - unused
-//    0x00000100 |        256 - previous class weak reference addition
-//    0x00000200 |        512 - previous class weak reference mgmt during
-//                              class unloading checks (GC)
-//    0x00000400 |       1024 - previous class weak reference mgmt during
-//                              add previous ops (GC)
-//    0x00000800 |       2048 - previous class breakpoint mgmt
-//    0x00001000 |       4096 - detect calls to obsolete methods
-//    0x00002000 |       8192 - fail a guarantee() in addition to detection
-//    0x00004000 |      16384 - detect old/obsolete methods in metadata
-//    0x00008000 |      32768 - old/new method matching/add/delete
-//    0x00010000 |      65536 - impl details: CP size info
-//    0x00020000 |     131072 - impl details: CP merge pass info
-//    0x00040000 |     262144 - impl details: CP index maps
-//    0x00080000 |     524288 - impl details: modified CP index values
-//    0x00100000 |    1048576 - impl details: vtable updates
-//    0x00200000 |    2097152 - impl details: itable updates
-//    0x00400000 |    4194304 - impl details: constant pool cache updates
-//    0x00800000 |    8388608 - impl details: methodComparator info
-//    0x01000000 |   16777216 - impl details: nmethod evolution info
-//    0x02000000 |   33554432 - impl details: annotation updates
-//    0x04000000 |   67108864 - impl details: StackMapTable updates
-//    0x08000000 |  134217728 - impl details: OopMapCache updates
-//    0x10000000 |  268435456 - unused
-//    0x20000000 |  536870912 - unused
-//    0x40000000 | 1073741824 - unused
-//    0x80000000 | 2147483648 - unused
-//
-// Note: The ResourceMark is to cleanup resource allocated args.
-//   The "while (0)" is so we can use semi-colon at end of RC_TRACE().
-#define RC_TRACE(level, args) \
-  if ((TraceRedefineClasses & level) != 0) { \
-    ResourceMark rm; \
-    tty->print("RedefineClasses-0x%x: ", level); \
-    tty->print_cr args; \
-  } while (0)
-
-#define RC_TRACE_NO_CR(level, args) \
-  if ((TraceRedefineClasses & level) != 0) { \
-    ResourceMark rm; \
-    tty->print("RedefineClasses-0x%x: ", level); \
-    tty->print args; \
-  } while (0)
-
-#define RC_TRACE_WITH_THREAD(level, thread, args) \
-  if ((TraceRedefineClasses & level) != 0) { \
-    ResourceMark rm(thread); \
-    tty->print("RedefineClasses-0x%x: ", level); \
-    tty->print_cr args; \
-  } while (0)
-
-#define RC_TRACE_MESG(args) \
-  { \
-    ResourceMark rm; \
-    tty->print("RedefineClasses: "); \
-    tty->print_cr args; \
-  } while (0)
-
-// Macro for checking if TraceRedefineClasses has a specific bit
-// enabled. Returns true if the bit specified by level is set.
-#define RC_TRACE_ENABLED(level) ((TraceRedefineClasses & level) != 0)
-
-// Macro for checking if TraceRedefineClasses has one or more bits
-// set in a range of bit values. Returns true if one or more bits
-// is set in the range from low..high inclusive. Assumes that low
-// and high are single bit values.
-//
-// ((high << 1) - 1)
-//     Yields a mask that removes bits greater than the high bit value.
-//     This algorithm doesn't work with highest bit.
-// ~(low - 1)
-//     Yields a mask that removes bits lower than the low bit value.
-#define RC_TRACE_IN_RANGE(low, high) \
-(((TraceRedefineClasses & ((high << 1) - 1)) & ~(low - 1)) != 0)
+#define TRACE_RC1 if (TraceRedefineClasses >= 1) tty->print("TraceRedefineClasses-1: "); if (TraceRedefineClasses >= 1) tty->print_cr
+#define TRACE_RC2 if (TraceRedefineClasses >= 2) tty->print("   TraceRedefineClasses-2: "); if (TraceRedefineClasses >= 2) tty->print_cr
+#define TRACE_RC3 if (TraceRedefineClasses >= 3) tty->print("      TraceRedefineClasses-3: "); if (TraceRedefineClasses >= 3) tty->print_cr
+#define TRACE_RC4 if (TraceRedefineClasses >= 4) tty->print("         TraceRedefineClasses-4: "); if (TraceRedefineClasses >= 4) tty->print_cr
+#define TRACE_RC5 if (TraceRedefineClasses >= 5) tty->print("            TraceRedefineClasses-5: "); if (TraceRedefineClasses >= 5) tty->print_cr
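+
+// Note: each TRACE_RCn expands to two guarded statements (a prefix print and the
+// trailing tty->print_cr call that consumes the caller's arguments), e.g.
+//   TRACE_RC4("calling obsolete method '%s'", method->name_and_sig_as_C_string());
+// Because the expansion is not a single statement, do not use TRACE_RCn as the
+// body of an unbraced if/else.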
 
 // Timer support macros. Only do timer operations if timer tracing
 // is enabled. The "while (0)" is so we can use semi-colon at end of
 // the macro.
 #define RC_TIMER_START(t) \
-  if (RC_TRACE_ENABLED(0x00000004)) { \
+  if (TimeRedefineClasses) { \
     t.start(); \
   } while (0)
 #define RC_TIMER_STOP(t) \
-  if (RC_TRACE_ENABLED(0x00000004)) { \
+  if (TimeRedefineClasses) { \
     t.stop(); \
   } while (0)
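+
+// Usage sketch (illustrative): bracket a phase with one of the VM_RedefineClasses
+// timers, guarded by -XX:+TimeRedefineClasses, e.g.
+//   RC_TIMER_START(_timer_prologue);
+//   ... prologue work ...
+//   RC_TIMER_STOP(_timer_prologue);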
-
-#endif // SHARE_VM_PRIMS_JVMTIREDEFINECLASSESTRACE_HPP
diff --git a/src/share/vm/prims/methodComparator.cpp b/src/share/vm/prims/methodComparator.cpp
index 60eaf97..785dc24 100644
--- a/src/share/vm/prims/methodComparator.cpp
+++ b/src/share/vm/prims/methodComparator.cpp
@@ -42,10 +42,9 @@ bool MethodComparator::methods_EMCP(methodOop old_method, methodOop new_method)
   if (old_method->code_size() != new_method->code_size())
     return false;
   if (check_stack_and_locals_size(old_method, new_method) != 0) {
-    // RC_TRACE macro has an embedded ResourceMark
-    RC_TRACE(0x00800000, ("Methods %s non-comparable with diagnosis %d",
+    TRACE_RC4("Methods %s non-comparable with diagnosis %d",
       old_method->name()->as_C_string(),
-      check_stack_and_locals_size(old_method, new_method)));
+      check_stack_and_locals_size(old_method, new_method));
     return false;
   }
 
@@ -114,10 +113,9 @@ bool MethodComparator::methods_switchable(methodOop old_method, methodOop new_me
   // Now we can test all forward jumps
   for (int i = 0; i < fwd_jmps.length() / 2; i++) {
     if (! bci_map.old_and_new_locations_same(fwd_jmps.at(i*2), fwd_jmps.at(i*2+1))) {
-      RC_TRACE(0x00800000,
-        ("Fwd jump miss: old dest = %d, calc new dest = %d, act new dest = %d",
+      TRACE_RC4("Fwd jump miss: old dest = %d, calc new dest = %d, act new dest = %d",
         fwd_jmps.at(i*2), bci_map.new_bci_for_old(fwd_jmps.at(i*2)),
-        fwd_jmps.at(i*2+1)));
+        fwd_jmps.at(i*2+1));
       return false;
     }
   }
diff --git a/src/share/vm/runtime/arguments.cpp b/src/share/vm/runtime/arguments.cpp
index 22d450b..bac1ae6 100644
--- a/src/share/vm/runtime/arguments.cpp
+++ b/src/share/vm/runtime/arguments.cpp
@@ -54,8 +54,8 @@
 #include "gc_implementation/concurrentMarkSweep/compactibleFreeListSpace.hpp"
 #endif
 
-// Note: This is a special bug reporting site for the JVM
-#define DEFAULT_VENDOR_URL_BUG "http://bugreport.sun.com/bugreport/crash.jsp"
+// (tw) The DCE VM has its own JIRA bug tracking system.
+#define DEFAULT_VENDOR_URL_BUG "http://ssw.jku.at/dcevm/bugreport/"
 #define DEFAULT_JAVA_LAUNCHER  "generic"
 
 char**  Arguments::_jvm_flags_array             = NULL;
@@ -1792,6 +1792,16 @@ bool Arguments::check_gc_consistency() {
     status = false;
   }
 
+  // (tw) Must use the serial GC. This limitation applies because the instance-size-changing
+  // GC modifications are only built into the mark-and-compact algorithm.
+  if (!UseSerialGC && i >= 1) {
+    //jio_fprintf(defaultStream::error_stream(),
+    //  "Must use the serial GC in the Dynamic Code Evolution VM\n");
+    //status = false;
+  } else {
+    UseSerialGC = true;
+  }
+
   return status;
 }
 
@@ -3208,7 +3218,7 @@ jint Arguments::parse(const JavaVMInitArgs* args) {
 
   // Set flags if Aggressive optimization flags (-XX:+AggressiveOpts) enabled.
   set_aggressive_opts_flags();
-
+#ifndef COMPILER2
   // Turn off biased locking for locking debug mode flags,
   // which are subtlely different from each other but neither works with
   // biased locking.
@@ -3225,6 +3235,7 @@ jint Arguments::parse(const JavaVMInitArgs* args) {
     }
     UseBiasedLocking = false;
   }
+#endif
 
 #ifdef CC_INTERP
   // Clear flags not supported by the C++ interpreter
diff --git a/src/share/vm/runtime/fieldDescriptor.cpp b/src/share/vm/runtime/fieldDescriptor.cpp
index 3d5213f..9cc701b 100644
--- a/src/share/vm/runtime/fieldDescriptor.cpp
+++ b/src/share/vm/runtime/fieldDescriptor.cpp
@@ -92,7 +92,8 @@ void fieldDescriptor::initialize(klassOop k, int index) {
   instanceKlass* ik = instanceKlass::cast(k);
   _cp = ik->constants();
   FieldInfo* f = ik->field(index);
-  assert(!f->is_internal(), "regular Java fields only");
+  // (tw) do we need this?
+//  assert(!f->is_internal(), "regular Java fields only");
 
   _access_flags = accessFlags_from(f->access_flags());
   guarantee(f->name_index() != 0 && f->signature_index() != 0, "bad constant pool index for fieldDescriptor");
diff --git a/src/share/vm/runtime/globals.hpp b/src/share/vm/runtime/globals.hpp
index 8df7220..341b399 100644
--- a/src/share/vm/runtime/globals.hpp
+++ b/src/share/vm/runtime/globals.hpp
@@ -1230,6 +1230,11 @@ class CommandLineFlags {
   product(intx, TraceRedefineClasses, 0,                                    \
           "Trace level for JVMTI RedefineClasses")                          \
                                                                             \
+  product(bool, TimeRedefineClasses, false,                                 \
+          "Measure timing for JVMTI RedefineClasses")                       \
+                                                                            \
+  product(bool, AllowAdvancedClassRedefinition, true,                       \
+          "Allow advanced class redefinition beyond swapping method bodies")\
   develop(bool, StressMethodComparator, false,                              \
           "run the MethodComparator on all loaded methods")                 \
                                                                             \
diff --git a/src/share/vm/runtime/interfaceSupport.hpp b/src/share/vm/runtime/interfaceSupport.hpp
index 2875ee0..61fd8fe 100644
--- a/src/share/vm/runtime/interfaceSupport.hpp
+++ b/src/share/vm/runtime/interfaceSupport.hpp
@@ -296,7 +296,7 @@ class ThreadToNativeFromVM : public ThreadStateTransition {
   ThreadToNativeFromVM(JavaThread *thread) : ThreadStateTransition(thread) {
     // We are leaving the VM at this point and going directly to native code.
     // Block, if we are in the middle of a safepoint synchronization.
-    assert(!thread->owns_locks(), "must release all locks when leaving VM");
+    assert(!thread->owns_locks_but_redefine_classes_lock(), "must release all locks when leaving VM");
     thread->frame_anchor()->make_walkable(thread);
     trans_and_fence(_thread_in_vm, _thread_in_native);
     // Check for pending. async. exceptions or suspends.
diff --git a/src/share/vm/runtime/javaCalls.cpp b/src/share/vm/runtime/javaCalls.cpp
index edbba98..4a27925 100644
--- a/src/share/vm/runtime/javaCalls.cpp
+++ b/src/share/vm/runtime/javaCalls.cpp
@@ -60,7 +60,7 @@ JavaCallWrapper::JavaCallWrapper(methodHandle callee_method, Handle receiver, Ja
   bool clear_pending_exception = true;
 
   guarantee(thread->is_Java_thread(), "crucial check - the VM thread cannot and must not escape to Java code");
-  assert(!thread->owns_locks(), "must release all locks when leaving VM");
+  assert(!thread->owns_locks_but_redefine_classes_lock(), "must release all locks when leaving VM");
   guarantee(!thread->is_Compiler_thread(), "cannot make java calls from the compiler");
   _result   = result;
 
diff --git a/src/share/vm/runtime/jniHandles.cpp b/src/share/vm/runtime/jniHandles.cpp
index 3cbcaca..30839d7 100644
--- a/src/share/vm/runtime/jniHandles.cpp
+++ b/src/share/vm/runtime/jniHandles.cpp
@@ -112,6 +112,10 @@ jobject JNIHandles::make_weak_global(Handle obj) {
 }
 
 jmethodID JNIHandles::make_jmethod_id(methodHandle mh) {
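+  // (tw) Always create the jmethodID against the newest version of the method.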
+  if (mh->newest_version() != mh()) {
+    methodHandle mh_new(Thread::current(), mh()->newest_version());
+    return (jmethodID) make_weak_global(mh_new);
+  }
   return (jmethodID) make_weak_global(mh);
 }
 
diff --git a/src/share/vm/runtime/mutex.cpp b/src/share/vm/runtime/mutex.cpp
index 2095237..c541434 100644
--- a/src/share/vm/runtime/mutex.cpp
+++ b/src/share/vm/runtime/mutex.cpp
@@ -1227,7 +1227,7 @@ Monitor * Monitor::get_least_ranked_lock(Monitor * locks) {
     // in increasing rank order (modulo any native ranks)
     for (tmp = locks; tmp != NULL; tmp = tmp->next()) {
       if (tmp->next() != NULL) {
-        assert(tmp->rank() == Mutex::native ||
+        assert(tmp->rank() == Mutex::native || tmp->rank() == Mutex::redefine_classes ||
                tmp->rank() <= tmp->next()->rank(), "mutex rank anomaly?");
       }
     }
@@ -1247,7 +1247,7 @@ Monitor* Monitor::get_least_ranked_lock_besides_this(Monitor* locks) {
     // in increasing rank order (modulo any native ranks)
     for (tmp = locks; tmp != NULL; tmp = tmp->next()) {
       if (tmp->next() != NULL) {
-        assert(tmp->rank() == Mutex::native ||
+        assert(tmp->rank() == Mutex::native || tmp->rank() == Mutex::redefine_classes ||
                tmp->rank() <= tmp->next()->rank(), "mutex rank anomaly?");
       }
     }
@@ -1310,6 +1310,7 @@ void Monitor::set_owner_implementation(Thread *new_owner) {
       //   already hold Terminator_lock - may happen because of periodic safepoints
       if (this->rank() != Mutex::native &&
           this->rank() != Mutex::suspend_resume &&
+          this->rank() != Mutex::redefine_classes && 
           locks != NULL && locks->rank() <= this->rank() &&
           !SafepointSynchronize::is_at_safepoint() &&
           this != Interrupt_lock &&
diff --git a/src/share/vm/runtime/mutex.hpp b/src/share/vm/runtime/mutex.hpp
index 7d2cd82..11eb32e 100644
--- a/src/share/vm/runtime/mutex.hpp
+++ b/src/share/vm/runtime/mutex.hpp
@@ -109,7 +109,8 @@ class Monitor : public CHeapObj<mtInternal> {
        barrier     = safepoint      +   1,
        nonleaf     = barrier        +   1,
        max_nonleaf = nonleaf        + 900,
-       native      = max_nonleaf    +   1
+       native      = max_nonleaf    +   1,
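+       // (tw) ranked above 'native' so redefine-classes mutexes are exempt from
+       // the usual lock-rank ordering checks (see mutex.cpp)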
+       redefine_classes = native    +   1
   };
 
   // The WaitSet and EntryList linked lists are composed of ParkEvents.
diff --git a/src/share/vm/runtime/mutexLocker.cpp b/src/share/vm/runtime/mutexLocker.cpp
index a6b2106..3a3110b 100644
--- a/src/share/vm/runtime/mutexLocker.cpp
+++ b/src/share/vm/runtime/mutexLocker.cpp
@@ -49,6 +49,7 @@
 // Consider using GCC's __read_mostly.
 
 Mutex*   Patching_lock                = NULL;
+Mutex*   RedefineClasses_lock         = NULL;
 Monitor* SystemDictionary_lock        = NULL;
 Mutex*   PackageTable_lock            = NULL;
 Mutex*   CompiledIC_lock              = NULL;
@@ -278,6 +279,7 @@ void mutex_init() {
   def(MethodCompileQueue_lock      , Monitor, nonleaf+4,   true );
   def(Debug2_lock                  , Mutex  , nonleaf+4,   true );
   def(Debug3_lock                  , Mutex  , nonleaf+4,   true );
+  def(RedefineClasses_lock         , Mutex  , nonleaf+7,   false ); // for ensuring that class redefinition is not done in parallel
   def(CompileThread_lock           , Monitor, nonleaf+5,   false );
 
   def(JfrMsg_lock                  , Monitor, leaf,        true);
diff --git a/src/share/vm/runtime/mutexLocker.hpp b/src/share/vm/runtime/mutexLocker.hpp
index 40008bb..60c7cce 100644
--- a/src/share/vm/runtime/mutexLocker.hpp
+++ b/src/share/vm/runtime/mutexLocker.hpp
@@ -43,6 +43,7 @@
 // Mutexes used in the VM.
 
 extern Mutex*   Patching_lock;                   // a lock used to guard code patching of compiled code
+extern Mutex*   RedefineClasses_lock;            // a lock on class redefinition
 extern Monitor* SystemDictionary_lock;           // a lock on the system dictonary
 extern Mutex*   PackageTable_lock;               // a lock on the class loader package table
 extern Mutex*   CompiledIC_lock;                 // a lock used to guard compiled IC patching and access
diff --git a/src/share/vm/runtime/reflection.cpp b/src/share/vm/runtime/reflection.cpp
index cd009ed..a53ad09 100644
--- a/src/share/vm/runtime/reflection.cpp
+++ b/src/share/vm/runtime/reflection.cpp
@@ -468,7 +468,8 @@ bool Reflection::verify_class_access(klassOop current_class, klassOop new_class,
   // sun/reflect/MagicAccessorImpl subclasses to succeed trivially.
   if (   JDK_Version::is_gte_jdk14x_version()
       && UseNewReflection
-      && Klass::cast(current_class)->is_subclass_of(SystemDictionary::reflect_MagicAccessorImpl_klass())) {
+      && (Klass::cast(current_class)->is_subclass_of(SystemDictionary::reflect_MagicAccessorImpl_klass()) ||
+      Klass::cast(current_class)->is_subclass_of(SystemDictionary::reflect_MagicAccessorImpl_klass()->klass_part()->newest_version()))) {
     return true;
   }
 
@@ -519,6 +520,12 @@ bool Reflection::verify_field_access(klassOop current_class,
                                      AccessFlags access,
                                      bool classloader_only,
                                      bool protected_restriction) {
+
+  // (tw) Decide accessibility based on active version
+  if (current_class != NULL) {
+    current_class = current_class->klass_part()->active_version();
+  }
+  
   // Verify that current_class can access a field of field_class, where that
   // field's access bits are "access".  We assume that we've already verified
   // that current_class can access field_class.
@@ -560,7 +567,8 @@ bool Reflection::verify_field_access(klassOop current_class,
   // sun/reflect/MagicAccessorImpl subclasses to succeed trivially.
   if (   JDK_Version::is_gte_jdk14x_version()
       && UseNewReflection
-      && Klass::cast(current_class)->is_subclass_of(SystemDictionary::reflect_MagicAccessorImpl_klass())) {
+      && (Klass::cast(current_class)->is_subclass_of(SystemDictionary::reflect_MagicAccessorImpl_klass()) ||
+      Klass::cast(current_class)->is_subclass_of(SystemDictionary::reflect_MagicAccessorImpl_klass()->klass_part()->newest_version()))) {
     return true;
   }
 
diff --git a/src/share/vm/runtime/sharedRuntime.cpp b/src/share/vm/runtime/sharedRuntime.cpp
index 709d783..689b9a2 100644
--- a/src/share/vm/runtime/sharedRuntime.cpp
+++ b/src/share/vm/runtime/sharedRuntime.cpp
@@ -603,21 +603,13 @@ void SharedRuntime::throw_and_post_jvmti_exception(JavaThread *thread, Symbol* n
 //
 JRT_LEAF(int, SharedRuntime::rc_trace_method_entry(
     JavaThread* thread, methodOopDesc* method))
-  assert(RC_TRACE_IN_RANGE(0x00001000, 0x00002000), "wrong call");
+  assert(TraceRedefineClasses >= 4, "wrong call");
 
   if (method->is_obsolete()) {
     // We are calling an obsolete method, but this is not necessarily
     // an error. Our method could have been redefined just after we
     // fetched the methodOop from the constant pool.
-
-    // RC_TRACE macro has an embedded ResourceMark
-    RC_TRACE_WITH_THREAD(0x00001000, thread,
-                         ("calling obsolete method '%s'",
-                          method->name_and_sig_as_C_string()));
-    if (RC_TRACE_ENABLED(0x00002000)) {
-      // this option is provided to debug calls to obsolete methods
-      guarantee(false, "faulting at call to an obsolete method.");
-    }
+    TRACE_RC4("calling obsolete method '%s'", method->name_and_sig_as_C_string());
   }
   return 0;
 JRT_END
@@ -1137,7 +1129,20 @@ methodHandle SharedRuntime::resolve_helper(JavaThread *thread,
   if (JvmtiExport::can_hotswap_or_post_breakpoint()) {
     int retry_count = 0;
     while (!HAS_PENDING_EXCEPTION && callee_method->is_old() &&
-           callee_method->method_holder() != SystemDictionary::Object_klass()) {
+           callee_method->method_holder()->klass_part()->super() != NULL) {
+
+      // (tw) If we are executing an old method, this is OK!
+      {
+        ResourceMark rm(thread);
+        RegisterMap cbl_map(thread, false);
+        frame caller_frame = thread->last_frame().sender(&cbl_map);
+
+        CodeBlob* caller_cb = caller_frame.cb();
+        guarantee(caller_cb != NULL && caller_cb->is_nmethod(), "must be called from nmethod");
+        nmethod* caller_nm = caller_cb->as_nmethod_or_null();
+        if (caller_nm->method()->is_old()) break;
+      }
+
       // If has a pending exception then there is no need to re-try to
       // resolve this method.
       // If the method has been redefined, we need to try again.
diff --git a/src/share/vm/runtime/thread.cpp b/src/share/vm/runtime/thread.cpp
index ae28b65..bb0681e 100644
--- a/src/share/vm/runtime/thread.cpp
+++ b/src/share/vm/runtime/thread.cpp
@@ -216,6 +216,8 @@ Thread::Thread() {
   set_self_raw_id(0);
   set_lgrp_id(-1);
 
+  _redefine_classes_mutex = new Mutex(Mutex::redefine_classes, "redefine classes lock", false);
+
   // allocated data structures
   set_osthread(NULL);
   set_resource_area(new (mtThread)ResourceArea());
@@ -249,6 +251,7 @@ Thread::Thread() {
   omFreeProvision = 32 ;
   omInUseList = NULL ;
   omInUseCount = 0 ;
+  _pretend_new_universe = false;
 
 #ifdef ASSERT
   _visited_for_critical_count = false;
@@ -884,6 +887,15 @@ bool Thread::owns_locks_but_compiled_lock() const {
   return false;
 }
 
+bool Thread::owns_locks_but_redefine_classes_lock() const {
+  for(Monitor *cur = _owned_locks; cur; cur = cur->next()) {
+    if (cur != RedefineClasses_lock && cur->rank() != Mutex::redefine_classes) {
+      return true;
+    }
+  }
+  return false;
+}
+
 
 #endif
 
@@ -1637,7 +1649,7 @@ void JavaThread::run() {
   ThreadStateTransition::transition_and_fence(this, _thread_new, _thread_in_vm);
 
   assert(JavaThread::current() == this, "sanity check");
-  assert(!Thread::current()->owns_locks(), "sanity check");
+  assert(!Thread::current()->owns_locks_but_redefine_classes_lock(), "sanity check");
 
   DTRACE_THREAD_PROBE(start, this);
 
@@ -3193,7 +3205,7 @@ static void compiler_thread_entry(JavaThread* thread, TRAPS) {
 
 // Create a CompilerThread
 CompilerThread::CompilerThread(CompileQueue* queue, CompilerCounters* counters)
-: JavaThread(&compiler_thread_entry) {
+: JavaThread(&compiler_thread_entry), _should_bailout(false) {
   _env   = NULL;
   _log   = NULL;
   _task  = NULL;
@@ -3201,6 +3213,7 @@ CompilerThread::CompilerThread(CompileQueue* queue, CompilerCounters* counters)
   _counters = counters;
   _buffer_blob = NULL;
   _scanned_nmethod = NULL;
+  _compilation_mutex = new Mutex(Mutex::redefine_classes, "compilationMutex", false);
 
 #ifndef PRODUCT
   _ideal_graph_printer = NULL;
diff --git a/src/share/vm/runtime/thread.hpp b/src/share/vm/runtime/thread.hpp
index 774bd27..4620820 100644
--- a/src/share/vm/runtime/thread.hpp
+++ b/src/share/vm/runtime/thread.hpp
@@ -202,12 +202,15 @@ class Thread: public ThreadShadow {
  public:
   void enter_signal_handler() { _num_nested_signal++; }
   void leave_signal_handler() { _num_nested_signal--; }
   bool is_inside_signal_handler() const { return _num_nested_signal > 0; }
+  Mutex* redefine_classes_mutex() { return _redefine_classes_mutex; }
 
  private:
   // Debug tracing
   static void trace(const char* msg, const Thread* const thread) PRODUCT_RETURN;
 
+  Mutex* _redefine_classes_mutex;
+
   // Active_handles points to a block of handles
   JNIHandleBlock* _active_handles;
 
@@ -530,10 +533,15 @@ public:
   uintptr_t        _self_raw_id;      // used by get_thread (mutable)
   int              _lgrp_id;
 
+
+  bool             _pretend_new_universe;
+
  public:
   // Stack overflow support
   address stack_base() const           { assert(_stack_base != NULL,"Sanity check"); return _stack_base; }
 
+  void    set_pretend_new_universe(bool b) { if (_pretend_new_universe != b) { if (TraceRedefineClasses >= 5) tty->print_cr("Changing pretend universe to %d", (int)b); _pretend_new_universe = b; } }
+  bool    pretend_new_universe() { return _pretend_new_universe; }
   void    set_stack_base(address base) { _stack_base = base; }
   size_t  stack_size() const           { return _stack_size; }
   void    set_stack_size(size_t size)  { _stack_size = size; }
@@ -570,6 +578,7 @@ public:
   void print_owned_locks() const                 { print_owned_locks_on(tty);    }
   Monitor* owned_locks() const                   { return _owned_locks;          }
   bool owns_locks() const                        { return owned_locks() != NULL; }
+  bool owns_locks_but_redefine_classes_lock() const;
   bool owns_locks_but_compiled_lock() const;
 
   // Deadlock detection
@@ -1793,6 +1802,8 @@ class CompilerThread : public JavaThread {
   CompileTask*  _task;
   CompileQueue* _queue;
   BufferBlob*   _buffer_blob;
+  bool          _should_bailout;
+  Mutex*        _compilation_mutex;
 
   nmethod*      _scanned_nmethod;  // nmethod being scanned by the sweeper
 
@@ -1802,12 +1813,16 @@ class CompilerThread : public JavaThread {
 
   CompilerThread(CompileQueue* queue, CompilerCounters* counters);
 
+  bool should_bailout() const                    { return _should_bailout; }
+  void set_should_bailout(bool b)                { _should_bailout = b; }
+
   bool is_Compiler_thread() const                { return true; }
   // Hide this compiler thread from external view.
   bool is_hidden_from_external_view() const      { return true; }
 
   CompileQueue* queue()                          { return _queue; }
   CompilerCounters* counters()                   { return _counters; }
+  Mutex *compilation_mutex()                      { return _compilation_mutex; }
 
   // Get/set the thread's compilation environment.
   ciEnv*        env()                            { return _env; }
diff --git a/src/share/vm/runtime/vmThread.cpp b/src/share/vm/runtime/vmThread.cpp
index 7643670..0d3cd70 100644
--- a/src/share/vm/runtime/vmThread.cpp
+++ b/src/share/vm/runtime/vmThread.cpp
@@ -691,6 +691,9 @@ void VMThread::execute(VM_Operation* op) {
 void VMThread::oops_do(OopClosure* f, CodeBlobClosure* cf) {
   Thread::oops_do(f, cf);
   _vm_queue->oops_do(f);
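+  // (tw) Also visit oops held by the VM operation that is currently executing.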
+  if (_cur_vm_operation != NULL) {
+    _cur_vm_operation->oops_do(f);
+  }
 }
 
 //------------------------------------------------------------------------------------------------------------------
diff --git a/src/share/vm/utilities/exceptions.cpp b/src/share/vm/utilities/exceptions.cpp
index 03f254d..18e324b 100644
--- a/src/share/vm/utilities/exceptions.cpp
+++ b/src/share/vm/utilities/exceptions.cpp
@@ -254,6 +254,8 @@ Handle Exceptions::new_exception(Thread *thread, Symbol* name,
   assert(thread->is_Java_thread(), "can only be called by a Java thread");
   assert(!thread->has_pending_exception(), "already has exception");
 
+  bool old_pretend_value = Thread::current()->pretend_new_universe();
+  Thread::current()->set_pretend_new_universe(false);
   Handle h_exception;
 
   // Resolve exception klass
@@ -285,6 +287,7 @@ Handle Exceptions::new_exception(Thread *thread, Symbol* name,
     h_exception = Handle(thread, thread->pending_exception());
     thread->clear_pending_exception();
   }
+  Thread::current()->set_pretend_new_universe(old_pretend_value);
   return h_exception;
 }
 
diff --git a/src/share/vm/utilities/growableArray.hpp b/src/share/vm/utilities/growableArray.hpp
index 2a6d6b8..4b6927f 100644
--- a/src/share/vm/utilities/growableArray.hpp
+++ b/src/share/vm/utilities/growableArray.hpp
@@ -145,6 +145,33 @@ class GenericGrowableArray : public ResourceObj {
     assert(on_stack(), "fast ResourceObj path only");
     return (void*)resource_allocate_bytes(thread, elementSize * _max);
   }
+
+};
+
+template<class E, class F> class Pair : public StackObj
+{
+private:
+  E _left;
+  F _right;
+
+public:
+
+  Pair() { }
+
+  Pair(E left, F right) {
+    this->_left = left;
+    this->_right = right;
+  }
+
+  E left() {
+    return _left;
+  }
+
+  F right() {
+    return _right;
+  }
 };
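+
+// Example (illustrative):
+//   Pair<int, const char*> p(1, "one");
+//   int n = p.left();          // 1
+//   const char* s = p.right(); // "one"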
 
 template<class E> class GrowableArray : public GenericGrowableArray {