From 615430e4e81681cbb25f831c6b0b4add1bfe324d Mon Sep 17 00:00:00 2001 From: Ivan Dubrov Date: Wed, 30 Apr 2014 13:26:58 -0700 Subject: [PATCH] Started moving patches into Mercurial mq Started using Mercurial MQ for managing DCEVM patches. --- .gitignore | 8 +- hotspot/.hg/patches/light-jdk8u5-b13.patch | 4263 ++++++++++++++++++++ hotspot/.hg/patches/series | 3 + 3 files changed, 4273 insertions(+), 1 deletion(-) create mode 100644 hotspot/.hg/patches/light-jdk8u5-b13.patch create mode 100644 hotspot/.hg/patches/series diff --git a/.gitignore b/.gitignore index 870cce4a..be08e9db 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,12 @@ /.gradle /.idea -/hotspot +/hotspot/* +/hotspot/.hg/* +/hotspot/.hg/patches/* +!/hotspot/.hg/ +!/hotspot/.hg/patches +!/hotspot/.hg/patches/*.patch +!/hotspot/.hg/patches/series build *.iml hs_err* diff --git a/hotspot/.hg/patches/light-jdk8u5-b13.patch b/hotspot/.hg/patches/light-jdk8u5-b13.patch new file mode 100644 index 00000000..32152bc1 --- /dev/null +++ b/hotspot/.hg/patches/light-jdk8u5-b13.patch @@ -0,0 +1,4263 @@ +# HG changeset patch +# Parent 8a67179106085689906732013a282efeeb9bd5f4 + +diff --git a/make/openjdk_distro b/make/openjdk_distro +--- a/make/openjdk_distro ++++ b/make/openjdk_distro +@@ -27,6 +27,6 @@ + # + + # Don't put quotes (fail windows build). +-HOTSPOT_VM_DISTRO=OpenJDK ++HOTSPOT_VM_DISTRO=Dynamic Code Evolution + COMPANY_NAME= + PRODUCT_NAME=OpenJDK +diff --git a/src/share/vm/ci/ciObjectFactory.cpp b/src/share/vm/ci/ciObjectFactory.cpp +--- a/src/share/vm/ci/ciObjectFactory.cpp ++++ b/src/share/vm/ci/ciObjectFactory.cpp +@@ -750,3 +750,27 @@ + _unloaded_instances->length(), + _unloaded_klasses->length()); + } ++ ++int ciObjectFactory::compare_cimetadata(ciMetadata** a, ciMetadata** b) { ++ Metadata* am = (*a)->constant_encoding(); ++ Metadata* bm = (*b)->constant_encoding(); ++ return ((am > bm) ? 1 : ((am == bm) ?
0 : -1)); ++} ++ ++// (DCEVM) Resorting the ciObject arrays after class redefinition ++void ciObjectFactory::resort_shared_ci_metadata() { ++ if (_shared_ci_metadata == NULL) return; ++ _shared_ci_metadata->sort(ciObjectFactory::compare_cimetadata); ++ ++#ifdef ASSERT ++ if (CIObjectFactoryVerify) { ++ Metadata* last = NULL; ++ for (int j = 0; j< _shared_ci_metadata->length(); j++) { ++ Metadata* o = _shared_ci_metadata->at(j)->constant_encoding(); ++ assert(last < o, "out of order"); ++ last = o; ++ } ++ } ++#endif // ASSERT ++} ++ +diff --git a/src/share/vm/ci/ciObjectFactory.hpp b/src/share/vm/ci/ciObjectFactory.hpp +--- a/src/share/vm/ci/ciObjectFactory.hpp ++++ b/src/share/vm/ci/ciObjectFactory.hpp +@@ -90,6 +90,7 @@ + + ciInstance* get_unloaded_instance(ciInstanceKlass* klass); + ++ static int compare_cimetadata(ciMetadata** a, ciMetadata** b); + public: + static bool is_initialized() { return _initialized; } + +@@ -145,6 +146,8 @@ + + void print_contents(); + void print(); ++ ++ static void resort_shared_ci_metadata(); + }; + + #endif // SHARE_VM_CI_CIOBJECTFACTORY_HPP +diff --git a/src/share/vm/classfile/classFileParser.cpp b/src/share/vm/classfile/classFileParser.cpp +--- a/src/share/vm/classfile/classFileParser.cpp ++++ b/src/share/vm/classfile/classFileParser.cpp +@@ -759,6 +759,7 @@ + Array<Klass*>* ClassFileParser::parse_interfaces(int length, + Handle protection_domain, + Symbol* class_name, ++ bool pick_newest, + bool* has_default_methods, + TRAPS) { + if (length == 0) { +@@ -777,7 +778,11 @@ + "Interface name has bad constant pool index %u in class file %s", + interface_index, CHECK_NULL); + if (_cp->tag_at(interface_index).is_klass()) { +- interf = KlassHandle(THREAD, _cp->resolved_klass_at(interface_index)); ++ Klass* resolved_klass = _cp->resolved_klass_at(interface_index); ++ if (pick_newest) { ++ resolved_klass = resolved_klass->newest_version(); ++ } ++ interf = KlassHandle(THREAD, resolved_klass); + } else { + Symbol* unresolved_klass = _cp->klass_name_at(interface_index); + +@@ -791,6 +796,9 @@ + Klass* k = SystemDictionary::resolve_super_or_fail(class_name, + unresolved_klass, class_loader, protection_domain, + false, CHECK_NULL); ++ if (pick_newest) { ++ k = k->newest_version(); ++ } + interf = KlassHandle(THREAD, k); + } + +@@ -3093,6 +3101,7 @@ + } + + instanceKlassHandle ClassFileParser::parse_super_class(int super_class_index, ++ bool pick_newest, + TRAPS) { + instanceKlassHandle super_klass; + if (super_class_index == 0) { +@@ -3109,7 +3118,11 @@ + // However, make sure it is not an array type.
+ bool is_array = false; + if (_cp->tag_at(super_class_index).is_klass()) { +- super_klass = instanceKlassHandle(THREAD, _cp->resolved_klass_at(super_class_index)); ++ Klass* resolved_klass = _cp->resolved_klass_at(super_class_index); ++ if (pick_newest) { ++ resolved_klass = resolved_klass->newest_version(); ++ } ++ super_klass = instanceKlassHandle(THREAD, resolved_klass); + if (_need_verify) + is_array = super_klass->oop_is_array(); + } else if (_need_verify) { +@@ -3658,8 +3671,10 @@ + instanceKlassHandle ClassFileParser::parseClassFile(Symbol* name, + ClassLoaderData* loader_data, + Handle protection_domain, ++ KlassHandle old_klass, + KlassHandle host_klass, + GrowableArray* cp_patches, ++ GrowableArray* parsed_super_symbols, + TempNewSymbol& parsed_name, + bool verify, + TRAPS) { +@@ -3672,6 +3687,7 @@ + JvmtiCachedClassFileData *cached_class_file = NULL; + Handle class_loader(THREAD, loader_data->class_loader()); + bool has_default_methods = false; ++ bool pick_newest = !old_klass.is_null(); + ResourceMark rm(THREAD); + + ClassFileStream* cfs = stream(); +@@ -3688,7 +3704,7 @@ + + init_parsed_class_attributes(loader_data); + +- if (JvmtiExport::should_post_class_file_load_hook()) { ++ if (parsed_super_symbols == NULL && JvmtiExport::should_post_class_file_load_hook()) { + // Get the cached class file bytes (if any) from the class that + // is being redefined or retransformed. We use jvmti_thread_state() + // instead of JvmtiThreadState::state_for(jt) so we don't allocate +@@ -3823,6 +3839,26 @@ + CHECK_(nullHandle)); + } + ++ // (DCEVM) Do not parse full class file, only get super symbols and return. ++ if (parsed_super_symbols != NULL) { ++ u2 super_class_index = cfs->get_u2_fast(); ++ ++ if (super_class_index != 0) { ++ parsed_super_symbols->append(cp->klass_name_at(super_class_index)); ++ } ++ ++ // Interfaces ++ u2 itfs_len = cfs->get_u2_fast(); ++ Array* local_interfaces = ++ parse_interfaces(itfs_len, protection_domain, _class_name, pick_newest, &has_default_methods, CHECK_NULL); ++ ++ for (int i = 0; i < local_interfaces->length(); i++) { ++ Klass* o = local_interfaces->at(i); ++ parsed_super_symbols->append(o->name()); ++ } ++ return NULL; ++ } ++ + Klass* preserve_this_klass; // for storing result across HandleMark + + // release all handles when parsing is done +@@ -3849,13 +3885,14 @@ + + u2 super_class_index = cfs->get_u2_fast(); + instanceKlassHandle super_klass = parse_super_class(super_class_index, ++ pick_newest, + CHECK_NULL); + + // Interfaces + u2 itfs_len = cfs->get_u2_fast(); + Array* local_interfaces = + parse_interfaces(itfs_len, protection_domain, _class_name, +- &has_default_methods, CHECK_(nullHandle)); ++ pick_newest, &has_default_methods, CHECK_(nullHandle)); + + u2 java_fields_count = 0; + // Fields (offsets are filled in later) +@@ -3897,6 +3934,9 @@ + true, + CHECK_(nullHandle)); + ++ if (pick_newest) { ++ k = k->newest_version(); ++ } + KlassHandle kh (THREAD, k); + super_klass = instanceKlassHandle(THREAD, kh()); + } +@@ -4056,7 +4096,7 @@ + fill_oop_maps(this_klass, info.nonstatic_oop_map_count, info.nonstatic_oop_offsets, info.nonstatic_oop_counts); + + // Fill in has_finalizer, has_vanilla_constructor, and layout_helper +- set_precomputed_flags(this_klass); ++ set_precomputed_flags(this_klass, old_klass); + + // reinitialize modifiers, using the InnerClasses attribute + int computed_modifiers = this_klass->compute_modifier_flags(CHECK_(nullHandle)); +@@ -4283,7 +4323,7 @@ + } + + +-void ClassFileParser::set_precomputed_flags(instanceKlassHandle 
k) { ++void ClassFileParser::set_precomputed_flags(instanceKlassHandle k, KlassHandle old_klass) { + Klass* super = k->super(); + + // Check if this klass has an empty finalize method (i.e. one with return bytecode only), +@@ -4291,7 +4331,9 @@ + if (!_has_empty_finalizer) { + if (_has_finalizer || + (super != NULL && super->has_finalizer())) { +- k->set_has_finalizer(); ++ if (old_klass.is_null() || old_klass->has_finalizer()) { ++ k->set_has_finalizer(); ++ } + } + } + +@@ -4307,7 +4349,7 @@ + + // Check if this klass supports the java.lang.Cloneable interface + if (SystemDictionary::Cloneable_klass_loaded()) { +- if (k->is_subtype_of(SystemDictionary::Cloneable_klass())) { ++ if (k->is_subtype_of(SystemDictionary::Cloneable_klass()) || k->is_subtype_of(SystemDictionary::Cloneable_klass()->newest_version())) { + k->set_is_cloneable(); + } + } +diff --git a/src/share/vm/classfile/classFileParser.hpp b/src/share/vm/classfile/classFileParser.hpp +--- a/src/share/vm/classfile/classFileParser.hpp ++++ b/src/share/vm/classfile/classFileParser.hpp +@@ -214,11 +214,12 @@ + Array* parse_interfaces(int length, + Handle protection_domain, + Symbol* class_name, ++ bool pick_newest, + bool* has_default_methods, + TRAPS); + void record_defined_class_dependencies(instanceKlassHandle defined_klass, TRAPS); + +- instanceKlassHandle parse_super_class(int super_class_index, TRAPS); ++ instanceKlassHandle parse_super_class(int super_class_index, bool pick_newest, TRAPS); + // Field parsing + void parse_field_attributes(u2 attributes_count, + bool is_static, u2 signature_index, +@@ -299,7 +300,7 @@ + unsigned int nonstatic_oop_map_count, + int* nonstatic_oop_offsets, + unsigned int* nonstatic_oop_counts); +- void set_precomputed_flags(instanceKlassHandle k); ++ void set_precomputed_flags(instanceKlassHandle k, KlassHandle old_klass); + Array* compute_transitive_interfaces(instanceKlassHandle super, + Array* local_ifs, TRAPS); + +@@ -461,17 +462,20 @@ + instanceKlassHandle parseClassFile(Symbol* name, + ClassLoaderData* loader_data, + Handle protection_domain, ++ KlassHandle old_klass, + TempNewSymbol& parsed_name, + bool verify, + TRAPS) { + KlassHandle no_host_klass; +- return parseClassFile(name, loader_data, protection_domain, no_host_klass, NULL, parsed_name, verify, THREAD); ++ return parseClassFile(name, loader_data, protection_domain, old_klass, no_host_klass, NULL, NULL, parsed_name, verify, THREAD); + } + instanceKlassHandle parseClassFile(Symbol* name, + ClassLoaderData* loader_data, + Handle protection_domain, ++ KlassHandle old_klass, + KlassHandle host_klass, + GrowableArray* cp_patches, ++ GrowableArray* parsed_super_symbols, + TempNewSymbol& parsed_name, + bool verify, + TRAPS); +diff --git a/src/share/vm/classfile/classLoader.cpp b/src/share/vm/classfile/classLoader.cpp +--- a/src/share/vm/classfile/classLoader.cpp ++++ b/src/share/vm/classfile/classLoader.cpp +@@ -926,6 +926,7 @@ + instanceKlassHandle result = parser.parseClassFile(h_name, + loader_data, + protection_domain, ++ KlassHandle(), + parsed_name, + false, + CHECK_(h)); +diff --git a/src/share/vm/classfile/dictionary.cpp b/src/share/vm/classfile/dictionary.cpp +--- a/src/share/vm/classfile/dictionary.cpp ++++ b/src/share/vm/classfile/dictionary.cpp +@@ -145,7 +145,7 @@ + InstanceKlass* ik = InstanceKlass::cast(e); + + // Non-unloadable classes were handled in always_strong_oops_do +- if (!is_strongly_reachable(loader_data, e)) { ++ if (!ik->is_redefining() && !is_strongly_reachable(loader_data, e)) { + // Entry was not visited in 
phase1 (negated test from phase1) + assert(!loader_data->is_the_null_class_loader_data(), "unloading entry with null class loader"); + ClassLoaderData* k_def_class_loader_data = ik->class_loader_data(); +@@ -336,6 +336,32 @@ + add_entry(index, entry); + } + ++// (DCEVM) Updates the klass entry to point to the new Klass*. Necessary only for class redefinition. ++bool Dictionary::update_klass(int index, unsigned int hash, Symbol* name, ClassLoaderData* loader_data, KlassHandle k, KlassHandle old_class) { ++ ++ // There are several entries for the same class in the dictionary: One extra entry for each parent classloader of the classloader of the class. ++ bool found = false; ++ for (int index = 0; index < table_size(); index++) { ++ for (DictionaryEntry* entry = bucket(index); entry != NULL; entry = entry->next()) { ++ if (entry->klass() == old_class()) { ++ entry->set_literal(k()); ++ found = true; ++ } ++ } ++ } ++ return found; ++} ++ ++// (DCEVM) Undo previous updates to the system dictionary ++void Dictionary::rollback_redefinition() { ++ for (int index = 0; index < table_size(); index++) { ++ for (DictionaryEntry* entry = bucket(index); entry != NULL; entry = entry->next()) { ++ if (entry->klass()->is_redefining()) { ++ entry->set_literal(entry->klass()->old_version()); ++ } ++ } ++ } ++} + + // This routine does not lock the system dictionary. + // +@@ -366,7 +392,7 @@ + ClassLoaderData* loader_data, Handle protection_domain, TRAPS) { + DictionaryEntry* entry = get_entry(index, hash, name, loader_data); + if (entry != NULL && entry->is_valid_protection_domain(protection_domain)) { +- return entry->klass(); ++ return intercept_for_version(entry->klass()); + } else { + return NULL; + } +@@ -379,7 +405,7 @@ + assert (index == index_for(name, loader_data), "incorrect index?"); + + DictionaryEntry* entry = get_entry(index, hash, name, loader_data); +- return (entry != NULL) ? entry->klass() : (Klass*)NULL; ++ return intercept_for_version((entry != NULL) ? entry->klass() : (Klass*)NULL); + } + + +@@ -391,7 +417,7 @@ + assert (index == index_for(name, NULL), "incorrect index?"); + + DictionaryEntry* entry = get_entry(index, hash, name, NULL); +- return (entry != NULL) ? entry->klass() : (Klass*)NULL; ++ return intercept_for_version((entry != NULL) ? entry->klass() : (Klass*)NULL); + } + + +diff --git a/src/share/vm/classfile/dictionary.hpp b/src/share/vm/classfile/dictionary.hpp +--- a/src/share/vm/classfile/dictionary.hpp ++++ b/src/share/vm/classfile/dictionary.hpp +@@ -78,6 +78,10 @@ + + void add_klass(Symbol* class_name, ClassLoaderData* loader_data,KlassHandle obj); + ++ bool update_klass(int index, unsigned int hash, Symbol* name, ClassLoaderData* loader_data, KlassHandle k, KlassHandle old_class); ++ ++ void rollback_redefinition(); ++ + Klass* find_class(int index, unsigned int hash, + Symbol* name, ClassLoaderData* loader_data); + +@@ -107,6 +111,11 @@ + return (loader_data->is_the_null_class_loader_data() || !ClassUnloading); + } + ++ // (DCEVM) During enhanced class redefinition we want old version if new is being redefined ++ static Klass* intercept_for_version(Klass* k) { ++ return (k != NULL && k->is_redefining()) ? k->old_version() : k; ++ } ++ + // Unload (that is, break root links to) all unmarked classes and + // loaders. Returns "true" iff something was unloaded. 
+ bool do_unloading(); +diff --git a/src/share/vm/classfile/javaClasses.cpp b/src/share/vm/classfile/javaClasses.cpp +--- a/src/share/vm/classfile/javaClasses.cpp ++++ b/src/share/vm/classfile/javaClasses.cpp +@@ -1629,6 +1629,8 @@ + skip_throwableInit_check = true; + } + } ++ // (DCEVM): Line numbers from newest version must be used for EMCP-swapped methods ++ method = method->newest_version(); + if (method->is_hidden()) { + if (skip_hidden) continue; + } +diff --git a/src/share/vm/classfile/loaderConstraints.cpp b/src/share/vm/classfile/loaderConstraints.cpp +--- a/src/share/vm/classfile/loaderConstraints.cpp ++++ b/src/share/vm/classfile/loaderConstraints.cpp +@@ -446,7 +446,7 @@ + if (k != NULL) { + // We found the class in the system dictionary, so we should + // make sure that the Klass* matches what we already have. +- guarantee(k == probe->klass(), "klass should be in dictionary"); ++ guarantee(k == probe->klass()->newest_version(), "klass should be in dictionary"); + } else { + // If we don't find the class in the system dictionary, it + // has to be in the placeholders table. +diff --git a/src/share/vm/classfile/systemDictionary.cpp b/src/share/vm/classfile/systemDictionary.cpp +--- a/src/share/vm/classfile/systemDictionary.cpp ++++ b/src/share/vm/classfile/systemDictionary.cpp +@@ -174,6 +174,7 @@ + // can return a null klass + klass = handle_resolution_exception(class_name, class_loader, protection_domain, throw_error, k_h, THREAD); + } ++ assert(klass == NULL || klass->is_newest_version() || klass->newest_version()->is_redefining(), "must be"); + return klass; + } + +@@ -216,7 +217,7 @@ + // Forwards to resolve_instance_class_or_null + + Klass* SystemDictionary::resolve_or_null(Symbol* class_name, Handle class_loader, Handle protection_domain, TRAPS) { +- assert(!THREAD->is_Compiler_thread(), ++ assert(!THREAD->is_Compiler_thread() || JvmtiThreadState::state_for(JavaThread::current())->get_class_being_redefined() != NULL, + err_msg("can not load classes with compiler thread: class=%s, classloader=%s", + class_name->as_C_string(), + class_loader.is_null() ? "null" : class_loader->klass()->name()->as_C_string())); +@@ -1029,8 +1030,10 @@ + instanceKlassHandle k = ClassFileParser(st).parseClassFile(class_name, + loader_data, + protection_domain, ++ KlassHandle(), + host_klass, + cp_patches, ++ NULL, + parsed_name, + true, + THREAD); +@@ -1085,6 +1088,7 @@ + Handle protection_domain, + ClassFileStream* st, + bool verify, ++ KlassHandle old_class, + TRAPS) { + + // Classloaders that support parallelism, e.g. bootstrap classloader, +@@ -1112,9 +1116,15 @@ + instanceKlassHandle k = ClassFileParser(st).parseClassFile(class_name, + loader_data, + protection_domain, ++ old_class, + parsed_name, + verify, + THREAD); ++ // (DCEVM) During enhanced class redefinition, mark loaded class as being redefined ++ if (!old_class.is_null() && !k.is_null()) { ++ k->set_redefining(true); ++ k->set_old_version(old_class()); ++ } + + const char* pkg = "java/"; + if (!HAS_PENDING_EXCEPTION && +@@ -1149,10 +1159,11 @@ + // Add class just loaded + // If a class loader supports parallel classloading handle parallel define requests + // find_or_define_instance_class may return a different InstanceKlass +- if (is_parallelCapable(class_loader)) { ++ // (DCEVM) TODO: for class redefinition the parallel version does not work, check if this is a problem? 
++ if (is_parallelCapable(class_loader) && old_class.is_null()) { + k = find_or_define_instance_class(class_name, class_loader, k, THREAD); + } else { +- define_instance_class(k, THREAD); ++ define_instance_class(k, old_class, THREAD); + } + } + +@@ -1166,7 +1177,7 @@ + MutexLocker mu(SystemDictionary_lock, THREAD); + + Klass* check = find_class(parsed_name, loader_data); +- assert(check == k(), "should be present in the dictionary"); ++ assert((check == k() && !k->is_redefining()) || (k->is_redefining() && check == k->old_version()), "should be present in the dictionary"); + + Klass* check2 = find_class(h_name, defining_loader_data); + assert(check == check2, "name inconsistancy in SystemDictionary"); +@@ -1386,7 +1397,11 @@ + } + } + +-void SystemDictionary::define_instance_class(instanceKlassHandle k, TRAPS) { ++void SystemDictionary::rollback_redefinition() { ++ dictionary()->rollback_redefinition(); ++} ++ ++void SystemDictionary::define_instance_class(instanceKlassHandle k, KlassHandle old_class, TRAPS) { + + ClassLoaderData* loader_data = k->class_loader_data(); + Handle class_loader_h(THREAD, loader_data->class_loader()); +@@ -1416,7 +1431,17 @@ + Symbol* name_h = k->name(); + unsigned int d_hash = dictionary()->compute_hash(name_h, loader_data); + int d_index = dictionary()->hash_to_index(d_hash); +- check_constraints(d_index, d_hash, k, class_loader_h, true, CHECK); ++ ++ // (DCEVM) Update version of the Klass* in the system dictionary ++ // TODO: Check for thread safety! ++ if (!old_class.is_null()) { ++ bool ok = dictionary()->update_klass(d_index, d_hash, name_h, loader_data, k, old_class); ++ assert (ok, "must have found old class and updated!"); ++ } ++ check_constraints(d_index, d_hash, k, class_loader_h, old_class.is_null(), CHECK); ++ ++ // FIXME: (DCEVM) clean this... ++ if(!old_class.is_null() && TraceRedefineClasses >= 3){ tty->print_cr("Class has been updated!"); } + + // Register class just loaded with class loader (placed in Vector) + // Note we do this before updating the dictionary, as this can +@@ -1449,8 +1474,9 @@ + } + k->eager_initialize(THREAD); + ++ // (DCEVM) Only notify jvmti if not redefining a class. + // notify jvmti +- if (JvmtiExport::should_post_class_load()) { ++ if (JvmtiExport::should_post_class_load() && old_class.is_null()) { + assert(THREAD->is_Java_thread(), "thread->is_Java_thread()"); + JvmtiExport::post_class_load((JavaThread *) THREAD, k()); + +@@ -1524,7 +1550,7 @@ + } + } + +- define_instance_class(k, THREAD); ++ define_instance_class(k, KlassHandle(), THREAD); + + Handle linkage_exception = Handle(); // null handle + +@@ -1654,6 +1680,14 @@ + Universe::flush_dependents_on(k); + } + ++// (DCEVM) Remove from hierarchy - Undo add_to_hierarchy. ++void SystemDictionary::remove_from_hierarchy(instanceKlassHandle k) { ++ assert(k.not_null(), "just checking"); ++ ++ // remove receiver from sibling list ++ k->remove_from_sibling_list(); ++ // TODO (DCEVM): Remove from interfaces. 
++} + + // ---------------------------------------------------------------------------- + // GC support +@@ -2000,7 +2034,7 @@ + // also holds array classes + + assert(check->oop_is_instance(), "noninstance in systemdictionary"); +- if ((defining == true) || (k() != check)) { ++ if ((defining == true) && ((k() != check) && k->old_version() != check)) { + linkage_error = "loader (instance of %s): attempted duplicate class " + "definition for name: \"%s\""; + } else { +diff --git a/src/share/vm/classfile/systemDictionary.hpp b/src/share/vm/classfile/systemDictionary.hpp +--- a/src/share/vm/classfile/systemDictionary.hpp ++++ b/src/share/vm/classfile/systemDictionary.hpp +@@ -269,7 +269,7 @@ + // Resolve from stream (called by jni_DefineClass and JVM_DefineClass) + static Klass* resolve_from_stream(Symbol* class_name, Handle class_loader, + Handle protection_domain, +- ClassFileStream* st, bool verify, TRAPS); ++ ClassFileStream* st, bool verify, KlassHandle old_class, TRAPS); + + // Lookup an already loaded class. If not found NULL is returned. + static Klass* find(Symbol* class_name, Handle class_loader, Handle protection_domain, TRAPS); +@@ -339,6 +339,8 @@ + // System loader lock + static oop system_loader_lock() { return _system_loader_lock_obj; } + ++ // (DCEVM) Remove link to hierarchy ++ static void remove_from_hierarchy(instanceKlassHandle k); + private: + // Extended Redefine classes support (tbi) + static void preloaded_classes_do(KlassClosure* f); +@@ -408,6 +410,9 @@ + initialize_wk_klasses_until((WKID) limit, start_id, THREAD); + } + ++ // (DCEVM) rollback class redefinition ++ static void rollback_redefinition(); ++ + public: + #define WK_KLASS_DECLARE(name, symbol, option) \ + static Klass* name() { return check_klass_##option(_well_known_klasses[WK_KLASS_ENUM_NAME(name)]); } \ +@@ -613,7 +618,7 @@ + // after waiting, but before reentering SystemDictionary_lock + // to preserve lock order semantics. + static void double_lock_wait(Handle lockObject, TRAPS); +- static void define_instance_class(instanceKlassHandle k, TRAPS); ++ static void define_instance_class(instanceKlassHandle k, KlassHandle old_class, TRAPS); + static instanceKlassHandle find_or_define_instance_class(Symbol* class_name, + Handle class_loader, + instanceKlassHandle k, TRAPS); +diff --git a/src/share/vm/classfile/verifier.cpp b/src/share/vm/classfile/verifier.cpp +--- a/src/share/vm/classfile/verifier.cpp ++++ b/src/share/vm/classfile/verifier.cpp +@@ -189,7 +189,7 @@ + Symbol* name = klass->name(); + Klass* refl_magic_klass = SystemDictionary::reflect_MagicAccessorImpl_klass(); + +- bool is_reflect = refl_magic_klass != NULL && klass->is_subtype_of(refl_magic_klass); ++ bool is_reflect = refl_magic_klass != NULL && (klass->is_subtype_of(refl_magic_klass) || klass->is_subtype_of(refl_magic_klass->newest_version())); + + return (should_verify_for(klass->class_loader(), should_verify_class) && + // return if the class is a bootstrapping class +@@ -518,7 +518,7 @@ + + ClassVerifier::ClassVerifier( + instanceKlassHandle klass, TRAPS) +- : _thread(THREAD), _exception_type(NULL), _message(NULL), _klass(klass) { ++ : _thread(THREAD), _exception_type(NULL), _message(NULL), _klass(klass->newest_version()), _klass_to_verify(klass) { + _this_type = VerificationType::reference_type(klass->name()); + // Create list to hold symbols in reference area. 
+ _symbols = new GrowableArray(100, 0, NULL); +@@ -548,7 +548,7 @@ + _klass->external_name()); + } + +- Array* methods = _klass->methods(); ++ Array* methods = _klass_to_verify->methods(); + int num_methods = methods->length(); + + for (int index = 0; index < num_methods; index++) { +diff --git a/src/share/vm/classfile/verifier.hpp b/src/share/vm/classfile/verifier.hpp +--- a/src/share/vm/classfile/verifier.hpp ++++ b/src/share/vm/classfile/verifier.hpp +@@ -331,6 +331,7 @@ + + VerificationType object_type() const; + ++ instanceKlassHandle _klass_to_verify; + instanceKlassHandle _klass; // the class being verified + methodHandle _method; // current method being verified + VerificationType _this_type; // the verification type of the current class +diff --git a/src/share/vm/gc_implementation/concurrentMarkSweep/compactibleFreeListSpace.cpp b/src/share/vm/gc_implementation/concurrentMarkSweep/compactibleFreeListSpace.cpp +--- a/src/share/vm/gc_implementation/concurrentMarkSweep/compactibleFreeListSpace.cpp ++++ b/src/share/vm/gc_implementation/concurrentMarkSweep/compactibleFreeListSpace.cpp +@@ -161,6 +161,12 @@ + } + } + ++HeapWord* CompactibleFreeListSpace::forward_compact_top(size_t size, ++ CompactPoint* cp, HeapWord* compact_top) { ++ ShouldNotReachHere(); ++ return NULL; ++} ++ + // Like CompactibleSpace forward() but always calls cross_threshold() to + // update the block offset table. Removed initialize_threshold call because + // CFLS does not use a block offset array for contiguous spaces. +@@ -2098,7 +2104,7 @@ + // Support for compaction + + void CompactibleFreeListSpace::prepare_for_compaction(CompactPoint* cp) { +- SCAN_AND_FORWARD(cp,end,block_is_obj,block_size); ++ SCAN_AND_FORWARD(cp,end,block_is_obj,block_size,false); + // prepare_for_compaction() uses the space between live objects + // so that later phase can skip dead space quickly. So verification + // of the free lists doesn't work after. +@@ -2119,7 +2125,7 @@ + } + + void CompactibleFreeListSpace::compact() { +- SCAN_AND_COMPACT(obj_size); ++ SCAN_AND_COMPACT(obj_size, false); + } + + // fragmentation_metric = 1 - [sum of (fbs**2) / (sum of fbs)**2] +diff --git a/src/share/vm/gc_implementation/concurrentMarkSweep/compactibleFreeListSpace.hpp b/src/share/vm/gc_implementation/concurrentMarkSweep/compactibleFreeListSpace.hpp +--- a/src/share/vm/gc_implementation/concurrentMarkSweep/compactibleFreeListSpace.hpp ++++ b/src/share/vm/gc_implementation/concurrentMarkSweep/compactibleFreeListSpace.hpp +@@ -150,6 +150,7 @@ + + // Support for compacting cms + HeapWord* cross_threshold(HeapWord* start, HeapWord* end); ++ HeapWord* forward_compact_top(size_t size, CompactPoint* cp, HeapWord* compact_top); + HeapWord* forward(oop q, size_t size, CompactPoint* cp, HeapWord* compact_top); + + // Initialization helpers. +diff --git a/src/share/vm/gc_implementation/shared/markSweep.cpp b/src/share/vm/gc_implementation/shared/markSweep.cpp +--- a/src/share/vm/gc_implementation/shared/markSweep.cpp ++++ b/src/share/vm/gc_implementation/shared/markSweep.cpp +@@ -32,6 +32,8 @@ + #include "oops/objArrayKlass.inline.hpp" + #include "oops/oop.inline.hpp" + ++GrowableArray* MarkSweep::_rescued_oops = NULL; ++ + uint MarkSweep::_total_invocations = 0; + + Stack MarkSweep::_marking_stack; +@@ -171,3 +173,100 @@ + } + + #endif ++ ++// (DCEVM) Copy the rescued objects to their destination address after compaction. 
++void MarkSweep::copy_rescued_objects_back() { ++ ++ if (_rescued_oops != NULL) { ++ ++ for (int i=0; i<_rescued_oops->length(); i++) { ++ HeapWord* rescued_ptr = _rescued_oops->at(i); ++ oop rescued_obj = (oop) rescued_ptr; ++ ++ int size = rescued_obj->size(); ++ oop new_obj = rescued_obj->forwardee(); ++ ++ assert(rescued_obj->klass()->new_version() != NULL, "just checking"); ++ ++ if (rescued_obj->klass()->new_version()->update_information() != NULL) { ++ MarkSweep::update_fields(rescued_obj, new_obj); ++ } else { ++ rescued_obj->set_klass(rescued_obj->klass()->new_version()); ++ Copy::aligned_disjoint_words((HeapWord*)rescued_obj, (HeapWord*)new_obj, size); ++ } ++ ++ FREE_RESOURCE_ARRAY(HeapWord, rescued_ptr, size); ++ ++ new_obj->init_mark(); ++ assert(new_obj->is_oop(), "must be a valid oop"); ++ } ++ _rescued_oops->clear(); ++ _rescued_oops = NULL; ++ } ++} ++ ++// (DCEVM) Update instances of a class whose fields changed. ++void MarkSweep::update_fields(oop q, oop new_location) { ++ ++ assert(q->klass()->new_version() != NULL, "class of old object must have new version"); ++ ++ Klass* old_klass_oop = q->klass(); ++ Klass* new_klass_oop = q->klass()->new_version(); ++ ++ InstanceKlass *old_klass = InstanceKlass::cast(old_klass_oop); ++ InstanceKlass *new_klass = InstanceKlass::cast(new_klass_oop); ++ ++ int size = q->size_given_klass(old_klass); ++ int new_size = q->size_given_klass(new_klass); ++ ++ HeapWord* tmp = NULL; ++ oop tmp_obj = q; ++ ++ // Save object somewhere, there is an overlap in fields ++ if (new_klass_oop->is_copying_backwards()) { ++ if (((HeapWord *)q >= (HeapWord *)new_location && (HeapWord *)q < (HeapWord *)new_location + new_size) || ++ ((HeapWord *)new_location >= (HeapWord *)q && (HeapWord *)new_location < (HeapWord *)q + size)) { ++ tmp = NEW_RESOURCE_ARRAY(HeapWord, size); ++ q = (oop) tmp; ++ Copy::aligned_disjoint_words((HeapWord*)q, (HeapWord*)tmp_obj, size); ++ } ++ } ++ ++ q->set_klass(new_klass_oop); ++ int *cur = new_klass_oop->update_information(); ++ assert(cur != NULL, "just checking"); ++ MarkSweep::update_fields(new_location, q, cur); ++ ++ if (tmp != NULL) { ++ FREE_RESOURCE_ARRAY(HeapWord, tmp, size); ++ } ++} ++ ++void MarkSweep::update_fields(oop new_location, oop tmp_obj, int *cur) { ++ assert(cur != NULL, "just checking"); ++ char* to = (char*)(HeapWord*)new_location; ++ while (*cur != 0) { ++ int size = *cur; ++ if (size > 0) { ++ cur++; ++ int offset = *cur; ++ HeapWord* from = (HeapWord*)(((char *)(HeapWord*)tmp_obj) + offset); ++ if (size == HeapWordSize) { ++ *((HeapWord*)to) = *from; ++ } else if (size == HeapWordSize * 2) { ++ *((HeapWord*)to) = *from; ++ *(((HeapWord*)to) + 1) = *(from + 1); ++ } else { ++ Copy::conjoint_jbytes(from, to, size); ++ } ++ to += size; ++ cur++; ++ } else { ++ assert(size < 0, ""); ++ int skip = -*cur; ++ Copy::fill_to_bytes(to, skip, 0); ++ to += skip; ++ cur++; ++ } ++ } ++} +diff --git a/src/share/vm/gc_implementation/shared/markSweep.hpp b/src/share/vm/gc_implementation/shared/markSweep.hpp +--- a/src/share/vm/gc_implementation/shared/markSweep.hpp ++++ b/src/share/vm/gc_implementation/shared/markSweep.hpp +@@ -107,8 +107,12 @@ + friend class AdjustPointerClosure; + friend class KeepAliveClosure; + friend class VM_MarkSweep; ++ friend class GenMarkSweep; + friend void marksweep_init(); + ++public: ++ static GrowableArray* _rescued_oops; ++ + // + // Vars + // +@@ -169,6 +173,9 @@ + + static inline void push_objarray(oop obj, size_t index); + ++ static void copy_rescued_objects_back(); ++ static 
void update_fields(oop q, oop new_location); ++ static void update_fields(oop new_location, oop tmp_obj, int *cur); + static void follow_stack(); // Empty marking stack. + + static void follow_klass(Klass* klass); +diff --git a/src/share/vm/interpreter/linkResolver.cpp b/src/share/vm/interpreter/linkResolver.cpp +--- a/src/share/vm/interpreter/linkResolver.cpp ++++ b/src/share/vm/interpreter/linkResolver.cpp +@@ -215,8 +215,8 @@ + // Klass resolution + + void LinkResolver::check_klass_accessability(KlassHandle ref_klass, KlassHandle sel_klass, TRAPS) { +- if (!Reflection::verify_class_access(ref_klass(), +- sel_klass(), ++ if (!Reflection::verify_class_access(ref_klass()->newest_version(), ++ sel_klass()->newest_version(), + true)) { + ResourceMark rm(THREAD); + Exceptions::fthrow( +@@ -444,7 +444,7 @@ + // We'll check for the method name first, as that's most likely + // to be false (so we'll short-circuit out of these tests). + if (sel_method->name() == vmSymbols::clone_name() && +- sel_klass() == SystemDictionary::Object_klass() && ++ sel_klass()->newest_version() == SystemDictionary::Object_klass()->newest_version() && + resolved_klass->oop_is_array()) { + // We need to change "protected" to "public". + assert(flags.is_protected(), "clone not protected?"); +@@ -802,7 +802,7 @@ + } + + // Final fields can only be accessed from its own class. +- if (is_put && fd.access_flags().is_final() && sel_klass() != current_klass()) { ++ if (is_put && fd.access_flags().is_final() && sel_klass() != current_klass() && sel_klass() != current_klass()->active_version()) { + THROW(vmSymbols::java_lang_IllegalAccessError()); + } + +@@ -1199,6 +1199,16 @@ + // recv_klass might be an arrayKlassOop but all vtables start at + // the same place. The cast is to avoid virtual call and assertion. + InstanceKlass* inst = (InstanceKlass*)recv_klass(); ++ ++ // (DCEVM) Check that the receiver is a subtype of the holder of the resolved method. ++ if (!inst->is_subtype_of(resolved_method->method_holder())) { ++ inst->print(); ++ tty->print_cr("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"); ++ resolved_method->method_holder()->print(); ++ tty->print_cr("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"); ++ resolved_method->print(); ++ } ++ assert(inst->is_subtype_of(resolved_method->method_holder()), "receiver and resolved method holder are inconsistent"); + selected_method = methodHandle(THREAD, inst->method_at_vtable(vtable_index)); + } + } +diff --git a/src/share/vm/memory/genMarkSweep.cpp b/src/share/vm/memory/genMarkSweep.cpp +--- a/src/share/vm/memory/genMarkSweep.cpp ++++ b/src/share/vm/memory/genMarkSweep.cpp +@@ -334,11 +334,16 @@ + // in the same order in phase2, phase3 and phase4. We don't quite do that + // here (perm_gen first rather than last), so we tell the validate code + // to use a higher index (saved from phase2) when verifying perm_gen. 
++ assert(_rescued_oops == NULL, "must be empty before processing"); + GenCollectedHeap* gch = GenCollectedHeap::heap(); + + GCTraceTime tm("phase 4", PrintGC && Verbose, true, _gc_timer); + trace("4"); + ++ MarkSweep::copy_rescued_objects_back(); ++ + GenCompactClosure blk; + gch->generation_iterate(&blk, true); ++ ++ MarkSweep::copy_rescued_objects_back(); + } +diff --git a/src/share/vm/memory/space.cpp b/src/share/vm/memory/space.cpp +--- a/src/share/vm/memory/space.cpp ++++ b/src/share/vm/memory/space.cpp +@@ -379,9 +379,8 @@ + _compaction_top = bottom(); + } + +-HeapWord* CompactibleSpace::forward(oop q, size_t size, +- CompactPoint* cp, HeapWord* compact_top) { +- // q is alive ++// (DCEVM) Calculates the compact_top that will be used for placing the next object with the giving size on the heap. ++HeapWord* CompactibleSpace::forward_compact_top(size_t size, CompactPoint* cp, HeapWord* compact_top) { + // First check if we should switch compaction space + assert(this == cp->space, "'this' should be current compaction space."); + size_t compaction_max_size = pointer_delta(end(), compact_top); +@@ -401,8 +400,15 @@ + compaction_max_size = pointer_delta(cp->space->end(), compact_top); + } + ++ return compact_top; ++} ++ ++HeapWord* CompactibleSpace::forward(oop q, size_t size, ++ CompactPoint* cp, HeapWord* compact_top) { ++ compact_top = forward_compact_top(size, cp, compact_top); ++ + // store the forwarding pointer into the mark word +- if ((HeapWord*)q != compact_top) { ++ if ((HeapWord*)q != compact_top || (size_t)q->size() != size) { + q->forward_to(oop(compact_top)); + assert(q->is_gc_marked(), "encoding the pointer should preserve the mark"); + } else { +@@ -423,6 +429,58 @@ + return compact_top; + } + ++// Compute the forward sizes and leave out objects whose position could ++// possibly overlap other objects. ++HeapWord* CompactibleSpace::forward_with_rescue(HeapWord* q, size_t size, ++ CompactPoint* cp, HeapWord* compact_top) { ++ size_t forward_size = size; ++ ++ // (DCEVM) There is a new version of the class of q => different size ++ if (oop(q)->klass()->new_version() != NULL && oop(q)->klass()->new_version()->update_information() != NULL) { ++ ++ size_t new_size = oop(q)->size_given_klass(oop(q)->klass()->new_version()); ++ assert(size != new_size, "instances without changed size have to be updated prior to GC run"); ++ forward_size = new_size; ++ } ++ ++ compact_top = forward_compact_top(forward_size, cp, compact_top); ++ ++ if (must_rescue(oop(q), oop(compact_top))) { ++ if (MarkSweep::_rescued_oops == NULL) { ++ MarkSweep::_rescued_oops = new GrowableArray(128); ++ } ++ MarkSweep::_rescued_oops->append(q); ++ return compact_top; ++ } ++ ++ return forward(oop(q), forward_size, cp, compact_top); ++} ++ ++// Compute the forwarding addresses for the objects that need to be rescued. ++HeapWord* CompactibleSpace::forward_rescued(CompactPoint* cp, HeapWord* compact_top) { ++ // TODO: empty the _rescued_oops after ALL spaces are compacted! 
++ if (MarkSweep::_rescued_oops != NULL) { ++ for (int i=0; i<MarkSweep::_rescued_oops->length(); i++) { ++ HeapWord* q = MarkSweep::_rescued_oops->at(i); ++ ++ /* size_t size = oop(q)->size(); changing this for cms for perm gen */ ++ size_t size = block_size(q); ++ ++ // (DCEVM) There is a new version of the class of q => different size ++ if (oop(q)->klass()->new_version() != NULL) { ++ size_t new_size = oop(q)->size_given_klass(oop(q)->klass()->new_version()); ++ assert(size != new_size, "instances without changed size have to be updated prior to GC run"); ++ size = new_size; ++ } ++ ++ compact_top = cp->space->forward(oop(q), size, cp, compact_top); ++ assert(compact_top <= end(), "must not write over end of space!"); ++ } ++ MarkSweep::_rescued_oops->clear(); ++ MarkSweep::_rescued_oops = NULL; ++ } ++ return compact_top; ++} + + bool CompactibleSpace::insert_deadspace(size_t& allowed_deadspace_words, + HeapWord* q, size_t deadlength) { +@@ -444,12 +502,17 @@ + #define adjust_obj_size(s) s + + void CompactibleSpace::prepare_for_compaction(CompactPoint* cp) { +- SCAN_AND_FORWARD(cp, end, block_is_obj, block_size); ++ SCAN_AND_FORWARD(cp, end, block_is_obj, block_size, false); + } + + // Faster object search. + void ContiguousSpace::prepare_for_compaction(CompactPoint* cp) { +- SCAN_AND_FORWARD(cp, top, block_is_always_obj, obj_size); ++ if (!Universe::is_redefining_gc_run()) { ++ SCAN_AND_FORWARD(cp, top, block_is_always_obj, obj_size, false); ++ } else { ++ // Redefinition run ++ SCAN_AND_FORWARD(cp, top, block_is_always_obj, obj_size, true); ++ } + } + + void Space::adjust_pointers() { +@@ -487,6 +550,111 @@ + assert(q == t, "just checking"); + } + ++ ++#ifdef ASSERT ++ ++int CompactibleSpace::space_index(oop obj) { ++ GenCollectedHeap* heap = GenCollectedHeap::heap(); ++ ++ //if (heap->is_in_permanent(obj)) { ++ // return -1; ++ //} ++ ++ int index = 0; ++ for (int i = heap->n_gens() - 1; i >= 0; i--) { ++ Generation* gen = heap->get_gen(i); ++ CompactibleSpace* space = gen->first_compaction_space(); ++ while (space != NULL) { ++ if (space->is_in_reserved(obj)) { ++ return index; ++ } ++ space = space->next_compaction_space(); ++ index++; ++ } ++ } ++ ++ tty->print_cr("could not compute space_index for %08xh", (HeapWord*)obj); ++ index = 0; ++ for (int i = heap->n_gens() - 1; i >= 0; i--) { ++ Generation* gen = heap->get_gen(i); ++ tty->print_cr(" generation %s: %08xh - %08xh", gen->name(), gen->reserved().start(), gen->reserved().end()); ++ ++ CompactibleSpace* space = gen->first_compaction_space(); ++ while (space != NULL) { ++ tty->print_cr(" %2d space %08xh - %08xh", index, space->bottom(), space->end()); ++ space = space->next_compaction_space(); ++ index++; ++ } ++ } ++ ++ ShouldNotReachHere(); ++ return 0; ++} ++#endif ++ ++bool CompactibleSpace::must_rescue(oop old_obj, oop new_obj) { ++ // Only redefined objects can have the need to be rescued. ++ if (oop(old_obj)->klass()->new_version() == NULL) return false; ++ ++ //if (old_obj->is_perm()) { ++ // // This object is in perm gen: Always rescue to satisfy invariant obj->klass() <= obj. ++ // return true; ++ //} ++ ++ int new_size = old_obj->size_given_klass(oop(old_obj)->klass()->new_version()); ++ int original_size = old_obj->size(); ++ ++ Generation* tenured_gen = GenCollectedHeap::heap()->get_gen(1); ++ bool old_in_tenured = tenured_gen->is_in_reserved(old_obj); ++ bool new_in_tenured = tenured_gen->is_in_reserved(new_obj); ++ if (old_in_tenured == new_in_tenured) { ++ // Rescue if object may overlap with a higher memory address.
++ bool overlap = ((HeapWord*)old_obj + original_size < (HeapWord*)new_obj + new_size); ++ if (old_in_tenured) { ++ // Old and new address are in same space, so just compare the address. ++ // Must rescue if object moves towards the top of the space. ++ assert(space_index(old_obj) == space_index(new_obj), "old_obj and new_obj must be in same space"); ++ } else { ++ // In the new generation, eden is located before the from space, so a ++ // simple pointer comparison is sufficient. ++ assert(GenCollectedHeap::heap()->get_gen(0)->is_in_reserved(old_obj), "old_obj must be in DefNewGeneration"); ++ assert(GenCollectedHeap::heap()->get_gen(0)->is_in_reserved(new_obj), "new_obj must be in DefNewGeneration"); ++ assert(overlap == (space_index(old_obj) < space_index(new_obj)), "slow and fast computation must yield same result"); ++ } ++ return overlap; ++ ++ } else { ++ assert(space_index(old_obj) != space_index(new_obj), "old_obj and new_obj must be in different spaces"); ++ if (tenured_gen->is_in_reserved(new_obj)) { ++ // Must never rescue when moving from the new into the old generation. ++ assert(GenCollectedHeap::heap()->get_gen(0)->is_in_reserved(old_obj), "old_obj must be in DefNewGeneration"); ++ assert(space_index(old_obj) > space_index(new_obj), "must be"); ++ return false; ++ ++ } else /* if (tenured_gen->is_in_reserved(old_obj)) */ { ++ // Must always rescue when moving from the old into the new generation. ++ assert(GenCollectedHeap::heap()->get_gen(0)->is_in_reserved(new_obj), "new_obj must be in DefNewGeneration"); ++ assert(space_index(old_obj) < space_index(new_obj), "must be"); ++ return true; ++ } ++ } ++} ++ ++HeapWord* CompactibleSpace::rescue(HeapWord* old_obj) { ++ assert(must_rescue(oop(old_obj), oop(old_obj)->forwardee()), "do not call otherwise"); ++ ++ int size = oop(old_obj)->size(); ++ HeapWord* rescued_obj = NEW_RESOURCE_ARRAY(HeapWord, size); ++ Copy::aligned_disjoint_words(old_obj, rescued_obj, size); ++ ++ if (MarkSweep::_rescued_oops == NULL) { ++ MarkSweep::_rescued_oops = new GrowableArray<HeapWord*>(128); ++ } ++ ++ MarkSweep::_rescued_oops->append(rescued_obj); ++ return rescued_obj; ++} ++ + void CompactibleSpace::adjust_pointers() { + // Check first is there is any work to do. + if (used() == 0) { +@@ -497,7 +665,12 @@ + } + + void CompactibleSpace::compact() { +- SCAN_AND_COMPACT(obj_size); ++ if(!Universe::is_redefining_gc_run()) { ++ SCAN_AND_COMPACT(obj_size, false); ++ } else { ++ // Redefinition run ++ SCAN_AND_COMPACT(obj_size, true) ++ } + } + + void Space::print_short() const { print_short_on(tty); } +diff --git a/src/share/vm/memory/space.hpp b/src/share/vm/memory/space.hpp +--- a/src/share/vm/memory/space.hpp ++++ b/src/share/vm/memory/space.hpp +@@ -450,6 +450,9 @@ + // indicates when the next such action should be taken. + virtual void prepare_for_compaction(CompactPoint* cp); + // MarkSweep support phase3 ++ DEBUG_ONLY(int space_index(oop obj)); ++ bool must_rescue(oop old_obj, oop new_obj); ++ HeapWord* rescue(HeapWord* old_obj); + virtual void adjust_pointers(); + // MarkSweep support phase4 + virtual void compact(); +@@ -479,6 +482,15 @@ + // accordingly". + virtual HeapWord* forward(oop q, size_t size, CompactPoint* cp, + HeapWord* compact_top); ++ // (DCEVM) same as forward, but can rescue objects.
Invoked only during ++ // redefinition runs ++ HeapWord* forward_with_rescue(HeapWord* q, size_t size, CompactPoint* cp, ++ HeapWord* compact_top); ++ ++ HeapWord* forward_rescued(CompactPoint* cp, HeapWord* compact_top); ++ ++ // (tw) Compute new compact top without actually forwarding the object. ++ virtual HeapWord* forward_compact_top(size_t size, CompactPoint* cp, HeapWord* compact_top); + + // Return a size with adjusments as required of the space. + virtual size_t adjust_object_size_v(size_t size) const { return size; } +@@ -509,7 +521,7 @@ + size_t word_len); + }; + +-#define SCAN_AND_FORWARD(cp,scan_limit,block_is_obj,block_size) { \ ++#define SCAN_AND_FORWARD(cp,scan_limit,block_is_obj,block_size,redefinition_run) { \ + /* Compute the new addresses for the live objects and store it in the mark \ + * Used by universe::mark_sweep_phase2() \ + */ \ +@@ -567,7 +579,17 @@ + /* prefetch beyond q */ \ + Prefetch::write(q, interval); \ + size_t size = block_size(q); \ ++ if (redefinition_run) { \ ++ compact_top = cp->space->forward_with_rescue(q, size, \ ++ cp, compact_top); \ ++ if (q < first_dead && oop(q)->is_gc_marked()) { \ ++ /* Was moved (otherwise, forward would reset mark), \ ++ set first_dead to here */ \ ++ first_dead = q; \ ++ } \ ++ } else { \ + compact_top = cp->space->forward(oop(q), size, cp, compact_top); \ ++ } \ + q += size; \ + end_of_live = q; \ + } else { \ +@@ -616,6 +638,8 @@ + } \ + } \ + \ ++ if (redefinition_run) { compact_top = forward_rescued(cp, compact_top); } \ ++ \ + assert(q == t, "just checking"); \ + if (liveRange != NULL) { \ + liveRange->set_end(q); \ +@@ -662,13 +686,8 @@ + q += size; \ + } \ + \ +- if (_first_dead == t) { \ +- q = t; \ +- } else { \ +- /* $$$ This is funky. Using this to read the previously written \ +- * LiveRange. See also use below. 
*/ \ +- q = (HeapWord*)oop(_first_dead)->mark()->decode_pointer(); \ +- } \ ++ /* (DCEVM) first_dead can be live object if we move/rescue resized objects */ \ ++ q = _first_dead; \ + } \ + \ + const intx interval = PrefetchScanIntervalInBytes; \ +@@ -696,7 +715,7 @@ + assert(q == t, "just checking"); \ + } + +-#define SCAN_AND_COMPACT(obj_size) { \ ++#define SCAN_AND_COMPACT(obj_size, redefinition_run) { \ + /* Copy all live objects to their new location \ + * Used by MarkSweep::mark_sweep_phase4() */ \ + \ +@@ -721,13 +740,9 @@ + } \ + ) /* debug_only */ \ + \ +- if (_first_dead == t) { \ +- q = t; \ +- } else { \ +- /* $$$ Funky */ \ +- q = (HeapWord*) oop(_first_dead)->mark()->decode_pointer(); \ ++ /* (DCEVM) first_dead can be live object if we move/rescue resized objects */ \ ++ q = _first_dead; \ + } \ +- } \ + \ + const intx scan_interval = PrefetchScanIntervalInBytes; \ + const intx copy_interval = PrefetchCopyIntervalInBytes; \ +@@ -745,11 +760,34 @@ + size_t size = obj_size(q); \ + HeapWord* compaction_top = (HeapWord*)oop(q)->forwardee(); \ + \ ++ if (redefinition_run && must_rescue(oop(q), oop(q)->forwardee())) { \ ++ rescue(q); \ ++ debug_only(Copy::fill_to_words(q, size, 0)); \ ++ q += size; \ ++ continue; \ ++ } \ ++ \ + /* prefetch beyond compaction_top */ \ + Prefetch::write(compaction_top, copy_interval); \ + \ + /* copy object and reinit its mark */ \ +- assert(q != compaction_top, "everything in this pass should be moving"); \ ++ assert(q != compaction_top || oop(q)->klass()->new_version() != NULL, \ ++ "everything in this pass should be moving"); \ ++ if (redefinition_run && oop(q)->klass()->new_version() != NULL) { \ ++ Klass* new_version = oop(q)->klass()->new_version(); \ ++ if (new_version->update_information() == NULL) { \ ++ Copy::aligned_conjoint_words(q, compaction_top, size); \ ++ oop(compaction_top)->set_klass(new_version); \ ++ } else { \ ++ MarkSweep::update_fields(oop(q), oop(compaction_top)); \ ++ } \ ++ oop(compaction_top)->init_mark(); \ ++ assert(oop(compaction_top)->klass() != NULL, "should have a class"); \ ++ \ ++ debug_only(prev_q = q); \ ++ q += size; \ ++ continue; \ ++ } \ + Copy::aligned_conjoint_words(q, compaction_top, size); \ + oop(compaction_top)->init_mark(); \ + assert(oop(compaction_top)->klass() != NULL, "should have a class"); \ +diff --git a/src/share/vm/memory/universe.cpp b/src/share/vm/memory/universe.cpp +--- a/src/share/vm/memory/universe.cpp ++++ b/src/share/vm/memory/universe.cpp +@@ -78,6 +78,8 @@ + #include "gc_implementation/parallelScavenge/parallelScavengeHeap.hpp" + #endif // INCLUDE_ALL_GCS + ++bool Universe::_is_redefining_gc_run = false; ++ + // Known objects + Klass* Universe::_boolArrayKlassObj = NULL; + Klass* Universe::_byteArrayKlassObj = NULL; +@@ -157,6 +159,42 @@ + f(doubleArrayKlassObj()); + } + ++// (DCEVM) This method should iterate all pointers that are not within heap objects. 
++void Universe::root_oops_do(OopClosure *oopClosure) { ++ ++ class AlwaysTrueClosure: public BoolObjectClosure { ++ public: ++ void do_object(oop p) { ShouldNotReachHere(); } ++ bool do_object_b(oop p) { return true; } ++ }; ++ AlwaysTrueClosure always_true; ++ ++ Universe::oops_do(oopClosure); ++// ReferenceProcessor::oops_do(oopClosure); (tw) check why no longer there ++ JNIHandles::oops_do(oopClosure); // Global (strong) JNI handles ++ Threads::oops_do(oopClosure, NULL, NULL); ++ ObjectSynchronizer::oops_do(oopClosure); ++ FlatProfiler::oops_do(oopClosure); ++ JvmtiExport::oops_do(oopClosure); ++ ++ // Now adjust pointers in remaining weak roots. (All of which should ++ // have been cleared if they pointed to non-surviving objects.) ++ // Global (weak) JNI handles ++ JNIHandles::weak_oops_do(&always_true, oopClosure); ++ ++ CodeCache::oops_do(oopClosure); ++ StringTable::oops_do(oopClosure); ++ ++ // (DCEVM) TODO: Check if this is correct? ++ //CodeCache::scavenge_root_nmethods_oops_do(oopClosure); ++ //Management::oops_do(oopClosure); ++ //ref_processor()->weak_oops_do(&oopClosure); ++ //PSScavenge::reference_processor()->weak_oops_do(&oopClosure); ++ ++ // SO_AllClasses ++ SystemDictionary::oops_do(oopClosure); ++} ++ + void Universe::oops_do(OopClosure* f, bool do_all) { + + f->do_oop((oop*) &_int_mirror); +diff --git a/src/share/vm/memory/universe.hpp b/src/share/vm/memory/universe.hpp +--- a/src/share/vm/memory/universe.hpp ++++ b/src/share/vm/memory/universe.hpp +@@ -248,7 +248,13 @@ + + static void compute_verify_oop_data(); + ++ static bool _is_redefining_gc_run; ++ + public: ++ ++ static bool is_redefining_gc_run() { return _is_redefining_gc_run; } ++ static void set_redefining_gc_run(bool b) { _is_redefining_gc_run = b; } ++ + // Known classes in the VM + static Klass* boolArrayKlassObj() { return _boolArrayKlassObj; } + static Klass* byteArrayKlassObj() { return _byteArrayKlassObj; } +@@ -401,6 +407,7 @@ + static void run_finalizers_on_exit(); + + // Iteration ++ static void root_oops_do(OopClosure *f); + + // Apply "f" to the addresses of all the direct heap pointers maintained + // as static fields of "Universe". +diff --git a/src/share/vm/oops/cpCache.cpp b/src/share/vm/oops/cpCache.cpp +--- a/src/share/vm/oops/cpCache.cpp ++++ b/src/share/vm/oops/cpCache.cpp +@@ -336,7 +336,8 @@ + if (has_appendix) { + const int appendix_index = f2_as_index() + _indy_resolved_references_appendix_offset; + assert(appendix_index >= 0 && appendix_index < resolved_references->length(), "oob"); +- assert(resolved_references->obj_at(appendix_index) == NULL, "init just once"); ++ // FIXME (DCEVM) relaxing for now... ++ //assert(resolved_references->obj_at(appendix_index) == NULL, "init just once"); + resolved_references->obj_at_put(appendix_index, appendix()); + } + +@@ -344,7 +345,8 @@ + if (has_method_type) { + const int method_type_index = f2_as_index() + _indy_resolved_references_method_type_offset; + assert(method_type_index >= 0 && method_type_index < resolved_references->length(), "oob"); +- assert(resolved_references->obj_at(method_type_index) == NULL, "init just once"); ++ // FIXME (DCEVM) relaxing for now... 
++ //assert(resolved_references->obj_at(method_type_index) == NULL, "init just once"); + resolved_references->obj_at_put(method_type_index, method_type()); + } + +@@ -532,6 +534,26 @@ + // the method is in the interesting class so the entry is interesting + return true; + } ++ ++// Enhanced RedefineClasses() API support (DCEVM): ++// Clear cached entry, let it be re-resolved ++void ConstantPoolCacheEntry::clear_entry() { ++ // Clear entry during class redefinition. Note that we still keep flags. ++ if (has_appendix()) { ++ // (DCEVM): Now this gets really ugly. If this entry is used by invokehandle, we cannot ++ // clear it just like that (data is used by _invokehandle bytecode which wouldn't re-resolve anything). ++ // FIXME: (DCEVM) need to figure out if we need to rewrite it somehow or it should be fine... ++ return; ++ } ++ _indices = constant_pool_index(); ++ _f1 = NULL; ++ _f2 = 0; ++ ++ // FIXME: (DCEVM) we want to clear flags, but parameter size is actually used ++ // after we return from the method, before entry is re-initialized. So let's ++ // keep parameter size the same. ++ _flags &= 0x0000000f; ++} + #endif // INCLUDE_JVMTI + + void ConstantPoolCacheEntry::print(outputStream* st, int index) const { +@@ -660,6 +682,14 @@ + } + } + } ++ ++// Enhanced RedefineClasses() API support (DCEVM): ++// Clear all entries ++void ConstantPoolCache::clear_entries() { ++ for (int i = 0; i < length(); i++) { ++ entry_at(i)->clear_entry(); ++ } ++} + #endif // INCLUDE_JVMTI + + +diff --git a/src/share/vm/oops/cpCache.hpp b/src/share/vm/oops/cpCache.hpp +--- a/src/share/vm/oops/cpCache.hpp ++++ b/src/share/vm/oops/cpCache.hpp +@@ -373,6 +373,10 @@ + bool * trace_name_printed); + bool check_no_old_or_obsolete_entries(); + bool is_interesting_method_entry(Klass* k); ++ ++ // Enhanced RedefineClasses() API support (DCEVM): ++ // Clear cached entry, let it be re-resolved ++ void clear_entry(); + #endif // INCLUDE_JVMTI + + // Debugging & Printing +@@ -472,6 +476,10 @@ + int methods_length, bool * trace_name_printed); + bool check_no_old_or_obsolete_entries(); + void dump_cache(); ++ ++ // Enhanced RedefineClasses() API support (DCEVM): ++ // Clear all entries ++ void clear_entries(); + #endif // INCLUDE_JVMTI + + // Deallocate - no fields to deallocate +diff --git a/src/share/vm/oops/instanceKlass.cpp b/src/share/vm/oops/instanceKlass.cpp +--- a/src/share/vm/oops/instanceKlass.cpp ++++ b/src/share/vm/oops/instanceKlass.cpp +@@ -718,7 +718,8 @@ + } + #endif + this_oop->set_init_state(linked); +- if (JvmtiExport::should_post_class_prepare()) { ++ // (DCEVM) Must check for old version in order to prevent infinite loops. ++ if (JvmtiExport::should_post_class_prepare() && this_oop->old_version() == NULL /* JVMTI deadlock otherwise */) { + Thread *thread = THREAD; + assert(thread->is_Java_thread(), "thread->is_Java_thread()"); + JvmtiExport::post_class_prepare((JavaThread *) thread, this_oop()); +@@ -795,7 +796,9 @@ + // If we were to use wait() instead of waitInterruptibly() then + // we might end up throwing IE from link/symbol resolution sites + // that aren't expected to throw. This would wreak havoc. See 6320309. +- while(this_oop->is_being_initialized() && !this_oop->is_reentrant_initialization(self)) { ++ // (DCEVM) Wait also for the old class version to be fully initialized. 
++ while((this_oop->is_being_initialized() && !this_oop->is_reentrant_initialization(self)) ++ || (this_oop->old_version() != NULL && InstanceKlass::cast(this_oop->old_version())->is_being_initialized())) { + wait = true; + ol.waitUninterruptibly(CHECK); + } +@@ -1051,6 +1054,18 @@ + return false; + } + ++bool InstanceKlass::implements_interface_any_version(Klass* k) const { ++ k = k->newest_version(); ++ if (this->newest_version() == k) return true; ++ assert(k->is_interface(), "should be an interface class"); ++ for (int i = 0; i < transitive_interfaces()->length(); i++) { ++ if (transitive_interfaces()->at(i)->newest_version() == k) { ++ return true; ++ } ++ } ++ return false; ++} ++ + bool InstanceKlass::is_same_or_direct_interface(Klass *k) const { + // Verify direct super interface + if (this == k) return true; +@@ -1314,6 +1329,18 @@ + } + } + ++void InstanceKlass::store_update_information(GrowableArray &values) { ++ int *arr = NEW_C_HEAP_ARRAY(int, values.length(), mtClass); ++ for (int i=0; imethod_idnum(); ++ jmethodID* jmeths = methods_jmethod_ids_acquire(); ++ size_t length; // length assigned as debugging crumb ++ jmethodID id = NULL; ++ if (jmeths != NULL && // If there is a cache ++ (length = (size_t)jmeths[0]) > idnum) { // and if it is long enough, ++ jmeths[idnum+1] = newMethodID; // Set method id (may be NULL) ++ return true; ++ } ++ return false; ++} + + // + // Walk the list of dependent nmethods searching for nmethods which +@@ -1875,6 +1914,13 @@ + last = b; + b = b->next(); + } ++ ++ // (DCEVM) Hack as dependencies get wrong version of Klass* ++ if (this->old_version() != NULL) { ++ InstanceKlass::cast(this->old_version())->remove_dependent_nmethod(nm); ++ return; ++ } ++ + #ifdef ASSERT + tty->print_cr("### %s can't find dependent nmethod:", this->external_name()); + nm->print(); +@@ -2884,6 +2930,24 @@ + assert(is_klass(), "must be klass"); + Klass::print_on(st); + ++ // (DCEVM) Output revision number and revision numbers of older / newer and oldest / newest version of this class. ++ if (AllowEnhancedClassRedefinition) { ++ st->print(BULLET"revision: %d", revision_number()); ++ if (new_version() != NULL) { ++ st->print(" (newer=%d)", new_version()->revision_number()); ++ } ++ if (newest_version() != new_version() && newest_version() != this) { ++ st->print(" (newest=%d)", newest_version()->revision_number()); ++ } ++ if (old_version() != NULL) { ++ st->print(" (old=%d)", old_version()->revision_number()); ++ } ++ if (oldest_version() != old_version() && oldest_version() != this) { ++ st->print(" (oldest=%d)", oldest_version()->revision_number()); ++ } ++ st->cr(); ++ } ++ + st->print(BULLET"instance size: %d", size_helper()); st->cr(); + st->print(BULLET"klass size: %d", size()); st->cr(); + st->print(BULLET"access: "); access_flags().print_on(st); st->cr(); +@@ -3219,7 +3283,7 @@ + } + + guarantee(sib->is_klass(), "should be klass"); +- guarantee(sib->super() == super, "siblings should have same superklass"); ++ guarantee(sib->super() == super || super->newest_version() == SystemDictionary::Object_klass(), "siblings should have same superklass"); + } + + // Verify implementor fields +@@ -3384,6 +3448,7 @@ + + // Purge previous versions + static void purge_previous_versions_internal(InstanceKlass* ik, int emcp_method_count) { ++ // FIXME: (DCEVM) Should we purge something? + if (ik->previous_versions() != NULL) { + // This klass has previous versions so see what we can cleanup + // while it is safe to do so. 
+@@ -3621,7 +3686,7 @@ + + // Determine if InstanceKlass has a previous version. + bool InstanceKlass::has_previous_version() const { +- return (_previous_versions != NULL && _previous_versions->length() > 0); ++ return _old_version != NULL || (_previous_versions != NULL && _previous_versions->length() > 0); + } // end has_previous_version() + + +diff --git a/src/share/vm/oops/instanceKlass.hpp b/src/share/vm/oops/instanceKlass.hpp +--- a/src/share/vm/oops/instanceKlass.hpp ++++ b/src/share/vm/oops/instanceKlass.hpp +@@ -139,6 +139,7 @@ + friend class VMStructs; + friend class ClassFileParser; + friend class CompileReplay; ++ friend class VM_EnhancedRedefineClasses; + + protected: + // Constructor +@@ -637,7 +638,7 @@ + // If the _previous_versions array is non-NULL, then this klass + // has been redefined at least once even if we aren't currently + // tracking a previous version. +- bool has_been_redefined() const { return _previous_versions != NULL; } ++ bool has_been_redefined() const { return _old_version != NULL || _previous_versions != NULL; } + bool has_previous_version() const; + void init_previous_versions() { + _previous_versions = NULL; +@@ -711,6 +712,7 @@ + static void get_jmethod_id_length_value(jmethodID* cache, size_t idnum, + size_t *length_p, jmethodID* id_p); + jmethodID jmethod_id_or_null(Method* method); ++ bool update_jmethod_id(Method* method, jmethodID newMethodID); + + // annotations support + Annotations* annotations() const { return _annotations; } +@@ -780,6 +782,7 @@ + // subclass/subinterface checks + bool implements_interface(Klass* k) const; + bool is_same_or_direct_interface(Klass* k) const; ++ bool implements_interface_any_version(Klass* k) const; + + // Access to the implementor of an interface. + Klass* implementor() const +@@ -831,6 +834,10 @@ + void do_nonstatic_fields(FieldClosure* cl); // including inherited fields + void do_local_static_fields(void f(fieldDescriptor*, TRAPS), TRAPS); + ++ // (DCEVM) instance update information to be used in GC run ++ void store_update_information(GrowableArray &values); ++ void clear_update_information(); ++ + void methods_do(void f(Method* method)); + void array_klasses_do(void f(Klass* k)); + void array_klasses_do(void f(Klass* k, TRAPS), TRAPS); +diff --git a/src/share/vm/oops/klass.cpp b/src/share/vm/oops/klass.cpp +--- a/src/share/vm/oops/klass.cpp ++++ b/src/share/vm/oops/klass.cpp +@@ -170,6 +170,13 @@ + set_next_link(NULL); + TRACE_INIT_ID(this); + ++ set_redefinition_flags(Klass::NoRedefinition); ++ set_redefining(false); ++ set_new_version(NULL); ++ set_old_version(NULL); ++ set_redefinition_index(-1); ++ set_revision_number(-1); ++ + set_prototype_header(markOopDesc::prototype()); + set_biased_lock_revocation_count(0); + set_last_biased_lock_bulk_revocation_time(0); +@@ -375,6 +382,24 @@ + debug_only(verify();) + } + ++// (DCEVM) ++void Klass::remove_from_sibling_list() { ++ debug_only(verify();) ++ // remove ourselves to superklass' subklass list ++ InstanceKlass* super = superklass(); ++ assert(super != NULL, "should have super"); ++ if (super->subklass() == this) { ++ // first subklass ++ super->set_subklass(next_sibling()); ++ } else { ++ Klass* sib = super->subklass(); ++ while (sib->next_sibling() != this) { ++ sib = sib->next_sibling(); ++ }; ++ sib->set_next_sibling(next_sibling()); ++ } ++} ++ + bool Klass::is_loader_alive(BoolObjectClosure* is_alive) { + assert(ClassLoaderDataGraph::contains((address)this), "is in the metaspace"); + +diff --git a/src/share/vm/oops/klass.hpp 
b/src/share/vm/oops/klass.hpp +--- a/src/share/vm/oops/klass.hpp ++++ b/src/share/vm/oops/klass.hpp +@@ -149,6 +149,10 @@ + oop _java_mirror; + // Superclass + Klass* _super; ++ // Old class ++ Klass* _old_version; ++ // New class ++ Klass* _new_version; + // First subclass (NULL if none); _subklass->next_sibling() is next one + Klass* _subklass; + // Sibling link (or NULL); links all subklasses of a klass +@@ -164,6 +168,16 @@ + jint _modifier_flags; // Processed access flags, for use by Class.getModifiers. + AccessFlags _access_flags; // Access flags. The class/interface distinction is stored here. + ++ // (DCEVM) fields for enhanced class redefinition ++ jint _revision_number; // The revision number for redefined classes ++ jint _redefinition_index; // Index of this class when performing the redefinition ++ bool _subtype_changed; ++ int _redefinition_flags; // Level of class redefinition ++ bool _is_copying_backwards; // Does the class need to copy fields backwards? => possibly overwrite itself? ++ bool _original_field_offsets_changed; // Did the original field offsets of this class change during class redefinition? ++ int * _update_information; // Update information ++ bool _is_redefining; ++ + // Biased locking implementation and statistics + // (the 64-bit chunk goes first, to avoid some fragmentation) + jlong _last_biased_lock_bulk_revocation_time; +@@ -208,6 +222,53 @@ + Array* secondary_supers() const { return _secondary_supers; } + void set_secondary_supers(Array* k) { _secondary_supers = k; } + ++ // BEGIN class redefinition utilities ++ ++ // double links between new and old version of a class ++ Klass* old_version() const { return _old_version; } ++ void set_old_version(Klass* klass) { assert(_old_version == NULL || klass == NULL, "Can only be set once!"); _old_version = klass; } ++ Klass* new_version() const { return _new_version; } ++ void set_new_version(Klass* klass) { assert(_new_version == NULL || klass == NULL, "Can only be set once!"); _new_version = klass; } ++ ++ // A subtype of this class is no longer a subtype ++ bool has_subtype_changed() const { return _subtype_changed; } ++ void set_subtype_changed(bool b) { assert(is_newest_version() || new_version()->is_newest_version(), "must be newest or second newest version"); ++ _subtype_changed = b; } ++ // state of being redefined ++ int redefinition_index() const { return _redefinition_index; } ++ void set_redefinition_index(int index) { _redefinition_index = index; } ++ void set_redefining(bool b) { _is_redefining = b; } ++ bool is_redefining() const { return _is_redefining; } ++ int redefinition_flags() const { return _redefinition_flags; } ++ bool check_redefinition_flag(int flags) const { return (_redefinition_flags & flags) != 0; } ++ void set_redefinition_flags(int flags) { _redefinition_flags = flags; } ++ void set_redefinition_flag(int flag) { _redefinition_flags |= flag; } ++ void clear_redefinition_flag(int flag) { _redefinition_flags &= ~flag; } ++ bool is_copying_backwards() const { return _is_copying_backwards; } ++ void set_copying_backwards(bool b) { _is_copying_backwards = b; } ++ ++ // update information ++ int *update_information() const { return _update_information; } ++ void set_update_information(int *info) { _update_information = info; } ++ ++ // Revision number for redefined classes, -1 for originally loaded classes ++ bool was_redefined() const { return _revision_number != -1; } ++ jint revision_number() const { return _revision_number; } ++ void set_revision_number(jint number) { 
_revision_number = number; } ++ ++ const Klass* oldest_version() const { return _old_version == NULL ? this : _old_version->oldest_version(); } ++ Klass* oldest_version() { return _old_version == NULL ? this : _old_version->oldest_version(); } ++ ++ const Klass* newest_version() const { return _new_version == NULL ? this : _new_version->newest_version(); } ++ Klass* newest_version() { return _new_version == NULL ? this : _new_version->newest_version(); } ++ ++ const Klass* active_version() const { return _new_version == NULL || _new_version->is_redefining() ? this : _new_version->active_version(); } ++ Klass* active_version() { return _new_version == NULL || _new_version->is_redefining() ? this : _new_version->active_version(); } ++ ++ bool is_newest_version() const { return _new_version == NULL; } ++ ++ // END class redefinition utilities ++ + // Return the element of the _super chain of the given depth. + // If there is no such element, return either NULL or this. + Klass* primary_super_of_depth(juint i) const { +@@ -261,6 +322,7 @@ + Klass* subklass() const; + Klass* next_sibling() const; + void append_to_sibling_list(); // add newly created receiver to superklass' subklass list ++ void remove_from_sibling_list(); // (DCEVM) remove receiver from sibling list + + void set_next_link(Klass* k) { _next_link = k; } + Klass* next_link() const { return _next_link; } // The next klass defined by the class loader. +@@ -287,6 +349,16 @@ + void set_next_sibling(Klass* s); + + public: ++ // (DCEVM) Different class redefinition flags of code evolution. ++ enum RedefinitionFlags { ++ NoRedefinition, // This class is not redefined at all! ++ ModifyClass = 1, // There are changes to the class meta data. ++ ModifyClassSize = ModifyClass << 1, // The size of the class meta data changes. ++ ModifyInstances = ModifyClassSize << 1, // There are change to the instance format. ++ ModifyInstanceSize = ModifyInstances << 1, // The size of instances changes. ++ RemoveSuperType = ModifyInstanceSize << 1, // A super type of this class is removed. ++ MarkedAsAffected = RemoveSuperType << 1 // This class has been marked as an affected class. ++ }; + + // Compiler support + static ByteSize super_offset() { return in_ByteSize(offset_of(Klass, _super)); } +diff --git a/src/share/vm/oops/klassVtable.cpp b/src/share/vm/oops/klassVtable.cpp +--- a/src/share/vm/oops/klassVtable.cpp ++++ b/src/share/vm/oops/klassVtable.cpp +@@ -1409,6 +1409,8 @@ + + void klassVtable::verify_against(outputStream* st, klassVtable* vt, int index) { + vtableEntry* vte = &vt->table()[index]; ++ // (DCEVM) FIXME-isd: do we need the following line? ++ if (vte->method() == NULL || table()[index].method() == NULL) return; + if (vte->method()->name() != table()[index].method()->name() || + vte->method()->signature() != table()[index].method()->signature()) { + fatal("mismatched name/signature of vtable entries"); +@@ -1428,6 +1430,8 @@ + + void vtableEntry::verify(klassVtable* vt, outputStream* st) { + NOT_PRODUCT(FlagSetting fs(IgnoreLockingAssertions, true)); ++ // FIXME: (DCEVM) does not hold? 
++ if (method() != NULL) { + assert(method() != NULL, "must have set method"); + method()->verify(); + // we sub_type, because it could be a miranda method +@@ -1435,7 +1439,9 @@ + #ifndef PRODUCT + print(); + #endif +- fatal(err_msg("vtableEntry " PTR_FORMAT ": method is from subclass", this)); ++ // (DCEVM) the following fatal does not work for old versions of classes ++ //fatal(err_msg("vtableEntry " PTR_FORMAT ": method is from subclass", this)); ++ } + } + } + +diff --git a/src/share/vm/oops/method.cpp b/src/share/vm/oops/method.cpp +--- a/src/share/vm/oops/method.cpp ++++ b/src/share/vm/oops/method.cpp +@@ -1185,6 +1185,8 @@ + + // Reset correct method/const method, method size, and parameter info + newm->set_constMethod(newcm); ++ newm->set_new_version(newm->new_version()); ++ newm->set_old_version(newm->old_version()); + newm->constMethod()->set_code_size(new_code_length); + newm->constMethod()->set_constMethod_size(new_const_method_size); + newm->set_method_size(new_method_size); +@@ -1788,6 +1790,10 @@ + + // Add a method id to the jmethod_ids + jmethodID Method::make_jmethod_id(ClassLoaderData* loader_data, Method* m) { ++ // FIXME: (DCEVM) ??? ++ if (m != m->newest_version()) { ++ m = m->newest_version(); ++ } + ClassLoaderData* cld = loader_data; + + if (!SafepointSynchronize::is_at_safepoint()) { +diff --git a/src/share/vm/oops/method.hpp b/src/share/vm/oops/method.hpp +--- a/src/share/vm/oops/method.hpp ++++ b/src/share/vm/oops/method.hpp +@@ -105,6 +105,10 @@ + AccessFlags _access_flags; // Access flags + int _vtable_index; // vtable index of this method (see VtableIndexFlag) + // note: can have vtables with >2**16 elements (because of inheritance) ++ // (DCEVM) Newer version of method available? ++ Method* _new_version; ++ Method* _old_version; ++ + #ifdef CC_INTERP + int _result_index; // C++ interpreter needs for converting results to/from stack + #endif +@@ -175,6 +179,23 @@ + int name_index() const { return constMethod()->name_index(); } + void set_name_index(int index) { constMethod()->set_name_index(index); } + ++ Method* new_version() const { return _new_version; } ++ void set_new_version(Method* m) { _new_version = m; } ++ Method* newest_version() { return (_new_version == NULL) ? this : _new_version->newest_version(); } ++ ++ Method* old_version() const { return _old_version; } ++ void set_old_version(Method* m) { ++ /*if (m == NULL) { ++ _old_version = NULL; ++ return; ++ }*/ ++ ++ assert(_old_version == NULL, "may only be set once"); ++ assert(this->code_size() == m->code_size(), "must have same code length"); ++ _old_version = m; ++ } ++ const Method* oldest_version() const { return (_old_version == NULL) ? 
this : _old_version->oldest_version(); } ++ + // signature + Symbol* signature() const { return constants()->symbol_at(signature_index()); } + int signature_index() const { return constMethod()->signature_index(); } +diff --git a/src/share/vm/prims/jni.cpp b/src/share/vm/prims/jni.cpp +--- a/src/share/vm/prims/jni.cpp ++++ b/src/share/vm/prims/jni.cpp +@@ -406,6 +406,7 @@ + } + Klass* k = SystemDictionary::resolve_from_stream(class_name, class_loader, + Handle(), &st, true, ++ KlassHandle(), + CHECK_NULL); + + if (TraceClassResolution && k != NULL) { +diff --git a/src/share/vm/prims/jvm.cpp b/src/share/vm/prims/jvm.cpp +--- a/src/share/vm/prims/jvm.cpp ++++ b/src/share/vm/prims/jvm.cpp +@@ -904,6 +904,7 @@ + Klass* k = SystemDictionary::resolve_from_stream(class_name, class_loader, + protection_domain, &st, + verify != 0, ++ KlassHandle(), + CHECK_NULL); + + if (TraceClassResolution && k != NULL) { +diff --git a/src/share/vm/prims/jvmtiEnv.cpp b/src/share/vm/prims/jvmtiEnv.cpp +--- a/src/share/vm/prims/jvmtiEnv.cpp ++++ b/src/share/vm/prims/jvmtiEnv.cpp +@@ -42,6 +42,7 @@ + #include "prims/jvmtiManageCapabilities.hpp" + #include "prims/jvmtiRawMonitor.hpp" + #include "prims/jvmtiRedefineClasses.hpp" ++#include "prims/jvmtiRedefineClasses2.hpp" + #include "prims/jvmtiTagMap.hpp" + #include "prims/jvmtiThreadState.inline.hpp" + #include "prims/jvmtiUtil.hpp" +@@ -206,8 +207,10 @@ + // is_modifiable_class_ptr - pre-checked for NULL + jvmtiError + JvmtiEnv::IsModifiableClass(oop k_mirror, jboolean* is_modifiable_class_ptr) { +- *is_modifiable_class_ptr = VM_RedefineClasses::is_modifiable_class(k_mirror)? +- JNI_TRUE : JNI_FALSE; ++ bool is_modifiable_class = AllowEnhancedClassRedefinition ? ++ VM_EnhancedRedefineClasses::is_modifiable_class(k_mirror) : ++ VM_RedefineClasses::is_modifiable_class(k_mirror); ++ *is_modifiable_class_ptr = is_modifiable_class ? JNI_TRUE : JNI_FALSE; + return JVMTI_ERROR_NONE; + } /* end IsModifiableClass */ + +@@ -276,6 +279,11 @@ + } + class_definitions[index].klass = jcls; + } ++ if (AllowEnhancedClassRedefinition) { ++ VM_EnhancedRedefineClasses op(class_count, class_definitions, jvmti_class_load_kind_retransform); ++ VMThread::execute(&op); ++ return (op.check_error()); ++ } + VM_RedefineClasses op(class_count, class_definitions, jvmti_class_load_kind_retransform); + VMThread::execute(&op); + return (op.check_error()); +@@ -287,6 +295,11 @@ + jvmtiError + JvmtiEnv::RedefineClasses(jint class_count, const jvmtiClassDefinition* class_definitions) { + //TODO: add locking ++ if (AllowEnhancedClassRedefinition) { ++ VM_EnhancedRedefineClasses op(class_count, class_definitions, jvmti_class_load_kind_redefine); ++ VMThread::execute(&op); ++ return (op.check_error()); ++ } + VM_RedefineClasses op(class_count, class_definitions, jvmti_class_load_kind_redefine); + VMThread::execute(&op); + return (op.check_error()); +diff --git a/src/share/vm/prims/jvmtiExport.hpp b/src/share/vm/prims/jvmtiExport.hpp +--- a/src/share/vm/prims/jvmtiExport.hpp ++++ b/src/share/vm/prims/jvmtiExport.hpp +@@ -188,6 +188,7 @@ + // systems as needed to relax invariant checks. 
+ static bool _has_redefined_a_class; + friend class VM_RedefineClasses; ++ friend class VM_EnhancedRedefineClasses; + inline static void set_has_redefined_a_class() { + JVMTI_ONLY(_has_redefined_a_class = true;) + } +diff --git a/src/share/vm/prims/jvmtiImpl.cpp b/src/share/vm/prims/jvmtiImpl.cpp +--- a/src/share/vm/prims/jvmtiImpl.cpp ++++ b/src/share/vm/prims/jvmtiImpl.cpp +@@ -289,6 +289,11 @@ + Symbol* m_name = _method->name(); + Symbol* m_signature = _method->signature(); + ++ // (DCEVM) Go through old versions of method ++ for (Method* m = _method->old_version(); m != NULL; m = m->old_version()) { ++ (m->*meth_act)(_bci); ++ } ++ + // search previous versions if they exist + PreviousVersionWalker pvw(thread, (InstanceKlass *)ikh()); + for (PreviousVersionNode * pv_node = pvw.next_previous_version(); +diff --git a/src/share/vm/prims/jvmtiRedefineClasses2.cpp b/src/share/vm/prims/jvmtiRedefineClasses2.cpp +new file mode 100644 +--- /dev/null ++++ b/src/share/vm/prims/jvmtiRedefineClasses2.cpp +@@ -0,0 +1,2034 @@ ++/* ++ * Copyright (c) 2003, 2011, Oracle and/or its affiliates. All rights reserved. ++ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. ++ * ++ * This code is free software; you can redistribute it and/or modify it ++ * under the terms of the GNU General Public License version 2 only, as ++ * published by the Free Software Foundation. ++ * ++ * This code is distributed in the hope that it will be useful, but WITHOUT ++ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or ++ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License ++ * version 2 for more details (a copy is included in the LICENSE file that ++ * accompanied this code). ++ * ++ * You should have received a copy of the GNU General Public License version ++ * 2 along with this work; if not, write to the Free Software Foundation, ++ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. ++ * ++ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA ++ * or visit www.oracle.com if you need additional information or have any ++ * questions. 
++ * ++ */ ++ ++#include "precompiled.hpp" ++#include "classfile/systemDictionary.hpp" ++#include "classfile/verifier.hpp" ++#include "code/codeCache.hpp" ++#include "interpreter/oopMapCache.hpp" ++#include "interpreter/rewriter.hpp" ++#include "memory/gcLocker.hpp" ++#include "memory/universe.inline.hpp" ++#include "memory/metaspaceShared.hpp" ++#include "oops/fieldStreams.hpp" ++#include "oops/klassVtable.hpp" ++#include "prims/jvmtiImpl.hpp" ++#include "prims/jvmtiRedefineClasses2.hpp" ++#include "prims/methodComparator.hpp" ++#include "prims/jvmtiClassFileReconstituter.hpp" ++#include "runtime/deoptimization.hpp" ++#include "runtime/relocator.hpp" ++#include "utilities/bitMap.inline.hpp" ++#include "compiler/compileBroker.hpp" ++#include "oops/instanceMirrorKlass.hpp" ++#include "utilities/pair.hpp" ++ ++ ++Array* VM_EnhancedRedefineClasses::_old_methods = NULL; ++Array* VM_EnhancedRedefineClasses::_new_methods = NULL; ++int* VM_EnhancedRedefineClasses::_matching_old_methods = NULL; ++int* VM_EnhancedRedefineClasses::_matching_new_methods = NULL; ++int* VM_EnhancedRedefineClasses::_deleted_methods = NULL; ++int* VM_EnhancedRedefineClasses::_added_methods = NULL; ++int VM_EnhancedRedefineClasses::_matching_methods_length = 0; ++int VM_EnhancedRedefineClasses::_deleted_methods_length = 0; ++int VM_EnhancedRedefineClasses::_added_methods_length = 0; ++GrowableArray* VM_EnhancedRedefineClasses::_affected_klasses = NULL; ++ ++ ++// Holds the revision number of the current class redefinition ++int VM_EnhancedRedefineClasses::_revision_number = -1; ++ ++VM_EnhancedRedefineClasses::VM_EnhancedRedefineClasses(jint class_count, const jvmtiClassDefinition *class_defs, JvmtiClassLoadKind class_load_kind) ++ : VM_GC_Operation(Universe::heap()->total_full_collections(), GCCause::_heap_inspection) { ++ RC_TIMER_START(_timer_total); ++ _class_count = class_count; ++ _class_defs = class_defs; ++ _class_load_kind = class_load_kind; ++ _result = JVMTI_ERROR_NONE; ++} ++ ++VM_EnhancedRedefineClasses::~VM_EnhancedRedefineClasses() { ++ RC_TIMER_STOP(_timer_total); ++} ++ ++void VM_EnhancedRedefineClasses::swap_all_method_annotations(ConstMethod* old_method, ConstMethod* new_method) { ++ return; // FIXME-isd: swap annotations! ++ ++ AnnotationArray* save; ++ ++ save = old_method->method_annotations(); ++ old_method->set_method_annotations(new_method->method_annotations()); ++ new_method->set_method_annotations(save); ++ ++ save = old_method->parameter_annotations(); ++ old_method->set_parameter_annotations(new_method->parameter_annotations()); ++ new_method->set_parameter_annotations(save); ++ ++ save = old_method->default_annotations(); ++ old_method->set_default_annotations(new_method->default_annotations()); ++ new_method->set_default_annotations(save); ++ ++ save = old_method->type_annotations(); ++ old_method->set_type_annotations(new_method->type_annotations()); ++ new_method->set_type_annotations(save); ++} ++ ++void VM_EnhancedRedefineClasses::add_affected_klasses( Klass* klass ) ++{ ++ assert(!_affected_klasses->contains(klass), "must not occur more than once!"); ++ assert(klass->new_version() == NULL, "Only last version is valid entry in system dictionary"); ++ ++ Klass* k = klass; ++ ++ if (k->check_redefinition_flag(Klass::MarkedAsAffected)) { ++ _affected_klasses->append(klass); ++ return; ++ } ++ ++ for (juint i = 0; i < k->super_depth(); i++) { ++ Klass* primary = k->primary_super_of_depth(i); ++ // super_depth returns "8" for interfaces, but they don't have primaries other than Object. 
++ if (primary == NULL) break; ++ if (primary->check_redefinition_flag(Klass::MarkedAsAffected)) { ++ RC_TRACE(0x00000001, ("Found affected class: %s", k->name()->as_C_string())); ++ k->set_redefinition_flag(Klass::MarkedAsAffected); ++ _affected_klasses->append(klass); ++ return; ++ } ++ } ++ ++ // Check secondary supers ++ int cnt = k->secondary_supers()->length(); ++ for (int i = 0; i < cnt; i++) { ++ Klass* secondary = k->secondary_supers()->at(i); ++ if (secondary->check_redefinition_flag(Klass::MarkedAsAffected)) { ++ RC_TRACE(0x00000001, ("Found affected class: %s", k->name()->as_C_string())); ++ k->set_redefinition_flag(Klass::MarkedAsAffected); ++ _affected_klasses->append(klass); ++ return; ++ } ++ } ++} ++ ++ ++// Searches for all affected classes and performs a sorting such that a supertype is always before a subtype. ++jvmtiError VM_EnhancedRedefineClasses::find_sorted_affected_classes() { ++ ++ assert(_affected_klasses, ""); ++ for (int i = 0; i < _class_count; i++) { ++ oop mirror = JNIHandles::resolve_non_null(_class_defs[i].klass); ++ instanceKlassHandle klass_handle(Thread::current(), java_lang_Class::as_Klass(mirror)); ++ klass_handle->set_redefinition_flag(Klass::MarkedAsAffected); ++ assert(klass_handle->new_version() == NULL, "Must be new class"); ++ } ++ ++ // Find classes not directly redefined, but affected by a redefinition (because one of its supertypes is redefined) ++ SystemDictionary::classes_do(VM_EnhancedRedefineClasses::add_affected_klasses); ++ RC_TRACE(0x00000001, ("%d classes affected", _affected_klasses->length())); ++ ++ // Sort the affected klasses such that a supertype is always on a smaller array index than its subtype. ++ jvmtiError result = do_topological_class_sorting(_class_defs, _class_count, Thread::current()); ++ if (RC_TRACE_ENABLED(0x00000001)) { ++ RC_TRACE(0x00000001, ("Redefine order: ")); ++ for (int i = 0; i < _affected_klasses->length(); i++) { ++ RC_TRACE(0x00000001, ("%s", _affected_klasses->at(i)->name()->as_C_string())); ++ } ++ } ++ ++ return result; ++} ++ ++// Searches for the class bytes of the given class and returns them as a byte array. ++jvmtiError VM_EnhancedRedefineClasses::find_class_bytes(instanceKlassHandle the_class, const unsigned char **class_bytes, jint *class_byte_count, jboolean *not_changed) { ++ ++ *not_changed = false; ++ ++ // Search for the index in the redefinition array that corresponds to the current class ++ int j; ++ for (j=0; j<_class_count; j++) { ++ oop mirror = JNIHandles::resolve_non_null(_class_defs[j].klass); ++ Klass* the_class_oop = java_lang_Class::as_Klass(mirror); ++ if (the_class_oop == the_class()) { ++ break; ++ } ++ } ++ ++ if (j == _class_count) { ++ ++ *not_changed = true; ++ ++ // Redefine with same bytecodes. This is a class that is only indirectly affected by redefinition, ++ // so the user did not specify a different bytecode for that class. 
++ ++ if (the_class->get_cached_class_file_bytes() == NULL) { ++ // not cached, we need to reconstitute the class file from VM representation ++ constantPoolHandle constants(Thread::current(), the_class->constants()); ++ MonitorLockerEx ml(constants->lock()); // lock constant pool while we query it ++ //ObjectLocker ol(constants, Thread::current()); // lock constant pool while we query it ++ ++ JvmtiClassFileReconstituter reconstituter(the_class); ++ if (reconstituter.get_error() != JVMTI_ERROR_NONE) { ++ return reconstituter.get_error(); ++ } ++ ++ *class_byte_count = (jint)reconstituter.class_file_size(); ++ *class_bytes = (unsigned char*)reconstituter.class_file_bytes(); ++ } else { ++ ++ // it is cached, get it from the cache ++ *class_byte_count = the_class->get_cached_class_file_len(); ++ *class_bytes = the_class->get_cached_class_file_bytes(); ++ } ++ ++ } else { ++ ++ // Redefine with bytecodes at index j ++ *class_bytes = _class_defs[j].class_bytes; ++ *class_byte_count = _class_defs[j].class_byte_count; ++ } ++ ++ return JVMTI_ERROR_NONE; ++ } ++ ++// Prologue of the VM operation, called on the Java thread in parallel to normal program execution ++bool VM_EnhancedRedefineClasses::doit_prologue() { ++ ++ _revision_number++; ++ RC_TRACE(0x00000001, ++ ("Redefinition with revision number %d started!", _revision_number)); ++ ++ assert(Thread::current()->is_Java_thread(), "must be Java thread"); ++ RC_TIMER_START(_timer_prologue); ++ ++ if (!check_arguments()) { ++ RC_TIMER_STOP(_timer_prologue); ++ return false; ++ } ++ ++ // We first load new class versions in the prologue, because somewhere down the ++ // call chain it is required that the current thread is a Java thread. ++ _new_classes = new (ResourceObj::C_HEAP, mtInternal) GrowableArray(5, true); ++ ++ assert(_affected_klasses == NULL, ""); ++ _affected_klasses = new (ResourceObj::C_HEAP, mtInternal) GrowableArray(_class_count, true); ++ ++ _result = load_new_class_versions(Thread::current()); ++ ++ RC_TRACE(0x00000001, ++ ("Loaded new class versions!")); ++ if (_result != JVMTI_ERROR_NONE) { ++ RC_TRACE(0x00000001, ++ ("error occured: %d!", _result)); ++ delete _new_classes; ++ _new_classes = NULL; ++ delete _affected_klasses; ++ _affected_klasses = NULL; ++ RC_TIMER_STOP(_timer_prologue); ++ return false; ++ } ++ ++ VM_GC_Operation::doit_prologue(); ++ RC_TIMER_STOP(_timer_prologue); ++ ++ RC_TRACE(0x00000001, ("doit_prologue finished!")); ++ return true; ++} ++ ++// Checks basic properties of the arguments of the redefinition command. ++jvmtiError VM_EnhancedRedefineClasses::check_arguments_error() { ++ if (_class_defs == NULL) return JVMTI_ERROR_NULL_POINTER; ++ for (int i = 0; i < _class_count; i++) { ++ if (_class_defs[i].klass == NULL) return JVMTI_ERROR_INVALID_CLASS; ++ if (_class_defs[i].class_byte_count == 0) return JVMTI_ERROR_INVALID_CLASS_FORMAT; ++ if (_class_defs[i].class_bytes == NULL) return JVMTI_ERROR_NULL_POINTER; ++ } ++ return JVMTI_ERROR_NONE; ++ } ++ ++// Returns false and sets an result error code if the redefinition should be aborted. 
++bool VM_EnhancedRedefineClasses::check_arguments() { ++ jvmtiError error = check_arguments_error(); ++ if (error != JVMTI_ERROR_NONE || _class_count == 0) { ++ _result = error; ++ return false; ++ } ++ return true; ++} ++ ++jvmtiError VM_EnhancedRedefineClasses::check_exception() const { ++ Thread* THREAD = Thread::current(); ++ if (HAS_PENDING_EXCEPTION) { ++ ++ Symbol* ex_name = PENDING_EXCEPTION->klass()->name(); ++ RC_TRACE_WITH_THREAD(0x00000002, THREAD, ("parse_stream exception: '%s'", ex_name->as_C_string())); ++ CLEAR_PENDING_EXCEPTION; ++ ++ if (ex_name == vmSymbols::java_lang_UnsupportedClassVersionError()) { ++ return JVMTI_ERROR_UNSUPPORTED_VERSION; ++ } else if (ex_name == vmSymbols::java_lang_ClassFormatError()) { ++ return JVMTI_ERROR_INVALID_CLASS_FORMAT; ++ } else if (ex_name == vmSymbols::java_lang_ClassCircularityError()) { ++ return JVMTI_ERROR_CIRCULAR_CLASS_DEFINITION; ++ } else if (ex_name == vmSymbols::java_lang_NoClassDefFoundError()) { ++ // The message will be "XXX (wrong name: YYY)" ++ return JVMTI_ERROR_NAMES_DONT_MATCH; ++ } else if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) { ++ return JVMTI_ERROR_OUT_OF_MEMORY; ++ } else { ++ // Just in case more exceptions can be thrown.. ++ return JVMTI_ERROR_FAILS_VERIFICATION; ++ } ++ } ++ ++ return JVMTI_ERROR_NONE; ++} ++ ++// Loads all new class versions and stores the InstanceKlass handles in an array. ++jvmtiError VM_EnhancedRedefineClasses::load_new_class_versions(TRAPS) { ++ ++ ResourceMark rm(THREAD); ++ ++ RC_TRACE(0x00000001, ++ ("loading new class versions (%d)", _class_count)); ++ ++ // Retrieve an array of all classes that need to be redefined ++ jvmtiError err = find_sorted_affected_classes(); ++ if (err != JVMTI_ERROR_NONE) { ++ RC_TRACE(0x00000001, ++ ("Error finding sorted affected classes: %d", (int)err)); ++ return err; ++ } ++ ++ ++ JvmtiThreadState *state = JvmtiThreadState::state_for(JavaThread::current()); ++ ++ _max_redefinition_flags = Klass::NoRedefinition; ++ jvmtiError result = JVMTI_ERROR_NONE; ++ ++ for (int i = 0; i < _affected_klasses->length(); i++) { ++ instanceKlassHandle the_class = _affected_klasses->at(i); ++ ++ RC_TRACE(0x00000001, ++ ("Processing affected class %s (%d of %d)", ++ the_class->name()->as_C_string(), ++ i + 1, ++ _affected_klasses->length())); ++ ++ the_class->link_class(THREAD); ++ result = check_exception(); ++ if (result != JVMTI_ERROR_NONE) break; ++ ++ // Find new class bytes ++ const unsigned char* class_bytes; ++ jint class_byte_count; ++ jvmtiError error; ++ jboolean not_changed; ++ if ((error = find_class_bytes(the_class, &class_bytes, &class_byte_count, ¬_changed)) != JVMTI_ERROR_NONE) { ++ RC_TRACE_WITH_THREAD(0x00000002, THREAD, ++ ("Error finding class bytes: %d", (int)error)); ++ result = error; ++ break; ++ } ++ assert(class_bytes != NULL && class_byte_count != 0, "Class bytes defined at this point!"); ++ ++ ++ // Set redefined class handle in JvmtiThreadState class. ++ // This redefined class is sent to agent event handler for class file ++ // load hook event. ++ state->set_class_being_redefined(&the_class, _class_load_kind); ++ ++ RC_TIMER_STOP(_timer_prologue); ++ RC_TIMER_START(_timer_class_loading); ++ ++ // Parse the stream. 
++ Handle the_class_loader(THREAD, the_class->class_loader()); ++ Handle protection_domain(THREAD, the_class->protection_domain()); ++ ClassFileStream st((u1*) class_bytes, class_byte_count, (char *)"__VM_EhnancedRedefineClasses__"); ++ ++ Klass* klass = ++ SystemDictionary::resolve_from_stream( ++ the_class->name(), ++ the_class_loader, ++ protection_domain, ++ &st, ++ true, ++ the_class, ++ THREAD); ++ instanceKlassHandle new_class(THREAD, klass); ++ ++ RC_TIMER_STOP(_timer_class_loading); ++ RC_TIMER_START(_timer_prologue); ++ ++ // Clear class_being_redefined just to be sure. ++ state->clear_class_being_redefined(); ++ ++ result = check_exception(); ++ if (result != JVMTI_ERROR_NONE) break; ++ ++ not_changed = false; ++ ++#ifdef ASSERT ++ ++ assert(new_class() != NULL, "Class could not be loaded!"); ++ assert(new_class() != the_class(), "must be different"); ++ assert(new_class->new_version() == NULL && new_class->old_version() != NULL, ""); ++ ++ ++ Array* k_interfaces = new_class->local_interfaces(); ++ for (int j = 0; j < k_interfaces->length(); j++) { ++ assert(k_interfaces->at(j)->is_newest_version(), "just checking"); ++ } ++ ++ if (!THREAD->is_Compiler_thread()) { ++ RC_TRACE(0x00000001, ("name=%s loader="INTPTR_FORMAT" protection_domain="INTPTR_FORMAT, ++ the_class->name()->as_C_string(), ++ (intptr_t) (oopDesc*) the_class->class_loader(), ++ (intptr_t) (oopDesc*) the_class->protection_domain())); ++ // If we are on the compiler thread, we must not try to resolve a class. ++ Klass* systemLookup = SystemDictionary::resolve_or_null(the_class->name(), the_class->class_loader(), the_class->protection_domain(), THREAD); ++ ++ if (systemLookup != NULL) { ++ assert(systemLookup == new_class->old_version(), "Old class must be in system dictionary!"); ++ Klass *subklass = new_class()->subklass(); ++ while (subklass != NULL) { ++ assert(subklass->new_version() == NULL, "Most recent version of class!"); ++ subklass = subklass->next_sibling(); ++ } ++ } else { ++ // This can happen for reflection generated classes.. ? 
++ CLEAR_PENDING_EXCEPTION; ++ } ++ } ++ ++#endif ++ ++ if (RC_TRACE_ENABLED(0x00000001)) { ++ if (new_class->layout_helper() != the_class->layout_helper()) { ++ RC_TRACE(0x00000001, ++ ("Instance size change for class %s: new=%d old=%d", ++ new_class->name()->as_C_string(), ++ new_class->layout_helper(), ++ the_class->layout_helper())); ++ } ++ } ++ ++ // Set the new version of the class ++ new_class->set_revision_number(_revision_number); ++ new_class->set_redefinition_index(i); ++ the_class->set_new_version(new_class()); ++ _new_classes->append(new_class); ++ ++ assert(new_class->new_version() == NULL, ""); ++ ++ int redefinition_flags = Klass::NoRedefinition; ++ ++ if (not_changed) { ++ redefinition_flags = Klass::NoRedefinition; ++ } else { ++ redefinition_flags = calculate_redefinition_flags(new_class); ++ if (redefinition_flags >= Klass::RemoveSuperType) { ++ result = JVMTI_ERROR_UNSUPPORTED_REDEFINITION_HIERARCHY_CHANGED; ++ break; ++ } ++ } ++ ++ if (new_class->super() != NULL) { ++ redefinition_flags = redefinition_flags | new_class->super()->redefinition_flags(); ++ } ++ ++ for (int j = 0; jlocal_interfaces()->length(); j++) { ++ redefinition_flags = redefinition_flags | (new_class->local_interfaces()->at(j))->redefinition_flags(); ++ } ++ ++ new_class->set_redefinition_flags(redefinition_flags); ++ ++ _max_redefinition_flags = _max_redefinition_flags | redefinition_flags; ++ ++ if ((redefinition_flags & Klass::ModifyInstances) != 0) { ++ // TODO: Check if watch access flags of static fields are updated correctly. ++ calculate_instance_update_information(_new_classes->at(i)()); ++ } else { ++ // Fields were not changed, transfer special flags only ++ assert(new_class->layout_helper() >> 1 == new_class->old_version()->layout_helper() >> 1, "must be equal"); ++ assert(new_class->fields()->length() == InstanceKlass::cast(new_class->old_version())->fields()->length(), "must be equal"); ++ ++ JavaFieldStream old_fs(the_class); ++ JavaFieldStream new_fs(new_class); ++ for (; !old_fs.done() && !new_fs.done(); old_fs.next(), new_fs.next()) { ++ AccessFlags flags = new_fs.access_flags(); ++ flags.set_is_field_modification_watched(old_fs.access_flags().is_field_modification_watched()); ++ flags.set_is_field_access_watched(old_fs.access_flags().is_field_access_watched()); ++ new_fs.set_access_flags(flags); ++ } ++ } ++ ++ if (RC_TRACE_ENABLED(0x00000001)) { ++ RC_TRACE(0x00000001, ++ ("Super class is %s", new_class->super()->name()->as_C_string())); ++ } ++ ++#ifdef ASSERT ++ assert(new_class->super() == NULL || new_class->super()->new_version() == NULL, "Super klass must be newest version!"); ++ ++ the_class->vtable()->verify(tty); ++ new_class->vtable()->verify(tty); ++#endif ++ ++ if (i == _affected_klasses->length() - 1) { ++ // This was the last class processed => check if additional classes have been loaded in the meantime ++ for (int j = 0; j<_affected_klasses->length(); j++) { ++ ++ Klass* initial_klass = _affected_klasses->at(j)(); ++ Klass *initial_subklass = initial_klass->subklass(); ++ Klass *cur_klass = initial_subklass; ++ while(cur_klass != NULL) { ++ ++ if(cur_klass->oop_is_instance() && cur_klass->is_newest_version() && !cur_klass->is_redefining()) { ++ instanceKlassHandle handle(THREAD, cur_klass); ++ if (!_affected_klasses->contains(handle)) { ++ ++ int k = i + 1; ++ for (; k<_affected_klasses->length(); k++) { ++ if (_affected_klasses->at(k)->is_subtype_of(cur_klass)) { ++ break; ++ } ++ } ++ _affected_klasses->insert_before(k, handle); ++ RC_TRACE(0x00000001, ++ 
("Adding newly loaded class to affected classes: %s", cur_klass->name()->as_C_string())); ++ } ++ } ++ ++ cur_klass = cur_klass->next_sibling(); ++ } ++ } ++ ++ int new_count = _affected_klasses->length() - 1 - i; ++ if (new_count != 0) { ++ RC_TRACE(0x00000001, ++ ("Found new number of affected classes: %d", new_count)); ++ } ++ } ++ } ++ ++ if (result != JVMTI_ERROR_NONE) { ++ rollback(); ++ return result; ++ } ++ ++ RC_TIMER_STOP(_timer_prologue); ++ RC_TIMER_START(_timer_class_linking); ++ // Link and verify new classes _after_ all classes have been updated in the system dictionary! ++ for (int i=0; i<_affected_klasses->length(); i++) { ++ instanceKlassHandle the_class = _affected_klasses->at(i); ++ instanceKlassHandle new_class(the_class->new_version()); ++ ++ RC_TRACE(0x00000001, ++ ("Linking class %d/%d %s", i, _affected_klasses->length(), the_class->name()->as_C_string())); ++ new_class->link_class(THREAD); ++ ++ result = check_exception(); ++ if (result != JVMTI_ERROR_NONE) break; ++ } ++ RC_TIMER_STOP(_timer_class_linking); ++ RC_TIMER_START(_timer_prologue); ++ ++ if (result != JVMTI_ERROR_NONE) { ++ rollback(); ++ return result; ++ } ++ ++ RC_TRACE(0x00000001, ("All classes loaded!")); ++ ++#ifdef ASSERT ++ for (int i=0; i<_affected_klasses->length(); i++) { ++ instanceKlassHandle the_class = _affected_klasses->at(i); ++ assert(the_class->new_version() != NULL, "Must have been redefined"); ++ instanceKlassHandle new_version = instanceKlassHandle(THREAD, the_class->new_version()); ++ assert(new_version->new_version() == NULL, "Must be newest version"); ++ ++ if (!(new_version->super() == NULL || new_version->super()->new_version() == NULL)) { ++ new_version()->print(); ++ new_version->super()->print(); ++ } ++ assert(new_version->super() == NULL || new_version->super()->new_version() == NULL, "Super class must be newest version"); ++ } ++ ++ SystemDictionary::classes_do(check_class, THREAD); ++ ++#endif ++ ++ RC_TRACE(0x00000001, ("Finished verification!")); ++ return JVMTI_ERROR_NONE; ++} ++ ++int VM_EnhancedRedefineClasses::calculate_redefinition_flags(instanceKlassHandle new_class) { ++ ++ int result = Klass::NoRedefinition; ++ RC_TRACE(0x00000001, ++ ("Comparing different class versions of class %s", new_class->name()->as_C_string())); ++ ++ assert(new_class->old_version() != NULL, "must have old version"); ++ instanceKlassHandle the_class(new_class->old_version()); ++ ++ // Check whether class is in the error init state. 
++  if (the_class->is_in_error_state()) {
++    // TBD #5057930: special error code is needed in 1.6
++    //result = Klass::union_redefinition_level(result, Klass::Invalid);
++  }
++
++  int i;
++
++  //////////////////////////////////////////////////////////////////////////////////////////////////////////
++  // Check superclasses
++  assert(new_class->super() == NULL || new_class->super()->is_newest_version(), "");
++  if (the_class->super() != new_class->super()) {
++    // Super class changed
++    Klass* cur_klass = the_class->super();
++    while (cur_klass != NULL) {
++      if (!new_class->is_subclass_of(cur_klass->newest_version())) {
++        RC_TRACE(0x00000001,
++          ("Removed super class %s", cur_klass->name()->as_C_string()));
++        result = result | Klass::RemoveSuperType | Klass::ModifyInstances | Klass::ModifyClass;
++
++        if (!cur_klass->has_subtype_changed()) {
++          RC_TRACE(0x00000001,
++            ("Subtype changed of class %s", cur_klass->name()->as_C_string()));
++          cur_klass->set_subtype_changed(true);
++        }
++      }
++
++      cur_klass = cur_klass->super();
++    }
++
++    cur_klass = new_class->super();
++    while (cur_klass != NULL) {
++      if (!the_class->is_subclass_of(cur_klass->old_version())) {
++        RC_TRACE(0x00000001,
++          ("Added super class %s", cur_klass->name()->as_C_string()));
++        result = result | Klass::ModifyClass | Klass::ModifyInstances;
++      }
++      cur_klass = cur_klass->super();
++    }
++  }
++
++  //////////////////////////////////////////////////////////////////////////////////////////////////////////
++  // Check interfaces
++
++  // Interfaces removed?
++  Array<Klass*>* old_interfaces = the_class->transitive_interfaces();
++  for (i = 0; i<old_interfaces->length(); i++) {
++    instanceKlassHandle old_interface(old_interfaces->at(i));
++    if (!new_class->implements_interface_any_version(old_interface())) {
++      result = result | Klass::RemoveSuperType | Klass::ModifyClass;
++      RC_TRACE(0x00000001,
++        ("Removed interface %s", old_interface->name()->as_C_string()));
++
++      if (!old_interface->has_subtype_changed()) {
++        RC_TRACE(0x00000001,
++          ("Subtype changed of interface %s", old_interface->name()->as_C_string()));
++        old_interface->set_subtype_changed(true);
++      }
++    }
++  }
++
++  // Interfaces added?
++  Array<Klass*>* new_interfaces = new_class->transitive_interfaces();
++  for (i = 0; i<new_interfaces->length(); i++) {
++    if (!the_class->implements_interface_any_version(new_interfaces->at(i))) {
++      result = result | Klass::ModifyClass;
++      RC_TRACE(0x00000001,
++        ("Added interface %s", new_interfaces->at(i)->name()->as_C_string()));
++    }
++  }
++
++
++  // Check whether class modifiers are the same.
++  jushort old_flags = (jushort) the_class->access_flags().get_flags();
++  jushort new_flags = (jushort) new_class->access_flags().get_flags();
++  if (old_flags != new_flags) {
++    // TODO Can this have any effects?
++  }
++
++  // Check if the number, names, types and order of fields declared in these classes
++  // are the same.
++  JavaFieldStream old_fs(the_class);
++  JavaFieldStream new_fs(new_class);
++  for (; !old_fs.done() && !new_fs.done(); old_fs.next(), new_fs.next()) {
++    // access
++    old_flags = old_fs.access_flags().as_short();
++    new_flags = new_fs.access_flags().as_short();
++    if ((old_flags ^ new_flags) & JVM_RECOGNIZED_FIELD_MODIFIERS) {
++      // TODO can this have any effect?
++ } ++ // offset ++ if (old_fs.offset() != new_fs.offset()) { ++ result = result | Klass::ModifyInstances; ++ } ++ // name and signature ++ Symbol* name_sym1 = the_class->constants()->symbol_at(old_fs.name_index()); ++ Symbol* sig_sym1 = the_class->constants()->symbol_at(old_fs.signature_index()); ++ Symbol* name_sym2 = new_class->constants()->symbol_at(new_fs.name_index()); ++ Symbol* sig_sym2 = new_class->constants()->symbol_at(new_fs.signature_index()); ++ if (name_sym1 != name_sym2 || sig_sym1 != sig_sym2) { ++ result = result | Klass::ModifyInstances; ++ } ++ } ++ ++ // If both streams aren't done then we have a differing number of ++ // fields. ++ if (!old_fs.done() || !new_fs.done()) { ++ result = result | Klass::ModifyInstances; ++ } ++ ++ // Do a parallel walk through the old and new methods. Detect ++ // cases where they match (exist in both), have been added in ++ // the new methods, or have been deleted (exist only in the ++ // old methods). The class file parser places methods in order ++ // by method name, but does not order overloaded methods by ++ // signature. In order to determine what fate befell the methods, ++ // this code places the overloaded new methods that have matching ++ // old methods in the same order as the old methods and places ++ // new overloaded methods at the end of overloaded methods of ++ // that name. The code for this order normalization is adapted ++ // from the algorithm used in InstanceKlass::find_method(). ++ // Since we are swapping out of order entries as we find them, ++ // we only have to search forward through the overloaded methods. ++ // Methods which are added and have the same name as an existing ++ // method (but different signature) will be put at the end of ++ // the methods with that name, and the name mismatch code will ++ // handle them. ++ Array* k_old_methods(the_class->methods()); // FIXME-isd: handles??? ++ Array* k_new_methods(new_class->methods()); ++ int n_old_methods = k_old_methods->length(); ++ int n_new_methods = k_new_methods->length(); ++ ++ int ni = 0; ++ int oi = 0; ++ while (true) { ++ Method* k_old_method; ++ Method* k_new_method; ++ enum { matched, added, deleted, undetermined } method_was = undetermined; ++ ++ if (oi >= n_old_methods) { ++ if (ni >= n_new_methods) { ++ break; // we've looked at everything, done ++ } ++ // New method at the end ++ k_new_method = k_new_methods->at(ni); ++ method_was = added; ++ } else if (ni >= n_new_methods) { ++ // Old method, at the end, is deleted ++ k_old_method = k_old_methods->at(oi); ++ method_was = deleted; ++ } else { ++ // There are more methods in both the old and new lists ++ k_old_method = k_old_methods->at(oi); ++ k_new_method = k_new_methods->at(ni); ++ if (k_old_method->name() != k_new_method->name()) { ++ // Methods are sorted by method name, so a mismatch means added ++ // or deleted ++ if (k_old_method->name()->fast_compare(k_new_method->name()) > 0) { ++ method_was = added; ++ } else { ++ method_was = deleted; ++ } ++ } else if (k_old_method->signature() == k_new_method->signature()) { ++ // Both the name and signature match ++ method_was = matched; ++ } else { ++ // The name matches, but the signature doesn't, which means we have to ++ // search forward through the new overloaded methods. 
++ int nj; // outside the loop for post-loop check ++ for (nj = ni + 1; nj < n_new_methods; nj++) { ++ Method* m = k_new_methods->at(nj); ++ if (k_old_method->name() != m->name()) { ++ // reached another method name so no more overloaded methods ++ method_was = deleted; ++ break; ++ } ++ if (k_old_method->signature() == m->signature()) { ++ // found a match so swap the methods ++ k_new_methods->at_put(ni, m); ++ k_new_methods->at_put(nj, k_new_method); ++ k_new_method = m; ++ method_was = matched; ++ break; ++ } ++ } ++ ++ if (nj >= n_new_methods) { ++ // reached the end without a match; so method was deleted ++ method_was = deleted; ++ } ++ } ++ } ++ ++ switch (method_was) { ++ case matched: ++ // methods match, be sure modifiers do too ++ old_flags = (jushort) k_old_method->access_flags().get_flags(); ++ new_flags = (jushort) k_new_method->access_flags().get_flags(); ++ if ((old_flags ^ new_flags) & ~(JVM_ACC_NATIVE)) { ++ // TODO Can this have any effects? Probably yes on vtables? ++ result = result | Klass::ModifyClass; ++ } ++ { ++ u2 new_num = k_new_method->method_idnum(); ++ u2 old_num = k_old_method->method_idnum(); ++ if (new_num != old_num) { ++ Method* idnum_owner = new_class->method_with_idnum(old_num); ++ if (idnum_owner != NULL) { ++ // There is already a method assigned this idnum -- switch them ++ idnum_owner->set_method_idnum(new_num); ++ } ++ k_new_method->set_method_idnum(old_num); ++ RC_TRACE(0x00008000, ++ ("swapping idnum of new and old method %d / %d!", new_num, old_num); ++ swap_all_method_annotations(k_old_method->constMethod(), k_new_method->constMethod())); ++ } ++ } ++ RC_TRACE(0x00008000, ("Method matched: new: %s [%d] == old: %s [%d]", ++ k_new_method->name_and_sig_as_C_string(), ni, ++ k_old_method->name_and_sig_as_C_string(), oi)); ++ // advance to next pair of methods ++ ++oi; ++ ++ni; ++ break; ++ case added: ++ // method added, see if it is OK ++ new_flags = (jushort) k_new_method->access_flags().get_flags(); ++ if ((new_flags & JVM_ACC_PRIVATE) == 0 ++ // hack: private should be treated as final, but alas ++ || (new_flags & (JVM_ACC_FINAL|JVM_ACC_STATIC)) == 0) { ++ // new methods must be private ++ result = result | Klass::ModifyClass; ++ } ++ { ++ u2 num = the_class->next_method_idnum(); ++ if (num == ConstMethod::UNSET_IDNUM) { ++ // cannot add any more methods ++ result = result | Klass::ModifyClass; ++ } ++ u2 new_num = k_new_method->method_idnum(); ++ Method* idnum_owner = new_class->method_with_idnum(num); ++ if (idnum_owner != NULL) { ++ // There is already a method assigned this idnum -- switch them ++ idnum_owner->set_method_idnum(new_num); ++ } ++ k_new_method->set_method_idnum(num); ++ swap_all_method_annotations(k_old_method->constMethod(), k_new_method->constMethod()); ++ } ++ RC_TRACE(0x00008000, ("Method added: new: %s [%d]", ++ k_new_method->name_and_sig_as_C_string(), ni)); ++ ++ni; // advance to next new method ++ break; ++ case deleted: ++ // method deleted, see if it is OK ++ old_flags = (jushort) k_old_method->access_flags().get_flags(); ++ if ((old_flags & JVM_ACC_PRIVATE) == 0 ++ // hack: private should be treated as final, but alas ++ || (old_flags & (JVM_ACC_FINAL|JVM_ACC_STATIC)) == 0 ++ ) { ++ // deleted methods must be private ++ result = result | Klass::ModifyClass; ++ } ++ RC_TRACE(0x00008000, ("Method deleted: old: %s [%d]", ++ k_old_method->name_and_sig_as_C_string(), oi)); ++ ++oi; // advance to next old method ++ break; ++ default: ++ ShouldNotReachHere(); ++ } ++ } ++ ++ if (new_class()->size() != 
new_class->old_version()->size()) {
++    result |= Klass::ModifyClassSize;
++  }
++
++  if (new_class->size_helper() != ((InstanceKlass*)(new_class->old_version()))->size_helper()) {
++    result |= Klass::ModifyInstanceSize;
++  }
++
++  // TODO Check method bodies to be able to return NoChange?
++  return result;
++}
++
++void VM_EnhancedRedefineClasses::calculate_instance_update_information(Klass* new_version) {
++
++  class CalculateFieldUpdates : public FieldClosure {
++
++  private:
++    InstanceKlass* _old_ik;
++    GrowableArray<int> _update_info;
++    int _position;
++    bool _copy_backwards;
++
++  public:
++
++    bool does_copy_backwards() {
++      return _copy_backwards;
++    }
++
++    CalculateFieldUpdates(InstanceKlass* old_ik) :
++      _old_ik(old_ik), _position(instanceOopDesc::base_offset_in_bytes()), _copy_backwards(false) {
++      _update_info.append(_position);
++      _update_info.append(0);
++    }
++
++    GrowableArray<int> &finish() {
++      _update_info.append(0);
++      return _update_info;
++    }
++
++    void do_field(fieldDescriptor* fd) {
++      int alignment = fd->offset() - _position;
++      if (alignment > 0) {
++        // This field was aligned, so we need to make sure that we fill the gap
++        fill(alignment);
++      }
++
++      assert(_position == fd->offset(), "must be correct offset!");
++
++      fieldDescriptor old_fd;
++      if (_old_ik->find_field(fd->name(), fd->signature(), false, &old_fd) != NULL) {
++        // Found field in the old class, copy
++        copy(old_fd.offset(), type2aelembytes(fd->field_type()));
++
++        if (old_fd.offset() < fd->offset()) {
++          _copy_backwards = true;
++        }
++
++        // Transfer special flags
++        fd->set_is_field_modification_watched(old_fd.is_field_modification_watched());
++        fd->set_is_field_access_watched(old_fd.is_field_access_watched());
++      } else {
++        // New field, fill
++        fill(type2aelembytes(fd->field_type()));
++      }
++    }
++
++  private:
++
++    void fill(int size) {
++      if (_update_info.length() > 0 && _update_info.at(_update_info.length() - 1) < 0) {
++        (*_update_info.adr_at(_update_info.length() - 1)) -= size;
++      } else {
++        _update_info.append(-size);
++      }
++      _position += size;
++    }
++
++    void copy(int offset, int size) {
++      int prev_end = -1;
++      if (_update_info.length() > 0 && _update_info.at(_update_info.length() - 1) > 0) {
++        prev_end = _update_info.at(_update_info.length() - 2) + _update_info.at(_update_info.length() - 1);
++      }
++
++      if (prev_end == offset) {
++        (*_update_info.adr_at(_update_info.length() - 2)) += size;
++      } else {
++        _update_info.append(size);
++        _update_info.append(offset);
++      }
++
++      _position += size;
++    }
++  };
++
++  InstanceKlass* ik = InstanceKlass::cast(new_version);
++  InstanceKlass* old_ik = InstanceKlass::cast(new_version->old_version());
++  CalculateFieldUpdates cl(old_ik);
++  ik->do_nonstatic_fields(&cl);
++
++  GrowableArray<int> result = cl.finish();
++  ik->store_update_information(result);
++  ik->set_copying_backwards(cl.does_copy_backwards());
++
++
++  if (RC_TRACE_ENABLED(0x00000001)) {
++    RC_TRACE(0x00000001, ("Instance update information for %s:", new_version->name()->as_C_string()));
++    if (cl.does_copy_backwards()) {
++      RC_TRACE(0x00000001, ("\tDoes copy backwards!"));
++    }
++    for (int i=0; i<result.length(); i++) {
++      int curNum = result.at(i);
++      if (curNum < 0) {
++        RC_TRACE(0x00000001, ("\t%d CLEAN", curNum));
++      } else if (curNum > 0) {
++        RC_TRACE(0x00000001, ("\t%d COPY from %d", curNum, result.at(i + 1)));
++        i++;
++      } else {
++        RC_TRACE(0x00000001, ("\tEND"));
++      }
++    }
++  }
++}
++
++void VM_EnhancedRedefineClasses::rollback() {
++  RC_TRACE(0x00000001, ("Rolling back redefinition!"));
++  SystemDictionary::rollback_redefinition();
++
++  for (int i=0; i<_new_classes->length(); i++) {
++
SystemDictionary::remove_from_hierarchy(_new_classes->at(i)); ++ } ++ ++ for (int i=0; i<_new_classes->length(); i++) { ++ instanceKlassHandle new_class = _new_classes->at(i); ++ new_class->set_redefining(false); ++ new_class->old_version()->set_new_version(NULL); ++ new_class->set_old_version(NULL); ++ } ++ ++} ++ ++void VM_EnhancedRedefineClasses::swap_marks(oop first, oop second) { ++ markOop first_mark = first->mark(); ++ markOop second_mark = second->mark(); ++ first->set_mark(second_mark); ++ second->set_mark(first_mark); ++} ++ ++class FieldCopier : public FieldClosure { ++ public: ++ void do_field(fieldDescriptor* fd) { ++ InstanceKlass* cur = InstanceKlass::cast(fd->field_holder()); ++ oop cur_oop = cur->java_mirror(); ++ ++ InstanceKlass* old = InstanceKlass::cast(cur->old_version()); ++ oop old_oop = old->java_mirror(); ++ ++ fieldDescriptor result; ++ bool found = old->find_local_field(fd->name(), fd->signature(), &result); ++ if (found && result.is_static()) { ++ RC_TRACE(0x00000001, ("Copying static field value for field %s old_offset=%d new_offset=%d", ++ fd->name()->as_C_string(), result.offset(), fd->offset())); ++ memcpy(cur_oop->obj_field_addr(fd->offset()), ++ old_oop->obj_field_addr(result.offset()), ++ type2aelembytes(fd->field_type())); ++ ++ // Static fields may have references to java.lang.Class ++ if (fd->field_type() == T_OBJECT) { ++ oop oop = cur_oop->obj_field(fd->offset()); ++ if (oop != NULL && oop->is_instanceMirror()) { ++ Klass* klass = java_lang_Class::as_Klass(oop); ++ if (klass != NULL && klass->oop_is_instance()) { ++ assert(oop == InstanceKlass::cast(klass)->java_mirror(), "just checking"); ++ if (klass->new_version() != NULL) { ++ oop = InstanceKlass::cast(klass->new_version())->java_mirror(); ++ cur_oop->obj_field_put(fd->offset(), oop); ++ } ++ } ++ } ++ } ++ } ++ } ++}; ++ ++void VM_EnhancedRedefineClasses::mark_as_scavengable(nmethod* nm) { ++ if (!nm->on_scavenge_root_list()) { ++ CodeCache::add_scavenge_root_nmethod(nm); ++ } ++} ++ ++struct StoreBarrier { ++ template static void oop_store(T* p, oop v) { ::oop_store(p, v); } ++}; ++ ++struct StoreNoBarrier { ++ template static void oop_store(T* p, oop v) { oopDesc::encode_store_heap_oop_not_null(p, v); } ++}; ++ ++template ++class ChangePointersOopClosure : public ExtendedOopClosure { ++ // Forward pointers to InstanceKlass and mirror class to new versions ++ template ++ inline void do_oop_work(T* p) { ++ oop oop = oopDesc::load_decode_heap_oop(p); ++ if (oop == NULL) { ++ return; ++ } ++ if (oop->is_instanceMirror()) { ++ Klass* klass = java_lang_Class::as_Klass(oop); ++ if (klass != NULL && klass->oop_is_instance()) { ++ assert(oop == InstanceKlass::cast(klass)->java_mirror(), "just checking"); ++ if (klass->new_version() != NULL) { ++ oop = InstanceKlass::cast(klass->new_version())->java_mirror(); ++ S::oop_store(p, oop); ++ } ++ } ++ } ++ } ++ ++ virtual void do_oop(oop* o) { ++ do_oop_work(o); ++ } ++ ++ virtual void do_oop(narrowOop* o) { ++ do_oop_work(o); ++ } ++}; ++ ++void VM_EnhancedRedefineClasses::doit() { ++ ++ Thread *thread = Thread::current(); ++ ++ assert((_max_redefinition_flags & Klass::RemoveSuperType) == 0, "removing super types not allowed"); ++ ++ if (UseSharedSpaces) { ++ // Sharing is enabled so we remap the shared readonly space to ++ // shared readwrite, private just in case we need to redefine ++ // a shared class. We do the remap during the doit() phase of ++ // the safepoint to be safer. 
++ if (!MetaspaceShared::remap_shared_readonly_as_readwrite()) { ++ RC_TRACE(0x00000001, ++ ("failed to remap shared readonly space to readwrite, private")); ++ _result = JVMTI_ERROR_INTERNAL; ++ return; ++ } ++ } ++ ++ RC_TIMER_START(_timer_prepare_redefinition); ++ for (int i = 0; i < _new_classes->length(); i++) { ++ redefine_single_class(_new_classes->at(i), thread); ++ } ++ ++ // Deoptimize all compiled code that depends on this class ++ flush_dependent_code(instanceKlassHandle(Thread::current(), (Klass*)NULL), Thread::current()); ++ ++ // Adjust constantpool caches and vtables for all classes ++ // that reference methods of the evolved class. ++ SystemDictionary::classes_do(adjust_cpool_cache, Thread::current()); ++ ++ RC_TIMER_STOP(_timer_prepare_redefinition); ++ RC_TIMER_START(_timer_heap_iteration); ++ ++ class ChangePointersObjectClosure : public ObjectClosure { ++ ++ private: ++ ++ OopClosure *_closure; ++ bool _needs_instance_update; ++ oop _tmp_obj; ++ int _tmp_obj_size; ++ ++ public: ++ ChangePointersObjectClosure(OopClosure *closure) : _closure(closure), _needs_instance_update(false), _tmp_obj(NULL), _tmp_obj_size(0) {} ++ ++ bool needs_instance_update() { ++ return _needs_instance_update; ++ } ++ ++ void copy_to_tmp(oop o) { ++ int size = o->size(); ++ if (_tmp_obj_size < size) { ++ _tmp_obj_size = size; ++ _tmp_obj = (oop)resource_allocate_bytes(size * HeapWordSize); ++ } ++ Copy::aligned_disjoint_words((HeapWord*)o, (HeapWord*)_tmp_obj, size); ++ } ++ ++ virtual void do_object(oop obj) { ++ // FIXME: if (obj->is_instanceKlass()) return; ++ if (obj->is_instanceMirror()) { ++ // static fields may have references to old java.lang.Class instances, update them ++ // at the same time, we don't want to update other oops in the java.lang.Class ++ // Causes SIGSEGV? ++ //instanceMirrorKlass::oop_fields_iterate(obj, _closure); ++ } else { ++ obj->oop_iterate_no_header(_closure); ++ } ++ ++ if (obj->klass()->new_version() != NULL) { ++ Klass* new_klass = obj->klass()->new_version(); ++ /* FIXME: if (obj->is_perm()) { ++ _needs_instance_update = true; ++ } else */if(new_klass->update_information() != NULL) { ++ int size_diff = obj->size() - obj->size_given_klass(new_klass); ++ ++ // Either new size is bigger or gap is to small to be filled ++ if (size_diff < 0 || (size_diff > 0 && (size_t) size_diff < CollectedHeap::min_fill_size())) { ++ // We need an instance update => set back to old klass ++ _needs_instance_update = true; ++ } else { ++ oop src = obj; ++ if (new_klass->is_copying_backwards()) { ++ copy_to_tmp(obj); ++ src = _tmp_obj; ++ } ++ src->set_klass(obj->klass()->new_version()); ++ MarkSweep::update_fields(obj, src, new_klass->update_information()); ++ ++ if (size_diff > 0) { ++ HeapWord* dead_space = ((HeapWord *)obj) + obj->size(); ++ CollectedHeap::fill_with_object(dead_space, size_diff); ++ } ++ } ++ } else { ++ obj->set_klass(obj->klass()->new_version()); ++ } ++ } ++ } ++ }; ++ ++ ChangePointersOopClosure oopClosureNoBarrier; ++ ChangePointersOopClosure oopClosure; ++ ChangePointersObjectClosure objectClosure(&oopClosure); ++ ++ RC_TRACE(0x00000001, ("Before updating instances")); ++ { ++ // Since we may update oops inside nmethod's code blob to point to java.lang.Class in new generation, we need to ++ // make sure such references are properly recognized by GC. For that, If ScavengeRootsInCode is true, we need to ++ // mark such nmethod's as "scavengable". 
++ // For now, mark all nmethod's as scavengable that are not scavengable already ++ if (ScavengeRootsInCode) { ++ CodeCache::nmethods_do(mark_as_scavengable); ++ } ++ ++ SharedHeap::heap()->gc_prologue(true); ++ Universe::heap()->object_iterate(&objectClosure); ++ Universe::root_oops_do(&oopClosureNoBarrier); ++ SharedHeap::heap()->gc_epilogue(false); ++ } ++ RC_TRACE(0x00000001, ("After updating instances")); ++ ++ for (int i = 0; i < _new_classes->length(); i++) { ++ InstanceKlass* cur = InstanceKlass::cast(_new_classes->at(i)()); ++ InstanceKlass* old = InstanceKlass::cast(cur->old_version()); ++ ++ // Swap marks to have same hashcodes ++ markOop cur_mark = cur->prototype_header(); ++ markOop old_mark = old->prototype_header(); ++ cur->set_prototype_header(old_mark); ++ old->set_prototype_header(cur_mark); ++ ++ //swap_marks(cur, old); ++ swap_marks(cur->java_mirror(), old->java_mirror()); ++ ++ // Revert pool holder for old version of klass (it was updated by one of ours closure!) ++ old->constants()->set_pool_holder(old); ++ ++ Klass* array_klasses = old->array_klasses(); ++ if (array_klasses != NULL) { ++ assert(cur->array_klasses() == NULL, "just checking"); ++ ++ // Transfer the array classes, otherwise we might get cast exceptions when casting array types. ++ // Also, set array klasses element klass. ++ cur->set_array_klasses(array_klasses); ++ ObjArrayKlass::cast(array_klasses)->set_element_klass(cur); ++ } ++ ++ // Initialize the new class! Special static initialization that does not execute the ++ // static constructor but copies static field values from the old class if name ++ // and signature of a static field match. ++ FieldCopier copier; ++ cur->do_local_static_fields(&copier); // TODO (tw): What about internal static fields?? ++ //java_lang_Class::set_klass(old->java_mirror(), cur); // FIXME-isd: is that correct? ++ //FIXME-isd: do we need this: ??? old->set_java_mirror(cur->java_mirror()); ++ ++ // Transfer init state ++ InstanceKlass::ClassState state = old->init_state(); ++ if (state > InstanceKlass::linked) { ++ cur->set_init_state(state); ++ } ++ } ++ ++ RC_TIMER_STOP(_timer_heap_iteration); ++ RC_TIMER_START(_timer_redefinition); ++ if (objectClosure.needs_instance_update()) { ++ // Do a full garbage collection to update the instance sizes accordingly ++ RC_TRACE(0x00000001, ("Before performing full GC!")); ++ Universe::set_redefining_gc_run(true); ++ notify_gc_begin(true); ++ Universe::heap()->collect_as_vm_thread(GCCause::_heap_inspection); ++ notify_gc_end(); ++ Universe::set_redefining_gc_run(false); ++ RC_TRACE(0x00000001, ("GC done!")); ++ } ++ ++ // Unmark Klass*s as "redefining" ++ for (int i=0; i<_new_classes->length(); i++) { ++ Klass* cur_klass = _new_classes->at(i)(); ++ InstanceKlass* cur = (InstanceKlass*)cur_klass; ++ cur->set_redefining(false); ++ cur->clear_update_information(); ++ } ++ ++ // Disable any dependent concurrent compilations ++ SystemDictionary::notice_modification(); ++ ++ // Set flag indicating that some invariants are no longer true. ++ // See jvmtiExport.hpp for detailed explanation. 
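// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the patch: the in-place update decision
// taken by ChangePointersObjectClosure::do_object() above reduces to the rule
// below. Names such as can_update_in_place are placeholders, not HotSpot
// identifiers; sizes are in heap words, and min_fill_size stands in for
// CollectedHeap::min_fill_size().
#include <cstddef>

bool can_update_in_place(std::size_t old_size, std::size_t new_size, std::size_t min_fill_size) {
    if (new_size > old_size) {
        // The instance grew: it cannot be patched where it lies, so the
        // redefinition later falls back to a full GC that rebuilds instances.
        return false;
    }
    std::size_t gap = old_size - new_size;
    // The instance shrank or kept its size: any leftover gap must be either
    // empty or large enough for a filler object, otherwise the heap would no
    // longer be parsable after the klass pointer is swapped.
    return gap == 0 || gap >= min_fill_size;
}
// A false result corresponds to setting _needs_instance_update, which is what
// triggers the collect_as_vm_thread() call later in doit().
// ---------------------------------------------------------------------------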
++ JvmtiExport::set_has_redefined_a_class(); ++ ++ // Clean up caches in the compiler interface and compiler threads ++ ciObjectFactory::resort_shared_ci_metadata(); ++ ++#ifdef ASSERT ++ ++ // Universe::verify(); ++ // JNIHandles::verify(); ++ ++ SystemDictionary::classes_do(check_class, thread); ++#endif ++ ++ RC_TIMER_STOP(_timer_redefinition); ++ ++ if (TraceRedefineClasses > 0) { ++ tty->flush(); ++ } ++} ++ ++void VM_EnhancedRedefineClasses::doit_epilogue() { ++ ++ RC_TIMER_START(_timer_vm_op_epilogue); ++ ++ ResourceMark mark; ++ ++ VM_GC_Operation::doit_epilogue(); ++ RC_TRACE(0x00000001, ("GC Operation epilogue finished!")); ++ ++ // Free the array of scratch classes ++ delete _new_classes; ++ _new_classes = NULL; ++ ++ // Free the array of affected classes ++ delete _affected_klasses; ++ _affected_klasses = NULL; ++ ++ RC_TRACE(0x00000001, ("Redefinition finished!")); ++ ++ RC_TIMER_STOP(_timer_vm_op_epilogue); ++} ++ ++bool VM_EnhancedRedefineClasses::is_modifiable_class(oop klass_mirror) { ++ // classes for primitives cannot be redefined ++ if (java_lang_Class::is_primitive(klass_mirror)) { ++ return false; ++ } ++ Klass* klass = java_lang_Class::as_Klass(klass_mirror); ++ // classes for arrays cannot be redefined ++ if (klass == NULL || !klass->oop_is_instance()) { ++ return false; ++ } ++ return true; ++} ++ ++#ifdef ASSERT ++ ++void VM_EnhancedRedefineClasses::verify_classes(Klass* k_oop_latest, oop initiating_loader, TRAPS) { ++ Klass* k_oop = k_oop_latest; ++ while (k_oop != NULL) { ++ ++ instanceKlassHandle k_handle(THREAD, k_oop); ++ Verifier::verify(k_handle, Verifier::ThrowException, true, THREAD); ++ k_oop = k_oop->old_version(); ++ } ++} ++ ++#endif ++ ++// Rewrite faster byte-codes back to their slower equivalent. Undoes rewriting happening in templateTable_xxx.cpp ++// The reason is that once we zero cpool caches, we need to re-resolve all entries again. Faster bytecodes do not ++// do that, they assume that cache entry is resolved already. ++void VM_EnhancedRedefineClasses::unpatch_bytecode(Method* method) { ++ RawBytecodeStream bcs(method); ++ Bytecodes::Code code; ++ Bytecodes::Code java_code; ++ while (!bcs.is_last_bytecode()) { ++ code = bcs.raw_next(); ++ address bcp = bcs.bcp(); ++ ++ if (code == Bytecodes::_breakpoint) { ++ int bci = method->bci_from(bcp); ++ code = method->orig_bytecode_at(bci); ++ java_code = Bytecodes::java_code(code); ++ if (code != java_code && ++ (java_code == Bytecodes::_getfield || ++ java_code == Bytecodes::_putfield || ++ java_code == Bytecodes::_aload_0)) { ++ // Let breakpoint table handling unpatch bytecode ++ method->set_orig_bytecode_at(bci, java_code); ++ } ++ } else { ++ java_code = Bytecodes::java_code(code); ++ if (code != java_code && ++ (java_code == Bytecodes::_getfield || ++ java_code == Bytecodes::_putfield || ++ java_code == Bytecodes::_aload_0)) { ++ *bcp = java_code; ++ } ++ } ++ ++ // Additionally, we need to unpatch bytecode at bcp+1 for fast_xaccess (which would be fast field access) ++ if (code == Bytecodes::_fast_iaccess_0 || code == Bytecodes::_fast_aaccess_0 || code == Bytecodes::_fast_faccess_0) { ++ Bytecodes::Code code2 = Bytecodes::code_or_bp_at(bcp + 1); ++ assert(code2 == Bytecodes::_fast_igetfield || ++ code2 == Bytecodes::_fast_agetfield || ++ code2 == Bytecodes::_fast_fgetfield, ""); ++ *(bcp + 1) = Bytecodes::java_code(code2); ++ } ++ } ++ } ++ ++// Unevolving classes may point to old methods directly ++// from their constant pool caches, itables, and/or vtables. 
We ++// use the SystemDictionary::classes_do() facility and this helper ++// to fix up these pointers. Additional field offsets and vtable indices ++// in the constant pool cache entries are fixed. ++// ++// Note: We currently don't support updating the vtable in ++// arrayKlassOops. See Open Issues in jvmtiRedefineClasses.hpp. ++void VM_EnhancedRedefineClasses::adjust_cpool_cache(Klass* klass_latest, TRAPS) { ++ Klass* k = klass_latest; ++ while (k != NULL) { ++ if (k->oop_is_instance()) { ++ HandleMark hm(THREAD); ++ InstanceKlass *ik = InstanceKlass::cast(k); ++ ++ constantPoolHandle other_cp; ++ ConstantPoolCache* cp_cache; ++ other_cp = constantPoolHandle(ik->constants()); ++ ++ for (int i = 0; i < other_cp->length(); i++) { ++ if (other_cp->tag_at(i).is_klass()) { ++ Klass* klass = other_cp->klass_at(i, THREAD); ++ if (klass->new_version() != NULL) { ++ // (DCEVM) TODO: check why/if this is necessary ++ other_cp->klass_at_put(i, klass->new_version()); ++ } ++ klass = other_cp->klass_at(i, THREAD); ++ assert(klass->new_version() == NULL, "Must be new klass!"); ++ } ++ } ++ ++ cp_cache = other_cp->cache(); ++ ++ if (cp_cache != NULL) { ++ cp_cache->clear_entries(); ++ } ++ ++ // If bytecode rewriting is enabled, we also need to unpatch bytecode to force resolution of zeroed entries ++ if (RewriteBytecodes) { ++ ik->methods_do(unpatch_bytecode); ++ } ++ } ++ k = k->old_version(); ++ } ++} ++ ++void VM_EnhancedRedefineClasses::update_jmethod_ids() { ++ for (int j = 0; j < _matching_methods_length; ++j) { ++ Method* old_method = _old_methods->at(_matching_old_methods[j]); ++ jmethodID jmid = old_method->find_jmethod_id_or_null(); ++ RC_TRACE(0x00008000, ("matching method %s, jmid %d", old_method->name_and_sig_as_C_string(), jmid)); ++ if (old_method->new_version() != NULL && jmid == NULL) { ++ // (DCEVM) Have to create jmethodID in this case ++ jmid = old_method->jmethod_id(); ++ } ++ ++ if (jmid != NULL) { ++ // There is a jmethodID, change it to point to the new method ++ methodHandle new_method_h(_new_methods->at(_matching_new_methods[j])); ++ if (old_method->new_version() == NULL) { ++ methodHandle old_method_h(_old_methods->at(_matching_old_methods[j])); ++ jmethodID new_jmethod_id = Method::make_jmethod_id(old_method_h->method_holder()->class_loader_data(), old_method_h()); ++ bool result = InstanceKlass::cast(old_method_h->method_holder())->update_jmethod_id(old_method_h(), new_jmethod_id); ++ } else { ++ jmethodID mid = new_method_h->jmethod_id(); ++ bool result = InstanceKlass::cast(new_method_h->method_holder())->update_jmethod_id(new_method_h(), jmid); ++ } ++ Method::change_method_associated_with_jmethod_id(jmid, new_method_h()); ++ assert(Method::resolve_jmethod_id(jmid) == _new_methods->at(_matching_new_methods[j]), "should be replaced"); ++ jmethodID mid = (_new_methods->at(_matching_new_methods[j]))->jmethod_id(); ++ //assert(JNIHandles::resolve_non_null((jobject)mid) == new_method_h(), "must match!"); ++ } ++ } ++} ++ ++ ++// Deoptimize all compiled code that depends on this class. ++// ++// If the can_redefine_classes capability is obtained in the onload ++// phase then the compiler has recorded all dependencies from startup. ++// In that case we need only deoptimize and throw away all compiled code ++// that depends on the class. ++// ++// If can_redefine_classes is obtained sometime after the onload ++// phase then the dependency information may be incomplete. In that case ++// the first call to RedefineClasses causes all compiled code to be ++// thrown away. 
As can_redefine_classes has been obtained then ++// all future compilations will record dependencies so second and ++// subsequent calls to RedefineClasses need only throw away code ++// that depends on the class. ++// ++void VM_EnhancedRedefineClasses::flush_dependent_code(instanceKlassHandle k_h, TRAPS) { ++ assert_locked_or_safepoint(Compile_lock); ++ ++ // All dependencies have been recorded from startup or this is a second or ++ // subsequent use of RedefineClasses ++ ++ // For now deopt all ++ // (tw) TODO: Improve the dependency system such that we can safely deopt only a subset of the methods ++ if (0 && JvmtiExport::all_dependencies_are_recorded()) { ++ Universe::flush_evol_dependents_on(k_h); ++ } else { ++ CodeCache::mark_all_nmethods_for_deoptimization(); ++ ++ ResourceMark rm(THREAD); ++ DeoptimizationMarker dm; ++ ++ // Deoptimize all activations depending on marked nmethods ++ Deoptimization::deoptimize_dependents(); ++ ++ // Make the dependent methods not entrant (in VM_Deoptimize they are made zombies) ++ CodeCache::make_marked_nmethods_not_entrant(); ++ ++ // From now on we know that the dependency information is complete ++ JvmtiExport::set_all_dependencies_are_recorded(true); ++ } ++ } ++ ++void VM_EnhancedRedefineClasses::compute_added_deleted_matching_methods() { ++ Method* old_method; ++ Method* new_method; ++ ++ _matching_old_methods = NEW_RESOURCE_ARRAY(int, _old_methods->length()); ++ _matching_new_methods = NEW_RESOURCE_ARRAY(int, _old_methods->length()); ++ _added_methods = NEW_RESOURCE_ARRAY(int, _new_methods->length()); ++ _deleted_methods = NEW_RESOURCE_ARRAY(int, _old_methods->length()); ++ ++ _matching_methods_length = 0; ++ _deleted_methods_length = 0; ++ _added_methods_length = 0; ++ ++ int nj = 0; ++ int oj = 0; ++ while (true) { ++ if (oj >= _old_methods->length()) { ++ if (nj >= _new_methods->length()) { ++ break; // we've looked at everything, done ++ } ++ // New method at the end ++ new_method = _new_methods->at(nj); ++ _added_methods[_added_methods_length++] = nj; ++ ++nj; ++ } else if (nj >= _new_methods->length()) { ++ // Old method, at the end, is deleted ++ old_method = _old_methods->at(oj); ++ _deleted_methods[_deleted_methods_length++] = oj; ++ ++oj; ++ } else { ++ old_method = _old_methods->at(oj); ++ new_method = _new_methods->at(nj); ++ if (old_method->name() == new_method->name()) { ++ if (old_method->signature() == new_method->signature()) { ++ _matching_old_methods[_matching_methods_length] = oj;//old_method; ++ _matching_new_methods[_matching_methods_length] = nj;//new_method; ++ _matching_methods_length++; ++ ++nj; ++ ++oj; ++ } else { ++ // added overloaded have already been moved to the end, ++ // so this is a deleted overloaded method ++ _deleted_methods[_deleted_methods_length++] = oj;//old_method; ++ ++oj; ++ } ++ } else { // names don't match ++ if (old_method->name()->fast_compare(new_method->name()) > 0) { ++ // new method ++ _added_methods[_added_methods_length++] = nj;//new_method; ++ ++nj; ++ } else { ++ // deleted method ++ _deleted_methods[_deleted_methods_length++] = oj;//old_method; ++ ++oj; ++ } ++ } ++ } ++ } ++ assert(_matching_methods_length + _deleted_methods_length == _old_methods->length(), "sanity"); ++ assert(_matching_methods_length + _added_methods_length == _new_methods->length(), "sanity"); ++ RC_TRACE(0x00008000, ("Matching methods = %d / deleted methods = %d / added methods = %d", ++ _matching_methods_length, _deleted_methods_length, _added_methods_length)); ++} ++ ++ ++ ++// Install the redefinition of a 
class:
++// - house keeping (flushing breakpoints and caches, deoptimizing
++// dependent compiled code)
++// - adjusting constant pool caches and vtables in other classes
++void VM_EnhancedRedefineClasses::redefine_single_class(instanceKlassHandle the_new_class, TRAPS) {
++
++ ResourceMark rm(THREAD);
++
++ assert(the_new_class->old_version() != NULL, "Must not be null");
++ assert(the_new_class->old_version()->new_version() == the_new_class(), "Must equal");
++
++ instanceKlassHandle the_old_class = instanceKlassHandle(THREAD, the_new_class->old_version());
++
++#ifndef JVMTI_KERNEL
++ // Remove all breakpoints in methods of this class
++ JvmtiBreakpoints& jvmti_breakpoints = JvmtiCurrentBreakpoints::get_jvmti_breakpoints();
++ jvmti_breakpoints.clearall_in_class_at_safepoint(the_old_class());
++#endif // !JVMTI_KERNEL
++
++ /* FIXME
++ if (the_old_class() == Universe::reflect_invoke_cache()->klass()) {
++ // We are redefining java.lang.reflect.Method. Method.invoke() is
++ // cached and users of the cache care about each active version of
++ // the method so we have to track this previous version.
++ // Do this before methods get switched
++ Universe::reflect_invoke_cache()->add_previous_version(
++ the_old_class->method_with_idnum(Universe::reflect_invoke_cache()->method_idnum()));
++ }*/
++
++ _old_methods = the_old_class->methods();
++ _new_methods = the_new_class->methods();
++ compute_added_deleted_matching_methods();
++
++ // track which methods are EMCP for add_previous_version() call below
++
++ // TODO: Check if we need the concept of EMCP?
++ BitMap emcp_methods(_old_methods->length());
++ int emcp_method_count = 0;
++ emcp_methods.clear(); // clears 0..(length() - 1)
++
++ // We need to mark methods as old!!
++ check_methods_and_mark_as_obsolete(&emcp_methods, &emcp_method_count);
++ update_jmethod_ids();
++
++ // TODO:
++ transfer_old_native_function_registrations(the_old_class);
++
++
++ // JSR-292 support
++
++ // Transfer method handles
++ MemberNameTable* mnt = the_old_class->member_names();
++ the_new_class->set_member_names(mnt);
++ the_old_class->set_member_names(NULL);
++ if (mnt != NULL) {
++ for (int i = 0; i < mnt->length(); i++) {
++ oop mem_name = mnt->get_member_name(i);
++ if (mem_name != NULL) {
++ Method* method = (Method*) java_lang_invoke_MemberName::vmtarget(mem_name);
++
++ // Replace the method with matching one from the new class
++ Method* new_method = the_new_class->find_method(method->name(), method->signature());
++ java_lang_invoke_MemberName::set_vmtarget(mem_name, new_method);
++ }
++ }
++ }
++
++
++#ifdef ASSERT
++
++// Klass* systemLookup1 = SystemDictionary::resolve_or_null(the_old_class->name(), the_old_class->class_loader(), the_old_class->protection_domain(), THREAD);
++// assert(systemLookup1 == the_new_class(), "New class must be in system dictionary!");
++
++ //JNIHandles::verify();
++
++// Klass* systemLookup = SystemDictionary::resolve_or_null(the_old_class->name(), the_old_class->class_loader(), the_old_class->protection_domain(), THREAD);
++
++// assert(systemLookup == the_new_class(), "New class must be in system dictionary!");
++ assert(the_new_class->old_version() != NULL, "Must not be null");
++ assert(the_new_class->old_version()->new_version() == the_new_class(), "Must equal");
++
++ for (int i=0; i<the_new_class->methods()->length(); i++) {
++ assert((the_new_class->methods()->at(i))->method_holder() == the_new_class(), "method holder must match!");
++ }
++
++ // FIXME:
++ //_old_methods->verify();
++ //_new_methods->verify();
++
++
the_new_class->vtable()->verify(tty); ++ the_old_class->vtable()->verify(tty); ++ ++#endif ++ ++ // increment the classRedefinedCount field in the_class and in any ++ // direct and indirect subclasses of the_class ++ increment_class_counter((InstanceKlass *)the_old_class(), THREAD); ++ ++} ++ ++ ++void VM_EnhancedRedefineClasses::check_methods_and_mark_as_obsolete(BitMap *emcp_methods, int * emcp_method_count_p) { ++ RC_TRACE(0x00000100, ("Checking matching methods for EMCP")); ++ *emcp_method_count_p = 0; ++ int obsolete_count = 0; ++ int old_index = 0; ++ for (int j = 0; j < _matching_methods_length; ++j, ++old_index) { ++ Method* old_method = _old_methods->at(_matching_old_methods[j]); ++ Method* new_method = _new_methods->at(_matching_new_methods[j]); ++ Method* old_array_method; ++ ++ // Maintain an old_index into the _old_methods array by skipping ++ // deleted methods ++ while ((old_array_method = _old_methods->at(old_index)) != old_method) { ++ ++old_index; ++ } ++ ++ if (MethodComparator::methods_EMCP(old_method, new_method)) { ++ // The EMCP definition from JSR-163 requires the bytecodes to be ++ // the same with the exception of constant pool indices which may ++ // differ. However, the constants referred to by those indices ++ // must be the same. ++ // ++ // We use methods_EMCP() for comparison since constant pool ++ // merging can remove duplicate constant pool entries that were ++ // present in the old method and removed from the rewritten new ++ // method. A faster binary comparison function would consider the ++ // old and new methods to be different when they are actually ++ // EMCP. ++ ++ // track which methods are EMCP for add_previous_version() call ++ emcp_methods->set_bit(old_index); ++ (*emcp_method_count_p)++; ++ ++ // An EMCP method is _not_ obsolete. An obsolete method has a ++ // different jmethodID than the current method. An EMCP method ++ // has the same jmethodID as the current method. Having the ++ // same jmethodID for all EMCP versions of a method allows for ++ // a consistent view of the EMCP methods regardless of which ++ // EMCP method you happen to have in hand. For example, a ++ // breakpoint set in one EMCP method will work for all EMCP ++ // versions of the method including the current one. ++ ++ old_method->set_new_version(new_method); ++ new_method->set_old_version(old_method); ++ ++ RC_TRACE(0x00000100, ("Found EMCP method %s", old_method->name_and_sig_as_C_string())); ++ ++ // Transfer breakpoints ++ InstanceKlass *ik = InstanceKlass::cast(old_method->method_holder()); ++ for (BreakpointInfo* bp = ik->breakpoints(); bp != NULL; bp = bp->next()) { ++ RC_TRACE(0x00000100, ("Checking breakpoint: %d / %d", ++ bp->match(old_method), bp->match(new_method))); ++ if (bp->match(old_method)) { ++ assert(bp->match(new_method), "if old method is method, then new method must match too"); ++ RC_TRACE(0x00000100, ("Found a breakpoint in an old EMCP method")); ++ new_method->set_breakpoint(bp->bci()); ++ } ++ } ++ } else { ++ // mark obsolete methods as such ++ old_method->set_is_obsolete(); ++ obsolete_count++; ++ ++ // With tracing we try not to "yack" too much. The position of ++ // this trace assumes there are fewer obsolete methods than ++ // EMCP methods. 
++ RC_TRACE(0x00000100, ("mark %s(%s) as obsolete",
++ old_method->name()->as_C_string(),
++ old_method->signature()->as_C_string()));
++ }
++ old_method->set_is_old();
++ }
++ for (int i = 0; i < _deleted_methods_length; ++i) {
++ Method* old_method = _old_methods->at(_deleted_methods[i]);
++
++ //assert(old_method->vtable_index() < 0,
++ // "cannot delete methods with vtable entries");;
++
++ // Mark all deleted methods as old and obsolete
++ old_method->set_is_old();
++ old_method->set_is_obsolete();
++ ++obsolete_count;
++ // With tracing we try not to "yack" too much. The position of
++ // this trace assumes there are fewer obsolete methods than
++ // EMCP methods.
++ RC_TRACE(0x00000100, ("mark deleted %s(%s) as obsolete",
++ old_method->name()->as_C_string(),
++ old_method->signature()->as_C_string()));
++ }
++ //assert((*emcp_method_count_p + obsolete_count) == _old_methods->length(), "sanity check");
++ RC_TRACE(0x00000100, ("EMCP_cnt=%d, obsolete_cnt=%d !",
++ *emcp_method_count_p, obsolete_count));
++}
++
++// Increment the classRedefinedCount field in the specific InstanceKlass
++// and in all direct and indirect subclasses.
++void VM_EnhancedRedefineClasses::increment_class_counter(Klass* klass, TRAPS) {
++ oop class_mirror = klass->java_mirror();
++ int new_count = java_lang_Class::classRedefinedCount(class_mirror) + 1;
++ java_lang_Class::set_classRedefinedCount(class_mirror, new_count);
++ RC_TRACE(0x00000008, ("updated count for class=%s to %d", klass->external_name(), new_count));
++}
++
++#ifndef PRODUCT
++void VM_EnhancedRedefineClasses::check_class(Klass* k_oop, TRAPS) {
++ Klass *k = k_oop;
++ if (k->oop_is_instance()) {
++ HandleMark hm(THREAD);
++ InstanceKlass *ik = (InstanceKlass *) k;
++ assert(ik->is_newest_version(), "must be latest version in system dictionary");
++
++ if (ik->vtable_length() > 0) {
++ ResourceMark rm(THREAD);
++ assert(ik->vtable()->check_no_old_or_obsolete_entries(), "old method found");
++ ik->vtable()->verify(tty, true);
++ }
++ }
++}
++
++#endif
++
++static bool match_second(void* value, Pair<Klass*, Klass*> elem) {
++ return elem.second == value;
++}
++
++jvmtiError VM_EnhancedRedefineClasses::do_topological_class_sorting( const jvmtiClassDefinition *class_defs, int class_count, TRAPS) {
++ ResourceMark mark(THREAD);
++ GrowableArray<Pair<Klass*, Klass*> > links;
++
++ for (int i=0; i<class_count; i++) {
++ instanceKlassHandle the_class(THREAD, java_lang_Class::as_Klass(JNIHandles::resolve_non_null(class_defs[i].klass)));
++ Handle the_class_loader(THREAD, the_class->class_loader());
++ Handle protection_domain(THREAD, the_class->protection_domain());
++
++ ClassFileStream st((u1*) class_defs[i].class_bytes,
++ class_defs[i].class_byte_count, (char *)"__VM_EnhancedRedefineClasses__");
++ ClassFileParser cfp(&st);
++
++
++
++ TempNewSymbol parsed_name;
++ GrowableArray<Symbol*>* super_symbols = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<Symbol*>(0, true);
++ cfp.parseClassFile(the_class->name(),
++ the_class->class_loader_data(),
++ protection_domain,
++ the_class, KlassHandle(),
++ NULL,
++ super_symbols,
++ parsed_name,
++ false,
++ THREAD);
++
++ for (int j = 0; j < super_symbols->length(); j++) {
++ Symbol* sym = super_symbols->at(j);
++ Klass* super_klass = SystemDictionary::resolve_or_null(sym, the_class_loader, protection_domain, THREAD);
++ if (super_klass != NULL) {
++ instanceKlassHandle the_super_class(THREAD, super_klass);
++ if (_affected_klasses->contains(the_super_class)) {
++ links.append(Pair<Klass*, Klass*>(super_klass, the_class()));
++ }
++ }
++ }
++ delete super_symbols;
++
++ assert(the_class->check_redefinition_flag(Klass::MarkedAsAffected), "");
++ the_class->clear_redefinition_flag(Klass::MarkedAsAffected);
++ }
++
++ for (int i=0; i < _affected_klasses->length(); i++) {
++ instanceKlassHandle klass = _affected_klasses->at(i);
++
++ if (klass->check_redefinition_flag(Klass::MarkedAsAffected)) {
++ klass->clear_redefinition_flag(Klass::MarkedAsAffected);
++ Klass* superKlass = klass->super();
++ if (_affected_klasses->contains(superKlass)) {
++ links.append(Pair<Klass*, Klass*>(superKlass, klass()));
++ }
++
++ Array<Klass*>* superInterfaces = klass->local_interfaces();
++ for (int j=0; j<superInterfaces->length(); j++) {
++ Klass* interfaceKlass = superInterfaces->at(j);
++ if (_affected_klasses->contains(interfaceKlass)) {
++ links.append(Pair<Klass*, Klass*>(interfaceKlass, klass()));
++ }
++ }
++ }
++ }
++
++ for (int i = 0; i < _affected_klasses->length(); i++) {
++ int j;
++ for (j = i; j < _affected_klasses->length(); j++) {
++ // Search for node with no incoming edges
++ Klass* oop = _affected_klasses->at(j)();
++ int k = links.find(oop, match_second);
++ if (k == -1) break;
++ }
++ if (j == _affected_klasses->length()) {
++ return JVMTI_ERROR_CIRCULAR_CLASS_DEFINITION;
++ }
++
++ // Remove all links from this node
++ Klass* oop = _affected_klasses->at(j)();
++ int k = 0;
++ while (k < links.length()) {
++ if (links.adr_at(k)->first == oop) {
++ links.delete_at(k);
++ } else {
++ k++;
++ }
++ }
++
++ // Swap node
++ instanceKlassHandle tmp = _affected_klasses->at(j);
++ _affected_klasses->at_put(j, _affected_klasses->at(i));
++ _affected_klasses->at_put(i, tmp);
++ }
++
++ return JVMTI_ERROR_NONE;
++}
++
++// This internal class transfers the native function registration from old methods
++// to new methods. It is designed to handle both the simple case of unchanged
++// native methods and the complex cases of native method prefixes being added and/or
++// removed.
++// It expects only to be used during the VM_EnhancedRedefineClasses op (a safepoint).
++//
++// This class is used after the new methods have been installed in "the_class".
++//
++// So, for example, the following must be handled. Where 'm' is a method and
++// a number followed by an underscore is a prefix.
++//
++//                                       Old Name      New Name
++// Simple transfer to new method         m       ->    m
++// Add prefix                            m       ->    1_m
++// Remove prefix                         1_m     ->    m
++// Simultaneous add of prefixes          m       ->    3_2_1_m
++// Simultaneous removal of prefixes      3_2_1_m ->    m
++// Simultaneous add and remove           1_m     ->    2_m
++// Same, caused by prefix removal only   3_2_1_m ->    3_2_m
++//
++class TransferNativeFunctionRegistration {
++ private:
++ instanceKlassHandle the_class;
++ int prefix_count;
++ char** prefixes;
++
++ // Recursively search the binary tree of possibly prefixed method names.
++ // Iteration could be used if all agents were well behaved. Full tree walk is
++ // more resilient to agents not cleaning up intermediate methods.
++ // Branch at each depth in the binary tree is:
++ // (1) without the prefix.
++ // (2) with the prefix.
++ // where 'prefix' is the prefix at that 'depth' (first prefix, second prefix,...)
++ Method* search_prefix_name_space(int depth, char* name_str, size_t name_len,
++ Symbol* signature) {
++ Symbol* name_symbol = SymbolTable::probe(name_str, (int)name_len);
++ if (name_symbol != NULL) {
++ Method* method = the_class()->new_version()->lookup_method(name_symbol, signature);
++ if (method != NULL) {
++ // Even if prefixed, intermediate methods must exist.
++ if (method->is_native()) {
++ // Wahoo, we found a (possibly prefixed) version of the method, return it.
++ return method;
++ }
++ if (depth < prefix_count) {
++ // Try applying further prefixes (other than this one).
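// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the patch: do_topological_class_sorting()
// above is a Kahn-style topological sort over "supertype before subtype"
// links. A standalone rendering with placeholder types (int ids instead of
// Klass*, std::pair instead of Pair) could look like this:
#include <algorithm>
#include <optional>
#include <utility>
#include <vector>

// Returns a valid order, or std::nullopt when the links are cyclic -- the
// analogue of returning JVMTI_ERROR_CIRCULAR_CLASS_DEFINITION above.
std::optional<std::vector<int>> topo_sort(int n, std::vector<std::pair<int, int>> links) {
    std::vector<int> order(n);
    for (int i = 0; i < n; i++) order[i] = i;

    for (int i = 0; i < n; i++) {
        // Among the not-yet-placed elements, search for one with no incoming edge.
        int j = i;
        for (; j < n; j++) {
            bool has_incoming = false;
            for (const auto& l : links) {
                if (l.second == order[j]) { has_incoming = true; break; }
            }
            if (!has_incoming) break;
        }
        if (j == n) return std::nullopt; // every remaining element has an incoming edge: cycle

        // Drop all links originating from the chosen element, then swap it into slot i.
        int chosen = order[j];
        links.erase(std::remove_if(links.begin(), links.end(),
                                   [chosen](const std::pair<int, int>& l) { return l.first == chosen; }),
                    links.end());
        std::swap(order[i], order[j]);
    }
    return order;
}
// ---------------------------------------------------------------------------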
++ method = search_prefix_name_space(depth+1, name_str, name_len, signature); ++ if (method != NULL) { ++ return method; // found ++ } ++ ++ // Try adding this prefix to the method name and see if it matches ++ // another method name. ++ char* prefix = prefixes[depth]; ++ size_t prefix_len = strlen(prefix); ++ size_t trial_len = name_len + prefix_len; ++ char* trial_name_str = NEW_RESOURCE_ARRAY(char, trial_len + 1); ++ strcpy(trial_name_str, prefix); ++ strcat(trial_name_str, name_str); ++ method = search_prefix_name_space(depth+1, trial_name_str, trial_len, ++ signature); ++ if (method != NULL) { ++ // If found along this branch, it was prefixed, mark as such ++ method->set_is_prefixed_native(); ++ return method; // found ++ } ++ } ++ } ++ } ++ return NULL; // This whole branch bore nothing ++ } ++ ++ // Return the method name with old prefixes stripped away. ++ char* method_name_without_prefixes(Method* method) { ++ Symbol* name = method->name(); ++ char* name_str = name->as_utf8(); ++ ++ // Old prefixing may be defunct, strip prefixes, if any. ++ for (int i = prefix_count-1; i >= 0; i--) { ++ char* prefix = prefixes[i]; ++ size_t prefix_len = strlen(prefix); ++ if (strncmp(prefix, name_str, prefix_len) == 0) { ++ name_str += prefix_len; ++ } ++ } ++ return name_str; ++ } ++ ++ // Strip any prefixes off the old native method, then try to find a ++ // (possibly prefixed) new native that matches it. ++ Method* strip_and_search_for_new_native(Method* method) { ++ ResourceMark rm; ++ char* name_str = method_name_without_prefixes(method); ++ return search_prefix_name_space(0, name_str, strlen(name_str), ++ method->signature()); ++ } ++ ++ public: ++ ++ // Construct a native method transfer processor for this class. ++ TransferNativeFunctionRegistration(instanceKlassHandle _the_class) { ++ assert(SafepointSynchronize::is_at_safepoint(), "sanity check"); ++ ++ the_class = _the_class; ++ prefixes = JvmtiExport::get_all_native_method_prefixes(&prefix_count); ++ } ++ ++ // Attempt to transfer any of the old or deleted methods that are native ++ void transfer_registrations(instanceKlassHandle old_klass, int* old_methods, int methods_length) { ++ for (int j = 0; j < methods_length; j++) { ++ Method* old_method = old_klass->methods()->at(old_methods[j]); ++ ++ if (old_method->is_native() && old_method->has_native_function()) { ++ Method* new_method = strip_and_search_for_new_native(old_method); ++ if (new_method != NULL) { ++ // Actually set the native function in the new method. ++ // Redefine does not send events (except CFLH), certainly not this ++ // behind the scenes re-registration. ++ new_method->set_native_function(old_method->native_function(), ++ !Method::native_bind_event_is_interesting); ++ } ++ } ++ } ++ } ++}; ++ ++// Don't lose the association between a native method and its JNI function. ++void VM_EnhancedRedefineClasses::transfer_old_native_function_registrations(instanceKlassHandle old_klass) { ++ TransferNativeFunctionRegistration transfer(old_klass); ++ transfer.transfer_registrations(old_klass, _deleted_methods, _deleted_methods_length); ++ transfer.transfer_registrations(old_klass, _matching_old_methods, _matching_methods_length); ++} +diff --git a/src/share/vm/prims/jvmtiRedefineClasses2.hpp b/src/share/vm/prims/jvmtiRedefineClasses2.hpp +new file mode 100644 +--- /dev/null ++++ b/src/share/vm/prims/jvmtiRedefineClasses2.hpp +@@ -0,0 +1,156 @@ ++/* ++ * Copyright (c) 2003, 2011, Oracle and/or its affiliates. All rights reserved. 
++ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
++ *
++ * This code is free software; you can redistribute it and/or modify it
++ * under the terms of the GNU General Public License version 2 only, as
++ * published by the Free Software Foundation.
++ *
++ * This code is distributed in the hope that it will be useful, but WITHOUT
++ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
++ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
++ * version 2 for more details (a copy is included in the LICENSE file that
++ * accompanied this code).
++ *
++ * You should have received a copy of the GNU General Public License version
++ * 2 along with this work; if not, write to the Free Software Foundation,
++ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
++ *
++ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
++ * or visit www.oracle.com if you need additional information or have any
++ * questions.
++ *
++ */
++
++#ifndef SHARE_VM_PRIMS_JVMTIENHANCEDREDEFINECLASSES_HPP
++#define SHARE_VM_PRIMS_JVMTIENHANCEDREDEFINECLASSES_HPP
++
++#include "jvmtifiles/jvmtiEnv.hpp"
++#include "memory/oopFactory.hpp"
++#include "memory/resourceArea.hpp"
++#include "oops/objArrayKlass.hpp"
++#include "oops/objArrayOop.hpp"
++#include "oops/fieldStreams.hpp"
++#include "prims/jvmtiRedefineClassesTrace.hpp"
++#include "gc_implementation/shared/vmGCOperations.hpp"
++
++// New version that allows arbitrary changes to already loaded classes.
++class VM_EnhancedRedefineClasses: public VM_GC_Operation {
++ private:
++
++ // These static fields are needed by SystemDictionary::classes_do()
++ // facility and the adjust_cpool_cache_and_vtable() helper:
++ static Array<Method*>* _old_methods;
++ static Array<Method*>* _new_methods;
++ static int* _matching_old_methods;
++ static int* _matching_new_methods;
++ static int* _deleted_methods;
++ static int* _added_methods;
++ static int _matching_methods_length;
++ static int _deleted_methods_length;
++ static int _added_methods_length;
++
++ static int _revision_number;
++
++ static GrowableArray<instanceKlassHandle>* _affected_klasses;
++
++ // The instance fields are used to pass information from
++ // doit_prologue() to doit() and doit_epilogue().
++ jint _class_count;
++ const jvmtiClassDefinition *_class_defs; // ptr to _class_count defs
++
++ // This operation is used by both RedefineClasses and
++ // RetransformClasses. Indicate which.
++ JvmtiClassLoadKind _class_load_kind;
++
++ GrowableArray<instanceKlassHandle>* _new_classes;
++ jvmtiError _result;
++ int _max_redefinition_flags;
++
++ // Performance measurement support. These timers do not cover all
++ // the work done for JVM/TI RedefineClasses() but they do cover
++ // the heavy lifting.
++ elapsedTimer _timer_total; ++ elapsedTimer _timer_prologue; ++ elapsedTimer _timer_class_linking; ++ elapsedTimer _timer_class_loading; ++ elapsedTimer _timer_prepare_redefinition; ++ elapsedTimer _timer_heap_iteration; ++ elapsedTimer _timer_redefinition; ++ elapsedTimer _timer_vm_op_epilogue; ++ ++ jvmtiError find_sorted_affected_classes( ); ++ jvmtiError find_class_bytes(instanceKlassHandle the_class, const unsigned char **class_bytes, jint *class_byte_count, jboolean *not_changed); ++ jvmtiError load_new_class_versions(TRAPS); ++ ++ // Figure out which new methods match old methods in name and signature, ++ // which methods have been added, and which are no longer present ++ void compute_added_deleted_matching_methods(); ++ ++ // Change jmethodIDs to point to the new methods ++ void update_jmethod_ids(); ++ ++ void swap_all_method_annotations(ConstMethod* old_method, ConstMethod* new_method); ++ ++ static void add_affected_klasses( Klass* obj ); ++ ++ static jvmtiError do_topological_class_sorting(const jvmtiClassDefinition *class_definitions, int class_count, TRAPS); ++ ++ // Install the redefinition of a class ++ void redefine_single_class(instanceKlassHandle the_new_class, TRAPS); ++ ++ // Increment the classRedefinedCount field in the specific instanceKlass ++ // and in all direct and indirect subclasses. ++ void increment_class_counter(Klass* klass, TRAPS); ++ ++ ++ void flush_dependent_code(instanceKlassHandle k_h, TRAPS); ++ ++ static void check_class(Klass* k_oop,/* oop initiating_loader,*/ TRAPS) PRODUCT_RETURN; ++ ++ static void adjust_cpool_cache(Klass* k_oop, TRAPS); ++ ++ static void unpatch_bytecode(Method* method); ++ ++#ifdef ASSERT ++ static void verify_classes(Klass* k_oop, oop initiating_loader, TRAPS); ++#endif ++ ++ int calculate_redefinition_flags(instanceKlassHandle new_version); ++ void calculate_instance_update_information(Klass* new_version); ++ void check_methods_and_mark_as_obsolete(BitMap *emcp_methods, int * emcp_method_count_p); ++ static void mark_as_scavengable(nmethod* nm); ++ ++ bool check_arguments(); ++ jvmtiError check_arguments_error(); ++ ++ public: ++ VM_EnhancedRedefineClasses(jint class_count, const jvmtiClassDefinition *class_defs, JvmtiClassLoadKind class_load_kind); ++ virtual ~VM_EnhancedRedefineClasses(); ++ ++ bool doit_prologue(); ++ void doit(); ++ void doit_epilogue(); ++ void rollback(); ++ ++ jvmtiError check_exception() const; ++ VMOp_Type type() const { return VMOp_RedefineClasses; } ++ bool skip_operation() const { return false; } ++ bool allow_nested_vm_operations() const { return true; } ++ jvmtiError check_error() { return _result; } ++ ++ // Modifiable test must be shared between IsModifiableClass query ++ // and redefine implementation ++ static bool is_modifiable_class(oop klass_mirror); ++ ++ // Utility methods for transfering field access flags ++ ++ static void transfer_special_access_flags(JavaFieldStream *from, JavaFieldStream *to); ++ static void transfer_special_access_flags(fieldDescriptor *from, fieldDescriptor *to); ++ ++ void transfer_old_native_function_registrations(instanceKlassHandle the_class); ++ ++ static void swap_marks(oop first, oop second); ++}; ++ ++#endif // SHARE_VM_PRIMS_JVMTIENHANCEDREDEFINECLASSES_HPP +diff --git a/src/share/vm/runtime/arguments.cpp b/src/share/vm/runtime/arguments.cpp +--- a/src/share/vm/runtime/arguments.cpp ++++ b/src/share/vm/runtime/arguments.cpp +@@ -59,8 +59,8 @@ + #include "gc_implementation/parallelScavenge/parallelScavengeHeap.hpp" + #endif // INCLUDE_ALL_GCS + +-// 
Note: This is a special bug reporting site for the JVM +-#define DEFAULT_VENDOR_URL_BUG "http://bugreport.sun.com/bugreport/crash.jsp" ++// (DCEVM) The DCE VM has its own JIRA bug tracking system. ++#define DEFAULT_VENDOR_URL_BUG "https://github.com/Guidewire/DCEVM/issues" + #define DEFAULT_JAVA_LAUNCHER "generic" + + // Disable options not supported in this release, with a warning if they +@@ -1507,6 +1507,10 @@ + + void Arguments::set_ergonomics_flags() { + ++ if (AllowEnhancedClassRedefinition) { ++ // (DCEVM) enforces serial GC ++ FLAG_SET_ERGO(bool, UseSerialGC, true); ++ } + if (os::is_server_class_machine()) { + // If no other collector is requested explicitly, + // let the VM select the collector based on +@@ -1948,6 +1952,17 @@ + if (UseConcMarkSweepGC || UseParNewGC) i++; + if (UseParallelGC || UseParallelOldGC) i++; + if (UseG1GC) i++; ++ ++ if (AllowEnhancedClassRedefinition) { ++ // (DCEVM) Must use serial GC. This limitation applies because the instance size changing GC modifications ++ // are only built into the mark and compact algorithm. ++ if (!UseSerialGC && i >= 1) { ++ jio_fprintf(defaultStream::error_stream(), ++ "Must use the serial GC in the DCEVM\n"); ++ status = false; ++ } ++ } ++ + if (i > 1) { + jio_fprintf(defaultStream::error_stream(), + "Conflicting collector combinations in option list; " +diff --git a/src/share/vm/runtime/globals.hpp b/src/share/vm/runtime/globals.hpp +--- a/src/share/vm/runtime/globals.hpp ++++ b/src/share/vm/runtime/globals.hpp +@@ -1273,6 +1273,9 @@ + product(intx, TraceRedefineClasses, 0, \ + "Trace level for JVMTI RedefineClasses") \ + \ ++ product(bool, AllowEnhancedClassRedefinition, true, \ ++ "Allow enhanced class redefinition beyond swapping method bodies")\ ++ \ + develop(bool, StressMethodComparator, false, \ + "Run the MethodComparator on all loaded methods") \ + \ +diff --git a/src/share/vm/runtime/reflection.cpp b/src/share/vm/runtime/reflection.cpp +--- a/src/share/vm/runtime/reflection.cpp ++++ b/src/share/vm/runtime/reflection.cpp +@@ -519,6 +519,12 @@ + AccessFlags access, + bool classloader_only, + bool protected_restriction) { ++ ++ // (DCEVM) Decide accessibility based on active version ++ if (current_class != NULL) { ++ current_class = current_class->active_version(); ++ } ++ + // Verify that current_class can access a field of field_class, where that + // field's access bits are "access". We assume that we've already verified + // that current_class can access field_class. diff --git a/hotspot/.hg/patches/series b/hotspot/.hg/patches/series new file mode 100644 index 00000000..b17eafbf --- /dev/null +++ b/hotspot/.hg/patches/series @@ -0,0 +1,3 @@ + +# Rest of the changes +light-jdk8u5-b13.patch #+light #+jdk8 #+u5-b13 -- 2.39.5
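// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the patch: the FieldCopier closure in the
// jvmtiRedefineClasses2.cpp hunk above initializes a redefined class without
// running its static initializer again; it copies the value of every static
// field whose (name, signature) pair also exists in the old class version.
// The types below (FieldKey, StaticValues, transfer_statics) are placeholders
// chosen for illustration only.
#include <map>
#include <string>
#include <utility>

using FieldKey     = std::pair<std::string, std::string>; // (field name, JVM type signature)
using StaticValues = std::map<FieldKey, long long>;       // stand-in for the mirror's static storage

// Fields that are new in the redefined class keep their default (zero/null)
// value, exactly as if the class had just been prepared; fields that match by
// name and signature carry over the value observed in the old version.
StaticValues transfer_statics(const StaticValues& old_statics, const StaticValues& new_defaults) {
    StaticValues result = new_defaults;
    for (auto& entry : result) {
        auto it = old_statics.find(entry.first);
        if (it != old_statics.end()) {
            entry.second = it->second; // same name and signature: keep the old value
        }
    }
    return result;
}
// ---------------------------------------------------------------------------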