author     amurillo <none@none>  2013-11-01 08:26:54 -0700
committer  amurillo <none@none>  2013-11-01 08:26:54 -0700
commit     275be0d8f1c7556f4fa608e0e039baa6a00e1db3
tree       3050a23093402c125d5e6ffcdc7227a72a33e292
parent     d3e86f60571bc6e9dc0b09b96003d56970d68fb1
parent     21c86c80ce5e2b00f63338dadf8a78a5ce625c4d
Diffstat (limited to 'src/share/vm')
-rw-r--r--  src/share/vm/c1/c1_GraphBuilder.cpp                        |   2
-rw-r--r--  src/share/vm/ci/ciInstanceKlass.cpp                        |   1
-rw-r--r--  src/share/vm/ci/ciInstanceKlass.hpp                        |   6
-rw-r--r--  src/share/vm/classfile/classLoaderData.cpp                 |  17
-rw-r--r--  src/share/vm/classfile/classLoaderData.hpp                 |   2
-rw-r--r--  src/share/vm/classfile/systemDictionary.hpp                |   2
-rw-r--r--  src/share/vm/compiler/compileBroker.cpp                    |   4
-rw-r--r--  src/share/vm/interpreter/abstractInterpreter.hpp           |   4
-rw-r--r--  src/share/vm/interpreter/cppInterpreter.hpp                |   2
-rw-r--r--  src/share/vm/interpreter/interpreter.cpp                   |  18
-rw-r--r--  src/share/vm/interpreter/templateInterpreter.cpp           |  86
-rw-r--r--  src/share/vm/interpreter/templateInterpreter.hpp           |  18
-rw-r--r--  src/share/vm/interpreter/templateInterpreterGenerator.hpp  |   2
-rw-r--r--  src/share/vm/memory/metaspace.cpp                          |  74
-rw-r--r--  src/share/vm/memory/metaspace.hpp                          |  13
-rw-r--r--  src/share/vm/memory/universe.cpp                           |  40
-rw-r--r--  src/share/vm/oops/instanceKlass.cpp                        |  29
-rw-r--r--  src/share/vm/oops/method.cpp                               |   5
-rw-r--r--  src/share/vm/opto/library_call.cpp                         |   6
-rw-r--r--  src/share/vm/opto/loopTransform.cpp                        |   4
-rw-r--r--  src/share/vm/opto/postaloc.cpp                             |  13
-rw-r--r--  src/share/vm/opto/type.cpp                                 |  41
-rw-r--r--  src/share/vm/opto/type.hpp                                 |   5
-rw-r--r--  src/share/vm/prims/jvmtiGetLoadedClasses.cpp               | 109
-rw-r--r--  src/share/vm/runtime/arguments.cpp                         |   9
-rw-r--r--  src/share/vm/runtime/handles.cpp                           |   2
-rw-r--r--  src/share/vm/runtime/thread.cpp                            |   2
-rw-r--r--  src/share/vm/services/jmm.h                                |   1
-rw-r--r--  src/share/vm/services/management.cpp                       |   3
-rw-r--r--  src/share/vm/utilities/globalDefinitions.hpp               |   2
30 files changed, 355 insertions, 167 deletions
diff --git a/src/share/vm/c1/c1_GraphBuilder.cpp b/src/share/vm/c1/c1_GraphBuilder.cpp
index 75e827b3b..26bb8d022 100644
--- a/src/share/vm/c1/c1_GraphBuilder.cpp
+++ b/src/share/vm/c1/c1_GraphBuilder.cpp
@@ -1873,7 +1873,7 @@ void GraphBuilder::invoke(Bytecodes::Code code) {
// number of implementors for decl_interface is 0 or 1. If
// it's 0 then no class implements decl_interface and there's
// no point in inlining.
- if (!holder->is_loaded() || decl_interface->nof_implementors() != 1) {
+ if (!holder->is_loaded() || decl_interface->nof_implementors() != 1 || decl_interface->has_default_methods()) {
singleton = NULL;
}
}
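
Note on the hunk above: CHA-based devirtualization of an interface call is only sound when exactly one class implements the interface and the interface itself cannot supply the target method. With default methods (JSR 335) the resolved target may be the interface's own default method, so the single implementor is no longer the unique candidate and the optimization must bail out. A minimal sketch of the combined guard, with illustrative names rather than HotSpot's API:

    // Illustrative stand-in for the GraphBuilder inlining guard.
    #include <cstdio>

    bool can_bind_to_single_implementor(bool holder_loaded,
                                        int nof_implementors,
                                        bool has_default_methods) {
      // One loaded implementor used to be enough; a default method on the
      // interface adds a second possible target, so give up (singleton = NULL).
      return holder_loaded && nof_implementors == 1 && !has_default_methods;
    }

    int main() {
      printf("%d\n", can_bind_to_single_implementor(true, 1, true));  // 0: no devirt
      return 0;
    }
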
diff --git a/src/share/vm/ci/ciInstanceKlass.cpp b/src/share/vm/ci/ciInstanceKlass.cpp
index d40e460dc..f4389da46 100644
--- a/src/share/vm/ci/ciInstanceKlass.cpp
+++ b/src/share/vm/ci/ciInstanceKlass.cpp
@@ -57,6 +57,7 @@ ciInstanceKlass::ciInstanceKlass(KlassHandle h_k) :
_init_state = ik->init_state();
_nonstatic_field_size = ik->nonstatic_field_size();
_has_nonstatic_fields = ik->has_nonstatic_fields();
+ _has_default_methods = ik->has_default_methods();
_nonstatic_fields = NULL; // initialized lazily by compute_nonstatic_fields:
_implementor = NULL; // we will fill these lazily
diff --git a/src/share/vm/ci/ciInstanceKlass.hpp b/src/share/vm/ci/ciInstanceKlass.hpp
index fdd93ddc5..34eb84ccf 100644
--- a/src/share/vm/ci/ciInstanceKlass.hpp
+++ b/src/share/vm/ci/ciInstanceKlass.hpp
@@ -52,6 +52,7 @@ private:
bool _has_finalizer;
bool _has_subklass;
bool _has_nonstatic_fields;
+ bool _has_default_methods;
ciFlags _flags;
jint _nonstatic_field_size;
@@ -171,6 +172,11 @@ public:
}
}
+ bool has_default_methods() {
+ assert(is_loaded(), "must be loaded");
+ return _has_default_methods;
+ }
+
ciInstanceKlass* get_canonical_holder(int offset);
ciField* get_field_by_offset(int field_offset, bool is_static);
ciField* get_field_by_name(ciSymbol* name, ciSymbol* signature, bool is_static);
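
Note: the ci layer snapshots InstanceKlass state at construction time so that compiler threads read immutable copies instead of the live VM klass; _has_default_methods follows that pattern, and the accessor asserts is_loaded() because the flag is only final once loading completes. A minimal sketch of the caching pattern, with hypothetical names:

    #include <cassert>

    // Hypothetical stand-in for the ciInstanceKlass snapshot pattern.
    class KlassSnapshot {
      bool _loaded;
      bool _has_default_methods;  // copied once, never re-read from the VM klass
     public:
      KlassSnapshot(bool loaded, bool has_default_methods)
          : _loaded(loaded), _has_default_methods(has_default_methods) {}
      bool has_default_methods() const {
        assert(_loaded && "must be loaded");  // flag is meaningless before loading
        return _has_default_methods;
      }
    };
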
diff --git a/src/share/vm/classfile/classLoaderData.cpp b/src/share/vm/classfile/classLoaderData.cpp
index 84c191e33..e3209ae18 100644
--- a/src/share/vm/classfile/classLoaderData.cpp
+++ b/src/share/vm/classfile/classLoaderData.cpp
@@ -131,6 +131,17 @@ void ClassLoaderData::classes_do(void f(Klass * const)) {
}
}
+void ClassLoaderData::loaded_classes_do(KlassClosure* klass_closure) {
+ // Lock to avoid classes being modified/added/removed during iteration
+ MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
+ for (Klass* k = _klasses; k != NULL; k = k->next_link()) {
+ // Do not filter ArrayKlass oops here...
+ if (k->oop_is_array() || (k->oop_is_instance() && InstanceKlass::cast(k)->is_loaded())) {
+ klass_closure->do_klass(k);
+ }
+ }
+}
+
void ClassLoaderData::classes_do(void f(InstanceKlass*)) {
for (Klass* k = _klasses; k != NULL; k = k->next_link()) {
if (k->oop_is_instance()) {
@@ -600,6 +611,12 @@ void ClassLoaderDataGraph::classes_do(void f(Klass* const)) {
}
}
+void ClassLoaderDataGraph::loaded_classes_do(KlassClosure* klass_closure) {
+ for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
+ cld->loaded_classes_do(klass_closure);
+ }
+}
+
void ClassLoaderDataGraph::classes_unloading_do(void f(Klass* const)) {
assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
for (ClassLoaderData* cld = _unloading; cld != NULL; cld = cld->next()) {
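
Note: loaded_classes_do deliberately differs from the existing classes_do variants: it takes the metaspace lock, keeps array klasses, and skips InstanceKlasses that are still mid-definition (on the list but not yet loaded). A simplified model of the walk, with stand-in types:

    // Simplified model of the CLD klass-list walk; types are illustrative.
    struct Klass {
      Klass* next;
      bool   is_array;
      bool   is_loaded;  // instance klasses become "loaded" late in definition
    };

    struct KlassClosure { virtual void do_klass(Klass* k) = 0; };

    void loaded_classes_do(Klass* head, KlassClosure* cl) {
      // The real code holds the metaspace lock so the list cannot change here.
      for (Klass* k = head; k != nullptr; k = k->next) {
        if (k->is_array || k->is_loaded) {  // filter partially-defined klasses
          cl->do_klass(k);
        }
      }
    }
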
diff --git a/src/share/vm/classfile/classLoaderData.hpp b/src/share/vm/classfile/classLoaderData.hpp
index 6d5747483..cee114c75 100644
--- a/src/share/vm/classfile/classLoaderData.hpp
+++ b/src/share/vm/classfile/classLoaderData.hpp
@@ -78,6 +78,7 @@ class ClassLoaderDataGraph : public AllStatic {
static void keep_alive_oops_do(OopClosure* blk, KlassClosure* klass_closure, bool must_claim);
static void classes_do(KlassClosure* klass_closure);
static void classes_do(void f(Klass* const));
+ static void loaded_classes_do(KlassClosure* klass_closure);
static void classes_unloading_do(void f(Klass* const));
static bool do_unloading(BoolObjectClosure* is_alive);
@@ -186,6 +187,7 @@ class ClassLoaderData : public CHeapObj<mtClass> {
bool keep_alive() const { return _keep_alive; }
bool is_alive(BoolObjectClosure* is_alive_closure) const;
void classes_do(void f(Klass*));
+ void loaded_classes_do(KlassClosure* klass_closure);
void classes_do(void f(InstanceKlass*));
// Deallocate free list during class unloading.
diff --git a/src/share/vm/classfile/systemDictionary.hpp b/src/share/vm/classfile/systemDictionary.hpp
index 85735a732..c64363645 100644
--- a/src/share/vm/classfile/systemDictionary.hpp
+++ b/src/share/vm/classfile/systemDictionary.hpp
@@ -173,8 +173,6 @@ class SymbolPropertyTable;
/* It's okay if this turns out to be NULL in non-1.4 JDKs. */ \
do_klass(nio_Buffer_klass, java_nio_Buffer, Opt ) \
\
- do_klass(PostVMInitHook_klass, sun_misc_PostVMInitHook, Opt ) \
- \
/* Preload boxing klasses */ \
do_klass(Boolean_klass, java_lang_Boolean, Pre ) \
do_klass(Character_klass, java_lang_Character, Pre ) \
diff --git a/src/share/vm/compiler/compileBroker.cpp b/src/share/vm/compiler/compileBroker.cpp
index bdfe2abd3..fb47df282 100644
--- a/src/share/vm/compiler/compileBroker.cpp
+++ b/src/share/vm/compiler/compileBroker.cpp
@@ -780,6 +780,10 @@ CompilerCounters::CompilerCounters(const char* thread_name, int instance, TRAPS)
void CompileBroker::compilation_init() {
_last_method_compiled[0] = '\0';
+ // No need to initialize compilation system if we do not use it.
+ if (!UseCompiler) {
+ return;
+ }
#ifndef SHARK
// Set the interface to the current compiler(s).
int c1_count = CompilationPolicy::policy()->compiler_count(CompLevel_simple);
diff --git a/src/share/vm/interpreter/abstractInterpreter.hpp b/src/share/vm/interpreter/abstractInterpreter.hpp
index 4d7647ce0..0ebfd1cdf 100644
--- a/src/share/vm/interpreter/abstractInterpreter.hpp
+++ b/src/share/vm/interpreter/abstractInterpreter.hpp
@@ -158,8 +158,8 @@ class AbstractInterpreter: AllStatic {
// Runtime support
// length = invoke bytecode length (to advance to next bytecode)
- static address deopt_entry (TosState state, int length) { ShouldNotReachHere(); return NULL; }
- static address return_entry (TosState state, int length) { ShouldNotReachHere(); return NULL; }
+ static address deopt_entry(TosState state, int length) { ShouldNotReachHere(); return NULL; }
+ static address return_entry(TosState state, int length, Bytecodes::Code code) { ShouldNotReachHere(); return NULL; }
static address rethrow_exception_entry() { return _rethrow_exception_entry; }
diff --git a/src/share/vm/interpreter/cppInterpreter.hpp b/src/share/vm/interpreter/cppInterpreter.hpp
index 4997a4432..71f78840b 100644
--- a/src/share/vm/interpreter/cppInterpreter.hpp
+++ b/src/share/vm/interpreter/cppInterpreter.hpp
@@ -78,7 +78,7 @@ class CppInterpreter: public AbstractInterpreter {
static address stack_result_to_stack(int index) { return _stack_to_stack[index]; }
static address stack_result_to_native(int index) { return _stack_to_native_abi[index]; }
- static address return_entry (TosState state, int length);
+ static address return_entry (TosState state, int length, Bytecodes::Code code);
static address deopt_entry (TosState state, int length);
#ifdef TARGET_ARCH_x86
diff --git a/src/share/vm/interpreter/interpreter.cpp b/src/share/vm/interpreter/interpreter.cpp
index dfd8b5b14..60246e901 100644
--- a/src/share/vm/interpreter/interpreter.cpp
+++ b/src/share/vm/interpreter/interpreter.cpp
@@ -329,15 +329,21 @@ void AbstractInterpreter::print_method_kind(MethodKind kind) {
//------------------------------------------------------------------------------------------------------------------------
// Deoptimization support
-// If deoptimization happens, this function returns the point of next bytecode to continue execution
+/**
+ * If a deoptimization happens, this function returns the point of next bytecode to continue execution.
+ */
address AbstractInterpreter::deopt_continue_after_entry(Method* method, address bcp, int callee_parameters, bool is_top_frame) {
assert(method->contains(bcp), "just checkin'");
- Bytecodes::Code code = Bytecodes::java_code_at(method, bcp);
+
+ // Get the original and rewritten bytecode.
+ Bytecodes::Code code = Bytecodes::java_code_at(method, bcp);
assert(!Interpreter::bytecode_should_reexecute(code), "should not reexecute");
- int bci = method->bci_from(bcp);
- int length = -1; // initial value for debugging
+
+ const int bci = method->bci_from(bcp);
+
// compute continuation length
- length = Bytecodes::length_at(method, bcp);
+ const int length = Bytecodes::length_at(method, bcp);
+
// compute result type
BasicType type = T_ILLEGAL;
@@ -393,7 +399,7 @@ address AbstractInterpreter::deopt_continue_after_entry(Method* method, address bcp, int callee_parameters, bool is_top_frame) {
return
is_top_frame
? Interpreter::deopt_entry (as_TosState(type), length)
- : Interpreter::return_entry(as_TosState(type), length);
+ : Interpreter::return_entry(as_TosState(type), length, code);
}
// If deoptimization happens, this function returns the point where the interpreter reexecutes
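
Note on why return_entry() gains a Bytecodes::Code parameter: after a callee returns, the interpreter's return entry re-reads the call site's constant-pool-cache index to clean up the operand stack, and HotSpot rewrites invokedynamic to carry a four-byte cache index while the other invokes use two bytes; instruction lengths differ as well. A sketch of the relevant constants (per the class-file format and HotSpot's rewriting, with assumed names):

    #include <cstddef>

    enum class Invoke { Virtual, Special, Static, Handle, Interface, Dynamic };

    // Width of the (rewritten) constant-pool-cache index at the call site.
    size_t cp_index_size(Invoke code) {
      return code == Invoke::Dynamic ? 4 : 2;  // u4 for invokedynamic, u2 otherwise
    }

    // Instruction length, i.e. how far bcp advances past the call site.
    int invoke_length(Invoke code) {
      switch (code) {
        case Invoke::Interface:            // opcode, index(2), count, zero
        case Invoke::Dynamic:  return 5;   // opcode, index(2), zero, zero
        default:               return 3;   // opcode, index(2)
      }
    }
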
diff --git a/src/share/vm/interpreter/templateInterpreter.cpp b/src/share/vm/interpreter/templateInterpreter.cpp
index 9f7ed4c7e..e3e89e8ee 100644
--- a/src/share/vm/interpreter/templateInterpreter.cpp
+++ b/src/share/vm/interpreter/templateInterpreter.cpp
@@ -184,8 +184,9 @@ EntryPoint TemplateInterpreter::_deopt_entry [TemplateInterpreter::number_of_deopt_entries];
EntryPoint TemplateInterpreter::_continuation_entry;
EntryPoint TemplateInterpreter::_safept_entry;
-address TemplateInterpreter::_return_3_addrs_by_index[TemplateInterpreter::number_of_return_addrs];
-address TemplateInterpreter::_return_5_addrs_by_index[TemplateInterpreter::number_of_return_addrs];
+address TemplateInterpreter::_invoke_return_entry[TemplateInterpreter::number_of_return_addrs];
+address TemplateInterpreter::_invokeinterface_return_entry[TemplateInterpreter::number_of_return_addrs];
+address TemplateInterpreter::_invokedynamic_return_entry[TemplateInterpreter::number_of_return_addrs];
DispatchTable TemplateInterpreter::_active_table;
DispatchTable TemplateInterpreter::_normal_table;
@@ -237,22 +238,37 @@ void TemplateInterpreterGenerator::generate_all() {
#endif // !PRODUCT
{ CodeletMark cm(_masm, "return entry points");
+ const int index_size = sizeof(u2);
for (int i = 0; i < Interpreter::number_of_return_entries; i++) {
Interpreter::_return_entry[i] =
EntryPoint(
- generate_return_entry_for(itos, i),
- generate_return_entry_for(itos, i),
- generate_return_entry_for(itos, i),
- generate_return_entry_for(atos, i),
- generate_return_entry_for(itos, i),
- generate_return_entry_for(ltos, i),
- generate_return_entry_for(ftos, i),
- generate_return_entry_for(dtos, i),
- generate_return_entry_for(vtos, i)
+ generate_return_entry_for(itos, i, index_size),
+ generate_return_entry_for(itos, i, index_size),
+ generate_return_entry_for(itos, i, index_size),
+ generate_return_entry_for(atos, i, index_size),
+ generate_return_entry_for(itos, i, index_size),
+ generate_return_entry_for(ltos, i, index_size),
+ generate_return_entry_for(ftos, i, index_size),
+ generate_return_entry_for(dtos, i, index_size),
+ generate_return_entry_for(vtos, i, index_size)
);
}
}
+ { CodeletMark cm(_masm, "invoke return entry points");
+ const TosState states[] = {itos, itos, itos, itos, ltos, ftos, dtos, atos, vtos};
+ const int invoke_length = Bytecodes::length_for(Bytecodes::_invokestatic);
+ const int invokeinterface_length = Bytecodes::length_for(Bytecodes::_invokeinterface);
+ const int invokedynamic_length = Bytecodes::length_for(Bytecodes::_invokedynamic);
+
+ for (int i = 0; i < Interpreter::number_of_return_addrs; i++) {
+ TosState state = states[i];
+ Interpreter::_invoke_return_entry[i] = generate_return_entry_for(state, invoke_length, sizeof(u2));
+ Interpreter::_invokeinterface_return_entry[i] = generate_return_entry_for(state, invokeinterface_length, sizeof(u2));
+ Interpreter::_invokedynamic_return_entry[i] = generate_return_entry_for(state, invokedynamic_length, sizeof(u4));
+ }
+ }
+
{ CodeletMark cm(_masm, "earlyret entry points");
Interpreter::_earlyret_entry =
EntryPoint(
@@ -298,13 +314,6 @@ void TemplateInterpreterGenerator::generate_all() {
}
}
- for (int j = 0; j < number_of_states; j++) {
- const TosState states[] = {btos, ctos, stos, itos, ltos, ftos, dtos, atos, vtos};
- int index = Interpreter::TosState_as_index(states[j]);
- Interpreter::_return_3_addrs_by_index[index] = Interpreter::return_entry(states[j], 3);
- Interpreter::_return_5_addrs_by_index[index] = Interpreter::return_entry(states[j], 5);
- }
-
{ CodeletMark cm(_masm, "continuation entry points");
Interpreter::_continuation_entry =
EntryPoint(
@@ -534,9 +543,46 @@ void TemplateInterpreterGenerator::generate_and_dispatch(Template* t, TosState tos_out) {
//------------------------------------------------------------------------------------------------------------------------
// Entry points
-address TemplateInterpreter::return_entry(TosState state, int length) {
+/**
+ * Returns the return entry table for the given invoke bytecode.
+ */
+address* TemplateInterpreter::invoke_return_entry_table_for(Bytecodes::Code code) {
+ switch (code) {
+ case Bytecodes::_invokestatic:
+ case Bytecodes::_invokespecial:
+ case Bytecodes::_invokevirtual:
+ case Bytecodes::_invokehandle:
+ return Interpreter::invoke_return_entry_table();
+ case Bytecodes::_invokeinterface:
+ return Interpreter::invokeinterface_return_entry_table();
+ case Bytecodes::_invokedynamic:
+ return Interpreter::invokedynamic_return_entry_table();
+ default:
+ fatal(err_msg("invalid bytecode: %s", Bytecodes::name(code)));
+ return NULL;
+ }
+}
+
+/**
+ * Returns the return entry address for the given top-of-stack state and bytecode.
+ */
+address TemplateInterpreter::return_entry(TosState state, int length, Bytecodes::Code code) {
guarantee(0 <= length && length < Interpreter::number_of_return_entries, "illegal length");
- return _return_entry[length].entry(state);
+ const int index = TosState_as_index(state);
+ switch (code) {
+ case Bytecodes::_invokestatic:
+ case Bytecodes::_invokespecial:
+ case Bytecodes::_invokevirtual:
+ case Bytecodes::_invokehandle:
+ return _invoke_return_entry[index];
+ case Bytecodes::_invokeinterface:
+ return _invokeinterface_return_entry[index];
+ case Bytecodes::_invokedynamic:
+ return _invokedynamic_return_entry[index];
+ default:
+ assert(!Bytecodes::is_invoke(code), err_msg("invoke instructions should be handled separately: %s", Bytecodes::name(code)));
+ return _return_entry[length].entry(state);
+ }
}
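
Note: the removed _return_3_addrs_by_index/_return_5_addrs_by_index tables were keyed on hard-coded bytecode lengths 3 and 5, which lumped invokeinterface and invokedynamic together (both length 5) even though their index sizes differ. The replacement keys on the invoke kind instead. A reduced model of the table selection, with illustrative declarations:

    typedef unsigned char* address;
    const int number_of_return_addrs = 9;  // one slot per TosState

    // One table per invoke kind; entries would be generated code addresses.
    address invoke_return_entry[number_of_return_addrs];
    address invokeinterface_return_entry[number_of_return_addrs];
    address invokedynamic_return_entry[number_of_return_addrs];

    enum class Invoke { Virtual, Special, Static, Handle, Interface, Dynamic };

    address* invoke_return_entry_table_for(Invoke code) {
      switch (code) {
        case Invoke::Interface: return invokeinterface_return_entry;
        case Invoke::Dynamic:   return invokedynamic_return_entry;
        default:                return invoke_return_entry;  // all u2-index invokes
      }
    }
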
diff --git a/src/share/vm/interpreter/templateInterpreter.hpp b/src/share/vm/interpreter/templateInterpreter.hpp
index 43fe4bdb1..838e2e084 100644
--- a/src/share/vm/interpreter/templateInterpreter.hpp
+++ b/src/share/vm/interpreter/templateInterpreter.hpp
@@ -120,8 +120,9 @@ class TemplateInterpreter: public AbstractInterpreter {
static EntryPoint _continuation_entry;
static EntryPoint _safept_entry;
- static address _return_3_addrs_by_index[number_of_return_addrs]; // for invokevirtual return entries
- static address _return_5_addrs_by_index[number_of_return_addrs]; // for invokeinterface return entries
+ static address _invoke_return_entry[number_of_return_addrs]; // for invokestatic, invokespecial, invokevirtual return entries
+ static address _invokeinterface_return_entry[number_of_return_addrs]; // for invokeinterface return entries
+ static address _invokedynamic_return_entry[number_of_return_addrs]; // for invokedynamic return entries
static DispatchTable _active_table; // the active dispatch table (used by the interpreter for dispatch)
static DispatchTable _normal_table; // the normal dispatch table (used to set the active table in normal mode)
@@ -161,12 +162,15 @@ class TemplateInterpreter: public AbstractInterpreter {
static address* normal_table() { return _normal_table.table_for(); }
// Support for invokes
- static address* return_3_addrs_by_index_table() { return _return_3_addrs_by_index; }
- static address* return_5_addrs_by_index_table() { return _return_5_addrs_by_index; }
- static int TosState_as_index(TosState state); // computes index into return_3_entry_by_index table
+ static address* invoke_return_entry_table() { return _invoke_return_entry; }
+ static address* invokeinterface_return_entry_table() { return _invokeinterface_return_entry; }
+ static address* invokedynamic_return_entry_table() { return _invokedynamic_return_entry; }
+ static int TosState_as_index(TosState state);
- static address return_entry (TosState state, int length);
- static address deopt_entry (TosState state, int length);
+ static address* invoke_return_entry_table_for(Bytecodes::Code code);
+
+ static address deopt_entry(TosState state, int length);
+ static address return_entry(TosState state, int length, Bytecodes::Code code);
// Safepoint support
static void notice_safepoints(); // stops the thread when reaching a safepoint
diff --git a/src/share/vm/interpreter/templateInterpreterGenerator.hpp b/src/share/vm/interpreter/templateInterpreterGenerator.hpp
index fb7bdc5b6..a80caa964 100644
--- a/src/share/vm/interpreter/templateInterpreterGenerator.hpp
+++ b/src/share/vm/interpreter/templateInterpreterGenerator.hpp
@@ -53,7 +53,7 @@ class TemplateInterpreterGenerator: public AbstractInterpreterGenerator {
address generate_ClassCastException_handler();
address generate_ArrayIndexOutOfBounds_handler(const char* name);
address generate_continuation_for(TosState state);
- address generate_return_entry_for(TosState state, int step);
+ address generate_return_entry_for(TosState state, int step, size_t index_size);
address generate_earlyret_entry_for(TosState state);
address generate_deopt_entry_for(TosState state, int step);
address generate_safept_entry_for(TosState state, address runtime_entry);
diff --git a/src/share/vm/memory/metaspace.cpp b/src/share/vm/memory/metaspace.cpp
index 187796740..8df4e32c3 100644
--- a/src/share/vm/memory/metaspace.cpp
+++ b/src/share/vm/memory/metaspace.cpp
@@ -56,7 +56,7 @@ size_t const allocation_from_dictionary_limit = 4 * K;
MetaWord* last_allocated = 0;
-size_t Metaspace::_class_metaspace_size;
+size_t Metaspace::_compressed_class_space_size;
// Used in declarations in SpaceManager and ChunkManager
enum ChunkIndex {
@@ -2843,6 +2843,8 @@ ChunkManager* Metaspace::_chunk_manager_class = NULL;
#define VIRTUALSPACEMULTIPLIER 2
#ifdef _LP64
+static const uint64_t UnscaledClassSpaceMax = (uint64_t(max_juint) + 1);
+
void Metaspace::set_narrow_klass_base_and_shift(address metaspace_base, address cds_base) {
// Figure out the narrow_klass_base and the narrow_klass_shift. The
// narrow_klass_base is the lower of the metaspace base and the cds base
@@ -2852,14 +2854,22 @@ void Metaspace::set_narrow_klass_base_and_shift(address metaspace_base, address cds_base) {
address higher_address;
if (UseSharedSpaces) {
higher_address = MAX2((address)(cds_base + FileMapInfo::shared_spaces_size()),
- (address)(metaspace_base + class_metaspace_size()));
+ (address)(metaspace_base + compressed_class_space_size()));
lower_base = MIN2(metaspace_base, cds_base);
} else {
- higher_address = metaspace_base + class_metaspace_size();
+ higher_address = metaspace_base + compressed_class_space_size();
lower_base = metaspace_base;
+
+ uint64_t klass_encoding_max = UnscaledClassSpaceMax << LogKlassAlignmentInBytes;
+ // If compressed class space fits in lower 32G, we don't need a base.
+ if (higher_address <= (address)klass_encoding_max) {
+ lower_base = 0; // effectively lower base is zero.
+ }
}
+
Universe::set_narrow_klass_base(lower_base);
- if ((uint64_t)(higher_address - lower_base) < (uint64_t)max_juint) {
+
+ if ((uint64_t)(higher_address - lower_base) < UnscaledClassSpaceMax) {
Universe::set_narrow_klass_shift(0);
} else {
assert(!UseSharedSpaces, "Cannot shift with UseSharedSpaces");
@@ -2874,24 +2884,24 @@ bool Metaspace::can_use_cds_with_metaspace_addr(char* metaspace_base, address cds_base) {
assert(UseCompressedClassPointers, "Only use with CompressedKlassPtrs");
address lower_base = MIN2((address)metaspace_base, cds_base);
address higher_address = MAX2((address)(cds_base + FileMapInfo::shared_spaces_size()),
- (address)(metaspace_base + class_metaspace_size()));
- return ((uint64_t)(higher_address - lower_base) < (uint64_t)max_juint);
+ (address)(metaspace_base + compressed_class_space_size()));
+ return ((uint64_t)(higher_address - lower_base) < UnscaledClassSpaceMax);
}
// Try to allocate the metaspace at the requested addr.
void Metaspace::allocate_metaspace_compressed_klass_ptrs(char* requested_addr, address cds_base) {
assert(using_class_space(), "called improperly");
assert(UseCompressedClassPointers, "Only use with CompressedKlassPtrs");
- assert(class_metaspace_size() < KlassEncodingMetaspaceMax,
+ assert(compressed_class_space_size() < KlassEncodingMetaspaceMax,
"Metaspace size is too big");
- assert_is_ptr_aligned(requested_addr, _reserve_alignment);
- assert_is_ptr_aligned(cds_base, _reserve_alignment);
- assert_is_size_aligned(class_metaspace_size(), _reserve_alignment);
+ assert_is_ptr_aligned(requested_addr, _reserve_alignment);
+ assert_is_ptr_aligned(cds_base, _reserve_alignment);
+ assert_is_size_aligned(compressed_class_space_size(), _reserve_alignment);
// Don't use large pages for the class space.
bool large_pages = false;
- ReservedSpace metaspace_rs = ReservedSpace(class_metaspace_size(),
+ ReservedSpace metaspace_rs = ReservedSpace(compressed_class_space_size(),
_reserve_alignment,
large_pages,
requested_addr, 0);
@@ -2906,7 +2916,7 @@ void Metaspace::allocate_metaspace_compressed_klass_ptrs(char* requested_addr, address cds_base) {
while (!metaspace_rs.is_reserved() && (addr + increment > addr) &&
can_use_cds_with_metaspace_addr(addr + increment, cds_base)) {
addr = addr + increment;
- metaspace_rs = ReservedSpace(class_metaspace_size(),
+ metaspace_rs = ReservedSpace(compressed_class_space_size(),
_reserve_alignment, large_pages, addr, 0);
}
}
@@ -2917,11 +2927,11 @@ void Metaspace::allocate_metaspace_compressed_klass_ptrs(char* requested_addr, address cds_base) {
// initialization has happened that depends on UseCompressedClassPointers.
// So, UseCompressedClassPointers cannot be turned off at this point.
if (!metaspace_rs.is_reserved()) {
- metaspace_rs = ReservedSpace(class_metaspace_size(),
+ metaspace_rs = ReservedSpace(compressed_class_space_size(),
_reserve_alignment, large_pages);
if (!metaspace_rs.is_reserved()) {
vm_exit_during_initialization(err_msg("Could not allocate metaspace: %d bytes",
- class_metaspace_size()));
+ compressed_class_space_size()));
}
}
}
@@ -2943,8 +2953,8 @@ void Metaspace::allocate_metaspace_compressed_klass_ptrs(char* requested_addr, address cds_base) {
if (PrintCompressedOopsMode || (PrintMiscellaneous && Verbose)) {
gclog_or_tty->print_cr("Narrow klass base: " PTR_FORMAT ", Narrow klass shift: " SIZE_FORMAT,
Universe::narrow_klass_base(), Universe::narrow_klass_shift());
- gclog_or_tty->print_cr("Metaspace Size: " SIZE_FORMAT " Address: " PTR_FORMAT " Req Addr: " PTR_FORMAT,
- class_metaspace_size(), metaspace_rs.base(), requested_addr);
+ gclog_or_tty->print_cr("Compressed class space size: " SIZE_FORMAT " Address: " PTR_FORMAT " Req Addr: " PTR_FORMAT,
+ compressed_class_space_size(), metaspace_rs.base(), requested_addr);
}
}
@@ -3010,7 +3020,7 @@ void Metaspace::ergo_initialize() {
MaxMetaspaceExpansion = restricted_align_down(MaxMetaspaceExpansion, _commit_alignment);
CompressedClassSpaceSize = restricted_align_down(CompressedClassSpaceSize, _reserve_alignment);
- set_class_metaspace_size(CompressedClassSpaceSize);
+ set_compressed_class_space_size(CompressedClassSpaceSize);
}
void Metaspace::global_initialize() {
@@ -3039,12 +3049,12 @@ void Metaspace::global_initialize() {
}
#ifdef _LP64
- if (cds_total + class_metaspace_size() > (uint64_t)max_juint) {
+ if (cds_total + compressed_class_space_size() > UnscaledClassSpaceMax) {
vm_exit_during_initialization("Unable to dump shared archive.",
err_msg("Size of archive (" SIZE_FORMAT ") + compressed class space ("
SIZE_FORMAT ") == total (" SIZE_FORMAT ") is larger than compressed "
- "klass limit: " SIZE_FORMAT, cds_total, class_metaspace_size(),
- cds_total + class_metaspace_size(), (size_t)max_juint));
+ "klass limit: " SIZE_FORMAT, cds_total, compressed_class_space_size(),
+ cds_total + compressed_class_space_size(), UnscaledClassSpaceMax));
}
// Set the compressed klass pointer base so that decoding of these pointers works
@@ -3092,7 +3102,8 @@ void Metaspace::global_initialize() {
cds_end = (char *)align_ptr_up(cds_end, _reserve_alignment);
allocate_metaspace_compressed_klass_ptrs(cds_end, cds_address);
} else {
- allocate_metaspace_compressed_klass_ptrs((char *)CompressedKlassPointersBase, 0);
+ char* base = (char*)align_ptr_up(Universe::heap()->reserved_region().end(), _reserve_alignment);
+ allocate_metaspace_compressed_klass_ptrs(base, 0);
}
}
#endif
@@ -3354,6 +3365,11 @@ MetaWord* Metaspace::allocate(ClassLoaderData* loader_data, size_t word_size, bool read_only, MetaspaceObj::Type type, TRAPS) {
return result;
}
+size_t Metaspace::class_chunk_size(size_t word_size) {
+ assert(using_class_space(), "Has to use class space");
+ return class_vsm()->calc_chunk_size(word_size);
+}
+
void Metaspace::report_metadata_oome(ClassLoaderData* loader_data, size_t word_size, MetadataType mdtype, TRAPS) {
// If result is still null, we are out of memory.
if (Verbose && TraceMetadataChunkAllocation) {
@@ -3365,9 +3381,19 @@ void Metaspace::report_metadata_oome(ClassLoaderData* loader_data, size_t word_size, MetadataType mdtype, TRAPS) {
MetaspaceAux::dump(gclog_or_tty);
}
+ bool out_of_compressed_class_space = false;
+ if (is_class_space_allocation(mdtype)) {
+ Metaspace* metaspace = loader_data->metaspace_non_null();
+ out_of_compressed_class_space =
+ MetaspaceAux::committed_bytes(Metaspace::ClassType) +
+ (metaspace->class_chunk_size(word_size) * BytesPerWord) >
+ CompressedClassSpaceSize;
+ }
+
// -XX:+HeapDumpOnOutOfMemoryError and -XX:OnOutOfMemoryError support
- const char* space_string = is_class_space_allocation(mdtype) ? "Compressed class space" :
- "Metadata space";
+ const char* space_string = out_of_compressed_class_space ?
+ "Compressed class space" : "Metaspace";
+
report_java_out_of_memory(space_string);
if (JvmtiExport::should_post_resource_exhausted()) {
@@ -3380,7 +3406,7 @@ void Metaspace::report_metadata_oome(ClassLoaderData* loader_data, size_t word_size, MetadataType mdtype, TRAPS) {
vm_exit_during_initialization("OutOfMemoryError", space_string);
}
- if (is_class_space_allocation(mdtype)) {
+ if (out_of_compressed_class_space) {
THROW_OOP(Universe::out_of_memory_error_class_metaspace());
} else {
THROW_OOP(Universe::out_of_memory_error_metaspace());
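
Note on set_narrow_klass_base_and_shift: alongside the rename to compressed_class_space_size, this diff makes class-pointer decoding zero-based when possible. If the whole class space ends below 32GB (the 4GB unscaled range shifted by the 3-bit klass alignment), the base can be zero, and a shift is only needed when the spanned range exceeds 4GB. A self-contained sketch of that decision, simplified from the hunk above (the real code also handles UseSharedSpaces):

    #include <cstdint>
    #include <cstdio>

    const uint64_t UnscaledClassSpaceMax = uint64_t(UINT32_MAX) + 1;  // 4GB
    const int LogKlassAlignmentInBytes = 3;  // Klass* are 8-byte aligned

    struct NarrowKlassEncoding { uint64_t base; int shift; };

    NarrowKlassEncoding choose(uint64_t lower_base, uint64_t higher_address) {
      // If the class space fits below 32GB, decode with no base at all.
      if (higher_address <= (UnscaledClassSpaceMax << LogKlassAlignmentInBytes)) {
        lower_base = 0;
      }
      // A 32-bit narrow value spans 4GB unshifted; shifting buys 8x more reach.
      int shift = (higher_address - lower_base < UnscaledClassSpaceMax)
                      ? 0 : LogKlassAlignmentInBytes;
      return NarrowKlassEncoding{lower_base, shift};
    }

    int main() {
      NarrowKlassEncoding e = choose(0x7c0000000ULL, 0x7ffffffffULL);  // < 32GB
      printf("base=0x%llx shift=%d\n", (unsigned long long)e.base, e.shift);
      return 0;
    }
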
diff --git a/src/share/vm/memory/metaspace.hpp b/src/share/vm/memory/metaspace.hpp
index 974a1aac6..c22d21707 100644
--- a/src/share/vm/memory/metaspace.hpp
+++ b/src/share/vm/memory/metaspace.hpp
@@ -115,13 +115,13 @@ class Metaspace : public CHeapObj<mtClass> {
static size_t align_word_size_up(size_t);
// Aligned size of the metaspace.
- static size_t _class_metaspace_size;
+ static size_t _compressed_class_space_size;
- static size_t class_metaspace_size() {
- return _class_metaspace_size;
+ static size_t compressed_class_space_size() {
+ return _compressed_class_space_size;
}
- static void set_class_metaspace_size(size_t metaspace_size) {
- _class_metaspace_size = metaspace_size;
+ static void set_compressed_class_space_size(size_t size) {
+ _compressed_class_space_size = size;
}
static size_t _first_chunk_word_size;
@@ -192,6 +192,8 @@ class Metaspace : public CHeapObj<mtClass> {
AllocRecord * _alloc_record_head;
AllocRecord * _alloc_record_tail;
+ size_t class_chunk_size(size_t word_size);
+
public:
Metaspace(Mutex* lock, MetaspaceType type);
@@ -252,6 +254,7 @@ class Metaspace : public CHeapObj<mtClass> {
static bool is_class_space_allocation(MetadataType mdType) {
return mdType == ClassType && using_class_space();
}
+
};
class MetaspaceAux : AllStatic {
diff --git a/src/share/vm/memory/universe.cpp b/src/share/vm/memory/universe.cpp
index 1f632ae47..043962c63 100644
--- a/src/share/vm/memory/universe.cpp
+++ b/src/share/vm/memory/universe.cpp
@@ -677,13 +677,13 @@ jint universe_init() {
// HeapBased - Use compressed oops with heap base + encoding.
// 4Gb
-static const uint64_t NarrowOopHeapMax = (uint64_t(max_juint) + 1);
+static const uint64_t UnscaledOopHeapMax = (uint64_t(max_juint) + 1);
// 32Gb
-// OopEncodingHeapMax == NarrowOopHeapMax << LogMinObjAlignmentInBytes;
+// OopEncodingHeapMax == UnscaledOopHeapMax << LogMinObjAlignmentInBytes;
char* Universe::preferred_heap_base(size_t heap_size, size_t alignment, NARROW_OOP_MODE mode) {
assert(is_size_aligned((size_t)OopEncodingHeapMax, alignment), "Must be");
- assert(is_size_aligned((size_t)NarrowOopHeapMax, alignment), "Must be");
+ assert(is_size_aligned((size_t)UnscaledOopHeapMax, alignment), "Must be");
assert(is_size_aligned(heap_size, alignment), "Must be");
uintx heap_base_min_address_aligned = align_size_up(HeapBaseMinAddress, alignment);
@@ -702,20 +702,40 @@ char* Universe::preferred_heap_base(size_t heap_size, size_t alignment, NARROW_OOP_MODE mode) {
// If the total size is small enough to allow UnscaledNarrowOop then
// just use UnscaledNarrowOop.
} else if ((total_size <= OopEncodingHeapMax) && (mode != HeapBasedNarrowOop)) {
- if ((total_size <= NarrowOopHeapMax) && (mode == UnscaledNarrowOop) &&
+ if ((total_size <= UnscaledOopHeapMax) && (mode == UnscaledNarrowOop) &&
(Universe::narrow_oop_shift() == 0)) {
// Use 32-bits oops without encoding and
// place heap's top on the 4Gb boundary
- base = (NarrowOopHeapMax - heap_size);
+ base = (UnscaledOopHeapMax - heap_size);
} else {
// Can't reserve with NarrowOopShift == 0
Universe::set_narrow_oop_shift(LogMinObjAlignmentInBytes);
+
if (mode == UnscaledNarrowOop ||
- mode == ZeroBasedNarrowOop && total_size <= NarrowOopHeapMax) {
+ mode == ZeroBasedNarrowOop && total_size <= UnscaledOopHeapMax) {
+
// Use zero based compressed oops with encoding and
// place heap's top on the 32Gb boundary in case
// total_size > 4Gb or failed to reserve below 4Gb.
- base = (OopEncodingHeapMax - heap_size);
+ uint64_t heap_top = OopEncodingHeapMax;
+
+ // For small heaps, save some space for compressed class pointer
+ // space so it can be decoded with no base.
+ if (UseCompressedClassPointers && !UseSharedSpaces &&
+ OopEncodingHeapMax <= 32*G) {
+
+ uint64_t class_space = align_size_up(CompressedClassSpaceSize, alignment);
+ assert(is_size_aligned((size_t)OopEncodingHeapMax-class_space,
+ alignment), "difference must be aligned too");
+ uint64_t new_top = OopEncodingHeapMax-class_space;
+
+ if (total_size <= new_top) {
+ heap_top = new_top;
+ }
+ }
+
+ // Align base to the adjusted top of the heap
+ base = heap_top - heap_size;
}
}
} else {
@@ -737,7 +757,7 @@ char* Universe::preferred_heap_base(size_t heap_size, size_t alignment, NARROW_OOP_MODE mode) {
// Set to a non-NULL value so the ReservedSpace ctor computes
// the correct no-access prefix.
// The final value will be set in initialize_heap() below.
- Universe::set_narrow_oop_base((address)NarrowOopHeapMax);
+ Universe::set_narrow_oop_base((address)UnscaledOopHeapMax);
#ifdef _WIN64
if (UseLargePages) {
// Cannot allocate guard pages for implicit checks in indexed
@@ -833,7 +853,7 @@ jint Universe::initialize_heap() {
Universe::set_narrow_oop_use_implicit_null_checks(true);
}
#endif // _WIN64
- if((uint64_t)Universe::heap()->reserved_region().end() > NarrowOopHeapMax) {
+ if((uint64_t)Universe::heap()->reserved_region().end() > UnscaledOopHeapMax) {
// Can't reserve heap below 4Gb.
Universe::set_narrow_oop_shift(LogMinObjAlignmentInBytes);
} else {
@@ -1029,7 +1049,7 @@ bool universe_post_init() {
Handle msg = java_lang_String::create_from_str("Java heap space", CHECK_false);
java_lang_Throwable::set_message(Universe::_out_of_memory_error_java_heap, msg());
- msg = java_lang_String::create_from_str("Metadata space", CHECK_false);
+ msg = java_lang_String::create_from_str("Metaspace", CHECK_false);
java_lang_Throwable::set_message(Universe::_out_of_memory_error_metaspace, msg());
msg = java_lang_String::create_from_str("Compressed class space", CHECK_false);
java_lang_Throwable::set_message(Universe::_out_of_memory_error_class_metaspace, msg());
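
Note on the preferred_heap_base change: with zero-based compressed oops the heap used to be placed right up against the 32GB boundary, leaving no room under it for a zero-based compressed class space. The hunk above lowers the heap top by the (aligned) CompressedClassSpaceSize when the heap still fits, so both oops and class pointers can decode without a base register. A small sketch of the adjustment, names assumed:

    #include <cstdint>

    // Simplified from the hunk above; parameters are illustrative.
    uint64_t adjusted_heap_top(uint64_t oop_encoding_heap_max,  // 32GB boundary
                               uint64_t class_space_size,       // aligned class space
                               uint64_t total_size) {           // heap plus slack
      uint64_t new_top = oop_encoding_heap_max - class_space_size;
      // Only shrink if the heap still fits; otherwise keep the full 32GB top.
      return (total_size <= new_top) ? new_top : oop_encoding_heap_max;
    }
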
diff --git a/src/share/vm/oops/instanceKlass.cpp b/src/share/vm/oops/instanceKlass.cpp
index a8fa7b3ee..510102d4e 100644
--- a/src/share/vm/oops/instanceKlass.cpp
+++ b/src/share/vm/oops/instanceKlass.cpp
@@ -2393,15 +2393,38 @@ address InstanceKlass::static_field_addr(int offset) {
const char* InstanceKlass::signature_name() const {
+ int hash_len = 0;
+ char hash_buf[40];
+
+ // If this is an anonymous class, append a hash to make the name unique
+ if (is_anonymous()) {
+ assert(EnableInvokeDynamic, "EnableInvokeDynamic was not set.");
+ intptr_t hash = (java_mirror() != NULL) ? java_mirror()->identity_hash() : 0;
+ sprintf(hash_buf, "/" UINTX_FORMAT, (uintx)hash);
+ hash_len = (int)strlen(hash_buf);
+ }
+
+ // Get the internal name as a c string
const char* src = (const char*) (name()->as_C_string());
const int src_length = (int)strlen(src);
- char* dest = NEW_RESOURCE_ARRAY(char, src_length + 3);
- int src_index = 0;
+
+ char* dest = NEW_RESOURCE_ARRAY(char, src_length + hash_len + 3);
+
+ // Add L as type indicator
int dest_index = 0;
dest[dest_index++] = 'L';
- while (src_index < src_length) {
+
+ // Add the actual class name
+ for (int src_index = 0; src_index < src_length; ) {
dest[dest_index++] = src[src_index++];
}
+
+ // If we have a hash, append it
+ for (int hash_index = 0; hash_index < hash_len; ) {
+ dest[dest_index++] = hash_buf[hash_index++];
+ }
+
+ // Add the semicolon and the NULL
dest[dest_index++] = ';';
dest[dest_index] = '\0';
return dest;
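
Note: the rewritten signature_name() yields "L<internal-name>/<hash>;" for VM-anonymous classes, using the mirror's identity hash so two anonymous classes with the same internal name stay distinguishable; ordinary classes keep the plain "L<internal-name>;" form. A simplified model of the output format (resource allocation omitted; hash stands in for java_mirror()->identity_hash()):

    #include <cstdio>
    #include <string>

    std::string signature_name(const std::string& internal_name,
                               bool is_anonymous, unsigned long hash) {
      char hash_buf[40] = "";
      if (is_anonymous) {
        snprintf(hash_buf, sizeof(hash_buf), "/%lu", hash);  // uniquifying suffix
      }
      return "L" + internal_name + hash_buf + ";";
    }

    int main() {
      printf("%s\n", signature_name("java/lang/String", false, 0).c_str());
      printf("%s\n", signature_name("LambdaHolder", true, 4660).c_str());
      return 0;
    }
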
diff --git a/src/share/vm/oops/method.cpp b/src/share/vm/oops/method.cpp
index c0b4a97fc..7c292c3ff 100644
--- a/src/share/vm/oops/method.cpp
+++ b/src/share/vm/oops/method.cpp
@@ -1515,7 +1515,10 @@ Bytecodes::Code Method::orig_bytecode_at(int bci) const {
return bp->orig_bytecode();
}
}
- ShouldNotReachHere();
+ {
+ ResourceMark rm;
+ fatal(err_msg("no original bytecode found in %s at bci %d", name_and_sig_as_C_string(), bci));
+ }
return Bytecodes::_shouldnotreachhere;
}
diff --git a/src/share/vm/opto/library_call.cpp b/src/share/vm/opto/library_call.cpp
index 6a7ee6288..df6c29f57 100644
--- a/src/share/vm/opto/library_call.cpp
+++ b/src/share/vm/opto/library_call.cpp
@@ -2006,9 +2006,9 @@ bool LibraryCallKit::inline_math_addExactI(bool is_increment) {
Node* arg2 = NULL;
if (is_increment) {
- arg2 = intcon(1);
+ arg2 = intcon(1);
} else {
- arg2 = argument(1);
+ arg2 = argument(1);
}
Node* add = _gvn.transform( new(C) AddExactINode(NULL, arg1, arg2) );
@@ -2056,7 +2056,7 @@ bool LibraryCallKit::inline_math_subtractExactL(bool is_decrement) {
if (is_decrement) {
arg2 = longcon(1);
} else {
- Node* arg2 = argument(2); // type long
+ arg2 = argument(2); // type long
// argument(3) == TOP
}
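
Note: the inline_math_subtractExactL hunk is a real bug fix, not a cleanup. The removed line declared a second Node* arg2 inside the else branch, shadowing the outer arg2, which therefore stayed NULL when the node was built. A minimal reproduction of the shadowing pattern:

    #include <cstdio>

    static int g = 2;
    int* argument(int) { return &g; }  // stands in for LibraryCallKit::argument

    int main() {
      int* arg2 = nullptr;
      bool is_decrement = false;
      if (is_decrement) {
        // arg2 = longcon(1) in the original
      } else {
        int* arg2 = argument(2);  // BUG: declares a new arg2; outer stays null
        (void)arg2;
      }
      printf("outer arg2 = %p\n", (void*)arg2);  // null: the fix drops the "Node*"
      return 0;
    }
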
diff --git a/src/share/vm/opto/loopTransform.cpp b/src/share/vm/opto/loopTransform.cpp
index 41c0a9d41..55a533bc3 100644
--- a/src/share/vm/opto/loopTransform.cpp
+++ b/src/share/vm/opto/loopTransform.cpp
@@ -713,6 +713,10 @@ bool IdealLoopTree::policy_unroll( PhaseIdealLoop *phase ) const {
case Op_ModL: body_size += 30; break;
case Op_DivL: body_size += 30; break;
case Op_MulL: body_size += 10; break;
+ case Op_FlagsProj:
+ // Can't handle unrolling of loops containing
+ // nodes that generate a FlagsProj at the moment
+ return false;
case Op_StrComp:
case Op_StrEquals:
case Op_StrIndexOf:
diff --git a/src/share/vm/opto/postaloc.cpp b/src/share/vm/opto/postaloc.cpp
index 76de2ed16..2ad809d1a 100644
--- a/src/share/vm/opto/postaloc.cpp
+++ b/src/share/vm/opto/postaloc.cpp
@@ -97,7 +97,8 @@ int PhaseChaitin::yank( Node *old, Block *current_block, Node_List *value, Node_List *regnd ) {
static bool expected_yanked_node(Node *old, Node *orig_old) {
// This code is expected only next original nodes:
// - load from constant table node which may have next data input nodes:
- // MachConstantBase, Phi, MachTemp, MachSpillCopy
+ // MachConstantBase, MachTemp, MachSpillCopy
+ // - Phi nodes that are considered Junk
// - load constant node which may have next data input nodes:
// MachTemp, MachSpillCopy
// - MachSpillCopy
@@ -112,7 +113,9 @@ static bool expected_yanked_node(Node *old, Node *orig_old) {
return (old == orig_old);
} else if (old->is_MachTemp()) {
return orig_old->is_Con();
- } else if (old->is_Phi() || old->is_MachConstantBase()) {
+ } else if (old->is_Phi()) { // Junk phi's
+ return true;
+ } else if (old->is_MachConstantBase()) {
return (orig_old->is_Con() && orig_old->is_MachConstant());
}
return false;
@@ -522,11 +525,9 @@ void PhaseChaitin::post_allocate_copy_removal() {
u = u ? NodeSentinel : x; // Capture unique input, or NodeSentinel for 2nd input
}
if (u != NodeSentinel) { // Junk Phi. Remove
- block->remove_node(j--);
- phi_dex--;
- _cfg.unmap_node_from_block(phi);
phi->replace_by(u);
- phi->disconnect_inputs(NULL, C);
+ j -= yank_if_dead(phi, block, &value, &regnd);
+ phi_dex--;
continue;
}
// Note that if value[pidx] exists, then we merged no new values here
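
Note: the "junk phi" in question is one whose inputs (ignoring self-references) all collapse to a single value, detected via the NodeSentinel idiom visible above; the hunk routes its removal through yank_if_dead so expected_yanked_node now has to accept Phi nodes. A compact sketch of the junk-phi test:

    #include <cstddef>
    #include <vector>

    struct Node { std::vector<Node*> in; };  // in[0] is the control input

    // Returns the unique data input if the phi is "junk", else nullptr.
    Node* junk_phi_value(Node* phi) {
      Node* const sentinel = reinterpret_cast<Node*>(-1);
      Node* u = nullptr;
      for (size_t k = 1; k < phi->in.size(); k++) {
        Node* x = phi->in[k];
        if (x != u && x != phi) {             // skip duplicates and self-loops
          u = (u == nullptr) ? x : sentinel;  // second distinct input disqualifies
        }
      }
      return (u == sentinel) ? nullptr : u;
    }
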
diff --git a/src/share/vm/opto/type.cpp b/src/share/vm/opto/type.cpp
index faf155516..3f628195f 100644
--- a/src/share/vm/opto/type.cpp
+++ b/src/share/vm/opto/type.cpp
@@ -2787,13 +2787,11 @@ intptr_t TypeOopPtr::get_con() const {
//-----------------------------filter------------------------------------------
// Do not allow interface-vs.-noninterface joins to collapse to top.
-const Type *TypeOopPtr::filter( const Type *kills ) const {
+const Type *TypeOopPtr::filter(const Type *kills) const {
const Type* ft = join(kills);
const TypeInstPtr* ftip = ft->isa_instptr();
const TypeInstPtr* ktip = kills->isa_instptr();
- const TypeKlassPtr* ftkp = ft->isa_klassptr();
- const TypeKlassPtr* ktkp = kills->isa_klassptr();
if (ft->empty()) {
// Check for evil case of 'this' being a class and 'kills' expecting an
@@ -2807,8 +2805,6 @@ const Type *TypeOopPtr::filter( const Type *kills ) const {
// uplift the type.
if (!empty() && ktip != NULL && ktip->is_loaded() && ktip->klass()->is_interface())
return kills; // Uplift to interface
- if (!empty() && ktkp != NULL && ktkp->klass()->is_loaded() && ktkp->klass()->is_interface())
- return kills; // Uplift to interface
return Type::TOP; // Canonical empty value
}
@@ -2825,14 +2821,6 @@ const Type *TypeOopPtr::filter( const Type *kills ) const {
assert(!ftip->klass_is_exact(), "interface could not be exact");
return ktip->cast_to_ptr_type(ftip->ptr());
}
- // Interface klass type could be exact in opposite to interface type,
- // return it here instead of incorrect Constant ptr J/L/Object (6894807).
- if (ftkp != NULL && ktkp != NULL &&
- ftkp->is_loaded() && ftkp->klass()->is_interface() &&
- !ftkp->klass_is_exact() && // Keep exact interface klass
- ktkp->is_loaded() && !ktkp->klass()->is_interface()) {
- return ktkp->cast_to_ptr_type(ftkp->ptr());
- }
return ft;
}
@@ -4385,6 +4373,33 @@ bool TypeKlassPtr::singleton(void) const {
return (_offset == 0) && !below_centerline(_ptr);
}
+// Do not allow interface-vs.-noninterface joins to collapse to top.
+const Type *TypeKlassPtr::filter(const Type *kills) const {
+ // logic here mirrors the one from TypeOopPtr::filter. See comments
+ // there.
+ const Type* ft = join(kills);
+ const TypeKlassPtr* ftkp = ft->isa_klassptr();
+ const TypeKlassPtr* ktkp = kills->isa_klassptr();
+
+ if (ft->empty()) {
+ if (!empty() && ktkp != NULL && ktkp->klass()->is_loaded() && ktkp->klass()->is_interface())
+ return kills; // Uplift to interface
+
+ return Type::TOP; // Canonical empty value
+ }
+
+ // Interface klass type could be exact in opposite to interface type,
+ // return it here instead of incorrect Constant ptr J/L/Object (6894807).
+ if (ftkp != NULL && ktkp != NULL &&
+ ftkp->is_loaded() && ftkp->klass()->is_interface() &&
+ !ftkp->klass_is_exact() && // Keep exact interface klass
+ ktkp->is_loaded() && !ktkp->klass()->is_interface()) {
+ return ktkp->cast_to_ptr_type(ftkp->ptr());
+ }
+
+ return ft;
+}
+
//----------------------compute_klass------------------------------------------
// Compute the defining klass for this class
ciKlass* TypeAryPtr::compute_klass(DEBUG_ONLY(bool verify)) const {
diff --git a/src/share/vm/opto/type.hpp b/src/share/vm/opto/type.hpp
index e72baa96b..9810edfe7 100644
--- a/src/share/vm/opto/type.hpp
+++ b/src/share/vm/opto/type.hpp
@@ -63,7 +63,7 @@ class TypeRawPtr;
class TypeOopPtr;
class TypeInstPtr;
class TypeAryPtr;
-class TypeKlassPtr;
+class TypeKlassPtr;
class TypeMetadataPtr;
//------------------------------Type-------------------------------------------
@@ -1202,6 +1202,9 @@ public:
virtual intptr_t get_con() const;
+ // Do not allow interface-vs.-noninterface joins to collapse to top.
+ virtual const Type *filter( const Type *kills ) const;
+
// Convenience common pre-built types.
static const TypeKlassPtr* OBJECT; // Not-null object klass or below
static const TypeKlassPtr* OBJECT_OR_NULL; // Maybe-null version of same
diff --git a/src/share/vm/prims/jvmtiGetLoadedClasses.cpp b/src/share/vm/prims/jvmtiGetLoadedClasses.cpp
index 51cfb384a..f58a5a3d5 100644
--- a/src/share/vm/prims/jvmtiGetLoadedClasses.cpp
+++ b/src/share/vm/prims/jvmtiGetLoadedClasses.cpp
@@ -29,8 +29,43 @@
#include "runtime/thread.hpp"
+// The closure for GetLoadedClasses
+class LoadedClassesClosure : public KlassClosure {
+private:
+ Stack<jclass, mtInternal> _classStack;
+ JvmtiEnv* _env;
+
+public:
+ LoadedClassesClosure(JvmtiEnv* env) {
+ _env = env;
+ }
+
+ void do_klass(Klass* k) {
+ // Collect all jclasses
+ _classStack.push((jclass) _env->jni_reference(k->java_mirror()));
+ }
+
+ int extract(jclass* result_list) {
+ // The size of the Stack will be 0 after extract, so get it here
+ int count = (int)_classStack.size();
+ int i = count;
+
+ // Pop all jclasses, fill backwards
+ while (!_classStack.is_empty()) {
+ result_list[--i] = _classStack.pop();
+ }
+
+ // Return the number of elements written
+ return count;
+ }
+
+ // Return current size of the Stack
+ int get_count() {
+ return (int)_classStack.size();
+ }
+};
+
-// The closure for GetLoadedClasses and GetClassLoaderClasses
+// The closure for GetClassLoaderClasses
class JvmtiGetLoadedClassesClosure : public StackObj {
// Since the SystemDictionary::classes_do callback
// doesn't pass a closureData pointer,
@@ -165,19 +200,6 @@ class JvmtiGetLoadedClassesClosure : public StackObj {
}
}
- // Finally, the static methods that are the callbacks
- static void increment(Klass* k) {
- JvmtiGetLoadedClassesClosure* that = JvmtiGetLoadedClassesClosure::get_this();
- if (that->get_initiatingLoader() == NULL) {
- for (Klass* l = k; l != NULL; l = l->array_klass_or_null()) {
- that->set_count(that->get_count() + 1);
- }
- } else if (k != NULL) {
- // if initiating loader not null, just include the instance with 1 dimension
- that->set_count(that->get_count() + 1);
- }
- }
-
static void increment_with_loader(Klass* k, ClassLoaderData* loader_data) {
JvmtiGetLoadedClassesClosure* that = JvmtiGetLoadedClassesClosure::get_this();
oop class_loader = loader_data->class_loader();
@@ -196,24 +218,6 @@ class JvmtiGetLoadedClassesClosure : public StackObj {
}
}
- static void add(Klass* k) {
- JvmtiGetLoadedClassesClosure* that = JvmtiGetLoadedClassesClosure::get_this();
- if (that->available()) {
- if (that->get_initiatingLoader() == NULL) {
- for (Klass* l = k; l != NULL; l = l->array_klass_or_null()) {
- oop mirror = l->java_mirror();
- that->set_element(that->get_index(), mirror);
- that->set_index(that->get_index() + 1);
- }
- } else if (k != NULL) {
- // if initiating loader not null, just include the instance with 1 dimension
- oop mirror = k->java_mirror();
- that->set_element(that->get_index(), mirror);
- that->set_index(that->get_index() + 1);
- }
- }
- }
-
static void add_with_loader(Klass* k, ClassLoaderData* loader_data) {
JvmtiGetLoadedClassesClosure* that = JvmtiGetLoadedClassesClosure::get_this();
if (that->available()) {
@@ -255,39 +259,30 @@ class JvmtiGetLoadedClassesClosure : public StackObj {
jvmtiError
JvmtiGetLoadedClasses::getLoadedClasses(JvmtiEnv *env, jint* classCountPtr, jclass** classesPtr) {
- // Since SystemDictionary::classes_do only takes a function pointer
- // and doesn't call back with a closure data pointer,
- // we can only pass static methods.
- JvmtiGetLoadedClassesClosure closure;
+ LoadedClassesClosure closure(env);
{
// To get a consistent list of classes we need MultiArray_lock to ensure
- // array classes aren't created, and SystemDictionary_lock to ensure that
- // classes aren't added to the system dictionary,
+ // array classes aren't created.
MutexLocker ma(MultiArray_lock);
- MutexLocker sd(SystemDictionary_lock);
- // First, count the classes
- SystemDictionary::classes_do(&JvmtiGetLoadedClassesClosure::increment);
- Universe::basic_type_classes_do(&JvmtiGetLoadedClassesClosure::increment);
- // Next, fill in the classes
- closure.allocate();
- SystemDictionary::classes_do(&JvmtiGetLoadedClassesClosure::add);
- Universe::basic_type_classes_do(&JvmtiGetLoadedClassesClosure::add);
- // Drop the SystemDictionary_lock, so the results could be wrong from here,
- // but we still have a snapshot.
+ // Iterate through all classes in ClassLoaderDataGraph
+ // and collect them using the LoadedClassesClosure
+ ClassLoaderDataGraph::loaded_classes_do(&closure);
}
- // Post results
+
+ // Return results by extracting the collected contents into a list
+ // allocated via JvmtiEnv
jclass* result_list;
- jvmtiError err = env->Allocate(closure.get_count() * sizeof(jclass),
- (unsigned char**)&result_list);
- if (err != JVMTI_ERROR_NONE) {
- return err;
+ jvmtiError error = env->Allocate(closure.get_count() * sizeof(jclass),
+ (unsigned char**)&result_list);
+
+ if (error == JVMTI_ERROR_NONE) {
+ int count = closure.extract(result_list);
+ *classCountPtr = count;
+ *classesPtr = result_list;
}
- closure.extract(env, result_list);
- *classCountPtr = closure.get_count();
- *classesPtr = result_list;
- return JVMTI_ERROR_NONE;
+ return error;
}
jvmtiError
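
Note: the rewritten GetLoadedClasses drops the two-pass count-then-fill walk over the system dictionary in favor of a single pass over the ClassLoaderDataGraph, pushing JNI references onto a stack under the lock and copying them out afterwards. The backwards fill in extract() restores collection order, since popping a stack reverses it. A simplified model with ints in place of jclass handles:

    #include <cstdio>
    #include <stack>

    class Collector {  // stand-in for LoadedClassesClosure
      std::stack<int> _stack;
     public:
      void do_klass(int k) { _stack.push(k); }       // one call per loaded class
      int count() const { return (int)_stack.size(); }
      int extract(int* out) {                        // caller allocates count() slots
        int n = (int)_stack.size();
        for (int i = n; !_stack.empty(); _stack.pop()) {
          out[--i] = _stack.top();                   // fill backwards to undo LIFO
        }
        return n;
      }
    };

    int main() {
      Collector c;
      for (int k = 1; k <= 3; k++) c.do_klass(k);
      int out[3];
      c.extract(out);
      printf("%d %d %d\n", out[0], out[1], out[2]);  // 1 2 3: original order kept
      return 0;
    }
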
diff --git a/src/share/vm/runtime/arguments.cpp b/src/share/vm/runtime/arguments.cpp
index fce983f8c..8060dae03 100644
--- a/src/share/vm/runtime/arguments.cpp
+++ b/src/share/vm/runtime/arguments.cpp
@@ -1988,6 +1988,15 @@ void Arguments::check_deprecated_gc_flags() {
warning("DefaultMaxRAMFraction is deprecated and will likely be removed in a future release. "
"Use MaxRAMFraction instead.");
}
+ if (FLAG_IS_CMDLINE(UseCMSCompactAtFullCollection)) {
+ warning("UseCMSCompactAtFullCollection is deprecated and will likely be removed in a future release.");
+ }
+ if (FLAG_IS_CMDLINE(CMSFullGCsBeforeCompaction)) {
+ warning("CMSFullGCsBeforeCompaction is deprecated and will likely be removed in a future release.");
+ }
+ if (FLAG_IS_CMDLINE(UseCMSCollectionPassing)) {
+ warning("UseCMSCollectionPassing is deprecated and will likely be removed in a future release.");
+ }
}
// Check stack pages settings
diff --git a/src/share/vm/runtime/handles.cpp b/src/share/vm/runtime/handles.cpp
index 1b4e9faec..ca73f86ba 100644
--- a/src/share/vm/runtime/handles.cpp
+++ b/src/share/vm/runtime/handles.cpp
@@ -45,7 +45,7 @@
oop* HandleArea::allocate_handle(oop obj) {
assert(_handle_mark_nesting > 1, "memory leak: allocating handle outside HandleMark");
assert(_no_handle_mark_nesting == 0, "allocating handle inside NoHandleMark");
- assert(obj->is_oop(), "sanity check");
+ assert(obj->is_oop(), err_msg("not an oop: " INTPTR_FORMAT, (intptr_t*) obj));
return real_allocate_handle(obj);
}
diff --git a/src/share/vm/runtime/thread.cpp b/src/share/vm/runtime/thread.cpp
index f645be31d..9f31c25b7 100644
--- a/src/share/vm/runtime/thread.cpp
+++ b/src/share/vm/runtime/thread.cpp
@@ -1097,7 +1097,7 @@ static const char* get_java_runtime_version(TRAPS) {
// General purpose hook into Java code, run once when the VM is initialized.
// The Java library method itself may be changed independently from the VM.
static void call_postVMInitHook(TRAPS) {
- Klass* k = SystemDictionary::PostVMInitHook_klass();
+ Klass* k = SystemDictionary::resolve_or_null(vmSymbols::sun_misc_PostVMInitHook(), THREAD);
instanceKlassHandle klass (THREAD, k);
if (klass.not_null()) {
JavaValue result(T_VOID);
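
Note: this hunk pairs with the systemDictionary.hpp hunk earlier in the diff. PostVMInitHook is no longer preloaded as a well-known class; it is resolved on demand, and when sun.misc.PostVMInitHook is absent the klass handle is simply null and the hook is skipped. A sketch of the optional-lookup pattern, with stand-in types:

    #include <map>
    #include <string>

    struct Klass {};
    static std::map<std::string, Klass*> dictionary;  // stands in for the dictionary

    // Returns nullptr rather than failing when the class is absent.
    Klass* resolve_or_null(const std::string& name) {
      auto it = dictionary.find(name);
      return (it == dictionary.end()) ? nullptr : it->second;
    }

    void call_postVMInitHook() {
      if (Klass* k = resolve_or_null("sun/misc/PostVMInitHook")) {
        (void)k;  // invoke the hook's run() method here
      }           // absent class: hook silently skipped
    }
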
diff --git a/src/share/vm/services/jmm.h b/src/share/vm/services/jmm.h
index 9f46499b4..e0a748a41 100644
--- a/src/share/vm/services/jmm.h
+++ b/src/share/vm/services/jmm.h
@@ -78,6 +78,7 @@ typedef enum {
JMM_COMPILE_TOTAL_TIME_MS = 8, /* Total accumulated time spent in compilation */
JMM_GC_TIME_MS = 9, /* Total accumulated time spent in collection */
JMM_GC_COUNT = 10, /* Total number of collections */
+ JMM_JVM_UPTIME_MS = 11, /* The JVM uptime in milliseconds */
JMM_INTERNAL_ATTRIBUTE_INDEX = 100,
JMM_CLASS_LOADED_BYTES = 101, /* Number of bytes loaded instance classes */
diff --git a/src/share/vm/services/management.cpp b/src/share/vm/services/management.cpp
index 9c8798960..9585960da 100644
--- a/src/share/vm/services/management.cpp
+++ b/src/share/vm/services/management.cpp
@@ -1032,6 +1032,9 @@ static jlong get_long_attribute(jmmLongAttribute att) {
case JMM_JVM_INIT_DONE_TIME_MS:
return Management::vm_init_done_time();
+ case JMM_JVM_UPTIME_MS:
+ return Management::ticks_to_ms(os::elapsed_counter());
+
case JMM_COMPILE_TOTAL_TIME_MS:
return Management::ticks_to_ms(CompileBroker::total_compilation_ticks());
diff --git a/src/share/vm/utilities/globalDefinitions.hpp b/src/share/vm/utilities/globalDefinitions.hpp
index 06a32dc90..7806cdc08 100644
--- a/src/share/vm/utilities/globalDefinitions.hpp
+++ b/src/share/vm/utilities/globalDefinitions.hpp
@@ -368,8 +368,6 @@ const int KlassAlignment = KlassAlignmentInBytes / HeapWordSize;
// Klass encoding metaspace max size
const uint64_t KlassEncodingMetaspaceMax = (uint64_t(max_juint) + 1) << LogKlassAlignmentInBytes;
-const jlong CompressedKlassPointersBase = NOT_LP64(0) LP64_ONLY(CONST64(0x800000000)); // 32*G
-
// Machine dependent stuff
#ifdef TARGET_ARCH_x86