author     shade <none@none>  2013-09-27 11:52:24 +0400
committer  shade <none@none>  2013-09-27 11:52:24 +0400
commit     777335e133592d6aefaa90d96b8582ac2f84c964 (patch)
tree       b1ea93872c297acbac71669d7e1ab6483ae03ca3
parent     7f4d4357cc0459b77488ad5a22f6ff6486ed6188 (diff)
8014447: Object.hashCode intrinsic breaks inline caches
Summary: Try to inline as a normal method first, then fall back to the intrinsic.
Reviewed-by: kvn, twisti
-rw-r--r--  src/share/vm/opto/callGenerator.hpp |  2
-rw-r--r--  src/share/vm/opto/doCall.cpp        | 19
-rw-r--r--  src/share/vm/opto/library_call.cpp  | 10
3 files changed, 28 insertions(+), 3 deletions(-)
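
The fix is easiest to see in isolation. What follows is a hypothetical, much-simplified C++ model of the selection logic this patch adds to Compile::call_generator (sketched from the doCall.cpp hunks below): an intrinsic that reports does_virtual_dispatch() is set aside rather than returned immediately, so profile-guided inlining gets the first chance to claim the call, and the remembered intrinsic is used only if nothing else does. Apart from does_virtual_dispatch(), every name here is illustrative rather than HotSpot's.

```cpp
#include <cstdio>

// Hypothetical, much-simplified model of the selection logic this patch
// adds to Compile::call_generator. Only does_virtual_dispatch() mirrors
// the real API; every other name is illustrative.
struct CallGenerator {
    virtual bool does_virtual_dispatch() const { return false; }
    virtual const char* describe() const = 0;
    virtual ~CallGenerator() {}
};

// Stand-in for the _hashCode intrinsic: it performs the virtual dispatch
// itself, which is exactly the property the new flag reports.
struct HashCodeIntrinsic : CallGenerator {
    bool does_virtual_dispatch() const override { return true; }
    const char* describe() const override { return "intrinsic (own virtual dispatch)"; }
};

struct ProfiledInline : CallGenerator {
    const char* describe() const override { return "inlined via type profile"; }
};

// Stand-in for the profile-based inlining attempted later in
// call_generator; succeeds only when the profile names one receiver type.
CallGenerator* try_profile_based_inline(bool monomorphic_profile) {
    static ProfiledInline inline_cg;
    return monomorphic_profile ? &inline_cg : nullptr;
}

CallGenerator* call_generator(bool monomorphic_profile) {
    static HashCodeIntrinsic intrinsic;
    CallGenerator* cg_intrinsic = nullptr;

    CallGenerator* cg = &intrinsic;          // find_intrinsic() succeeded
    if (cg->does_virtual_dispatch()) {
        cg_intrinsic = cg;                   // remember it for later
        cg = nullptr;                        // let the type profile go first
    } else {
        return cg;                           // old behavior for other intrinsics
    }

    if (CallGenerator* inline_cg = try_profile_based_inline(monomorphic_profile)) {
        return inline_cg;                    // the call was claimed: inline normally
    }
    return cg_intrinsic;                     // nothing claimed it: retry the intrinsic
}

int main() {
    std::printf("monomorphic site -> %s\n", call_generator(true)->describe());
    std::printf("polymorphic site -> %s\n", call_generator(false)->describe());
    return 0;
}
```

Run standalone, the model picks the profiled inline for a monomorphic call site and falls back to the intrinsic for a polymorphic one, which is the behavior the patch is after.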
diff --git a/src/share/vm/opto/callGenerator.hpp b/src/share/vm/opto/callGenerator.hpp
index a1616de4d..956f227c3 100644
--- a/src/share/vm/opto/callGenerator.hpp
+++ b/src/share/vm/opto/callGenerator.hpp
@@ -65,6 +65,8 @@ class CallGenerator : public ResourceObj {
virtual bool is_predicted() const { return false; }
// is_trap: Does not return to the caller. (E.g., uncommon trap.)
virtual bool is_trap() const { return false; }
+ // does_virtual_dispatch: Should try inlining as a normal method first.
+ virtual bool does_virtual_dispatch() const { return false; }
// is_late_inline: supports conversion of call into an inline
virtual bool is_late_inline() const { return false; }
diff --git a/src/share/vm/opto/doCall.cpp b/src/share/vm/opto/doCall.cpp
index 8784bbe2d..9558d6040 100644
--- a/src/share/vm/opto/doCall.cpp
+++ b/src/share/vm/opto/doCall.cpp
@@ -110,6 +110,7 @@ CallGenerator* Compile::call_generator(ciMethod* callee, int vtable_index, bool
// then we return it as the inlined version of the call.
// We do this before the strict f.p. check below because the
// intrinsics handle strict f.p. correctly.
+ CallGenerator* cg_intrinsic = NULL;
if (allow_inline && allow_intrinsics) {
CallGenerator* cg = find_intrinsic(callee, call_does_dispatch);
if (cg != NULL) {
@@ -121,7 +122,16 @@ CallGenerator* Compile::call_generator(ciMethod* callee, int vtable_index, bool
cg = CallGenerator::for_predicted_intrinsic(cg, inline_cg);
}
}
- return cg;
+
+ // If the intrinsic does the virtual dispatch, we try to use the type profile
+ // first, and hopefully inline it as a regular virtual call below.
+ // We will retry the intrinsic if nothing has claimed it by then.
+ if (cg->does_virtual_dispatch()) {
+ cg_intrinsic = cg;
+ cg = NULL;
+ } else {
+ return cg;
+ }
}
}
@@ -266,6 +276,13 @@ CallGenerator* Compile::call_generator(ciMethod* callee, int vtable_index, bool
}
}
+ // Nothing claimed the intrinsic; go with straightforward inlining
+ // of the intrinsic discovered above.
+ if (allow_inline && allow_intrinsics && cg_intrinsic != NULL) {
+ assert(cg_intrinsic->does_virtual_dispatch(), "sanity");
+ return cg_intrinsic;
+ }
+
// There was no special inlining tactic, or it bailed out.
// Use a more generic tactic, like a simple call.
if (call_does_dispatch) {
diff --git a/src/share/vm/opto/library_call.cpp b/src/share/vm/opto/library_call.cpp
index 903726001..45b8f3337 100644
--- a/src/share/vm/opto/library_call.cpp
+++ b/src/share/vm/opto/library_call.cpp
@@ -47,19 +47,22 @@ class LibraryIntrinsic : public InlineCallGenerator {
private:
bool _is_virtual;
bool _is_predicted;
+ bool _does_virtual_dispatch;
vmIntrinsics::ID _intrinsic_id;
public:
- LibraryIntrinsic(ciMethod* m, bool is_virtual, bool is_predicted, vmIntrinsics::ID id)
+ LibraryIntrinsic(ciMethod* m, bool is_virtual, bool is_predicted, bool does_virtual_dispatch, vmIntrinsics::ID id)
: InlineCallGenerator(m),
_is_virtual(is_virtual),
_is_predicted(is_predicted),
+ _does_virtual_dispatch(does_virtual_dispatch),
_intrinsic_id(id)
{
}
virtual bool is_intrinsic() const { return true; }
virtual bool is_virtual() const { return _is_virtual; }
virtual bool is_predicted() const { return _is_predicted; }
+ virtual bool does_virtual_dispatch() const { return _does_virtual_dispatch; }
virtual JVMState* generate(JVMState* jvms);
virtual Node* generate_predicate(JVMState* jvms);
vmIntrinsics::ID intrinsic_id() const { return _intrinsic_id; }
@@ -355,6 +358,7 @@ CallGenerator* Compile::make_vm_intrinsic(ciMethod* m, bool is_virtual) {
}
bool is_predicted = false;
+ bool does_virtual_dispatch = false;
switch (id) {
case vmIntrinsics::_compareTo:
@@ -381,8 +385,10 @@ CallGenerator* Compile::make_vm_intrinsic(ciMethod* m, bool is_virtual) {
break;
case vmIntrinsics::_hashCode:
if (!InlineObjectHash) return NULL;
+ does_virtual_dispatch = true;
break;
case vmIntrinsics::_clone:
+ does_virtual_dispatch = true;
case vmIntrinsics::_copyOf:
case vmIntrinsics::_copyOfRange:
if (!InlineObjectCopy) return NULL;
@@ -541,7 +547,7 @@ CallGenerator* Compile::make_vm_intrinsic(ciMethod* m, bool is_virtual) {
if (!InlineUnsafeOps) return NULL;
}
- return new LibraryIntrinsic(m, is_virtual, is_predicted, (vmIntrinsics::ID) id);
+ return new LibraryIntrinsic(m, is_virtual, is_predicted, does_virtual_dispatch, (vmIntrinsics::ID) id);
}
//----------------------register_library_intrinsics-----------------------
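
For context on what "inline it as a regular virtual call" buys: with a monomorphic type profile, the compiler can replace the opaque virtual dispatch performed inside the intrinsic with a receiver-type guard plus a direct, inlinable call to the override. The sketch below is a hypothetical C++ analogy of that transformation, not HotSpot code:

```cpp
#include <cstdio>

// Hypothetical analogy (not HotSpot code) for what profile-guided inlining
// buys at a hashCode() call site once the intrinsic stops claiming it.
struct Object {
    virtual int hash_code() const { return 42; }   // stand-in for the identity hash
    virtual ~Object() {}
};

struct Key : Object {
    int id;
    explicit Key(int id) : id(id) {}
    int hash_code() const override { return id * 31; }
};

// The dispatching-intrinsic path amounts to this: an opaque vtable call
// that the inliner cannot see through.
int hash_virtual(const Object* o) {
    return o->hash_code();
}

// The profiled path amounts to this: guard on the hot receiver type and
// call the override directly, so its body can be inlined.
int hash_profiled(const Object* o) {
    if (const Key* k = dynamic_cast<const Key*>(o)) {
        return k->id * 31;        // Key::hash_code() inlined at the call site
    }
    return o->hash_code();        // uncommon fallback: virtual dispatch
}

int main() {
    Key k(7);
    std::printf("%d %d\n", hash_virtual(&k), hash_profiled(&k));
    return 0;
}
```

Keeping the remembered intrinsic as a fallback still pays off when the profile cannot prove that the receiver overrides hashCode(), since the intrinsic's identity-hash fast path remains the best code for plain Object receivers.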