8263377: Store method handle linkers in the 'non-nmethods' heap #8760

Closed · wants to merge 21 commits
2 changes: 1 addition & 1 deletion src/hotspot/cpu/aarch64/frame_aarch64.cpp
@@ -104,7 +104,7 @@ bool frame::safe_for_sender(JavaThread *thread) {
// ok. adapter blobs never have a frame complete and are never ok.

if (!_cb->is_frame_complete_at(_pc)) {
-      if (_cb->is_nmethod() || _cb->is_adapter_blob() || _cb->is_runtime_stub()) {
+      if (_cb->is_nmethod() || _cb->is_adapter_blob() || _cb->is_runtime_stub() || _cb->is_mh_intrinsic()) {
return false;
}
}
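Note: is_mh_intrinsic() is not declared anywhere in this section. Judging from the call sites in these frame hunks, it presumably follows the existing virtual-predicate pattern on CodeBlob (is_adapter_blob(), is_runtime_stub(), ...); a minimal sketch, assuming that pattern, with only the predicate name taken from the diff:

// Hedged sketch, not part of this diff: assumed shape of the new predicate.
class CodeBlob {
 public:
  virtual bool is_adapter_blob() const { return false; }
  virtual bool is_runtime_stub() const { return false; }
  virtual bool is_mh_intrinsic() const { return false; }  // new; overridden by the MH linker blob
  // ...
};

With that shape, frame::safe_for_sender() can reject a MethodHandle linker frame whose code is not yet complete, the same way it already rejects adapter blobs and runtime stubs.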
4 changes: 2 additions & 2 deletions src/hotspot/cpu/aarch64/nativeInst_aarch64.cpp
@@ -324,7 +324,7 @@ void NativeMovRegMem::verify() {
void NativeJump::verify() { ; }


-void NativeJump::check_verified_entry_alignment(address entry, address verified_entry) {
+void NativeJump::check_verified_entry_alignment(address verified_entry) {
}


@@ -473,7 +473,7 @@ bool NativeInstruction::is_stop() {
// MT-safe inserting of a jump over a jump or a nop (used by
// nmethod::make_not_entrant_or_zombie)

-void NativeJump::patch_verified_entry(address entry, address verified_entry, address dest) {
+void NativeJump::patch_verified_entry(address verified_entry, address dest) {

assert(dest == SharedRuntime::get_handle_wrong_method_stub(), "expected fixed destination of patch");
assert(nativeInstruction_at(verified_entry)->is_jump_or_nop()
4 changes: 2 additions & 2 deletions src/hotspot/cpu/aarch64/nativeInst_aarch64.hpp
@@ -456,8 +456,8 @@ class NativeJump: public NativeInstruction {
// Insertion of native jump instruction
static void insert(address code_pos, address entry);
// MT-safe insertion of native jump at verified method entry
-  static void check_verified_entry_alignment(address entry, address verified_entry);
-  static void patch_verified_entry(address entry, address verified_entry, address dest);
+  static void check_verified_entry_alignment(address verified_entry);
+  static void patch_verified_entry(address verified_entry, address dest);
};

inline NativeJump* nativeJump_at(address address) {
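The dropped first argument (entry, the unverified entry point) appears to have been unused by every implementation touched in this PR — the bodies shown here only ever read verified_entry — so the signatures shrink to what the patch actually needs. A hypothetical call site after the change (the caller is not shown in this diff; the destination is pinned to the handle-wrong-method stub by the asserts above):

// Hedged sketch of an assumed call site; 'verified_entry' stands for the
// method's verified entry point, however the caller obtains it.
NativeJump::check_verified_entry_alignment(verified_entry);
NativeJump::patch_verified_entry(verified_entry,
                                 SharedRuntime::get_handle_wrong_method_stub());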
45 changes: 20 additions & 25 deletions src/hotspot/cpu/aarch64/sharedRuntime_aarch64.cpp
@@ -1187,6 +1187,25 @@ static void gen_special_dispatch(MacroAssembler* masm,
receiver_reg, member_reg, /*for_compiler_entry:*/ true);
}

+CodeBlob* SharedRuntime::generate_method_handle_intrinsic_wrapper(MacroAssembler* masm,
+                                                                  const methodHandle& method,
+                                                                  int compile_id,
+                                                                  BasicType* in_sig_bt,
+                                                                  VMRegPair* in_regs,
+                                                                  BasicType ret_type) {
+  assert(method->is_method_handle_intrinsic(), "should be MH method");
+  assert(method->is_native(), "should be native method");
+
+  // First instruction must be a nop as it may need to be patched on deoptimisation
+  __ nop();
+  gen_special_dispatch(masm,
+                       method,
+                       in_sig_bt,
+                       in_regs);
+  __ flush();
+  return MethodHandleIntrinsicBlob::create(method, masm->code());
+}

// ---------------------------------------------------------------------------
// Generate a native wrapper for a given method. The method takes arguments
// in the Java compiled code convention, marshals them to the native
@@ -1209,6 +1228,7 @@ nmethod* SharedRuntime::generate_native_wrapper(MacroAssembler* masm,
BasicType* in_sig_bt,
VMRegPair* in_regs,
BasicType ret_type) {
+  assert(!method->is_method_handle_intrinsic(), "must not be MethodHandle intrinsic");
if (method->is_continuation_enter_intrinsic()) {
vmIntrinsics::ID iid = method->intrinsic_id();
intptr_t start = (intptr_t)__ pc();
@@ -1242,31 +1262,6 @@ nmethod* SharedRuntime::generate_native_wrapper(MacroAssembler* masm,
ContinuationEntry::set_enter_code(nm, interpreted_entry_offset);
return nm;
}

-  if (method->is_method_handle_intrinsic()) {
-    vmIntrinsics::ID iid = method->intrinsic_id();
-    intptr_t start = (intptr_t)__ pc();
-    int vep_offset = ((intptr_t)__ pc()) - start;
-
-    // First instruction must be a nop as it may need to be patched on deoptimisation
-    __ nop();
-    gen_special_dispatch(masm,
-                         method,
-                         in_sig_bt,
-                         in_regs);
-    int frame_complete = ((intptr_t)__ pc()) - start; // not complete, period
-    __ flush();
-    int stack_slots = SharedRuntime::out_preserve_stack_slots(); // no out slots at all, actually
-    return nmethod::new_native_nmethod(method,
-                                       compile_id,
-                                       masm->code(),
-                                       vep_offset,
-                                       frame_complete,
-                                       stack_slots / VMRegImpl::slots_per_word,
-                                       in_ByteSize(-1),
-                                       in_ByteSize(-1),
-                                       (OopMapSet*)NULL);
-  }
address native_func = method->native_function();
assert(native_func != NULL, "must have function");
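MethodHandleIntrinsicBlob itself is declared outside this section. From its use here — create(method, masm->code()) returned from a function typed CodeBlob* — a minimal sketch of the assumed interface would be (everything beyond create() and is_mh_intrinsic() is a guess):

// Hedged sketch; the real declaration lives elsewhere in this PR.
class MethodHandleIntrinsicBlob : public CodeBlob {
 public:
  // Assumed to allocate the blob in the 'non-nmethods' code heap and copy
  // the generated instructions out of the CodeBuffer.
  static MethodHandleIntrinsicBlob* create(const methodHandle& method,
                                           CodeBuffer* code);

  virtual bool is_mh_intrinsic() const { return true; }  // see the frame changes above
};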

2 changes: 1 addition & 1 deletion src/hotspot/cpu/arm/frame_arm.cpp
@@ -80,7 +80,7 @@ bool frame::safe_for_sender(JavaThread *thread) {
// ok. adapter blobs never have a frame complete and are never ok.

if (!_cb->is_frame_complete_at(_pc)) {
-      if (_cb->is_compiled() || _cb->is_adapter_blob() || _cb->is_runtime_stub()) {
+      if (_cb->is_compiled() || _cb->is_adapter_blob() || _cb->is_runtime_stub() || _cb->is_mh_intrinsic()) {
return false;
}
}
4 changes: 2 additions & 2 deletions src/hotspot/cpu/arm/nativeInst_arm_32.cpp
@@ -284,10 +284,10 @@ void NativeMovConstReg::set_pc_relative_offset(address addr, address pc) {
}
}

-void RawNativeJump::check_verified_entry_alignment(address entry, address verified_entry) {
+void RawNativeJump::check_verified_entry_alignment(address verified_entry) {
}

-void RawNativeJump::patch_verified_entry(address entry, address verified_entry, address dest) {
+void RawNativeJump::patch_verified_entry(address verified_entry, address dest) {
assert(dest == SharedRuntime::get_handle_wrong_method_stub(), "should be");
int *a = (int *)verified_entry;
a[0] = zombie_illegal_instruction; // always illegal
4 changes: 2 additions & 2 deletions src/hotspot/cpu/arm/nativeInst_arm_32.hpp
@@ -271,9 +271,9 @@ class RawNativeJump: public NativeInstruction {
}
}

-  static void check_verified_entry_alignment(address entry, address verified_entry);
+  static void check_verified_entry_alignment(address verified_entry);

-  static void patch_verified_entry(address entry, address verified_entry, address dest);
+  static void patch_verified_entry(address verified_entry, address dest);

};

38 changes: 17 additions & 21 deletions src/hotspot/cpu/arm/sharedRuntime_arm.cpp
@@ -739,6 +739,22 @@ static void gen_special_dispatch(MacroAssembler* masm,
receiver_reg, member_reg, /*for_compiler_entry:*/ true);
}

+CodeBlob* SharedRuntime::generate_method_handle_intrinsic_wrapper(MacroAssembler* masm,
+                                                                  const methodHandle& method,
+                                                                  int compile_id,
+                                                                  BasicType* in_sig_bt,
+                                                                  VMRegPair* in_regs,
+                                                                  BasicType ret_type) {
+  assert(method->is_method_handle_intrinsic(), "must be MethodHandle intrinsic");
+
+  gen_special_dispatch(masm,
+                       method,
+                       in_sig_bt,
+                       in_regs);
+  __ flush();
+  return MethodHandleIntrinsicBlob::create(method, masm->code());
+}

// ---------------------------------------------------------------------------
// Generate a native wrapper for a given method. The method takes arguments
// in the Java compiled code convention, marshals them to the native
@@ -751,27 +767,7 @@ nmethod* SharedRuntime::generate_native_wrapper(MacroAssembler* masm,
BasicType* in_sig_bt,
VMRegPair* in_regs,
BasicType ret_type) {
-  if (method->is_method_handle_intrinsic()) {
-    vmIntrinsics::ID iid = method->intrinsic_id();
-    intptr_t start = (intptr_t)__ pc();
-    int vep_offset = ((intptr_t)__ pc()) - start;
-    gen_special_dispatch(masm,
-                         method,
-                         in_sig_bt,
-                         in_regs);
-    int frame_complete = ((intptr_t)__ pc()) - start; // not complete, period
-    __ flush();
-    int stack_slots = SharedRuntime::out_preserve_stack_slots(); // no out slots at all, actually
-    return nmethod::new_native_nmethod(method,
-                                       compile_id,
-                                       masm->code(),
-                                       vep_offset,
-                                       frame_complete,
-                                       stack_slots / VMRegImpl::slots_per_word,
-                                       in_ByteSize(-1),
-                                       in_ByteSize(-1),
-                                       (OopMapSet*)NULL);
-  }
+  assert(!method->is_method_handle_intrinsic(), "must not be MethodHandle intrinsic");
// Arguments for JNI method include JNIEnv and Class if static

// Usage of Rtemp should be OK since scratched by native call
2 changes: 1 addition & 1 deletion src/hotspot/cpu/ppc/frame_ppc.cpp
@@ -87,7 +87,7 @@ bool frame::safe_for_sender(JavaThread *thread) {
// so we just assume they are OK.
// Adapter blobs never have a complete frame and are never OK
if (!_cb->is_frame_complete_at(_pc)) {
-      if (_cb->is_compiled() || _cb->is_adapter_blob() || _cb->is_runtime_stub()) {
+      if (_cb->is_compiled() || _cb->is_adapter_blob() || _cb->is_runtime_stub() || _cb->is_mh_intrinsic()) {
return false;
}
}
2 changes: 1 addition & 1 deletion src/hotspot/cpu/ppc/nativeInst_ppc.cpp
@@ -330,7 +330,7 @@ void NativeMovConstReg::verify() {
}
#endif // ASSERT

-void NativeJump::patch_verified_entry(address entry, address verified_entry, address dest) {
+void NativeJump::patch_verified_entry(address verified_entry, address dest) {
ResourceMark rm;
int code_size = 1 * BytesPerInstWord;
CodeBuffer cb(verified_entry, code_size + 1);
4 changes: 2 additions & 2 deletions src/hotspot/cpu/ppc/nativeInst_ppc.hpp
@@ -325,11 +325,11 @@ class NativeJump: public NativeInstruction {
}

// MT-safe insertion of native jump at verified method entry
-  static void patch_verified_entry(address entry, address verified_entry, address dest);
+  static void patch_verified_entry(address verified_entry, address dest);

void verify() NOT_DEBUG_RETURN;

-  static void check_verified_entry_alignment(address entry, address verified_entry) {
+  static void check_verified_entry_alignment(address verified_entry) {
// We just patch one instruction on ppc64, so the jump doesn't have to
// be aligned. Nothing to do here.
}
38 changes: 17 additions & 21 deletions src/hotspot/cpu/ppc/sharedRuntime_ppc.cpp
@@ -1618,6 +1618,22 @@ static void gen_special_dispatch(MacroAssembler* masm,
receiver_reg, member_reg, /*for_compiler_entry:*/ true);
}

+CodeBlob *SharedRuntime::generate_method_handle_intrinsic_wrapper(MacroAssembler *masm,
+                                                                  const methodHandle& method,
+                                                                  int compile_id,
+                                                                  BasicType *in_sig_bt,
+                                                                  VMRegPair *in_regs,
+                                                                  BasicType ret_type) {
+  assert(method->is_method_handle_intrinsic(), "must be MethodHandle intrinsic");
+
+  gen_special_dispatch(masm,
+                       method,
+                       in_sig_bt,
+                       in_regs);
+  __ flush();
+  return MethodHandleIntrinsicBlob::create(method, masm->code());
+}

// ---------------------------------------------------------------------------
// Generate a native wrapper for a given method. The method takes arguments
// in the Java compiled code convention, marshals them to the native
@@ -1640,27 +1656,7 @@ nmethod *SharedRuntime::generate_native_wrapper(MacroAssembler *masm,
BasicType *in_sig_bt,
VMRegPair *in_regs,
BasicType ret_type) {
-  if (method->is_method_handle_intrinsic()) {
-    vmIntrinsics::ID iid = method->intrinsic_id();
-    intptr_t start = (intptr_t)__ pc();
-    int vep_offset = ((intptr_t)__ pc()) - start;
-    gen_special_dispatch(masm,
-                         method,
-                         in_sig_bt,
-                         in_regs);
-    int frame_complete = ((intptr_t)__ pc()) - start; // not complete, period
-    __ flush();
-    int stack_slots = SharedRuntime::out_preserve_stack_slots(); // no out slots at all, actually
-    return nmethod::new_native_nmethod(method,
-                                       compile_id,
-                                       masm->code(),
-                                       vep_offset,
-                                       frame_complete,
-                                       stack_slots / VMRegImpl::slots_per_word,
-                                       in_ByteSize(-1),
-                                       in_ByteSize(-1),
-                                       (OopMapSet*)NULL);
-  }
+  assert(!method->is_method_handle_intrinsic(), "must not be MethodHandle intrinsic");

address native_func = method->native_function();
assert(native_func != NULL, "must have function");
2 changes: 1 addition & 1 deletion src/hotspot/cpu/riscv/frame_riscv.cpp
@@ -103,7 +103,7 @@ bool frame::safe_for_sender(JavaThread *thread) {
// ok. adapter blobs never have a frame complete and are never ok.

if (!_cb->is_frame_complete_at(_pc)) {
-      if (_cb->is_nmethod() || _cb->is_adapter_blob() || _cb->is_runtime_stub()) {
+      if (_cb->is_nmethod() || _cb->is_adapter_blob() || _cb->is_runtime_stub() || _cb->is_mh_intrinsic()) {
return false;
}
}
4 changes: 2 additions & 2 deletions src/hotspot/cpu/riscv/nativeInst_riscv.cpp
@@ -264,7 +264,7 @@ void NativeMovRegMem::verify() {
void NativeJump::verify() { }


-void NativeJump::check_verified_entry_alignment(address entry, address verified_entry) {
+void NativeJump::check_verified_entry_alignment(address verified_entry) {
}


@@ -347,7 +347,7 @@ bool NativeInstruction::is_stop() {
// MT-safe inserting of a jump over a jump or a nop (used by
// nmethod::make_not_entrant_or_zombie)

-void NativeJump::patch_verified_entry(address entry, address verified_entry, address dest) {
+void NativeJump::patch_verified_entry(address verified_entry, address dest) {

assert(dest == SharedRuntime::get_handle_wrong_method_stub(), "expected fixed destination of patch");

4 changes: 2 additions & 2 deletions src/hotspot/cpu/riscv/nativeInst_riscv.hpp
@@ -448,8 +448,8 @@ class NativeJump: public NativeInstruction {
// Insertion of native jump instruction
static void insert(address code_pos, address entry);
// MT-safe insertion of native jump at verified method entry
-  static void check_verified_entry_alignment(address entry, address verified_entry);
-  static void patch_verified_entry(address entry, address verified_entry, address dest);
+  static void check_verified_entry_alignment(address verified_entry);
+  static void patch_verified_entry(address verified_entry, address dest);
};

inline NativeJump* nativeJump_at(address addr) {
43 changes: 19 additions & 24 deletions src/hotspot/cpu/riscv/sharedRuntime_riscv.cpp
@@ -1113,6 +1113,24 @@ static void gen_special_dispatch(MacroAssembler* masm,
receiver_reg, member_reg, /*for_compiler_entry:*/ true);
}

+CodeBlob* SharedRuntime::generate_method_handle_intrinsic_wrapper(MacroAssembler* masm,
+                                                                  const methodHandle& method,
+                                                                  int compile_id,
+                                                                  BasicType* in_sig_bt,
+                                                                  VMRegPair* in_regs,
+                                                                  BasicType ret_type) {
+  assert(method->is_method_handle_intrinsic(), "must be MethodHandle intrinsic");
+
+  // First instruction must be a nop as it may need to be patched on deoptimisation
+  __ nop();
+  gen_special_dispatch(masm,
+                       method,
+                       in_sig_bt,
+                       in_regs);
+  __ flush();
+  return MethodHandleIntrinsicBlob::create(method, masm->code());
+}

// ---------------------------------------------------------------------------
// Generate a native wrapper for a given method. The method takes arguments
// in the Java compiled code convention, marshals them to the native
@@ -1147,30 +1165,7 @@ nmethod* SharedRuntime::generate_native_wrapper(MacroAssembler* masm,
BasicType* in_sig_bt,
VMRegPair* in_regs,
BasicType ret_type) {
-  if (method->is_method_handle_intrinsic()) {
-    vmIntrinsics::ID iid = method->intrinsic_id();
-    intptr_t start = (intptr_t)__ pc();
-    int vep_offset = ((intptr_t)__ pc()) - start;
-
-    // First instruction must be a nop as it may need to be patched on deoptimisation
-    __ nop();
-    gen_special_dispatch(masm,
-                         method,
-                         in_sig_bt,
-                         in_regs);
-    int frame_complete = ((intptr_t)__ pc()) - start; // not complete, period
-    __ flush();
-    int stack_slots = SharedRuntime::out_preserve_stack_slots(); // no out slots at all, actually
-    return nmethod::new_native_nmethod(method,
-                                       compile_id,
-                                       masm->code(),
-                                       vep_offset,
-                                       frame_complete,
-                                       stack_slots / VMRegImpl::slots_per_word,
-                                       in_ByteSize(-1),
-                                       in_ByteSize(-1),
-                                       (OopMapSet*)NULL);
-  }
+  assert(!method->is_method_handle_intrinsic(), "must not be MethodHandle intrinsic");
address native_func = method->native_function();
assert(native_func != NULL, "must have function");

2 changes: 1 addition & 1 deletion src/hotspot/cpu/s390/frame_s390.cpp
@@ -90,7 +90,7 @@ bool frame::safe_for_sender(JavaThread *thread) {
// Adapter blobs never have a complete frame and are never OK.
// nmethods should be OK on s390.
if (!_cb->is_frame_complete_at(_pc)) {
-      if (_cb->is_adapter_blob() || _cb->is_runtime_stub()) {
+      if (_cb->is_adapter_blob() || _cb->is_runtime_stub() || _cb->is_mh_intrinsic()) {
return false;
}
}
2 changes: 1 addition & 1 deletion src/hotspot/cpu/s390/nativeInst_s390.cpp
@@ -622,7 +622,7 @@ void NativeJump::verify() {
}

// Patch atomically with an illtrap.
-void NativeJump::patch_verified_entry(address entry, address verified_entry, address dest) {
+void NativeJump::patch_verified_entry(address verified_entry, address dest) {
ResourceMark rm;
int code_size = 2;
CodeBuffer cb(verified_entry, code_size + 1);
4 changes: 2 additions & 2 deletions src/hotspot/cpu/s390/nativeInst_s390.hpp
@@ -610,9 +610,9 @@ class NativeJump: public NativeInstruction {
static void insert(address code_pos, address entry);

// MT-safe insertion of native jump at verified method entry.
-  static void check_verified_entry_alignment(address entry, address verified_entry) { }
+  static void check_verified_entry_alignment(address verified_entry) { }

-  static void patch_verified_entry(address entry, address verified_entry, address dest);
+  static void patch_verified_entry(address verified_entry, address dest);
};

//-------------------------------------
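All five ports follow the same pattern: the MethodHandle-intrinsic path moves out of generate_native_wrapper() — which now asserts it never sees such a method — into generate_method_handle_intrinsic_wrapper(), which returns a plain CodeBlob instead of an nmethod. A hedged sketch of how a caller might dispatch after this change (the surrounding driver code is not part of this section; only the two generator signatures are taken from the diff):

// Hedged sketch of an assumed compilation driver.
if (method->is_method_handle_intrinsic()) {
  // MH linkers become ordinary code blobs stored in the 'non-nmethods' heap.
  CodeBlob* blob = SharedRuntime::generate_method_handle_intrinsic_wrapper(
      masm, method, compile_id, in_sig_bt, in_regs, ret_type);
  // ... install 'blob' as the method's code ...
} else {
  nmethod* nm = SharedRuntime::generate_native_wrapper(
      masm, method, compile_id, in_sig_bt, in_regs, ret_type);
  // ... nmethod installation as before ...
}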