From cfa76d24fb8fdee972a0e9a35479bceb288ca59e Mon Sep 17 00:00:00 2001
Date: Wed, 4 Jan 2023 20:46:49 +0800
Subject: Apply TBI barrier patch to C1
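
With UseTBI enabled, the C1 load barrier test and the C2
compare-and-swap / compare-and-exchange barriers no longer load the
bad mask from ZThreadLocalData. They instead emit a movz carrying a
barrier relocation, and patch_barrier_relocation() later writes the
current ZAddressBadMask into the instruction's immediate field. The
assertion in patch_barrier_relocation() is relaxed to check only for
the movz, since the instructions that follow it now differ between
barrier sites (the C1 test uses tst rather than andr/cbnz).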
---
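Note: the movz is emitted with barrier_Relocation::unpatched and shift 48,
and patch_barrier_relocation() rewrites bits 5..20 -- the movz's 16-bit
immediate field -- via change_immediate(*patch_addr,
(uint16_t) (ZAddressBadMask >> 48), 5, 20). Only the top 16 bits of
ZAddressBadMask are patched in, which presumably covers all bad-mask bits
once UseTBI confines them to the top of the address. change_immediate()
itself is not part of this patch, so the following is a minimal sketch of
such a bit-field rewrite, an assumption rather than the actual HotSpot
implementation:

    #include <cstdint>

    // Sketch: overwrite the bit field [start, end] of a 32-bit instruction
    // word with imm, e.g. bits 5..20 holding the immediate of an AArch64 movz.
    static void change_immediate_sketch(uint32_t& instr, uint32_t imm,
                                        uint32_t start, uint32_t end) {
      uint32_t mask = ((1u << (end - start + 1)) - 1u) << start;  // bits [start, end]
      instr = (instr & ~mask) | ((imm << start) & mask);          // clear, then insert
    }
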
 .../gc/z/zBarrierSetAssembler_aarch64.cpp | 14 ++++++----
 src/hotspot/cpu/aarch64/gc/z/z_aarch64.ad | 28 ++++++++++++++++---
 2 files changed, 32 insertions(+), 10 deletions(-)

diff --git a/src/hotspot/cpu/aarch64/gc/z/zBarrierSetAssembler_aarch64.cpp b/src/hotspot/cpu/aarch64/gc/z/zBarrierSetAssembler_aarch64.cpp
index cafd4e58f..41f047f2c 100644
--- a/src/hotspot/cpu/aarch64/gc/z/zBarrierSetAssembler_aarch64.cpp
+++ b/src/hotspot/cpu/aarch64/gc/z/zBarrierSetAssembler_aarch64.cpp
@@ -212,11 +212,8 @@ static void change_immediate(uint32_t& instr, uint32_t imm, uint32_t start, uint
 void ZBarrierSetAssembler::patch_barrier_relocation(address addr) {
   uint32_t* const patch_addr = (uint32_t*)addr;
 
-  // The next 3 insns should be movz, andr, cbnz.
-  assert(nativeInstruction_at(addr)->is_movz() &&
-         Instruction_aarch64::extract(*(patch_addr + 1), 30, 24) == 0b0001010 &&
-         Instruction_aarch64::extract(*(patch_addr + 2), 31, 24) == 0b10110101,
-         "wrong insns in barrier patch");
+  // The next insn should be movz.
+  assert(nativeInstruction_at(addr)->is_movz(), "wrong insn in barrier patch");
 
   change_immediate(*patch_addr, (uint16_t) (ZAddressBadMask >> 48), 5, 20);
   OrderAccess::fence();
@@ -232,7 +229,12 @@ void ZBarrierSetAssembler::generate_c1_load_barrier_test(LIR_Assembler* ce,
                                                          LIR_Opr ref) const {
   assert_different_registers(rscratch1, rthread, ref->as_register());
 
-  __ ldr(rscratch1, address_bad_mask_from_thread(rthread));
+  if (UseTBI) {
+    __ relocate(barrier_Relocation::spec());
+    __ movz(rscratch1, barrier_Relocation::unpatched, 48);
+  } else {
+    __ ldr(rscratch1, address_bad_mask_from_thread(rthread));
+  }
   __ tst(ref->as_register(), rscratch1);
 }
 
diff --git a/src/hotspot/cpu/aarch64/gc/z/z_aarch64.ad b/src/hotspot/cpu/aarch64/gc/z/z_aarch64.ad
index 426a1cc2a..4dc54c280 100644
--- a/src/hotspot/cpu/aarch64/gc/z/z_aarch64.ad
+++ b/src/hotspot/cpu/aarch64/gc/z/z_aarch64.ad
@@ -113,7 +113,12 @@ instruct zCompareAndSwapP(iRegINoSp res, indirect mem, iRegP oldval, iRegP newva
     __ cset($res$$Register, Assembler::EQ);
     if (barrier_data() != ZLoadBarrierElided) {
       Label good;
-      __ ldr(rscratch1, Address(rthread, ZThreadLocalData::address_bad_mask_offset()));
+      if (UseTBI) {
+        __ relocate(barrier_Relocation::spec());
+        __ movz(rscratch1, barrier_Relocation::unpatched, 48);
+      } else {
+        __ ldr(rscratch1, Address(rthread, ZThreadLocalData::address_bad_mask_offset()));
+      }
       __ andr(rscratch1, rscratch1, rscratch2);
       __ cbz(rscratch1, good);
       z_load_barrier_slow_path(_masm, this, Address($mem$$Register), rscratch2 /* ref */, rscratch1 /* tmp */);
@@ -145,7 +150,12 @@ instruct zCompareAndSwapPAcq(iRegINoSp res, indirect mem, iRegP oldval, iRegP ne
     __ cset($res$$Register, Assembler::EQ);
     if (barrier_data() != ZLoadBarrierElided) {
       Label good;
-      __ ldr(rscratch1, Address(rthread, ZThreadLocalData::address_bad_mask_offset()));
+      if (UseTBI) {
+        __ relocate(barrier_Relocation::spec());
+        __ movz(rscratch1, barrier_Relocation::unpatched, 48);
+      } else {
+        __ ldr(rscratch1, Address(rthread, ZThreadLocalData::address_bad_mask_offset()));
+      }
       __ andr(rscratch1, rscratch1, rscratch2);
       __ cbz(rscratch1, good);
       z_load_barrier_slow_path(_masm, this, Address($mem$$Register), rscratch2 /* ref */, rscratch1 /* tmp */ );
@@ -174,7 +184,12 @@ instruct zCompareAndExchangeP(iRegPNoSp res, indirect mem, iRegP oldval, iRegP n
                false /* acquire */, true /* release */, false /* weak */, $res$$Register);
     if (barrier_data() != ZLoadBarrierElided) {
       Label good;
-      __ ldr(rscratch1, Address(rthread, ZThreadLocalData::address_bad_mask_offset()));
+      if (UseTBI) {
+        __ relocate(barrier_Relocation::spec());
+        __ movz(rscratch1, barrier_Relocation::unpatched, 48);
+      } else {
+        __ ldr(rscratch1, Address(rthread, ZThreadLocalData::address_bad_mask_offset()));
+      }
       __ andr(rscratch1, rscratch1, $res$$Register);
       __ cbz(rscratch1, good);
       z_load_barrier_slow_path(_masm, this, Address($mem$$Register), $res$$Register /* ref */, rscratch1 /* tmp */);
@@ -202,7 +217,12 @@ instruct zCompareAndExchangePAcq(iRegPNoSp res, indirect mem, iRegP oldval, iReg
                true /* acquire */, true /* release */, false /* weak */, $res$$Register);
     if (barrier_data() != ZLoadBarrierElided) {
       Label good;
-      __ ldr(rscratch1, Address(rthread, ZThreadLocalData::address_bad_mask_offset()));
+      if (UseTBI) {
+        __ relocate(barrier_Relocation::spec());
+        __ movz(rscratch1, barrier_Relocation::unpatched, 48);
+      } else {
+        __ ldr(rscratch1, Address(rthread, ZThreadLocalData::address_bad_mask_offset()));
+      }
       __ andr(rscratch1, rscratch1, $res$$Register);
       __ cbz(rscratch1, good);
       z_load_barrier_slow_path(_masm, this, Address($mem$$Register), $res$$Register /* ref */, rscratch1 /* tmp */);
--
2.37.0