
Commit a7c0f4b

8365146: Remove LockingMode related code from ppc64
Reviewed-by: aboldtch, mdoerr
1 parent: c74c60f

8 files changed, +53 -631 lines

src/hotspot/cpu/ppc/c1_LIRAssembler_ppc.cpp

Lines changed: 10 additions & 38 deletions
@@ -228,11 +228,7 @@ int LIR_Assembler::emit_unwind_handler() {
   if (method()->is_synchronized()) {
     monitor_address(0, FrameMap::R4_opr);
     stub = new MonitorExitStub(FrameMap::R4_opr, true, 0);
-    if (LockingMode == LM_MONITOR) {
-      __ b(*stub->entry());
-    } else {
-      __ unlock_object(R5, R6, R4, *stub->entry());
-    }
+    __ unlock_object(R5, R6, R4, *stub->entry());
     __ bind(*stub->continuation());
   }
 
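For readability, here is how the synchronized-method block of emit_unwind_handler reads with the hunk applied, reconstructed from the context and added lines above (surrounding code omitted, comment added here). With LockingMode gone there is no LM_MONITOR case that branches straight to the MonitorExitStub; the inline unlock is always emitted and the stub serves purely as the slow path:

  if (method()->is_synchronized()) {
    monitor_address(0, FrameMap::R4_opr);
    stub = new MonitorExitStub(FrameMap::R4_opr, true, 0);
    // Inline unlock is emitted unconditionally; the stub entry is the slow path.
    __ unlock_object(R5, R6, R4, *stub->entry());
    __ bind(*stub->continuation());
  }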
@@ -2618,44 +2614,20 @@ void LIR_Assembler::emit_lock(LIR_OpLock* op) {
   // Obj may not be an oop.
   if (op->code() == lir_lock) {
     MonitorEnterStub* stub = (MonitorEnterStub*)op->stub();
-    if (LockingMode != LM_MONITOR) {
-      assert(BasicLock::displaced_header_offset_in_bytes() == 0, "lock_reg must point to the displaced header");
-      // Add debug info for NullPointerException only if one is possible.
-      if (op->info() != nullptr) {
-        if (!os::zero_page_read_protected() || !ImplicitNullChecks) {
-          explicit_null_check(obj, op->info());
-        } else {
-          add_debug_info_for_null_check_here(op->info());
-        }
-      }
-      __ lock_object(hdr, obj, lock, op->scratch_opr()->as_register(), *op->stub()->entry());
-    } else {
-      // always do slow locking
-      // note: The slow locking code could be inlined here, however if we use
-      //       slow locking, speed doesn't matter anyway and this solution is
-      //       simpler and requires less duplicated code - additionally, the
-      //       slow locking code is the same in either case which simplifies
-      //       debugging.
-      if (op->info() != nullptr) {
+    assert(BasicLock::displaced_header_offset_in_bytes() == 0, "lock_reg must point to the displaced header");
+    // Add debug info for NullPointerException only if one is possible.
+    if (op->info() != nullptr) {
+      if (!os::zero_page_read_protected() || !ImplicitNullChecks) {
+        explicit_null_check(obj, op->info());
+      } else {
         add_debug_info_for_null_check_here(op->info());
-        __ null_check(obj);
       }
-      __ b(*op->stub()->entry());
     }
+    __ lock_object(hdr, obj, lock, op->scratch_opr()->as_register(), *op->stub()->entry());
   } else {
     assert (op->code() == lir_unlock, "Invalid code, expected lir_unlock");
-    if (LockingMode != LM_MONITOR) {
-      assert(BasicLock::displaced_header_offset_in_bytes() == 0, "lock_reg must point to the displaced header");
-      __ unlock_object(hdr, obj, lock, *op->stub()->entry());
-    } else {
-      // always do slow unlocking
-      // note: The slow unlocking code could be inlined here, however if we use
-      //       slow unlocking, speed doesn't matter anyway and this solution is
-      //       simpler and requires less duplicated code - additionally, the
-      //       slow unlocking code is the same in either case which simplifies
-      //       debugging.
-      __ b(*op->stub()->entry());
-    }
+    assert(BasicLock::displaced_header_offset_in_bytes() == 0, "lock_reg must point to the displaced header");
+    __ unlock_object(hdr, obj, lock, *op->stub()->entry());
   }
   __ bind(*op->stub()->continuation());
 }
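Applying the hunk, emit_lock no longer has the "always do slow locking/unlocking" LM_MONITOR branches (with their __ null_check and direct branch to the stub entry); both lock and unlock always go through the inline fast-path helpers, with the stub as fallback. A sketch of the resulting dispatch, pieced together from the added and context lines above:

  if (op->code() == lir_lock) {
    MonitorEnterStub* stub = (MonitorEnterStub*)op->stub();
    assert(BasicLock::displaced_header_offset_in_bytes() == 0, "lock_reg must point to the displaced header");
    // Add debug info for NullPointerException only if one is possible.
    if (op->info() != nullptr) {
      if (!os::zero_page_read_protected() || !ImplicitNullChecks) {
        explicit_null_check(obj, op->info());
      } else {
        add_debug_info_for_null_check_here(op->info());
      }
    }
    __ lock_object(hdr, obj, lock, op->scratch_opr()->as_register(), *op->stub()->entry());
  } else {
    assert (op->code() == lir_unlock, "Invalid code, expected lir_unlock");
    assert(BasicLock::displaced_header_offset_in_bytes() == 0, "lock_reg must point to the displaced header");
    __ unlock_object(hdr, obj, lock, *op->stub()->entry());
  }
  __ bind(*op->stub()->continuation());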

src/hotspot/cpu/ppc/c1_MacroAssembler_ppc.cpp

Lines changed: 2 additions & 74 deletions
@@ -82,59 +82,13 @@ void C1_MacroAssembler::lock_object(Register Rmark, Register Roop, Register Rbox
   // Save object being locked into the BasicObjectLock...
   std(Roop, in_bytes(BasicObjectLock::obj_offset()), Rbox);
 
-  if (LockingMode == LM_LIGHTWEIGHT) {
-    lightweight_lock(Rbox, Roop, Rmark, Rscratch, slow_int);
-  } else if (LockingMode == LM_LEGACY) {
-
-    if (DiagnoseSyncOnValueBasedClasses != 0) {
-      load_klass(Rscratch, Roop);
-      lbz(Rscratch, in_bytes(Klass::misc_flags_offset()), Rscratch);
-      testbitdi(CR0, R0, Rscratch, exact_log2(KlassFlags::_misc_is_value_based_class));
-      bne(CR0, slow_int);
-    }
-
-    // ... and mark it unlocked.
-    ori(Rmark, Rmark, markWord::unlocked_value);
-
-    // Save unlocked object header into the displaced header location on the stack.
-    std(Rmark, BasicLock::displaced_header_offset_in_bytes(), Rbox);
-
-    // Compare object markWord with Rmark and if equal exchange Rscratch with object markWord.
-    assert(oopDesc::mark_offset_in_bytes() == 0, "cas must take a zero displacement");
-    cmpxchgd(/*flag=*/CR0,
-             /*current_value=*/Rscratch,
-             /*compare_value=*/Rmark,
-             /*exchange_value=*/Rbox,
-             /*where=*/Roop/*+0==mark_offset_in_bytes*/,
-             MacroAssembler::MemBarRel | MacroAssembler::MemBarAcq,
-             MacroAssembler::cmpxchgx_hint_acquire_lock(),
-             noreg,
-             &cas_failed,
-             /*check without membar and ldarx first*/true);
-    // If compare/exchange succeeded we found an unlocked object and we now have locked it
-    // hence we are done.
-  } else {
-    assert(false, "Unhandled LockingMode:%d", LockingMode);
-  }
+  lightweight_lock(Rbox, Roop, Rmark, Rscratch, slow_int);
   b(done);
 
   bind(slow_int);
   b(slow_case); // far
 
-  if (LockingMode == LM_LEGACY) {
-    bind(cas_failed);
-    // We did not find an unlocked object so see if this is a recursive case.
-    sub(Rscratch, Rscratch, R1_SP);
-    load_const_optimized(R0, (~(os::vm_page_size()-1) | markWord::lock_mask_in_place));
-    and_(R0/*==0?*/, Rscratch, R0);
-    std(R0/*==0, perhaps*/, BasicLock::displaced_header_offset_in_bytes(), Rbox);
-    bne(CR0, slow_int);
-  }
-
   bind(done);
-  if (LockingMode == LM_LEGACY) {
-    inc_held_monitor_count(Rmark /*tmp*/);
-  }
 }
 
 
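With lightweight locking as the only remaining mode, C1_MacroAssembler::lock_object loses the LM_LEGACY displaced-header CAS, the cas_failed recursive-lock check, the DiagnoseSyncOnValueBasedClasses test and the inc_held_monitor_count bookkeeping. A sketch of the post-patch body from the shown context onward (the signature is truncated in the hunk header and earlier lines are not part of the hunk, so only the visible tail is reproduced; the mode comment is added here):

  // Save object being locked into the BasicObjectLock...
  std(Roop, in_bytes(BasicObjectLock::obj_offset()), Rbox);

  // Lightweight locking is the only remaining mode.
  lightweight_lock(Rbox, Roop, Rmark, Rscratch, slow_int);
  b(done);

  bind(slow_int);
  b(slow_case); // far

  bind(done);
}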
@@ -146,43 +100,17 @@ void C1_MacroAssembler::unlock_object(Register Rmark, Register Roop, Register Rb
   Address mark_addr(Roop, oopDesc::mark_offset_in_bytes());
   assert(mark_addr.disp() == 0, "cas must take a zero displacement");
 
-  if (LockingMode != LM_LIGHTWEIGHT) {
-    // Test first if it is a fast recursive unlock.
-    ld(Rmark, BasicLock::displaced_header_offset_in_bytes(), Rbox);
-    cmpdi(CR0, Rmark, 0);
-    beq(CR0, done);
-  }
-
   // Load object.
   ld(Roop, in_bytes(BasicObjectLock::obj_offset()), Rbox);
   verify_oop(Roop, FILE_AND_LINE);
 
-  if (LockingMode == LM_LIGHTWEIGHT) {
-    lightweight_unlock(Roop, Rmark, slow_int);
-  } else if (LockingMode == LM_LEGACY) {
-    // Check if it is still a light weight lock, this is is true if we see
-    // the stack address of the basicLock in the markWord of the object.
-    cmpxchgd(/*flag=*/CR0,
-             /*current_value=*/R0,
-             /*compare_value=*/Rbox,
-             /*exchange_value=*/Rmark,
-             /*where=*/Roop,
-             MacroAssembler::MemBarRel,
-             MacroAssembler::cmpxchgx_hint_release_lock(),
-             noreg,
-             &slow_int);
-  } else {
-    assert(false, "Unhandled LockingMode:%d", LockingMode);
-  }
+  lightweight_unlock(Roop, Rmark, slow_int);
   b(done);
   bind(slow_int);
   b(slow_case); // far
 
   // Done
   bind(done);
-  if (LockingMode == LM_LEGACY) {
-    dec_held_monitor_count(Rmark /*tmp*/);
-  }
 }
 
 
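unlock_object is simplified the same way: the recursive-unlock test on the displaced header, the LM_LEGACY CAS that restores the saved mark word, and dec_held_monitor_count all go away, leaving lightweight_unlock as the single path. The post-patch tail of the function, reconstructed from the context and added lines above (the mode comment is added here):

  Address mark_addr(Roop, oopDesc::mark_offset_in_bytes());
  assert(mark_addr.disp() == 0, "cas must take a zero displacement");

  // Load object.
  ld(Roop, in_bytes(BasicObjectLock::obj_offset()), Rbox);
  verify_oop(Roop, FILE_AND_LINE);

  // Lightweight unlocking is the only remaining mode.
  lightweight_unlock(Roop, Rmark, slow_int);
  b(done);
  bind(slow_int);
  b(slow_case); // far

  // Done
  bind(done);
}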