// Copyright (c) 2019, 2021, Oracle and/or its affiliates. All rights reserved.
// Copyright (c) 2020, 2021, Huawei Technologies Co., Ltd. All rights reserved.
// DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
//
// This code is free software; you can redistribute it and/or modify it
// under the terms of the GNU General Public License version 2 only, as
// published by the Free Software Foundation.
//
// This code is distributed in the hope that it will be useful, but WITHOUT
// ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
// FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
// version 2 for more details (a copy is included in the LICENSE file that
// accompanied this code).
//
// You should have received a copy of the GNU General Public License version
// 2 along with this work; if not, write to the Free Software Foundation,
// Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
//
// Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
// or visit www.oracle.com if you need additional information or have any
// questions.
//
source_hpp %{
#include "gc/shared/gc_globals.hpp"
#include "gc/z/c2/zBarrierSetC2.hpp"
#include "gc/z/zThreadLocalData.hpp"
%}
source %{
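// Emit the ZGC load barrier fast path: load the thread-local bad mask, AND it
// with the loaded reference, and branch to the out-of-line barrier stub if any
// bad bits are set. For elided barriers nothing is emitted.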
static void z_load_barrier(MacroAssembler& _masm, const MachNode* node, Address ref_addr, Register ref, Register tmp, int barrier_data) {
  if (barrier_data == ZLoadBarrierElided) {
    return;
  }
  ZLoadBarrierStubC2* const stub = ZLoadBarrierStubC2::create(node, ref_addr, ref, tmp, barrier_data);
  __ ld(tmp, Address(xthread, ZThreadLocalData::address_bad_mask_offset()));
  __ andr(tmp, tmp, ref);
  __ bnez(tmp, *stub->entry(), true /* far */);
  __ bind(*stub->continuation());
}
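
// Unconditionally take the slow path via the barrier stub. Used by the atomic
// instructions below once their inline bad-mask check has already fired.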
static void z_load_barrier_slow_path(MacroAssembler& _masm, const MachNode* node, Address ref_addr, Register ref, Register tmp) {
  ZLoadBarrierStubC2* const stub = ZLoadBarrierStubC2::create(node, ref_addr, ref, tmp, ZLoadBarrierStrong);
  __ j(*stub->entry());
  __ bind(*stub->continuation());
}
%}
// Load Pointer
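// zLoadP loads an oop and passes the loaded reference through the load
// barrier; t0 serves as the barrier's temporary register.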
instruct zLoadP(iRegPNoSp dst, memory mem)
%{
  match(Set dst (LoadP mem));
  predicate(UseZGC && (n->as_Load()->barrier_data() != 0));
  effect(TEMP dst);

  ins_cost(4 * DEFAULT_COST);

  format %{ "ld $dst, $mem, #@zLoadP" %}

  ins_encode %{
    const Address ref_addr (as_Register($mem$$base), $mem$$disp);
    __ ld($dst$$Register, ref_addr);
    z_load_barrier(_masm, this, ref_addr, $dst$$Register, t0 /* tmp */, barrier_data());
  %}

  ins_pipe(iload_reg_mem);
%}
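
// Pointer compare-and-swap with a ZGC load barrier on the witnessed value.
// The CAS runs first; the value observed in memory ends up in t0 (copied from
// $oldval on success, left there by cmpxchg on failure). If that value has
// bad-mask bits set, the slow path heals the reference and the CAS is retried.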
instruct zCompareAndSwapP(iRegINoSp res, indirect mem, iRegP oldval, iRegP newval, rFlagsReg cr) %{
  match(Set res (CompareAndSwapP mem (Binary oldval newval)));
  match(Set res (WeakCompareAndSwapP mem (Binary oldval newval)));
  predicate(UseZGC && !needs_acquiring_load_reserved(n) && n->as_LoadStore()->barrier_data() == ZLoadBarrierStrong);
  effect(KILL cr, TEMP_DEF res);

  ins_cost(2 * VOLATILE_REF_COST);

  format %{ "cmpxchg $mem, $oldval, $newval, #@zCompareAndSwapP\n\t"
            "mv $res, $res == $oldval" %}

  ins_encode %{
    Label failed;
    guarantee($mem$$index == -1 && $mem$$disp == 0, "impossible encoding");
    __ cmpxchg($mem$$Register, $oldval$$Register, $newval$$Register, Assembler::int64,
               Assembler::relaxed /* acquire */, Assembler::rl /* release */, $res$$Register,
               true /* result_as_bool */);
    __ beqz($res$$Register, failed);
    __ mv(t0, $oldval$$Register);
    __ bind(failed);
    if (barrier_data() != ZLoadBarrierElided) {
      Label good;
      __ ld(t1, Address(xthread, ZThreadLocalData::address_bad_mask_offset()), t1 /* tmp */);
      __ andr(t1, t1, t0);
      __ beqz(t1, good);
      z_load_barrier_slow_path(_masm, this, Address($mem$$Register), t0 /* ref */, t1 /* tmp */);
      __ cmpxchg($mem$$Register, $oldval$$Register, $newval$$Register, Assembler::int64,
                 Assembler::relaxed /* acquire */, Assembler::rl /* release */, $res$$Register,
                 true /* result_as_bool */);
      __ bind(good);
    }
  %}

  ins_pipe(pipe_slow);
%}
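
// Acquiring variant of zCompareAndSwapP, selected when
// needs_acquiring_load_reserved(n) holds; it differs only in the Assembler::aq
// acquire ordering passed to the cmpxchg calls.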
instruct zCompareAndSwapPAcq(iRegINoSp res, indirect mem, iRegP oldval, iRegP newval, rFlagsReg cr) %{
  match(Set res (CompareAndSwapP mem (Binary oldval newval)));
  match(Set res (WeakCompareAndSwapP mem (Binary oldval newval)));
  predicate(UseZGC && needs_acquiring_load_reserved(n) && (n->as_LoadStore()->barrier_data() == ZLoadBarrierStrong));
  effect(KILL cr, TEMP_DEF res);

  ins_cost(2 * VOLATILE_REF_COST);

  format %{ "cmpxchg $mem, $oldval, $newval, #@zCompareAndSwapPAcq\n\t"
            "mv $res, $res == $oldval" %}

  ins_encode %{
    Label failed;
    guarantee($mem$$index == -1 && $mem$$disp == 0, "impossible encoding");
    __ cmpxchg($mem$$Register, $oldval$$Register, $newval$$Register, Assembler::int64,
               Assembler::aq /* acquire */, Assembler::rl /* release */, $res$$Register,
               true /* result_as_bool */);
    __ beqz($res$$Register, failed);
    __ mv(t0, $oldval$$Register);
    __ bind(failed);
    if (barrier_data() != ZLoadBarrierElided) {
      Label good;
      __ ld(t1, Address(xthread, ZThreadLocalData::address_bad_mask_offset()), t1 /* tmp */);
      __ andr(t1, t1, t0);
      __ beqz(t1, good);
      z_load_barrier_slow_path(_masm, this, Address($mem$$Register), t0 /* ref */, t1 /* tmp */);
      __ cmpxchg($mem$$Register, $oldval$$Register, $newval$$Register, Assembler::int64,
                 Assembler::aq /* acquire */, Assembler::rl /* release */, $res$$Register,
                 true /* result_as_bool */);
      __ bind(good);
    }
  %}

  ins_pipe(pipe_slow);
%}
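
// Pointer compare-and-exchange: like zCompareAndSwapP, but the witnessed value
// itself is returned in $res, so the bad-mask check and healing are applied
// directly to $res rather than to a scratch copy.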
instruct zCompareAndExchangeP(iRegPNoSp res, indirect mem, iRegP oldval, iRegP newval) %{
  match(Set res (CompareAndExchangeP mem (Binary oldval newval)));
  predicate(UseZGC && !needs_acquiring_load_reserved(n) && n->as_LoadStore()->barrier_data() == ZLoadBarrierStrong);
  effect(TEMP_DEF res);

  ins_cost(2 * VOLATILE_REF_COST);

  format %{ "cmpxchg $res = $mem, $oldval, $newval, #@zCompareAndExchangeP" %}

  ins_encode %{
    guarantee($mem$$index == -1 && $mem$$disp == 0, "impossible encoding");
    __ cmpxchg($mem$$Register, $oldval$$Register, $newval$$Register, Assembler::int64,
               Assembler::relaxed /* acquire */, Assembler::rl /* release */, $res$$Register);
    if (barrier_data() != ZLoadBarrierElided) {
      Label good;
      __ ld(t0, Address(xthread, ZThreadLocalData::address_bad_mask_offset()));
      __ andr(t0, t0, $res$$Register);
      __ beqz(t0, good);
      z_load_barrier_slow_path(_masm, this, Address($mem$$Register), $res$$Register /* ref */, t0 /* tmp */);
      __ cmpxchg($mem$$Register, $oldval$$Register, $newval$$Register, Assembler::int64,
                 Assembler::relaxed /* acquire */, Assembler::rl /* release */, $res$$Register);
      __ bind(good);
    }
  %}

  ins_pipe(pipe_slow);
%}
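
// Acquiring variant of zCompareAndExchangeP (Assembler::aq ordering on the
// cmpxchg calls).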
instruct zCompareAndExchangePAcq(iRegPNoSp res, indirect mem, iRegP oldval, iRegP newval) %{
  match(Set res (CompareAndExchangeP mem (Binary oldval newval)));
  predicate(UseZGC && needs_acquiring_load_reserved(n) && n->as_LoadStore()->barrier_data() == ZLoadBarrierStrong);
  effect(TEMP_DEF res);

  ins_cost(2 * VOLATILE_REF_COST);

  format %{ "cmpxchg $res = $mem, $oldval, $newval, #@zCompareAndExchangePAcq" %}

  ins_encode %{
    guarantee($mem$$index == -1 && $mem$$disp == 0, "impossible encoding");
    __ cmpxchg($mem$$Register, $oldval$$Register, $newval$$Register, Assembler::int64,
               Assembler::aq /* acquire */, Assembler::rl /* release */, $res$$Register);
    if (barrier_data() != ZLoadBarrierElided) {
      Label good;
      __ ld(t0, Address(xthread, ZThreadLocalData::address_bad_mask_offset()));
      __ andr(t0, t0, $res$$Register);
      __ beqz(t0, good);
      z_load_barrier_slow_path(_masm, this, Address($mem$$Register), $res$$Register /* ref */, t0 /* tmp */);
      __ cmpxchg($mem$$Register, $oldval$$Register, $newval$$Register, Assembler::int64,
                 Assembler::aq /* acquire */, Assembler::rl /* release */, $res$$Register);
      __ bind(good);
    }
  %}

  ins_pipe(pipe_slow);
%}
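
// Atomic pointer exchange: the previous value returned by atomic_xchg is passed
// through the load barrier. No reference address is supplied (Address(noreg, 0))
// since the old reference has already been replaced in memory, so only the
// register copy needs healing.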
instruct zGetAndSetP(indirect mem, iRegP newv, iRegPNoSp prev, rFlagsReg cr) %{
  match(Set prev (GetAndSetP mem newv));
  predicate(UseZGC && !needs_acquiring_load_reserved(n) && n->as_LoadStore()->barrier_data() != 0);
  effect(TEMP_DEF prev, KILL cr);

  ins_cost(2 * VOLATILE_REF_COST);

  format %{ "atomic_xchg $prev, $newv, [$mem], #@zGetAndSetP" %}

  ins_encode %{
    __ atomic_xchg($prev$$Register, $newv$$Register, as_Register($mem$$base));
    z_load_barrier(_masm, this, Address(noreg, 0), $prev$$Register, t0 /* tmp */, barrier_data());
  %}

  ins_pipe(pipe_serial);
%}
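
// Acquiring variant of zGetAndSetP; atomic_xchgal provides the stronger
// ordering required for volatile accesses.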
instruct zGetAndSetPAcq(indirect mem, iRegP newv, iRegPNoSp prev, rFlagsReg cr) %{
  match(Set prev (GetAndSetP mem newv));
  predicate(UseZGC && needs_acquiring_load_reserved(n) && (n->as_LoadStore()->barrier_data() != 0));
  effect(TEMP_DEF prev, KILL cr);

  ins_cost(VOLATILE_REF_COST);

  format %{ "atomic_xchg_acq $prev, $newv, [$mem], #@zGetAndSetPAcq" %}

  ins_encode %{
    __ atomic_xchgal($prev$$Register, $newv$$Register, as_Register($mem$$base));
    z_load_barrier(_masm, this, Address(noreg, 0), $prev$$Register, t0 /* tmp */, barrier_data());
  %}

  ins_pipe(pipe_serial);
%}