Quellcode-Bibliothek templateTable_arm.cpp
Sprache: C++
/* * Copyright (c) 2008, 2022, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. *
*/
// At top of Java expression stack which may be different than SP. // It isn't for category 1 objects. staticinline Address at_tos() { return Address(Rstack_top, Interpreter::expr_offset_in_bytes(0));
}
// Loads double/long local into R0_tos_lo/R1_tos_hi with two
// separate ldr instructions (supports nonadjacent values).
// Used for longs in all modes, and for doubles in SOFTFP mode.
// NOTE(review): the function body is truncated by the source extraction after
// the assert below, and "constRegister" is a fused "const Register" token from
// the same extraction — restore both against the upstream file before use.
void TemplateTable::load_category2_local(Register Rlocal_index, Register tmp) { constRegister Rlocal_base = tmp;
assert_different_registers(Rlocal_index, tmp);
// Stores R0_tos_lo/R1_tos_hi to double/long local with two
// separate str instructions (supports nonadjacent values).
// Used for longs in all modes, and for doubles in SOFTFP mode.
// NOTE(review): body truncated by the source extraction after the assert
// below; "constRegister" is a fused "const Register" token — restore both
// against the upstream file before use.
void TemplateTable::store_category2_local(Register Rlocal_index, Register tmp) { constRegister Rlocal_base = tmp;
assert_different_registers(Rlocal_index, tmp);
switch (bc) { case Bytecodes::_fast_aputfield: case Bytecodes::_fast_bputfield: case Bytecodes::_fast_zputfield: case Bytecodes::_fast_cputfield: case Bytecodes::_fast_dputfield: case Bytecodes::_fast_fputfield: case Bytecodes::_fast_iputfield: case Bytecodes::_fast_lputfield: case Bytecodes::_fast_sputfield:
{ // We skip bytecode quickening for putfield instructions when // the put_code written to the constant pool cache is zero. // This is required so that every execution of this instruction // calls out to InterpreterRuntime::resolve_get_put to do // additional, required work.
assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
assert(load_bc_into_bc_reg, "we use bc_reg as temp");
__ get_cache_and_index_and_bytecode_at_bcp(bc_reg, temp_reg, temp_reg, byte_no, 1, sizeof(u2));
__ mov(bc_reg, bc);
__ cbz(temp_reg, L_patch_done); // test if bytecode is zero
} break; default:
assert(byte_no == -1, "sanity"); // the pair bytecodes have already done the load. if (load_bc_into_bc_reg) {
__ mov(bc_reg, bc);
}
}
if (__ can_post_breakpoint()) {
Label L_fast_patch; // if a breakpoint is present we can't rewrite the stream directly
__ ldrb(temp_reg, at_bcp(0));
__ cmp(temp_reg, Bytecodes::_breakpoint);
__ b(L_fast_patch, ne); if (bc_reg != R3) {
__ mov(R3, bc_reg);
}
__ mov(R1, Rmethod);
__ mov(R2, Rbcp); // Let breakpoint table handling rewrite to quicker bytecode
__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::set_original_bytecode_at), R1, R2, R3);
__ b(L_patch_done);
__ bind(L_fast_patch);
}
void TemplateTable::dconst(int value) {
transition(vtos, dtos); constint one_lo = 0; // low part of 1.0 constint one_hi = 0x3ff00000; // high part of 1.0
// get const type
__ add(Rtemp, Rtags, tags_offset);
__ ldrb(RtagType, Address(Rtemp, Rindex));
volatile_barrier(MacroAssembler::LoadLoad, Rtemp);
// unresolved class - get the resolved class
__ cmp(RtagType, JVM_CONSTANT_UnresolvedClass);
// unresolved class in error (resolution failed) - call into runtime // so that the same error from first resolution attempt is thrown.
__ cond_cmp(RtagType, JVM_CONSTANT_UnresolvedClassInError, ne);
// resolved class - need to call vm to get java mirror of the class
__ cond_cmp(RtagType, JVM_CONSTANT_Class, ne);
// first time invocation - must resolve first
__ mov(R1, (int)bytecode());
__ call_VM(R0_tos, entry, R1);
__ bind(resolved);
{ // Check for the null sentinel. // If we just called the VM, that already did the mapping for us, // but it's harmless to retry.
Label notNull; Register result = R0; Register tmp = R1; Register rarg = R2;
// Stash null_sentinel address to get its value later
__ mov_slow(rarg, (uintptr_t)Universe::the_null_sentinel_addr());
__ ldr(tmp, Address(rarg));
__ resolve_oop_handle(tmp);
__ cmp(result, tmp);
__ b(notNull, ne);
__ mov(result, 0); // NULL object reference
__ bind(notNull);
}
__ logical_shift_right(flags, flags, ConstantPoolCacheEntry::tos_state_shift); // Make sure we don't need to mask flags after the above shift
ConstantPoolCacheEntry::verify_tos_state_shift();
switch (bytecode()) { case Bytecodes::_ldc: case Bytecodes::_ldc_w:
{ // tos in (itos, ftos, stos, btos, ctos, ztos)
Label notIntFloat, notShort, notByte, notChar, notBool;
__ cmp(flags, itos);
__ cond_cmp(flags, ftos, ne);
__ b(notIntFloat, ne);
__ ldr(R0_tos, field);
__ push(itos);
__ b(Done);
// get next byte
__ ldrb(next_bytecode, at_bcp(Bytecodes::length_for(Bytecodes::_iload))); // if _iload, wait to rewrite to iload2. We only want to rewrite the // last two iloads in a pair. Comparing against fast_iload means that // the next bytecode is neither an iload or a caload, and therefore // an iload pair.
__ cmp(next_bytecode, Bytecodes::_iload);
__ b(done, eq);
// Get the local value into tos constRegister Rlocal_index = R1_tmp;
locals_index(Rlocal_index);
Address local = load_iaddress(Rlocal_index, Rtemp);
__ ldr_s32(R0_tos, local);
}
// Get the local value into tos
locals_index(Rlocal_index);
Address local = load_faddress(Rlocal_index, Rtemp); #ifdef __SOFTFP__
__ ldr(R0_tos, local); #else
__ ldr_float(S0_tos, local); #endif// __SOFTFP__
}
void TemplateTable::aload_0_internal(RewriteControl rc) {
transition(vtos, atos); // According to bytecode histograms, the pairs: // // _aload_0, _fast_igetfield // _aload_0, _fast_agetfield // _aload_0, _fast_fgetfield // // occur frequently. If RewriteFrequentPairs is set, the (slow) _aload_0 // bytecode checks if the next bytecode is either _fast_igetfield, // _fast_agetfield or _fast_fgetfield and then rewrites the // current bytecode into a pair bytecode; otherwise it rewrites the current // bytecode into _fast_aload_0 that doesn't do the pair check anymore. // // Note: If the next bytecode is _getfield, the rewrite must be delayed, // otherwise we may miss an opportunity for a pair. // // Also rewrite frequent pairs // aload_0, aload_1 // aload_0, iload_1 // These bytecodes with a small amount of code are most profitable to rewrite if ((rc == may_rewrite) && __ rewrite_frequent_pairs()) {
Label rewrite, done; constRegister next_bytecode = R1_tmp; constRegister target_bytecode = R2_tmp;
// get next byte
__ ldrb(next_bytecode, at_bcp(Bytecodes::length_for(Bytecodes::_aload_0)));
// if _getfield then wait with rewrite
__ cmp(next_bytecode, Bytecodes::_getfield);
__ b(done, eq);
// if _igetfield then rewrite to _fast_iaccess_0
assert(Bytecodes::java_code(Bytecodes::_fast_iaccess_0) == Bytecodes::_aload_0, "fix bytecode definition");
__ cmp(next_bytecode, Bytecodes::_fast_igetfield);
__ mov(target_bytecode, Bytecodes::_fast_iaccess_0);
__ b(rewrite, eq);
// if _agetfield then rewrite to _fast_aaccess_0
assert(Bytecodes::java_code(Bytecodes::_fast_aaccess_0) == Bytecodes::_aload_0, "fix bytecode definition");
__ cmp(next_bytecode, Bytecodes::_fast_agetfield);
__ mov(target_bytecode, Bytecodes::_fast_aaccess_0);
__ b(rewrite, eq);
// if _fgetfield then rewrite to _fast_faccess_0, else rewrite to _fast_aload0
assert(Bytecodes::java_code(Bytecodes::_fast_faccess_0) == Bytecodes::_aload_0, "fix bytecode definition");
assert(Bytecodes::java_code(Bytecodes::_fast_aload_0) == Bytecodes::_aload_0, "fix bytecode definition");
// sastore: a short array store writes the same 16 bits at the same element
// offset as a char array store, so we simply delegate to castore().
void TemplateTable::sastore() {
  // castore() is only a valid implementation if T_SHORT and T_CHAR arrays
  // lay out their first element at the same offset; check that here.
  assert(arrayOopDesc::base_offset_in_bytes(T_CHAR) ==
         arrayOopDesc::base_offset_in_bytes(T_SHORT),
         "base offsets for char and short should be equal");
  castore();
}
// dup: duplicates the top stack word.
void TemplateTable::dup() {
  transition(vtos, vtos);
  // stack before: ..., a
  __ load_ptr(0, R0_tmp);
  __ push_ptr(R0_tmp);
  // stack after:  ..., a, a
}
// dup_x1: duplicates the top word and inserts the copy one slot down.
void TemplateTable::dup_x1() {
  transition(vtos, vtos);
  // stack before: ..., a, b
  __ load_ptr(0, R0_tmp);   // read b
  __ load_ptr(1, R2_tmp);   // read a
  __ store_ptr(1, R0_tmp);  // write b into a's old slot
  __ store_ptr(0, R2_tmp);  // write a into b's old slot
  __ push_ptr(R0_tmp);      // push the duplicate b
  // stack after:  ..., b, a, b
}
// dup_x2: duplicates the top word and inserts the copy two slots down.
void TemplateTable::dup_x2() {
  transition(vtos, vtos);
  // stack before: ..., a, b, c
  __ load_ptr(0, R0_tmp);   // read c
  __ load_ptr(1, R2_tmp);   // read b
  __ load_ptr(2, R4_tmp);   // read a
  __ push_ptr(R0_tmp);      // push duplicate c
  // stack now:    ..., a, b, c, c
  __ store_ptr(1, R2_tmp);  // b into old c slot
  __ store_ptr(2, R4_tmp);  // a into old b slot
  __ store_ptr(3, R0_tmp);  // c into old a slot
  // stack after:  ..., c, a, b, c
}
// dup2: duplicates the top two stack words.
void TemplateTable::dup2() {
  transition(vtos, vtos);
  // stack before: ..., a, b
  __ load_ptr(1, R0_tmp);  // read a
  __ push_ptr(R0_tmp);     // push a
  __ load_ptr(1, R0_tmp);  // read b (depth 1 again after the push above)
  __ push_ptr(R0_tmp);     // push b
  // stack after:  ..., a, b, a, b
}
// NOTE(review): the function header and transition() prologue were dropped by
// the source extraction; judging by the stack effect (..., a, b, c ->
// ..., b, c, a, b, c) this is presumably the body of dup2_x1 — confirm
// against the upstream file.
// stack: ..., a, b, c
__ load_ptr(0, R4_tmp); // load c
__ load_ptr(1, R2_tmp); // load b
__ load_ptr(2, R0_tmp); // load a
__ push_ptr(R2_tmp); // push b
__ push_ptr(R4_tmp); // push c
// stack: ..., a, b, c, b, c
__ store_ptr(2, R0_tmp); // store a
__ store_ptr(3, R4_tmp); // store c
__ store_ptr(4, R2_tmp); // store b
// stack: ..., b, c, a, b, c
}
// dup2_x2: duplicates the top two words and inserts the copies four slots down.
void TemplateTable::dup2_x2() {
  transition(vtos, vtos);
  // stack before: ..., a, b, c, d
  __ load_ptr(0, R0_tmp);   // read d
  __ load_ptr(1, R2_tmp);   // read c
  __ push_ptr(R2_tmp);      // push duplicate c
  __ push_ptr(R0_tmp);      // push duplicate d
  // stack now:    ..., a, b, c, d, c, d
  __ load_ptr(4, R4_tmp);   // read b
  __ store_ptr(4, R0_tmp);  // d overwrites b
  __ store_ptr(2, R4_tmp);  // b overwrites the old d
  // stack now:    ..., a, d, c, b, c, d
  __ load_ptr(5, R4_tmp);   // read a
  __ store_ptr(5, R2_tmp);  // c overwrites a
  __ store_ptr(3, R4_tmp);  // a overwrites the old c
  // stack after:  ..., c, d, a, b, c, d
}
// swap: exchanges the top two stack words.
void TemplateTable::swap() {
  transition(vtos, vtos);
  // stack before: ..., a, b
  __ load_ptr(1, R0_tmp);   // read a
  __ load_ptr(0, R2_tmp);   // read b
  __ store_ptr(0, R0_tmp);  // a into b's slot
  __ store_ptr(1, R2_tmp);  // b into a's slot
  // stack after:  ..., b, a
}
// dneg: negates the double on top of stack.
// NOTE(review): in the extracted source the #ifdef/#else/#endif directives and
// the "const int" keywords were collapsed onto code lines ("constint",
// "#endif// __SOFTFP__" fused to statements), which cannot preprocess or
// compile; the conventional layout is restored here. No logic is changed.
void TemplateTable::dneg() {
  transition(dtos, dtos);
#ifdef __SOFTFP__
  // Invert sign bit in the high part of the double
  const int sign_mask_hi = 0x80000000;
  __ eor(R1_tos_hi, R1_tos_hi, sign_mask_hi);
#else
  __ neg_double(D0_tos, D0_tos);
#endif // __SOFTFP__
}
void TemplateTable::convert() { // Checking #ifdef ASSERT
{ TosState tos_in = ilgl;
TosState tos_out = ilgl; switch (bytecode()) { case Bytecodes::_i2l: // fall through case Bytecodes::_i2f: // fall through case Bytecodes::_i2d: // fall through case Bytecodes::_i2b: // fall through case Bytecodes::_i2c: // fall through case Bytecodes::_i2s: tos_in = itos; break; case Bytecodes::_l2i: // fall through case Bytecodes::_l2f: // fall through case Bytecodes::_l2d: tos_in = ltos; break; case Bytecodes::_f2i: // fall through case Bytecodes::_f2l: // fall through case Bytecodes::_f2d: tos_in = ftos; break; case Bytecodes::_d2i: // fall through case Bytecodes::_d2l: // fall through case Bytecodes::_d2f: tos_in = dtos; break; default : ShouldNotReachHere();
} switch (bytecode()) { case Bytecodes::_l2i: // fall through case Bytecodes::_f2i: // fall through case Bytecodes::_d2i: // fall through case Bytecodes::_i2b: // fall through case Bytecodes::_i2c: // fall through case Bytecodes::_i2s: tos_out = itos; break; case Bytecodes::_i2l: // fall through case Bytecodes::_f2l: // fall through case Bytecodes::_d2l: tos_out = ltos; break; case Bytecodes::_i2f: // fall through case Bytecodes::_l2f: // fall through case Bytecodes::_d2f: tos_out = ftos; break; case Bytecodes::_i2d: // fall through case Bytecodes::_l2d: // fall through case Bytecodes::_f2d: tos_out = dtos; break; default : ShouldNotReachHere();
}
transition(tos_in, tos_out);
} #endif// ASSERT
// comparison result | flag N | flag Z | flag C | flag V // "<" | 1 | 0 | 0 | 0 // "==" | 0 | 1 | 1 | 0 // ">" | 0 | 0 | 1 | 0 // unordered | 0 | 0 | 1 | 1
if (unordered_result < 0) {
__ mov(R0_tos, 1); // result == 1 if greater
__ mvn(R0_tos, 0, lt); // result == -1 if less or unordered (N!=V)
} else {
__ mov(R0_tos, 1); // result == 1 if greater or unordered
__ mvn(R0_tos, 0, mi); // result == -1 if less (N=1)
}
__ mov(R0_tos, 0, eq); // result == 0 if equ (Z=1) #endif// __SOFTFP__
}
// Handle all the JSR stuff here, then exit. // It's much shorter and cleaner than intermingling with the // non-JSR normal-branch stuff occurring below. if (is_jsr) { // compute return address as bci in R1 constRegister Rret_addr = R1_tmp;
assert_different_registers(Rdisp, Rret_addr, Rtemp);
// R0: osr nmethod (osr ok) or NULL (osr not possible) constRegister Rnmethod = R0;
__ ldrb(R3_bytecode, Address(Rbcp)); // reload next bytecode
__ cbz(Rnmethod, dispatch); // test result, no osr if null
// nmethod may have been invalidated (VM may block upon call_VM return)
__ ldrb(R1_tmp, Address(Rnmethod, nmethod::state_offset()));
__ cmp(R1_tmp, nmethod::in_use);
__ b(dispatch, ne);
// We have the address of an on stack replacement routine in Rnmethod, // We need to prepare to execute the OSR method. First we must // migrate the locals and monitors off of the stack.
// if_0cmp: compares the int on TOS against zero and takes the bytecode
// branch when condition cc holds.
void TemplateTable::if_0cmp(Condition cc) {
  transition(itos, vtos);
  // Assume the branch is taken more often than not
  // (loops use backward branches).
  Label not_taken;
  __ cmp_32(R0_tos, 0);
  __ b(not_taken, convNegCond(cc));  // skip branch() on the negated condition
  branch(false, false);
  __ bind(not_taken);
  __ profile_not_taken_branch(R0_tmp);
}
// if_icmp: pops one int, compares it with the int on TOS and takes the
// bytecode branch when condition cc holds.
void TemplateTable::if_icmp(Condition cc) {
  transition(itos, vtos);
  // Assume the branch is taken more often than not
  // (loops use backward branches).
  Label not_taken;
  __ pop_i(R1_tmp);                  // second-from-top operand
  __ cmp_32(R1_tmp, R0_tos);
  __ b(not_taken, convNegCond(cc));  // skip branch() on the negated condition
  branch(false, false);
  __ bind(not_taken);
  __ profile_not_taken_branch(R0_tmp);
}
// NOTE(review): the function header and transition() prologue were dropped by
// the source extraction; from the null test on the object in R0_tos this is
// presumably the body of if_nullcmp (ifnull/ifnonnull) — confirm against the
// upstream file.
// assume branch is more often taken than not (loops use backward branches)
Label not_taken; if (cc == equal) {
__ cbnz(R0_tos, not_taken);
} else {
__ cbz(R0_tos, not_taken);
}
branch(false, false);
__ bind(not_taken);
__ profile_not_taken_branch(R0_tmp);
}
// if_acmp: pops one object reference, compares it with the reference on TOS
// and takes the bytecode branch when condition cc holds.
void TemplateTable::if_acmp(Condition cc) {
  transition(atos, vtos);
  // Assume the branch is taken more often than not
  // (loops use backward branches).
  Label not_taken;
  __ pop_ptr(R1_tmp);                // second-from-top operand
  __ cmpoop(R1_tmp, R0_tos);
  __ b(not_taken, convNegCond(cc));  // skip branch() on the negated condition
  branch(false, false);
  __ bind(not_taken);
  __ profile_not_taken_branch(R0_tmp);
}
¤ Die Informationen auf dieser Webseite wurden
nach bestem Wissen sorgfältig zusammengestellt. Es wird jedoch weder Vollständigkeit, noch Richtigkeit,
noch Qualität der bereitgestellten Informationen zugesichert. Bemerkung:
(vorverarbeitet)
¤
Die Informationen auf dieser Webseite wurden
nach bestem Wissen sorgfältig zusammengestellt. Es wird jedoch weder Vollständigkeit, noch Richtigkeit,
noch Qualität der bereit gestellten Informationen zugesichert.
Bemerkung:
Die farbliche Syntaxdarstellung ist noch experimentell.