/* * Copyright (c) 1999, 2022, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. *
*/
// Check for blackhole intrinsic and then populate the intrinsic ID.
CompilerOracle::tag_blackhole_if_possible(h_m);
_intrinsic_id = h_m->intrinsic_id();
ciEnv *env = CURRENT_ENV; if (env->jvmti_can_hotswap_or_post_breakpoint()) { // 6328518 check hotswap conditions under the right lock.
MutexLocker locker(Compile_lock); if (Dependencies::check_evol_method(h_m()) != NULL) {
_is_c1_compilable = false;
_is_c2_compilable = false;
_can_be_parsed = false;
}
} else {
DEBUG_ONLY(CompilerThread::current()->check_possible_safepoint());
}
if (h_m->method_holder()->is_linked()) {
_can_be_statically_bound = h_m->can_be_statically_bound();
_can_omit_stack_trace = h_m->can_omit_stack_trace();
} else { // Have to use a conservative value in this case.
_can_be_statically_bound = false;
_can_omit_stack_trace = true;
}
// Adjust the definition of this condition to be more useful: // %%% take these conditions into account in vtable generation if (!_can_be_statically_bound && h_m->is_private())
_can_be_statically_bound = true; if (_can_be_statically_bound && h_m->is_abstract())
_can_be_statically_bound = false;
// generating _signature may allow GC and therefore move m. // These fields are always filled in.
_name = env->get_symbol(h_m->name());
ciSymbol* sig_symbol = env->get_symbol(h_m->signature());
constantPoolHandle cpool(Thread::current(), h_m->constants());
_signature = new (env->arena()) ciSignature(_holder, cpool, sig_symbol);
_method_data = NULL; // Take a snapshot of these values, so they will be commensurate with the MDO. if (ProfileInterpreter || CompilerConfig::is_c1_profiling()) { int invcnt = h_m->interpreter_invocation_count(); // if the value overflowed report it as max int
_interpreter_invocation_count = invcnt < 0 ? max_jint : invcnt ;
_interpreter_throwout_count = h_m->interpreter_throwout_count();
} else {
_interpreter_invocation_count = 0;
_interpreter_throwout_count = 0;
} if (_interpreter_invocation_count == 0)
_interpreter_invocation_count = 1;
_instructions_size = -1; if (ReplayCompiles) {
ciReplay::initialize(this);
}
}
// ------------------------------------------------------------------
// ciMethod::ciMethod
//
// Unloaded method: only the name, holder and signature are known, so
// most fields get conservative defaults.
ciMethod::ciMethod(ciInstanceKlass* holder,
                   ciSymbol*        name,
                   ciSymbol*        signature,
                   ciInstanceKlass* accessor) :
  ciMetadata((Metadata*)NULL),
  _name(                   name),
  _holder(                 holder),
  _method_data(            NULL),
  _method_blocks(          NULL),
  _intrinsic_id(           vmIntrinsics::_none),
  _instructions_size(-1),
  _can_be_statically_bound(false),
  _can_omit_stack_trace(true),
  _liveness(               NULL)
#if defined(COMPILER2)
  ,
  _flow(                   NULL),
  _bcea(                   NULL)
#endif // COMPILER2
{
  // Usually holder and accessor are the same type but in some cases
  // the holder has the wrong class loader (e.g. invokedynamic call
  // sites) so we pass the accessor.
  _signature = new (CURRENT_ENV->arena()) ciSignature(accessor, constantPoolHandle(), signature);
}
// ------------------------------------------------------------------ // ciMethod::load_code // // Load the bytecodes and exception handler table for this method. void ciMethod::load_code() {
VM_ENTRY_MARK;
assert(is_loaded(), "only loaded methods have code");
Method* me = get_Method();
Arena* arena = CURRENT_THREAD_ENV->arena();
// Load the bytecodes.
_code = (address)arena->Amalloc(code_size());
memcpy(_code, me->code_base(), code_size());
#if INCLUDE_JVMTI // Revert any breakpoint bytecodes in ci's copy if (me->number_of_breakpoints() > 0) {
BreakpointInfo* bp = me->method_holder()->breakpoints(); for (; bp != NULL; bp = bp->next()) { if (bp->match(me)) {
code_at_put(bp->bci(), bp->orig_bytecode());
}
}
} #endif
// And load the exception table.
ExceptionTable exc_table(me);
// Allocate one extra spot in our list of exceptions. This // last entry will be used to represent the possibility that // an exception escapes the method. See ciExceptionHandlerStream // for details.
_exception_handlers =
(ciExceptionHandler**)arena->Amalloc(sizeof(ciExceptionHandler*)
* (_handler_count + 1)); if (_handler_count > 0) { for (int i=0; i<_handler_count; i++) {
_exception_handlers[i] = new (arena) ciExceptionHandler(
holder(), /* start */ exc_table.start_pc(i), /* limit */ exc_table.end_pc(i), /* goto pc */ exc_table.handler_pc(i), /* cp index */ exc_table.catch_type_index(i));
}
}
// Put an entry at the end of our list to represent the possibility // of exceptional exit.
_exception_handlers[_handler_count] = new (arena) ciExceptionHandler(holder(), 0, code_size(), -1, 0);
// ------------------------------------------------------------------ // ciMethod::vtable_index // // Get the position of this method's entry in the vtable, if any. int ciMethod::vtable_index() {
check_is_loaded();
assert(holder()->is_linked(), "must be linked");
VM_ENTRY_MARK; return get_Method()->vtable_index();
}
// ------------------------------------------------------------------ // ciMethod::uses_balanced_monitors // // Does this method use monitors in a strict stack-disciplined manner? bool ciMethod::has_balanced_monitors() {
check_is_loaded(); if (_balanced_monitors) returntrue;
// Analyze the method to see if monitors are used properly.
VM_ENTRY_MARK;
methodHandle method(THREAD, get_Method());
assert(method->has_monitor_bytecodes(), "should have checked this");
// Check to see if a previous compilation computed the // monitor-matching analysis. if (method->guaranteed_monitor_matching()) {
_balanced_monitors = true; returntrue;
}
{
ExceptionMark em(THREAD);
ResourceMark rm(THREAD);
GeneratePairingInfo gpi(method); if (!gpi.compute_map(THREAD)) {
fatal("Unrecoverable verification or out-of-memory error");
} if (!gpi.monitor_safe()) { returnfalse;
}
method->set_guaranteed_monitor_matching();
_balanced_monitors = true;
} returntrue;
}
// ------------------------------------------------------------------
// ciMethod::get_osr_flow_analysis
ciTypeFlow* ciMethod::get_osr_flow_analysis(int osr_bci) {
#if defined(COMPILER2)
  // OSR entry points are always placed after a call bytecode of some sort
  assert(osr_bci >= 0, "must supply valid OSR entry point");
  ciEnv* env = CURRENT_ENV;
  ciTypeFlow* flow = new (env->arena()) ciTypeFlow(env, this, osr_bci);
  flow->do_flow();
  return flow;
#else // COMPILER2
  ShouldNotReachHere();
  return NULL;
#endif // COMPILER2
}
// ------------------------------------------------------------------
// ciMethod::raw_liveness_at_bci
//
// Which local variables are live at a specific bci?
MethodLivenessResult ciMethod::raw_liveness_at_bci(int bci) {
  check_is_loaded();
  if (_liveness == NULL) {
    // Create the liveness analyzer.
    Arena* arena = CURRENT_ENV->arena();
    _liveness = new (arena) MethodLiveness(arena, this);
    _liveness->compute_liveness();
  }
  return _liveness->get_liveness_at(bci);
}
// ------------------------------------------------------------------
// ciMethod::liveness_at_bci
//
// Which local variables are live at a specific bci? When debugging
// will return true for all locals in some cases to improve debug
// information.
MethodLivenessResult ciMethod::liveness_at_bci(int bci) {
  if (CURRENT_ENV->should_retain_local_variables() || DeoptimizeALot) {
    // Keep all locals live for the user's edification and amusement.
    MethodLivenessResult result(_max_locals);
    result.set_range(0, _max_locals);
    result.set_is_valid();
    return result;
  }
  return raw_liveness_at_bci(bci);
}
// ciMethod::live_local_oops_at_bci
//
// find all the live oops in the locals array for a particular bci
// Compute what the interpreter believes by using the interpreter
// oopmap generator. This is used as a double check during osr to
// guard against conservative result from MethodLiveness making us
// think a dead oop is live.  MethodLiveness is conservative in the
// sense that it may consider locals to be live which cannot be live,
// like in the case where a local could contain an oop or a primitive
// along different paths.  In that case the local must be dead when
// those paths merge. Since the interpreter's viewpoint is used when
// gc'ing an interpreter frame we need to use its viewpoint during
// OSR when loading the locals.
ResourceBitMap ciMethod::live_local_oops_at_bci(int bci) {
  VM_ENTRY_MARK;
  InterpreterOopMap mask;
  OopMapCache::compute_one_oop_map(methodHandle(THREAD, get_Method()), bci, &mask);
  int mask_size = max_locals();
  ResourceBitMap result(mask_size);
  for (int i = 0; i < mask_size; i++) {
    if (mask.is_oop(i)) result.set_bit(i);
  }
  return result;
}
#ifdef COMPILER1
// ------------------------------------------------------------------
// ciMethod::bci_block_start
//
// Marks all bcis where a new basic block starts
const BitMap& ciMethod::bci_block_start() {
  check_is_loaded();
  if (_liveness == NULL) {
    // Create the liveness analyzer.
    Arena* arena = CURRENT_ENV->arena();
    _liveness = new (arena) MethodLiveness(arena, this);
    _liveness->compute_liveness();
  }
  // NOTE(review): the tail of this function (the return statement, closing
  // brace and #endif) was truncated in this copy; restored from the upstream
  // implementation of this file — verify against VCS history.
  return _liveness->get_bci_block_start();
}
#endif // COMPILER1
// ------------------------------------------------------------------ // ciMethod::check_overflow // // Check whether the profile counter is overflowed and adjust if true. // For invoke* it will turn negative values into max_jint, // and for checkcast/aastore/instanceof turn positive values into min_jint. int ciMethod::check_overflow(int c, Bytecodes::Code code) { switch (code) { case Bytecodes::_aastore: // fall-through case Bytecodes::_checkcast: // fall-through case Bytecodes::_instanceof: { return (c > 0 ? min_jint : c); // always non-positive
} default: {
assert(Bytecodes::is_invoke(code), "%s", Bytecodes::name(code)); return (c < 0 ? max_jint : c); // always non-negative
}
}
}
// ------------------------------------------------------------------
// ciMethod::call_profile_at_bci
//
// Get the ciCallProfile for the invocation of this method.
// Also reports receiver types for non-call type checks (if TypeProfileCasts).
ciCallProfile ciMethod::call_profile_at_bci(int bci) {
  ResourceMark rm;
  ciCallProfile result;
  if (method_data() != NULL && method_data()->is_mature()) {
    ciProfileData* data = method_data()->bci_to_data(bci);
    if (data != NULL && data->is_CounterData()) {
      // Every profiled call site has a counter.
      int count = check_overflow(data->as_CounterData()->count(), java_code_at_bci(bci));

      if (!data->is_ReceiverTypeData()) {
        result._receiver_count[0] = 0;  // that's a definite zero
      } else { // ReceiverTypeData is a subclass of CounterData
        ciReceiverTypeData* call = (ciReceiverTypeData*)data->as_ReceiverTypeData();
        // In addition, virtual call sites have receiver type information
        int receivers_count_total = 0;
        int morphism = 0;
        // Precompute morphism for the possible fixup
        for (uint i = 0; i < call->row_limit(); i++) {
          ciKlass* receiver = call->receiver(i);
          if (receiver == NULL) continue;
          morphism++;
        }
        int epsilon = 0;
        // For a call, it is assumed that either the type of the receiver(s)
        // is recorded or an associated counter is incremented, but not both. With
        // tiered compilation, however, both can happen due to the interpreter and
        // C1 profiling invocations differently. Address that inconsistency here.
        if (morphism == 1 && count > 0) {
          epsilon = count;
          count = 0;
        }
        for (uint i = 0; i < call->row_limit(); i++) {
          ciKlass* receiver = call->receiver(i);
          if (receiver == NULL) continue;
          int rcount = saturated_add(call->receiver_count(i), epsilon);
          if (rcount == 0) rcount = 1; // Should be valid value
          receivers_count_total = saturated_add(receivers_count_total, rcount);
          // Add the receiver to result data.
          result.add_receiver(receiver, rcount);
          // If we extend profiling to record methods,
          // we will set result._method also.
        }
        // Determine call site's morphism.
        // The call site count is 0 with known morphism (only 1 or 2 receivers)
        // or < 0 in the case of a type check failure for checkcast, aastore, instanceof.
        // The call site count is > 0 in the case of a polymorphic virtual call.
        if (morphism > 0 && morphism == result._limit) {
          // The morphism <= MorphismLimit.
          if ((morphism <  ciCallProfile::MorphismLimit) ||
              (morphism == ciCallProfile::MorphismLimit && count == 0)) {
#ifdef ASSERT
            if (count > 0) {
              this->print_short_name(tty);
              tty->print_cr(" @ bci:%d", bci);
              this->print_codes();
              assert(false, "this call site should not be polymorphic");
            }
#endif
            result._morphism = morphism;
          }
        }
        // Make the count consistent if this is a call profile. If count is
        // zero or less, presume that this is a typecheck profile and
        // do nothing.  Otherwise, increase count to be the sum of all
        // receiver's counts.
        if (count >= 0) {
          count = saturated_add(count, receivers_count_total);
        }
      }
      result._count = count;
    }
  }
  return result;
}
// ------------------------------------------------------------------ // Add new receiver and sort data by receiver's profile count. void ciCallProfile::add_receiver(ciKlass* receiver, int receiver_count) { // Add new receiver and sort data by receiver's counts when we have space // for it otherwise replace the less called receiver (less called receiver // is placed to the last array element which is not used). // First array's element contains most called receiver. int i = _limit; for (; i > 0 && receiver_count > _receiver_count[i-1]; i--) {
_receiver[i] = _receiver[i-1];
_receiver_count[i] = _receiver_count[i-1];
}
_receiver[i] = receiver;
_receiver_count[i] = receiver_count; if (_limit < MorphismLimit) _limit++;
}
/** * Check whether profiling provides a type for the argument i to the * call at bci bci * * @param [in]bci bci of the call * @param [in]i argument number * @param [out]type profiled type of argument, NULL if none * @param [out]ptr_kind whether always null, never null or maybe null * @return true if profiling exists *
*/ bool ciMethod::argument_profiled_type(int bci, int i, ciKlass*& type, ProfilePtrKind& ptr_kind) { if (MethodData::profile_parameters() && method_data() != NULL && method_data()->is_mature()) {
ciProfileData* data = method_data()->bci_to_data(bci); if (data != NULL) { if (data->is_VirtualCallTypeData()) {
assert_virtual_call_type_ok(bci);
ciVirtualCallTypeData* call = (ciVirtualCallTypeData*)data->as_VirtualCallTypeData(); if (i >= call->number_of_arguments()) { returnfalse;
}
type = call->valid_argument_type(i);
ptr_kind = call->argument_ptr_kind(i); returntrue;
} elseif (data->is_CallTypeData()) {
assert_call_type_ok(bci);
ciCallTypeData* call = (ciCallTypeData*)data->as_CallTypeData(); if (i >= call->number_of_arguments()) { returnfalse;
}
type = call->valid_argument_type(i);
ptr_kind = call->argument_ptr_kind(i); returntrue;
}
}
} returnfalse;
}
/** * Check whether profiling provides a type for the return value from * the call at bci bci * * @param [in]bci bci of the call * @param [out]type profiled type of argument, NULL if none * @param [out]ptr_kind whether always null, never null or maybe null * @return true if profiling exists *
*/ bool ciMethod::return_profiled_type(int bci, ciKlass*& type, ProfilePtrKind& ptr_kind) { if (MethodData::profile_return() && method_data() != NULL && method_data()->is_mature()) {
ciProfileData* data = method_data()->bci_to_data(bci); if (data != NULL) { if (data->is_VirtualCallTypeData()) {
assert_virtual_call_type_ok(bci);
ciVirtualCallTypeData* call = (ciVirtualCallTypeData*)data->as_VirtualCallTypeData(); if (call->has_return()) {
type = call->valid_return_type();
ptr_kind = call->return_ptr_kind(); returntrue;
}
} elseif (data->is_CallTypeData()) {
assert_call_type_ok(bci);
ciCallTypeData* call = (ciCallTypeData*)data->as_CallTypeData(); if (call->has_return()) {
type = call->valid_return_type();
ptr_kind = call->return_ptr_kind();
} returntrue;
}
}
} returnfalse;
}
/**
 * Check whether profiling provides a type for the parameter i
 *
 * @param [in]i           parameter number
 * @param [out]type       profiled type of parameter, NULL if none
 * @param [out]ptr_kind   whether always null, never null or maybe null
 * @return                true if profiling exists
 *
 */
bool ciMethod::parameter_profiled_type(int i, ciKlass*& type, ProfilePtrKind& ptr_kind) {
  if (MethodData::profile_parameters() && method_data() != NULL && method_data()->is_mature()) {
    ciParametersTypeData* parameters = method_data()->parameters_type_data();
    if (parameters != NULL && i < parameters->number_of_parameters()) {
      type = parameters->valid_parameter_type(i);
      ptr_kind = parameters->parameter_ptr_kind(i);
      return true;
    }
  }
  return false;
}
// ------------------------------------------------------------------
// ciMethod::find_monomorphic_target
//
// Given a certain calling environment, find the monomorphic target
// for the call.  Return NULL if the call is not monomorphic in
// its calling environment, or if there are only abstract methods.
// The returned method is never abstract.
// Note: If caller uses a non-null result, it must inform dependencies
// via assert_unique_concrete_method or assert_leaf_type.
ciMethod* ciMethod::find_monomorphic_target(ciInstanceKlass* caller,
                                            ciInstanceKlass* callee_holder,
                                            ciInstanceKlass* actual_recv,
                                            bool check_access) {
  check_is_loaded();

  if (actual_recv->is_interface()) {
    // %%% We cannot trust interface types, yet.  See bug 6312651.
    return NULL;
  }

  ciMethod* root_m = resolve_invoke(caller, actual_recv, check_access, true /* allow_abstract */);
  if (root_m == NULL) {
    // Something went wrong looking up the actual receiver method.
    return NULL;
  }

  // Make certain quick checks even if UseCHA is false.

  // Is it private or final?
  if (root_m->can_be_statically_bound()) {
    assert(!root_m->is_abstract(), "sanity");
    return root_m;
  }

  if (actual_recv->is_leaf_type() && actual_recv == root_m->holder()) {
    // Easy case.  There is no other place to put a method, so don't bother
    // to go through the VM_ENTRY_MARK and all the rest.
    if (root_m->is_abstract()) {
      return NULL;
    }
    return root_m;
  }

  // Array methods (clone, hashCode, etc.) are always statically bound.
  // If we were to see an array type here, we'd return root_m.
  // However, this method processes only ciInstanceKlasses.  (See 4962591.)
  // The inline_native_clone intrinsic narrows Object to T[] properly,
  // so there is no need to do the same job here.

  if (!UseCHA) return NULL;

  VM_ENTRY_MARK;

  methodHandle target;
  {
    MutexLocker locker(Compile_lock);
    InstanceKlass* context = actual_recv->get_instanceKlass();
    if (UseVtableBasedCHA) {
      target = methodHandle(THREAD, Dependencies::find_unique_concrete_method(context,
                                                                              root_m->get_Method(),
                                                                              callee_holder->get_Klass(),
                                                                              this->get_Method()));
    } else {
      if (root_m->is_abstract()) {
        return NULL; // not supported
      }
      target = methodHandle(THREAD, Dependencies::find_unique_concrete_method(context, root_m->get_Method()));
    }
    assert(target() == NULL || !target()->is_abstract(), "not allowed");
    // %%% Should upgrade this ciMethod API to look for 1 or 2 concrete methods.
  }

  if (target() == NULL) {
    return NULL;
  }
  if (target() == root_m->get_Method()) {
    return root_m;
  }
  if (!root_m->is_public() &&
      !root_m->is_protected()) {
    // If we are going to reason about inheritance, it's easiest
    // if the method in question is public, protected, or private.
    // If the answer is not root_m, it is conservatively correct
    // to return NULL, even if the CHA encountered irrelevant
    // methods in other packages.
    // %%% TO DO: Work out logic for package-private methods
    // with the same name but different vtable indexes.
    return NULL;
  }
  return CURRENT_THREAD_ENV->get_method(target());
}
// ------------------------------------------------------------------ // ciMethod::can_be_statically_bound // // Tries to determine whether a method can be statically bound in some context. bool ciMethod::can_be_statically_bound(ciInstanceKlass* context) const { return (holder() == context) && can_be_statically_bound();
}
// ------------------------------------------------------------------ // ciMethod::can_omit_stack_trace // // Tries to determine whether a method can omit stack trace in throw in compiled code. bool ciMethod::can_omit_stack_trace() const { if (!StackTraceInThrowable) { returntrue; // stack trace is switched off.
} if (!OmitStackTraceInFastThrow) { returnfalse; // Have to provide stack trace.
} return _can_omit_stack_trace;
}
// ------------------------------------------------------------------
// ciMethod::resolve_invoke
//
// Given a known receiver klass, find the target for the call.
// Return NULL if the call has no target or the target is abstract.
ciMethod* ciMethod::resolve_invoke(ciKlass* caller, ciKlass* exact_receiver, bool check_access, bool allow_abstract) {
  check_is_loaded();
  VM_ENTRY_MARK;

  // NOTE(review): these five local declarations were dropped in this copy
  // (link_info and recv were used without definition); restored from the
  // upstream implementation — verify against VCS history.
  Klass* caller_klass = caller->get_Klass();
  Klass* recv         = exact_receiver->get_Klass();
  Klass* resolved     = holder()->get_Klass();
  Symbol* h_name      = name()->get_symbol();
  Symbol* h_signature = signature()->get_symbol();

  LinkInfo link_info(resolved, h_name, h_signature, caller_klass,
                     check_access ? LinkInfo::AccessCheck::required : LinkInfo::AccessCheck::skip,
                     check_access ? LinkInfo::LoaderConstraintCheck::required : LinkInfo::LoaderConstraintCheck::skip);
  Method* m = NULL;
  // Only do exact lookup if receiver klass has been linked.  Otherwise,
  // the vtable has not been setup, and the LinkResolver will fail.
  if (recv->is_array_klass()
       ||
      (InstanceKlass::cast(recv)->is_linked() && !exact_receiver->is_interface())) {
    if (holder()->is_interface()) {
      m = LinkResolver::resolve_interface_call_or_null(recv, link_info);
    } else {
      m = LinkResolver::resolve_virtual_call_or_null(recv, link_info);
    }
  }

  if (m == NULL) {
    // Return NULL only if there was a problem with lookup (uninitialized class, etc.)
    return NULL;
  }

  ciMethod* result = this;
  if (m != get_Method()) {
    result = CURRENT_THREAD_ENV->get_method(m);
  }

  if (result->is_abstract() && !allow_abstract) {
    // Don't return abstract methods because they aren't optimizable or interesting.
    return NULL;
  }
  return result;
}
// ------------------------------------------------------------------ // ciMethod::resolve_vtable_index // // Given a known receiver klass, find the vtable index for the call. // Return Method::invalid_vtable_index if the vtable_index is unknown. int ciMethod::resolve_vtable_index(ciKlass* caller, ciKlass* receiver) {
check_is_loaded();
int vtable_index = Method::invalid_vtable_index; // Only do lookup if receiver klass has been linked. Otherwise, // the vtable has not been setup, and the LinkResolver will fail. if (!receiver->is_interface()
&& (!receiver->is_instance_klass() ||
receiver->as_instance_klass()->is_linked())) {
VM_ENTRY_MARK;
// ------------------------------------------------------------------ // Adjust a CounterData count to be commensurate with // interpreter_invocation_count. If the MDO exists for // only 25% of the time the method exists, then the // counts in the MDO should be scaled by 4X, so that // they can be usefully and stably compared against the // invocation counts in methods. int ciMethod::scale_count(int count, float prof_factor) { if (count > 0 && method_data() != NULL) { int counter_life = method_data()->invocation_count(); int method_life = interpreter_invocation_count(); if (method_life < counter_life) { // may happen because of the snapshot timing
method_life = counter_life;
} if (counter_life > 0) {
count = (int)((double)count * prof_factor * method_life / counter_life + 0.5);
count = (count > 0) ? count : 1;
} else {
count = 1;
}
} return count;
}
// ------------------------------------------------------------------ // invokedynamic support
// ------------------------------------------------------------------ // ciMethod::is_method_handle_intrinsic // // Return true if the method is an instance of the JVM-generated // signature-polymorphic MethodHandle methods, _invokeBasic, _linkToVirtual, etc. bool ciMethod::is_method_handle_intrinsic() const {
vmIntrinsics::ID iid = _intrinsic_id; // do not check if loaded return (MethodHandles::is_signature_polymorphic(iid) &&
MethodHandles::is_signature_polymorphic_intrinsic(iid));
}
// ------------------------------------------------------------------ // ciMethod::is_compiled_lambda_form // // Return true if the method is a generated MethodHandle adapter. // These are built by Java code. bool ciMethod::is_compiled_lambda_form() const {
vmIntrinsics::ID iid = _intrinsic_id; // do not check if loaded return iid == vmIntrinsics::_compiledLambdaForm;
}
// ------------------------------------------------------------------ // ciMethod::has_member_arg // // Return true if the method is a linker intrinsic like _linkToVirtual. // These are built by the JVM. bool ciMethod::has_member_arg() const {
vmIntrinsics::ID iid = _intrinsic_id; // do not check if loaded return (MethodHandles::is_signature_polymorphic(iid) &&
MethodHandles::has_member_arg(iid));
}
// ------------------------------------------------------------------ // ciMethod::ensure_method_data // // Generate new MethodData* objects at compile time. // Return true if allocation was successful or no MDO is required. bool ciMethod::ensure_method_data(const methodHandle& h_m) {
EXCEPTION_CONTEXT; if (is_native() || is_abstract() || h_m()->is_accessor()) { returntrue;
} if (h_m()->method_data() == NULL) {
Method::build_profiling_method_data(h_m, THREAD); if (HAS_PENDING_EXCEPTION) {
CLEAR_PENDING_EXCEPTION;
}
} if (h_m()->method_data() != NULL) {
_method_data = CURRENT_ENV->get_method_data(h_m()->method_data()); return _method_data->load_data();
} else {
_method_data = CURRENT_ENV->get_empty_methodData(); returnfalse;
}
}
// public, retroactive version bool ciMethod::ensure_method_data() { bool result = true; if (_method_data == NULL || _method_data->is_empty()) {
GUARDED_VM_ENTRY({
methodHandle mh(Thread::current(), get_Method());
result = ensure_method_data(mh);
});
} return result;
}
// Highest compilation level at which an OSR version of this method exists.
int ciMethod::highest_osr_comp_level() {
  check_is_loaded();
  VM_ENTRY_MARK;
  return get_Method()->highest_osr_comp_level();
}
// ------------------------------------------------------------------ // ciMethod::code_size_for_inlining // // Code size for inlining decisions. This method returns a code // size of 1 for methods which has the ForceInline annotation. int ciMethod::code_size_for_inlining() {
check_is_loaded(); if (get_Method()->force_inline()) { return 1;
} return code_size();
}
// ------------------------------------------------------------------ // ciMethod::instructions_size // // This is a rough metric for "fat" methods, compared before inlining // with InlineSmallCode. The CodeBlob::code_size accessor includes // junk like exception handler, stubs, and constant table, which are // not highly relevant to an inlined method. So we use the more // specific accessor nmethod::insts_size. int ciMethod::instructions_size() { if (_instructions_size == -1) {
GUARDED_VM_ENTRY(
CompiledMethod* code = get_Method()->code(); if (code != NULL && (code->comp_level() == CompLevel_full_optimization)) {
_instructions_size = code->insts_end() - code->verified_entry_point();
} else {
_instructions_size = 0;
}
);
} return _instructions_size;
}
// ------------------------------------------------------------------ // ciMethod::has_unloaded_classes_in_signature bool ciMethod::has_unloaded_classes_in_signature() { // ciSignature is resolved against some accessing class and // signature classes aren't required to be local. As a benefit, // it makes signature classes visible through loader constraints. // So, encountering an unloaded class signals it is absent both in // the callee (local) and caller contexts. return signature()->has_unloaded_classes();
}
// ------------------------------------------------------------------ // ciMethod::check_call bool ciMethod::check_call(int refinfo_index, bool is_static) const { // This method is used only in C2 from InlineTree::ok_to_inline, // and is only used under -Xcomp. // It appears to fail when applied to an invokeinterface call site. // FIXME: Remove this method and resolve_method_statically; refactor to use the other LinkResolver entry points.
VM_ENTRY_MARK;
{
ExceptionMark em(THREAD);
HandleMark hm(THREAD);
constantPoolHandle pool (THREAD, get_Method()->constants());
Bytecodes::Code code = (is_static ? Bytecodes::_invokestatic : Bytecodes::_invokevirtual);
Method* spec_method = LinkResolver::resolve_method_statically(code, pool, refinfo_index, THREAD); if (HAS_PENDING_EXCEPTION) {
CLEAR_PENDING_EXCEPTION; returnfalse;
} else { return (spec_method->is_static() == is_static);
}
} returnfalse;
} // ------------------------------------------------------------------ // ciMethod::print_codes // // Print the bytecodes for this method. void ciMethod::print_codes_on(outputStream* st) {
check_is_loaded();
GUARDED_VM_ENTRY(get_Method()->print_codes_on(st);)
}
// Return true if this is one of the box-class valueOf factory intrinsics.
bool ciMethod::is_boxing_method() const {
  if (intrinsic_id() != vmIntrinsics::_none && holder()->is_box_klass()) {
    switch (intrinsic_id()) {
      case vmIntrinsics::_Boolean_valueOf:
      case vmIntrinsics::_Byte_valueOf:
      case vmIntrinsics::_Character_valueOf:
      case vmIntrinsics::_Short_valueOf:
      case vmIntrinsics::_Integer_valueOf:
      case vmIntrinsics::_Long_valueOf:
      case vmIntrinsics::_Float_valueOf:
      case vmIntrinsics::_Double_valueOf:
        return true;
      default:
        return false;
    }
  }
  return false;
}
// Return true if this is one of the box-class xxxValue unboxing intrinsics.
bool ciMethod::is_unboxing_method() const {
  if (intrinsic_id() != vmIntrinsics::_none && holder()->is_box_klass()) {
    switch (intrinsic_id()) {
      case vmIntrinsics::_booleanValue:
      case vmIntrinsics::_byteValue:
      case vmIntrinsics::_charValue:
      case vmIntrinsics::_shortValue:
      case vmIntrinsics::_intValue:
      case vmIntrinsics::_longValue:
      case vmIntrinsics::_floatValue:
      case vmIntrinsics::_doubleValue:
        return true;
      default:
        return false;
    }
  }
  return false;
}
// ------------------------------------------------------------------ // ciMethod::print_codes // // Print a range of the bytecodes for this method. void ciMethod::print_codes_on(int from, int to, outputStream* st) {
check_is_loaded();
GUARDED_VM_ENTRY(get_Method()->print_codes_on(from, to, st);)
}
// ------------------------------------------------------------------ // ciMethod::print_name // // Print the name of this method, including signature and some flags. void ciMethod::print_name(outputStream* st) {
check_is_loaded();
GUARDED_VM_ENTRY(get_Method()->print_name(st);)
}
// ------------------------------------------------------------------ // ciMethod::print_short_name // // Print the name of this method, without signature. void ciMethod::print_short_name(outputStream* st) { if (is_loaded()) {
GUARDED_VM_ENTRY(get_Method()->print_short_name(st););
} else { // Fall back if method is not loaded.
holder()->print_name_on(st);
st->print("::");
name()->print_symbol_on(st); if (WizardMode)
signature()->as_symbol()->print_symbol_on(st);
}
}
// NOTE(review): fragment only — the enclosing function's signature and the
// declarations of invoke_through_mh_intrinsic, declared_method, resolved_method,
// linker, target, linker_sig, target_sig and has_appendix were lost in this
// copy (presumably a signature-consistency check for MethodHandle linker
// intrinsics — confirm against VCS history). Left byte-identical; cannot be
// safely reformatted or rewritten without the missing context.
if (!invoke_through_mh_intrinsic) { // Method name & descriptor should stay the same. // Signatures may reference unloaded types and thus they may be not strictly equal.
ciSymbol* declared_signature = declared_method->signature()->as_symbol();
ciSymbol* resolved_signature = resolved_method->signature()->as_symbol();
// Linker intrinsics pass the receiver as an explicit leading argument for
// linkTo* (skipped via sbase) and implicitly for invokeBasic (skipped via rbase).
int sbase = 0, rbase = 0; switch (linker->intrinsic_id()) { case vmIntrinsics::_linkToVirtual: case vmIntrinsics::_linkToInterface: case vmIntrinsics::_linkToSpecial: { if (target->is_static()) { returnfalse;
} if (linker_sig->type_at(0)->is_primitive_type()) { returnfalse;  // receiver should be an oop
}
sbase = 1; // skip receiver break;
} case vmIntrinsics::_linkToStatic: { if (!target->is_static()) { returnfalse;
} break;
} case vmIntrinsics::_invokeBasic: { if (target->is_static()) { if (target_sig->type_at(0)->is_primitive_type()) { returnfalse; // receiver should be an oop
}
rbase = 1; // skip receiver
} break;
} default: break;
}
// Compare remaining argument basic types pairwise between linker and target.
assert(target_sig->count() - rbase == linker_sig->count() - sbase - has_appendix, "argument count mismatch"); int arg_count = target_sig->count() - rbase; for (int i = 0; i < arg_count; i++) { if (!basic_types_match(linker_sig->type_at(sbase + i), target_sig->type_at(rbase + i))) { returnfalse;
}
} // Only check the return type if the symbolic info has non-void return type. // I.e. the return value of the resolved method can be dropped. if (!linker->return_type()->is_void() &&
!basic_types_match(linker->return_type(), target->return_type())) { returnfalse;
} returntrue;      // no mismatch found
}
¤ Die Informationen auf dieser Webseite wurden
nach bestem Wissen sorgfältig zusammengestellt. Es wird jedoch weder Vollständigkeit, noch Richtigkeit,
noch Qualität der bereit gestellten Informationen zugesichert.0.21Bemerkung:
¤
Die Informationen auf dieser Webseite wurden
nach bestem Wissen sorgfältig zusammengestellt. Es wird jedoch weder Vollständigkeit, noch Richtigkeit,
noch Qualität der bereit gestellten Informationen zugesichert.
Bemerkung:
Die farbliche Syntaxdarstellung ist noch experimentell.