/* * Check if we can optimize this probe. Returns NIP post-emulation if this can * be optimized and 0 otherwise.
*/ staticunsignedlong can_optimize(struct kprobe *p)
{ struct pt_regs regs; struct instruction_op op; unsignedlong nip = 0; unsignedlong addr = (unsignedlong)p->addr;
/* * kprobe placed for kretprobe during boot time * has a 'nop' instruction, which can be emulated. * So further checks can be skipped.
*/ if (p->addr == (kprobe_opcode_t *)&arch_rethook_trampoline) return addr + sizeof(kprobe_opcode_t);
/* * We only support optimizing kernel addresses, but not * module addresses. * * FIXME: Optimize kprobes placed in module addresses.
*/ if (!is_kernel_addr(addr)) return 0;
/* * Kprobe placed in conditional branch instructions are * not optimized, as we can't predict the nip prior with * dummy pt_regs and can not ensure that the return branch * from detour buffer falls in the range of address (i.e 32MB). * A branch back from trampoline is set up in the detour buffer * to the nip returned by the analyse_instr() here. * * Ensure that the instruction is not a conditional branch, * and that can be emulated.
*/ if (!is_conditional_branch(ppc_inst_read(p->ainsn.insn)) &&
analyse_instr(&op, ®s, ppc_inst_read(p->ainsn.insn)) == 1) {
emulate_update_regs(®s, &op);
nip = regs.nip;
}
return nip;
}
staticvoid optimized_callback(struct optimized_kprobe *op, struct pt_regs *regs)
{ /* This is possible if op is under delayed unoptimizing */ if (kprobe_disabled(&op->kp)) return;
/*
 * Generate instructions to load provided immediate 64-bit value
 * to register 'reg' and patch these instructions at 'addr'.
 *
 * Emits the standard 5-instruction powerpc 64-bit immediate load:
 * build the upper 32 bits, shift them into place, then OR in the
 * lower halves. Each write goes through patch_instruction() since
 * the target may be read-only/executable text.
 */
static void patch_imm64_load_insns(unsigned long long val, int reg, kprobe_opcode_t *addr)
{
	/* lis reg,(val)@highest */
	patch_instruction(addr++, ppc_inst(PPC_RAW_LIS(reg, PPC_HIGHEST(val))));
	/* ori reg,reg,(val)@higher */
	patch_instruction(addr++, ppc_inst(PPC_RAW_ORI(reg, reg, PPC_HIGHER(val))));
	/* sldi reg,reg,32 */
	patch_instruction(addr++, ppc_inst(PPC_RAW_SLDI(reg, reg, 32)));
	/* oris reg,reg,(val)@h */
	patch_instruction(addr++, ppc_inst(PPC_RAW_ORIS(reg, reg, PPC_HI(val))));
	/* ori reg,reg,(val)@l */
	patch_instruction(addr, ppc_inst(PPC_RAW_ORI(reg, reg, PPC_LO(val))));
}
/*
 * Patch instructions at 'addr' that load the immediate 'val' into
 * register 'reg', dispatching to the 64-bit or 32-bit sequence
 * depending on the kernel configuration.
 */
static void patch_imm_load_insns(unsigned long val, int reg, kprobe_opcode_t *addr)
{
	if (IS_ENABLED(CONFIG_PPC64))
		patch_imm64_load_insns(val, reg, addr);
	else
		patch_imm32_load_insns(val, reg, addr);
}
int arch_prepare_optimized_kprobe(struct optimized_kprobe *op, struct kprobe *p)
{
ppc_inst_t branch_op_callback, branch_emulate_step, temp; unsignedlong op_callback_addr, emulate_step_addr;
kprobe_opcode_t *buff; long b_offset; unsignedlong nip, size; int rc, i;
nip = can_optimize(p); if (!nip) return -EILSEQ;
/* Allocate instruction slot for detour buffer */
buff = get_optinsn_slot(); if (!buff) return -ENOMEM;
/* * OPTPROBE uses 'b' instruction to branch to optinsn.insn. * * The target address has to be relatively nearby, to permit use * of branch instruction in powerpc, because the address is specified * in an immediate field in the instruction opcode itself, ie 24 bits * in the opcode specify the address. Therefore the address should * be within 32MB on either side of the current instruction.
*/
b_offset = (unsignedlong)buff - (unsignedlong)p->addr; if (!is_offset_in_branch_range(b_offset)) goto error;
/* Check if the return address is also within 32MB range */
b_offset = (unsignedlong)(buff + TMPL_RET_IDX) - nip; if (!is_offset_in_branch_range(b_offset)) goto error;
/* Setup template */ /* We can optimize this via patch_instruction_window later */
size = (TMPL_END_IDX * sizeof(kprobe_opcode_t)) / sizeof(int);
pr_devel("Copying template to %p, size %lu\n", buff, size); for (i = 0; i < size; i++) {
rc = patch_instruction(buff + i, ppc_inst(*(optprobe_template_entry + i))); if (rc < 0) goto error;
}
/* * Fixup the template with instructions to: * 1. load the address of the actual probepoint
*/
patch_imm_load_insns((unsignedlong)op, 3, buff + TMPL_OP_IDX);
/* * 2. branch to optimized_callback() and emulate_step()
*/
op_callback_addr = ppc_kallsyms_lookup_name("optimized_callback");
emulate_step_addr = ppc_kallsyms_lookup_name("emulate_step"); if (!op_callback_addr || !emulate_step_addr) {
WARN(1, "Unable to lookup optimized_callback()/emulate_step()\n"); goto error;
}
/*
 * Report whether this optimized instruction slot has been prepared:
 * a non-NULL detour buffer means arch_prepare_optimized_kprobe()
 * succeeded for it.
 */
int arch_prepared_optinsn(struct arch_optimized_insn *optinsn)
{
	return optinsn->insn != NULL;
}
/*
 * On powerpc, Optprobes always replaces one instruction (4 bytes
 * aligned and 4 bytes long). It is impossible to encounter another
 * kprobe in this address range. So always return 0.
 */
int arch_check_optimized_kprobe(struct optimized_kprobe *op)
{
	/* No overlapping probes possible within a single instruction. */
	return 0;
}
Die Informationen auf dieser Webseite wurden
nach bestem Wissen sorgfältig zusammengestellt. Es wird jedoch weder die Vollständigkeit noch die Richtigkeit
noch die Qualität der bereitgestellten Informationen zugesichert.
Bemerkung:
Die farbliche Syntaxdarstellung und die Messung sind noch experimentell.