/* * Initial cache maintenance of the xol page done via set_pte_at(). * Subsequent CMOs only needed if the xol slot changes.
*/ if (!memcmp(dst, src, len)) goto done;
/* TODO: Currently we do not support AARCH32 instruction probing */ if (mm->context.flags & MMCF_AARCH32) return -EOPNOTSUPP; elseif (!IS_ALIGNED(addr, AARCH64_INSN_SIZE)) return -EINVAL;
insn = le32_to_cpu(auprobe->insn);
switch (arm_probe_decode_insn(insn, &auprobe->api)) { case INSN_REJECTED: return -EINVAL;
case INSN_GOOD_NO_SLOT:
auprobe->simulate = true; break;
/* Instruction points to execute next to breakpoint address */
instruction_pointer_set(regs, utask->vaddr + 4);
user_disable_single_step(current);
return 0;
} bool arch_uprobe_xol_was_trapped(struct task_struct *t)
{ /* * Between arch_uprobe_pre_xol and arch_uprobe_post_xol, if an xol * insn itself is trapped, then detect the case with the help of * invalid fault code which is being set in arch_uprobe_pre_xol
*/ if (t->thread.fault_code != UPROBE_INV_FAULT_CODE) returntrue;
/* * Task has received a fatal signal, so reset back to probbed * address.
*/
instruction_pointer_set(regs, utask->vaddr);
user_disable_single_step(current);
}
/*
 * Decide whether a pending return instance is still live for this
 * stack pointer.
 *
 * If a simple branch instruction (B) was used to reach a retprobed
 * assembly label, regs->sp and ret->stack can be equal; for chained
 * calls we must still treat the instance as alive so that cleanup and
 * reporting of return instances for the callee label happen when
 * handle_trampoline runs for the calling function.
 */
bool arch_uretprobe_is_alive(struct return_instance *ret, enum rp_check ctx,
			     struct pt_regs *regs)
{
	unsigned long sp = regs->sp;

	if (ctx == RP_CHECK_CHAIN_CALL)
		return sp <= ret->stack;

	return sp < ret->stack;
}
Die Informationen auf dieser Webseite wurden
nach bestem Wissen sorgfältig zusammengestellt. Es wird jedoch weder Vollständigkeit noch Richtigkeit
noch Qualität der bereitgestellten Informationen zugesichert.
Bemerkung:
Die farbliche Syntaxdarstellung und die Messung sind noch experimentell.