/* Retpoline alternative for a jump table? */ return alt_group && alt_group->orig_group &&
insn_jump_table(alt_group->orig_group->first_insn);
}
staticbool is_sibling_call(struct instruction *insn)
{ /* * Assume only STT_FUNC calls have jump-tables.
*/ if (insn_func(insn)) { /* An indirect jump is either a sibling call or a jump to a table. */ if (insn->type == INSN_JUMP_DYNAMIC) return !is_jump_table_jump(insn);
}
/*
 * Checks if a string ends with another.
 */
static bool str_ends_with(const char *s, const char *sub)
{
	const int slen = strlen(s);
	const int sublen = strlen(sub);

	/* A suffix longer than the string itself can never match. */
	if (sublen > slen)
		return false;

	/* Compare the tail of @s against @sub. */
	return !memcmp(s + slen - sublen, sub, sublen);
}
/* * Checks if a function is a Rust "noreturn" one.
*/ staticbool is_rust_noreturn(conststruct symbol *func)
{ /* * If it does not start with "_R", then it is not a Rust symbol.
*/ if (strncmp(func->name, "_R", 2)) returnfalse;
/* * These are just heuristics -- we do not control the precise symbol * name, due to the crate disambiguators (which depend on the compiler) * as well as changes to the source code itself between versions (since * these come from the Rust standard library).
*/ return str_ends_with(func->name, "_4core5sliceSp15copy_from_slice17len_mismatch_fail") ||
str_ends_with(func->name, "_4core6option13expect_failed") ||
str_ends_with(func->name, "_4core6option13unwrap_failed") ||
str_ends_with(func->name, "_4core6result13unwrap_failed") ||
str_ends_with(func->name, "_4core9panicking5panic") ||
str_ends_with(func->name, "_4core9panicking9panic_fmt") ||
str_ends_with(func->name, "_4core9panicking14panic_explicit") ||
str_ends_with(func->name, "_4core9panicking14panic_nounwind") ||
str_ends_with(func->name, "_4core9panicking18panic_bounds_check") ||
str_ends_with(func->name, "_4core9panicking18panic_nounwind_fmt") ||
str_ends_with(func->name, "_4core9panicking19assert_failed_inner") ||
str_ends_with(func->name, "_4core9panicking30panic_null_pointer_dereference") ||
str_ends_with(func->name, "_4core9panicking36panic_misaligned_pointer_dereference") ||
str_ends_with(func->name, "_7___rustc17rust_begin_unwind") ||
strstr(func->name, "_4core9panicking13assert_failed") ||
strstr(func->name, "_4core9panicking11panic_const24panic_const_") ||
(strstr(func->name, "_4core5slice5index") &&
strstr(func->name, "slice_") &&
str_ends_with(func->name, "_fail"));
}
/* * This checks to see if the given function is a "noreturn" function. * * For global functions which are outside the scope of this object file, we * have to keep a manual list of them. * * For local functions, we have to detect them manually by simply looking for * the lack of a return instruction.
*/ staticbool __dead_end_function(struct objtool_file *file, struct symbol *func, int recursion)
{ int i; struct instruction *insn; bool empty = true;
/* * A function can have a sibling call instead of a return. In that * case, the function's dead-end status depends on whether the target * of the sibling call returns.
*/
func_for_each_insn(file, func, insn) { if (is_sibling_call(insn)) { struct instruction *dest = insn->jump_dest;
if (!dest) /* sibling call to another file */ returnfalse;
/* local sibling call */ if (recursion == 5) { /* * Infinite recursion: two functions have * sibling calls to each other. This is a very * rare case. It means they aren't dead ends.
*/ returnfalse;
}
/* * Call the arch-specific instruction decoder for all the instructions and add * them to the global instruction list.
*/ staticint decode_instructions(struct objtool_file *file)
{ struct section *sec; struct symbol *func; unsignedlong offset; struct instruction *insn; int ret;
/* * .init.text code is ran before userspace and thus doesn't * strictly need retpolines, except for modules which are * loaded late, they very much do need retpoline in their * .init.text
*/ if (!strcmp(sec->name, ".init.text") && !opts.module)
sec->init = true;
ret = arch_decode_instruction(file, sec, offset,
sec->sh.sh_size - offset,
insn); if (ret) return ret;
prev_len = insn->len;
/* * By default, "ud2" is a dead end unless otherwise * annotated, because GCC 7 inserts it for certain * divide-by-zero cases.
*/ if (insn->type == INSN_BUG)
insn->dead_end = true;
if (func->offset == sec->sh.sh_size) { /* Heuristic: likely an "end" symbol */ if (func->type == STT_NOTYPE) continue;
ERROR("%s(): STT_FUNC at end of section", func->name); return -1;
}
if (func->embedded_insn || func->alias != func) continue;
/* populate reloc for 'addr' */ if (!elf_init_reloc_text_sym(file->elf, sec,
idx * sizeof(*site), idx * 2,
insn->sec, insn->offset)) return -1;
/* find key symbol */
key_name = strdup(insn_call_dest(insn)->name); if (!key_name) {
ERROR_GLIBC("strdup"); return -1;
} if (strncmp(key_name, STATIC_CALL_TRAMP_PREFIX_STR,
STATIC_CALL_TRAMP_PREFIX_LEN)) {
ERROR("static_call: trampoline name malformed: %s", key_name); return -1;
}
tmp = key_name + STATIC_CALL_TRAMP_PREFIX_LEN - STATIC_CALL_KEY_PREFIX_LEN;
memcpy(tmp, STATIC_CALL_KEY_PREFIX_STR, STATIC_CALL_KEY_PREFIX_LEN);
key_sym = find_symbol_by_name(file->elf, tmp); if (!key_sym) { if (!opts.module) {
ERROR("static_call: can't find static_call_key symbol: %s", tmp); return -1;
}
/* * For modules(), the key might not be exported, which * means the module can make static calls but isn't * allowed to change them. * * In that case we temporarily set the key to be the * trampoline address. This is fixed up in * static_call_add_module().
*/
key_sym = insn_call_dest(insn);
}
case STT_SECTION:
func = find_func_by_offset(reloc->sym->sec, reloc_addend(reloc)); if (!func) continue; break;
default:
ERROR("unexpected relocation symbol type in %s: %d",
rsec->name, reloc->sym->type); return -1;
}
func->ignore = true; if (func->cfunc)
func->cfunc->ignore = true;
}
return 0;
}
/*
 * This is a whitelist of functions that is allowed to be called with AC set.
 * The list is meant to be minimal and only contains compiler instrumentation
 * ABI and a few functions used to implement *_{to,from}_user() functions.
 *
 * These functions must not directly change AC, but may PUSHF/POPF.
 */
static const char *uaccess_safe_builtin[] = {
	/* KASAN */
	"kasan_report",
	"kasan_check_range",
	/* KASAN out-of-line */
	"__asan_loadN_noabort", "__asan_load1_noabort", "__asan_load2_noabort",
	"__asan_load4_noabort", "__asan_load8_noabort", "__asan_load16_noabort",
	"__asan_storeN_noabort", "__asan_store1_noabort", "__asan_store2_noabort",
	"__asan_store4_noabort", "__asan_store8_noabort", "__asan_store16_noabort",
	"__kasan_check_read", "__kasan_check_write",
	/* KASAN in-line */
	"__asan_report_load_n_noabort", "__asan_report_load1_noabort",
	"__asan_report_load2_noabort", "__asan_report_load4_noabort",
	"__asan_report_load8_noabort", "__asan_report_load16_noabort",
	"__asan_report_store_n_noabort", "__asan_report_store1_noabort",
	"__asan_report_store2_noabort", "__asan_report_store4_noabort",
	"__asan_report_store8_noabort", "__asan_report_store16_noabort",
	/* KCSAN */
	"__kcsan_check_access", "__kcsan_mb", "__kcsan_wmb", "__kcsan_rmb",
	"__kcsan_release", "kcsan_found_watchpoint", "kcsan_setup_watchpoint",
	"kcsan_check_scoped_accesses", "kcsan_disable_current",
	"kcsan_enable_current_nowarn",
	/* KCSAN/TSAN */
	"__tsan_func_entry", "__tsan_func_exit",
	"__tsan_read_range", "__tsan_write_range",
	"__tsan_read1", "__tsan_read2", "__tsan_read4", "__tsan_read8", "__tsan_read16",
	"__tsan_write1", "__tsan_write2", "__tsan_write4", "__tsan_write8", "__tsan_write16",
	"__tsan_read_write1", "__tsan_read_write2", "__tsan_read_write4",
	"__tsan_read_write8", "__tsan_read_write16",
	"__tsan_volatile_read1", "__tsan_volatile_read2", "__tsan_volatile_read4",
	"__tsan_volatile_read8", "__tsan_volatile_read16",
	"__tsan_volatile_write1", "__tsan_volatile_write2", "__tsan_volatile_write4",
	"__tsan_volatile_write8", "__tsan_volatile_write16",
	"__tsan_atomic8_load", "__tsan_atomic16_load",
	"__tsan_atomic32_load", "__tsan_atomic64_load",
	"__tsan_atomic8_store", "__tsan_atomic16_store",
	"__tsan_atomic32_store", "__tsan_atomic64_store",
	"__tsan_atomic8_exchange", "__tsan_atomic16_exchange",
	"__tsan_atomic32_exchange", "__tsan_atomic64_exchange",
	"__tsan_atomic8_fetch_add", "__tsan_atomic16_fetch_add",
	"__tsan_atomic32_fetch_add", "__tsan_atomic64_fetch_add",
	"__tsan_atomic8_fetch_sub", "__tsan_atomic16_fetch_sub",
	"__tsan_atomic32_fetch_sub", "__tsan_atomic64_fetch_sub",
	"__tsan_atomic8_fetch_and", "__tsan_atomic16_fetch_and",
	"__tsan_atomic32_fetch_and", "__tsan_atomic64_fetch_and",
	"__tsan_atomic8_fetch_or", "__tsan_atomic16_fetch_or",
	"__tsan_atomic32_fetch_or", "__tsan_atomic64_fetch_or",
	"__tsan_atomic8_fetch_xor", "__tsan_atomic16_fetch_xor",
	"__tsan_atomic32_fetch_xor", "__tsan_atomic64_fetch_xor",
	"__tsan_atomic8_fetch_nand", "__tsan_atomic16_fetch_nand",
	"__tsan_atomic32_fetch_nand", "__tsan_atomic64_fetch_nand",
	"__tsan_atomic8_compare_exchange_strong", "__tsan_atomic16_compare_exchange_strong",
	"__tsan_atomic32_compare_exchange_strong", "__tsan_atomic64_compare_exchange_strong",
	"__tsan_atomic8_compare_exchange_weak", "__tsan_atomic16_compare_exchange_weak",
	"__tsan_atomic32_compare_exchange_weak", "__tsan_atomic64_compare_exchange_weak",
	"__tsan_atomic8_compare_exchange_val", "__tsan_atomic16_compare_exchange_val",
	"__tsan_atomic32_compare_exchange_val", "__tsan_atomic64_compare_exchange_val",
	"__tsan_atomic_thread_fence", "__tsan_atomic_signal_fence",
	"__tsan_unaligned_read16", "__tsan_unaligned_write16",
	/* KCOV */
	"write_comp_data", "check_kcov_mode", "__sanitizer_cov_trace_pc",
	"__sanitizer_cov_trace_const_cmp1", "__sanitizer_cov_trace_const_cmp2",
	"__sanitizer_cov_trace_const_cmp4", "__sanitizer_cov_trace_const_cmp8",
	"__sanitizer_cov_trace_cmp1", "__sanitizer_cov_trace_cmp2",
	"__sanitizer_cov_trace_cmp4", "__sanitizer_cov_trace_cmp8",
	"__sanitizer_cov_trace_switch",
	/* KMSAN */
	"kmsan_copy_to_user", "kmsan_disable_current", "kmsan_enable_current",
	"kmsan_report", "kmsan_unpoison_entry_regs", "kmsan_unpoison_memory",
	"__msan_chain_origin", "__msan_get_context_state", "__msan_instrument_asm_store",
	"__msan_metadata_ptr_for_load_1", "__msan_metadata_ptr_for_load_2",
	"__msan_metadata_ptr_for_load_4", "__msan_metadata_ptr_for_load_8",
	"__msan_metadata_ptr_for_load_n",
	"__msan_metadata_ptr_for_store_1", "__msan_metadata_ptr_for_store_2",
	"__msan_metadata_ptr_for_store_4", "__msan_metadata_ptr_for_store_8",
	"__msan_metadata_ptr_for_store_n",
	"__msan_poison_alloca", "__msan_warning",
	/* UBSAN */
	"ubsan_type_mismatch_common", "__ubsan_handle_type_mismatch",
	"__ubsan_handle_type_mismatch_v1", "__ubsan_handle_shift_out_of_bounds",
	"__ubsan_handle_load_invalid_value",
	/* KSTACK_ERASE */
	"__sanitizer_cov_stack_depth",
	/* TRACE_BRANCH_PROFILING */
	"ftrace_likely_update",
	/* STACKPROTECTOR */
	"__stack_chk_fail",
	/* misc */
	"csum_partial_copy_generic", "copy_mc_fragile", "copy_mc_fragile_handle_tail",
	"copy_mc_enhanced_fast_string", "rep_stos_alternative", "rep_movs_alternative",
	"__copy_user_nocache",
	NULL	/* sentinel for add_uaccess_safe() iteration */
};
/*
 * Mark every symbol named in uaccess_safe_builtin[] as safe to call with
 * AC set.  Symbols not present in this object file are simply skipped.
 */
static void add_uaccess_safe(struct objtool_file *file)
{
	struct symbol *func;
	const char **name;

	/* Nothing to do unless uaccess validation was requested. */
	if (!opts.uaccess)
		return;

	for (name = uaccess_safe_builtin; *name; name++) {
		func = find_symbol_by_name(file->elf, *name);
		if (!func)
			continue;

		func->uaccess_safe = true;
	}
}
/*
 * Symbols that replace INSN_CALL_DYNAMIC, every (tail) call to such a symbol
 * will be added to the .retpoline_sites section.
 */
__weak bool arch_is_retpoline(struct symbol *sym)
{
	/* Default: no retpoline thunks; arch code may override this weak stub. */
	return false;
}
/*
 * Symbols that replace INSN_RETURN, every (tail) call to such a symbol
 * will be added to the .return_sites section.
 */
__weak bool arch_is_rethunk(struct symbol *sym)
{
	/* Default: no return thunks; arch code may override this weak stub. */
	return false;
}
/*
 * Symbols that are embedded inside other instructions, because sometimes crazy
 * code exists. These are mostly ignored for validation purposes.
 */
__weak bool arch_is_embedded_insn(struct symbol *sym)
{
	/* Default: no embedded-insn symbols; arch code may override this weak stub. */
	return false;
}
if (sym->static_call_tramp) {
list_add_tail(&insn->call_node, &file->static_call_list); return 0;
}
if (sym->retpoline_thunk) {
list_add_tail(&insn->call_node, &file->retpoline_call_list); return 0;
}
/* * Many compilers cannot disable KCOV or sanitizer calls with a function * attribute so they need a little help, NOP out any such calls from * noinstr text.
*/ if (opts.hack_noinstr && insn->sec->noinstr && sym->profiling_func) { if (reloc)
set_reloc_type(file->elf, reloc, R_NONE);
if (sibling) { /* * We've replaced the tail-call JMP insn by two new * insn: RET; INT3, except we only have a single struct * insn here. Mark it retpoline_safe to avoid the SLS * warning, instead of adding another insn.
*/
insn->retpoline_safe = true;
}
return 0;
}
if (opts.mcount && sym->fentry) { if (sibling)
WARN_INSN(insn, "tail call to __fentry__ !?!?"); if (opts.mnop) { if (reloc)
set_reloc_type(file->elf, reloc, R_NONE);
if (elf_write_insn(file->elf, insn->sec,
insn->offset, insn->len,
arch_nop_insn(insn->len))) { return -1;
}
if (insn->type == INSN_CALL && !insn->sec->init &&
!insn->_call_dest->embedded_insn)
list_add_tail(&insn->call_node, &file->call_list);
if (!sibling && dead_end_function(file, sym))
insn->dead_end = true;
return 0;
}
/*
 * Record @dest as the call destination of @insn and run the per-call-site
 * annotations.  A NULL @dest (e.g. unresolved external) is recorded and
 * treated as success.
 */
static int add_call_dest(struct objtool_file *file, struct instruction *insn,
			 struct symbol *dest, bool sibling)
{
	insn->_call_dest = dest;
	if (!dest)
		return 0;

	/*
	 * Whatever stack impact regular CALLs have, should be undone
	 * by the RETURN of the called function.
	 *
	 * Annotated intra-function calls retain the stack_ops but
	 * are converted to JUMP, see read_intra_function_calls().
	 */
	remove_insn_ops(insn);

	return annotate_call_site(file, insn, sibling);
}
/*
 * Convert a call/jump through a retpoline thunk into the corresponding
 * dynamic call/jump type and annotate the call site.
 */
static int add_retpoline_call(struct objtool_file *file, struct instruction *insn)
{
	/*
	 * Retpoline calls/jumps are really dynamic calls/jumps in disguise,
	 * so convert them accordingly.
	 */
	switch (insn->type) {
	case INSN_CALL:
		insn->type = INSN_CALL_DYNAMIC;
		break;
	case INSN_JUMP_UNCONDITIONAL:
		insn->type = INSN_JUMP_DYNAMIC;
		break;
	case INSN_JUMP_CONDITIONAL:
		insn->type = INSN_JUMP_DYNAMIC_CONDITIONAL;
		break;
	default:
		/* Not a call/jump to the thunk; nothing to convert. */
		return 0;
	}

	insn->retpoline_safe = true;

	/*
	 * Whatever stack impact regular CALLs have, should be undone
	 * by the RETURN of the called function.
	 *
	 * Annotated intra-function calls retain the stack_ops but
	 * are converted to JUMP, see read_intra_function_calls().
	 */
	remove_insn_ops(insn);

	return annotate_call_site(file, insn, false);
}
/*
 * Convert a tail call to a return thunk into a plain return.  When @add is
 * true the instruction is also queued on the return_thunk_list so the site
 * gets emitted to .return_sites.
 */
static void add_return_call(struct objtool_file *file, struct instruction *insn, bool add)
{
	/*
	 * Return thunk tail calls are really just returns in disguise,
	 * so convert them accordingly.
	 */
	insn->type = INSN_RETURN;
	insn->retpoline_safe = true;

	if (add)
		list_add_tail(&insn->call_node, &file->return_thunk_list);
}
staticbool is_first_func_insn(struct objtool_file *file, struct instruction *insn, struct symbol *sym)
{ if (insn->offset == sym->offset) returntrue;
/* Allow direct CALL/JMP past ENDBR */ if (opts.ibt) { struct instruction *prev = prev_insn_same_sym(file, insn);
/*
 * A sibling call is a tail-call to another symbol -- to differentiate from a
 * recursive tail-call which is to the same symbol.
 */
static bool jump_is_sibling_call(struct objtool_file *file,
				 struct instruction *from, struct instruction *to)
{
	struct symbol *fs = from->sym;
	struct symbol *ts = to->sym;

	/* Not a sibling call if from/to a symbol hole */
	if (!fs || !ts)
		return false;

	/* Not a sibling call if not targeting the start of a symbol. */
	if (!is_first_func_insn(file, to, ts))
		return false;

	/* Disallow sibling calls into STT_NOTYPE */
	if (ts->type == STT_NOTYPE)
		return false;

	/* Must not be self to be a sibling */
	return fs->pfunc != ts->pfunc;
}
/* * Find the destination instructions for all jumps.
*/ staticint add_jump_destinations(struct objtool_file *file)
{ struct instruction *insn, *jump_dest; struct reloc *reloc; struct section *dest_sec; unsignedlong dest_off; int ret;
for_each_insn(file, insn) { struct symbol *func = insn_func(insn);
if (insn->jump_dest) { /* * handle_group_alt() may have previously set * 'jump_dest' for some alternatives.
*/ continue;
} if (!is_static_jump(insn)) continue;
jump_dest = find_insn(file, dest_sec, dest_off); if (!jump_dest) { struct symbol *sym = find_symbol_by_offset(dest_sec, dest_off);
/* * This is a special case for retbleed_untrain_ret(). * It jumps to __x86_return_thunk(), but objtool * can't find the thunk's starting RET * instruction, because the RET is also in the * middle of another instruction. Objtool only * knows about the outer instruction.
*/ if (sym && sym->embedded_insn) {
add_return_call(file, insn, false); continue;
}
/* * GCOV/KCOV dead code can jump to the end of the * function/section.
*/ if (file->ignore_unreachables && func &&
dest_sec == insn->sec &&
dest_off == func->offset + func->len) continue;
ERROR_INSN(insn, "can't find jump dest instruction at %s+0x%lx",
dest_sec->name, dest_off); return -1;
}
/* * An intra-TU jump in retpoline.o might not have a relocation * for its jump dest, in which case the above * add_{retpoline,return}_call() didn't happen.
*/ if (jump_dest->sym && jump_dest->offset == jump_dest->sym->offset) { if (jump_dest->sym->retpoline_thunk) {
ret = add_retpoline_call(file, insn); if (ret) return ret; continue;
} if (jump_dest->sym->return_thunk) {
add_return_call(file, insn, true); continue;
}
}
/* * For GCC 8+, create parent/child links for any cold * subfunctions. This is _mostly_ redundant with a * similar initialization in read_symbols(). * * If a function has aliases, we want the *first* such * function in the symbol table to be the subfunction's * parent. In that case we overwrite the * initialization done in read_symbols(). * * However this code can't completely replace the * read_symbols() code because this doesn't detect the * case where the parent function's only reference to a * subfunction is through a jump table.
*/ if (!strstr(func->name, ".cold") &&
strstr(insn_func(jump_dest)->name, ".cold")) {
func->cfunc = insn_func(jump_dest);
insn_func(jump_dest)->pfunc = func;
}
}
if (jump_is_sibling_call(file, insn, jump_dest)) { /* * Internal sibling call without reloc or with * STT_SECTION reloc.
*/
ret = add_call_dest(file, insn, insn_func(jump_dest), true); if (ret) return ret; continue;
}
insn->jump_dest = jump_dest;
}
return 0;
}
/*
 * Resolve the symbol a call at @sec+@offset lands in: prefer an STT_FUNC
 * containing the offset, fall back to any symbol at that offset.
 */
static struct symbol *find_call_destination(struct section *sec, unsigned long offset)
{
	struct symbol *call_dest;

	call_dest = find_func_by_offset(sec, offset);
	if (!call_dest)
		call_dest = find_symbol_by_offset(sec, offset);

	return call_dest;
}
/*
 * Find the destination instructions for all calls.
 */
static int add_call_destinations(struct objtool_file *file)
{
	struct instruction *insn;
	unsigned long dest_off;
	struct symbol *dest;
	struct reloc *reloc;
	int ret;

	for_each_insn(file, insn) {
		struct symbol *func = insn_func(insn);

		if (insn->type != INSN_CALL)
			continue;

		reloc = insn_reloc(file, insn);
		if (!reloc) {
			/* No reloc: destination is encoded in the instruction. */
			dest_off = arch_jump_destination(insn);
			dest = find_call_destination(insn->sec, dest_off);

			ret = add_call_dest(file, insn, dest, false);
			if (ret)
				return ret;

			if (func && func->ignore)
				continue;

			if (!insn_call_dest(insn)) {
				ERROR_INSN(insn, "unannotated intra-function call");
				return -1;
			}

			if (func && insn_call_dest(insn)->type != STT_FUNC) {
				ERROR_INSN(insn, "unsupported call to non-function");
				return -1;
			}

		} else if (reloc->sym->type == STT_SECTION) {
			/* Section-relative reloc: resolve the symbol manually. */
			dest_off = arch_dest_reloc_offset(reloc_addend(reloc));
			dest = find_call_destination(reloc->sym->sec, dest_off);
			if (!dest) {
				ERROR_INSN(insn, "can't find call dest symbol at %s+0x%lx",
					   reloc->sym->sec->name, dest_off);
				return -1;
			}

			ret = add_call_dest(file, insn, dest, false);
			if (ret)
				return ret;

		} else if (reloc->sym->retpoline_thunk) {
			ret = add_retpoline_call(file, insn);
			if (ret)
				return ret;

		} else {
			ret = add_call_dest(file, insn, reloc->sym, false);
			if (ret)
				return ret;
		}
	}

	return 0;
}
/* * The .alternatives section requires some extra special care over and above * other special sections because alternatives are patched in place.
*/ staticint handle_group_alt(struct objtool_file *file, struct special_alt *special_alt, struct instruction *orig_insn, struct instruction **new_insn)
{ struct instruction *last_new_insn = NULL, *insn, *nop = NULL; struct alt_group *orig_alt_group, *new_alt_group; unsignedlong dest_off;
if (special_alt->new_len < special_alt->orig_len) { /* * Insert a fake nop at the end to make the replacement * alt_group the same size as the original. This is needed to * allow propagate_alt_cfi() to do its magic. When the last * instruction affects the stack, the instruction after it (the * nop) will propagate the new state to the shared CFI array.
*/
nop = calloc(1, sizeof(*nop)); if (!nop) {
ERROR_GLIBC("calloc"); return -1;
}
memset(nop, 0, sizeof(*nop));
/* * Since alternative replacement code is copy/pasted by the * kernel after applying relocations, generally such code can't * have relative-address relocation references to outside the * .altinstr_replacement section, unless the arch's * alternatives code can adjust the relative offsets * accordingly.
*/
alt_reloc = insn_reloc(file, insn); if (alt_reloc && arch_pc_relative_reloc(alt_reloc) &&
!arch_support_alt_relocation(special_alt, insn, alt_reloc)) {
ERROR_INSN(insn, "unsupported relocation in alternatives section"); return -1;
}
if (!is_static_jump(insn)) continue;
if (!insn->immediate) continue;
dest_off = arch_jump_destination(insn); if (dest_off == special_alt->new_off + special_alt->new_len) {
insn->jump_dest = next_insn_same_sec(file, orig_alt_group->last_insn); if (!insn->jump_dest) {
ERROR_INSN(insn, "can't find alternative jump destination"); return -1;
}
}
}
if (!last_new_insn) {
ERROR_FUNC(special_alt->new_sec, special_alt->new_off, "can't find last new alternative instruction"); return -1;
}
/* * A jump table entry can either convert a nop to a jump or a jump to a nop. * If the original instruction is a jump, make the alt entry an effective nop * by just skipping the original instruction.
*/ staticint handle_jump_alt(struct objtool_file *file, struct special_alt *special_alt, struct instruction *orig_insn, struct instruction **new_insn)
{ if (orig_insn->type != INSN_JUMP_UNCONDITIONAL &&
orig_insn->type != INSN_NOP) {
ERROR_INSN(orig_insn, "unsupported instruction at jump label"); return -1;
}
/* * Read all the special sections which have alternate instructions which can be * patched in or redirected to at runtime. Each instruction having alternate * instruction(s) has them added to its insn->alts list, which will be * traversed in validate_branch().
*/ staticint add_special_section_alts(struct objtool_file *file)
{ struct list_head special_alts; struct instruction *orig_insn, *new_insn; struct special_alt *special_alt, *tmp; struct alternative *alt; int ret;
if (special_get_alts(file->elf, &special_alts)) return -1;
/* * Each @reloc is a switch table relocation which points to the target * instruction.
*/
for_each_reloc_from(table->sec, reloc) {
/* Check for the end of the table: */ if (table_size && reloc_offset(reloc) - reloc_offset(table) >= table_size) break; if (reloc != table && is_jump_table(reloc)) break;
/* Make sure the table entries are consecutive: */ if (prev_offset && reloc_offset(reloc) != prev_offset + arch_reloc_size(reloc)) break;
/* Detect function pointers from contiguous objects: */ if (reloc->sym->sec == pfunc->sec && sym_offset == pfunc->offset) break;
/* * Clang sometimes leaves dangling unused jump table entries * which point to the end of the function. Ignore them.
*/ if (reloc->sym->sec == pfunc->sec &&
sym_offset == pfunc->offset + pfunc->len) goto next;
dest_insn = find_insn(file, reloc->sym->sec, sym_offset); if (!dest_insn) break;
/* Make sure the destination is in the same function: */ if (!insn_func(dest_insn) || insn_func(dest_insn)->pfunc != pfunc) break;
alt = calloc(1, sizeof(*alt)); if (!alt) {
ERROR_GLIBC("calloc"); return -1;
}
/* * find_jump_table() - Given a dynamic jump, find the switch jump table * associated with it.
*/ staticvoid find_jump_table(struct objtool_file *file, struct symbol *func, struct instruction *insn)
{ struct reloc *table_reloc; struct instruction *dest_insn, *orig_insn = insn; unsignedlong table_size; unsignedlong sym_offset;
/* * Backward search using the @first_jump_src links, these help avoid * much of the 'in between' code. Which avoids us getting confused by * it.
*/ for (;
insn && insn_func(insn) && insn_func(insn)->pfunc == func;
insn = insn->first_jump_src ?: prev_insn_same_sym(file, insn)) {
if (insn != orig_insn && insn->type == INSN_JUMP_DYNAMIC) break;
/* allow small jumps within the range */ if (insn->type == INSN_JUMP_UNCONDITIONAL &&
insn->jump_dest &&
(insn->jump_dest->offset <= insn->offset ||
insn->jump_dest->offset > orig_insn->offset)) break;
table_reloc = arch_find_switch_table(file, insn, &table_size); if (!table_reloc) continue;
/*
 * First pass: Mark the head of each jump table so that in the next pass,
 * we know when a given jump table ends and the next one starts.
 */
static void mark_func_jump_tables(struct objtool_file *file, struct symbol *func)
{
	struct instruction *insn, *last = NULL;

	func_for_each_insn(file, func, insn) {
		if (!last)
			last = insn;

		/*
		 * Store back-pointers for unconditional forward jumps such
		 * that find_jump_table() can back-track using those and
		 * avoid some potentially confusing code.
		 */
		if (insn->type == INSN_JUMP_UNCONDITIONAL && insn->jump_dest &&
		    insn->offset > last->offset &&
		    insn->jump_dest->offset > insn->offset &&
		    !insn->jump_dest->first_jump_src) {
			insn->jump_dest->first_jump_src = insn;
			last = insn->jump_dest;
		}

		if (insn->type != INSN_JUMP_DYNAMIC)
			continue;

		find_jump_table(file, func, insn);
	}
}
/*
 * Second pass: for every dynamic jump in @func that was associated with a
 * jump table by find_jump_table(), add the table's entries as alternatives.
 */
static int add_func_jump_tables(struct objtool_file *file, struct symbol *func)
{
	struct instruction *insn;
	int ret;

	func_for_each_insn(file, func, insn) {
		if (!insn_jump_table(insn))
			continue;

		ret = add_jump_table(file, insn);
		if (ret)
			return ret;
	}

	return 0;
}
/* * For some switch statements, gcc generates a jump table in the .rodata * section which contains a list of addresses within the function to jump to. * This finds these jump tables and adds them to the insn->alts lists.
*/ staticint add_jump_table_alts(struct objtool_file *file)
{ struct symbol *func; int ret;
if (!file->rodata) return 0;
for_each_sym(file, func) { if (func->type != STT_FUNC) continue;
mark_func_jump_tables(file, func);
ret = add_func_jump_tables(file, func); if (ret) return ret;
}
if (type != ANNOTYPE_INTRA_FUNCTION_CALL) return 0;
if (insn->type != INSN_CALL) {
ERROR_INSN(insn, "intra_function_call not a direct call"); return -1;
}
/* * Treat intra-function CALLs as JMPs, but with a stack_op. * See add_call_destinations(), which strips stack_ops from * normal CALLs.
*/
insn->type = INSN_JUMP_UNCONDITIONAL;
dest_off = arch_jump_destination(insn);
insn->jump_dest = find_insn(file, insn->sec, dest_off); if (!insn->jump_dest) {
ERROR_INSN(insn, "can't find call dest at %s+0x%lx",
insn->sec->name, dest_off); return -1;
}
return 0;
}
staticint __annotate_late(struct objtool_file *file, int type, struct instruction *insn)
{ switch (type) { case ANNOTYPE_NOENDBR: /* early */ break;
case ANNOTYPE_RETPOLINE_SAFE: if (insn->type != INSN_JUMP_DYNAMIC &&
insn->type != INSN_CALL_DYNAMIC &&
insn->type != INSN_RETURN &&
insn->type != INSN_NOP) {
ERROR_INSN(insn, "retpoline_safe hint not an indirect jump/call/ret/nop"); return -1;
}
insn->retpoline_safe = true; break;
case ANNOTYPE_INSTR_BEGIN:
insn->instr++; break;
case ANNOTYPE_INSTR_END:
insn->instr--; break;
case ANNOTYPE_UNRET_BEGIN:
insn->unret = 1; break;
case ANNOTYPE_IGNORE_ALTS: /* early */ break;
case ANNOTYPE_INTRA_FUNCTION_CALL: /* ifc */ break;
case ANNOTYPE_REACHABLE:
insn->dead_end = false; break;
/*
 * Return true if name matches an instrumentation function, where calls to that
 * function from noinstr code can safely be removed, but compilers won't do so.
 */
static bool is_profiling_func(const char *name)
{
	/*
	 * Many compilers cannot disable KCOV with a function attribute.
	 */
	if (!strncmp(name, "__sanitizer_cov_", 16))
		return true;

	/*
	 * Some compilers currently do not remove __tsan_func_entry/exit nor
	 * __tsan_atomic_signal_fence (used for barrier instrumentation) with
	 * the __no_sanitize_thread attribute, remove them. Once the kernel's
	 * minimum Clang version is 14.0, this can be removed.
	 */
	if (!strncmp(name, "__tsan_func_", 12) ||
	    !strcmp(name, "__tsan_atomic_signal_fence"))
		return true;

	return false;
}
staticint classify_symbols(struct objtool_file *file)
{ struct symbol *func;
/* * Search for the following rodata sections, each of which can * potentially contain jump tables: * * - .rodata: can contain GCC switch tables * - .rodata.<func>: same, if -fdata-sections is being used * - .data.rel.ro.c_jump_table: contains C annotated jump tables * * .rodata.str1.* sections are ignored; they don't contain jump tables.
*/
for_each_sec(file, sec) { if ((!strncmp(sec->name, ".rodata", 7) &&
!strstr(sec->name, ".str1.")) ||
!strncmp(sec->name, ".data.rel.ro", 12)) {
sec->rodata = true;
found = true;
}
}
file->rodata = found;
}
/*
 * Top-level decode pipeline: decode all instructions and resolve jump/call
 * destinations, alternatives, jump tables, unwind hints and annotations.
 * The ordering between the steps below is significant (see comments).
 */
static int decode_sections(struct objtool_file *file)
{
	int ret;

	mark_rodata(file);

	ret = init_pv_ops(file);
	if (ret)
		return ret;

	/*
	 * Must be before add_{jump_call}_destination.
	 */
	ret = classify_symbols(file);
	if (ret)
		return ret;

	ret = decode_instructions(file);
	if (ret)
		return ret;

	ret = add_ignores(file);
	if (ret)
		return ret;

	add_uaccess_safe(file);

	ret = read_annotate(file, __annotate_early);
	if (ret)
		return ret;

	/*
	 * Must be before add_jump_destinations(), which depends on 'func'
	 * being set for alternatives, to enable proper sibling call detection.
	 */
	if (opts.stackval || opts.orc || opts.uaccess || opts.noinstr) {
		ret = add_special_section_alts(file);
		if (ret)
			return ret;
	}

	ret = add_jump_destinations(file);
	if (ret)
		return ret;

	/*
	 * Must be before add_call_destination(); it changes INSN_CALL to
	 * INSN_JUMP.
	 */
	ret = read_annotate(file, __annotate_ifc);
	if (ret)
		return ret;

	ret = add_call_destinations(file);
	if (ret)
		return ret;

	ret = add_jump_table_alts(file);
	if (ret)
		return ret;

	ret = read_unwind_hints(file);
	if (ret)
		return ret;

	/*
	 * Must be after add_call_destinations() such that it can override
	 * dead_end_function() marks.
	 */
	ret = read_annotate(file, __annotate_late);
	if (ret)
		return ret;

	return 0;
}
staticbool is_special_call(struct instruction *insn)
{ if (insn->type == INSN_CALL) { struct symbol *dest = insn_call_dest(insn);
if (!dest) returnfalse;
if (dest->fentry || dest->embedded_insn) returntrue;
}
if (cfi->cfa.base != initial_func_cfi.cfa.base || cfi->drap) returntrue;
if (cfi->cfa.offset != initial_func_cfi.cfa.offset) returntrue;
if (cfi->stack_size != initial_func_cfi.cfa.offset) returntrue;
for (i = 0; i < CFI_NUM_REGS; i++) { if (cfi->regs[i].base != initial_func_cfi.regs[i].base ||
cfi->regs[i].offset != initial_func_cfi.regs[i].offset) returntrue;
}
Die Informationen auf dieser Webseite wurden
nach bestem Wissen sorgfältig zusammengestellt. Es wird jedoch weder Vollständigkeit, noch Richtigkeit,
noch Qualität der bereitgestellten Informationen zugesichert.
Bemerkung:
Die farbliche Syntaxdarstellung und die Messung sind noch experimentell.