/*
 * This page serves two purposes:
 *   - It is used as early shadow memory: the entire shadow region is
 *     populated with this page before we are able to set up the normal
 *     shadow memory.
 *   - Later it is reused as the zero shadow, to cover large ranges of
 *     memory that are allowed to be accessed but are not tracked by
 *     KASAN (vmalloc/vmemmap ...).
 */
unsigned char kasan_early_shadow_page[PAGE_SIZE] __page_aligned_bss;
if (IS_ALIGNED(addr, PMD_SIZE) && end - addr >= PMD_SIZE) {
pmd_populate_kernel(&init_mm, pmd,
lm_alias(kasan_early_shadow_pte)); continue;
}
if (pmd_none(*pmd)) {
pte_t *p;
if (slab_is_available())
p = pte_alloc_one_kernel(&init_mm); else {
p = early_alloc(PAGE_SIZE, NUMA_NO_NODE);
kernel_pte_init(p);
} if (!p) return -ENOMEM;
if (slab_is_available()) {
p = pud_alloc(&init_mm, p4d, addr); if (!p) return -ENOMEM;
} else {
p = early_alloc(PAGE_SIZE, NUMA_NO_NODE);
pud_init(p);
p4d_populate_kernel(addr, p4d, p);
}
}
zero_pud_populate(p4d, addr, next);
} while (p4d++, addr = next, addr != end);
return 0;
}
/**
 * kasan_populate_early_shadow - populate shadow memory region with
 *                               kasan_early_shadow_page
 * @shadow_start: start of the memory range to populate
 * @shadow_end: end of the memory range to populate
 *
 * Maps the whole [@shadow_start, @shadow_end) shadow range to the shared
 * read-only zero-shadow page tables.  Returns 0 on success, -ENOMEM if a
 * page-table allocation fails.
 */
int __ref kasan_populate_early_shadow(const void *shadow_start,
					const void *shadow_end)
{
	unsigned long addr = (unsigned long)shadow_start;
	unsigned long end = (unsigned long)shadow_end;
	pgd_t *pgd = pgd_offset_k(addr);
	unsigned long next;

	do {
		next = pgd_addr_end(addr, end);

		if (IS_ALIGNED(addr, PGDIR_SIZE) && end - addr >= PGDIR_SIZE) {
			p4d_t *p4d;
			pud_t *pud;
			pmd_t *pmd;

			/*
			 * kasan_early_shadow_pud should be populated with pmds
			 * at this moment.
			 * [pud,pmd]_populate*() below needed only for
			 * 3,2 - level page tables where we don't have
			 * puds,pmds, so pgd_populate(), pud_populate()
			 * is noops.
			 */
			pgd_populate_kernel(addr, pgd,
					lm_alias(kasan_early_shadow_p4d));
			p4d = p4d_offset(pgd, addr);
			p4d_populate_kernel(addr, p4d,
					lm_alias(kasan_early_shadow_pud));
			pud = pud_offset(p4d, addr);
			pud_populate(&init_mm, pud,
					lm_alias(kasan_early_shadow_pmd));
			pmd = pmd_offset(pud, addr);
			pmd_populate_kernel(&init_mm, pmd,
					lm_alias(kasan_early_shadow_pte));
			continue;
		}

		if (pgd_none(*pgd)) {
			p4d_t *p;

			if (slab_is_available()) {
				/* Normal allocator is up: use it. */
				p = p4d_alloc(&init_mm, pgd, addr);
				if (!p)
					return -ENOMEM;
			} else {
				/* Early boot: fall back to memblock. */
				pgd_populate_kernel(addr, pgd,
					early_alloc(PAGE_SIZE, NUMA_NO_NODE));
			}
		}
		zero_p4d_populate(pgd, addr, next);
	} while (pgd++, addr = next, addr != end);

	return 0;
}
staticvoid kasan_free_pte(pte_t *pte_start, pmd_t *pmd)
{
pte_t *pte; int i;
for (i = 0; i < PTRS_PER_PTE; i++) {
pte = pte_start + i; if (!pte_none(ptep_get(pte))) return;
}
/*
 * NOTE(review): the following text is not part of the original source; it
 * is German boilerplate from a web-based code viewer, translated here:
 * "The information on this website has been compiled carefully and to the
 * best of our knowledge.  However, neither completeness, nor correctness,
 * nor quality of the information provided is guaranteed.
 * Note: the colored syntax highlighting and the measurement are still
 * experimental."
 */