Skip to content

Commit 3d4247f

Browse files
chleroympe
authored and committed
powerpc/32: Add support of KASAN_VMALLOC
Add support of KASAN_VMALLOC on PPC32. To allow this, the early shadow covering the VMALLOC space need to be removed once high_memory var is set and before freeing memblock. And the VMALLOC area need to be aligned such that boundaries are covered by a full shadow page. Signed-off-by: Christophe Leroy <[email protected]> Signed-off-by: Michael Ellerman <[email protected]> Link: https://lore.kernel.org/r/031dec5487bde9b2181c8b3c9800e1879cf98c1a.1579024426.git.christophe.leroy@c-s.fr
1 parent 0f9aee0 commit 3d4247f

File tree

6 files changed

+49
-1
lines changed

6 files changed

+49
-1
lines changed

arch/powerpc/Kconfig

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -173,6 +173,7 @@ config PPC
173173
select HAVE_ARCH_HUGE_VMAP if PPC_BOOK3S_64 && PPC_RADIX_MMU
174174
select HAVE_ARCH_JUMP_LABEL
175175
select HAVE_ARCH_KASAN if PPC32
176+
select HAVE_ARCH_KASAN_VMALLOC if PPC32
176177
select HAVE_ARCH_KGDB
177178
select HAVE_ARCH_MMAP_RND_BITS
178179
select HAVE_ARCH_MMAP_RND_COMPAT_BITS if COMPAT

arch/powerpc/include/asm/book3s/32/pgtable.h

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -193,7 +193,12 @@ int map_kernel_page(unsigned long va, phys_addr_t pa, pgprot_t prot);
193193
#else
194194
#define VMALLOC_START ((((long)high_memory + VMALLOC_OFFSET) & ~(VMALLOC_OFFSET-1)))
195195
#endif
196+
197+
#ifdef CONFIG_KASAN_VMALLOC
198+
#define VMALLOC_END _ALIGN_DOWN(ioremap_bot, PAGE_SIZE << KASAN_SHADOW_SCALE_SHIFT)
199+
#else
196200
#define VMALLOC_END ioremap_bot
201+
#endif
197202

198203
#ifndef __ASSEMBLY__
199204
#include <linux/sched.h>

arch/powerpc/include/asm/kasan.h

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -31,9 +31,11 @@
3131
void kasan_early_init(void);
3232
void kasan_mmu_init(void);
3333
void kasan_init(void);
34+
void kasan_late_init(void);
3435
#else
3536
static inline void kasan_init(void) { }
3637
static inline void kasan_mmu_init(void) { }
38+
static inline void kasan_late_init(void) { }
3739
#endif
3840

3941
#endif /* __ASSEMBLY */

arch/powerpc/include/asm/nohash/32/pgtable.h

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -114,7 +114,12 @@ int map_kernel_page(unsigned long va, phys_addr_t pa, pgprot_t prot);
114114
#else
115115
#define VMALLOC_START ((((long)high_memory + VMALLOC_OFFSET) & ~(VMALLOC_OFFSET-1)))
116116
#endif
117+
118+
#ifdef CONFIG_KASAN_VMALLOC
119+
#define VMALLOC_END _ALIGN_DOWN(ioremap_bot, PAGE_SIZE << KASAN_SHADOW_SCALE_SHIFT)
120+
#else
117121
#define VMALLOC_END ioremap_bot
122+
#endif
118123

119124
/*
120125
* Bits in a linux-style PTE. These match the bits in the

arch/powerpc/mm/kasan/kasan_init_32.c

Lines changed: 32 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -129,6 +129,31 @@ static void __init kasan_remap_early_shadow_ro(void)
129129
flush_tlb_kernel_range(KASAN_SHADOW_START, KASAN_SHADOW_END);
130130
}
131131

132+
/*
 * Tear down the early (read-only, shared) shadow page mappings covering
 * the vmalloc area, so that CONFIG_KASAN_VMALLOC can populate real
 * shadow pages on demand. Called from kasan_late_init() after
 * high_memory is set (VMALLOC_START depends on it) and before
 * memblock_free_all().
 */
static void __init kasan_unmap_early_shadow_vmalloc(void)
{
	unsigned long k_start = (unsigned long)kasan_mem_to_shadow((void *)VMALLOC_START);
	unsigned long k_end = (unsigned long)kasan_mem_to_shadow((void *)VMALLOC_END);
	unsigned long k_cur;
	phys_addr_t pa = __pa(kasan_early_shadow_page);

	if (!early_mmu_has_feature(MMU_FTR_HPTE_TABLE)) {
		int ret = kasan_init_shadow_page_tables(k_start, k_end);

		if (ret)
			panic("kasan: kasan_init_shadow_page_tables() failed");
	}
	for (k_cur = k_start & PAGE_MASK; k_cur < k_end; k_cur += PAGE_SIZE) {
		pmd_t *pmd = pmd_offset(pud_offset(pgd_offset_k(k_cur), k_cur), k_cur);
		pte_t *ptep = pte_offset_kernel(pmd, k_cur);

		/* Only clear PTEs still pointing at the early shadow page */
		if ((pte_val(*ptep) & PTE_RPN_MASK) != pa)
			continue;

		__set_pte_at(&init_mm, k_cur, ptep, __pte(0), 0);
	}
	flush_tlb_kernel_range(k_start, k_end);
}
156+
132157
void __init kasan_mmu_init(void)
133158
{
134159
int ret;
@@ -165,7 +190,13 @@ void __init kasan_init(void)
165190
pr_info("KASAN init done\n");
166191
}
167192

168-
#ifdef CONFIG_MODULES
193+
/*
 * Late KASAN initialisation, called from mem_init() once high_memory
 * is known: with CONFIG_KASAN_VMALLOC the early shadow mapped over the
 * vmalloc area must be removed so real shadow pages can be installed.
 */
void __init kasan_late_init(void)
{
	if (IS_ENABLED(CONFIG_KASAN_VMALLOC))
		kasan_unmap_early_shadow_vmalloc();
}
198+
199+
#if defined(CONFIG_MODULES) && !defined(CONFIG_KASAN_VMALLOC)
169200
void *module_alloc(unsigned long size)
170201
{
171202
void *base;

arch/powerpc/mm/mem.c

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -49,6 +49,7 @@
4949
#include <asm/fixmap.h>
5050
#include <asm/swiotlb.h>
5151
#include <asm/rtas.h>
52+
#include <asm/kasan.h>
5253

5354
#include <mm/mmu_decl.h>
5455

@@ -301,6 +302,9 @@ void __init mem_init(void)
301302

302303
high_memory = (void *) __va(max_low_pfn * PAGE_SIZE);
303304
set_max_mapnr(max_pfn);
305+
306+
kasan_late_init();
307+
304308
memblock_free_all();
305309

306310
#ifdef CONFIG_HIGHMEM

0 commit comments

Comments (0)