Skip to content

Commit 917538e

Browse files
xairy authored and torvalds committed
kasan: clean up KASAN_SHADOW_SCALE_SHIFT usage
Right now the fact that KASAN uses a single shadow byte for 8 bytes of memory is scattered all over the code. This change defines KASAN_SHADOW_SCALE_SHIFT early in asm include files and makes use of this constant where necessary. [akpm@linux-foundation.org: coding-style fixes] Link: http://lkml.kernel.org/r/34937ca3b90736eaad91b568edf5684091f662e3.1515775666.git.andreyknvl@google.com Signed-off-by: Andrey Konovalov <andreyknvl@google.com> Acked-by: Andrey Ryabinin <aryabinin@virtuozzo.com> Cc: Dmitry Vyukov <dvyukov@google.com> Signed-off-by: Andrew Morton <akpm@linux-foundation.org> Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
1 parent 5f21f3a commit 917538e

File tree

5 files changed

+22
-15
lines changed

5 files changed

+22
-15
lines changed

arch/arm64/include/asm/kasan.h

Lines changed: 10 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -12,22 +12,25 @@
1212

1313
/*
1414
* KASAN_SHADOW_START: beginning of the kernel virtual addresses.
15-
* KASAN_SHADOW_END: KASAN_SHADOW_START + 1/8 of kernel virtual addresses.
15+
* KASAN_SHADOW_END: KASAN_SHADOW_START + 1/N of kernel virtual addresses,
16+
* where N = (1 << KASAN_SHADOW_SCALE_SHIFT).
1617
*/
1718
#define KASAN_SHADOW_START (VA_START)
1819
#define KASAN_SHADOW_END (KASAN_SHADOW_START + KASAN_SHADOW_SIZE)
1920

2021
/*
2122
* This value is used to map an address to the corresponding shadow
2223
* address by the following formula:
23-
* shadow_addr = (address >> 3) + KASAN_SHADOW_OFFSET;
24+
* shadow_addr = (address >> KASAN_SHADOW_SCALE_SHIFT) + KASAN_SHADOW_OFFSET
2425
*
25-
* (1 << 61) shadow addresses - [KASAN_SHADOW_OFFSET,KASAN_SHADOW_END]
26-
* cover all 64-bits of virtual addresses. So KASAN_SHADOW_OFFSET
27-
* should satisfy the following equation:
28-
* KASAN_SHADOW_OFFSET = KASAN_SHADOW_END - (1ULL << 61)
26+
* (1 << (64 - KASAN_SHADOW_SCALE_SHIFT)) shadow addresses that lie in range
27+
* [KASAN_SHADOW_OFFSET, KASAN_SHADOW_END) cover all 64-bits of virtual
28+
* addresses. So KASAN_SHADOW_OFFSET should satisfy the following equation:
29+
* KASAN_SHADOW_OFFSET = KASAN_SHADOW_END -
30+
* (1ULL << (64 - KASAN_SHADOW_SCALE_SHIFT))
2931
*/
30-
#define KASAN_SHADOW_OFFSET (KASAN_SHADOW_END - (1ULL << (64 - 3)))
32+
#define KASAN_SHADOW_OFFSET (KASAN_SHADOW_END - (1ULL << \
33+
(64 - KASAN_SHADOW_SCALE_SHIFT)))
3134

3235
void kasan_init(void);
3336
void kasan_copy_shadow(pgd_t *pgdir);

arch/arm64/include/asm/memory.h

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -85,7 +85,8 @@
8585
* stack size when KASAN is in use.
8686
*/
8787
#ifdef CONFIG_KASAN
88-
#define KASAN_SHADOW_SIZE (UL(1) << (VA_BITS - 3))
88+
#define KASAN_SHADOW_SCALE_SHIFT 3
89+
#define KASAN_SHADOW_SIZE (UL(1) << (VA_BITS - KASAN_SHADOW_SCALE_SHIFT))
8990
#define KASAN_THREAD_SHIFT 1
9091
#else
9192
#define KASAN_SHADOW_SIZE (0)

arch/arm64/mm/kasan_init.c

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -135,7 +135,8 @@ static void __init kasan_pgd_populate(unsigned long addr, unsigned long end,
135135
/* The early shadow maps everything to a single page of zeroes */
136136
asmlinkage void __init kasan_early_init(void)
137137
{
138-
BUILD_BUG_ON(KASAN_SHADOW_OFFSET != KASAN_SHADOW_END - (1UL << 61));
138+
BUILD_BUG_ON(KASAN_SHADOW_OFFSET !=
139+
KASAN_SHADOW_END - (1UL << (64 - KASAN_SHADOW_SCALE_SHIFT)));
139140
BUILD_BUG_ON(!IS_ALIGNED(KASAN_SHADOW_START, PGDIR_SIZE));
140141
BUILD_BUG_ON(!IS_ALIGNED(KASAN_SHADOW_END, PGDIR_SIZE));
141142
kasan_pgd_populate(KASAN_SHADOW_START, KASAN_SHADOW_END, NUMA_NO_NODE,

arch/x86/include/asm/kasan.h

Lines changed: 8 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,7 @@
44

55
#include <linux/const.h>
66
#define KASAN_SHADOW_OFFSET _AC(CONFIG_KASAN_SHADOW_OFFSET, UL)
7+
#define KASAN_SHADOW_SCALE_SHIFT 3
78

89
/*
910
* Compiler uses shadow offset assuming that addresses start
@@ -12,12 +13,15 @@
1213
* 'kernel address space start' >> KASAN_SHADOW_SCALE_SHIFT
1314
*/
1415
#define KASAN_SHADOW_START (KASAN_SHADOW_OFFSET + \
15-
((-1UL << __VIRTUAL_MASK_SHIFT) >> 3))
16+
((-1UL << __VIRTUAL_MASK_SHIFT) >> \
17+
KASAN_SHADOW_SCALE_SHIFT))
1618
/*
17-
* 47 bits for kernel address -> (47 - 3) bits for shadow
18-
* 56 bits for kernel address -> (56 - 3) bits for shadow
19+
* 47 bits for kernel address -> (47 - KASAN_SHADOW_SCALE_SHIFT) bits for shadow
20+
* 56 bits for kernel address -> (56 - KASAN_SHADOW_SCALE_SHIFT) bits for shadow
1921
*/
20-
#define KASAN_SHADOW_END (KASAN_SHADOW_START + (1ULL << (__VIRTUAL_MASK_SHIFT - 3)))
22+
#define KASAN_SHADOW_END (KASAN_SHADOW_START + \
23+
(1ULL << (__VIRTUAL_MASK_SHIFT - \
24+
KASAN_SHADOW_SCALE_SHIFT)))
2125

2226
#ifndef __ASSEMBLY__
2327

include/linux/kasan.h

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -11,8 +11,6 @@ struct task_struct;
1111

1212
#ifdef CONFIG_KASAN
1313

14-
#define KASAN_SHADOW_SCALE_SHIFT 3
15-
1614
#include <asm/kasan.h>
1715
#include <asm/pgtable.h>
1816

0 commit comments

Comments (0)