Commit b19b74b

kiryl authored and hansendc committed
x86/mm: Rework address range check in get_user() and put_user()

The functions get_user() and put_user() check that the target address range resides in the user space portion of the virtual address space. In order to perform this check, the functions compare the end of the range against TASK_SIZE_MAX. For kernels compiled with CONFIG_X86_5LEVEL, this requires some additional trickery using ALTERNATIVE, as TASK_SIZE_MAX depends on the paging mode in use.

Linus suggested that this check could be simplified for 64-bit kernels. It is sufficient to check bit 63 of the address to ensure that the range belongs to user space. Additionally, the use of branches can be avoided by setting the target address to all ones if bit 63 is set.

There is no need to check the end of the access range, as there is a huge gap between the end of the userspace range and the start of the kernel range. The gap consists of the canonical hole and unused ranges on both the kernel and userspace sides.

If an address with bit 63 set is passed down, it will trigger a #GP exception. _ASM_EXTABLE_UA() complains about this, so replace it with plain _ASM_EXTABLE(), as this is now expected behaviour.

The updated get_user() and put_user() checks are also compatible with Linear Address Masking, which allows user space to encode metadata in the upper bits of pointers, and eliminate the need to untag the address before handling it.

Suggested-by: Linus Torvalds <[email protected]>
Signed-off-by: Kirill A. Shutemov <[email protected]>
Signed-off-by: Dave Hansen <[email protected]>
Acked-by: Peter Zijlstra (Intel) <[email protected]>
Link: https://lore.kernel.org/all/20230312112612.31869-2-kirill.shutemov%40linux.intel.com
1 parent eeac8ed commit b19b74b
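The branch-free check described above boils down to sign-extending bit 63 across the whole register and OR-ing the result back into the address: a user pointer (bit 63 clear) passes through unchanged, while a kernel pointer collapses to all ones, a non-canonical address that faults on access. A minimal user-space C sketch of that arithmetic (the helper name and test values are illustrative only; this is not kernel code):

#include <stdint.h>
#include <stdio.h>

/* Mirror of the "mov; sar $63; or" sequence used by check_range. */
static uint64_t mask_user_address(uint64_t addr)
{
        uint64_t mask = (uint64_t)((int64_t)addr >> 63);  /* 0 or all ones */
        return addr | mask;
}

int main(void)
{
        /* Plain user pointer: bit 63 clear, passes through unchanged. */
        printf("%#llx\n", (unsigned long long)mask_user_address(0x00007fffdeadbeefULL));
        /* Kernel pointer: bit 63 set, collapses to an all-ones address. */
        printf("%#llx\n", (unsigned long long)mask_user_address(0xffff888000000000ULL));
        /* LAM-style tagged user pointer: metadata below bit 63 is left alone. */
        printf("%#llx\n", (unsigned long long)mask_user_address(0x12007fffdeadbeefULL));
        return 0;
}

In the kernel itself the all-ones result is simply dereferenced; the resulting #GP is resolved through the plain _ASM_EXTABLE() entries added by this commit.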

2 files changed: 55 additions, 82 deletions

arch/x86/lib/getuser.S

Lines changed: 31 additions & 52 deletions
@@ -37,22 +37,22 @@

 #define ASM_BARRIER_NOSPEC ALTERNATIVE "", "lfence", X86_FEATURE_LFENCE_RDTSC

-#ifdef CONFIG_X86_5LEVEL
-#define LOAD_TASK_SIZE_MINUS_N(n) \
-        ALTERNATIVE __stringify(mov $((1 << 47) - 4096 - (n)),%rdx), \
-                    __stringify(mov $((1 << 56) - 4096 - (n)),%rdx), X86_FEATURE_LA57
-#else
-#define LOAD_TASK_SIZE_MINUS_N(n) \
-        mov $(TASK_SIZE_MAX - (n)),%_ASM_DX
-#endif
+.macro check_range size:req
+.if IS_ENABLED(CONFIG_X86_64)
+        mov %rax, %rdx
+        sar $63, %rdx
+        or %rdx, %rax
+.else
+        cmp $TASK_SIZE_MAX-\size+1, %eax
+        jae .Lbad_get_user
+        sbb %edx, %edx          /* array_index_mask_nospec() */
+        and %edx, %eax
+.endif
+.endm

 .text
 SYM_FUNC_START(__get_user_1)
-        LOAD_TASK_SIZE_MINUS_N(0)
-        cmp %_ASM_DX,%_ASM_AX
-        jae bad_get_user
-        sbb %_ASM_DX, %_ASM_DX          /* array_index_mask_nospec() */
-        and %_ASM_DX, %_ASM_AX
+        check_range size=1
         ASM_STAC
 1:      movzbl (%_ASM_AX),%edx
         xor %eax,%eax
@@ -62,11 +62,7 @@ SYM_FUNC_END(__get_user_1)
 EXPORT_SYMBOL(__get_user_1)

 SYM_FUNC_START(__get_user_2)
-        LOAD_TASK_SIZE_MINUS_N(1)
-        cmp %_ASM_DX,%_ASM_AX
-        jae bad_get_user
-        sbb %_ASM_DX, %_ASM_DX          /* array_index_mask_nospec() */
-        and %_ASM_DX, %_ASM_AX
+        check_range size=2
         ASM_STAC
 2:      movzwl (%_ASM_AX),%edx
         xor %eax,%eax
@@ -76,11 +72,7 @@ SYM_FUNC_END(__get_user_2)
 EXPORT_SYMBOL(__get_user_2)

 SYM_FUNC_START(__get_user_4)
-        LOAD_TASK_SIZE_MINUS_N(3)
-        cmp %_ASM_DX,%_ASM_AX
-        jae bad_get_user
-        sbb %_ASM_DX, %_ASM_DX          /* array_index_mask_nospec() */
-        and %_ASM_DX, %_ASM_AX
+        check_range size=4
         ASM_STAC
 3:      movl (%_ASM_AX),%edx
         xor %eax,%eax
@@ -90,30 +82,17 @@ SYM_FUNC_END(__get_user_4)
 EXPORT_SYMBOL(__get_user_4)

 SYM_FUNC_START(__get_user_8)
-#ifdef CONFIG_X86_64
-        LOAD_TASK_SIZE_MINUS_N(7)
-        cmp %_ASM_DX,%_ASM_AX
-        jae bad_get_user
-        sbb %_ASM_DX, %_ASM_DX          /* array_index_mask_nospec() */
-        and %_ASM_DX, %_ASM_AX
+        check_range size=8
         ASM_STAC
+#ifdef CONFIG_X86_64
 4:      movq (%_ASM_AX),%rdx
-        xor %eax,%eax
-        ASM_CLAC
-        RET
 #else
-        LOAD_TASK_SIZE_MINUS_N(7)
-        cmp %_ASM_DX,%_ASM_AX
-        jae bad_get_user_8
-        sbb %_ASM_DX, %_ASM_DX          /* array_index_mask_nospec() */
-        and %_ASM_DX, %_ASM_AX
-        ASM_STAC
 4:      movl (%_ASM_AX),%edx
 5:      movl 4(%_ASM_AX),%ecx
+#endif
         xor %eax,%eax
         ASM_CLAC
         RET
-#endif
 SYM_FUNC_END(__get_user_8)
 EXPORT_SYMBOL(__get_user_8)

@@ -166,7 +145,7 @@ EXPORT_SYMBOL(__get_user_nocheck_8)

 SYM_CODE_START_LOCAL(.Lbad_get_user_clac)
         ASM_CLAC
-bad_get_user:
+.Lbad_get_user:
         xor %edx,%edx
         mov $(-EFAULT),%_ASM_AX
         RET
@@ -184,23 +163,23 @@ SYM_CODE_END(.Lbad_get_user_8_clac)
 #endif

 /* get_user */
-        _ASM_EXTABLE_UA(1b, .Lbad_get_user_clac)
-        _ASM_EXTABLE_UA(2b, .Lbad_get_user_clac)
-        _ASM_EXTABLE_UA(3b, .Lbad_get_user_clac)
+        _ASM_EXTABLE(1b, .Lbad_get_user_clac)
+        _ASM_EXTABLE(2b, .Lbad_get_user_clac)
+        _ASM_EXTABLE(3b, .Lbad_get_user_clac)
 #ifdef CONFIG_X86_64
-        _ASM_EXTABLE_UA(4b, .Lbad_get_user_clac)
+        _ASM_EXTABLE(4b, .Lbad_get_user_clac)
 #else
-        _ASM_EXTABLE_UA(4b, .Lbad_get_user_8_clac)
-        _ASM_EXTABLE_UA(5b, .Lbad_get_user_8_clac)
+        _ASM_EXTABLE(4b, .Lbad_get_user_8_clac)
+        _ASM_EXTABLE(5b, .Lbad_get_user_8_clac)
 #endif

 /* __get_user */
-        _ASM_EXTABLE_UA(6b, .Lbad_get_user_clac)
-        _ASM_EXTABLE_UA(7b, .Lbad_get_user_clac)
-        _ASM_EXTABLE_UA(8b, .Lbad_get_user_clac)
+        _ASM_EXTABLE(6b, .Lbad_get_user_clac)
+        _ASM_EXTABLE(7b, .Lbad_get_user_clac)
+        _ASM_EXTABLE(8b, .Lbad_get_user_clac)
 #ifdef CONFIG_X86_64
-        _ASM_EXTABLE_UA(9b, .Lbad_get_user_clac)
+        _ASM_EXTABLE(9b, .Lbad_get_user_clac)
 #else
-        _ASM_EXTABLE_UA(9b, .Lbad_get_user_8_clac)
-        _ASM_EXTABLE_UA(10b, .Lbad_get_user_8_clac)
+        _ASM_EXTABLE(9b, .Lbad_get_user_8_clac)
+        _ASM_EXTABLE(10b, .Lbad_get_user_8_clac)
 #endif
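On the 32-bit side, the .else branch of getuser.S's check_range keeps the classic cmp/jae bounds check followed by sbb/and, so that a mispredicted branch cannot speculatively dereference an out-of-range pointer. A rough C sketch of the data flow behind that array_index_mask_nospec() pattern (the helper below is hypothetical and only mirrors the data flow, not the branch-free code generation the assembly guarantees):

#include <stdint.h>

/*
 * If addr is below limit the mask is all ones and addr is preserved;
 * otherwise the mask is zero and the address is clamped to 0, so even a
 * speculatively executed load cannot reach past the limit.
 */
static uint32_t mask_nospec(uint32_t addr, uint32_t limit)
{
        uint32_t mask = 0u - (uint32_t)(addr < limit);  /* all ones or 0 */
        return addr & mask;
}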

arch/x86/lib/putuser.S

Lines changed: 24 additions & 30 deletions
@@ -33,20 +33,20 @@
  * as they get called from within inline assembly.
  */

-#ifdef CONFIG_X86_5LEVEL
-#define LOAD_TASK_SIZE_MINUS_N(n) \
-        ALTERNATIVE __stringify(mov $((1 << 47) - 4096 - (n)),%rbx), \
-                    __stringify(mov $((1 << 56) - 4096 - (n)),%rbx), X86_FEATURE_LA57
-#else
-#define LOAD_TASK_SIZE_MINUS_N(n) \
-        mov $(TASK_SIZE_MAX - (n)),%_ASM_BX
-#endif
+.macro check_range size:req
+.if IS_ENABLED(CONFIG_X86_64)
+        mov %rcx, %rbx
+        sar $63, %rbx
+        or %rbx, %rcx
+.else
+        cmp $TASK_SIZE_MAX-\size+1, %ecx
+        jae .Lbad_put_user
+.endif
+.endm

 .text
 SYM_FUNC_START(__put_user_1)
-        LOAD_TASK_SIZE_MINUS_N(0)
-        cmp %_ASM_BX,%_ASM_CX
-        jae .Lbad_put_user
+        check_range size=1
         ASM_STAC
 1:      movb %al,(%_ASM_CX)
         xor %ecx,%ecx
@@ -66,9 +66,7 @@ SYM_FUNC_END(__put_user_nocheck_1)
 EXPORT_SYMBOL(__put_user_nocheck_1)

 SYM_FUNC_START(__put_user_2)
-        LOAD_TASK_SIZE_MINUS_N(1)
-        cmp %_ASM_BX,%_ASM_CX
-        jae .Lbad_put_user
+        check_range size=2
         ASM_STAC
 3:      movw %ax,(%_ASM_CX)
         xor %ecx,%ecx
@@ -88,9 +86,7 @@ SYM_FUNC_END(__put_user_nocheck_2)
 EXPORT_SYMBOL(__put_user_nocheck_2)

 SYM_FUNC_START(__put_user_4)
-        LOAD_TASK_SIZE_MINUS_N(3)
-        cmp %_ASM_BX,%_ASM_CX
-        jae .Lbad_put_user
+        check_range size=4
         ASM_STAC
 5:      movl %eax,(%_ASM_CX)
         xor %ecx,%ecx
@@ -110,9 +106,7 @@ SYM_FUNC_END(__put_user_nocheck_4)
 EXPORT_SYMBOL(__put_user_nocheck_4)

 SYM_FUNC_START(__put_user_8)
-        LOAD_TASK_SIZE_MINUS_N(7)
-        cmp %_ASM_BX,%_ASM_CX
-        jae .Lbad_put_user
+        check_range size=8
         ASM_STAC
 7:      mov %_ASM_AX,(%_ASM_CX)
 #ifdef CONFIG_X86_32
@@ -144,15 +138,15 @@ SYM_CODE_START_LOCAL(.Lbad_put_user_clac)
         RET
 SYM_CODE_END(.Lbad_put_user_clac)

-        _ASM_EXTABLE_UA(1b, .Lbad_put_user_clac)
-        _ASM_EXTABLE_UA(2b, .Lbad_put_user_clac)
-        _ASM_EXTABLE_UA(3b, .Lbad_put_user_clac)
-        _ASM_EXTABLE_UA(4b, .Lbad_put_user_clac)
-        _ASM_EXTABLE_UA(5b, .Lbad_put_user_clac)
-        _ASM_EXTABLE_UA(6b, .Lbad_put_user_clac)
-        _ASM_EXTABLE_UA(7b, .Lbad_put_user_clac)
-        _ASM_EXTABLE_UA(9b, .Lbad_put_user_clac)
+        _ASM_EXTABLE(1b, .Lbad_put_user_clac)
+        _ASM_EXTABLE(2b, .Lbad_put_user_clac)
+        _ASM_EXTABLE(3b, .Lbad_put_user_clac)
+        _ASM_EXTABLE(4b, .Lbad_put_user_clac)
+        _ASM_EXTABLE(5b, .Lbad_put_user_clac)
+        _ASM_EXTABLE(6b, .Lbad_put_user_clac)
+        _ASM_EXTABLE(7b, .Lbad_put_user_clac)
+        _ASM_EXTABLE(9b, .Lbad_put_user_clac)
 #ifdef CONFIG_X86_32
-        _ASM_EXTABLE_UA(8b, .Lbad_put_user_clac)
-        _ASM_EXTABLE_UA(10b, .Lbad_put_user_clac)
+        _ASM_EXTABLE(8b, .Lbad_put_user_clac)
+        _ASM_EXTABLE(10b, .Lbad_put_user_clac)
 #endif
