Commit 11f1a4b

x86: reorganize SMAP handling in user space accesses
This reorganizes how we do the stac/clac instructions in the user access
code. Instead of adding the instructions directly to the same inline asm
that does the actual user level access and exception handling, add them
at a higher level.

This is mainly preparation for the next step, where we will expose an
interface to allow users to mark several accesses together as being user
space accesses, but it does already clean up some code:

 - the inlined trivial cases of copy_in_user() now do stac/clac just once
   over the accesses: they used to do one pair around the user space read,
   and another pair around the write-back.

 - the {get,put}_user_ex() macros that are used with the try/catch
   handling don't do any stac/clac at all, because that happens in the
   try/catch surrounding them.

Other than those two cleanups that happened naturally from the
re-organization, this should not make any difference. Yet.

Signed-off-by: Linus Torvalds <[email protected]>
1 parent a5e90b1 commit 11f1a4b
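For readers skimming the diff, the shape of the change is easy to model in
plain C. Below is a minimal, hypothetical user-space sketch (stac() and
clac() are stubbed out with printf; the real ones execute the privileged
STAC/CLAC instructions, and copy_in_user_old/copy_in_user_new are made-up
names): the old code paid one stac/clac pair per access, while the new
code brackets a whole group of accesses with a single pair.

#include <stdio.h>

/* Stubs standing in for the privileged instructions. */
static void stac(void) { printf("stac\n"); }  /* allow user accesses  */
static void clac(void) { printf("clac\n"); }  /* forbid user accesses */

#define __uaccess_begin() stac()
#define __uaccess_end()   clac()

/* Old shape: every access carried its own stac/clac pair. */
static void copy_in_user_old(int *dst, const int *src)
{
        int tmp;
        stac(); tmp = *src; clac();   /* pair #1, around the read  */
        stac(); *dst = tmp; clac();   /* pair #2, around the write */
}

/* New shape: one pair brackets the whole group of accesses. */
static void copy_in_user_new(int *dst, const int *src)
{
        int tmp;
        __uaccess_begin();
        tmp = *src;                   /* user-space read  */
        *dst = tmp;                   /* user-space write */
        __uaccess_end();
}

int main(void)
{
        int a = 42, b = 0;
        copy_in_user_old(&b, &a);     /* prints two stac/clac pairs */
        copy_in_user_new(&b, &a);     /* prints a single pair */
        return 0;
}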

2 files changed: +101 additions, -46 deletions

arch/x86/include/asm/uaccess.h

Lines changed: 35 additions & 18 deletions
@@ -134,6 +134,9 @@ extern int __get_user_4(void);
 extern int __get_user_8(void);
 extern int __get_user_bad(void);
 
+#define __uaccess_begin() stac()
+#define __uaccess_end()   clac()
+
 /*
  * This is a type: either unsigned long, if the argument fits into
  * that type, or otherwise unsigned long long.
@@ -193,10 +196,10 @@ __typeof__(__builtin_choose_expr(sizeof(x) > sizeof(0UL), 0ULL, 0UL))
 
 #ifdef CONFIG_X86_32
 #define __put_user_asm_u64(x, addr, err, errret) \
-        asm volatile(ASM_STAC "\n" \
+        asm volatile("\n" \
                      "1:    movl %%eax,0(%2)\n" \
                      "2:    movl %%edx,4(%2)\n" \
-                     "3: " ASM_CLAC "\n" \
+                     "3:" \
                      ".section .fixup,\"ax\"\n" \
                      "4:    movl %3,%0\n" \
                      "      jmp 3b\n" \
@@ -207,10 +210,10 @@ __typeof__(__builtin_choose_expr(sizeof(x) > sizeof(0UL), 0ULL, 0UL))
                      : "A" (x), "r" (addr), "i" (errret), "0" (err))
 
 #define __put_user_asm_ex_u64(x, addr) \
-        asm volatile(ASM_STAC "\n" \
+        asm volatile("\n" \
                      "1:    movl %%eax,0(%1)\n" \
                      "2:    movl %%edx,4(%1)\n" \
-                     "3: " ASM_CLAC "\n" \
+                     "3:" \
                      _ASM_EXTABLE_EX(1b, 2b) \
                      _ASM_EXTABLE_EX(2b, 3b) \
                      : : "A" (x), "r" (addr))
@@ -304,6 +307,10 @@ do { \
        } \
 } while (0)
 
+/*
+ * This doesn't do __uaccess_begin/end - the exception handling
+ * around it must do that.
+ */
 #define __put_user_size_ex(x, ptr, size) \
 do { \
        __chk_user_ptr(ptr); \
@@ -358,9 +365,9 @@ do { \
 } while (0)
 
 #define __get_user_asm(x, addr, err, itype, rtype, ltype, errret) \
-        asm volatile(ASM_STAC "\n" \
+        asm volatile("\n" \
                      "1:    mov"itype" %2,%"rtype"1\n" \
-                     "2: " ASM_CLAC "\n" \
+                     "2:\n" \
                      ".section .fixup,\"ax\"\n" \
                      "3:    mov %3,%0\n" \
                      "      xor"itype" %"rtype"1,%"rtype"1\n" \
@@ -370,6 +377,10 @@ do { \
                      : "=r" (err), ltype(x) \
                      : "m" (__m(addr)), "i" (errret), "0" (err))
 
+/*
+ * This doesn't do __uaccess_begin/end - the exception handling
+ * around it must do that.
+ */
 #define __get_user_size_ex(x, ptr, size) \
 do { \
        __chk_user_ptr(ptr); \
@@ -400,15 +411,19 @@ do { \
 #define __put_user_nocheck(x, ptr, size) \
 ({ \
        int __pu_err; \
+       __uaccess_begin(); \
        __put_user_size((x), (ptr), (size), __pu_err, -EFAULT); \
+       __uaccess_end(); \
        __builtin_expect(__pu_err, 0); \
 })
 
 #define __get_user_nocheck(x, ptr, size) \
 ({ \
        int __gu_err; \
        unsigned long __gu_val; \
+       __uaccess_begin(); \
        __get_user_size(__gu_val, (ptr), (size), __gu_err, -EFAULT); \
+       __uaccess_end(); \
        (x) = (__force __typeof__(*(ptr)))__gu_val; \
        __builtin_expect(__gu_err, 0); \
 })
@@ -423,9 +438,9 @@ struct __large_struct { unsigned long buf[100]; };
  * aliasing issues.
  */
 #define __put_user_asm(x, addr, err, itype, rtype, ltype, errret) \
-        asm volatile(ASM_STAC "\n" \
+        asm volatile("\n" \
                      "1:    mov"itype" %"rtype"1,%2\n" \
-                     "2: " ASM_CLAC "\n" \
+                     "2:\n" \
                      ".section .fixup,\"ax\"\n" \
                      "3:    mov %3,%0\n" \
                      "      jmp 2b\n" \
@@ -445,11 +460,11 @@ struct __large_struct { unsigned long buf[100]; };
  */
 #define uaccess_try    do { \
        current_thread_info()->uaccess_err = 0; \
-       stac(); \
+       __uaccess_begin(); \
        barrier();
 
 #define uaccess_catch(err) \
-       clac(); \
+       __uaccess_end(); \
        (err) |= (current_thread_info()->uaccess_err ? -EFAULT : 0); \
 } while (0)
 
@@ -547,12 +562,13 @@ extern void __cmpxchg_wrong_size(void)
        __typeof__(ptr) __uval = (uval); \
        __typeof__(*(ptr)) __old = (old); \
        __typeof__(*(ptr)) __new = (new); \
+       __uaccess_begin(); \
        switch (size) { \
        case 1: \
        { \
-               asm volatile("\t" ASM_STAC "\n" \
+               asm volatile("\n" \
                        "1:\t" LOCK_PREFIX "cmpxchgb %4, %2\n" \
-                       "2:\t" ASM_CLAC "\n" \
+                       "2:\n" \
                        "\t.section .fixup, \"ax\"\n" \
                        "3:\tmov %3, %0\n" \
                        "\tjmp 2b\n" \
@@ -566,9 +582,9 @@ extern void __cmpxchg_wrong_size(void)
        } \
        case 2: \
        { \
-               asm volatile("\t" ASM_STAC "\n" \
+               asm volatile("\n" \
                        "1:\t" LOCK_PREFIX "cmpxchgw %4, %2\n" \
-                       "2:\t" ASM_CLAC "\n" \
+                       "2:\n" \
                        "\t.section .fixup, \"ax\"\n" \
                        "3:\tmov %3, %0\n" \
                        "\tjmp 2b\n" \
@@ -582,9 +598,9 @@ extern void __cmpxchg_wrong_size(void)
        } \
        case 4: \
        { \
-               asm volatile("\t" ASM_STAC "\n" \
+               asm volatile("\n" \
                        "1:\t" LOCK_PREFIX "cmpxchgl %4, %2\n" \
-                       "2:\t" ASM_CLAC "\n" \
+                       "2:\n" \
                        "\t.section .fixup, \"ax\"\n" \
                        "3:\tmov %3, %0\n" \
                        "\tjmp 2b\n" \
@@ -601,9 +617,9 @@ extern void __cmpxchg_wrong_size(void)
        if (!IS_ENABLED(CONFIG_X86_64)) \
                __cmpxchg_wrong_size(); \
 \
-               asm volatile("\t" ASM_STAC "\n" \
+               asm volatile("\n" \
                        "1:\t" LOCK_PREFIX "cmpxchgq %4, %2\n" \
-                       "2:\t" ASM_CLAC "\n" \
+                       "2:\n" \
                        "\t.section .fixup, \"ax\"\n" \
                        "3:\tmov %3, %0\n" \
                        "\tjmp 2b\n" \
@@ -618,6 +634,7 @@ extern void __cmpxchg_wrong_size(void)
        default: \
                __cmpxchg_wrong_size(); \
        } \
+       __uaccess_end(); \
        *__uval = __old; \
        __ret; \
 })
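The reason the _ex variants can go without any stac/clac of their own is
visible in the uaccess_try/uaccess_catch hunk above: the try/catch
brackets run __uaccess_begin/end once for the whole block. A sketch of
the consumer pattern (modeled on the x86 signal-frame code; the
restore_regs() name is made up and the sigcontext fields shown are
illustrative; get_user_try/get_user_ex/get_user_catch are this header's
thin wrappers around uaccess_try, __get_user_size_ex and uaccess_catch):

/* Kernel-context sketch, not compilable stand-alone. */
static int restore_regs(struct pt_regs *regs, struct sigcontext __user *sc)
{
        int err = 0;

        get_user_try {                  /* uaccess_try -> __uaccess_begin() */
                get_user_ex(regs->ax, &sc->ax);   /* no stac/clac here */
                get_user_ex(regs->bx, &sc->bx);   /* ...or here        */
        } get_user_catch(err);          /* __uaccess_end(), collect fault */

        return err;                     /* 0, or -EFAULT on a fault */
}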

arch/x86/include/asm/uaccess_64.h

Lines changed: 66 additions & 28 deletions
@@ -56,35 +56,49 @@ int __copy_from_user_nocheck(void *dst, const void __user *src, unsigned size)
        if (!__builtin_constant_p(size))
                return copy_user_generic(dst, (__force void *)src, size);
        switch (size) {
-       case 1:__get_user_asm(*(u8 *)dst, (u8 __user *)src,
+       case 1:
+               __uaccess_begin();
+               __get_user_asm(*(u8 *)dst, (u8 __user *)src,
                              ret, "b", "b", "=q", 1);
+               __uaccess_end();
                return ret;
-       case 2:__get_user_asm(*(u16 *)dst, (u16 __user *)src,
+       case 2:
+               __uaccess_begin();
+               __get_user_asm(*(u16 *)dst, (u16 __user *)src,
                              ret, "w", "w", "=r", 2);
+               __uaccess_end();
                return ret;
-       case 4:__get_user_asm(*(u32 *)dst, (u32 __user *)src,
+       case 4:
+               __uaccess_begin();
+               __get_user_asm(*(u32 *)dst, (u32 __user *)src,
                              ret, "l", "k", "=r", 4);
+               __uaccess_end();
                return ret;
-       case 8:__get_user_asm(*(u64 *)dst, (u64 __user *)src,
+       case 8:
+               __uaccess_begin();
+               __get_user_asm(*(u64 *)dst, (u64 __user *)src,
                              ret, "q", "", "=r", 8);
+               __uaccess_end();
                return ret;
        case 10:
+               __uaccess_begin();
                __get_user_asm(*(u64 *)dst, (u64 __user *)src,
                               ret, "q", "", "=r", 10);
-               if (unlikely(ret))
-                       return ret;
-               __get_user_asm(*(u16 *)(8 + (char *)dst),
-                              (u16 __user *)(8 + (char __user *)src),
-                              ret, "w", "w", "=r", 2);
+               if (likely(!ret))
+                       __get_user_asm(*(u16 *)(8 + (char *)dst),
+                                      (u16 __user *)(8 + (char __user *)src),
+                                      ret, "w", "w", "=r", 2);
+               __uaccess_end();
                return ret;
        case 16:
+               __uaccess_begin();
                __get_user_asm(*(u64 *)dst, (u64 __user *)src,
                               ret, "q", "", "=r", 16);
-               if (unlikely(ret))
-                       return ret;
-               __get_user_asm(*(u64 *)(8 + (char *)dst),
-                              (u64 __user *)(8 + (char __user *)src),
-                              ret, "q", "", "=r", 8);
+               if (likely(!ret))
+                       __get_user_asm(*(u64 *)(8 + (char *)dst),
+                                      (u64 __user *)(8 + (char __user *)src),
+                                      ret, "q", "", "=r", 8);
+               __uaccess_end();
                return ret;
        default:
                return copy_user_generic(dst, (__force void *)src, size);
@@ -106,35 +120,51 @@ int __copy_to_user_nocheck(void __user *dst, const void *src, unsigned size)
        if (!__builtin_constant_p(size))
                return copy_user_generic((__force void *)dst, src, size);
        switch (size) {
-       case 1:__put_user_asm(*(u8 *)src, (u8 __user *)dst,
+       case 1:
+               __uaccess_begin();
+               __put_user_asm(*(u8 *)src, (u8 __user *)dst,
                              ret, "b", "b", "iq", 1);
+               __uaccess_end();
                return ret;
-       case 2:__put_user_asm(*(u16 *)src, (u16 __user *)dst,
+       case 2:
+               __uaccess_begin();
+               __put_user_asm(*(u16 *)src, (u16 __user *)dst,
                              ret, "w", "w", "ir", 2);
+               __uaccess_end();
                return ret;
-       case 4:__put_user_asm(*(u32 *)src, (u32 __user *)dst,
+       case 4:
+               __uaccess_begin();
+               __put_user_asm(*(u32 *)src, (u32 __user *)dst,
                              ret, "l", "k", "ir", 4);
+               __uaccess_end();
                return ret;
-       case 8:__put_user_asm(*(u64 *)src, (u64 __user *)dst,
+       case 8:
+               __uaccess_begin();
+               __put_user_asm(*(u64 *)src, (u64 __user *)dst,
                              ret, "q", "", "er", 8);
+               __uaccess_end();
                return ret;
        case 10:
+               __uaccess_begin();
                __put_user_asm(*(u64 *)src, (u64 __user *)dst,
                               ret, "q", "", "er", 10);
-               if (unlikely(ret))
-                       return ret;
-               asm("":::"memory");
-               __put_user_asm(4[(u16 *)src], 4 + (u16 __user *)dst,
-                              ret, "w", "w", "ir", 2);
+               if (likely(!ret)) {
+                       asm("":::"memory");
+                       __put_user_asm(4[(u16 *)src], 4 + (u16 __user *)dst,
+                                      ret, "w", "w", "ir", 2);
+               }
+               __uaccess_end();
                return ret;
        case 16:
+               __uaccess_begin();
                __put_user_asm(*(u64 *)src, (u64 __user *)dst,
                               ret, "q", "", "er", 16);
-               if (unlikely(ret))
-                       return ret;
-               asm("":::"memory");
-               __put_user_asm(1[(u64 *)src], 1 + (u64 __user *)dst,
-                              ret, "q", "", "er", 8);
+               if (likely(!ret)) {
+                       asm("":::"memory");
+                       __put_user_asm(1[(u64 *)src], 1 + (u64 __user *)dst,
+                                      ret, "q", "", "er", 8);
+               }
+               __uaccess_end();
                return ret;
        default:
                return copy_user_generic((__force void *)dst, src, size);
@@ -160,39 +190,47 @@ int __copy_in_user(void __user *dst, const void __user *src, unsigned size)
        switch (size) {
        case 1: {
                u8 tmp;
+               __uaccess_begin();
                __get_user_asm(tmp, (u8 __user *)src,
                               ret, "b", "b", "=q", 1);
                if (likely(!ret))
                        __put_user_asm(tmp, (u8 __user *)dst,
                                       ret, "b", "b", "iq", 1);
+               __uaccess_end();
                return ret;
        }
        case 2: {
                u16 tmp;
+               __uaccess_begin();
                __get_user_asm(tmp, (u16 __user *)src,
                               ret, "w", "w", "=r", 2);
                if (likely(!ret))
                        __put_user_asm(tmp, (u16 __user *)dst,
                                       ret, "w", "w", "ir", 2);
+               __uaccess_end();
                return ret;
        }
 
        case 4: {
                u32 tmp;
+               __uaccess_begin();
                __get_user_asm(tmp, (u32 __user *)src,
                               ret, "l", "k", "=r", 4);
                if (likely(!ret))
                        __put_user_asm(tmp, (u32 __user *)dst,
                                       ret, "l", "k", "ir", 4);
+               __uaccess_end();
                return ret;
        }
        case 8: {
                u64 tmp;
+               __uaccess_begin();
                __get_user_asm(tmp, (u64 __user *)src,
                               ret, "q", "", "=r", 8);
                if (likely(!ret))
                        __put_user_asm(tmp, (u64 __user *)dst,
                                       ret, "q", "", "er", 8);
+               __uaccess_end();
                return ret;
        }
        default:
