@@ -134,6 +134,9 @@ extern int __get_user_4(void);
 extern int __get_user_8(void);
 extern int __get_user_bad(void);
 
+#define __uaccess_begin() stac()
+#define __uaccess_end()   clac()
+
 /*
  * This is a type: either unsigned long, if the argument fits into
  * that type, or otherwise unsigned long long.
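These two macros wrap the SMAP stac/clac instructions so the user-access window can be opened and closed from C, instead of inside every individual asm body. A minimal sketch of the pairing they establish (illustrative only: copy_flag_from_user() is a hypothetical helper, and a real caller of the double-underscore internals must already have passed access_ok(); this is essentially what __get_user() expands to after the later hunks):

/* Hypothetical caller: read one int from user space under SMAP. */
static int copy_flag_from_user(int __user *uptr, int *kval)
{
	int err = 0;

	__uaccess_begin();	/* stac: CPU may now touch user pages */
	__get_user_size(*kval, uptr, sizeof(*kval), err, -EFAULT);
	__uaccess_end();	/* clac: user access forbidden again */
	return err;
}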
@@ -193,10 +196,10 @@ __typeof__(__builtin_choose_expr(sizeof(x) > sizeof(0UL), 0ULL, 0UL))
 
 #ifdef CONFIG_X86_32
 #define __put_user_asm_u64(x, addr, err, errret)			\
-	asm volatile(ASM_STAC "\n"					\
+	asm volatile("\n"						\
 		     "1:	movl %%eax,0(%2)\n"			\
 		     "2:	movl %%edx,4(%2)\n"			\
-		     "3: " ASM_CLAC "\n"				\
+		     "3:"						\
 		     ".section .fixup,\"ax\"\n"				\
 		     "4:	movl %3,%0\n"				\
 		     "	jmp 3b\n"					\
@@ -207,10 +210,10 @@ __typeof__(__builtin_choose_expr(sizeof(x) > sizeof(0UL), 0ULL, 0UL))
 		     : "A" (x), "r" (addr), "i" (errret), "0" (err))
 
 #define __put_user_asm_ex_u64(x, addr)					\
-	asm volatile(ASM_STAC "\n"					\
+	asm volatile("\n"						\
 		     "1:	movl %%eax,0(%1)\n"			\
 		     "2:	movl %%edx,4(%1)\n"			\
-		     "3: " ASM_CLAC "\n"				\
+		     "3:"						\
 		     _ASM_EXTABLE_EX(1b, 2b)				\
 		     _ASM_EXTABLE_EX(2b, 3b)				\
 		     : : "A" (x), "r" (addr))
@@ -304,6 +307,10 @@ do {									\
 	}								\
 } while (0)
 
+/*
+ * This doesn't do __uaccess_begin/end - the exception handling
+ * around it must do that.
+ */
 #define __put_user_size_ex(x, ptr, size)				\
 do {									\
 	__chk_user_ptr(ptr);						\
@@ -358,9 +365,9 @@ do {									\
 } while (0)
 
 #define __get_user_asm(x, addr, err, itype, rtype, ltype, errret)	\
-	asm volatile(ASM_STAC "\n"					\
+	asm volatile("\n"						\
 		     "1:	mov"itype" %2,%"rtype"1\n"		\
-		     "2: " ASM_CLAC "\n"				\
+		     "2:\n"						\
 		     ".section .fixup,\"ax\"\n"				\
 		     "3:	mov %3,%0\n"				\
 		     "	xor"itype" %"rtype"1,%"rtype"1\n"		\
@@ -370,6 +377,10 @@ do {									\
 		     : "=r" (err), ltype(x)				\
 		     : "m" (__m(addr)), "i" (errret), "0" (err))
 
+/*
+ * This doesn't do __uaccess_begin/end - the exception handling
+ * around it must do that.
+ */
 #define __get_user_size_ex(x, ptr, size)				\
 do {									\
 	__chk_user_ptr(ptr);						\
@@ -400,15 +411,19 @@ do {									\
 #define __put_user_nocheck(x, ptr, size)			\
 ({								\
 	int __pu_err;						\
+	__uaccess_begin();					\
 	__put_user_size((x), (ptr), (size), __pu_err, -EFAULT);	\
+	__uaccess_end();					\
 	__builtin_expect(__pu_err, 0);				\
 })
 
 #define __get_user_nocheck(x, ptr, size)				\
 ({									\
 	int __gu_err;							\
 	unsigned long __gu_val;						\
+	__uaccess_begin();						\
 	__get_user_size(__gu_val, (ptr), (size), __gu_err, -EFAULT);	\
+	__uaccess_end();						\
 	(x) = (__force __typeof__(*(ptr)))__gu_val;			\
 	__builtin_expect(__gu_err, 0);					\
 })
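These are the bodies behind __put_user() and __get_user(): the "nocheck" forms that trust the caller to have validated the pointer. With begin/end now in the C macro, every size variant gets a correctly paired stac/clac even though the asm bodies no longer contain them. A minimal sketch (set_user_flag() is a hypothetical helper):

/* Hypothetical: publish a ready flag to pre-validated user memory. */
static int set_user_flag(int __user *uptr)
{
	return __put_user(1, uptr);	/* expands to __put_user_nocheck(); 0 or -EFAULT */
}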
@@ -423,9 +438,9 @@ struct __large_struct { unsigned long buf[100]; };
  * aliasing issues.
  */
 #define __put_user_asm(x, addr, err, itype, rtype, ltype, errret)	\
-	asm volatile(ASM_STAC "\n"					\
+	asm volatile("\n"						\
 		     "1:	mov"itype" %"rtype"1,%2\n"		\
-		     "2: " ASM_CLAC "\n"				\
+		     "2:\n"						\
 		     ".section .fixup,\"ax\"\n"				\
 		     "3:	mov %3,%0\n"				\
 		     "	jmp 2b\n"					\
@@ -445,11 +460,11 @@ struct __large_struct { unsigned long buf[100]; };
  */
 #define uaccess_try	do {						\
 	current_thread_info()->uaccess_err = 0;				\
-	stac();								\
+	__uaccess_begin();						\
 	barrier();
 
 #define uaccess_catch(err)						\
-	clac();								\
+	__uaccess_end();						\
 	(err) |= (current_thread_info()->uaccess_err ? -EFAULT : 0);	\
 } while (0)
 
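uaccess_try/uaccess_catch bracket a whole run of exception-table based accesses, which is why the *_ex macros above deliberately do no __uaccess_begin/__uaccess_end of their own. A sketch of the pattern, modeled on how callers such as the signal code use the get_user_try/get_user_catch wrappers (read_pair_from_user() is a hypothetical helper and assumes access_ok() was already checked):

/* Hypothetical: read two fields with a single fault check at the end. */
static int read_pair_from_user(u32 __user *uptr, u32 *a, u32 *b)
{
	int err = 0;

	get_user_try {
		get_user_ex(*a, &uptr[0]);	/* no per-access error check */
		get_user_ex(*b, &uptr[1]);
	} get_user_catch(err);

	return err;	/* -EFAULT if any access faulted */
}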
@@ -547,12 +562,13 @@ extern void __cmpxchg_wrong_size(void)
 	__typeof__(ptr) __uval = (uval);				\
 	__typeof__(*(ptr)) __old = (old);				\
 	__typeof__(*(ptr)) __new = (new);				\
+	__uaccess_begin();						\
 	switch (size) {							\
 	case 1:								\
 	{								\
-		asm volatile("\t" ASM_STAC "\n"				\
+		asm volatile("\n"					\
 			"1:\t" LOCK_PREFIX "cmpxchgb %4, %2\n"		\
-			"2:\t" ASM_CLAC "\n"				\
+			"2:\n"						\
 			"\t.section .fixup, \"ax\"\n"			\
 			"3:\tmov %3, %0\n"				\
 			"\tjmp 2b\n"					\
@@ -566,9 +582,9 @@ extern void __cmpxchg_wrong_size(void)
 	}								\
 	case 2:								\
 	{								\
-		asm volatile("\t" ASM_STAC "\n"				\
+		asm volatile("\n"					\
 			"1:\t" LOCK_PREFIX "cmpxchgw %4, %2\n"		\
-			"2:\t" ASM_CLAC "\n"				\
+			"2:\n"						\
 			"\t.section .fixup, \"ax\"\n"			\
 			"3:\tmov %3, %0\n"				\
 			"\tjmp 2b\n"					\
@@ -582,9 +598,9 @@ extern void __cmpxchg_wrong_size(void)
 	}								\
 	case 4:								\
 	{								\
-		asm volatile("\t" ASM_STAC "\n"				\
+		asm volatile("\n"					\
 			"1:\t" LOCK_PREFIX "cmpxchgl %4, %2\n"		\
-			"2:\t" ASM_CLAC "\n"				\
+			"2:\n"						\
 			"\t.section .fixup, \"ax\"\n"			\
 			"3:\tmov %3, %0\n"				\
 			"\tjmp 2b\n"					\
@@ -601,9 +617,9 @@ extern void __cmpxchg_wrong_size(void)
 		if (!IS_ENABLED(CONFIG_X86_64))				\
 			__cmpxchg_wrong_size();				\
 									\
-		asm volatile("\t" ASM_STAC "\n"				\
+		asm volatile("\n"					\
 			"1:\t" LOCK_PREFIX "cmpxchgq %4, %2\n"		\
-			"2:\t" ASM_CLAC "\n"				\
+			"2:\n"						\
 			"\t.section .fixup, \"ax\"\n"			\
 			"3:\tmov %3, %0\n"				\
 			"\tjmp 2b\n"					\
@@ -618,6 +634,7 @@ extern void __cmpxchg_wrong_size(void)
 	default:							\
 		__cmpxchg_wrong_size();					\
 	}								\
+	__uaccess_end();						\
 	*__uval = __old;						\
 	__ret;								\
 })
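With the final hunk, the whole switch (every size of user-space cmpxchg) sits inside a single begin/end pair instead of one stac/clac per asm body. A usage sketch for the public wrapper built on this macro (bump_user_seq() is hypothetical; user_atomic_cmpxchg_inatomic() returns 0 or -EFAULT and stores the value it actually found in user memory through its first argument):

/* Hypothetical: try to advance a shared user-space sequence word. */
static int bump_user_seq(u32 __user *uptr, u32 expected)
{
	u32 seen;
	int ret = user_atomic_cmpxchg_inatomic(&seen, uptr, expected,
					       expected + 1);
	if (ret)
		return ret;			/* faulted: -EFAULT */
	return seen == expected ? 0 : -EAGAIN;	/* lost the race */
}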