@@ -333,7 +333,26 @@ do { \
333
333
} while (0)
334
334
335
335
#ifdef CONFIG_X86_32
/*
 * 32-bit: an 8-byte get_user must be done as two 4-byte loads, with the
 * result assembled in the %edx:%eax register pair (the "A" constraint).
 * On a fault, the fixup stores the error code in 'retval' and zeroes
 * both result halves, matching the 64-bit variant's semantics.
 *
 * Notes:
 *  - No STAC/CLAC here: the caller (__get_user_size, reached via
 *    __get_user_nocheck) already brackets this with
 *    __uaccess_begin()/__uaccess_end(), which perform them.  A CLAC
 *    inside the asm would also end user access while still inside the
 *    caller's begin/end window.
 *  - "=&A" must be an early-clobber output: %eax is written by the
 *    first load before the second load's memory operand (%3) is
 *    consumed, so the compiler must not use %eax/%edx when computing
 *    either operand's address.
 *  - __ptr is evaluated once up front so 'ptr' has no side effects
 *    duplicated across the two memory operands.
 */
#define __get_user_asm_u64(x, ptr, retval, errret)			\
({									\
	__typeof__(ptr) __ptr = (ptr);					\
	asm volatile("\n"						\
		     "1:	movl %2,%%eax\n"			\
		     "2:	movl %3,%%edx\n"			\
		     "3:\n"						\
		     ".section .fixup,\"ax\"\n"				\
		     "4:	mov %4,%0\n"				\
		     "	xorl %%eax,%%eax\n"				\
		     "	xorl %%edx,%%edx\n"				\
		     "	jmp 3b\n"					\
		     ".previous\n"					\
		     _ASM_EXTABLE(1b, 4b)				\
		     _ASM_EXTABLE(2b, 4b)				\
		     : "=r" (retval), "=&A"(x)				\
		     : "m" (__m(__ptr)), "m" __m(((u32 *)(__ptr)) + 1),	\
		       "i" (errret), "0" (retval));			\
})

337
356
#define __get_user_asm_ex_u64 (x , ptr ) (x) = __get_user_bad()
338
357
#else
339
358
#define __get_user_asm_u64 (x , ptr , retval , errret ) \
@@ -420,7 +439,7 @@ do { \
420
439
#define __get_user_nocheck (x , ptr , size ) \
421
440
({ \
422
441
int __gu_err; \
423
- unsigned long __gu_val; \
442
+ __inttype(*(ptr)) __gu_val; \
424
443
__uaccess_begin(); \
425
444
__get_user_size(__gu_val, (ptr), (size), __gu_err, -EFAULT); \
426
445
__uaccess_end(); \
0 commit comments