Skip to content

Commit 4b842e4

Browse files
author
Al Viro
committed
x86: get rid of small constant size cases in raw_copy_{to,from}_user()
Very few call sites where that would be triggered remain, and none of those is anywhere near hot enough to bother.

Signed-off-by: Al Viro <[email protected]>
1 parent 71c3313 commit 4b842e4

File tree

3 files changed

+2
-145
lines changed

3 files changed

+2
-145
lines changed

arch/x86/include/asm/uaccess.h

Lines changed: 0 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -378,18 +378,6 @@ do { \
378378
: "=r" (err), ltype(x) \
379379
: "m" (__m(addr)), "i" (errret), "0" (err))
380380

381-
#define __get_user_asm_nozero(x, addr, err, itype, rtype, ltype, errret) \
382-
asm volatile("\n" \
383-
"1: mov"itype" %2,%"rtype"1\n" \
384-
"2:\n" \
385-
".section .fixup,\"ax\"\n" \
386-
"3: mov %3,%0\n" \
387-
" jmp 2b\n" \
388-
".previous\n" \
389-
_ASM_EXTABLE_UA(1b, 3b) \
390-
: "=r" (err), ltype(x) \
391-
: "m" (__m(addr)), "i" (errret), "0" (err))
392-
393381
/*
394382
* This doesn't do __uaccess_begin/end - the exception handling
395383
* around it must do that.

arch/x86/include/asm/uaccess_32.h

Lines changed: 0 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -23,33 +23,6 @@ raw_copy_to_user(void __user *to, const void *from, unsigned long n)
2323
static __always_inline unsigned long
2424
raw_copy_from_user(void *to, const void __user *from, unsigned long n)
2525
{
26-
if (__builtin_constant_p(n)) {
27-
unsigned long ret;
28-
29-
switch (n) {
30-
case 1:
31-
ret = 0;
32-
__uaccess_begin_nospec();
33-
__get_user_asm_nozero(*(u8 *)to, from, ret,
34-
"b", "b", "=q", 1);
35-
__uaccess_end();
36-
return ret;
37-
case 2:
38-
ret = 0;
39-
__uaccess_begin_nospec();
40-
__get_user_asm_nozero(*(u16 *)to, from, ret,
41-
"w", "w", "=r", 2);
42-
__uaccess_end();
43-
return ret;
44-
case 4:
45-
ret = 0;
46-
__uaccess_begin_nospec();
47-
__get_user_asm_nozero(*(u32 *)to, from, ret,
48-
"l", "k", "=r", 4);
49-
__uaccess_end();
50-
return ret;
51-
}
52-
}
5326
return __copy_user_ll(to, (__force const void *)from, n);
5427
}
5528

arch/x86/include/asm/uaccess_64.h

Lines changed: 2 additions & 106 deletions
Original file line numberDiff line numberDiff line change
@@ -65,117 +65,13 @@ copy_to_user_mcsafe(void *to, const void *from, unsigned len)
6565
static __always_inline __must_check unsigned long
6666
raw_copy_from_user(void *dst, const void __user *src, unsigned long size)
6767
{
68-
int ret = 0;
69-
70-
if (!__builtin_constant_p(size))
71-
return copy_user_generic(dst, (__force void *)src, size);
72-
switch (size) {
73-
case 1:
74-
__uaccess_begin_nospec();
75-
__get_user_asm_nozero(*(u8 *)dst, (u8 __user *)src,
76-
ret, "b", "b", "=q", 1);
77-
__uaccess_end();
78-
return ret;
79-
case 2:
80-
__uaccess_begin_nospec();
81-
__get_user_asm_nozero(*(u16 *)dst, (u16 __user *)src,
82-
ret, "w", "w", "=r", 2);
83-
__uaccess_end();
84-
return ret;
85-
case 4:
86-
__uaccess_begin_nospec();
87-
__get_user_asm_nozero(*(u32 *)dst, (u32 __user *)src,
88-
ret, "l", "k", "=r", 4);
89-
__uaccess_end();
90-
return ret;
91-
case 8:
92-
__uaccess_begin_nospec();
93-
__get_user_asm_nozero(*(u64 *)dst, (u64 __user *)src,
94-
ret, "q", "", "=r", 8);
95-
__uaccess_end();
96-
return ret;
97-
case 10:
98-
__uaccess_begin_nospec();
99-
__get_user_asm_nozero(*(u64 *)dst, (u64 __user *)src,
100-
ret, "q", "", "=r", 10);
101-
if (likely(!ret))
102-
__get_user_asm_nozero(*(u16 *)(8 + (char *)dst),
103-
(u16 __user *)(8 + (char __user *)src),
104-
ret, "w", "w", "=r", 2);
105-
__uaccess_end();
106-
return ret;
107-
case 16:
108-
__uaccess_begin_nospec();
109-
__get_user_asm_nozero(*(u64 *)dst, (u64 __user *)src,
110-
ret, "q", "", "=r", 16);
111-
if (likely(!ret))
112-
__get_user_asm_nozero(*(u64 *)(8 + (char *)dst),
113-
(u64 __user *)(8 + (char __user *)src),
114-
ret, "q", "", "=r", 8);
115-
__uaccess_end();
116-
return ret;
117-
default:
118-
return copy_user_generic(dst, (__force void *)src, size);
119-
}
68+
return copy_user_generic(dst, (__force void *)src, size);
12069
}
12170

12271
static __always_inline __must_check unsigned long
12372
raw_copy_to_user(void __user *dst, const void *src, unsigned long size)
12473
{
125-
int ret = 0;
126-
127-
if (!__builtin_constant_p(size))
128-
return copy_user_generic((__force void *)dst, src, size);
129-
switch (size) {
130-
case 1:
131-
__uaccess_begin();
132-
__put_user_asm(*(u8 *)src, (u8 __user *)dst,
133-
ret, "b", "b", "iq", 1);
134-
__uaccess_end();
135-
return ret;
136-
case 2:
137-
__uaccess_begin();
138-
__put_user_asm(*(u16 *)src, (u16 __user *)dst,
139-
ret, "w", "w", "ir", 2);
140-
__uaccess_end();
141-
return ret;
142-
case 4:
143-
__uaccess_begin();
144-
__put_user_asm(*(u32 *)src, (u32 __user *)dst,
145-
ret, "l", "k", "ir", 4);
146-
__uaccess_end();
147-
return ret;
148-
case 8:
149-
__uaccess_begin();
150-
__put_user_asm(*(u64 *)src, (u64 __user *)dst,
151-
ret, "q", "", "er", 8);
152-
__uaccess_end();
153-
return ret;
154-
case 10:
155-
__uaccess_begin();
156-
__put_user_asm(*(u64 *)src, (u64 __user *)dst,
157-
ret, "q", "", "er", 10);
158-
if (likely(!ret)) {
159-
asm("":::"memory");
160-
__put_user_asm(4[(u16 *)src], 4 + (u16 __user *)dst,
161-
ret, "w", "w", "ir", 2);
162-
}
163-
__uaccess_end();
164-
return ret;
165-
case 16:
166-
__uaccess_begin();
167-
__put_user_asm(*(u64 *)src, (u64 __user *)dst,
168-
ret, "q", "", "er", 16);
169-
if (likely(!ret)) {
170-
asm("":::"memory");
171-
__put_user_asm(1[(u64 *)src], 1 + (u64 __user *)dst,
172-
ret, "q", "", "er", 8);
173-
}
174-
__uaccess_end();
175-
return ret;
176-
default:
177-
return copy_user_generic((__force void *)dst, src, size);
178-
}
74+
return copy_user_generic((__force void *)dst, src, size);
17975
}
18076

18177
static __always_inline __must_check

0 commit comments

Comments
 (0)