/*
 * User address space access functions.
 * The non-inlined parts of asm-i386/uaccess.h are here.
 *
 * Copyright 1997 Andi Kleen <ak@muc.de>
 * Copyright 1997 Linus Torvalds
 */
#include <linux/export.h>
#include <linux/uaccess.h>
#include <asm/mmx.h>
#include <asm/asm.h>

#ifdef CONFIG_X86_INTEL_USERCOPY
/*
 * Alignment at which movsl is preferred for bulk memory copies.
 */
struct movsl_mask movsl_mask __read_mostly;
#endif

static inline int __movsl_is_ok(unsigned long a1, unsigned long a2, unsigned long n)
{
#ifdef CONFIG_X86_INTEL_USERCOPY
        if (n >= 64 && ((a1 ^ a2) & movsl_mask.mask))
                return 0;
#endif
        return 1;
}
#define movsl_is_ok(a1, a2, n) \
        __movsl_is_ok((unsigned long)(a1), (unsigned long)(a2), (n))

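/*
 * Illustration (not in the original file): a minimal plain-C sketch of the
 * heuristic above. movsl_mask.mask is set by the CPU setup code; assuming a
 * Pentium-class mask of 7, rep movsl is kept for short copies, and for long
 * copies only when source and destination share the same misalignment
 * within the mask. The names ex_movsl_ok and EX_MOVSL_MASK are invented
 * for this sketch.
 */
#if 0 /* example sketch, never compiled */
#define EX_MOVSL_MASK 7UL       /* hypothetical movsl_mask.mask value */

static int ex_movsl_ok(unsigned long a1, unsigned long a2, unsigned long n)
{
        /* 0x1003 -> 0x2003 (both off by 3): ok; 0x1003 -> 0x2004: not ok */
        return n < 64 || !((a1 ^ a2) & EX_MOVSL_MASK);
}
#endif
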
/*
 * Zero Userspace
 */

#define __do_clear_user(addr, size)                                     \
do {                                                                    \
        int __d0;                                                       \
        might_fault();                                                  \
        __asm__ __volatile__(                                           \
                ASM_STAC "\n"                                           \
                "0:     rep; stosl\n"                                   \
                "       movl %2,%0\n"                                   \
                "1:     rep; stosb\n"                                   \
                "2: " ASM_CLAC "\n"                                     \
                ".section .fixup,\"ax\"\n"                              \
                "3:     lea 0(%2,%0,4),%0\n"                            \
                "       jmp 2b\n"                                       \
                ".previous\n"                                           \
                _ASM_EXTABLE(0b,3b)                                     \
                _ASM_EXTABLE(1b,2b)                                     \
                : "=&c"(size), "=&D" (__d0)                             \
                : "r"(size & 3), "0"(size / 4), "1"(addr), "a"(0));     \
} while (0)
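
/*
 * Illustration (not in the original file): how the fixup entry "3:" above
 * recovers the number of bytes still uncleared. If "rep; stosl" faults,
 * ECX (%0) holds the longwords not yet stored and %2 holds the byte tail
 * (size & 3), so "lea 0(%2,%0,4),%0" computes tail + 4 * longwords_left.
 * A hypothetical plain-C model of that arithmetic:
 */
#if 0 /* example sketch, never compiled */
static unsigned long ex_clear_fixup(unsigned long longwords_left,
                                    unsigned long tail)
{
        /* what the lea places back in the count register */
        return tail + 4 * longwords_left;
}
#endif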

/**
 * clear_user: - Zero a block of memory in user space.
 * @to:   Destination address, in user space.
 * @n:    Number of bytes to zero.
 *
 * Zero a block of memory in user space.
 *
 * Returns number of bytes that could not be cleared.
 * On success, this will be zero.
 */
unsigned long
clear_user(void __user *to, unsigned long n)
{
        might_fault();
        if (access_ok(VERIFY_WRITE, to, n))
                __do_clear_user(to, n);
        return n;
}
EXPORT_SYMBOL(clear_user);
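
/*
 * Illustration (not in the original file): a hedged usage sketch. A caller
 * treats a nonzero return as a partial fault; the names ex_zero_user_buffer,
 * ubuf and len are invented for this sketch.
 */
#if 0 /* example sketch, never compiled */
static int ex_zero_user_buffer(void __user *ubuf, unsigned long len)
{
        /* clear_user() performs its own access_ok() check */
        if (clear_user(ubuf, len))
                return -EFAULT; /* some bytes could not be cleared */
        return 0;
}
#endif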

/**
 * __clear_user: - Zero a block of memory in user space, with less checking.
 * @to:   Destination address, in user space.
 * @n:    Number of bytes to zero.
 *
 * Zero a block of memory in user space.  Caller must check
 * the specified block with access_ok() before calling this function.
 *
 * Returns number of bytes that could not be cleared.
 * On success, this will be zero.
 */
unsigned long
__clear_user(void __user *to, unsigned long n)
{
        __do_clear_user(to, n);
        return n;
}
EXPORT_SYMBOL(__clear_user);
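
/*
 * Illustration (not in the original file): the caller-checks pattern the
 * kernel-doc above requires, useful when one access_ok() can cover several
 * accesses to the same range. Names are invented for this sketch.
 */
#if 0 /* example sketch, never compiled */
static unsigned long ex_clear_checked(void __user *ubuf, unsigned long len)
{
        if (!access_ok(VERIFY_WRITE, ubuf, len))
                return len;     /* nothing was cleared */
        return __clear_user(ubuf, len);
}
#endif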

#ifdef CONFIG_X86_INTEL_USERCOPY
static unsigned long
__copy_user_intel(void __user *to, const void *from, unsigned long size)
{
        int d0, d1;
        __asm__ __volatile__(
                       "       .align 2,0x90\n"
                       "1:     movl 32(%4), %%eax\n"
                       "       cmpl $67, %0\n"
                       "       jbe 3f\n"
                       "2:     movl 64(%4), %%eax\n"
                       "       .align 2,0x90\n"
                       "3:     movl 0(%4), %%eax\n"
                       "4:     movl 4(%4), %%edx\n"
                       "5:     movl %%eax, 0(%3)\n"
                       "6:     movl %%edx, 4(%3)\n"
                       "7:     movl 8(%4), %%eax\n"
                       "8:     movl 12(%4),%%edx\n"
                       "9:     movl %%eax, 8(%3)\n"
                       "10:    movl %%edx, 12(%3)\n"
                       "11:    movl 16(%4), %%eax\n"
                       "12:    movl 20(%4), %%edx\n"
                       "13:    movl %%eax, 16(%3)\n"
                       "14:    movl %%edx, 20(%3)\n"
                       "15:    movl 24(%4), %%eax\n"
                       "16:    movl 28(%4), %%edx\n"
                       "17:    movl %%eax, 24(%3)\n"
                       "18:    movl %%edx, 28(%3)\n"
                       "19:    movl 32(%4), %%eax\n"
                       "20:    movl 36(%4), %%edx\n"
                       "21:    movl %%eax, 32(%3)\n"
                       "22:    movl %%edx, 36(%3)\n"
                       "23:    movl 40(%4), %%eax\n"
                       "24:    movl 44(%4), %%edx\n"
                       "25:    movl %%eax, 40(%3)\n"
                       "26:    movl %%edx, 44(%3)\n"
                       "27:    movl 48(%4), %%eax\n"
                       "28:    movl 52(%4), %%edx\n"
                       "29:    movl %%eax, 48(%3)\n"
                       "30:    movl %%edx, 52(%3)\n"
                       "31:    movl 56(%4), %%eax\n"
                       "32:    movl 60(%4), %%edx\n"
                       "33:    movl %%eax, 56(%3)\n"
                       "34:    movl %%edx, 60(%3)\n"
                       "       addl $-64, %0\n"
                       "       addl $64, %4\n"
                       "       addl $64, %3\n"
                       "       cmpl $63, %0\n"
                       "       ja  1b\n"
                       "35:    movl  %0, %%eax\n"
                       "       shrl  $2, %0\n"
                       "       andl  $3, %%eax\n"
                       "       cld\n"
                       "99:    rep; movsl\n"
                       "36:    movl %%eax, %0\n"
                       "37:    rep; movsb\n"
                       "100:\n"
                       ".section .fixup,\"ax\"\n"
                       "101:   lea 0(%%eax,%0,4),%0\n"
                       "       jmp 100b\n"
                       ".previous\n"
                       _ASM_EXTABLE(1b,100b)
                       _ASM_EXTABLE(2b,100b)
                       _ASM_EXTABLE(3b,100b)
                       _ASM_EXTABLE(4b,100b)
                       _ASM_EXTABLE(5b,100b)
                       _ASM_EXTABLE(6b,100b)
                       _ASM_EXTABLE(7b,100b)
                       _ASM_EXTABLE(8b,100b)
                       _ASM_EXTABLE(9b,100b)
                       _ASM_EXTABLE(10b,100b)
                       _ASM_EXTABLE(11b,100b)
                       _ASM_EXTABLE(12b,100b)
                       _ASM_EXTABLE(13b,100b)
                       _ASM_EXTABLE(14b,100b)
                       _ASM_EXTABLE(15b,100b)
                       _ASM_EXTABLE(16b,100b)
                       _ASM_EXTABLE(17b,100b)
                       _ASM_EXTABLE(18b,100b)
                       _ASM_EXTABLE(19b,100b)
                       _ASM_EXTABLE(20b,100b)
                       _ASM_EXTABLE(21b,100b)
                       _ASM_EXTABLE(22b,100b)
                       _ASM_EXTABLE(23b,100b)
                       _ASM_EXTABLE(24b,100b)
                       _ASM_EXTABLE(25b,100b)
                       _ASM_EXTABLE(26b,100b)
                       _ASM_EXTABLE(27b,100b)
                       _ASM_EXTABLE(28b,100b)
                       _ASM_EXTABLE(29b,100b)
                       _ASM_EXTABLE(30b,100b)
                       _ASM_EXTABLE(31b,100b)
                       _ASM_EXTABLE(32b,100b)
                       _ASM_EXTABLE(33b,100b)
                       _ASM_EXTABLE(34b,100b)
                       _ASM_EXTABLE(35b,100b)
                       _ASM_EXTABLE(36b,100b)
                       _ASM_EXTABLE(37b,100b)
                       _ASM_EXTABLE(99b,101b)
                       : "=&c"(size), "=&D" (d0), "=&S" (d1)
                       :  "1"(to), "2"(from), "0"(size)
                       : "eax", "edx", "memory");
        return size;
}
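
/*
 * Illustration (not in the original file): the shape of the copy above in
 * plain C. 64-byte blocks go through the unrolled body (labels 3-34), the
 * 0-63 byte tail through rep movsl + rep movsb (labels 35-37), and the
 * dummy loads at labels 1 and 2 touch the next cache lines ahead of the
 * copy as a software prefetch. Names are invented for this sketch.
 */
#if 0 /* example sketch, never compiled */
static unsigned long ex_copy_shape(char *dst, const char *src,
                                   unsigned long size)
{
        while (size > 63) {                     /* unrolled 64-byte body */
                __builtin_memcpy(dst, src, 64);
                dst += 64; src += 64; size -= 64;
        }
        __builtin_memcpy(dst, src, size);       /* movsl/movsb tail */
        return 0;                               /* bytes left uncopied */
}
#endif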

static unsigned long __copy_user_intel_nocache(void *to,
                                const void __user *from, unsigned long size)
{
        int d0, d1;

        __asm__ __volatile__(
               "        .align 2,0x90\n"
               "0:      movl 32(%4), %%eax\n"
               "        cmpl $67, %0\n"
               "        jbe 2f\n"
               "1:      movl 64(%4), %%eax\n"
               "        .align 2,0x90\n"
               "2:      movl 0(%4), %%eax\n"
               "21:     movl 4(%4), %%edx\n"
               "        movnti %%eax, 0(%3)\n"
               "        movnti %%edx, 4(%3)\n"
               "3:      movl 8(%4), %%eax\n"
               "31:     movl 12(%4),%%edx\n"
               "        movnti %%eax, 8(%3)\n"
               "        movnti %%edx, 12(%3)\n"
               "4:      movl 16(%4), %%eax\n"
               "41:     movl 20(%4), %%edx\n"
               "        movnti %%eax, 16(%3)\n"
               "        movnti %%edx, 20(%3)\n"
               "10:     movl 24(%4), %%eax\n"
               "51:     movl 28(%4), %%edx\n"
               "        movnti %%eax, 24(%3)\n"
               "        movnti %%edx, 28(%3)\n"
               "11:     movl 32(%4), %%eax\n"
               "61:     movl 36(%4), %%edx\n"
               "        movnti %%eax, 32(%3)\n"
               "        movnti %%edx, 36(%3)\n"
               "12:     movl 40(%4), %%eax\n"
               "71:     movl 44(%4), %%edx\n"
               "        movnti %%eax, 40(%3)\n"
               "        movnti %%edx, 44(%3)\n"
               "13:     movl 48(%4), %%eax\n"
               "81:     movl 52(%4), %%edx\n"
               "        movnti %%eax, 48(%3)\n"
               "        movnti %%edx, 52(%3)\n"
               "14:     movl 56(%4), %%eax\n"
               "91:     movl 60(%4), %%edx\n"
               "        movnti %%eax, 56(%3)\n"
               "        movnti %%edx, 60(%3)\n"
               "        addl $-64, %0\n"
               "        addl $64, %4\n"
               "        addl $64, %3\n"
               "        cmpl $63, %0\n"
               "        ja  0b\n"
               "        sfence \n"
               "5:      movl  %0, %%eax\n"
               "        shrl  $2, %0\n"
               "        andl $3, %%eax\n"
               "        cld\n"
               "6:      rep; movsl\n"
               "        movl %%eax,%0\n"
               "7:      rep; movsb\n"
               "8:\n"
               ".section .fixup,\"ax\"\n"
               "9:      lea 0(%%eax,%0,4),%0\n"
               "16:     jmp 8b\n"
               ".previous\n"
               _ASM_EXTABLE(0b,16b)
               _ASM_EXTABLE(1b,16b)
               _ASM_EXTABLE(2b,16b)
               _ASM_EXTABLE(21b,16b)
               _ASM_EXTABLE(3b,16b)
               _ASM_EXTABLE(31b,16b)
               _ASM_EXTABLE(4b,16b)
               _ASM_EXTABLE(41b,16b)
               _ASM_EXTABLE(10b,16b)
               _ASM_EXTABLE(51b,16b)
               _ASM_EXTABLE(11b,16b)
               _ASM_EXTABLE(61b,16b)
               _ASM_EXTABLE(12b,16b)
               _ASM_EXTABLE(71b,16b)
               _ASM_EXTABLE(13b,16b)
               _ASM_EXTABLE(81b,16b)
               _ASM_EXTABLE(14b,16b)
               _ASM_EXTABLE(91b,16b)
               _ASM_EXTABLE(6b,9b)
               _ASM_EXTABLE(7b,16b)
               : "=&c"(size), "=&D" (d0), "=&S" (d1)
               :  "1"(to), "2"(from), "0"(size)
               : "eax", "edx", "memory");
        return size;
}
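
/*
 * Illustration (not in the original file): a userspace equivalent of the
 * movnti/sfence pairing above, via SSE2 intrinsics. movnti stores bypass
 * the cache (the destination is not expected to be read back soon), and
 * the sfence makes the weakly-ordered stores globally visible before the
 * function returns. Names are invented for this sketch.
 */
#if 0 /* example sketch, never compiled */
#include <emmintrin.h>

static void ex_nocache_store(int *dst, const int *src, unsigned long words)
{
        unsigned long i;

        for (i = 0; i < words; i++)
                _mm_stream_si32(&dst[i], src[i]);       /* movnti */
        _mm_sfence();                   /* order the non-temporal stores */
}
#endif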

#else

/*
 * Leave these declared but undefined; there should be no references
 * to them.
 */
unsigned long __copy_user_intel(void __user *to, const void *from,
                                        unsigned long size);
#endif /* CONFIG_X86_INTEL_USERCOPY */

/* Generic arbitrary-sized copy.  */
#define __copy_user(to, from, size)                                     \
do {                                                                    \
        int __d0, __d1, __d2;                                           \
        __asm__ __volatile__(                                           \
                "       cmp  $7,%0\n"                                   \
                "       jbe  1f\n"                                      \
                "       movl %1,%0\n"                                   \
                "       negl %0\n"                                      \
                "       andl $7,%0\n"                                   \
                "       subl %0,%3\n"                                   \
                "4:     rep; movsb\n"                                   \
                "       movl %3,%0\n"                                   \
                "       shrl $2,%0\n"                                   \
                "       andl $3,%3\n"                                   \
                "       .align 2,0x90\n"                                \
                "0:     rep; movsl\n"                                   \
                "       movl %3,%0\n"                                   \
                "1:     rep; movsb\n"                                   \
                "2:\n"                                                  \
                ".section .fixup,\"ax\"\n"                              \
                "5:     addl %3,%0\n"                                   \
                "       jmp 2b\n"                                       \
                "3:     lea 0(%3,%0,4),%0\n"                            \
                "       jmp 2b\n"                                       \
                ".previous\n"                                           \
                _ASM_EXTABLE(4b,5b)                                     \
                _ASM_EXTABLE(0b,3b)                                     \
                _ASM_EXTABLE(1b,2b)                                     \
                : "=&c"(size), "=&D" (__d0), "=&S" (__d1), "=r"(__d2)   \
                : "3"(size), "0"(size), "1"(to), "2"(from)              \
                : "memory");                                            \
} while (0)
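
/*
 * Illustration (not in the original file): the alignment dance above in
 * plain C. Copies longer than 7 bytes first advance the destination to an
 * 8-byte boundary with byte moves, then move the bulk as longwords, then
 * finish the tail as bytes. Names are invented for this sketch.
 */
#if 0 /* example sketch, never compiled */
static void ex_copy_aligned(char *dst, const char *src, unsigned long size)
{
        unsigned long head = (-(unsigned long)dst) & 7; /* negl; andl $7 */

        if (size > 7)
                for (; head; head--, size--)            /* head movsb */
                        *dst++ = *src++;
        for (; size >= 4; size -= 4) {                  /* rep movsl */
                __builtin_memcpy(dst, src, 4);
                dst += 4;
                src += 4;
        }
        while (size--)                                  /* tail movsb */
                *dst++ = *src++;
}
#endif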

unsigned long __copy_user_ll(void *to, const void *from, unsigned long n)
{
        stac();
        if (movsl_is_ok(to, from, n))
                __copy_user(to, from, n);
        else
                n = __copy_user_intel(to, from, n);
        clac();
        return n;
}
EXPORT_SYMBOL(__copy_user_ll);

unsigned long __copy_from_user_ll_nocache_nozero(void *to, const void __user *from,
                                        unsigned long n)
{
        stac();
#ifdef CONFIG_X86_INTEL_USERCOPY
        if (n > 64 && static_cpu_has(X86_FEATURE_XMM2))
                n = __copy_user_intel_nocache(to, from, n);
        else
                __copy_user(to, from, n);
#else
        __copy_user(to, from, n);
#endif
        clac();
        return n;
}
EXPORT_SYMBOL(__copy_from_user_ll_nocache_nozero);
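
/*
 * Illustration (not in the original file): both entry points above bracket
 * the copy with stac()/clac(). On SMAP-capable CPUs, kernel accesses to
 * user pages fault unless EFLAGS.AC is set; stac() opens that window and
 * clac() closes it, so a stray kernel dereference of a user pointer still
 * traps. The nocache path is only taken for copies over 64 bytes on SSE2
 * hardware, where avoiding cache pollution can pay off. A hypothetical
 * caller, with invented names:
 */
#if 0 /* example sketch, never compiled */
static unsigned long ex_pull_uncached(void *kbuf, const void __user *ubuf,
                                      unsigned long len)
{
        /* returns the number of bytes that could not be copied */
        return __copy_from_user_ll_nocache_nozero(kbuf, ubuf, len);
}
#endif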