@@ -177,3 +177,120 @@ ENTRY(memcpy_orig)
 .Lend:
 	retq
 ENDPROC(memcpy_orig)
+
+#ifndef CONFIG_UML
+/*
+ * memcpy_mcsafe - memory copy with machine check exception handling
+ * Note that we only catch machine checks when reading the source addresses.
+ * Writes to target are posted and don't generate machine checks.
+ */
+ENTRY(memcpy_mcsafe)
+	cmpl $8, %edx
+	/* Less than 8 bytes? Go to byte copy loop */
+	jb .L_no_whole_words
+
+	/* Check for bad alignment of source */
+	testl $7, %esi
+	/* Already aligned */
+	jz .L_8byte_aligned
+
+	/* Copy one byte at a time until source is 8-byte aligned */
+	movl %esi, %ecx
+	andl $7, %ecx
+	subl $8, %ecx
+	negl %ecx
+	subl %ecx, %edx
+.L_copy_leading_bytes:
+	movb (%rsi), %al
+	movb %al, (%rdi)
+	incq %rsi
+	incq %rdi
+	decl %ecx
+	jnz .L_copy_leading_bytes
+
+.L_8byte_aligned:
+	/* Figure out how many whole cache lines (64-bytes) to copy */
+	movl %edx, %ecx
+	andl $63, %edx
+	shrl $6, %ecx
+	jz .L_no_whole_cache_lines
+
+	/* Loop copying whole cache lines */
+.L_cache_w0: movq (%rsi), %r8
+.L_cache_w1: movq 1*8(%rsi), %r9
+.L_cache_w2: movq 2*8(%rsi), %r10
+.L_cache_w3: movq 3*8(%rsi), %r11
+	movq %r8, (%rdi)
+	movq %r9, 1*8(%rdi)
+	movq %r10, 2*8(%rdi)
+	movq %r11, 3*8(%rdi)
+.L_cache_w4: movq 4*8(%rsi), %r8
+.L_cache_w5: movq 5*8(%rsi), %r9
+.L_cache_w6: movq 6*8(%rsi), %r10
+.L_cache_w7: movq 7*8(%rsi), %r11
+	movq %r8, 4*8(%rdi)
+	movq %r9, 5*8(%rdi)
+	movq %r10, 6*8(%rdi)
+	movq %r11, 7*8(%rdi)
+	leaq 64(%rsi), %rsi
+	leaq 64(%rdi), %rdi
+	decl %ecx
+	jnz .L_cache_w0
+
+	/* Are there any trailing 8-byte words? */
+.L_no_whole_cache_lines:
+	movl %edx, %ecx
+	andl $7, %edx
+	shrl $3, %ecx
+	jz .L_no_whole_words
+
+	/* Copy trailing words */
+.L_copy_trailing_words:
+	movq (%rsi), %r8
+	mov %r8, (%rdi)
+	leaq 8(%rsi), %rsi
+	leaq 8(%rdi), %rdi
+	decl %ecx
+	jnz .L_copy_trailing_words
+
+	/* Any trailing bytes? */
+.L_no_whole_words:
+	andl %edx, %edx
+	jz .L_done_memcpy_trap
+
+	/* Copy trailing bytes */
+	movl %edx, %ecx
+.L_copy_trailing_bytes:
+	movb (%rsi), %al
+	movb %al, (%rdi)
+	incq %rsi
+	incq %rdi
+	decl %ecx
+	jnz .L_copy_trailing_bytes
+
+	/* Copy successful. Return zero */
+.L_done_memcpy_trap:
+	xorq %rax, %rax
+	ret
+ENDPROC(memcpy_mcsafe)
+
+	.section .fixup, "ax"
+	/* Return 1 for any failure */
+.L_memcpy_mcsafe_fail:
+	mov $1, %rax
+	ret
+
+	.previous
+
+	_ASM_EXTABLE_FAULT(.L_copy_leading_bytes, .L_memcpy_mcsafe_fail)
+	_ASM_EXTABLE_FAULT(.L_cache_w0, .L_memcpy_mcsafe_fail)
+	_ASM_EXTABLE_FAULT(.L_cache_w1, .L_memcpy_mcsafe_fail)
+	_ASM_EXTABLE_FAULT(.L_cache_w2, .L_memcpy_mcsafe_fail)
+	_ASM_EXTABLE_FAULT(.L_cache_w3, .L_memcpy_mcsafe_fail)
+	_ASM_EXTABLE_FAULT(.L_cache_w4, .L_memcpy_mcsafe_fail)
+	_ASM_EXTABLE_FAULT(.L_cache_w5, .L_memcpy_mcsafe_fail)
+	_ASM_EXTABLE_FAULT(.L_cache_w6, .L_memcpy_mcsafe_fail)
+	_ASM_EXTABLE_FAULT(.L_cache_w7, .L_memcpy_mcsafe_fail)
+	_ASM_EXTABLE_FAULT(.L_copy_trailing_words, .L_memcpy_mcsafe_fail)
+	_ASM_EXTABLE_FAULT(.L_copy_trailing_bytes, .L_memcpy_mcsafe_fail)
+#endif
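
Note on the calling convention (not part of the patch): every labeled source load above has an _ASM_EXTABLE_FAULT entry that redirects a machine-check fault to .L_memcpy_mcsafe_fail, so a successful copy returns zero in %rax and a poisoned read returns 1. A minimal caller sketch, assuming a C prototype along the lines of int memcpy_mcsafe(void *dst, const void *src, size_t cnt) — the actual declaration lives in the header change, which is not shown in this hunk:

/*
 * Hypothetical caller, for illustration only. Assumes the prototype
 * int memcpy_mcsafe(void *dst, const void *src, size_t cnt), with a
 * zero return on success and non-zero after a machine check.
 */
static int copy_from_media(void *dst, const void *src, size_t len)
{
	if (memcpy_mcsafe(dst, src, len))
		return -EIO;	/* source was poisoned; don't trust dst */
	return 0;
}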
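The staging logic is easier to follow in C: the andl/subl/negl sequence computes 8 - (src & 7) leading bytes, then the routine copies whole 64-byte cache lines, trailing 8-byte words, and finally trailing bytes. A rough model of that staging, fault handling omitted and for illustration only:

#include <string.h>

/* Rough C model of the copy staging above; not part of the patch. */
static void mcsafe_model(char *d, const char *s, size_t n)
{
	size_t chunk;

	if (n >= 8 && ((unsigned long)s & 7)) {
		chunk = 8 - ((unsigned long)s & 7);	/* the subl $8 + negl trick */
		n -= chunk;
		while (chunk--)
			*d++ = *s++;	/* .L_copy_leading_bytes */
	}
	for (chunk = n >> 6; chunk; chunk--, s += 64, d += 64)
		memcpy(d, s, 64);	/* .L_cache_w0..w7: eight labeled 8-byte loads */
	for (chunk = (n & 63) >> 3; chunk; chunk--, s += 8, d += 8)
		memcpy(d, s, 8);	/* .L_copy_trailing_words */
	for (chunk = n & 7; chunk; chunk--)
		*d++ = *s++;		/* .L_copy_trailing_bytes */
}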