@@ -305,6 +305,34 @@ where
     }
 }
 
+// Split a memory region ptr..ptr + len into three parts:
+//   +--------+
+//   | small0 | Chunk smaller than 8 bytes
+//   +--------+
+//   | big    | Chunk 8-byte aligned, and size a multiple of 8 bytes
+//   +--------+
+//   | small1 | Chunk smaller than 8 bytes
+//   +--------+
+fn region_as_aligned_chunks(ptr: *const u8, len: usize) -> (u8, usize, u8) {
+    let small0_size = (8 - ptr as usize % 8) as u8;
+    let small1_size = ((len - small0_size as usize) % 8) as u8;
+    let big_size = len - small0_size as usize - small1_size as usize;
+
+    (small0_size, big_size, small1_size)
+}
+
+unsafe fn copy_quadwords(src: *const u8, dst: *mut u8, len: usize) {
+    unsafe {
+        asm!(
+            "rep movsq (%rsi), (%rdi)",
+            inout("rcx") len / 8 => _,
+            inout("rdi") dst => _,
+            inout("rsi") src => _,
+            options(att_syntax, nostack, preserves_flags)
+        );
+    }
+}
+
 /// Copies `len` bytes of data from enclave pointer `src` to userspace `dst`
 ///
 /// This function mitigates stale data vulnerabilities by ensuring all writes to untrusted memory are either:
@@ -343,17 +371,6 @@ pub(crate) unsafe fn copy_to_userspace(src: *const u8, dst: *mut u8, len: usize)
         }
     }
 
-    unsafe fn copy_aligned_quadwords_to_userspace(src: *const u8, dst: *mut u8, len: usize) {
-        unsafe {
-            asm!(
-                "rep movsq (%rsi), (%rdi)",
-                inout("rcx") len / 8 => _,
-                inout("rdi") dst => _,
-                inout("rsi") src => _,
-                options(att_syntax, nostack, preserves_flags)
-            );
-        }
-    }
     assert!(!src.is_null());
     assert!(!dst.is_null());
     assert!(is_enclave_range(src, len));
@@ -370,7 +387,7 @@ pub(crate) unsafe fn copy_to_userspace(src: *const u8, dst: *mut u8, len: usize)
     } else if len % 8 == 0 && dst as usize % 8 == 0 {
         // Copying 8-byte aligned quadwords: copy quad word per quad word
         unsafe {
-            copy_aligned_quadwords_to_userspace(src, dst, len);
+            copy_quadwords(src, dst, len);
         }
     } else {
         // Split copies into three parts:
@@ -381,20 +398,16 @@ pub(crate) unsafe fn copy_to_userspace(src: *const u8, dst: *mut u8, len: usize)
         //   +--------+
         //   | small1 | Chunk smaller than 8 bytes
         //   +--------+
+        let (small0_size, big_size, small1_size) = region_as_aligned_chunks(dst, len);
 
         unsafe {
             // Copy small0
-            let small0_size = (8 - dst as usize % 8) as u8;
-            let small0_src = src;
-            let small0_dst = dst;
-            copy_bytewise_to_userspace(small0_src as _, small0_dst, small0_size as _);
+            copy_bytewise_to_userspace(src, dst, small0_size as _);
 
             // Copy big
-            let small1_size = ((len - small0_size as usize) % 8) as u8;
-            let big_size = len - small0_size as usize - small1_size as usize;
             let big_src = src.offset(small0_size as _);
             let big_dst = dst.offset(small0_size as _);
-            copy_aligned_quadwords_to_userspace(big_src as _, big_dst, big_size);
+            copy_quadwords(big_src as _, big_dst, big_size);
 
             // Copy small1
             let small1_src = src.offset(big_size as isize + small0_size as isize);
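
As a quick illustration of the split that `region_as_aligned_chunks` performs, here is a small standalone sketch (not part of the patch; the address and length below are made up) that mirrors the arithmetic above and shows the resulting chunk sizes for an unaligned destination:

```rust
// Standalone sketch, mirroring the arithmetic of the patched
// `region_as_aligned_chunks`. The pointer value and length are hypothetical.
fn region_as_aligned_chunks(ptr: *const u8, len: usize) -> (u8, usize, u8) {
    let small0_size = (8 - ptr as usize % 8) as u8;
    let small1_size = ((len - small0_size as usize) % 8) as u8;
    let big_size = len - small0_size as usize - small1_size as usize;

    (small0_size, big_size, small1_size)
}

fn main() {
    // Destination starts at 0x1003 (3 bytes past an 8-byte boundary) and is 30 bytes long:
    // small0 covers 0x1003..0x1008 (5 bytes), big covers 0x1008..0x1020 (24 bytes, i.e.
    // three aligned quadwords suitable for `rep movsq`), and small1 is the final byte.
    let (small0, big, small1) = region_as_aligned_chunks(0x1003 as *const u8, 30);
    assert_eq!((small0, big, small1), (5, 24, 1));
}
```

With this split, only the `big` chunk goes through `copy_quadwords`, while the misaligned head and tail fall back to the byte-wise copy, matching the branches in `copy_to_userspace`.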