@@ -313,9 +313,9 @@ where
// +--------+
// | small1 | Chunk smaller than 8 bytes
// +--------+
- fn region_as_aligned_chunks(ptr: *const u8, len: usize) -> (u8, usize, u8) {
-     let small0_size = (8 - ptr as usize % 8) as u8;
-     let small1_size = ((len - small0_size as usize) % 8) as u8;
+ fn region_as_aligned_chunks(ptr: *const u8, len: usize) -> (usize, usize, usize) {
+     let small0_size = if ptr as usize % 8 == 0 { 0 } else { 8 - ptr as usize % 8 };
+     let small1_size = (len - small0_size as usize) % 8;
    let big_size = len - small0_size as usize - small1_size as usize;

    (small0_size, big_size, small1_size)
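As a quick sanity check of the new arithmetic, here is a standalone sketch (the closure and addresses are invented for illustration, not part of the patch): for a pointer that is already 8-byte aligned, the old `8 - ptr as usize % 8` evaluated to 8 instead of 0, so `small0_size` ate a full quadword that belonged to `big`; the new branch returns 0 in that case.

```rust
fn main() {
    // Same chunking arithmetic as the patched `region_as_aligned_chunks`,
    // inlined here so the example runs outside the SGX module.
    let chunks = |addr: usize, len: usize| -> (usize, usize, usize) {
        let small0 = if addr % 8 == 0 { 0 } else { 8 - addr % 8 };
        let small1 = (len - small0) % 8;
        (small0, len - small0 - small1, small1)
    };

    // Misaligned start: 5 bytes of `small0` bring 0x1003 up to 0x1008.
    assert_eq!(chunks(0x1003, 21), (5, 16, 0));
    // Aligned start: the old formula would have produced small0 == 8 here.
    assert_eq!(chunks(0x1000, 21), (0, 16, 5));
}
```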
@@ -417,6 +417,106 @@ pub(crate) unsafe fn copy_to_userspace(src: *const u8, dst: *mut u8, len: usize)
    }
}

+ /// Copies `len` bytes of data from userspace pointer `src` to enclave pointer `dst`
+ ///
+ /// This function mitigates AEPIC leak vulnerabilities by ensuring that all reads from untrusted memory are 8-byte aligned
+ ///
+ /// # Panics
+ /// This function panics if:
+ ///
+ /// * The `src` pointer is null
+ /// * The `dst` pointer is null
+ /// * The `src` memory range is not in user memory
+ /// * The `dst` memory range is not in enclave memory
+ ///
+ /// # References
+ ///  - https://www.intel.com/content/www/us/en/security-center/advisory/intel-sa-00657.html
+ ///  - https://www.intel.com/content/www/us/en/developer/articles/technical/software-security-guidance/advisory-guidance/stale-data-read-from-xapic.html
+ pub(crate) unsafe fn copy_from_userspace(src: *const u8, dst: *mut u8, len: usize) {
+     // Copies memory region `src..src + len` to the enclave at `dst`. The source memory region
+     // is:
+     //  - strictly less than 8 bytes in size and may be
+     //  - located at a misaligned memory location
+     fn copy_misaligned_chunk_to_enclave(src: *const u8, dst: *mut u8, len: usize) {
+         let mut tmp_buff = [0u8; 16];
+
+         unsafe {
+             // Compute an aligned memory region to read from
+             // +--------+ <-- aligned_src + aligned_len (8B-aligned)
+             // |  pad1  |
+             // +--------+ <-- src + len (misaligned)
+             // |        |
+             // |        |
+             // |        |
+             // +--------+ <-- src (misaligned)
+             // |  pad0  |
+             // +--------+ <-- aligned_src (8B-aligned)
+             let pad0_size = src as usize % 8;
+             let aligned_src = src.sub(pad0_size);
+
+             let pad1_size = 8 - (src.add(len) as usize % 8);
+             let aligned_len = pad0_size + len + pad1_size;
+
+             debug_assert!(len < 8);
+             debug_assert_eq!(aligned_src as usize % 8, 0);
+             debug_assert_eq!(aligned_len % 8, 0);
+             debug_assert!(aligned_len <= 16);
+
+             // Copy the aligned buffer to a temporary buffer
+             // Note: copying from a slightly different memory location is a bit odd. In this case it
+             // can't lead to page faults or inadvertent copying from the enclave as we only ensured
+             // that the `src` pointer is aligned at an 8 byte boundary. As pages are 4096 bytes
+             // aligned, `aligned_src` must be on the same page as `src`. A similar argument can be
+             // made for `src + len`
+             copy_quadwords(aligned_src as _, tmp_buff.as_mut_ptr(), aligned_len);
+
+             // Copy the correct parts of the temporary buffer to the destination
+             ptr::copy(tmp_buff.as_ptr().add(pad0_size), dst, len);
+         }
+     }
+
+     assert!(!src.is_null());
+     assert!(!dst.is_null());
+     assert!(is_user_range(src, len));
+     assert!(is_enclave_range(dst, len));
+     assert!(!(src as usize).overflowing_add(len + 8).1);
+     assert!(!(dst as usize).overflowing_add(len + 8).1);
+
+     if len < 8 {
+         copy_misaligned_chunk_to_enclave(src, dst, len);
+     } else if len % 8 == 0 && src as usize % 8 == 0 {
+         // Copying 8-byte aligned quadwords: copy quad word per quad word
+         unsafe {
+             copy_quadwords(src, dst, len);
+         }
+     } else {
+         // Split copies into three parts:
+         //   +--------+
+         //   | small0 | Chunk smaller than 8 bytes
+         //   +--------+
+         //   |   big  | Chunk 8-byte aligned, and size a multiple of 8 bytes
+         //   +--------+
+         //   | small1 | Chunk smaller than 8 bytes
+         //   +--------+
+         // Split based on `src` so that every 8-byte read from userspace stays aligned
+         let (small0_size, big_size, small1_size) = region_as_aligned_chunks(src, len);
+
+         unsafe {
+             // Copy small0
+             copy_misaligned_chunk_to_enclave(src, dst, small0_size);
+
+             // Copy big
+             let big_src = src.add(small0_size);
+             let big_dst = dst.add(small0_size);
+             copy_quadwords(big_src, big_dst, big_size);
+
+             // Copy small1
+             let small1_src = src.add(big_size + small0_size);
+             let small1_dst = dst.add(big_size + small0_size);
+             copy_misaligned_chunk_to_enclave(small1_src, small1_dst, small1_size);
+         }
+     }
+ }
+
#[unstable(feature = "sgx_platform", issue = "56975")]
impl<T: ?Sized> UserRef<T>
where
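The heart of the mitigation is `copy_misaligned_chunk_to_enclave`: widen a small, possibly misaligned read into an aligned window of at most 16 bytes, read that window with aligned quadword loads only, then keep just the middle. The same trick can be exercised outside SGX; in this standalone sketch `read_quadwords` is a made-up stand-in for `copy_quadwords`, and an ordinary aligned stack buffer stands in for user memory:

```rust
/// Stand-in for `copy_quadwords`: only issues 8-byte-aligned, 8-byte-wide reads.
unsafe fn read_quadwords(src: *const u8, dst: *mut u8, len: usize) {
    debug_assert_eq!(src as usize % 8, 0);
    debug_assert_eq!(len % 8, 0);
    for i in (0..len).step_by(8) {
        let quad = unsafe { (src.add(i) as *const u64).read() }; // aligned load
        unsafe { (dst.add(i) as *mut u64).write_unaligned(quad) };
    }
}

fn main() {
    #[repr(align(8))]
    struct Aligned([u8; 24]);
    let data = Aligned(*b"abcdefghijklmnopqrstuvwx");

    unsafe {
        // Request 5 bytes starting at misaligned offset 3: "defgh".
        let (src, len) = (data.0.as_ptr().add(3), 5usize);

        let pad0_size = src as usize % 8;                // 3
        let aligned_src = src.sub(pad0_size);            // back on the 8B boundary
        let pad1_size = 8 - (src.add(len) as usize % 8); // 8: src + len is aligned
        let aligned_len = pad0_size + len + pad1_size;   // 16

        let mut tmp_buff = [0u8; 16];
        read_quadwords(aligned_src, tmp_buff.as_mut_ptr(), aligned_len);

        // Only the middle of the window is the data that was asked for.
        assert_eq!(&tmp_buff[pad0_size..pad0_size + len], b"defgh");
    }
}
```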
@@ -481,7 +581,7 @@ where
    pub fn copy_to_enclave(&self, dest: &mut T) {
        unsafe {
            assert_eq!(mem::size_of_val(dest), mem::size_of_val(&*self.0.get()));
-             ptr::copy(
+             copy_from_userspace(
                self.0.get() as *const T as *const u8,
                dest as *mut T as *mut u8,
                mem::size_of_val(dest),
@@ -507,7 +607,11 @@ where
{
    /// Copies the value from user memory into enclave memory.
    pub fn to_enclave(&self) -> T {
-         unsafe { ptr::read(self.0.get()) }
+         unsafe {
+             let mut data = mem::MaybeUninit::uninit();
+             copy_from_userspace(self.0.get() as _, data.as_mut_ptr() as _, mem::size_of::<T>());
+             data.assume_init()
+         }
    }
}
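The replacement for `ptr::read` routes the read through `copy_from_userspace` into uninitialized storage. The ordering is the important part: the storage must be filled before `assume_init` is called. A minimal sketch of the same pattern, with a hypothetical `copy_bytes` standing in for `copy_from_userspace`:

```rust
use std::{mem, ptr};

/// Stand-in for `copy_from_userspace`; any byte-wise copy works for the sketch.
unsafe fn copy_bytes(src: *const u8, dst: *mut u8, len: usize) {
    unsafe { ptr::copy(src, dst, len) };
}

fn read_value<T>(src: *const T) -> T {
    unsafe {
        let mut data = mem::MaybeUninit::<T>::uninit();
        // Fill the storage first; only then is it sound to call `assume_init`.
        copy_bytes(src as *const u8, data.as_mut_ptr() as *mut u8, mem::size_of::<T>());
        data.assume_init()
    }
}

fn main() {
    let x: u64 = 0x1122_3344_5566_7788;
    assert_eq!(read_value(&x), x);
}
```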