
Rollup merge of #98126 - fortanix:raoul/mitigate_stale_data_vulnerability, r=cuviper

Mitigate MMIO stale data vulnerability

Intel publicly disclosed the MMIO stale data vulnerability on June 14, 2022. To mitigate this vulnerability, compiler changes are required for the `x86_64-fortanix-unknown-sgx` target.
cc: `@jethrogb`
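
As context for the diff below, a minimal usage sketch (not part of the commit; it assumes a nightly toolchain and compiles only for the `x86_64-fortanix-unknown-sgx` target) of the std API whose copies this change hardens:

#![feature(sgx_platform)]
use std::os::fortanix_sgx::usercalls::alloc::User;

fn main() {
    // Allocates user memory and copies the value out of the enclave; with
    // this change the copy goes through the mitigated `copy_to_userspace`.
    let user_value = User::new_from_enclave(&42u64);
    assert_eq!(user_value.to_enclave(), 42);
}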
matthiaskrgr committed Jun 25, 2022
2 parents d7388d1 + 6a6910e commit a130521
Showing 3 changed files with 142 additions and 10 deletions.
120 changes: 110 additions & 10 deletions library/std/src/sys/sgx/abi/usercalls/alloc.rs
@@ -1,13 +1,16 @@
#![allow(unused)]

use crate::arch::asm;
use crate::cell::UnsafeCell;
use crate::cmp;
use crate::convert::TryInto;
use crate::mem;
use crate::ops::{CoerceUnsized, Deref, DerefMut, Index, IndexMut};
use crate::ptr::{self, NonNull};
use crate::slice;
use crate::slice::SliceIndex;

-use super::super::mem::is_user_range;
+use super::super::mem::{is_enclave_range, is_user_range};
use fortanix_sgx_abi::*;

/// A type that can be safely read from or written to userspace.
@@ -210,7 +213,9 @@ where
        unsafe {
            // Mustn't call alloc with size 0.
            let ptr = if size > 0 {
-                rtunwrap!(Ok, super::alloc(size, T::align_of())) as _
+                // `copy_to_userspace` is more efficient when data is 8-byte aligned
+                let alignment = cmp::max(T::align_of(), 8);
+                rtunwrap!(Ok, super::alloc(size, alignment)) as _
            } else {
                T::align_of() as _ // dangling pointer ok for size 0
            };
@@ -225,13 +230,9 @@
    /// Copies `val` into freshly allocated space in user memory.
    pub fn new_from_enclave(val: &T) -> Self {
        unsafe {
-            let ret = Self::new_uninit_bytes(mem::size_of_val(val));
-            ptr::copy(
-                val as *const T as *const u8,
-                ret.0.as_ptr() as *mut u8,
-                mem::size_of_val(val),
-            );
-            ret
+            let mut user = Self::new_uninit_bytes(mem::size_of_val(val));
+            user.copy_from_enclave(val);
+            user
        }
    }

@@ -304,6 +305,105 @@ where
    }
}

/// Copies `len` bytes of data from enclave pointer `src` to userspace `dst`
///
/// This function mitigates stale data vulnerabilities by ensuring all writes to untrusted memory are either:
/// - preceded by the VERW instruction and followed by the MFENCE; LFENCE instruction sequence
/// - or are in multiples of 8 bytes, aligned to an 8-byte boundary
///
/// # Panics
/// This function panics if:
///
/// * The `src` pointer is null
/// * The `dst` pointer is null
/// * The `src` memory range is not in enclave memory
/// * The `dst` memory range is not in user memory
///
/// # References
/// - https://www.intel.com/content/www/us/en/security-center/advisory/intel-sa-00615.html
/// - https://www.intel.com/content/www/us/en/developer/articles/technical/software-security-guidance/technical-documentation/processor-mmio-stale-data-vulnerabilities.html#inpage-nav-3-2-2
pub(crate) unsafe fn copy_to_userspace(src: *const u8, dst: *mut u8, len: usize) {
    unsafe fn copy_bytewise_to_userspace(src: *const u8, dst: *mut u8, len: usize) {
        unsafe {
            // VERW takes a memory operand holding a segment selector; stage
            // the DS selector in this scratch slot.
            let mut seg_sel: u16 = 0;
            for off in 0..len {
                // Each byte store is preceded by VERW (which, with Intel's
                // MD_CLEAR microcode update, overwrites stale data in the
                // affected buffers) and followed by MFENCE; LFENCE to order it.
                asm!("
                    mov %ds, ({seg_sel})
                    verw ({seg_sel})
                    movb {val}, ({dst})
                    mfence
                    lfence
                    ",
                    val = in(reg_byte) *src.offset(off as isize),
                    dst = in(reg) dst.offset(off as isize),
                    seg_sel = in(reg) &mut seg_sel,
                    options(nostack, att_syntax)
                );
            }
        }
    }

    unsafe fn copy_aligned_quadwords_to_userspace(src: *const u8, dst: *mut u8, len: usize) {
        unsafe {
            // 8-byte-aligned stores in whole quadwords are exempt from the
            // mitigation rules above, so a plain `rep movsq` is sufficient.
            asm!(
                "rep movsq (%rsi), (%rdi)",
                inout("rcx") len / 8 => _,
                inout("rdi") dst => _,
                inout("rsi") src => _,
                options(att_syntax, nostack, preserves_flags)
            );
        }
    }
    assert!(!src.is_null());
    assert!(!dst.is_null());
    assert!(is_enclave_range(src, len));
    assert!(is_user_range(dst, len));
    assert!(len < isize::MAX as usize);
    assert!(!(src as usize).overflowing_add(len).1);
    assert!(!(dst as usize).overflowing_add(len).1);

    if len < 8 {
        // Can't align on 8 byte boundary: copy safely byte per byte
        unsafe {
            copy_bytewise_to_userspace(src, dst, len);
        }
    } else if len % 8 == 0 && dst as usize % 8 == 0 {
        // Copying 8-byte aligned quadwords: copy quad word per quad word
        unsafe {
            copy_aligned_quadwords_to_userspace(src, dst, len);
        }
    } else {
        // Split copies into three parts:
        //   +--------+
        //   | small0 | Chunk smaller than 8 bytes
        //   +--------+
        //   |   big  | Chunk 8-byte aligned, and size a multiple of 8 bytes
        //   +--------+
        //   | small1 | Chunk smaller than 8 bytes
        //   +--------+

        unsafe {
            // Copy small0
            let small0_size = (8 - dst as usize % 8) as u8;
            let small0_src = src;
            let small0_dst = dst;
            copy_bytewise_to_userspace(small0_src as _, small0_dst, small0_size as _);

            // Copy big
            let small1_size = ((len - small0_size as usize) % 8) as u8;
            let big_size = len - small0_size as usize - small1_size as usize;
            let big_src = src.offset(small0_size as _);
            let big_dst = dst.offset(small0_size as _);
            copy_aligned_quadwords_to_userspace(big_src as _, big_dst, big_size);

            // Copy small1
            let small1_src = src.offset(big_size as isize + small0_size as isize);
            let small1_dst = dst.offset(big_size as isize + small0_size as isize);
            copy_bytewise_to_userspace(small1_src, small1_dst, small1_size as _);
        }
    }
}

#[unstable(feature = "sgx_platform", issue = "56975")]
impl<T: ?Sized> UserRef<T>
where
@@ -352,7 +452,7 @@ where
    pub fn copy_from_enclave(&mut self, val: &T) {
        unsafe {
            assert_eq!(mem::size_of_val(val), mem::size_of_val(&*self.0.get()));
-            ptr::copy(
+            copy_to_userspace(
                val as *const T as *const u8,
                self.0.get() as *mut T as *mut u8,
                mem::size_of_val(val),
2 changes: 2 additions & 0 deletions library/std/src/sys/sgx/abi/usercalls/mod.rs
@@ -6,6 +6,8 @@ use crate::time::{Duration, Instant};
pub(crate) mod alloc;
#[macro_use]
pub(crate) mod raw;
+#[cfg(test)]
+mod tests;

use self::raw::*;

30 changes: 30 additions & 0 deletions library/std/src/sys/sgx/abi/usercalls/tests.rs
@@ -0,0 +1,30 @@
use super::alloc::copy_to_userspace;
use super::alloc::User;

#[test]
fn test_copy_function() {
    let mut src = [0u8; 100];
    let mut dst = User::<[u8]>::uninitialized(100);

    for i in 0..src.len() {
        src[i] = i as _;
    }

    for size in 0..48 {
        // For all possible alignment
        for offset in 0..8 {
            // overwrite complete dst
            dst.copy_from_enclave(&[0u8; 100]);

            // Copy src[0..size] to dst + offset
            unsafe { copy_to_userspace(src.as_ptr(), dst.as_mut_ptr().offset(offset), size) };

            // Verify copy
            for byte in 0..size {
                unsafe {
                    assert_eq!(*dst.as_ptr().offset(offset + byte as isize), src[byte as usize]);
                }
            }
        }
    }
}
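
The chunk arithmetic in `copy_to_userspace` is easiest to check with concrete numbers. The following standalone sketch (not part of this commit; names and values chosen for illustration) mirrors the split computed in the `else` branch and runs on any target:

fn split(dst: usize, len: usize) -> (usize, usize, usize) {
    // Entry conditions of the `else` branch: len >= 8, and the copy is not
    // already an aligned multiple of 8 bytes.
    let small0 = 8 - dst % 8;        // unaligned prefix, copied bytewise
    let small1 = (len - small0) % 8; // unaligned suffix, copied bytewise
    let big = len - small0 - small1; // aligned middle, copied as quadwords
    (small0, big, small1)
}

fn main() {
    let (small0, big, small1) = split(0x1005, 20);
    assert_eq!((small0, big, small1), (3, 16, 1));
    assert_eq!((0x1005 + small0) % 8, 0); // `big` starts 8-byte aligned
    assert_eq!(big % 8, 0);               // and spans whole quadwords
}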
