Skip to content

Commit

Permalink
Partially improve the safety of stack.rs
Browse files Browse the repository at this point in the history
- Fix miri warning about pointer->integer->pointer cast in stack.rs
- Panic if Stack::truncate is called with an unaligned value
- Add assertions in stack.rs and TODOs for leftover unsafe operations
  • Loading branch information
adri326 committed Jan 12, 2025
1 parent 45ab7d5 commit 166e4fd
Show file tree
Hide file tree
Showing 2 changed files with 52 additions and 7 deletions.
48 changes: 41 additions & 7 deletions src/machine/stack.rs
Original file line number Diff line number Diff line change
Expand Up @@ -62,6 +62,11 @@ impl Index<usize> for AndFrame {
type Output = HeapCellValue;

fn index(&self, index: usize) -> &Self::Output {
assert!(
index > 0 && index <= self.prelude.num_cells,
"Invalid index within AndFrame: {index} not in 1..={}",
self.prelude.num_cells
);
let prelude_offset = prelude_size::<AndFramePrelude>();
let index_offset = (index - 1) * mem::size_of::<HeapCellValue>();

Expand All @@ -76,6 +81,11 @@ impl Index<usize> for AndFrame {

impl IndexMut<usize> for AndFrame {
fn index_mut(&mut self, index: usize) -> &mut Self::Output {
assert!(
index > 0 && index <= self.prelude.num_cells,
"Invalid index within AndFrame: {index} not in 1..={}",
self.prelude.num_cells
);
let prelude_offset = prelude_size::<AndFramePrelude>();
let index_offset = (index - 1) * mem::size_of::<HeapCellValue>();

Expand All @@ -95,7 +105,7 @@ impl Index<usize> for Stack {
/// Returns a shared reference to the [`HeapCellValue`] stored at byte
/// offset `index` within the stack buffer.
///
/// Panics (via `unwrap`) if `index` is out of bounds of the buffer.
fn index(&self, index: usize) -> &Self::Output {
    unsafe {
        // TODO: implement some mechanism to verify soundness
        //
        // The lookup itself is bounds-checked (`get` returns `None` out of
        // range); the remaining unchecked assumption is the validity and
        // alignment of the cast below — TODO confirm.
        let ptr = self.buf.get(index).unwrap();
        &*(ptr as *const HeapCellValue)
    }
}
Expand Down Expand Up @@ -136,6 +146,11 @@ impl Index<usize> for OrFrame {

#[inline]
fn index(&self, index: usize) -> &Self::Output {
assert!(
index < self.prelude.num_cells,
"Invalid index within OrFrame: {index} not in 0..{}",
self.prelude.num_cells
);
let prelude_offset = prelude_size::<OrFramePrelude>();
let index_offset = index * mem::size_of::<HeapCellValue>();

Expand All @@ -151,6 +166,11 @@ impl Index<usize> for OrFrame {
impl IndexMut<usize> for OrFrame {
#[inline]
fn index_mut(&mut self, index: usize) -> &mut Self::Output {
assert!(
index < self.prelude.num_cells,
"Invalid index within OrFrame: {index} not in 0..{}",
self.prelude.num_cells
);
let prelude_offset = prelude_size::<OrFramePrelude>();
let index_offset = index * mem::size_of::<HeapCellValue>();

Expand Down Expand Up @@ -202,6 +222,7 @@ impl Stack {
pub(crate) fn allocate_and_frame(&mut self, num_cells: usize) -> usize {
let frame_size = AndFrame::size_of(num_cells);

// TODO: prove safety of this block and prove that it upholds the invariants of `Stack`.
unsafe {
let e = self.buf.len();
let new_ptr = self.alloc(frame_size);
Expand Down Expand Up @@ -230,14 +251,15 @@ impl Stack {
pub(crate) fn allocate_or_frame(&mut self, num_cells: usize) -> usize {
let frame_size = OrFrame::size_of(num_cells);

// TODO: prove safety of this block and prove that it upholds the invariants of `Stack`.
unsafe {
let b = self.buf.len();
let new_ptr = self.alloc(frame_size);
let mut offset = prelude_size::<OrFramePrelude>();

for idx in 0..num_cells {
ptr::write(
(new_ptr as usize + offset) as *mut HeapCellValue,
new_ptr.add(offset).cast::<HeapCellValue>(),
stack_loc_as_cell!(OrFrame, b, idx),
);

Expand All @@ -254,7 +276,8 @@ impl Stack {
/// Returns a shared reference to the [`AndFrame`] whose prelude starts at
/// byte offset `e` within the stack buffer.
///
/// Panics (via `unwrap`) if `e` is out of bounds of the buffer.
#[inline(always)]
pub(crate) fn index_and_frame(&self, e: usize) -> &AndFrame {
    unsafe {
        let ptr = self.buf.get(e).unwrap();
        // TODO: ensure safety of this cast
        &*(ptr as *const AndFrame)
    }
}
Expand All @@ -263,15 +286,17 @@ impl Stack {
/// Returns a mutable reference to the [`AndFrame`] whose prelude starts at
/// byte offset `e` within the stack buffer.
///
/// Panics (via `unwrap`) if `e` is out of bounds of the buffer.
pub(crate) fn index_and_frame_mut(&mut self, e: usize) -> &mut AndFrame {
    unsafe {
        // This is doing alignment wrong
        let ptr = self.buf.get(e).unwrap();
        // TODO: ensure safety of this cast
        &mut *(ptr as *mut AndFrame)
    }
}

/// Returns a shared reference to the [`OrFrame`] whose prelude starts at
/// byte offset `b` within the stack buffer.
///
/// Panics (via `unwrap`) if `b` is out of bounds of the buffer.
#[inline(always)]
pub(crate) fn index_or_frame(&self, b: usize) -> &OrFrame {
    unsafe {
        let ptr = self.buf.get(b).unwrap();
        // TODO: ensure safety of this cast
        &*(ptr as *const OrFrame)
    }
}
Expand Down Expand Up @@ -299,14 +324,23 @@ impl Stack {
/// Returns a mutable reference to the [`OrFrame`] whose prelude starts at
/// byte offset `b` within the stack buffer.
///
/// NOTE(review): this lookup is still unchecked (`get_mut_unchecked`), so an
/// out-of-range `b` is undefined behavior rather than a panic — TODO confirm
/// whether this accessor was meant to stay unchecked.
pub(crate) fn index_or_frame_mut(&mut self, b: usize) -> &mut OrFrame {
    unsafe {
        let ptr = self.buf.get_mut_unchecked(b);
        // TODO: ensure safety of this cast
        &mut *(ptr as *mut OrFrame)
    }
}

/// Panics if `byte_offset` is not aligned to [`Stack::ALIGN`].
///
/// Frames beyond `byte_offset` become invalidated after a call to this function.
#[inline(always)]
pub(crate) fn truncate(&mut self, b: usize) {
pub(crate) fn truncate(&mut self, byte_offset: usize) {
unsafe {
self.buf.truncate(b);
// TODO: implement a way to mitigate indexing to invalidated frames,
// and/or mark this function as unsafe.
//
// I think that one could also create a safer abstraction for the
// "store offset, do work, truncate to offset" kind of tasks.
self.buf.truncate(byte_offset);
}
}
}
Expand Down
11 changes: 11 additions & 0 deletions src/raw_block.rs
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,8 @@ pub struct RawBlock<T: RawBlockTraits> {
/// # Safety
///
/// `head <= self.capacity()`
///
/// `head` must always be a multiple of [`T::ALIGN`](RawBlockTraits::ALIGN).
head: Cell<usize>,

_marker: PhantomData<T>,
Expand Down Expand Up @@ -233,7 +235,16 @@ impl<T: RawBlockTraits> RawBlock<T> {
/// this function is marked as unsafe.
///
/// Does not resize the allocated region of memory.
///
/// Panics if `new_len` is not a multiple of [`T::ALIGN`](RawBlockTraits::ALIGN).
pub unsafe fn truncate(&mut self, new_len: usize) {
assert_eq!(
new_len % T::ALIGN,
0,
"RawBlock::truncate(new_len = {new_len}) requires new_len to be aligned to {}",
T::ALIGN
);

let head = self.head.get_mut();
if new_len < *head {
*head = new_len;
Expand Down

0 comments on commit 166e4fd

Please sign in to comment.