Skip to content

Commit 2458005

Browse files
authored
Merge pull request #2729 from adri326/rebis-dev-fix-miri
rebis-dev: Fix UB detected by miri in tests
2 parents ff04343 + eb16e54 commit 2458005

File tree

2 files changed

+59
-26
lines changed

2 files changed

+59
-26
lines changed

src/heap_iter.rs

Lines changed: 17 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2979,6 +2979,19 @@ mod tests {
29792979

29802980
wam.machine_st.heap.clear();
29812981

2982+
}
2983+
2984+
#[test]
2985+
fn heap_stackless_post_order_iter_pstr() {
2986+
let mut wam = MockWAM::new();
2987+
2988+
let f_atom = atom!("f");
2989+
let a_atom = atom!("a");
2990+
let b_atom = atom!("b");
2991+
2992+
// clear the heap of resource error data etc
2993+
wam.machine_st.heap.clear();
2994+
29822995
// first a 'dangling' partial string, later modified to be a
29832996
// two-part complete string, then a three-part cyclic string
29842997
// involving an uncompacted list of chars.
@@ -3004,9 +3017,13 @@ mod tests {
30043017
}
30053018

30063019
wam.machine_st.heap[2] = heap_loc_as_cell!(2);
3020+
assert_eq!(wam.machine_st.heap.cell_len(), 3);
3021+
30073022
wam.machine_st.allocate_pstr("def").unwrap();
3023+
assert_eq!(wam.machine_st.heap.cell_len(), 4);
30083024

30093025
wam.machine_st.heap.push_cell(pstr_loc_as_cell!(0)).unwrap();
3026+
assert_eq!(wam.machine_st.heap.cell_len(), 5);
30103027

30113028
{
30123029
let mut iter = stackless_post_order_iter(&mut wam.machine_st.heap, 4);

src/machine/heap.rs

Lines changed: 42 additions & 26 deletions
Original file line numberDiff line numberDiff line change
@@ -24,17 +24,29 @@ pub struct Heap {
2424

2525
impl Drop for Heap {
2626
fn drop(&mut self) {
27-
unsafe {
28-
let layout = alloc::Layout::array::<u8>(self.inner.byte_cap).unwrap();
29-
alloc::dealloc(self.inner.ptr, layout);
27+
if !self.inner.ptr.is_null() {
28+
unsafe {
29+
let layout = alloc::Layout::array::<u8>(self.inner.byte_cap).unwrap();
30+
alloc::dealloc(self.inner.ptr, layout);
31+
}
3032
}
3133
}
3234
}
3335

36+
// TODO: verify the soundness of the various accesses to `ptr`,
37+
// or rely on a Vec-like library with fallible allocations.
3438
#[derive(Debug)]
3539
struct InnerHeap {
3640
ptr: *mut u8,
41+
42+
/// # Safety
43+
///
44+
/// Must be equal to zero when `ptr.is_null()`.
3745
byte_len: usize,
46+
47+
/// # Safety
48+
///
49+
/// Must be equal to zero when `ptr.is_null()`.
3850
byte_cap: usize,
3951
}
4052

@@ -341,15 +353,7 @@ impl<'a> ReservedHeapSection<'a> {
341353

342354
let zero_region_idx = heap_index!(self.heap_cell_len) + str_byte_len;
343355

344-
let align_offset = self.heap_ptr
345-
.add(zero_region_idx)
346-
.align_offset(ALIGN_CELL);
347-
348-
let align_offset = if align_offset == 0 {
349-
ALIGN_CELL
350-
} else {
351-
align_offset
352-
};
356+
let align_offset = pstr_sentinel_length(zero_region_idx);
353357

354358
ptr::write_bytes(
355359
self.heap_ptr.add(zero_region_idx),
@@ -469,6 +473,22 @@ impl<'a> Index<usize> for ReservedHeapSection<'a> {
469473
}
470474
}
471475

476+
/// Computes the number of bytes required to pad a string of length `chunk_len`
477+
/// with zeroes, such that `chunk_len + pstr_sentinel_length(chunk_len)` is a
478+
/// multiple of `Heap::heap_cell_alignment()`.
479+
fn pstr_sentinel_length(chunk_len: usize) -> usize {
480+
const ALIGN: usize = Heap::heap_cell_alignment();
481+
482+
let res = chunk_len.next_multiple_of(ALIGN) - chunk_len;
483+
484+
// No bytes available in last chunk
485+
if res == 0 {
486+
ALIGN
487+
} else {
488+
res
489+
}
490+
}
491+
472492
#[must_use]
473493
#[derive(Debug)]
474494
pub struct HeapWriter<'a> {
@@ -623,7 +643,7 @@ impl Heap {
623643
if self.free_space() >= len {
624644
section = ReservedHeapSection {
625645
heap_ptr: self.inner.ptr,
626-
heap_cell_len: cell_index!(self.inner.byte_len),
646+
heap_cell_len: self.cell_len(),
627647
pstr_vec: &mut self.pstr_vec,
628648
};
629649
break;
@@ -798,6 +818,11 @@ impl Heap {
798818
}
799819
}
800820

821+
// SAFETY:
822+
// - Postcondition: from `self.grow()`, `self.inner.byte_len + size_of::<HeapCellValue>()`
823+
// is strictly less than `self.inner.byte_cap`.
824+
// - Asserted: `self.cell_len() * size_of::<HeapCellValue>() <= self.inner.byte_cap`.
825+
// - Invariant: from `InnerHeap`, `self.inner.byte_cap < isize::MAX`.
801826
let cell_ptr = (self.inner.ptr as *mut HeapCellValue).add(self.cell_len());
802827
cell_ptr.write(cell);
803828
self.pstr_vec.push(false);
@@ -954,17 +979,7 @@ impl Heap {
954979

955980
const ALIGN_CELL: usize = Heap::heap_cell_alignment();
956981

957-
let align_offset = unsafe {
958-
self.inner.ptr
959-
.add(self.inner.byte_len + s_len)
960-
.align_offset(ALIGN_CELL)
961-
};
962-
963-
let align_offset = if align_offset == 0 {
964-
ALIGN_CELL
965-
} else {
966-
align_offset
967-
};
982+
let align_offset = pstr_sentinel_length(s_len);
968983

969984
let copy_size = s_len + align_offset;
970985

@@ -1027,7 +1042,8 @@ impl Heap {
10271042
Ok(())
10281043
}
10291044

1030-
pub(crate) const fn compute_pstr_size(src: &str) -> usize {
1045+
/// Returns the number of bytes needed to store `src` as a `PStr`.
1046+
pub(crate) fn compute_pstr_size(src: &str) -> usize {
10311047
const ALIGN_CELL: usize = Heap::heap_cell_alignment();
10321048

10331049
let mut byte_size = 0;
@@ -1044,7 +1060,7 @@ impl Heap {
10441060
null_idx += 1;
10451061
}
10461062

1047-
byte_size += (null_idx & !(ALIGN_CELL - 1)) + ALIGN_CELL;
1063+
byte_size += null_idx.next_multiple_of(ALIGN_CELL);
10481064
byte_size += mem::size_of::<HeapCellValue>();
10491065

10501066
if null_idx + 1 >= src.len() {

0 commit comments

Comments
 (0)