Skip to content

Commit 6e5f516

Browse files
committed
Use a new AllocationMap to store store buffers in the same allocation
1 parent 60fa558 commit 6e5f516

File tree

5 files changed

+342
-39
lines changed

5 files changed

+342
-39
lines changed

src/allocation_map.rs

Lines changed: 272 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,272 @@
1+
//! Implements a map from allocation ranges to data.
2+
//! This is somewhat similar to RangeMap, but the ranges
3+
//! and data are discrete and non-splittable. An allocation in the
4+
//! map will always have the same range until explicitly removed
5+
6+
use rustc_target::abi::Size;
7+
use std::ops::{Index, IndexMut, Range};
8+
9+
use rustc_const_eval::interpret::AllocRange;
10+
11+
#[derive(Clone, Debug)]
12+
struct Elem<T> {
13+
/// The range covered by this element; never empty.
14+
range: AllocRange,
15+
/// The data stored for this element.
16+
data: T,
17+
}
18+
19+
/// Index of an allocation within the map.
type Position = usize;
21+
22+
#[derive(Clone, Debug)]
23+
pub struct AllocationMap<T> {
24+
v: Vec<Elem<T>>,
25+
}
26+
27+
#[derive(Clone, Debug, PartialEq)]
28+
pub enum AccessType {
29+
/// The access perfectly overlaps (same offset and range) with the exsiting allocation
30+
PerfectlyOverlapping(Position),
31+
/// The access does not touch any exising allocation
32+
Empty(Position),
33+
/// The access overlaps with one or more existing allocations
34+
ImperfectlyOverlapping(Range<Position>),
35+
}
36+
37+
impl<T> AllocationMap<T> {
38+
pub fn new() -> Self {
39+
Self { v: Vec::new() }
40+
}
41+
42+
/// Finds the position of the allocation containing the given offset. If the offset is not
43+
/// in an existing allocation, then returns Err containing the position
44+
/// where such allocation should be inserted
45+
fn find_offset(&self, offset: Size) -> Result<Position, Position> {
46+
// We do a binary search.
47+
let mut left = 0usize; // inclusive
48+
let mut right = self.v.len(); // exclusive
49+
loop {
50+
if left == right {
51+
// No element contains the given offset. But the
52+
// index is where such element should be placed at.
53+
return Err(left);
54+
}
55+
let candidate = left.checked_add(right).unwrap() / 2;
56+
let elem = &self.v[candidate];
57+
if offset < elem.range.start {
58+
// We are too far right (offset is further left).
59+
debug_assert!(candidate < right); // we are making progress
60+
right = candidate;
61+
} else if offset >= elem.range.end() {
62+
// We are too far left (offset is further right).
63+
debug_assert!(candidate >= left); // we are making progress
64+
left = candidate + 1;
65+
} else {
66+
// This is it!
67+
return Ok(candidate);
68+
}
69+
}
70+
}
71+
72+
/// Determines whether a given access on `range` overlaps with
73+
/// an existing allocation
74+
pub fn access_type(&self, range: AllocRange) -> AccessType {
75+
match self.find_offset(range.start) {
76+
Ok(index) => {
77+
// Start of the range belongs to an existing object, now let's check the overlapping situation
78+
let elem = &self.v[index];
79+
// FIXME: derive Eq for AllocRange in rustc
80+
if elem.range.start == range.start && elem.range.size == range.size {
81+
// Happy case: perfectly overlapping access
82+
AccessType::PerfectlyOverlapping(index)
83+
} else {
84+
// FIXME: add a last() method to AllocRange that returns the last inclusive offset (end() is exclusive)
85+
let end_index = match self.find_offset(range.end() - Size::from_bytes(1)) {
86+
// If the end lands in an existing object, add one to get the exclusive index
87+
Ok(inclusive) => inclusive + 1,
88+
Err(exclusive) => exclusive,
89+
};
90+
91+
AccessType::ImperfectlyOverlapping(index..end_index)
92+
}
93+
}
94+
Err(index) => {
95+
// Start of the range doesn't belong to an existing object
96+
match self.find_offset(range.end() - Size::from_bytes(1)) {
97+
// Neither does the end
98+
Err(end_index) =>
99+
if index == end_index {
100+
// There's nothing between the start and the end, so the range thing is empty
101+
AccessType::Empty(index)
102+
} else {
103+
// Otherwise we have entirely covered an existing object
104+
AccessType::ImperfectlyOverlapping(index..end_index)
105+
},
106+
// Otherwise at least part of it overlaps with something else
107+
Ok(end_index) => AccessType::ImperfectlyOverlapping(index..end_index + 1),
108+
}
109+
}
110+
}
111+
}
112+
113+
/// Inserts an object and its occupied range at given position
114+
pub fn insert(&mut self, index: Position, range: AllocRange, data: T) {
115+
self.v.insert(index, Elem { range, data });
116+
// If we aren't the first element, then our start must be greater than the preivous element's end
117+
if index > 0 {
118+
debug_assert!(self.v[index - 1].range.end() <= range.start);
119+
}
120+
// If we aren't the last element, then our end must be smaller than next element's start
121+
if index < self.v.len() - 1 {
122+
debug_assert!(range.end() <= self.v[index + 1].range.start);
123+
}
124+
}
125+
126+
/// Removes an object at given position
127+
pub fn remove(&mut self, index: Position) -> T {
128+
self.v.remove(index).data
129+
}
130+
}
131+
132+
/// Indexing yields the stored data (not the `Elem` wrapper) at a position.
impl<T> Index<Position> for AllocationMap<T> {
    type Output = T;

    fn index(&self, index: usize) -> &Self::Output {
        &self.v[index].data
    }
}
139+
140+
/// Mutable indexing yields the stored data at a position.
impl<T> IndexMut<Position> for AllocationMap<T> {
    fn index_mut(&mut self, index: usize) -> &mut Self::Output {
        &mut self.v[index].data
    }
}
145+
146+
#[cfg(test)]
mod tests {
    use rustc_const_eval::interpret::alloc_range;

    use super::*;

    #[test]
    fn empty_map() {
        // FIXME: make Size::from_bytes const
        let four = Size::from_bytes(4);
        let map = AllocationMap::<()>::new();

        // Correctly tells where we should insert the first element (at index 0)
        assert_eq!(map.find_offset(Size::from_bytes(3)), Err(0));

        // Correctly tells the access type along with the supposed index
        assert_eq!(map.access_type(alloc_range(Size::ZERO, four)), AccessType::Empty(0));
    }

    #[test]
    #[should_panic]
    fn no_overlapping_inserts() {
        let four = Size::from_bytes(4);

        let mut map = AllocationMap::<&str>::new();

        // |_|_|_|_|#|#|#|#|_|_|_|_|...
        //  0 1 2 3 4 5 6 7 8 9 a b c d
        map.insert(0, alloc_range(four, four), "#");
        // |_|_|_|_|#|#|#|#|_|_|_|_|...
        //  0 ^ ^ ^ ^ 5 6 7 8 9 a b c d
        map.insert(0, alloc_range(Size::from_bytes(1), four), "@");
    }

    #[test]
    fn boundaries() {
        let four = Size::from_bytes(4);

        let mut map = AllocationMap::<&str>::new();

        // |#|#|#|#|_|_|...
        //  0 1 2 3 4 5
        map.insert(0, alloc_range(Size::ZERO, four), "#");
        // |#|#|#|#|_|_|...
        //  0 1 2 3 ^ 5
        assert_eq!(map.find_offset(four), Err(1));
        // |#|#|#|#|_|_|_|_|_|...
        //  0 1 2 3 ^ ^ ^ ^ 8
        assert_eq!(map.access_type(alloc_range(four, four)), AccessType::Empty(1));

        let eight = Size::from_bytes(8);
        // |#|#|#|#|_|_|_|_|@|@|@|@|_|_|...
        //  0 1 2 3 4 5 6 7 8 9 a b c d
        map.insert(1, alloc_range(eight, four), "@");
        // |#|#|#|#|_|_|_|_|@|@|@|@|_|_|...
        //  0 1 2 3 4 5 6 ^ 8 9 a b c d
        assert_eq!(map.find_offset(Size::from_bytes(7)), Err(1));
        // |#|#|#|#|_|_|_|_|@|@|@|@|_|_|...
        //  0 1 2 3 ^ ^ ^ ^ 8 9 a b c d
        assert_eq!(map.access_type(alloc_range(four, four)), AccessType::Empty(1));
    }

    #[test]
    fn perfectly_overlapping() {
        let four = Size::from_bytes(4);

        let mut map = AllocationMap::<&str>::new();

        // |#|#|#|#|_|_|...
        //  0 1 2 3 4 5
        map.insert(0, alloc_range(Size::ZERO, four), "#");
        // |#|#|#|#|_|_|...
        //  ^ ^ ^ ^ 4 5
        assert_eq!(map.find_offset(Size::ZERO), Ok(0));
        assert_eq!(
            map.access_type(alloc_range(Size::ZERO, four)),
            AccessType::PerfectlyOverlapping(0)
        );

        // |#|#|#|#|@|@|@|@|_|...
        //  0 1 2 3 4 5 6 7 8
        map.insert(1, alloc_range(four, four), "@");
        // |#|#|#|#|@|@|@|@|_|...
        //  0 1 2 3 ^ ^ ^ ^ 8
        assert_eq!(map.find_offset(four), Ok(1));
        assert_eq!(map.access_type(alloc_range(four, four)), AccessType::PerfectlyOverlapping(1));
    }

    #[test]
    fn straddling() {
        let four = Size::from_bytes(4);

        let mut map = AllocationMap::<&str>::new();

        // |_|_|_|_|#|#|#|#|_|_|_|_|...
        //  0 1 2 3 4 5 6 7 8 9 a b c d
        map.insert(0, alloc_range(four, four), "#");
        // |_|_|_|_|#|#|#|#|_|_|_|_|...
        //  0 1 ^ ^ ^ ^ 6 7 8 9 a b c d
        assert_eq!(
            map.access_type(alloc_range(Size::from_bytes(2), four)),
            AccessType::ImperfectlyOverlapping(0..1)
        );
        // |_|_|_|_|#|#|#|#|_|_|_|_|...
        //  0 1 2 3 4 5 ^ ^ ^ ^ a b c d
        assert_eq!(
            map.access_type(alloc_range(Size::from_bytes(6), four)),
            AccessType::ImperfectlyOverlapping(0..1)
        );
        // |_|_|_|_|#|#|#|#|_|_|_|_|...
        //  0 1 ^ ^ ^ ^ ^ ^ ^ ^ a b c d
        assert_eq!(
            map.access_type(alloc_range(Size::from_bytes(2), Size::from_bytes(8))),
            AccessType::ImperfectlyOverlapping(0..1)
        );

        // |_|_|_|_|#|#|#|#|_|_|@|@|_|_|...
        //  0 1 2 3 4 5 6 7 8 9 a b c d
        map.insert(1, alloc_range(Size::from_bytes(10), Size::from_bytes(2)), "@");
        // |_|_|_|_|#|#|#|#|_|_|@|@|_|_|...
        //  0 1 2 3 4 5 ^ ^ ^ ^ ^ ^ ^ ^
        assert_eq!(
            map.access_type(alloc_range(Size::from_bytes(6), Size::from_bytes(8))),
            AccessType::ImperfectlyOverlapping(0..2)
        );
    }
}

src/data_race.rs

Lines changed: 8 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -489,7 +489,8 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: MiriEvalContextExt<'mir, 'tcx> {
489489
global.sc_read();
490490
}
491491
let mut rng = this.machine.rng.borrow_mut();
492-
let buffer = alloc_buffers.get_store_buffer(alloc_range(base_offset, place.layout.size));
492+
let buffer =
493+
alloc_buffers.get_store_buffer(alloc_range(base_offset, place.layout.size));
493494
let loaded = buffer.buffered_read(
494495
global,
495496
atomic == AtomicReadOp::SeqCst,
@@ -525,12 +526,9 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: MiriEvalContextExt<'mir, 'tcx> {
525526
if atomic == AtomicWriteOp::SeqCst {
526527
global.sc_write();
527528
}
528-
let mut buffer = alloc_buffers.get_store_buffer_mut(alloc_range(base_offset, dest.layout.size));
529-
buffer.buffered_write(
530-
val,
531-
global,
532-
atomic == AtomicWriteOp::SeqCst,
533-
)?;
529+
let buffer =
530+
alloc_buffers.get_store_buffer_mut(alloc_range(base_offset, dest.layout.size));
531+
buffer.buffered_write(val, global, atomic == AtomicWriteOp::SeqCst)?;
534532
}
535533

536534
Ok(())
@@ -594,17 +592,9 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: MiriEvalContextExt<'mir, 'tcx> {
594592
let lt = this.binary_op(mir::BinOp::Lt, &old, &rhs)?.to_scalar()?.to_bool()?;
595593

596594
let new_val = if min {
597-
if lt {
598-
&old
599-
} else {
600-
&rhs
601-
}
595+
if lt { &old } else { &rhs }
602596
} else {
603-
if lt {
604-
&rhs
605-
} else {
606-
&old
607-
}
597+
if lt { &rhs } else { &old }
608598
};
609599

610600
this.allow_data_races_mut(|this| this.write_immediate(**new_val, &(*place).into()))?;
@@ -702,7 +692,7 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: MiriEvalContextExt<'mir, 'tcx> {
702692
global.sc_write();
703693
}
704694
let range = alloc_range(base_offset, place.layout.size);
705-
let mut buffer = alloc_buffers.get_store_buffer_mut(range);
695+
let buffer = alloc_buffers.get_store_buffer_mut(range);
706696
buffer.read_from_last_store(global);
707697
buffer.buffered_write(new_val, global, atomic == AtomicRwOp::SeqCst)?;
708698
}

src/lib.rs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -34,6 +34,7 @@ extern crate rustc_session;
3434
extern crate rustc_span;
3535
extern crate rustc_target;
3636

37+
mod allocation_map;
3738
mod data_race;
3839
mod diagnostics;
3940
mod eval;

src/machine.rs

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -587,13 +587,17 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> {
587587
let buffer_alloc = if ecx.machine.weak_memory {
588588
// FIXME: if this is an atomic object, we want to supply its initial value
589589
// while allocating the store buffer here.
590-
Some(weak_memory::AllocExtra::new_allocation(alloc.size()))
590+
Some(weak_memory::AllocExtra::new_allocation())
591591
} else {
592592
None
593593
};
594594
let alloc: Allocation<Tag, Self::AllocExtra> = alloc.convert_tag_add_extra(
595595
&ecx.tcx,
596-
AllocExtra { stacked_borrows: stacks, data_race: race_alloc, weak_memory: buffer_alloc },
596+
AllocExtra {
597+
stacked_borrows: stacks,
598+
data_race: race_alloc,
599+
weak_memory: buffer_alloc,
600+
},
597601
|ptr| Evaluator::tag_alloc_base_pointer(ecx, ptr),
598602
);
599603
Cow::Owned(alloc)

0 commit comments

Comments
 (0)