Skip to content

Commit 0d1c7d3

Browse files
committed
Make ObjectReference non-nullable
1 parent e7143b9 commit 0d1c7d3

19 files changed

+97
-130
lines changed

src/memory_manager.rs

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -708,9 +708,6 @@ pub fn is_mmtk_object(addr: Address) -> bool {
708708
/// * `object`: The object reference to query.
709709
pub fn is_in_mmtk_spaces<VM: VMBinding>(object: ObjectReference) -> bool {
710710
use crate::mmtk::SFT_MAP;
711-
if object.is_null() {
712-
return false;
713-
}
714711
SFT_MAP
715712
.get_checked(object.to_address::<VM>())
716713
.is_in_space(object)

src/plan/generational/gc_work.rs

Lines changed: 3 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -36,18 +36,16 @@ impl<VM: VMBinding, P: GenerationalPlanExt<VM> + PlanTraceObject<VM>> ProcessEdg
3636
Self { plan, base }
3737
}
3838
fn trace_object(&mut self, object: ObjectReference) -> ObjectReference {
39-
debug_assert!(!object.is_null());
40-
4139
// We cannot borrow `self` twice in a call, so we extract `worker` as a local variable.
4240
let worker = self.worker();
4341
self.plan
4442
.trace_object_nursery(&mut self.base.nodes, object, worker)
4543
}
4644
fn process_edge(&mut self, slot: EdgeOf<Self>) {
47-
let object = slot.load();
48-
if object.is_null() {
45+
let Some(object) = slot.load() else {
46+
// Skip slots that are not holding an object reference.
4947
return;
50-
}
48+
};
5149
let new_object = self.trace_object(object);
5250
debug_assert!(!self.plan.is_object_in_nursery(new_object));
5351
// Note: If `object` is a mature object, `trace_object` will not call `space.trace_object`,

src/plan/tracing.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -117,7 +117,7 @@ impl<'a, E: ProcessEdgesWork> EdgeVisitor<EdgeOf<E>> for ObjectsClosure<'a, E> {
117117
{
118118
use crate::vm::edge_shape::Edge;
119119
trace!(
120-
"(ObjectsClosure) Visit edge {:?} (pointing to {})",
120+
"(ObjectsClosure) Visit edge {:?} (pointing to {:?})",
121121
slot,
122122
slot.load()
123123
);

src/policy/copyspace.rs

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -217,7 +217,6 @@ impl<VM: VMBinding> CopySpace<VM> {
217217
worker: &mut GCWorker<VM>,
218218
) -> ObjectReference {
219219
trace!("copyspace.trace_object(, {:?}, {:?})", object, semantics,);
220-
debug_assert!(!object.is_null());
221220

222221
// If this is not from-space, we do not need to trace it (the object has been copied to the to-space)
223222
if !self.is_from_space() {

src/policy/immix/immixspace.rs

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -184,7 +184,6 @@ impl<VM: VMBinding> crate::policy::gc_work::PolicyTraceObject<VM> for ImmixSpace
184184
copy: Option<CopySemantics>,
185185
worker: &mut GCWorker<VM>,
186186
) -> ObjectReference {
187-
debug_assert!(!object.is_null());
188187
if KIND == TRACE_KIND_TRANSITIVE_PIN {
189188
self.trace_object_without_moving(queue, object)
190189
} else if KIND == TRACE_KIND_DEFRAG {

src/policy/immortalspace.rs

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -187,7 +187,6 @@ impl<VM: VMBinding> ImmortalSpace<VM> {
187187
queue: &mut Q,
188188
object: ObjectReference,
189189
) -> ObjectReference {
190-
debug_assert!(!object.is_null());
191190
#[cfg(feature = "vo_bit")]
192191
debug_assert!(
193192
crate::util::metadata::vo_bit::is_vo_bit_set::<VM>(object),

src/policy/largeobjectspace.rs

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -189,7 +189,6 @@ impl<VM: VMBinding> LargeObjectSpace<VM> {
189189
queue: &mut Q,
190190
object: ObjectReference,
191191
) -> ObjectReference {
192-
debug_assert!(!object.is_null());
193192
#[cfg(feature = "vo_bit")]
194193
debug_assert!(
195194
crate::util::metadata::vo_bit::is_vo_bit_set::<VM>(object),

src/policy/markcompactspace.rs

Lines changed: 12 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -37,12 +37,7 @@ impl<VM: VMBinding> SFT for MarkCompactSpace<VM> {
3737
}
3838

3939
fn get_forwarded_object(&self, object: ObjectReference) -> Option<ObjectReference> {
40-
let forwarding_pointer = Self::get_header_forwarding_pointer(object);
41-
if forwarding_pointer.is_null() {
42-
None
43-
} else {
44-
Some(forwarding_pointer)
45-
}
40+
Self::get_header_forwarding_pointer(object)
4641
}
4742

4843
fn is_live(&self, object: ObjectReference) -> bool {
@@ -130,7 +125,6 @@ impl<VM: VMBinding> crate::policy::gc_work::PolicyTraceObject<VM> for MarkCompac
130125
_copy: Option<CopySemantics>,
131126
_worker: &mut GCWorker<VM>,
132127
) -> ObjectReference {
133-
debug_assert!(!object.is_null());
134128
debug_assert!(
135129
KIND != TRACE_KIND_TRANSITIVE_PIN,
136130
"MarkCompact does not support transitive pin trace."
@@ -177,8 +171,9 @@ impl<VM: VMBinding> MarkCompactSpace<VM> {
177171
}
178172

179173
/// Get header forwarding pointer for an object
180-
fn get_header_forwarding_pointer(object: ObjectReference) -> ObjectReference {
181-
unsafe { Self::header_forwarding_pointer_address(object).load::<ObjectReference>() }
174+
fn get_header_forwarding_pointer(object: ObjectReference) -> Option<ObjectReference> {
175+
let addr = unsafe { Self::header_forwarding_pointer_address(object).load::<Address>() };
176+
ObjectReference::from_raw_address(addr)
182177
}
183178

184179
/// Store header forwarding pointer for an object
@@ -251,7 +246,9 @@ impl<VM: VMBinding> MarkCompactSpace<VM> {
251246
queue.enqueue(object);
252247
}
253248

254-
Self::get_header_forwarding_pointer(object)
249+
Self::get_header_forwarding_pointer(object).unwrap_or_else(|| {
250+
panic!("trace_forward_object called when an object is not forwarded, yet. object: {object}")
251+
})
255252
}
256253

257254
pub fn test_and_mark(object: ObjectReference) -> bool {
@@ -388,10 +385,9 @@ impl<VM: VMBinding> MarkCompactSpace<VM> {
388385
// clear the VO bit
389386
vo_bit::unset_vo_bit::<VM>(obj);
390387

391-
let forwarding_pointer = Self::get_header_forwarding_pointer(obj);
392-
393-
trace!("Compact {} to {}", obj, forwarding_pointer);
394-
if !forwarding_pointer.is_null() {
388+
let maybe_forwarding_pointer = Self::get_header_forwarding_pointer(obj);
389+
if let Some(forwarding_pointer) = maybe_forwarding_pointer {
390+
trace!("Compact {} to {}", obj, forwarding_pointer);
395391
let new_object = forwarding_pointer;
396392
Self::clear_header_forwarding_pointer(new_object);
397393

@@ -403,6 +399,8 @@ impl<VM: VMBinding> MarkCompactSpace<VM> {
403399
vo_bit::set_vo_bit::<VM>(new_object);
404400
to = new_object.to_object_start::<VM>() + copied_size;
405401
debug_assert_eq!(end_of_new_object, to);
402+
} else {
403+
trace!("Skipping dead object {}", obj);
406404
}
407405
}
408406
}

src/policy/marksweepspace/malloc_ms/global.rs

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -400,8 +400,6 @@ impl<VM: VMBinding> MallocSpace<VM> {
400400
queue: &mut Q,
401401
object: ObjectReference,
402402
) -> ObjectReference {
403-
debug_assert!(!object.is_null());
404-
405403
assert!(
406404
self.in_space(object),
407405
"Cannot mark an object {} that was not alloced by malloc.",

src/policy/marksweepspace/native_ms/block.rs

Lines changed: 8 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -287,7 +287,8 @@ impl Block {
287287
while cell + cell_size <= self.start() + Block::BYTES {
288288
// The invariants we checked earlier ensure that we can use cell and object reference interchangeably
289289
// We may not really have an object in this cell, but if we do, this object reference is correct.
290-
let potential_object = ObjectReference::from_raw_address(cell);
290+
// About unsafe: We know `cell` is non-zero here.
291+
let potential_object = unsafe { ObjectReference::from_raw_address_unchecked(cell) };
291292

292293
if !VM::VMObjectModel::LOCAL_MARK_BIT_SPEC
293294
.is_marked::<VM>(potential_object, Ordering::SeqCst)
@@ -327,9 +328,12 @@ impl Block {
327328

328329
while cell + cell_size <= self.end() {
329330
// possible object ref
330-
let potential_object_ref = ObjectReference::from_raw_address(
331-
cursor + VM::VMObjectModel::OBJECT_REF_OFFSET_LOWER_BOUND,
332-
);
331+
let potential_object_ref = unsafe {
332+
// We know cursor plus an offset cannot be 0.
333+
ObjectReference::from_raw_address_unchecked(
334+
cursor + VM::VMObjectModel::OBJECT_REF_OFFSET_LOWER_BOUND,
335+
)
336+
};
333337
trace!(
334338
"{:?}: cell = {}, last cell in free list = {}, cursor = {}, potential object = {}",
335339
self,

src/policy/marksweepspace/native_ms/global.rs

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -241,7 +241,6 @@ impl<VM: VMBinding> MarkSweepSpace<VM> {
241241
queue: &mut Q,
242242
object: ObjectReference,
243243
) -> ObjectReference {
244-
debug_assert!(!object.is_null());
245244
debug_assert!(
246245
self.in_space(object),
247246
"Cannot mark an object {} that was not alloced by free list allocator.",

src/scheduler/gc_work.rs

Lines changed: 6 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -274,7 +274,6 @@ impl<E: ProcessEdgesWork> ObjectTracer for ProcessEdgesWorkTracer<E> {
274274
/// Forward the `trace_object` call to the underlying `ProcessEdgesWork`,
275275
/// and flush as soon as the underlying buffer of `process_edges_work` is full.
276276
fn trace_object(&mut self, object: ObjectReference) -> ObjectReference {
277-
debug_assert!(!object.is_null());
278277
let result = self.process_edges_work.trace_object(object);
279278
self.flush_if_full();
280279
result
@@ -659,12 +658,11 @@ pub trait ProcessEdgesWork:
659658
/// Process an edge, including loading the object reference from the memory slot,
660659
/// trace the object and store back the new object reference if necessary.
661660
fn process_edge(&mut self, slot: EdgeOf<Self>) {
662-
let object = slot.load();
663-
if object.is_null() {
661+
let Some(object) = slot.load() else {
662+
// Skip slots that are not holding an object reference.
664663
return;
665-
}
664+
};
666665
let new_object = self.trace_object(object);
667-
debug_assert!(!new_object.is_null());
668666
if Self::OVERWRITE_REFERENCE && new_object != object {
669667
slot.store(new_object);
670668
}
@@ -722,8 +720,6 @@ impl<VM: VMBinding> ProcessEdgesWork for SFTProcessEdges<VM> {
722720
fn trace_object(&mut self, object: ObjectReference) -> ObjectReference {
723721
use crate::policy::sft::GCWorkerMutRef;
724722

725-
debug_assert!(!object.is_null());
726-
727723
// Erase <VM> type parameter
728724
let worker = GCWorkerMutRef::new(self.worker());
729725

@@ -996,20 +992,18 @@ impl<VM: VMBinding, P: PlanTraceObject<VM> + Plan<VM = VM>, const KIND: TraceKin
996992
}
997993

998994
fn trace_object(&mut self, object: ObjectReference) -> ObjectReference {
999-
debug_assert!(!object.is_null());
1000995
// We cannot borrow `self` twice in a call, so we extract `worker` as a local variable.
1001996
let worker = self.worker();
1002997
self.plan
1003998
.trace_object::<VectorObjectQueue, KIND>(&mut self.base.nodes, object, worker)
1004999
}
10051000

10061001
fn process_edge(&mut self, slot: EdgeOf<Self>) {
1007-
let object = slot.load();
1008-
if object.is_null() {
1002+
let Some(object) = slot.load() else {
1003+
// Skip slots that are not holding an object reference.
10091004
return;
1010-
}
1005+
};
10111006
let new_object = self.trace_object(object);
1012-
debug_assert!(!new_object.is_null());
10131007
if P::may_move_objects::<KIND>() && new_object != object {
10141008
slot.store(new_object);
10151009
}

src/util/address.rs

Lines changed: 20 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@ use bytemuck::NoUninit;
33

44
use std::fmt;
55
use std::mem;
6+
use std::num::NonZeroUsize;
67
use std::ops::*;
78
use std::sync::atomic::Ordering;
89

@@ -479,28 +480,33 @@ use crate::vm::VMBinding;
479480
/// the opaque `ObjectReference` type, and we haven't seen a use case for now.
480481
#[repr(transparent)]
481482
#[derive(Copy, Clone, Eq, Hash, PartialOrd, Ord, PartialEq, NoUninit)]
482-
pub struct ObjectReference(usize);
483+
pub struct ObjectReference(NonZeroUsize);
483484

484485
impl ObjectReference {
485-
/// The null object reference, represented as zero.
486-
pub const NULL: ObjectReference = ObjectReference(0);
487-
488486
/// Cast the object reference to its raw address. This method is mostly for the convenience of a binding.
489487
///
490488
/// MMTk should not make any assumption on the actual location of the address with the object reference.
491489
/// MMTk should not assume the address returned by this method is in our allocation. For the purposes of
492490
/// setting object metadata, MMTk should use [`crate::vm::ObjectModel::ref_to_address()`] or [`crate::vm::ObjectModel::ref_to_header()`].
493491
pub fn to_raw_address(self) -> Address {
494-
Address(self.0)
492+
Address(self.0.get())
495493
}
496494

497495
/// Cast a raw address to an object reference. This method is mostly for the convenience of a binding.
498496
/// This is how a binding creates `ObjectReference` instances.
499497
///
498+
/// If `addr` is 0, the result is `None`.
499+
///
500500
/// MMTk should not assume an arbitrary address can be turned into an object reference. MMTk can use [`crate::vm::ObjectModel::address_to_ref()`]
501501
/// to turn addresses that are from [`crate::vm::ObjectModel::ref_to_address()`] back to object.
502-
pub fn from_raw_address(addr: Address) -> ObjectReference {
503-
ObjectReference(addr.0)
502+
pub fn from_raw_address(addr: Address) -> Option<ObjectReference> {
503+
NonZeroUsize::new(addr.0).map(ObjectReference)
504+
}
505+
506+
/// Like `from_raw_address`, but assume `addr` is not zero.
507+
pub unsafe fn from_raw_address_unchecked(addr: Address) -> ObjectReference {
508+
debug_assert!(!addr.is_zero());
509+
ObjectReference(NonZeroUsize::new_unchecked(addr.0))
504510
}
505511

506512
/// Get the in-heap address from an object reference. This method is used by MMTk to get an in-heap address
@@ -541,54 +547,41 @@ impl ObjectReference {
541547
obj
542548
}
543549

544-
/// is this object reference null reference?
545-
pub fn is_null(self) -> bool {
546-
self.0 == 0
547-
}
548-
549550
/// returns the ObjectReference
550551
pub fn value(self) -> usize {
551-
self.0
552+
self.0.get()
552553
}
553554

554555
/// Is the object reachable, determined by the policy?
555556
/// Note: Objects in ImmortalSpace may have `is_live = true` but are actually unreachable.
556557
pub fn is_reachable(self) -> bool {
557-
if self.is_null() {
558-
false
559-
} else {
560-
unsafe { SFT_MAP.get_unchecked(Address(self.0)) }.is_reachable(self)
561-
}
558+
unsafe { SFT_MAP.get_unchecked(self.to_raw_address()) }.is_reachable(self)
562559
}
563560

564561
/// Is the object live, determined by the policy?
565562
pub fn is_live(self) -> bool {
566-
if self.0 == 0 {
567-
false
568-
} else {
569-
unsafe { SFT_MAP.get_unchecked(Address(self.0)) }.is_live(self)
570-
}
563+
unsafe { SFT_MAP.get_unchecked(self.to_raw_address()) }.is_live(self)
571564
}
572565

573566
/// Can the object be moved?
574567
pub fn is_movable(self) -> bool {
575-
unsafe { SFT_MAP.get_unchecked(Address(self.0)) }.is_movable()
568+
unsafe { SFT_MAP.get_unchecked(self.to_raw_address()) }.is_movable()
576569
}
577570

578571
/// Get forwarding pointer if the object is forwarded.
579572
pub fn get_forwarded_object(self) -> Option<Self> {
580-
unsafe { SFT_MAP.get_unchecked(Address(self.0)) }.get_forwarded_object(self)
573+
unsafe { SFT_MAP.get_unchecked(self.to_raw_address()) }.get_forwarded_object(self)
581574
}
582575

583576
/// Is the object in any MMTk spaces?
584577
pub fn is_in_any_space(self) -> bool {
585-
unsafe { SFT_MAP.get_unchecked(Address(self.0)) }.is_in_space(self)
578+
unsafe { SFT_MAP.get_unchecked(self.to_raw_address()) }.is_in_space(self)
586579
}
587580

588581
/// Is the object sane?
589582
#[cfg(feature = "sanity")]
590583
pub fn is_sane(self) -> bool {
591-
unsafe { SFT_MAP.get_unchecked(Address(self.0)) }.is_sane()
584+
unsafe { SFT_MAP.get_unchecked(self.to_raw_address()) }.is_sane()
592585
}
593586
}
594587

src/util/metadata/vo_bit/mod.rs

Lines changed: 8 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -100,7 +100,10 @@ pub fn is_vo_bit_set<VM: VMBinding>(object: ObjectReference) -> bool {
100100
/// Check if an address can be turned directly into an object reference using the VO bit.
101101
/// If so, return `Some(object)`. Otherwise return `None`.
102102
pub fn is_vo_bit_set_for_addr<VM: VMBinding>(address: Address) -> Option<ObjectReference> {
103-
let potential_object = ObjectReference::from_raw_address(address);
103+
let Some(potential_object) = ObjectReference::from_raw_address(address) else {
104+
return None;
105+
};
106+
104107
let addr = potential_object.to_address::<VM>();
105108

106109
// If we haven't mapped VO bit for the address, it cannot be an object
@@ -123,7 +126,10 @@ pub fn is_vo_bit_set_for_addr<VM: VMBinding>(address: Address) -> Option<ObjectR
123126
///
124127
/// This is unsafe: check the comment on `side_metadata::load`
125128
pub unsafe fn is_vo_bit_set_unsafe<VM: VMBinding>(address: Address) -> Option<ObjectReference> {
126-
let potential_object = ObjectReference::from_raw_address(address);
129+
let Some(potential_object) = ObjectReference::from_raw_address(address) else {
130+
return None;
131+
};
132+
127133
let addr = potential_object.to_address::<VM>();
128134

129135
// If we haven't mapped VO bit for the address, it cannot be an object

src/util/object_forwarding.rs

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -150,7 +150,9 @@ pub fn read_forwarding_pointer<VM: VMBinding>(object: ObjectReference) -> Object
150150

151151
// We write the forwarding pointer. We know it is an object reference.
152152
unsafe {
153-
ObjectReference::from_raw_address(crate::util::Address::from_usize(
153+
// We use "unchecked" conversion because we guarantee the forwarding pointer we stored
154+
// previously is from a valid `ObjectReference` which is never zero.
155+
ObjectReference::from_raw_address_unchecked(crate::util::Address::from_usize(
154156
VM::VMObjectModel::LOCAL_FORWARDING_POINTER_SPEC.load_atomic::<VM, usize>(
155157
object,
156158
Some(FORWARDING_POINTER_MASK),

0 commit comments

Comments
 (0)