
Commit 54ea0f9

Rollup merge of #78351 - RalfJung:validity-unsafe-cell, r=oli-obk
Move "mutable thing in const" check from interning to validity This moves the check for mutable things (such as `UnsafeCell` or `&mut`) in a`const` from interning to validity. That means we can give more targeted error messages (pointing out *where* the problem lies), and we can simplify interning a bit. Also fix the interning mode used for promoteds in statics. r? @oli-obk
2 parents 86a4a38 + 744dfd8 commit 54ea0f9
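For context, here is a minimal sketch (not part of this commit) of the property being enforced. Interior mutability directly in a `const`'s own value is fine, but interior mutability behind a reference in its final value is not; the static const checks normally reject the latter up front, and the check moved in this commit is the dynamic backstop applied to evaluation results, now reported by validity so it can point at the offending part of the value:

```rust
use std::cell::Cell;

// Fine: the `Cell` lives in the const's own value, which is copied into every
// use site, so no shared mutable memory is created at compile time.
const INIT: Cell<i32> = Cell::new(0);

// Not allowed: the final value would be a reference to memory containing an
// `UnsafeCell`. The static checks reject this up front (E0492); the check this
// commit moves into validity is the dynamic backstop for values that slip past
// them.
// const BAD: &Cell<i32> = &Cell::new(0);

fn main() {
    let c = INIT;
    c.set(1);
    assert_eq!(c.get(), 1);
}
```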

10 files changed (+135 -144 lines)


compiler/rustc_middle/src/mir/mod.rs

-12

@@ -210,16 +210,6 @@ pub struct Body<'tcx> {
     /// We hold in this field all the constants we are not able to evaluate yet.
     pub required_consts: Vec<Constant<'tcx>>,

-    /// The user may be writing e.g. `&[(SOME_CELL, 42)][i].1` and this would get promoted, because
-    /// we'd statically know that no thing with interior mutability will ever be available to the
-    /// user without some serious unsafe code. Now this means that our promoted is actually
-    /// `&[(SOME_CELL, 42)]` and the MIR using it will do the `&promoted[i].1` projection because
-    /// the index may be a runtime value. Such a promoted value is illegal because it has reachable
-    /// interior mutability. This flag just makes this situation very obvious where the previous
-    /// implementation without the flag hid this situation silently.
-    /// FIXME(oli-obk): rewrite the promoted during promotion to eliminate the cell components.
-    pub ignore_interior_mut_in_const_validation: bool,
-
     /// Does this body use generic parameters. This is used for the `ConstEvaluatable` check.
     ///
     /// Note that this does not actually mean that this body is not computable right now.
@@ -276,7 +266,6 @@ impl<'tcx> Body<'tcx> {
             var_debug_info,
             span,
             required_consts: Vec::new(),
-            ignore_interior_mut_in_const_validation: false,
             is_polymorphic: false,
             predecessor_cache: PredecessorCache::new(),
         };
@@ -306,7 +295,6 @@ impl<'tcx> Body<'tcx> {
             required_consts: Vec::new(),
             generator_kind: None,
             var_debug_info: Vec::new(),
-            ignore_interior_mut_in_const_validation: false,
             is_polymorphic: false,
             predecessor_cache: PredecessorCache::new(),
         };

compiler/rustc_mir/src/const_eval/eval_queries.rs

+24 -25

@@ -1,8 +1,8 @@
 use super::{CompileTimeEvalContext, CompileTimeInterpreter, ConstEvalErr, MemoryExtra};
 use crate::interpret::eval_nullary_intrinsic;
 use crate::interpret::{
-    intern_const_alloc_recursive, Allocation, ConstAlloc, ConstValue, GlobalId, Immediate,
-    InternKind, InterpCx, InterpResult, MPlaceTy, MemoryKind, OpTy, RefTracking, Scalar,
+    intern_const_alloc_recursive, Allocation, ConstAlloc, ConstValue, CtfeValidationMode, GlobalId,
+    Immediate, InternKind, InterpCx, InterpResult, MPlaceTy, MemoryKind, OpTy, RefTracking, Scalar,
     ScalarMaybeUninit, StackPopCleanup,
 };

@@ -59,23 +59,15 @@ fn eval_body_using_ecx<'mir, 'tcx>(
     ecx.run()?;

     // Intern the result
-    // FIXME: since the DefId of a promoted is the DefId of its owner, this
-    // means that promoteds in statics are actually interned like statics!
-    // However, this is also currently crucial because we promote mutable
-    // non-empty slices in statics to extend their lifetime, and this
-    // ensures that they are put into a mutable allocation.
-    // For other kinds of promoteds in statics (like array initializers), this is rather silly.
-    let intern_kind = match tcx.static_mutability(cid.instance.def_id()) {
-        Some(m) => InternKind::Static(m),
-        None if cid.promoted.is_some() => InternKind::Promoted,
-        _ => InternKind::Constant,
+    let intern_kind = if cid.promoted.is_some() {
+        InternKind::Promoted
+    } else {
+        match tcx.static_mutability(cid.instance.def_id()) {
+            Some(m) => InternKind::Static(m),
+            None => InternKind::Constant,
+        }
     };
-    intern_const_alloc_recursive(
-        ecx,
-        intern_kind,
-        ret,
-        body.ignore_interior_mut_in_const_validation,
-    );
+    intern_const_alloc_recursive(ecx, intern_kind, ret);

     debug!("eval_body_using_ecx done: {:?}", *ret);
     Ok(ret)
@@ -376,16 +368,23 @@ pub fn eval_to_allocation_raw_provider<'tcx>(
         // Since evaluation had no errors, valiate the resulting constant:
         let validation = try {
             // FIXME do not validate promoteds until a decision on
-            // https://github.com/rust-lang/rust/issues/67465 is made
+            // https://github.com/rust-lang/rust/issues/67465 and
+            // https://github.com/rust-lang/rust/issues/67534 is made.
+            // Promoteds can contain unexpected `UnsafeCell` and reference `static`s, but their
+            // otherwise restricted form ensures that this is still sound. We just lose the
+            // extra safety net of some of the dynamic checks. They can also contain invalid
+            // values, but since we do not usually check intermediate results of a computation
+            // for validity, it might be surprising to do that here.
             if cid.promoted.is_none() {
                 let mut ref_tracking = RefTracking::new(mplace);
+                let mut inner = false;
                 while let Some((mplace, path)) = ref_tracking.todo.pop() {
-                    ecx.const_validate_operand(
-                        mplace.into(),
-                        path,
-                        &mut ref_tracking,
-                        /*may_ref_to_static*/ ecx.memory.extra.can_access_statics,
-                    )?;
+                    let mode = match tcx.static_mutability(cid.instance.def_id()) {
+                        Some(_) => CtfeValidationMode::Regular, // a `static`
+                        None => CtfeValidationMode::Const { inner },
+                    };
+                    ecx.const_validate_operand(mplace.into(), path, &mut ref_tracking, mode)?;
+                    inner = true;
                 }
             }
         };
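A rough illustration (not from this diff) of what the two validation modes chosen above correspond to at the language level: a `static` is a single memory location and may contain interior mutability and references to other statics, while a `const` is inlined at every use site and must not carry either behind a reference in its final value. The items below are purely illustrative:

```rust
use std::sync::atomic::{AtomicI32, Ordering};

// Checked with the more permissive mode for statics (`CtfeValidationMode::Regular`
// above): a static is one location, so interior mutability in it is fine.
static COUNTER: AtomicI32 = AtomicI32::new(0);

// A static may also hold a reference to another static.
static ALIAS: &AtomicI32 = &COUNTER;

// A `const` is checked with `CtfeValidationMode::Const { inner }`; both of the
// following are rejected because the const's final value would reference a
// static or interior-mutable memory:
// const BAD_REF: &AtomicI32 = &COUNTER;
// const BAD_CELL: &AtomicI32 = &AtomicI32::new(0);

fn main() {
    ALIAS.store(7, Ordering::Relaxed);
    assert_eq!(COUNTER.load(Ordering::Relaxed), 7);
}
```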

compiler/rustc_mir/src/const_eval/mod.rs

+1 -1

@@ -29,7 +29,7 @@ pub(crate) fn const_caller_location(
     let mut ecx = mk_eval_cx(tcx, DUMMY_SP, ty::ParamEnv::reveal_all(), false);

     let loc_place = ecx.alloc_caller_location(file, line, col);
-    intern_const_alloc_recursive(&mut ecx, InternKind::Constant, loc_place, false);
+    intern_const_alloc_recursive(&mut ecx, InternKind::Constant, loc_place);
     ConstValue::Scalar(loc_place.ptr)
 }

compiler/rustc_mir/src/interpret/intern.rs

+28 -59

@@ -2,12 +2,23 @@
 //!
 //! After a const evaluation has computed a value, before we destroy the const evaluator's session
 //! memory, we need to extract all memory allocations to the global memory pool so they stay around.
+//!
+//! In principle, this is not very complicated: we recursively walk the final value, follow all the
+//! pointers, and move all reachable allocations to the global `tcx` memory. The only complication
+//! is picking the right mutability for the allocations in a `static` initializer: we want to make
+//! as many allocations as possible immutable so LLVM can put them into read-only memory. At the
+//! same time, we need to make memory that could be mutated by the program mutable to avoid
+//! incorrect compilations. To achieve this, we do a type-based traversal of the final value,
+//! tracking mutable and shared references and `UnsafeCell` to determine the current mutability.
+//! (In principle, we could skip this type-based part for `const` and promoteds, as they need to be
+//! always immutable. At least for `const` however we use this opportunity to reject any `const`
+//! that contains allocations whose mutability we cannot identify.)

 use super::validity::RefTracking;
 use rustc_data_structures::fx::{FxHashMap, FxHashSet};
 use rustc_hir as hir;
 use rustc_middle::mir::interpret::InterpResult;
-use rustc_middle::ty::{self, layout::TyAndLayout, query::TyCtxtAt, Ty};
+use rustc_middle::ty::{self, layout::TyAndLayout, Ty};
 use rustc_target::abi::Size;

 use rustc_ast::Mutability;
@@ -40,11 +51,6 @@ struct InternVisitor<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx>> {
     /// This field stores whether we are *currently* inside an `UnsafeCell`. This can affect
     /// the intern mode of references we encounter.
     inside_unsafe_cell: bool,
-
-    /// This flag is to avoid triggering UnsafeCells are not allowed behind references in constants
-    /// for promoteds.
-    /// It's a copy of `mir::Body`'s ignore_interior_mut_in_const_validation field
-    ignore_interior_mut_in_const: bool,
 }

 #[derive(Copy, Clone, Debug, PartialEq, Hash, Eq)]
@@ -53,22 +59,14 @@ enum InternMode {
     /// this is *immutable*, and below mutable references inside an `UnsafeCell`, this
     /// is *mutable*.
     Static(hir::Mutability),
-    /// The "base value" of a const, which can have `UnsafeCell` (as in `const FOO: Cell<i32>`),
-    /// but that interior mutability is simply ignored.
-    ConstBase,
-    /// The "inner values" of a const with references, where `UnsafeCell` is an error.
-    ConstInner,
+    /// A `const`.
+    Const,
 }

 /// Signalling data structure to ensure we don't recurse
 /// into the memory of other constants or statics
 struct IsStaticOrFn;

-fn mutable_memory_in_const(tcx: TyCtxtAt<'_>, kind: &str) {
-    // FIXME: show this in validation instead so we can point at where in the value the error is?
-    tcx.sess.span_err(tcx.span, &format!("mutable memory ({}) is not allowed in constant", kind));
-}
-
 /// Intern an allocation without looking at its children.
 /// `mode` is the mode of the environment where we found this pointer.
 /// `mutablity` is the mutability of the place to be interned; even if that says
@@ -129,9 +127,7 @@ fn intern_shallow<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx>>(
         // See const_eval::machine::MemoryExtra::can_access_statics for why
         // immutability is so important.

-        // There are no sensible checks we can do here; grep for `mutable_memory_in_const` to
-        // find the checks we are doing elsewhere to avoid even getting here for memory
-        // that "wants" to be mutable.
+        // Validation will ensure that there is no `UnsafeCell` on an immutable allocation.
         alloc.mutability = Mutability::Not;
     };
     // link the alloc id to the actual allocation
@@ -167,17 +163,13 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx>> ValueVisitor<'mir
         mplace: MPlaceTy<'tcx>,
         fields: impl Iterator<Item = InterpResult<'tcx, Self::V>>,
     ) -> InterpResult<'tcx> {
+        // ZSTs cannot contain pointers, so we can skip them.
+        if mplace.layout.is_zst() {
+            return Ok(());
+        }
+
         if let Some(def) = mplace.layout.ty.ty_adt_def() {
             if Some(def.did) == self.ecx.tcx.lang_items().unsafe_cell_type() {
-                if self.mode == InternMode::ConstInner && !self.ignore_interior_mut_in_const {
-                    // We do not actually make this memory mutable. But in case the user
-                    // *expected* it to be mutable, make sure we error. This is just a
-                    // sanity check to prevent users from accidentally exploiting the UB
-                    // they caused. It also helps us to find cases where const-checking
-                    // failed to prevent an `UnsafeCell` (but as `ignore_interior_mut_in_const`
-                    // shows that part is not airtight).
-                    mutable_memory_in_const(self.ecx.tcx, "`UnsafeCell`");
-                }
                 // We are crossing over an `UnsafeCell`, we can mutate again. This means that
                 // References we encounter inside here are interned as pointing to mutable
                 // allocations.
@@ -189,11 +181,6 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx>> ValueVisitor<'mir
             }
         }

-        // ZSTs cannot contain pointers, so we can skip them.
-        if mplace.layout.is_zst() {
-            return Ok(());
-        }
-
         self.walk_aggregate(mplace, fields)
     }

@@ -213,7 +200,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx>> ValueVisitor<'mir
             if let Scalar::Ptr(vtable) = mplace.meta.unwrap_meta() {
                 // Explicitly choose const mode here, since vtables are immutable, even
                 // if the reference of the fat pointer is mutable.
-                self.intern_shallow(vtable.alloc_id, InternMode::ConstInner, None);
+                self.intern_shallow(vtable.alloc_id, InternMode::Const, None);
             } else {
                 // Validation will error (with a better message) on an invalid vtable pointer.
                 // Let validation show the error message, but make sure it *does* error.
@@ -225,7 +212,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx>> ValueVisitor<'mir
         // Only recurse for allocation-backed pointers.
         if let Scalar::Ptr(ptr) = mplace.ptr {
             // Compute the mode with which we intern this. Our goal here is to make as many
-            // statics as we can immutable so they can be placed in const memory by LLVM.
+            // statics as we can immutable so they can be placed in read-only memory by LLVM.
             let ref_mode = match self.mode {
                 InternMode::Static(mutbl) => {
                     // In statics, merge outer mutability with reference mutability and
@@ -259,27 +246,11 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx>> ValueVisitor<'mir
                         }
                     }
                 }
-                InternMode::ConstBase | InternMode::ConstInner => {
-                    // Ignore `UnsafeCell`, everything is immutable. Do some sanity checking
-                    // for mutable references that we encounter -- they must all be ZST.
-                    // This helps to prevent users from accidentally exploiting UB that they
-                    // caused (by somehow getting a mutable reference in a `const`).
-                    if ref_mutability == Mutability::Mut {
-                        match referenced_ty.kind() {
-                            ty::Array(_, n) if n.eval_usize(*tcx, self.ecx.param_env) == 0 => {}
-                            ty::Slice(_)
-                                if mplace.meta.unwrap_meta().to_machine_usize(self.ecx)?
-                                    == 0 => {}
-                            _ => mutable_memory_in_const(tcx, "`&mut`"),
-                        }
-                    } else {
-                        // A shared reference. We cannot check `freeze` here due to references
-                        // like `&dyn Trait` that are actually immutable. We do check for
-                        // concrete `UnsafeCell` when traversing the pointee though (if it is
-                        // a new allocation, not yet interned).
-                    }
-                    // Go on with the "inner" rules.
-                    InternMode::ConstInner
+                InternMode::Const => {
+                    // Ignore `UnsafeCell`, everything is immutable. Validity does some sanity
+                    // checking for mutable references that we encounter -- they must all be
+                    // ZST.
+                    InternMode::Const
                 }
             };
             match self.intern_shallow(ptr.alloc_id, ref_mode, Some(referenced_ty)) {
@@ -318,7 +289,6 @@ pub fn intern_const_alloc_recursive<M: CompileTimeMachine<'mir, 'tcx>>(
     ecx: &mut InterpCx<'mir, 'tcx, M>,
     intern_kind: InternKind,
     ret: MPlaceTy<'tcx>,
-    ignore_interior_mut_in_const: bool,
 ) where
     'tcx: 'mir,
 {
@@ -327,7 +297,7 @@ pub fn intern_const_alloc_recursive<M: CompileTimeMachine<'mir, 'tcx>>(
         InternKind::Static(mutbl) => InternMode::Static(mutbl),
         // `Constant` includes array lengths.
         // `Promoted` includes non-`Copy` array initializers and `rustc_args_required_const` arguments.
-        InternKind::Constant | InternKind::Promoted => InternMode::ConstBase,
+        InternKind::Constant | InternKind::Promoted => InternMode::Const,
     };

     // Type based interning.
@@ -357,7 +327,6 @@ pub fn intern_const_alloc_recursive<M: CompileTimeMachine<'mir, 'tcx>>(
             ecx,
             mode,
             leftover_allocations,
-            ignore_interior_mut_in_const,
             inside_unsafe_cell: false,
         }
         .visit_value(mplace);
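To make the mutability decision described in the new module docs concrete, here is a small illustrative sketch (not part of the diff; the statics are hypothetical):

```rust
use std::sync::atomic::{AtomicU32, Ordering};

// No `UnsafeCell` is reachable and the allocation behind the reference is only
// reached through a shared reference, so interning can mark it immutable and
// LLVM may place it in read-only memory.
static TABLE: &[u32] = &[1, 2, 3];

// This value can be mutated at run time through the atomic's methods, so its
// memory must stay mutable to avoid miscompilation.
static FLAG: AtomicU32 = AtomicU32::new(0);

fn main() {
    assert_eq!(TABLE[1], 2);
    FLAG.store(7, Ordering::Relaxed);
    assert_eq!(FLAG.load(Ordering::Relaxed), 7);
}
```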

compiler/rustc_mir/src/interpret/mod.rs

+1 -1

@@ -24,7 +24,7 @@ pub use self::machine::{compile_time_machine, AllocMap, Machine, MayLeak, StackP
 pub use self::memory::{AllocCheck, FnVal, Memory, MemoryKind};
 pub use self::operand::{ImmTy, Immediate, OpTy, Operand};
 pub use self::place::{MPlaceTy, MemPlace, MemPlaceMeta, Place, PlaceTy};
-pub use self::validity::RefTracking;
+pub use self::validity::{CtfeValidationMode, RefTracking};
 pub use self::visitor::{MutValueVisitor, ValueVisitor};

 crate use self::intrinsics::eval_nullary_intrinsic;
