@@ -18,9 +18,9 @@ use super::{CanAccessStatics, CompileTimeEvalContext, CompileTimeInterpreter};
 use crate::errors;
 use crate::interpret::eval_nullary_intrinsic;
 use crate::interpret::{
-    intern_const_alloc_recursive, Allocation, ConstAlloc, ConstValue, CtfeValidationMode, GlobalId,
-    Immediate, InternKind, InterpCx, InterpError, InterpResult, MPlaceTy, MemoryKind, OpTy,
-    RefTracking, StackPopCleanup,
+    intern_const_alloc_recursive, ConstAlloc, ConstValue, CtfeValidationMode, GlobalId, Immediate,
+    InternKind, InterpCx, InterpError, InterpResult, MPlaceTy, MemoryKind, OpTy, RefTracking,
+    StackPopCleanup,
 };
 
 // Returns a pointer to where the result lives
@@ -105,8 +105,7 @@ pub(super) fn mk_eval_cx<'mir, 'tcx>(
     )
 }
 
-/// This function converts an interpreter value into a constant that is meant for use in the
-/// type system.
+/// This function converts an interpreter value into a MIR constant.
 #[instrument(skip(ecx), level = "debug")]
 pub(super) fn op_to_const<'tcx>(
     ecx: &CompileTimeEvalContext<'_, 'tcx>,
@@ -117,28 +116,25 @@ pub(super) fn op_to_const<'tcx>(
         return ConstValue::ZeroSized;
     }
 
-    // We do not have value optimizations for everything.
-    // Only scalars and slices, since they are very common.
-    let try_as_immediate = match op.layout.abi {
+    // All scalar types should be stored as `ConstValue::Scalar`. This is needed to make
+    // `ConstValue::try_to_scalar` efficient; we want that to work for *all* constants of scalar
+    // type (it's used throughout the compiler and having it work just on literals is not enough)
+    // and we want it to be fast (i.e., don't go to an `Allocation` and reconstruct the `Scalar`
+    // from its byte-serialized form).
+    let force_as_immediate = match op.layout.abi {
         Abi::Scalar(abi::Scalar::Initialized { .. }) => true,
-        Abi::ScalarPair(..) => match op.layout.ty.kind() {
-            ty::Ref(_, inner, _) => match *inner.kind() {
-                ty::Slice(elem) => elem == ecx.tcx.types.u8,
-                ty::Str => true,
-                _ => false,
-            },
-            _ => false,
-        },
+        // We don't *force* `ConstValue::Slice` for `ScalarPair`. This has the advantage that if the
+        // input `op` is a place, then turning it into a `ConstValue` and back into a `OpTy` will
+        // not have to generate any duplicate allocations (we preserve the original `AllocId` in
+        // `ConstValue::Indirect`). It means accessing the contents of a slice can be slow (since
+        // they can be stored as `ConstValue::Indirect`), but that's not relevant since we barely
+        // ever have to do this. (`try_get_slice_bytes_for_diagnostics` exists to provide this
+        // functionality.)
         _ => false,
     };
-    let immediate = if try_as_immediate {
+    let immediate = if force_as_immediate {
         Right(ecx.read_immediate(op).expect("normalization works on validated constants"))
     } else {
-        // It is guaranteed that any non-slice scalar pair is actually `Indirect` here.
-        // When we come back from raw const eval, we are always by-ref. The only way our op here is
-        // by-val is if we are in destructure_mir_constant, i.e., if this is (a field of) something that we
-        // "tried to make immediate" before. We wouldn't do that for non-slice scalar pairs or
-        // structs containing such.
         op.as_mplace_or_imm()
     };
 
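To make the selection logic in the hunk above concrete, here is a minimal, self-contained sketch. It is not rustc's real code: the `Abi` enum and `force_as_immediate` function below are simplified stand-ins for `rustc_target::abi::Abi` and the local variable of the same name. The point is only that scalar-ABI values are forced into immediate form, while scalar pairs and aggregates are left in whatever form the operand already has (so a by-ref operand stays by-ref and keeps its `AllocId`).

// Simplified stand-in for the layout ABI; not rustc's real `Abi` enum.
#[derive(Debug)]
enum Abi {
    Scalar,     // e.g. u32, bool, a thin reference
    ScalarPair, // e.g. &str or &[u8] (pointer + length)
    Aggregate,  // everything else
}

// Mirrors the shape of the new decision: only scalar-ABI values are forced to
// an immediate, so `ConstValue::try_to_scalar` stays cheap for all scalar
// constants; everything else is left as it is.
fn force_as_immediate(abi: &Abi) -> bool {
    matches!(abi, Abi::Scalar)
}

fn main() {
    for abi in [Abi::Scalar, Abi::ScalarPair, Abi::Aggregate] {
        println!("{abi:?}: force_as_immediate = {}", force_as_immediate(&abi));
    }
}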
@@ -151,25 +147,22 @@ pub(super) fn op_to_const<'tcx>(
             let alloc_id = alloc_id.expect("cannot have `fake` place fot non-ZST type");
             ConstValue::Indirect { alloc_id, offset }
         }
-        // see comment on `let try_as_immediate` above
+        // see comment on `let force_as_immediate` above
         Right(imm) => match *imm {
             Immediate::Scalar(x) => ConstValue::Scalar(x),
             Immediate::ScalarPair(a, b) => {
                 debug!("ScalarPair(a: {:?}, b: {:?})", a, b);
+                // FIXME: assert that this has an appropriate type.
+                // Currently we actually get here for non-[u8] slices during valtree construction!
+                let msg = "`op_to_const` on an immediate scalar pair must only be used on slice references to actually allocated memory";
                 // We know `offset` is relative to the allocation, so we can use `into_parts`.
-                let (data, start) = match a.to_pointer(ecx).unwrap().into_parts() {
-                    (Some(alloc_id), offset) => {
-                        (ecx.tcx.global_alloc(alloc_id).unwrap_memory(), offset.bytes())
-                    }
-                    (None, _offset) => (
-                        ecx.tcx.mk_const_alloc(Allocation::from_bytes_byte_aligned_immutable(
-                            b"" as &[u8],
-                        )),
-                        0,
-                    ),
-                };
-                let len = b.to_target_usize(ecx).unwrap();
-                let start = start.try_into().unwrap();
+                // We use `ConstValue::Slice` so that we don't have to generate an allocation for
+                // `ConstValue::Indirect` here.
+                let (alloc_id, offset) = a.to_pointer(ecx).expect(msg).into_parts();
+                let alloc_id = alloc_id.expect(msg);
+                let data = ecx.tcx.global_alloc(alloc_id).unwrap_memory();
+                let start = offset.bytes_usize();
+                let len = b.to_target_usize(ecx).expect(msg);
                 let len: usize = len.try_into().unwrap();
                 ConstValue::Slice { data, start, end: start + len }
             }
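As a rough illustration of the arithmetic in the new `ScalarPair` arm (plain Rust with a byte buffer standing in for the interned allocation; `slice_range` is a made-up helper, not a rustc API): the pointer half of the pair contributes the byte offset into the allocation, the length half contributes the element count, and `ConstValue::Slice` stores them as `start` and `end = start + len`.

// Made-up helper: compute the (start, end) byte range that would be stored in
// `ConstValue::Slice { data, start, end }` for a slice reference whose pointer
// points `offset_bytes` into its allocation and whose length is `len` elements
// (elements are bytes here, as for `&str` / `&[u8]`).
fn slice_range(offset_bytes: usize, len: usize) -> (usize, usize) {
    let start = offset_bytes;
    let end = start + len;
    (start, end)
}

fn main() {
    // e.g. a constant like `&"hello world"[6..]`: offset 6 into an 11-byte
    // allocation, length 5.
    let data: &[u8] = b"hello world";
    let (start, end) = slice_range(6, 5);
    assert_eq!(&data[start..end], b"world");
    println!("start = {start}, end = {end}");
}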