interpret: remove support for uninitialized scalars #100043

Merged: 5 commits, Aug 27, 2022

Changes from all commits
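The changes below touch many call sites but follow one pattern: the `ScalarMaybeUninit` wrapper is removed, so interpreter reads either yield an initialized `Scalar` or report an error at the point of the read, and callers drop their `check_init()` calls. The following self-contained sketch uses simplified stand-in types (not the real rustc_const_eval API) to illustrate that before/after shape.

// Illustrative stand-ins only; the real types live in rustc_const_eval / rustc_middle.
#[derive(Debug, Clone, Copy, PartialEq)]
struct Scalar(u128);

#[derive(Debug, Clone, Copy, PartialEq)]
enum ScalarMaybeUninit {
    Scalar(Scalar),
    Uninit,
}

type InterpResult<T> = Result<T, &'static str>;

// Before: every read handed back a maybe-uninit value that callers had to check.
fn read_scalar_old(mem: Option<Scalar>) -> InterpResult<ScalarMaybeUninit> {
    Ok(mem.map_or(ScalarMaybeUninit::Uninit, ScalarMaybeUninit::Scalar))
}

// After: the read itself reports uninitialized memory as an error,
// so callers receive a plain `Scalar` and no longer call `check_init()`.
fn read_scalar_new(mem: Option<Scalar>) -> InterpResult<Scalar> {
    mem.ok_or("using uninitialized data: reading a scalar from uninit memory")
}

fn main() -> InterpResult<()> {
    assert_eq!(read_scalar_old(None)?, ScalarMaybeUninit::Uninit); // caller must still check
    assert!(read_scalar_new(None).is_err()); // the error now surfaces at the read
    assert_eq!(read_scalar_new(Some(Scalar(42)))?, Scalar(42));
    Ok(())
}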
7 changes: 2 additions & 5 deletions compiler/rustc_const_eval/src/const_eval/eval_queries.rs
@@ -3,7 +3,7 @@ use crate::interpret::eval_nullary_intrinsic;
use crate::interpret::{
intern_const_alloc_recursive, Allocation, ConstAlloc, ConstValue, CtfeValidationMode, GlobalId,
Immediate, InternKind, InterpCx, InterpResult, MPlaceTy, MemoryKind, OpTy, RefTracking,
ScalarMaybeUninit, StackPopCleanup,
StackPopCleanup,
};

use rustc_hir::def::DefKind;
@@ -170,10 +170,7 @@ pub(super) fn op_to_const<'tcx>(
// see comment on `let try_as_immediate` above
Err(imm) => match *imm {
_ if imm.layout.is_zst() => ConstValue::ZeroSized,
Immediate::Scalar(x) => match x {
ScalarMaybeUninit::Scalar(s) => ConstValue::Scalar(s),
ScalarMaybeUninit::Uninit => to_const_value(&op.assert_mem_place()),
},
Immediate::Scalar(x) => ConstValue::Scalar(x),
Immediate::ScalarPair(a, b) => {
debug!("ScalarPair(a: {:?}, b: {:?})", a, b);
// We know `offset` is relative to the allocation, so we can use `into_parts`.
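With uninitialized scalars gone, `Immediate::Scalar` holds a `Scalar` directly rather than a `ScalarMaybeUninit`, which is why the nested match in `op_to_const` collapses to a single arm. A minimal stand-alone sketch of that shape, using hypothetical mini-types rather than the real `Immediate`/`ConstValue` definitions:

#[derive(Debug, Clone, Copy)]
struct Scalar(u128);

// After this PR the scalar payloads of an immediate are always initialized.
#[derive(Debug, Clone, Copy)]
enum Immediate {
    Scalar(Scalar),
    ScalarPair(Scalar, Scalar),
}

#[derive(Debug)]
enum ConstValue {
    Scalar(Scalar),
    Pair(Scalar, Scalar),
}

// No fallback path for `Uninit` is needed any more: each arm maps directly.
fn imm_to_const(imm: Immediate) -> ConstValue {
    match imm {
        Immediate::Scalar(x) => ConstValue::Scalar(x),
        Immediate::ScalarPair(a, b) => ConstValue::Pair(a, b),
    }
}

fn main() {
    println!("{:?}", imm_to_const(Immediate::Scalar(Scalar(7))));
}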
4 changes: 2 additions & 2 deletions compiler/rustc_const_eval/src/const_eval/machine.rs
@@ -347,8 +347,8 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir,
};
match intrinsic_name {
sym::ptr_guaranteed_eq | sym::ptr_guaranteed_ne => {
let a = ecx.read_immediate(&args[0])?.to_scalar()?;
let b = ecx.read_immediate(&args[1])?.to_scalar()?;
let a = ecx.read_scalar(&args[0])?;
let b = ecx.read_scalar(&args[1])?;
let cmp = if intrinsic_name == sym::ptr_guaranteed_eq {
ecx.guaranteed_eq(a, b)?
} else {
15 changes: 5 additions & 10 deletions compiler/rustc_const_eval/src/const_eval/valtrees.rs
@@ -3,7 +3,7 @@ use super::machine::CompileTimeEvalContext;
use super::{ValTreeCreationError, ValTreeCreationResult, VALTREE_MAX_NODES};
use crate::interpret::{
intern_const_alloc_recursive, ConstValue, ImmTy, Immediate, InternKind, MemPlaceMeta,
MemoryKind, PlaceTy, Scalar, ScalarMaybeUninit,
MemoryKind, PlaceTy, Scalar,
};
use crate::interpret::{MPlaceTy, Value};
use rustc_middle::ty::{self, ScalarInt, Ty, TyCtxt};
@@ -90,7 +90,7 @@ pub(crate) fn const_to_valtree_inner<'tcx>(
let Ok(val) = ecx.read_immediate(&place.into()) else {
return Err(ValTreeCreationError::Other);
};
let val = val.to_scalar().unwrap();
let val = val.to_scalar();
*num_nodes += 1;

Ok(ty::ValTree::Leaf(val.assert_int()))
@@ -349,11 +349,7 @@ fn valtree_into_mplace<'tcx>(
ty::Bool | ty::Int(_) | ty::Uint(_) | ty::Float(_) | ty::Char => {
let scalar_int = valtree.unwrap_leaf();
debug!("writing trivial valtree {:?} to place {:?}", scalar_int, place);
ecx.write_immediate(
Immediate::Scalar(ScalarMaybeUninit::Scalar(scalar_int.into())),
&place.into(),
)
.unwrap();
ecx.write_immediate(Immediate::Scalar(scalar_int.into()), &place.into()).unwrap();
}
ty::Ref(_, inner_ty, _) => {
let mut pointee_place = create_pointee_place(ecx, *inner_ty, valtree);
@@ -366,11 +362,10 @@
let imm = match inner_ty.kind() {
ty::Slice(_) | ty::Str => {
let len = valtree.unwrap_branch().len();
let len_scalar =
ScalarMaybeUninit::Scalar(Scalar::from_machine_usize(len as u64, &tcx));
let len_scalar = Scalar::from_machine_usize(len as u64, &tcx);

Immediate::ScalarPair(
ScalarMaybeUninit::from_maybe_pointer((*pointee_place).ptr, &tcx),
Scalar::from_maybe_pointer((*pointee_place).ptr, &tcx),
len_scalar,
)
}
18 changes: 9 additions & 9 deletions compiler/rustc_const_eval/src/interpret/cast.rs
@@ -123,10 +123,10 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
match src.layout.ty.kind() {
// Floating point
Float(FloatTy::F32) => {
return Ok(self.cast_from_float(src.to_scalar()?.to_f32()?, cast_ty).into());
return Ok(self.cast_from_float(src.to_scalar().to_f32()?, cast_ty).into());
}
Float(FloatTy::F64) => {
return Ok(self.cast_from_float(src.to_scalar()?.to_f64()?, cast_ty).into());
return Ok(self.cast_from_float(src.to_scalar().to_f64()?, cast_ty).into());
}
// The rest is integer/pointer-"like", including fn ptr casts
_ => assert!(
@@ -153,7 +153,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
assert_eq!(dest_layout.size, self.pointer_size());
assert!(src.layout.ty.is_unsafe_ptr());
return match **src {
Immediate::ScalarPair(data, _) => Ok(data.check_init()?.into()),
Immediate::ScalarPair(data, _) => Ok(data.into()),
Immediate::Scalar(..) => span_bug!(
self.cur_span(),
"{:?} input to a fat-to-thin cast ({:?} -> {:?})",
@@ -167,7 +167,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
}

// # The remaining source values are scalar and "int-like".
let scalar = src.to_scalar()?;
let scalar = src.to_scalar();
Ok(self.cast_from_int_like(scalar, src.layout, cast_ty)?.into())
}

@@ -179,7 +179,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
assert_matches!(src.layout.ty.kind(), ty::RawPtr(_) | ty::FnPtr(_));
assert!(cast_ty.is_integral());

let scalar = src.to_scalar()?;
let scalar = src.to_scalar();
let ptr = scalar.to_pointer(self)?;
match ptr.into_pointer_or_addr() {
Ok(ptr) => M::expose_ptr(self, ptr)?,
@@ -197,7 +197,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
assert_matches!(cast_ty.kind(), ty::RawPtr(_));

// First cast to usize.
let scalar = src.to_scalar()?;
let scalar = src.to_scalar();
let addr = self.cast_from_int_like(scalar, src.layout, self.tcx.types.usize)?;
let addr = addr.to_machine_usize(self)?;

@@ -291,7 +291,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {

match (&src_pointee_ty.kind(), &dest_pointee_ty.kind()) {
(&ty::Array(_, length), &ty::Slice(_)) => {
let ptr = self.read_immediate(src)?.to_scalar()?;
let ptr = self.read_scalar(src)?;
// u64 cast is from usize to u64, which is always good
let val =
Immediate::new_slice(ptr, length.eval_usize(*self.tcx, self.param_env), self);
@@ -303,7 +303,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// A NOP cast that doesn't actually change anything, should be allowed even with mismatching vtables.
return self.write_immediate(*val, dest);
}
let (old_data, old_vptr) = val.to_scalar_pair()?;
let (old_data, old_vptr) = val.to_scalar_pair();
let old_vptr = old_vptr.to_pointer(self)?;
let (ty, old_trait) = self.get_ptr_vtable(old_vptr)?;
if old_trait != data_a.principal() {
@@ -315,7 +315,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
(_, &ty::Dynamic(ref data, _)) => {
// Initial cast from sized to dyn trait
let vtable = self.get_vtable_ptr(src_pointee_ty, data.principal())?;
let ptr = self.read_immediate(src)?.to_scalar()?;
let ptr = self.read_scalar(src)?;
let val = Immediate::new_dyn_trait(ptr, vtable, &*self.tcx);
self.write_immediate(val, dest)
}
8 changes: 4 additions & 4 deletions compiler/rustc_const_eval/src/interpret/eval_context.rs
@@ -21,7 +21,7 @@ use rustc_target::abi::{call::FnAbi, Align, HasDataLayout, Size, TargetDataLayou
use super::{
AllocId, GlobalId, Immediate, InterpErrorInfo, InterpResult, MPlaceTy, Machine, MemPlace,
MemPlaceMeta, Memory, MemoryKind, Operand, Place, PlaceTy, PointerArithmetic, Provenance,
Scalar, ScalarMaybeUninit, StackPopJump,
Scalar, StackPopJump,
};
use crate::transform::validate::equal_up_to_regions;

@@ -991,16 +991,16 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> std::fmt::Debug
}
LocalValue::Live(Operand::Immediate(Immediate::Scalar(val))) => {
write!(fmt, " {:?}", val)?;
if let ScalarMaybeUninit::Scalar(Scalar::Ptr(ptr, _size)) = val {
if let Scalar::Ptr(ptr, _size) = val {
allocs.push(ptr.provenance.get_alloc_id());
}
}
LocalValue::Live(Operand::Immediate(Immediate::ScalarPair(val1, val2))) => {
write!(fmt, " ({:?}, {:?})", val1, val2)?;
if let ScalarMaybeUninit::Scalar(Scalar::Ptr(ptr, _size)) = val1 {
if let Scalar::Ptr(ptr, _size) = val1 {
allocs.push(ptr.provenance.get_alloc_id());
}
if let ScalarMaybeUninit::Scalar(Scalar::Ptr(ptr, _size)) = val2 {
if let Scalar::Ptr(ptr, _size) = val2 {
allocs.push(ptr.provenance.get_alloc_id());
}
}
12 changes: 6 additions & 6 deletions compiler/rustc_const_eval/src/interpret/intrinsics.rs
@@ -184,7 +184,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
| sym::bitreverse => {
let ty = substs.type_at(0);
let layout_of = self.layout_of(ty)?;
let val = self.read_scalar(&args[0])?.check_init()?;
let val = self.read_scalar(&args[0])?;
let bits = val.to_bits(layout_of.size)?;
let kind = match layout_of.abi {
Abi::Scalar(scalar) => scalar.primitive(),
@@ -256,7 +256,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
let (val, overflowed, _ty) = self.overflowing_binary_op(bin_op, &l, &r)?;
if overflowed {
let layout = self.layout_of(substs.type_at(0))?;
let r_val = r.to_scalar()?.to_bits(layout.size)?;
let r_val = r.to_scalar().to_bits(layout.size)?;
if let sym::unchecked_shl | sym::unchecked_shr = intrinsic_name {
throw_ub_format!("overflowing shift by {} in `{}`", r_val, intrinsic_name);
} else {
@@ -269,9 +269,9 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// rotate_left: (X << (S % BW)) | (X >> ((BW - S) % BW))
// rotate_right: (X << ((BW - S) % BW)) | (X >> (S % BW))
let layout = self.layout_of(substs.type_at(0))?;
let val = self.read_scalar(&args[0])?.check_init()?;
let val = self.read_scalar(&args[0])?;
let val_bits = val.to_bits(layout.size)?;
let raw_shift = self.read_scalar(&args[1])?.check_init()?;
let raw_shift = self.read_scalar(&args[1])?;
let raw_shift_bits = raw_shift.to_bits(layout.size)?;
let width_bits = u128::from(layout.size.bits());
let shift_bits = raw_shift_bits % width_bits;
@@ -507,7 +507,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
self.copy_op(&args[0], dest, /*allow_transmute*/ false)?;
}
sym::assume => {
let cond = self.read_scalar(&args[0])?.check_init()?.to_bool()?;
let cond = self.read_scalar(&args[0])?.to_bool()?;
if !cond {
throw_ub_format!("`assume` intrinsic called with `false`");
}
@@ -570,7 +570,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// term since the sign of the second term can be inferred from this and
// the fact that the operation has overflowed (if either is 0 no
// overflow can occur)
let first_term: u128 = l.to_scalar()?.to_bits(l.layout.size)?;
let first_term: u128 = l.to_scalar().to_bits(l.layout.size)?;
let first_term_positive = first_term & (1 << (num_bits - 1)) == 0;
if first_term_positive {
// Negative overflow not possible since the positive first term
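The rotate intrinsics in the hunk above are evaluated with the bit-twiddling formulas quoted in their comment. A tiny stand-alone check of the `rotate_left` formula, specialized to `u8` purely for illustration (this helper is not part of the PR):

// rotate_left: (X << (S % BW)) | (X >> ((BW - S) % BW)), here with BW = 8.
fn rotate_left_u8(x: u8, s: u32) -> u8 {
    let bw = 8u32;
    let s = s % bw;
    // Guard the s == 0 case so neither shift amount reaches the bit width.
    if s == 0 { x } else { (x << s) | (x >> (bw - s)) }
}

fn main() {
    assert_eq!(rotate_left_u8(0b1000_0001, 1), 0b0000_0011);
    // Matches the standard library's rotate.
    assert_eq!(rotate_left_u8(0b1000_0001, 3), 0b1000_0001u8.rotate_left(3));
}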
18 changes: 5 additions & 13 deletions compiler/rustc_const_eval/src/interpret/machine.rs
@@ -123,18 +123,15 @@ pub trait Machine<'mir, 'tcx>: Sized {
/// Whether memory accesses should be alignment-checked.
fn enforce_alignment(ecx: &InterpCx<'mir, 'tcx, Self>) -> bool;

/// Whether, when checking alignment, we should `force_int` and thus support
/// Whether, when checking alignment, we should look at the actual address and thus support
/// custom alignment logic based on whatever the integer address happens to be.
///
/// Requires Provenance::OFFSET_IS_ADDR to be true.
fn force_int_for_alignment_check(ecx: &InterpCx<'mir, 'tcx, Self>) -> bool;
/// If this returns true, Provenance::OFFSET_IS_ADDR must be true.
fn use_addr_for_alignment_check(ecx: &InterpCx<'mir, 'tcx, Self>) -> bool;

/// Whether to enforce the validity invariant
fn enforce_validity(ecx: &InterpCx<'mir, 'tcx, Self>) -> bool;

/// Whether to enforce integers and floats being initialized.
fn enforce_number_init(ecx: &InterpCx<'mir, 'tcx, Self>) -> bool;

/// Whether function calls should be [ABI](CallAbi)-checked.
fn enforce_abi(_ecx: &InterpCx<'mir, 'tcx, Self>) -> bool {
true
@@ -437,16 +434,11 @@ pub macro compile_time_machine(<$mir: lifetime, $tcx: lifetime>) {
type FrameExtra = ();

#[inline(always)]
fn force_int_for_alignment_check(_ecx: &InterpCx<$mir, $tcx, Self>) -> bool {
// We do not support `force_int`.
fn use_addr_for_alignment_check(_ecx: &InterpCx<$mir, $tcx, Self>) -> bool {
// We do not support `use_addr`.
false
}

#[inline(always)]
fn enforce_number_init(_ecx: &InterpCx<$mir, $tcx, Self>) -> bool {
true
}

#[inline(always)]
fn checked_binop_checks_overflow(_ecx: &InterpCx<$mir, $tcx, Self>) -> bool {
true
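For `Machine` implementors the trait change above amounts to implementing the renamed alignment hook and deleting `enforce_number_init`. A rough sketch of the post-PR shape, using a heavily trimmed stand-in trait rather than the real `Machine` (which has many more items and different signatures):

// Hypothetical, heavily trimmed stand-in for the real `Machine` trait.
trait MachineHooks {
    /// Whether, when checking alignment, we should look at the actual address and
    /// thus support custom alignment logic based on the integer address.
    /// If this returns true, provenance offsets must be absolute addresses.
    fn use_addr_for_alignment_check(&self) -> bool;
    // Note: there is no `enforce_number_init` hook any more; integers and floats
    // must always be initialized after this PR.
}

struct CompileTimeMachine;

impl MachineHooks for CompileTimeMachine {
    fn use_addr_for_alignment_check(&self) -> bool {
        // CTFE pointers carry no concrete address, so this stays `false`,
        // mirroring the `compile_time_machine` macro in the diff above.
        false
    }
}

fn main() {
    assert!(!CompileTimeMachine.use_addr_for_alignment_check());
}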
32 changes: 9 additions & 23 deletions compiler/rustc_const_eval/src/interpret/memory.rs
@@ -21,7 +21,6 @@ use rustc_target::abi::{Align, HasDataLayout, Size};
use super::{
alloc_range, AllocId, AllocMap, AllocRange, Allocation, CheckInAllocMsg, GlobalAlloc, InterpCx,
InterpResult, Machine, MayLeak, Pointer, PointerArithmetic, Provenance, Scalar,
ScalarMaybeUninit,
};

#[derive(Debug, PartialEq, Copy, Clone)]
@@ -445,8 +444,8 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// Test align. Check this last; if both bounds and alignment are violated
// we want the error to be about the bounds.
if let Some(align) = align {
if M::force_int_for_alignment_check(self) {
// `force_int_for_alignment_check` can only be true if `OFFSET_IS_ADDR` is true.
if M::use_addr_for_alignment_check(self) {
// `use_addr_for_alignment_check` can only be true if `OFFSET_IS_ADDR` is true.
check_offset_align(ptr.addr().bytes(), align)?;
} else {
// Check allocation alignment and offset alignment.
@@ -901,11 +900,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> std::fmt::Debug for DumpAllocs<'a,
/// Reading and writing.
impl<'tcx, 'a, Prov: Provenance, Extra> AllocRefMut<'a, 'tcx, Prov, Extra> {
/// `range` is relative to this allocation reference, not the base of the allocation.
pub fn write_scalar(
&mut self,
range: AllocRange,
val: ScalarMaybeUninit<Prov>,
) -> InterpResult<'tcx> {
pub fn write_scalar(&mut self, range: AllocRange, val: Scalar<Prov>) -> InterpResult<'tcx> {
let range = self.range.subrange(range);
debug!("write_scalar at {:?}{range:?}: {val:?}", self.alloc_id);
Ok(self
@@ -915,11 +910,7 @@ impl<'tcx, 'a, Prov: Provenance, Extra> AllocRefMut<'a, 'tcx, Prov, Extra> {
}

/// `offset` is relative to this allocation reference, not the base of the allocation.
pub fn write_ptr_sized(
&mut self,
offset: Size,
val: ScalarMaybeUninit<Prov>,
) -> InterpResult<'tcx> {
pub fn write_ptr_sized(&mut self, offset: Size, val: Scalar<Prov>) -> InterpResult<'tcx> {
self.write_scalar(alloc_range(offset, self.tcx.data_layout().pointer_size), val)
}

@@ -938,7 +929,7 @@ impl<'tcx, 'a, Prov: Provenance, Extra> AllocRef<'a, 'tcx, Prov, Extra> {
&self,
range: AllocRange,
read_provenance: bool,
) -> InterpResult<'tcx, ScalarMaybeUninit<Prov>> {
) -> InterpResult<'tcx, Scalar<Prov>> {
let range = self.range.subrange(range);
let res = self
.alloc
@@ -949,28 +940,23 @@ impl<'tcx, 'a, Prov: Provenance, Extra> AllocRef<'a, 'tcx, Prov, Extra> {
}

/// `range` is relative to this allocation reference, not the base of the allocation.
pub fn read_integer(&self, range: AllocRange) -> InterpResult<'tcx, ScalarMaybeUninit<Prov>> {
pub fn read_integer(&self, range: AllocRange) -> InterpResult<'tcx, Scalar<Prov>> {
self.read_scalar(range, /*read_provenance*/ false)
}

/// `offset` is relative to this allocation reference, not the base of the allocation.
pub fn read_pointer(&self, offset: Size) -> InterpResult<'tcx, ScalarMaybeUninit<Prov>> {
pub fn read_pointer(&self, offset: Size) -> InterpResult<'tcx, Scalar<Prov>> {
self.read_scalar(
alloc_range(offset, self.tcx.data_layout().pointer_size),
/*read_provenance*/ true,
)
}

/// `range` is relative to this allocation reference, not the base of the allocation.
pub fn check_bytes(
&self,
range: AllocRange,
allow_uninit: bool,
allow_ptr: bool,
) -> InterpResult<'tcx> {
pub fn check_bytes(&self, range: AllocRange) -> InterpResult<'tcx> {
Ok(self
.alloc
.check_bytes(&self.tcx, self.range.subrange(range), allow_uninit, allow_ptr)
.check_bytes(&self.tcx, self.range.subrange(range))
.map_err(|e| e.to_interp_error(self.alloc_id))?)
}

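The allocation-level accessors in memory.rs follow the same pattern: `write_scalar` and `write_ptr_sized` now take a plain `Scalar`, the readers return one, and `check_bytes` loses its `allow_uninit`/`allow_ptr` flags. A stand-alone sketch of the post-PR calling convention, with a toy buffer standing in for a real `Allocation`:

type InterpResult<T> = Result<T, String>;

#[derive(Debug, Clone, Copy, PartialEq)]
struct Scalar(u64);

// Toy stand-in for an allocation reference: each slot is either initialized or not.
struct AllocRef {
    slots: Vec<Option<u64>>,
}

impl AllocRef {
    fn write_scalar(&mut self, idx: usize, val: Scalar) -> InterpResult<()> {
        self.slots[idx] = Some(val.0);
        Ok(())
    }

    // Reads now fail eagerly instead of returning a `ScalarMaybeUninit::Uninit`.
    fn read_scalar(&self, idx: usize) -> InterpResult<Scalar> {
        self.slots[idx]
            .map(Scalar)
            .ok_or_else(|| format!("reading uninitialized memory at offset {idx}"))
    }
}

fn main() -> InterpResult<()> {
    let mut alloc = AllocRef { slots: vec![None; 4] };
    alloc.write_scalar(0, Scalar(7))?;
    assert_eq!(alloc.read_scalar(0)?, Scalar(7));
    assert!(alloc.read_scalar(1).is_err()); // an uninit read is an error, not a value
    Ok(())
}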