Remove unchecked_shr/shl from intrinsics #451

Merged · 4 commits · Sep 15, 2018
rust-toolchain (2 changes: 1 addition & 1 deletion)
@@ -1 +1 @@
nightly-2018-09-01
nightly-2018-09-15
src/intrinsic.rs (102 changes: 0 additions & 102 deletions)
@@ -33,39 +33,6 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for EvalContext<'a, 'mir, 'tcx, super:

let intrinsic_name = &self.tcx.item_name(instance.def_id()).as_str()[..];
match intrinsic_name {
"add_with_overflow" => {
let l = self.read_value(args[0])?;
let r = self.read_value(args[1])?;
self.binop_with_overflow(
mir::BinOp::Add,
l,
r,
dest,
)?
}

"sub_with_overflow" => {
let l = self.read_value(args[0])?;
let r = self.read_value(args[1])?;
self.binop_with_overflow(
mir::BinOp::Sub,
l,
r,
dest,
)?
}

"mul_with_overflow" => {
let l = self.read_value(args[0])?;
let r = self.read_value(args[1])?;
self.binop_with_overflow(
mir::BinOp::Mul,
l,
r,
dest,
)?
}

"arith_offset" => {
let offset = self.read_scalar(args[1])?.to_isize(&self)?;
let ptr = self.read_scalar(args[0])?.not_undef()?;
@@ -326,39 +293,6 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for EvalContext<'a, 'mir, 'tcx, super:
self.write_scalar(result_ptr, dest)?;
}

"overflowing_sub" => {
let l = self.read_value(args[0])?;
let r = self.read_value(args[1])?;
self.binop_ignore_overflow(
mir::BinOp::Sub,
l,
r,
dest,
)?;
}

"overflowing_mul" => {
let l = self.read_value(args[0])?;
let r = self.read_value(args[1])?;
self.binop_ignore_overflow(
mir::BinOp::Mul,
r,
l,
dest,
)?;
}

"overflowing_add" => {
let l = self.read_value(args[0])?;
let r = self.read_value(args[1])?;
self.binop_ignore_overflow(
mir::BinOp::Add,
r,
l,
dest,
)?;
}

"powf32" => {
let f = self.read_scalar(args[0])?.to_f32()?;
let f2 = self.read_scalar(args[1])?.to_f32()?;
@@ -443,42 +377,6 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for EvalContext<'a, 'mir, 'tcx, super:
self.write_value(value, dest)?;
}

"unchecked_shl" => {
let bits = dest.layout.size.bytes() as u128 * 8;
let l = self.read_value(args[0])?;
let r = self.read_value(args[1])?;
let rval = r.to_scalar()?.to_bytes()?;
if rval >= bits {
return err!(Intrinsic(
format!("Overflowing shift by {} in unchecked_shl", rval),
));
}
self.binop_ignore_overflow(
mir::BinOp::Shl,
l,
r,
dest,
)?;
}

"unchecked_shr" => {
let bits = dest.layout.size.bytes() as u128 * 8;
let l = self.read_value(args[0])?;
let r = self.read_value(args[1])?;
let rval = r.to_scalar()?.to_bytes()?;
if rval >= bits {
return err!(Intrinsic(
format!("Overflowing shift by {} in unchecked_shr", rval),
));
}
self.binop_ignore_overflow(
mir::BinOp::Shr,
l,
r,
dest,
)?;
}

"unchecked_div" => {
let l = self.read_value(args[0])?;
let r = self.read_value(args[1])?;
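All of the match arms removed above cover operations whose behaviour stable Rust already exposes on the integer types, which is presumably why they no longer need Miri-specific handling after the toolchain bump in this PR. As a rough illustration only, here is what those arms computed, written as plain Rust on `u32` rather than as `EvalContext` code (the function names are invented for this sketch):

```rust
// Rough sketch of the semantics of the removed intrinsic arms, shown on u32.
// These are illustrative helpers, not Miri interpreter code.

fn add_with_overflow_sketch(l: u32, r: u32) -> (u32, bool) {
    // `binop_with_overflow` wrote back the wrapped result plus an overflow flag.
    l.overflowing_add(r)
}

fn overflowing_add_sketch(l: u32, r: u32) -> u32 {
    // `binop_ignore_overflow` kept only the wrapped result.
    l.wrapping_add(r)
}

fn unchecked_shl_sketch(l: u32, r: u32) -> Result<u32, String> {
    // The deleted `rval >= bits` guard: shifting by at least the bit width is
    // undefined behaviour, so Miri reported it as an intrinsic error before shifting.
    let bits = (std::mem::size_of::<u32>() * 8) as u32;
    if r >= bits {
        return Err(format!("Overflowing shift by {} in unchecked_shl", r));
    }
    Ok(l << r)
}

fn main() {
    assert_eq!(add_with_overflow_sketch(u32::max_value(), 1), (0, true));
    assert_eq!(overflowing_add_sketch(u32::max_value(), 1), 0);
    assert!(unchecked_shl_sketch(1, 32).is_err());
}
```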
src/lib.rs (76 changes: 5 additions & 71 deletions)
@@ -18,14 +18,11 @@ use rustc::ty::layout::{TyLayout, LayoutOf, Size};
use rustc::hir::def_id::DefId;
use rustc::mir;

use rustc_data_structures::fx::FxHasher;

use syntax::ast::Mutability;
use syntax::attr;

use std::marker::PhantomData;
use std::collections::{HashMap, BTreeMap};
use std::hash::{Hash, Hasher};
use std::collections::HashMap;

pub use rustc::mir::interpret::*;
pub use rustc_mir::interpret::*;
@@ -43,7 +40,7 @@ use fn_call::EvalContextExt as MissingFnsEvalContextExt;
use operator::EvalContextExt as OperatorEvalContextExt;
use intrinsic::EvalContextExt as IntrinsicEvalContextExt;
use tls::EvalContextExt as TlsEvalContextExt;
use memory::MemoryKind as MiriMemoryKind;
use memory::{MemoryKind as MiriMemoryKind, TlsKey, TlsEntry, MemoryData};
use locks::LockInfo;
use range_map::RangeMap;
use helpers::FalibleScalarExt;
@@ -63,7 +60,7 @@ pub fn create_ecx<'a, 'mir: 'a, 'tcx: 'mir>(
let main_instance = ty::Instance::mono(ecx.tcx.tcx, main_id);
let main_mir = ecx.load_mir(main_instance.def)?;

if !main_mir.return_ty().is_nil() || main_mir.arg_count != 0 {
if !main_mir.return_ty().is_unit() || main_mir.arg_count != 0 {
return err!(Unimplemented(
"miri does not support main functions without `fn()` type signatures"
.to_owned(),
@@ -214,75 +211,12 @@ pub struct Evaluator<'tcx> {
_dummy : PhantomData<&'tcx ()>,
}

impl<'tcx> Hash for Evaluator<'tcx> {
fn hash<H: Hasher>(&self, state: &mut H) {
let Evaluator {
env_vars,
_dummy: _,
} = self;

env_vars.iter()
.map(|(env, ptr)| {
let mut h = FxHasher::default();
env.hash(&mut h);
ptr.hash(&mut h);
h.finish()
})
.fold(0u64, |acc, hash| acc.wrapping_add(hash))
.hash(state);
}
}

pub type TlsKey = u128;

#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
pub struct TlsEntry<'tcx> {
data: Scalar, // Will eventually become a map from thread IDs to `Scalar`s, if we ever support more than one thread.
dtor: Option<ty::Instance<'tcx>>,
}

#[derive(Clone, PartialEq, Eq)]
pub struct MemoryData<'tcx> {
/// The Key to use for the next thread-local allocation.
next_thread_local: TlsKey,

/// pthreads-style thread-local storage.
thread_local: BTreeMap<TlsKey, TlsEntry<'tcx>>,

/// Memory regions that are locked by some function
///
/// Only mutable (static mut, heap, stack) allocations have an entry in this map.
/// The entry is created when allocating the memory and deleted after deallocation.
locks: HashMap<AllocId, RangeMap<LockInfo<'tcx>>>,
}

impl<'tcx> MemoryData<'tcx> {
fn new() -> Self {
MemoryData {
next_thread_local: 1, // start with 1 as we must not use 0 on Windows
thread_local: BTreeMap::new(),
locks: HashMap::new(),
}
}
}

impl<'tcx> Hash for MemoryData<'tcx> {
fn hash<H: Hasher>(&self, state: &mut H) {
let MemoryData {
next_thread_local: _,
thread_local,
locks: _,
} = self;

thread_local.hash(state);
}
}

impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'tcx> {
type MemoryData = MemoryData<'tcx>;
type MemoryData = memory::MemoryData<'tcx>;
type MemoryKinds = memory::MemoryKind;

const MUT_STATIC_KIND: Option<memory::MemoryKind> = Some(memory::MemoryKind::MutStatic);
const DETECT_LOOPS: bool = false;

/// Returns Ok() when the function was handled, fail otherwise
fn find_fn<'a>(
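One thing that disappears from lib.rs along with the moved types is the hand-written `Hash` impls. They used a small idiom worth noting: because a `HashMap`'s iteration order is unspecified, each `(key, value)` entry was hashed on its own with `FxHasher` and the per-entry hashes were folded together with a wrapping add, so the final hash does not depend on iteration order. A self-contained sketch of the same idea, with std's `DefaultHasher` standing in for `FxHasher` (the function name is invented for illustration):

```rust
use std::collections::hash_map::DefaultHasher;
use std::collections::HashMap;
use std::hash::{Hash, Hasher};

/// Combine per-entry hashes with a wrapping add so the result does not
/// depend on the map's iteration order (mirrors the deleted impls above).
fn order_independent_hash<K: Hash, V: Hash, H: Hasher>(map: &HashMap<K, V>, state: &mut H) {
    map.iter()
        .map(|(k, v)| {
            let mut h = DefaultHasher::new();
            k.hash(&mut h);
            v.hash(&mut h);
            h.finish()
        })
        .fold(0u64, |acc, entry_hash| acc.wrapping_add(entry_hash))
        .hash(state);
}

fn main() {
    let mut env_vars = HashMap::new();
    env_vars.insert("MIRI_ENV", 1u32);
    env_vars.insert("PATH", 2u32);
    let mut state = DefaultHasher::new();
    order_independent_hash(&env_vars, &mut state);
    println!("hash = {:x}", state.finish());
}
```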
src/memory.rs (42 changes: 41 additions & 1 deletion)
@@ -1,4 +1,44 @@
#[derive(Debug, PartialEq, Copy, Clone, Hash, Eq)]
use std::collections::{HashMap, BTreeMap};

use rustc::ty;

use super::{AllocId, Scalar, LockInfo, RangeMap};

pub type TlsKey = u128;

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct TlsEntry<'tcx> {
pub(crate) data: Scalar, // Will eventually become a map from thread IDs to `Scalar`s, if we ever support more than one thread.
pub(crate) dtor: Option<ty::Instance<'tcx>>,
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub struct MemoryData<'tcx> {
/// The Key to use for the next thread-local allocation.
pub(crate) next_thread_local: TlsKey,

/// pthreads-style thread-local storage.
pub(crate) thread_local: BTreeMap<TlsKey, TlsEntry<'tcx>>,

/// Memory regions that are locked by some function
///
/// Only mutable (static mut, heap, stack) allocations have an entry in this map.
/// The entry is created when allocating the memory and deleted after deallocation.
pub(crate) locks: HashMap<AllocId, RangeMap<LockInfo<'tcx>>>,
}

impl<'tcx> MemoryData<'tcx> {
pub(crate) fn new() -> Self {
MemoryData {
next_thread_local: 1, // start with 1 as we must not use 0 on Windows
thread_local: BTreeMap::new(),
locks: HashMap::new(),
}
}
}


#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum MemoryKind {
/// `__rust_alloc` memory
Rust,
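For orientation, `next_thread_local` in the moved `MemoryData` is the counter from which fresh pthreads-style TLS keys are handed out, starting at 1 because key 0 must not be used on Windows (per the comment carried over above). Purely as an illustration of how such a counter is typically consumed, here is a hypothetical standalone sketch; the struct and method names are invented and none of this is code from the PR:

```rust
use std::collections::BTreeMap;

type TlsKey = u128;

/// Minimal stand-in for the TLS-related part of `MemoryData`, for illustration only.
struct TlsState {
    next_thread_local: TlsKey,
    thread_local: BTreeMap<TlsKey, Option<u64>>, // stand-in for the Scalar/dtor entry
}

impl TlsState {
    fn new() -> Self {
        // Start at 1: key 0 must not be handed out on Windows.
        TlsState { next_thread_local: 1, thread_local: BTreeMap::new() }
    }

    /// Hand out the next unused key and register an empty slot for it.
    fn create_key(&mut self) -> TlsKey {
        let key = self.next_thread_local;
        self.next_thread_local += 1;
        self.thread_local.insert(key, None);
        key
    }
}

fn main() {
    let mut tls = TlsState::new();
    assert_eq!(tls.create_key(), 1);
    assert_eq!(tls.create_key(), 2);
    assert_eq!(tls.thread_local.len(), 2);
}
```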