Skip to content

Commit 771e237

Browse files
committed
Move UndefMask and Relocations into allocation.rs
1 parent 69299cd commit 771e237

File tree

2 files changed

+138
-135
lines changed

2 files changed

+138
-135
lines changed

src/librustc/mir/interpret/allocation.rs

Lines changed: 134 additions & 2 deletions
Original file line number · Diff line number · Diff line change
@@ -11,8 +11,6 @@
1111
//! The virtual memory representation of the MIR interpreter
1212
1313
use super::{
14-
UndefMask,
15-
Relocations,
1614
EvalResult,
1715
Pointer,
1816
AllocId,
@@ -26,6 +24,10 @@ use super::{
2624
use ty::layout::{self, Size, Align};
2725
use syntax::ast::Mutability;
2826
use rustc_target::abi::HasDataLayout;
27+
use std::iter;
28+
use mir;
29+
use std::ops::{Deref, DerefMut};
30+
use rustc_data_structures::sorted_map::SortedMap;
2931

3032
/// Classifying memory accesses
3133
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
@@ -595,3 +597,133 @@ impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
595597
self.check_bounds_ptr(ptr.offset(size, cx)?, access)
596598
}
597599
}
600+
601+
602+
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable)]
603+
pub struct Relocations<Tag=(), Id=AllocId>(SortedMap<Size, (Tag, Id)>);
604+
605+
impl<Tag, Id> Relocations<Tag, Id> {
606+
pub fn new() -> Self {
607+
Relocations(SortedMap::new())
608+
}
609+
610+
// The caller must guarantee that the given relocations are already sorted
611+
// by address and contain no duplicates.
612+
pub fn from_presorted(r: Vec<(Size, (Tag, Id))>) -> Self {
613+
Relocations(SortedMap::from_presorted_elements(r))
614+
}
615+
}
616+
617+
impl<Tag> Deref for Relocations<Tag> {
618+
type Target = SortedMap<Size, (Tag, AllocId)>;
619+
620+
fn deref(&self) -> &Self::Target {
621+
&self.0
622+
}
623+
}
624+
625+
impl<Tag> DerefMut for Relocations<Tag> {
626+
fn deref_mut(&mut self) -> &mut Self::Target {
627+
&mut self.0
628+
}
629+
}
630+
631+
////////////////////////////////////////////////////////////////////////////////
632+
// Undefined byte tracking
633+
////////////////////////////////////////////////////////////////////////////////
634+
635+
type Block = u64;
636+
const BLOCK_SIZE: u64 = 64;
637+
638+
#[derive(Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable)]
639+
pub struct UndefMask {
640+
blocks: Vec<Block>,
641+
len: Size,
642+
}
643+
644+
impl_stable_hash_for!(struct mir::interpret::UndefMask{blocks, len});
645+
646+
impl UndefMask {
647+
pub fn new(size: Size) -> Self {
648+
let mut m = UndefMask {
649+
blocks: vec![],
650+
len: Size::ZERO,
651+
};
652+
m.grow(size, false);
653+
m
654+
}
655+
656+
/// Check whether the range `start..end` (end-exclusive) is entirely defined.
657+
///
658+
/// Returns `Ok(())` if it's defined. Otherwise returns the index of the byte
659+
/// at which the first undefined access begins.
660+
#[inline]
661+
pub fn is_range_defined(&self, start: Size, end: Size) -> Result<(), Size> {
662+
if end > self.len {
663+
return Err(self.len);
664+
}
665+
666+
let idx = (start.bytes()..end.bytes())
667+
.map(|i| Size::from_bytes(i))
668+
.find(|&i| !self.get(i));
669+
670+
match idx {
671+
Some(idx) => Err(idx),
672+
None => Ok(())
673+
}
674+
}
675+
676+
pub fn set_range(&mut self, start: Size, end: Size, new_state: bool) {
677+
let len = self.len;
678+
if end > len {
679+
self.grow(end - len, new_state);
680+
}
681+
self.set_range_inbounds(start, end, new_state);
682+
}
683+
684+
pub fn set_range_inbounds(&mut self, start: Size, end: Size, new_state: bool) {
685+
for i in start.bytes()..end.bytes() {
686+
self.set(Size::from_bytes(i), new_state);
687+
}
688+
}
689+
690+
#[inline]
691+
pub fn get(&self, i: Size) -> bool {
692+
let (block, bit) = bit_index(i);
693+
(self.blocks[block] & 1 << bit) != 0
694+
}
695+
696+
#[inline]
697+
pub fn set(&mut self, i: Size, new_state: bool) {
698+
let (block, bit) = bit_index(i);
699+
if new_state {
700+
self.blocks[block] |= 1 << bit;
701+
} else {
702+
self.blocks[block] &= !(1 << bit);
703+
}
704+
}
705+
706+
pub fn grow(&mut self, amount: Size, new_state: bool) {
707+
let unused_trailing_bits = self.blocks.len() as u64 * BLOCK_SIZE - self.len.bytes();
708+
if amount.bytes() > unused_trailing_bits {
709+
let additional_blocks = amount.bytes() / BLOCK_SIZE + 1;
710+
assert_eq!(additional_blocks as usize as u64, additional_blocks);
711+
self.blocks.extend(
712+
iter::repeat(0).take(additional_blocks as usize),
713+
);
714+
}
715+
let start = self.len;
716+
self.len += amount;
717+
self.set_range_inbounds(start, start + amount, new_state);
718+
}
719+
}
720+
721+
#[inline]
722+
fn bit_index(bits: Size) -> (usize, usize) {
723+
let bits = bits.bytes();
724+
let a = bits / BLOCK_SIZE;
725+
let b = bits % BLOCK_SIZE;
726+
assert_eq!(a as usize as u64, a);
727+
assert_eq!(b as usize as u64, b);
728+
(a as usize, b as usize)
729+
}

src/librustc/mir/interpret/mod.rs

Lines changed: 4 additions & 133 deletions
Original file line number · Diff line number · Diff line change
@@ -26,20 +26,20 @@ pub use self::error::{
2626

2727
pub use self::value::{Scalar, ConstValue};
2828

29-
pub use self::allocation::{Allocation, MemoryAccess, AllocationExtra};
29+
pub use self::allocation::{
30+
Allocation, MemoryAccess, AllocationExtra,
31+
Relocations, UndefMask,
32+
};
3033

3134
use std::fmt;
3235
use mir;
3336
use hir::def_id::DefId;
3437
use ty::{self, TyCtxt, Instance};
3538
use ty::layout::{self, HasDataLayout, Size};
3639
use middle::region;
37-
use std::iter;
3840
use std::io;
39-
use std::ops::{Deref, DerefMut};
4041
use std::hash::Hash;
4142
use rustc_serialize::{Encoder, Decodable, Encodable};
42-
use rustc_data_structures::sorted_map::SortedMap;
4343
use rustc_data_structures::fx::FxHashMap;
4444
use rustc_data_structures::sync::{Lock as Mutex, HashMapExt};
4545
use rustc_data_structures::tiny_list::TinyList;
@@ -525,35 +525,6 @@ impl<'tcx, M: fmt::Debug + Eq + Hash + Clone> AllocMap<'tcx, M> {
525525
}
526526
}
527527

528-
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable)]
529-
pub struct Relocations<Tag=(), Id=AllocId>(SortedMap<Size, (Tag, Id)>);
530-
531-
impl<Tag, Id> Relocations<Tag, Id> {
532-
pub fn new() -> Self {
533-
Relocations(SortedMap::new())
534-
}
535-
536-
// The caller must guarantee that the given relocations are already sorted
537-
// by address and contain no duplicates.
538-
pub fn from_presorted(r: Vec<(Size, (Tag, Id))>) -> Self {
539-
Relocations(SortedMap::from_presorted_elements(r))
540-
}
541-
}
542-
543-
impl<Tag> Deref for Relocations<Tag> {
544-
type Target = SortedMap<Size, (Tag, AllocId)>;
545-
546-
fn deref(&self) -> &Self::Target {
547-
&self.0
548-
}
549-
}
550-
551-
impl<Tag> DerefMut for Relocations<Tag> {
552-
fn deref_mut(&mut self) -> &mut Self::Target {
553-
&mut self.0
554-
}
555-
}
556-
557528
////////////////////////////////////////////////////////////////////////////////
558529
// Methods to access integers in the target endianness
559530
////////////////////////////////////////////////////////////////////////////////
@@ -597,106 +568,6 @@ pub fn truncate(value: u128, size: Size) -> u128 {
597568
(value << shift) >> shift
598569
}
599570

600-
////////////////////////////////////////////////////////////////////////////////
601-
// Undefined byte tracking
602-
////////////////////////////////////////////////////////////////////////////////
603-
604-
type Block = u64;
605-
const BLOCK_SIZE: u64 = 64;
606-
607-
#[derive(Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable)]
608-
pub struct UndefMask {
609-
blocks: Vec<Block>,
610-
len: Size,
611-
}
612-
613-
impl_stable_hash_for!(struct mir::interpret::UndefMask{blocks, len});
614-
615-
impl UndefMask {
616-
pub fn new(size: Size) -> Self {
617-
let mut m = UndefMask {
618-
blocks: vec![],
619-
len: Size::ZERO,
620-
};
621-
m.grow(size, false);
622-
m
623-
}
624-
625-
/// Check whether the range `start..end` (end-exclusive) is entirely defined.
626-
///
627-
/// Returns `Ok(())` if it's defined. Otherwise returns the index of the byte
628-
/// at which the first undefined access begins.
629-
#[inline]
630-
pub fn is_range_defined(&self, start: Size, end: Size) -> Result<(), Size> {
631-
if end > self.len {
632-
return Err(self.len);
633-
}
634-
635-
let idx = (start.bytes()..end.bytes())
636-
.map(|i| Size::from_bytes(i))
637-
.find(|&i| !self.get(i));
638-
639-
match idx {
640-
Some(idx) => Err(idx),
641-
None => Ok(())
642-
}
643-
}
644-
645-
pub fn set_range(&mut self, start: Size, end: Size, new_state: bool) {
646-
let len = self.len;
647-
if end > len {
648-
self.grow(end - len, new_state);
649-
}
650-
self.set_range_inbounds(start, end, new_state);
651-
}
652-
653-
pub fn set_range_inbounds(&mut self, start: Size, end: Size, new_state: bool) {
654-
for i in start.bytes()..end.bytes() {
655-
self.set(Size::from_bytes(i), new_state);
656-
}
657-
}
658-
659-
#[inline]
660-
pub fn get(&self, i: Size) -> bool {
661-
let (block, bit) = bit_index(i);
662-
(self.blocks[block] & 1 << bit) != 0
663-
}
664-
665-
#[inline]
666-
pub fn set(&mut self, i: Size, new_state: bool) {
667-
let (block, bit) = bit_index(i);
668-
if new_state {
669-
self.blocks[block] |= 1 << bit;
670-
} else {
671-
self.blocks[block] &= !(1 << bit);
672-
}
673-
}
674-
675-
pub fn grow(&mut self, amount: Size, new_state: bool) {
676-
let unused_trailing_bits = self.blocks.len() as u64 * BLOCK_SIZE - self.len.bytes();
677-
if amount.bytes() > unused_trailing_bits {
678-
let additional_blocks = amount.bytes() / BLOCK_SIZE + 1;
679-
assert_eq!(additional_blocks as usize as u64, additional_blocks);
680-
self.blocks.extend(
681-
iter::repeat(0).take(additional_blocks as usize),
682-
);
683-
}
684-
let start = self.len;
685-
self.len += amount;
686-
self.set_range_inbounds(start, start + amount, new_state);
687-
}
688-
}
689-
690-
#[inline]
691-
fn bit_index(bits: Size) -> (usize, usize) {
692-
let bits = bits.bytes();
693-
let a = bits / BLOCK_SIZE;
694-
let b = bits % BLOCK_SIZE;
695-
assert_eq!(a as usize as u64, a);
696-
assert_eq!(b as usize as u64, b);
697-
(a as usize, b as usize)
698-
}
699-
700571
#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, RustcEncodable, RustcDecodable, Hash)]
701572
pub enum ScalarMaybeUndef<Tag=(), Id=AllocId> {
702573
Scalar(Scalar<Tag, Id>),

0 commit comments

Comments
 (0)