Commit 62046bf

Auto merge of #1814 - RalfJung:rustup, r=RalfJung

avoid unnecessary RefCell calls

Blocked on rust-lang/rust#85599

2 parents cf7e4b9 + a03f700

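The change this commit makes throughout Miri is mechanical: wherever a method already has (or can be given) exclusive access via `&mut self`, interior-mutability fields are read through `RefCell::get_mut()` instead of `RefCell::borrow()`/`borrow_mut()`, so the dynamic borrow flag is never touched. A minimal, self-contained sketch of that pattern (generic names, not Miri's actual types) is shown below.

use std::cell::RefCell;

struct State {
    names: RefCell<Vec<String>>,
}

impl State {
    // Before: a shared receiver forces a dynamic borrow that is tracked
    // (and can panic) at runtime.
    fn set_name_shared(&self, idx: usize, name: String) {
        let mut names = self.names.borrow_mut();
        names[idx] = name;
    }

    // After: an exclusive receiver lets `get_mut` hand out `&mut Vec<String>`
    // directly; uniqueness is already guaranteed by the borrow checker.
    fn set_name_mut(&mut self, idx: usize, name: String) {
        let names = self.names.get_mut();
        names[idx] = name;
    }
}

fn main() {
    let mut state = State { names: RefCell::new(vec![String::new()]) };
    state.set_name_shared(0, "main".to_string());
    state.set_name_mut(0, "worker".to_string());
    println!("{:?}", state.names.borrow());
}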
File tree

8 files changed: +102 -69 lines changed
rust-version

Lines changed: 1 addition & 1 deletion
@@ -1 +1 @@
-6e92fb409816c65cd0a78a1fbcc71e2fbabdf50a
+0f8cd43ee8c3614e04b5c624dd8a45758d7023da

src/data_race.rs

Lines changed: 28 additions & 26 deletions
@@ -598,7 +598,7 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: MiriEvalContextExt<'mir, 'tcx> {
         // of the time, based on `rate`.
         let rate = this.memory.extra.cmpxchg_weak_failure_rate;
         let cmpxchg_success = eq.to_bool()?
-            && (!can_fail_spuriously || this.memory.extra.rng.borrow_mut().gen::<f64>() < rate);
+            && (!can_fail_spuriously || this.memory.extra.rng.get_mut().gen::<f64>() < rate);
         let res = Immediate::ScalarPair(
             old.to_scalar_or_uninit(),
             Scalar::from_bool(cmpxchg_success).into(),
@@ -647,7 +647,7 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: MiriEvalContextExt<'mir, 'tcx> {
         place: &MPlaceTy<'tcx, Tag>,
         atomic: AtomicWriteOp,
     ) -> InterpResult<'tcx> {
-        let this = self.eval_context_ref();
+        let this = self.eval_context_mut();
         this.validate_atomic_op(
             place,
             atomic,
@@ -672,7 +672,7 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: MiriEvalContextExt<'mir, 'tcx> {
         use AtomicRwOp::*;
         let acquire = matches!(atomic, Acquire | AcqRel | SeqCst);
         let release = matches!(atomic, Release | AcqRel | SeqCst);
-        let this = self.eval_context_ref();
+        let this = self.eval_context_mut();
         this.validate_atomic_op(place, atomic, "Atomic RMW", move |memory, clocks, index, _| {
             if acquire {
                 memory.load_acquire(clocks, index)?;
@@ -690,7 +690,7 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: MiriEvalContextExt<'mir, 'tcx> {
     /// Update the data-race detector for an atomic fence on the current thread.
     fn validate_atomic_fence(&mut self, atomic: AtomicFenceOp) -> InterpResult<'tcx> {
         let this = self.eval_context_mut();
-        if let Some(data_race) = &this.memory.extra.data_race {
+        if let Some(data_race) = &mut this.memory.extra.data_race {
             data_race.maybe_perform_sync_operation(move |index, mut clocks| {
                 log::trace!("Atomic fence on {:?} with ordering {:?}", index, atomic);
 
@@ -771,7 +771,7 @@ impl VClockAlloc {
     }
 
     fn reset_clocks(&mut self, offset: Size, len: Size) {
-        let mut alloc_ranges = self.alloc_ranges.borrow_mut();
+        let alloc_ranges = self.alloc_ranges.get_mut();
         for (_, range) in alloc_ranges.iter_mut(offset, len) {
             // Reset the portion of the range
             *range = MemoryCellClocks::new(0, VectorIdx::MAX_INDEX);
@@ -1025,6 +1025,7 @@ trait EvalContextPrivExt<'mir, 'tcx: 'mir>: MiriEvalContextExt<'mir, 'tcx> {
         if let Some(data_race) = &this.memory.extra.data_race {
             if data_race.multi_threaded.get() {
                 // Load and log the atomic operation.
+                // Note that atomic loads are possible even from read-only allocations, so `get_alloc_extra_mut` is not an option.
                 let place_ptr = place.ptr.assert_ptr();
                 let size = place.layout.size;
                 let alloc_meta =
@@ -1105,6 +1106,7 @@ struct ThreadExtraState {
 /// Global data-race detection state, contains the currently
 /// executing thread as well as the vector-clocks associated
 /// with each of the threads.
+// FIXME: it is probably better to have one large RefCell, than to have so many small ones.
 #[derive(Debug, Clone)]
 pub struct GlobalState {
     /// Set to true once the first additional
@@ -1158,7 +1160,7 @@ impl GlobalState {
     /// Create a new global state, setup with just thread-id=0
     /// advanced to timestamp = 1.
     pub fn new() -> Self {
-        let global_state = GlobalState {
+        let mut global_state = GlobalState {
             multi_threaded: Cell::new(false),
             vector_clocks: RefCell::new(IndexVec::new()),
             vector_info: RefCell::new(IndexVec::new()),
@@ -1172,9 +1174,9 @@ impl GlobalState {
         // Setup the main-thread since it is not explicitly created:
         // uses vector index and thread-id 0, also the rust runtime gives
         // the main-thread a name of "main".
-        let index = global_state.vector_clocks.borrow_mut().push(ThreadClockSet::default());
-        global_state.vector_info.borrow_mut().push(ThreadId::new(0));
-        global_state.thread_info.borrow_mut().push(ThreadExtraState {
+        let index = global_state.vector_clocks.get_mut().push(ThreadClockSet::default());
+        global_state.vector_info.get_mut().push(ThreadId::new(0));
+        global_state.thread_info.get_mut().push(ThreadExtraState {
            vector_index: Some(index),
            thread_name: Some("main".to_string().into_boxed_str()),
            termination_vector_clock: None,
@@ -1221,7 +1223,7 @@ impl GlobalState {
     // Hook for thread creation, enabled multi-threaded execution and marks
     // the current thread timestamp as happening-before the current thread.
     #[inline]
-    pub fn thread_created(&self, thread: ThreadId) {
+    pub fn thread_created(&mut self, thread: ThreadId) {
         let current_index = self.current_index();
 
         // Increment the number of active threads.
@@ -1241,12 +1243,12 @@ impl GlobalState {
         let created_index = if let Some(reuse_index) = self.find_vector_index_reuse_candidate() {
             // Now re-configure the re-use candidate, increment the clock
             // for the new sync use of the vector.
-            let mut vector_clocks = self.vector_clocks.borrow_mut();
+            let vector_clocks = self.vector_clocks.get_mut();
             vector_clocks[reuse_index].increment_clock(reuse_index);
 
             // Locate the old thread the vector was associated with and update
             // it to represent the new thread instead.
-            let mut vector_info = self.vector_info.borrow_mut();
+            let vector_info = self.vector_info.get_mut();
             let old_thread = vector_info[reuse_index];
             vector_info[reuse_index] = thread;
 
@@ -1258,7 +1260,7 @@ impl GlobalState {
         } else {
             // No vector re-use candidates available, instead create
             // a new vector index.
-            let mut vector_info = self.vector_info.borrow_mut();
+            let vector_info = self.vector_info.get_mut();
             vector_info.push(thread)
         };
 
@@ -1268,7 +1270,7 @@ impl GlobalState {
         thread_info[thread].vector_index = Some(created_index);
 
         // Create a thread clock set if applicable.
-        let mut vector_clocks = self.vector_clocks.borrow_mut();
+        let vector_clocks = self.vector_clocks.get_mut();
         if created_index == vector_clocks.next_index() {
             vector_clocks.push(ThreadClockSet::default());
         }
@@ -1289,9 +1291,9 @@ impl GlobalState {
     /// Hook on a thread join to update the implicit happens-before relation
     /// between the joined thread and the current thread.
     #[inline]
-    pub fn thread_joined(&self, current_thread: ThreadId, join_thread: ThreadId) {
-        let mut clocks_vec = self.vector_clocks.borrow_mut();
-        let thread_info = self.thread_info.borrow();
+    pub fn thread_joined(&mut self, current_thread: ThreadId, join_thread: ThreadId) {
+        let clocks_vec = self.vector_clocks.get_mut();
+        let thread_info = self.thread_info.get_mut();
 
         // Load the vector clock of the current thread.
         let current_index = thread_info[current_thread]
@@ -1329,9 +1331,9 @@ impl GlobalState {
 
         // If the thread is marked as terminated but not joined
        // then move the thread to the re-use set.
-        let mut termination = self.terminated_threads.borrow_mut();
+        let termination = self.terminated_threads.get_mut();
         if let Some(index) = termination.remove(&join_thread) {
-            let mut reuse = self.reuse_candidates.borrow_mut();
+            let reuse = self.reuse_candidates.get_mut();
             reuse.insert(index);
         }
     }
@@ -1344,28 +1346,28 @@ impl GlobalState {
     /// This should be called strictly before any calls to
     /// `thread_joined`.
     #[inline]
-    pub fn thread_terminated(&self) {
+    pub fn thread_terminated(&mut self) {
         let current_index = self.current_index();
 
         // Increment the clock to a unique termination timestamp.
-        let mut vector_clocks = self.vector_clocks.borrow_mut();
+        let vector_clocks = self.vector_clocks.get_mut();
         let current_clocks = &mut vector_clocks[current_index];
         current_clocks.increment_clock(current_index);
 
         // Load the current thread id for the executing vector.
-        let vector_info = self.vector_info.borrow();
+        let vector_info = self.vector_info.get_mut();
         let current_thread = vector_info[current_index];
 
         // Load the current thread metadata, and move to a terminated
         // vector state. Setting up the vector clock all join operations
         // will use.
-        let mut thread_info = self.thread_info.borrow_mut();
+        let thread_info = self.thread_info.get_mut();
         let current = &mut thread_info[current_thread];
         current.termination_vector_clock = Some(current_clocks.clock.clone());
 
         // Add this thread as a candidate for re-use after a thread join
         // occurs.
-        let mut termination = self.terminated_threads.borrow_mut();
+        let termination = self.terminated_threads.get_mut();
         termination.insert(current_thread, current_index);
 
         // Reduce the number of active threads, now that a thread has
@@ -1392,9 +1394,9 @@ impl GlobalState {
     /// the thread name is used for improved diagnostics
     /// during a data-race.
     #[inline]
-    pub fn thread_set_name(&self, thread: ThreadId, name: String) {
+    pub fn thread_set_name(&mut self, thread: ThreadId, name: String) {
         let name = name.into_boxed_str();
-        let mut thread_info = self.thread_info.borrow_mut();
+        let thread_info = self.thread_info.get_mut();
         thread_info[thread].thread_name = Some(name);
     }
 

src/shims/backtrace.rs

Lines changed: 4 additions & 2 deletions
@@ -119,8 +119,10 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx
         // `lo.col` is 0-based - add 1 to make it 1-based for the caller.
         let colno: u32 = lo.col.0 as u32 + 1;
 
-        let name_alloc = this.allocate_str(&name, MiriMemoryKind::Rust.into());
-        let filename_alloc = this.allocate_str(&filename, MiriMemoryKind::Rust.into());
+        // These are "mutable" allocations as we consider them to be owned by the callee.
+        let name_alloc = this.allocate_str(&name, MiriMemoryKind::Rust.into(), Mutability::Mut);
+        let filename_alloc =
+            this.allocate_str(&filename, MiriMemoryKind::Rust.into(), Mutability::Mut);
         let lineno_alloc = Scalar::from_u32(lineno);
         let colno_alloc = Scalar::from_u32(colno);

src/shims/panic.rs

Lines changed: 2 additions & 1 deletion
@@ -13,6 +13,7 @@
 
 use log::trace;
 
+use rustc_ast::Mutability;
 use rustc_middle::{mir, ty};
 use rustc_target::spec::abi::Abi;
 use rustc_target::spec::PanicStrategy;
@@ -169,7 +170,7 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx
         let this = self.eval_context_mut();
 
         // First arg: message.
-        let msg = this.allocate_str(msg, MiriMemoryKind::Machine.into());
+        let msg = this.allocate_str(msg, MiriMemoryKind::Machine.into(), Mutability::Not);
 
         // Call the lang item.
         let panic = this.tcx.lang_items().panic_fn().unwrap();

src/shims/posix/sync.rs

Lines changed: 1 addition & 1 deletion
@@ -58,7 +58,7 @@ fn mutexattr_set_kind<'mir, 'tcx: 'mir>(
 // (the kind has to be at its offset for compatibility with static initializer macros)
 
 fn mutex_get_kind<'mir, 'tcx: 'mir>(
-    ecx: &mut MiriEvalContext<'mir, 'tcx>,
+    ecx: &MiriEvalContext<'mir, 'tcx>,
     mutex_op: &OpTy<'tcx, Tag>,
 ) -> InterpResult<'tcx, ScalarMaybeUninit<Tag>> {
     let offset = if ecx.pointer_size().bytes() == 8 { 16 } else { 12 };

src/stacked_borrows.rs

Lines changed: 53 additions & 27 deletions
@@ -457,14 +457,29 @@ impl<'tcx> Stacks {
         &self,
         ptr: Pointer<Tag>,
         size: Size,
-        global: &GlobalState,
-        f: impl Fn(Pointer<Tag>, &mut Stack, &GlobalState) -> InterpResult<'tcx>,
+        f: impl Fn(Pointer<Tag>, &mut Stack) -> InterpResult<'tcx>,
     ) -> InterpResult<'tcx> {
         let mut stacks = self.stacks.borrow_mut();
         for (offset, stack) in stacks.iter_mut(ptr.offset, size) {
             let mut cur_ptr = ptr;
             cur_ptr.offset = offset;
-            f(cur_ptr, stack, &*global)?;
+            f(cur_ptr, stack)?;
+        }
+        Ok(())
+    }
+
+    /// Call `f` on every stack in the range.
+    fn for_each_mut(
+        &mut self,
+        ptr: Pointer<Tag>,
+        size: Size,
+        f: impl Fn(Pointer<Tag>, &mut Stack) -> InterpResult<'tcx>,
+    ) -> InterpResult<'tcx> {
+        let stacks = self.stacks.get_mut();
+        for (offset, stack) in stacks.iter_mut(ptr.offset, size) {
+            let mut cur_ptr = ptr;
+            cur_ptr.offset = offset;
+            f(cur_ptr, stack)?;
         }
         Ok(())
     }
@@ -516,9 +531,8 @@ impl Stacks {
         extra: &MemoryExtra,
     ) -> InterpResult<'tcx> {
         trace!("read access with tag {:?}: {:?}, size {}", ptr.tag, ptr.erase_tag(), size.bytes());
-        self.for_each(ptr, size, &*extra.borrow(), |ptr, stack, global| {
-            stack.access(AccessKind::Read, ptr, global)
-        })
+        let global = &*extra.borrow();
+        self.for_each(ptr, size, move |ptr, stack| stack.access(AccessKind::Read, ptr, global))
     }
 
     #[inline(always)]
@@ -529,9 +543,8 @@ impl Stacks {
         extra: &mut MemoryExtra,
     ) -> InterpResult<'tcx> {
         trace!("write access with tag {:?}: {:?}, size {}", ptr.tag, ptr.erase_tag(), size.bytes());
-        self.for_each(ptr, size, extra.get_mut(), |ptr, stack, global| {
-            stack.access(AccessKind::Write, ptr, global)
-        })
+        let global = extra.get_mut();
+        self.for_each_mut(ptr, size, move |ptr, stack| stack.access(AccessKind::Write, ptr, global))
     }
 
     #[inline(always)]
@@ -542,7 +555,8 @@ impl Stacks {
         extra: &mut MemoryExtra,
     ) -> InterpResult<'tcx> {
         trace!("deallocation with tag {:?}: {:?}, size {}", ptr.tag, ptr.erase_tag(), size.bytes());
-        self.for_each(ptr, size, extra.get_mut(), |ptr, stack, global| stack.dealloc(ptr, global))
+        let global = extra.get_mut();
+        self.for_each_mut(ptr, size, move |ptr, stack| stack.dealloc(ptr, global))
     }
 }
 
@@ -558,6 +572,18 @@ trait EvalContextPrivExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx
         new_tag: Tag,
         protect: bool,
     ) -> InterpResult<'tcx> {
+        // Nothing to do for ZSTs.
+        if size == Size::ZERO {
+            trace!(
+                "reborrow of size 0: {} reference {:?} derived from {:?} (pointee {})",
+                kind,
+                new_tag,
+                place.ptr,
+                place.layout.ty,
+            );
+            return Ok(());
+        }
+
         let this = self.eval_context_mut();
         let protector = if protect { Some(this.frame().extra.call_id) } else { None };
         let ptr = place.ptr.assert_ptr();
@@ -571,12 +597,6 @@ trait EvalContextPrivExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx
             size.bytes()
         );
 
-        // Get the allocation. We need both the allocation and the MemoryExtra, so we cannot use `&mut`.
-        // FIXME: make `get_alloc_extra_mut` also return `&mut MemoryExtra`.
-        let extra = this.memory.get_alloc_extra(ptr.alloc_id)?;
-        let stacked_borrows =
-            extra.stacked_borrows.as_ref().expect("we should have Stacked Borrows data");
-        let global = this.memory.extra.stacked_borrows.as_ref().unwrap().borrow();
         // Update the stacks.
         // Make sure that raw pointers and mutable shared references are reborrowed "weak":
         // There could be existing unique pointers reborrowed from them that should remain valid!
@@ -588,6 +608,12 @@ trait EvalContextPrivExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx
                 // Shared references and *const are a whole different kind of game, the
                 // permission is not uniform across the entire range!
                 // We need a frozen-sensitive reborrow.
+                // We have to use shared references to alloc/memory_extra here since
+                // `visit_freeze_sensitive` needs to access the global state.
+                let extra = this.memory.get_alloc_extra(ptr.alloc_id)?;
+                let stacked_borrows =
+                    extra.stacked_borrows.as_ref().expect("we should have Stacked Borrows data");
+                let global = this.memory.extra.stacked_borrows.as_ref().unwrap().borrow();
                 return this.visit_freeze_sensitive(place, size, |cur_ptr, size, frozen| {
                     // We are only ever `SharedReadOnly` inside the frozen bits.
                     let perm = if frozen {
@@ -596,15 +622,21 @@ trait EvalContextPrivExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx
                         Permission::SharedReadWrite
                     };
                     let item = Item { perm, tag: new_tag, protector };
-                    stacked_borrows.for_each(cur_ptr, size, &*global, |cur_ptr, stack, global| {
-                        stack.grant(cur_ptr, item, global)
+                    stacked_borrows.for_each(cur_ptr, size, |cur_ptr, stack| {
+                        stack.grant(cur_ptr, item, &*global)
                     })
                 });
             }
         };
+        // Here we can avoid `borrow()` calls because we have mutable references.
+        // Note that this asserts that the allocation is mutable -- but since we are creating a
+        // mutable pointer, that seems reasonable.
+        let (alloc_extra, memory_extra) = this.memory.get_alloc_extra_mut(ptr.alloc_id)?;
+        let stacked_borrows =
+            alloc_extra.stacked_borrows.as_mut().expect("we should have Stacked Borrows data");
+        let global = memory_extra.stacked_borrows.as_mut().unwrap().get_mut();
         let item = Item { perm, tag: new_tag, protector };
-        stacked_borrows
-            .for_each(ptr, size, &*global, |ptr, stack, global| stack.grant(ptr, item, global))
+        stacked_borrows.for_each_mut(ptr, size, |ptr, stack| stack.grant(ptr, item, global))
     }
 
     /// Retags an indidual pointer, returning the retagged version.
@@ -631,16 +663,10 @@ trait EvalContextPrivExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx
         // We can see dangling ptrs in here e.g. after a Box's `Unique` was
        // updated using "self.0 = ..." (can happen in Box::from_raw) so we cannot ICE; see miri#1050.
         let place = this.mplace_access_checked(place, Some(Align::from_bytes(1).unwrap()))?;
-        // Nothing to do for ZSTs. We use `is_bits` here because we *do* need to retag even ZSTs
-        // when there actually is a tag (to avoid inheriting a tag that would let us access more
-        // than 0 bytes).
-        if size == Size::ZERO && place.ptr.is_bits() {
-            return Ok(*val);
-        }
 
         // Compute new borrow.
         let new_tag = {
-            let mut mem_extra = this.memory.extra.stacked_borrows.as_ref().unwrap().borrow_mut();
+            let mem_extra = this.memory.extra.stacked_borrows.as_mut().unwrap().get_mut();
             match kind {
                 // Give up tracking for raw pointers.
                 RefKind::Raw { .. } if !mem_extra.track_raw => Tag::Untagged,

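In src/stacked_borrows.rs the same idea shows up as a split of the iteration helper: the existing `for_each` keeps taking `&self` and goes through `RefCell::borrow_mut()` (the frozen-sensitive read path only has shared access), while the new `for_each_mut` takes `&mut self` and uses `RefCell::get_mut()`. A rough, self-contained sketch of that shape (simplified stand-in types, not the real `Stacks`/`Stack` code) follows.

use std::cell::RefCell;

// Stand-in for the per-allocation collection of borrow stacks.
struct Stacks {
    stacks: RefCell<Vec<i32>>,
}

impl Stacks {
    // Shared-access variant: callers that only hold `&Stacks` (e.g. a read
    // access) pay for a runtime borrow of the RefCell.
    fn for_each(&self, f: impl Fn(&mut i32)) {
        let mut stacks = self.stacks.borrow_mut();
        for stack in stacks.iter_mut() {
            f(stack);
        }
    }

    // Exclusive-access variant: `get_mut` needs no runtime check because
    // `&mut self` already proves unique access.
    fn for_each_mut(&mut self, f: impl Fn(&mut i32)) {
        let stacks = self.stacks.get_mut();
        for stack in stacks.iter_mut() {
            f(stack);
        }
    }
}

fn main() {
    let mut s = Stacks { stacks: RefCell::new(vec![0, 1, 2]) };
    s.for_each(|x| *x += 10);      // shared path, dynamic borrow
    s.for_each_mut(|x| *x += 100); // exclusive path, no dynamic borrow
    println!("{:?}", s.stacks.borrow());
}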