
Commit 5e3172d

---
yaml --- r: 229059 b: refs/heads/try c: 22e2100 h: refs/heads/master i: 229057: eb17999 229055: 845969a v: v3

1 parent 5716f43

2 files changed: +27 −5 lines

[refs]

Lines changed: 1 addition & 1 deletion
@@ -1,7 +1,7 @@
 ---
 refs/heads/master: aca2057ed5fb7af3f8905b2bc01f72fa001c35c8
 refs/heads/snap-stage3: 1af31d4974e33027a68126fa5a5a3c2c6491824f
-refs/heads/try: 1ffe3453cb2bd4cc031b4f8a4bdb88279e01e094
+refs/heads/try: 22e21004582902cc1b7d1bef89d09728cbe64ca2
 refs/tags/release-0.1: 1f5c5126e96c79d22cb7862f75304136e204f105
 refs/tags/release-0.2: c870d2dffb391e14efb05aa27898f1f6333a9596
 refs/tags/release-0.3: b5f0d0f648d9a6153664837026ba1be43d3e2503

branches/try/src/liballoc/arc.rs

Lines changed: 26 additions & 4 deletions
@@ -78,16 +78,18 @@ use core::atomic::Ordering::{Relaxed, Release, Acquire, SeqCst};
 use core::fmt;
 use core::cmp::Ordering;
 use core::mem::{align_of_val, size_of_val};
-use core::intrinsics::drop_in_place;
+use core::intrinsics::{drop_in_place, abort};
 use core::mem;
 use core::nonzero::NonZero;
 use core::ops::{Deref, CoerceUnsized};
 use core::ptr;
 use core::marker::Unsize;
 use core::hash::{Hash, Hasher};
-use core::usize;
+use core::{usize, isize};
 use heap::deallocate;
 
+const MAX_REFCOUNT: usize = (isize::MAX) as usize;
+
 /// An atomically reference counted wrapper for shared state.
 ///
 /// # Examples
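
A quick side calculation (not part of the commit) shows the headroom this constant buys: saturating at isize::MAX leaves the entire upper half of the usize range as slack before the count could actually wrap, roughly 2.1 billion increments on a 32-bit target, which is the "~2 billion threads" margin cited in the comment added below.

fn main() {
    // Headroom between the saturation point and the point where a usize
    // reference count would actually wrap around to zero.
    let max_refcount = isize::MAX as usize;
    let headroom = usize::MAX - max_refcount;
    // On a 32-bit target this prints "saturate at 2147483647, headroom 2147483648".
    println!("saturate at {}, headroom {}", max_refcount, headroom);
}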
@@ -311,7 +313,21 @@ impl<T: ?Sized> Clone for Arc<T> {
         // another must already provide any required synchronization.
         //
         // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
-        self.inner().strong.fetch_add(1, Relaxed);
+        let old_size = self.inner().strong.fetch_add(1, Relaxed);
+
+        // However we need to guard against massive refcounts in case someone
+        // is `mem::forget`ing Arcs. If we don't do this the count can overflow
+        // and users will use-after free. We racily saturate to `isize::MAX` on
+        // the assumption that there aren't ~2 billion threads incrementing
+        // the reference count at once. This branch will never be taken in
+        // any realistic program.
+        //
+        // We abort because such a program is incredibly degenerate, and we
+        // don't care to support it.
+        if old_size > MAX_REFCOUNT {
+            unsafe { abort(); }
+        }
+
         Arc { _ptr: self._ptr }
     }
 }
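
For readers who want to experiment with the technique outside liballoc, the same saturating-abort guard can be sketched on stable Rust with a plain AtomicUsize. Everything below is illustrative and not part of this commit: the GuardedCount type and its increment method are made-up names, and std::process::abort stands in for the core::intrinsics::abort call used here.

use std::process;
use std::sync::atomic::{AtomicUsize, Ordering};

// Same saturation point as the commit: the count is never allowed past
// isize::MAX, leaving an enormous margin before the usize could wrap.
const MAX_REFCOUNT: usize = isize::MAX as usize;

// Illustrative stand-in for the strong count stored next to the data.
struct GuardedCount {
    strong: AtomicUsize,
}

impl GuardedCount {
    fn new() -> GuardedCount {
        GuardedCount { strong: AtomicUsize::new(1) }
    }

    // Mirrors the new Arc::clone logic: bump the count with a Relaxed
    // fetch_add, then abort if the *previous* value had already passed
    // isize::MAX, so the count racily saturates long before it can wrap.
    fn increment(&self) {
        let old_size = self.strong.fetch_add(1, Ordering::Relaxed);
        if old_size > MAX_REFCOUNT {
            // Stable stand-in for core::intrinsics::abort.
            process::abort();
        }
    }
}

fn main() {
    let count = GuardedCount::new();
    count.increment();
    assert_eq!(count.strong.load(Ordering::Relaxed), 2);
}

Checking the previous value rather than the new one is the detail that makes the guard "racy but safe": even if many threads increment past the threshold concurrently, each of them sees an old value above isize::MAX and aborts before the count can climb anywhere near usize::MAX.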
@@ -612,7 +628,13 @@ impl<T: ?Sized> Clone for Weak<T> {
         // fetch_add (ignoring the lock) because the weak count is only locked
         // where are *no other* weak pointers in existence. (So we can't be
         // running this code in that case).
-        self.inner().weak.fetch_add(1, Relaxed);
+        let old_size = self.inner().weak.fetch_add(1, Relaxed);
+
+        // See comments in Arc::clone() for why we do this (for mem::forget).
+        if old_size > MAX_REFCOUNT {
+            unsafe { abort(); }
+        }
+
         return Weak { _ptr: self._ptr }
     }
 }
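
The failure mode the comments describe is only reachable by leaking clones, since balanced clone/drop pairs can never push the count past the number of live handles. A hypothetical sketch using today's std::sync::Arc (not the liballoc code in this diff) shows the leak pattern: repeated enough times (2^32 forgotten clones on a 32-bit target), the unguarded count would wrap to zero and a later drop would free memory that live Arcs still reference, which is exactly the use-after-free the abort forestalls.

use std::mem;
use std::sync::Arc;

fn main() {
    let data = Arc::new(0u8);

    // Each forgotten clone bumps the strong count and never gives the
    // decrement back, so the count tracks leaks rather than live handles.
    for _ in 0..3 {
        mem::forget(Arc::clone(&data));
    }

    assert_eq!(Arc::strong_count(&data), 4);
}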
