
Commit bf8c8b4

committed
---
yaml --- r: 120541 b: refs/heads/dist-snap c: fe93c3d h: refs/heads/master i: 120539: 8d57a69 v: v3
1 parent 89a6e46 commit bf8c8b4

File tree

2 files changed: +27 -26 lines changed


[refs]

Lines changed: 1 addition & 1 deletion
@@ -6,7 +6,7 @@ refs/heads/try: 1813e5aa1a03b0596b8de7abd1af31edf5d6098f
 refs/tags/release-0.1: 1f5c5126e96c79d22cb7862f75304136e204f105
 refs/heads/ndm: f3868061cd7988080c30d6d5bf352a5a5fe2460b
 refs/heads/try2: 147ecfdd8221e4a4d4e090486829a06da1e0ca3c
-refs/heads/dist-snap: 2966e970cabdf7103ad61c840c72bf58352150e0
+refs/heads/dist-snap: fe93c3d47ed0cdf0a0cbac66a9f35ddb4c6783a2
 refs/tags/release-0.2: c870d2dffb391e14efb05aa27898f1f6333a9596
 refs/tags/release-0.3: b5f0d0f648d9a6153664837026ba1be43d3e2503
 refs/heads/try3: 9387340aab40a73e8424c48fd42f0c521a4875c0

branches/dist-snap/src/libstd/sync/spsc_queue.rs

Lines changed: 26 additions & 25 deletions
@@ -40,6 +40,7 @@ use option::{Some, None, Option};
 use owned::Box;
 use ptr::RawPtr;
 use sync::atomics::{AtomicPtr, Relaxed, AtomicUint, Acquire, Release};
+use ty::Unsafe;

 // Node within the linked list queue of messages to send
 struct Node<T> {
@@ -56,13 +57,13 @@ struct Node<T> {
 /// time.
 pub struct Queue<T> {
     // consumer fields
-    tail: *mut Node<T>, // where to pop from
+    tail: Unsafe<*mut Node<T>>, // where to pop from
     tail_prev: AtomicPtr<Node<T>>, // where to pop from

     // producer fields
-    head: *mut Node<T>, // where to push to
-    first: *mut Node<T>, // where to get new nodes from
-    tail_copy: *mut Node<T>, // between first/tail
+    head: Unsafe<*mut Node<T>>, // where to push to
+    first: Unsafe<*mut Node<T>>, // where to get new nodes from
+    tail_copy: Unsafe<*mut Node<T>>, // between first/tail

     // Cache maintenance fields. Additions and subtractions are stored
     // separately in order to allow them to use nonatomic addition/subtraction.
@@ -101,11 +102,11 @@ impl<T: Send> Queue<T> {
         let n2 = Node::new();
         unsafe { (*n1).next.store(n2, Relaxed) }
         Queue {
-            tail: n2,
+            tail: Unsafe::new(n2),
             tail_prev: AtomicPtr::new(n1),
-            head: n2,
-            first: n1,
-            tail_copy: n1,
+            head: Unsafe::new(n2),
+            first: Unsafe::new(n1),
+            tail_copy: Unsafe::new(n1),
             cache_bound: bound,
             cache_additions: AtomicUint::new(0),
             cache_subtractions: AtomicUint::new(0),
@@ -114,43 +115,43 @@ impl<T: Send> Queue<T> {

     /// Pushes a new value onto this queue. Note that to use this function
     /// safely, it must be externally guaranteed that there is only one pusher.
-    pub fn push(&mut self, t: T) {
+    pub fn push(&self, t: T) {
         unsafe {
             // Acquire a node (which either uses a cached one or allocates a new
             // one), and then append this to the 'head' node.
             let n = self.alloc();
             assert!((*n).value.is_none());
             (*n).value = Some(t);
             (*n).next.store(0 as *mut Node<T>, Relaxed);
-            (*self.head).next.store(n, Release);
-            self.head = n;
+            (**self.head.get()).next.store(n, Release);
+            *self.head.get() = n;
         }
     }

-    unsafe fn alloc(&mut self) -> *mut Node<T> {
+    unsafe fn alloc(&self) -> *mut Node<T> {
         // First try to see if we can consume the 'first' node for our uses.
         // We try to avoid as many atomic instructions as possible here, so
         // the addition to cache_subtractions is not atomic (plus we're the
         // only one subtracting from the cache).
-        if self.first != self.tail_copy {
+        if *self.first.get() != *self.tail_copy.get() {
             if self.cache_bound > 0 {
                 let b = self.cache_subtractions.load(Relaxed);
                 self.cache_subtractions.store(b + 1, Relaxed);
             }
-            let ret = self.first;
-            self.first = (*ret).next.load(Relaxed);
+            let ret = *self.first.get();
+            *self.first.get() = (*ret).next.load(Relaxed);
             return ret;
         }
         // If the above fails, then update our copy of the tail and try
         // again.
-        self.tail_copy = self.tail_prev.load(Acquire);
-        if self.first != self.tail_copy {
+        *self.tail_copy.get() = self.tail_prev.load(Acquire);
+        if *self.first.get() != *self.tail_copy.get() {
             if self.cache_bound > 0 {
                 let b = self.cache_subtractions.load(Relaxed);
                 self.cache_subtractions.store(b + 1, Relaxed);
             }
-            let ret = self.first;
-            self.first = (*ret).next.load(Relaxed);
+            let ret = *self.first.get();
+            *self.first.get() = (*ret).next.load(Relaxed);
             return ret;
         }
         // If all of that fails, then we have to allocate a new node
@@ -160,19 +161,19 @@ impl<T: Send> Queue<T> {

     /// Attempts to pop a value from this queue. Remember that to use this type
     /// safely you must ensure that there is only one popper at a time.
-    pub fn pop(&mut self) -> Option<T> {
+    pub fn pop(&self) -> Option<T> {
         unsafe {
             // The `tail` node is not actually a used node, but rather a
             // sentinel from where we should start popping from. Hence, look at
             // tail's next field and see if we can use it. If we do a pop, then
             // the current tail node is a candidate for going into the cache.
-            let tail = self.tail;
+            let tail = *self.tail.get();
             let next = (*tail).next.load(Acquire);
             if next.is_null() { return None }
             assert!((*next).value.is_some());
             let ret = (*next).value.take();

-            self.tail = next;
+            *self.tail.get() = next;
             if self.cache_bound == 0 {
                 self.tail_prev.store(tail, Release);
             } else {
@@ -197,11 +198,11 @@ impl<T: Send> Queue<T> {

     /// Attempts to peek at the head of the queue, returning `None` if the queue
     /// has no data currently
-    pub fn peek<'a>(&'a mut self) -> Option<&'a mut T> {
+    pub fn peek<'a>(&'a self) -> Option<&'a mut T> {
         // This is essentially the same as above with all the popping bits
         // stripped out.
         unsafe {
-            let tail = self.tail;
+            let tail = *self.tail.get();
             let next = (*tail).next.load(Acquire);
             if next.is_null() { return None }
             return (*next).value.as_mut();
@@ -213,7 +214,7 @@ impl<T: Send> Queue<T> {
 impl<T: Send> Drop for Queue<T> {
     fn drop(&mut self) {
         unsafe {
-            let mut cur = self.first;
+            let mut cur = *self.first.get();
             while !cur.is_null() {
                 let next = (*cur).next.load(Relaxed);
                 let _n: Box<Node<T>> = mem::transmute(cur);
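Context for the diff above: each raw-pointer field of Queue<T> is wrapped in Unsafe<...> (the pre-1.0 interior-mutability cell, which appears to correspond to today's std::cell::UnsafeCell), and that wrapping is what lets push, pop, peek, and alloc take &self instead of &mut self while still mutating those fields. Below is a minimal sketch of the same pattern in modern Rust; the Counter type and its bump method are hypothetical, invented only to illustrate mutation through a shared borrow, with UnsafeCell assumed as the stand-in for ty::Unsafe.

use std::cell::UnsafeCell;

// Hypothetical illustration type, not part of the commit.
struct Counter {
    // Interior mutability: the value can be changed through &self.
    value: UnsafeCell<u64>,
}

impl Counter {
    fn new() -> Counter {
        Counter { value: UnsafeCell::new(0) }
    }

    // Takes &self, not &mut self, yet mutates the field: the same move the
    // queue makes when push/pop switch from &mut self to &self. Callers must
    // guarantee exclusive access, just as the queue requires a single pusher
    // and a single popper.
    fn bump(&self) -> u64 {
        unsafe {
            let p = self.value.get(); // *mut u64
            *p += 1;
            *p
        }
    }
}

fn main() {
    let c = Counter::new();
    assert_eq!(c.bump(), 1);
    assert_eq!(c.bump(), 2);
}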
