
Commit 5079c1b

committed
---
yaml --- r: 33431 b: refs/heads/snap-stage3 c: 42c05fe h: refs/heads/master i: 33429: 064fc73 33427: b93e089 33423: 014d738 v: v3
1 parent e7c0bd5 commit 5079c1b

17 files changed (+149, -229 lines)

[refs]

Lines changed: 1 addition & 1 deletion
@@ -1,7 +1,7 @@
 ---
 refs/heads/master: cd6f24f9d14ac90d167386a56e7a6ac1f0318195
 refs/heads/snap-stage1: e33de59e47c5076a89eadeb38f4934f58a3618a6
-refs/heads/snap-stage3: 48582b360c784a536bd502f9611cfe66c753ce64
+refs/heads/snap-stage3: 42c05fe642efa726dc6cde624b40b638741724ee
 refs/heads/try: d324a424d8f84b1eb049b12cf34182bda91b0024
 refs/tags/release-0.1: 1f5c5126e96c79d22cb7862f75304136e204f105
 refs/heads/ndm: f3868061cd7988080c30d6d5bf352a5a5fe2460b

branches/snap-stage3/src/libcore/private.rs

Lines changed: 12 additions & 145 deletions
@@ -14,6 +14,14 @@ extern mod rustrt {
     fn rust_task_weaken(ch: rust_port_id);
     fn rust_task_unweaken(ch: rust_port_id);

+    #[rust_stack]
+    fn rust_atomic_increment(p: &mut libc::intptr_t)
+        -> libc::intptr_t;
+
+    #[rust_stack]
+    fn rust_atomic_decrement(p: &mut libc::intptr_t)
+        -> libc::intptr_t;
+
     #[rust_stack]
     fn rust_compare_and_swap_ptr(address: &mut libc::uintptr_t,
                                  oldval: libc::uintptr_t,
@@ -25,15 +33,6 @@ extern mod rustrt {
     fn rust_unlock_little_lock(lock: rust_little_lock);
 }

-#[abi = "rust-intrinsic"]
-extern mod rusti {
-
-    #[cfg(stage1)] #[cfg(stage2)] #[cfg(stage3)]
-    fn atomic_cxchg(dst: &mut int, old: int, src: int) -> int;
-    fn atomic_xadd(dst: &mut int, src: int) -> int;
-    fn atomic_xsub(dst: &mut int, src: int) -> int;
-}
-
 #[allow(non_camel_case_types)] // runtime type
 type rust_port_id = uint;

@@ -44,7 +43,6 @@ type GlobalPtr = *libc::uintptr_t;
  * or, if no channel exists creates and installs a new channel and sets up a
  * new task to receive from it.
  */
-#[cfg(stage0)]
 pub unsafe fn chan_from_global_ptr<T: Send>(
     global: GlobalPtr,
     task_fn: fn() -> task::TaskBuilder,
@@ -105,68 +103,6 @@ pub unsafe fn chan_from_global_ptr<T: Send>(
     }
 }

-#[cfg(stage1)] #[cfg(stage2)] #[cfg(stage3)]
-pub unsafe fn chan_from_global_ptr<T: Send>(
-    global: GlobalPtr,
-    task_fn: fn() -> task::TaskBuilder,
-    f: fn~(comm::Port<T>)
-) -> comm::Chan<T> {
-
-    enum Msg {
-        Proceed,
-        Abort
-    }
-
-    log(debug,~"ENTERING chan_from_global_ptr, before is_prob_zero check");
-    let is_probably_zero = *global == 0u;
-    log(debug,~"after is_prob_zero check");
-    if is_probably_zero {
-        log(debug,~"is probably zero...");
-        // There's no global channel. We must make it
-
-        let (setup_po, setup_ch) = do task_fn().spawn_conversation
-            |move f, setup_po, setup_ch| {
-            let po = comm::Port::<T>();
-            let ch = comm::Chan(&po);
-            comm::send(setup_ch, ch);
-
-            // Wait to hear if we are the official instance of
-            // this global task
-            match comm::recv::<Msg>(setup_po) {
-                Proceed => f(move po),
-                Abort => ()
-            }
-        };
-
-        log(debug,~"before setup recv..");
-        // This is the proposed global channel
-        let ch = comm::recv(setup_po);
-        // 0 is our sentinal value. It is not a valid channel
-        assert *ch != 0;
-
-        // Install the channel
-        log(debug,~"BEFORE COMPARE AND SWAP");
-        rusti::atomic_cxchg(
-            cast::reinterpret_cast(&global),
-            0, cast::reinterpret_cast(&ch));
-        let swapped = *global != 0;
-        log(debug,fmt!("AFTER .. swapped? %?", swapped));
-
-        if swapped {
-            // Success!
-            comm::send(setup_ch, Proceed);
-            ch
-        } else {
-            // Somebody else got in before we did
-            comm::send(setup_ch, Abort);
-            cast::reinterpret_cast(&*global)
-        }
-    } else {
-        log(debug, ~"global != 0");
-        cast::reinterpret_cast(&*global)
-    }
-}
-
 #[test]
 pub fn test_from_global_chan1() {

@@ -369,7 +305,7 @@ struct ArcDestruct<T> {
     }
     do task::unkillable {
         let data: ~ArcData<T> = cast::reinterpret_cast(&self.data);
-        let new_count = rusti::atomic_xsub(&mut data.count, 1) - 1;
+        let new_count = rustrt::rust_atomic_decrement(&mut data.count);
         assert new_count >= 0;
         if new_count == 0 {
             // Were we really last, or should we hand off to an unwrapper?
@@ -405,7 +341,6 @@ fn ArcDestruct<T>(data: *libc::c_void) -> ArcDestruct<T> {
     }
 }

-#[cfg(stage0)]
 pub unsafe fn unwrap_shared_mutable_state<T: Send>(rc: SharedMutableState<T>)
     -> T {
     struct DeathThroes<T> {
@@ -438,76 +373,8 @@ pub unsafe fn unwrap_shared_mutable_state<T: Send>(rc: SharedMutableState<T>)
             // Got in. Step 0: Tell destructor not to run. We are now it.
             rc.data = ptr::null();
             // Step 1 - drop our own reference.
-            let new_count = rusti::atomic_xsub(&mut ptr.count, 1) - 1;
-            //assert new_count >= 0;
-            if new_count == 0 {
-                // We were the last owner. Can unwrap immediately.
-                // Also we have to free the server endpoints.
-                let _server: UnwrapProto = cast::transmute(move serverp);
-                option::swap_unwrap(&mut ptr.data)
-                // drop glue takes over.
-            } else {
-                // The *next* person who sees the refcount hit 0 will wake us.
-                let end_result =
-                    DeathThroes { ptr: Some(move ptr),
-                                  response: Some(move c2) };
-                let mut p1 = Some(move p1); // argh
-                do task::rekillable {
-                    pipes::recv_one(option::swap_unwrap(&mut p1));
-                }
-                // Got here. Back in the 'unkillable' without getting killed.
-                // Recover ownership of ptr, then take the data out.
-                let ptr = option::swap_unwrap(&mut end_result.ptr);
-                option::swap_unwrap(&mut ptr.data)
-                // drop glue takes over.
-            }
-        } else {
-            // Somebody else was trying to unwrap. Avoid guaranteed deadlock.
-            cast::forget(move ptr);
-            // Also we have to free the (rejected) server endpoints.
-            let _server: UnwrapProto = cast::transmute(move serverp);
-            fail ~"Another task is already unwrapping this ARC!";
-        }
-    }
-}
-
-#[cfg(stage1)] #[cfg(stage2)] #[cfg(stage3)]
-pub unsafe fn unwrap_shared_mutable_state<T: Send>(rc: SharedMutableState<T>)
-    -> T {
-    struct DeathThroes<T> {
-        mut ptr: Option<~ArcData<T>>,
-        mut response: Option<pipes::ChanOne<bool>>,
-        drop unsafe {
-            let response = option::swap_unwrap(&mut self.response);
-            // In case we get killed early, we need to tell the person who
-            // tried to wake us whether they should hand-off the data to us.
-            if task::failing() {
-                pipes::send_one(move response, false);
-                // Either this swap_unwrap or the one below (at "Got here")
-                // ought to run.
-                cast::forget(option::swap_unwrap(&mut self.ptr));
-            } else {
-                assert self.ptr.is_none();
-                pipes::send_one(move response, true);
-            }
-        }
-    }
-
-    do task::unkillable {
-        let ptr: ~ArcData<T> = cast::reinterpret_cast(&rc.data);
-        let (c1,p1) = pipes::oneshot(); // ()
-        let (c2,p2) = pipes::oneshot(); // bool
-        let server: UnwrapProto = ~mut Some((move c1,move p2));
-        let serverp: libc::uintptr_t = cast::transmute(move server);
-        // Try to put our server end in the unwrapper slot.
-        rusti::atomic_cxchg(cast::reinterpret_cast(&ptr.unwrapper),
-                            0, serverp as int);
-        if ptr.unwrapper != 0 {
-            // Got in. Step 0: Tell destructor not to run. We are now it.
-            rc.data = ptr::null();
-            // Step 1 - drop our own reference.
-            let new_count = rusti::atomic_xsub(&mut ptr.count, 1) - 1;
-            //assert new_count >= 0;
+            let new_count = rustrt::rust_atomic_decrement(&mut ptr.count);
+            // assert new_count >= 0;
             if new_count == 0 {
                 // We were the last owner. Can unwrap immediately.
                 // Also we have to free the server endpoints.
@@ -585,7 +452,7 @@ pub unsafe fn clone_shared_mutable_state<T: Send>(rc: &SharedMutableState<T>)
     -> SharedMutableState<T> {
     unsafe {
         let ptr: ~ArcData<T> = cast::reinterpret_cast(&(*rc).data);
-        let new_count = rusti::atomic_xadd(&mut ptr.count, 1) + 1;
+        let new_count = rustrt::rust_atomic_increment(&mut ptr.count);
         assert new_count >= 2;
         cast::forget(move ptr);
     }
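Note on the atomics change above: the old intrinsic-based code computed the updated reference count by hand (rusti::atomic_xadd(.., 1) + 1 and rusti::atomic_xsub(.., 1) - 1), while the new rustrt::rust_atomic_increment / rust_atomic_decrement calls are used as if they return the updated count directly, which is why the "+ 1" / "- 1" adjustments disappear while the asserts remain. A minimal sketch of that return-the-new-value convention, written in present-day Rust with std::sync::atomic rather than the 2012 dialect in the diff, and assuming SeqCst ordering (the ordering actually used by the runtime's sync::increment/sync::decrement is not shown in this commit):

use std::sync::atomic::{AtomicIsize, Ordering};

// Returns the count *after* the increment, mirroring how the diff uses
// rustrt::rust_atomic_increment.
fn atomic_increment(count: &AtomicIsize) -> isize {
    count.fetch_add(1, Ordering::SeqCst) + 1
}

// Returns the count *after* the decrement, mirroring rustrt::rust_atomic_decrement.
fn atomic_decrement(count: &AtomicIsize) -> isize {
    count.fetch_sub(1, Ordering::SeqCst) - 1
}

fn main() {
    let count = AtomicIsize::new(1);         // one existing owner
    assert!(atomic_increment(&count) >= 2);  // clone_shared_mutable_state's assert
    assert!(atomic_decrement(&count) >= 0);  // ArcDestruct / unwrap's assert
}

The two asserts mirror the ones in the diff: after a clone the count must be at least 2, and a decrement must never drive it below zero.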

branches/snap-stage3/src/rt/rust_builtin.cpp

Lines changed: 10 additions & 0 deletions
@@ -830,6 +830,16 @@ rust_compare_and_swap_ptr(intptr_t *address,
     return sync::compare_and_swap(address, oldval, newval);
 }

+extern "C" CDECL intptr_t
+rust_atomic_increment(intptr_t *address) {
+    return sync::increment(address);
+}
+
+extern "C" CDECL intptr_t
+rust_atomic_decrement(intptr_t *address) {
+    return sync::decrement(address);
+}
+
 extern "C" CDECL void
 rust_task_weaken(rust_port_id chan) {
     rust_task *task = rust_get_current_task();

branches/snap-stage3/src/rt/rustrt.def.in

Lines changed: 3 additions & 1 deletion
@@ -178,6 +178,8 @@ rust_dbg_do_nothing
 rust_dbg_breakpoint
 rust_osmain_sched_id
 rust_compare_and_swap_ptr
+rust_atomic_increment
+rust_atomic_decrement
 rust_global_env_chan_ptr
 rust_port_take
 rust_port_drop
@@ -205,4 +207,4 @@ rust_gc_metadata
 rust_uv_ip4_port
 rust_uv_ip6_port
 rust_uv_tcp_getpeername
-rust_uv_tcp_getpeername6
+rust_uv_tcp_getpeername6

branches/snap-stage3/src/rustc/lib/llvm.rs

Lines changed: 0 additions & 3 deletions
@@ -843,9 +843,6 @@ extern mod llvm {
                              Name: *c_char) -> ValueRef;

     /* Atomic Operations */
-    fn LLVMBuildAtomicCmpXchg(B: BuilderRef, LHS: ValueRef,
-                              CMP: ValueRef, RHS: ValueRef,
-                              ++Order: AtomicOrdering) -> ValueRef;
     fn LLVMBuildAtomicRMW(B: BuilderRef, ++Op: AtomicBinOp,
                           LHS: ValueRef, RHS: ValueRef,
                           ++Order: AtomicOrdering) -> ValueRef;

branches/snap-stage3/src/rustc/middle/borrowck/loan.rs

Lines changed: 61 additions & 9 deletions
@@ -106,23 +106,26 @@ impl LoanContext {
           cat_discr(base, _) => {
             self.loan(base, req_mutbl)
           }
-          cat_comp(cmt_base, comp_field(*)) |
-          cat_comp(cmt_base, comp_index(*)) |
-          cat_comp(cmt_base, comp_tuple) => {
+          cat_comp(cmt_base, comp_field(_, m)) |
+          cat_comp(cmt_base, comp_index(_, m)) => {
             // For most components, the type of the embedded data is
             // stable. Therefore, the base structure need only be
            // const---unless the component must be immutable. In
             // that case, it must also be embedded in an immutable
             // location, or else the whole structure could be
             // overwritten and the component along with it.
-            self.loan_stable_comp(cmt, cmt_base, req_mutbl)
+            self.loan_stable_comp(cmt, cmt_base, req_mutbl, m)
+          }
+          cat_comp(cmt_base, comp_tuple) => {
+            // As above.
+            self.loan_stable_comp(cmt, cmt_base, req_mutbl, m_imm)
           }
           cat_comp(cmt_base, comp_variant(enum_did)) => {
             // For enums, the memory is unstable if there are multiple
             // variants, because if the enum value is overwritten then
             // the memory changes type.
             if ty::enum_is_univariant(self.bccx.tcx, enum_did) {
-                self.loan_stable_comp(cmt, cmt_base, req_mutbl)
+                self.loan_stable_comp(cmt, cmt_base, req_mutbl, m_imm)
             } else {
                 self.loan_unstable_deref(cmt, cmt_base, req_mutbl)
             }
@@ -150,10 +153,59 @@ impl LoanContext {
     fn loan_stable_comp(&self,
                         cmt: cmt,
                         cmt_base: cmt,
-                        req_mutbl: ast::mutability) -> bckres<()> {
-        let base_mutbl = match req_mutbl {
-            m_imm => m_imm,
-            m_const | m_mutbl => m_const
+                        req_mutbl: ast::mutability,
+                        comp_mutbl: ast::mutability) -> bckres<()> {
+        // Determine the mutability that the base component must have,
+        // given the required mutability of the pointer (`req_mutbl`)
+        // and the declared mutability of the component (`comp_mutbl`).
+        // This is surprisingly subtle.
+        //
+        // Note that the *declared* mutability of the component is not
+        // necessarily the same as cmt.mutbl, since a component
+        // declared as immutable but embedded in a mutable context
+        // becomes mutable. It's best to think of comp_mutbl as being
+        // either MUTABLE or DEFAULT, not MUTABLE or IMMUTABLE. We
+        // should really patch up the AST to reflect this distinction.
+        //
+        // Let's consider the cases below:
+        //
+        // 1. mut required, mut declared: In this case, the base
+        //    component must merely be const. The reason is that it
+        //    does not matter if the base component is borrowed as
+        //    mutable or immutable, as the mutability of the base
+        //    component is overridden in the field declaration itself
+        //    (see `compile-fail/borrowck-mut-field-imm-base.rs`)
+        //
+        // 2. mut required, imm declared: This would only be legal if
+        //    the component is embeded in a mutable context. However,
+        //    we detect mismatches between the mutability of the value
+        //    as a whole and the required mutability in `issue_loan()`
+        //    above. In any case, presuming that the component IS
+        //    embedded in a mutable context, both the component and
+        //    the base must be loaned as MUTABLE. This is to ensure
+        //    that there is no loan of the base as IMMUTABLE, which
+        //    would imply that the component must be IMMUTABLE too
+        //    (see `compile-fail/borrowck-imm-field-imm-base.rs`).
+        //
+        // 3. mut required, const declared: this shouldn't really be
+        //    possible, since I don't think you can declare a const
+        //    field, but I guess if we DID permit such a declaration
+        //    it would be equivalent to the case above?
+        //
+        // 4. imm required, * declared: In this case, the base must be
+        //    immutable. This is true regardless of what was declared
+        //    for this subcomponent, this if the base is mutable, the
+        //    subcomponent must be mutable.
+        //    (see `compile-fail/borrowck-imm-field-mut-base.rs`).
+        //
+        // 5. const required, * declared: In this case, the base need
+        //    only be const, since we don't ultimately care whether
+        //    the subcomponent is mutable or not.
+        let base_mutbl = match (req_mutbl, comp_mutbl) {
+            (m_mutbl, m_mutbl) => m_const, // (1)
+            (m_mutbl, _) => m_mutbl,       // (2, 3)
+            (m_imm, _) => m_imm,           // (4)
+            (m_const, _) => m_const        // (5)
         };

         do self.loan(cmt_base, base_mutbl).chain |_ok| {
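The borrowck change above threads the component's declared mutability into loan_stable_comp and derives the base loan mutability from the (req_mutbl, comp_mutbl) pair according to the five cases in the comment. A standalone sketch of just that decision table, with a hypothetical Mutbl enum standing in for ast::mutability (m_const / m_imm / m_mutbl in the real code):

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum Mutbl { Const, Imm, Mut }

// Given the mutability required of the borrowed component (`req`) and the
// component's declared mutability (`comp`), compute the mutability the
// enclosing base must be loaned at (cases numbered as in the diff's comment).
fn base_mutbl(req: Mutbl, comp: Mutbl) -> Mutbl {
    match (req, comp) {
        (Mutbl::Mut, Mutbl::Mut) => Mutbl::Const, // (1) mut field overrides the base
        (Mutbl::Mut, _)          => Mutbl::Mut,   // (2, 3) mutability must come from the base
        (Mutbl::Imm, _)          => Mutbl::Imm,   // (4) an imm borrow needs an imm base
        (Mutbl::Const, _)        => Mutbl::Const, // (5) a const borrow only needs a const base
    }
}

fn main() {
    // Borrowing a declared-mut field mutably only needs a const base (case 1)...
    assert_eq!(base_mutbl(Mutbl::Mut, Mutbl::Mut), Mutbl::Const);
    // ...but an immutable borrow always needs an immutable base (case 4).
    assert_eq!(base_mutbl(Mutbl::Imm, Mutbl::Mut), Mutbl::Imm);
}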

branches/snap-stage3/src/rustc/middle/trans/build.rs

Lines changed: 0 additions & 5 deletions
@@ -813,11 +813,6 @@ fn Resume(cx: block, Exn: ValueRef) -> ValueRef {
 }

 // Atomic Operations
-fn AtomicCmpXchg(cx: block, dst: ValueRef,
-                 cmp: ValueRef, src: ValueRef,
-                 order: AtomicOrdering) -> ValueRef {
-    llvm::LLVMBuildAtomicCmpXchg(B(cx), dst, cmp, src, order)
-}
 fn AtomicRMW(cx: block, op: AtomicBinOp,
              dst: ValueRef, src: ValueRef,
              order: AtomicOrdering) -> ValueRef {
