(index<- ) ./libstd/unstable/atomics.rs
1 // Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
4 //
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
10
11 /*!
12 * Atomic types
13 *
14 * Basic atomic types supporting atomic operations. Each method takes an `Ordering` which
15 * represents the strength of the memory barrier for that operation. These orderings are the same
16 * as C++11 atomic orderings [http://gcc.gnu.org/wiki/Atomic/GCCMM/AtomicSync]
17 *
18 * All atomic types are a single word in size.
19 */
20
21 use unstable::intrinsics;
22 use cast;
23 use option::{Option,Some,None};
24 use libc::c_void;
25 use ops::Drop;
26
/**
 * A simple atomic flag, that can be set and cleared. The most basic atomic type.
 */
pub struct AtomicFlag {
    priv v: int  // 0 when clear; set to 1 by `test_and_set` (see impl below)
}
33
/**
 * An atomic boolean type.
 */
pub struct AtomicBool {
    priv v: uint  // 0 encodes false, non-zero encodes true (see `new`/`load`)
}
40
/**
 * A signed atomic integer type, supporting basic atomic arithmetic operations
 */
pub struct AtomicInt {
    priv v: int  // current value; only accessed through the atomic_* helpers
}
47
/**
 * An unsigned atomic integer type, supporting basic atomic arithmetic operations
 */
pub struct AtomicUint {
    priv v: uint  // current value; only accessed through the atomic_* helpers
}
54
/**
 * An unsafe atomic pointer. Only supports basic atomic operations
 */
pub struct AtomicPtr<T> {
    priv p: *mut T  // raw pointer; no ownership or lifetime guarantees are implied
}
61
/**
 * An owned atomic pointer. Ensures that only a single reference to the data is held at any time.
 */
// NOTE(review): `#[unsafe_no_drop_flag]` appears to rely on a moved-from value
// being zeroed so that `drop` (which calls `take`) sees an empty option — confirm.
#[unsafe_no_drop_flag]
pub struct AtomicOption<T> {
    priv p: *mut c_void  // an owned `~T` stored as a raw pointer; null encodes `None`
}
69
/// The strength of the memory barrier attached to an atomic operation.
/// These correspond to the C++11 atomic orderings (see module docs).
pub enum Ordering {
    /// No ordering constraint; only atomicity is guaranteed.
    Relaxed,
    /// Earlier memory operations may not be reordered after this one.
    Release,
    /// Later memory operations may not be reordered before this one.
    Acquire,
    /// Both `Acquire` and `Release` semantics.
    AcqRel,
    /// Sequentially consistent: the strongest ordering. Operations that pass
    /// an unsupported ordering also fall back to this (the `_` match arms below).
    SeqCst
}
77
// Constant initializers, usable in `static` declarations where `new()`
// cannot be called. Each matches the state produced by the type's `new`.
pub static INIT_ATOMIC_FLAG : AtomicFlag = AtomicFlag { v: 0 };
pub static INIT_ATOMIC_BOOL : AtomicBool = AtomicBool { v: 0 };
pub static INIT_ATOMIC_INT : AtomicInt = AtomicInt { v: 0 };
pub static INIT_ATOMIC_UINT : AtomicUint = AtomicUint { v: 0 };
82
83 impl AtomicFlag {
84
85 pub fn new() -> AtomicFlag {
86 AtomicFlag { v: 0 }
87 }
88
89 /**
90 * Clears the atomic flag
91 */
92 #[inline]
93 pub fn clear(&mut self, order: Ordering) {
94 unsafe {atomic_store(&mut self.v, 0, order)}
95 }
96
97 /**
98 * Sets the flag if it was previously unset, returns the previous value of the
99 * flag.
100 */
101 #[inline]
102 pub fn test_and_set(&mut self, order: Ordering) -> bool {
103 unsafe { atomic_compare_and_swap(&mut self.v, 0, 1, order) > 0 }
104 }
105 }
106
107 impl AtomicBool {
108 pub fn new(v: bool) -> AtomicBool {
109 AtomicBool { v: if v { 1 } else { 0 } }
110 }
111
112 #[inline]
113 pub fn load(&self, order: Ordering) -> bool {
114 unsafe { atomic_load(&self.v, order) > 0 }
115 }
116
117 #[inline]
118 pub fn store(&mut self, val: bool, order: Ordering) {
119 let val = if val { 1 } else { 0 };
120
121 unsafe { atomic_store(&mut self.v, val, order); }
122 }
123
124 #[inline]
125 pub fn swap(&mut self, val: bool, order: Ordering) -> bool {
126 let val = if val { 1 } else { 0 };
127
128 unsafe { atomic_swap(&mut self.v, val, order) > 0 }
129 }
130
131 #[inline]
132 pub fn compare_and_swap(&mut self, old: bool, new: bool, order: Ordering) -> bool {
133 let old = if old { 1 } else { 0 };
134 let new = if new { 1 } else { 0 };
135
136 unsafe { atomic_compare_and_swap(&mut self.v, old, new, order) > 0 }
137 }
138
139 /// Returns the old value
140 #[inline]
141 pub fn fetch_and(&mut self, val: bool, order: Ordering) -> bool {
142 let val = if val { 1 } else { 0 };
143
144 unsafe { atomic_and(&mut self.v, val, order) > 0 }
145 }
146
147 /// Returns the old value
148 #[inline]
149 pub fn fetch_nand(&mut self, val: bool, order: Ordering) -> bool {
150 let val = if val { 1 } else { 0 };
151
152 unsafe { atomic_nand(&mut self.v, val, order) > 0 }
153 }
154
155 /// Returns the old value
156 #[inline]
157 pub fn fetch_or(&mut self, val: bool, order: Ordering) -> bool {
158 let val = if val { 1 } else { 0 };
159
160 unsafe { atomic_or(&mut self.v, val, order) > 0 }
161 }
162
163 /// Returns the old value
164 #[inline]
165 pub fn fetch_xor(&mut self, val: bool, order: Ordering) -> bool {
166 let val = if val { 1 } else { 0 };
167
168 unsafe { atomic_xor(&mut self.v, val, order) > 0 }
169 }
170 }
171
172 impl AtomicInt {
173 pub fn new(v: int) -> AtomicInt {
174 AtomicInt { v:v }
175 }
176
177 #[inline]
178 pub fn load(&self, order: Ordering) -> int {
179 unsafe { atomic_load(&self.v, order) }
180 }
181
182 #[inline]
183 pub fn store(&mut self, val: int, order: Ordering) {
184 unsafe { atomic_store(&mut self.v, val, order); }
185 }
186
187 #[inline]
188 pub fn swap(&mut self, val: int, order: Ordering) -> int {
189 unsafe { atomic_swap(&mut self.v, val, order) }
190 }
191
192 #[inline]
193 pub fn compare_and_swap(&mut self, old: int, new: int, order: Ordering) -> int {
194 unsafe { atomic_compare_and_swap(&mut self.v, old, new, order) }
195 }
196
197 /// Returns the old value (like __sync_fetch_and_add).
198 #[inline]
199 pub fn fetch_add(&mut self, val: int, order: Ordering) -> int {
200 unsafe { atomic_add(&mut self.v, val, order) }
201 }
202
203 /// Returns the old value (like __sync_fetch_and_sub).
204 #[inline]
205 pub fn fetch_sub(&mut self, val: int, order: Ordering) -> int {
206 unsafe { atomic_sub(&mut self.v, val, order) }
207 }
208 }
209
210 impl AtomicUint {
211 pub fn new(v: uint) -> AtomicUint {
212 AtomicUint { v:v }
213 }
214
215 #[inline]
216 pub fn load(&self, order: Ordering) -> uint {
217 unsafe { atomic_load(&self.v, order) }
218 }
219
220 #[inline]
221 pub fn store(&mut self, val: uint, order: Ordering) {
222 unsafe { atomic_store(&mut self.v, val, order); }
223 }
224
225 #[inline]
226 pub fn swap(&mut self, val: uint, order: Ordering) -> uint {
227 unsafe { atomic_swap(&mut self.v, val, order) }
228 }
229
230 #[inline]
231 pub fn compare_and_swap(&mut self, old: uint, new: uint, order: Ordering) -> uint {
232 unsafe { atomic_compare_and_swap(&mut self.v, old, new, order) }
233 }
234
235 /// Returns the old value (like __sync_fetch_and_add).
236 #[inline]
237 pub fn fetch_add(&mut self, val: uint, order: Ordering) -> uint {
238 unsafe { atomic_add(&mut self.v, val, order) }
239 }
240
241 /// Returns the old value (like __sync_fetch_and_sub)..
242 #[inline]
243 pub fn fetch_sub(&mut self, val: uint, order: Ordering) -> uint {
244 unsafe { atomic_sub(&mut self.v, val, order) }
245 }
246 }
247
248 impl<T> AtomicPtr<T> {
249 pub fn new(p: *mut T) -> AtomicPtr<T> {
250 AtomicPtr { p:p }
251 }
252
253 #[inline]
254 pub fn load(&self, order: Ordering) -> *mut T {
255 unsafe { atomic_load(&self.p, order) }
256 }
257
258 #[inline]
259 pub fn store(&mut self, ptr: *mut T, order: Ordering) {
260 unsafe { atomic_store(&mut self.p, ptr, order); }
261 }
262
263 #[inline]
264 pub fn swap(&mut self, ptr: *mut T, order: Ordering) -> *mut T {
265 unsafe { atomic_swap(&mut self.p, ptr, order) }
266 }
267
268 #[inline]
269 pub fn compare_and_swap(&mut self, old: *mut T, new: *mut T, order: Ordering) -> *mut T {
270 unsafe { atomic_compare_and_swap(&mut self.p, old, new, order) }
271 }
272 }
273
impl<T> AtomicOption<T> {
    /// Create an option holding `p`. Ownership of the `~T` moves into the
    /// atomic: the box is stored as a raw pointer.
    pub fn new(p: ~T) -> AtomicOption<T> {
        unsafe {
            AtomicOption {
                p: cast::transmute(p)
            }
        }
    }

    /// Create an empty option. A null pointer encodes `None`.
    pub fn empty() -> AtomicOption<T> {
        unsafe {
            AtomicOption {
                p: cast::transmute(0)
            }
        }
    }

    /// Atomically store `val`, returning the previous contents
    /// (`None` if the option was empty).
    #[inline]
    pub fn swap(&mut self, val: ~T, order: Ordering) -> Option<~T> {
        unsafe {
            let val = cast::transmute(val);

            let p = atomic_swap(&mut self.p, val, order);
            // View the old pointer as a plain word so it can be compared
            // against the null (empty) encoding without dereferencing it.
            let pv : &uint = cast::transmute(&p);

            if *pv == 0 {
                None
            } else {
                // Non-null: reconstitute the owned box and hand it back.
                Some(cast::transmute(p))
            }
        }
    }

    /// Atomically take the value out, leaving the option empty.
    #[inline]
    pub fn take(&mut self, order: Ordering) -> Option<~T> {
        unsafe {
            // Swapping in a null pointer marks the option as empty.
            self.swap(cast::transmute(0), order)
        }
    }

    /// A compare-and-swap. Succeeds if the option is 'None' and returns 'None'
    /// if so. If the option was already 'Some', returns 'Some' of the rejected
    /// value.
    #[inline]
    pub fn fill(&mut self, val: ~T, order: Ordering) -> Option<~T> {
        unsafe {
            let val = cast::transmute(val);
            let expected = cast::transmute(0);
            let oldval = atomic_compare_and_swap(&mut self.p, expected, val, order);
            if oldval == expected {
                // CAS succeeded: ownership of `val` moved into the atomic.
                None
            } else {
                // CAS failed: return ownership of `val` to the caller
                // instead of leaking it.
                Some(cast::transmute(val))
            }
        }
    }

    /// Be careful: The caller must have some external method of ensuring the
    /// result does not get invalidated by another task after this returns.
    #[inline]
    pub fn is_empty(&mut self, order: Ordering) -> bool {
        unsafe { atomic_load(&self.p, order) == cast::transmute(0) }
    }
}
338
#[unsafe_destructor]
impl<T> Drop for AtomicOption<T> {
    fn drop(&mut self) {
        // Free any boxed value still held: `take` swaps in null and returns
        // the old contents as an Option, which is dropped here.
        // NOTE(review): with `#[unsafe_no_drop_flag]` this presumably also
        // runs on a zeroed, already-moved value; `take` then yields `None`,
        // making it a no-op — confirm.
        let _ = self.take(SeqCst);
    }
}
345
346 #[inline]
347 pub unsafe fn atomic_store<T>(dst: &mut T, val: T, order:Ordering) {
348 let dst = cast::transmute(dst);
349 let val = cast::transmute(val);
350
351 match order {
352 Release => intrinsics::atomic_store_rel(dst, val),
353 Relaxed => intrinsics::atomic_store_relaxed(dst, val),
354 _ => intrinsics::atomic_store(dst, val)
355 }
356 }
357
358 #[inline]
359 pub unsafe fn atomic_load<T>(dst: &T, order:Ordering) -> T {
360 let dst = cast::transmute(dst);
361
362 cast::transmute(match order {
363 Acquire => intrinsics::atomic_load_acq(dst),
364 Relaxed => intrinsics::atomic_load_relaxed(dst),
365 _ => intrinsics::atomic_load(dst)
366 })
367 }
368
369 #[inline]
370 pub unsafe fn atomic_swap<T>(dst: &mut T, val: T, order: Ordering) -> T {
371 let dst = cast::transmute(dst);
372 let val = cast::transmute(val);
373
374 cast::transmute(match order {
375 Acquire => intrinsics::atomic_xchg_acq(dst, val),
376 Release => intrinsics::atomic_xchg_rel(dst, val),
377 AcqRel => intrinsics::atomic_xchg_acqrel(dst, val),
378 Relaxed => intrinsics::atomic_xchg_relaxed(dst, val),
379 _ => intrinsics::atomic_xchg(dst, val)
380 })
381 }
382
383 /// Returns the old value (like __sync_fetch_and_add).
384 #[inline]
385 pub unsafe fn atomic_add<T>(dst: &mut T, val: T, order: Ordering) -> T {
386 let dst = cast::transmute(dst);
387 let val = cast::transmute(val);
388
389 cast::transmute(match order {
390 Acquire => intrinsics::atomic_xadd_acq(dst, val),
391 Release => intrinsics::atomic_xadd_rel(dst, val),
392 AcqRel => intrinsics::atomic_xadd_acqrel(dst, val),
393 Relaxed => intrinsics::atomic_xadd_relaxed(dst, val),
394 _ => intrinsics::atomic_xadd(dst, val)
395 })
396 }
397
398 /// Returns the old value (like __sync_fetch_and_sub).
399 #[inline]
400 pub unsafe fn atomic_sub<T>(dst: &mut T, val: T, order: Ordering) -> T {
401 let dst = cast::transmute(dst);
402 let val = cast::transmute(val);
403
404 cast::transmute(match order {
405 Acquire => intrinsics::atomic_xsub_acq(dst, val),
406 Release => intrinsics::atomic_xsub_rel(dst, val),
407 AcqRel => intrinsics::atomic_xsub_acqrel(dst, val),
408 Relaxed => intrinsics::atomic_xsub_relaxed(dst, val),
409 _ => intrinsics::atomic_xsub(dst, val)
410 })
411 }
412
413 #[inline]
414 pub unsafe fn atomic_compare_and_swap<T>(dst:&mut T, old:T, new:T, order: Ordering) -> T {
415 let dst = cast::transmute(dst);
416 let old = cast::transmute(old);
417 let new = cast::transmute(new);
418
419 cast::transmute(match order {
420 Acquire => intrinsics::atomic_cxchg_acq(dst, old, new),
421 Release => intrinsics::atomic_cxchg_rel(dst, old, new),
422 AcqRel => intrinsics::atomic_cxchg_acqrel(dst, old, new),
423 Relaxed => intrinsics::atomic_cxchg_relaxed(dst, old, new),
424 _ => intrinsics::atomic_cxchg(dst, old, new),
425 })
426 }
427
428 #[inline]
429 pub unsafe fn atomic_and<T>(dst: &mut T, val: T, order: Ordering) -> T {
430 let dst = cast::transmute(dst);
431 let val = cast::transmute(val);
432
433 cast::transmute(match order {
434 Acquire => intrinsics::atomic_and_acq(dst, val),
435 Release => intrinsics::atomic_and_rel(dst, val),
436 AcqRel => intrinsics::atomic_and_acqrel(dst, val),
437 Relaxed => intrinsics::atomic_and_relaxed(dst, val),
438 _ => intrinsics::atomic_and(dst, val)
439 })
440 }
441
442
443 #[inline]
444 pub unsafe fn atomic_nand<T>(dst: &mut T, val: T, order: Ordering) -> T {
445 let dst = cast::transmute(dst);
446 let val = cast::transmute(val);
447
448 cast::transmute(match order {
449 Acquire => intrinsics::atomic_nand_acq(dst, val),
450 Release => intrinsics::atomic_nand_rel(dst, val),
451 AcqRel => intrinsics::atomic_nand_acqrel(dst, val),
452 Relaxed => intrinsics::atomic_nand_relaxed(dst, val),
453 _ => intrinsics::atomic_nand(dst, val)
454 })
455 }
456
457
458 #[inline]
459 pub unsafe fn atomic_or<T>(dst: &mut T, val: T, order: Ordering) -> T {
460 let dst = cast::transmute(dst);
461 let val = cast::transmute(val);
462
463 cast::transmute(match order {
464 Acquire => intrinsics::atomic_or_acq(dst, val),
465 Release => intrinsics::atomic_or_rel(dst, val),
466 AcqRel => intrinsics::atomic_or_acqrel(dst, val),
467 Relaxed => intrinsics::atomic_or_relaxed(dst, val),
468 _ => intrinsics::atomic_or(dst, val)
469 })
470 }
471
472
473 #[inline]
474 pub unsafe fn atomic_xor<T>(dst: &mut T, val: T, order: Ordering) -> T {
475 let dst = cast::transmute(dst);
476 let val = cast::transmute(val);
477
478 cast::transmute(match order {
479 Acquire => intrinsics::atomic_xor_acq(dst, val),
480 Release => intrinsics::atomic_xor_rel(dst, val),
481 AcqRel => intrinsics::atomic_xor_acqrel(dst, val),
482 Relaxed => intrinsics::atomic_xor_relaxed(dst, val),
483 _ => intrinsics::atomic_xor(dst, val)
484 })
485 }
486
487
488 /**
489 * An atomic fence.
490 *
491 * A fence 'A' which has `Release` ordering semantics, synchronizes with a
492 * fence 'B' with (at least) `Acquire` semantics, if and only if there exists
493 * atomic operations X and Y, both operating on some atomic object 'M' such
494 * that A is sequenced before X, Y is synchronized before B and Y observers
495 * the change to M. This provides a happens-before dependence between A and B.
496 *
497 * Atomic operations with `Release` or `Acquire` semantics can also synchronize
498 * with a fence.
499 *
500 * A fence with has `SeqCst` ordering, in addition to having both `Acquire` and
501 * `Release` semantics, participates in the global program order of the other
502 * `SeqCst` operations and/or fences.
503 *
504 * Accepts `Acquire`, `Release`, `AcqRel` and `SeqCst` orderings.
505 */
506 #[inline]
507 pub fn fence(order: Ordering) {
508 unsafe {
509 match order {
510 Acquire => intrinsics::atomic_fence_acq(),
511 Release => intrinsics::atomic_fence_rel(),
512 AcqRel => intrinsics::atomic_fence_rel(),
513 _ => intrinsics::atomic_fence(),
514 }
515 }
516 }
517
#[cfg(test)]
mod test {
    use option::*;
    use super::*;

    // AtomicFlag: test_and_set returns the previous state; clear resets it.
    #[test]
    fn flag() {
        let mut flg = AtomicFlag::new();
        assert!(!flg.test_and_set(SeqCst));
        assert!(flg.test_and_set(SeqCst));

        flg.clear(SeqCst);
        assert!(!flg.test_and_set(SeqCst));
    }

    // A freshly-created empty AtomicOption reports empty.
    #[test]
    fn option_empty() {
        let mut option: AtomicOption<()> = AtomicOption::empty();
        assert!(option.is_empty(SeqCst));
    }

    // swap returns the previous contents and installs the new value.
    #[test]
    fn option_swap() {
        let mut p = AtomicOption::new(~1);
        let a = ~2;

        let b = p.swap(a, SeqCst);

        assert_eq!(b, Some(~1));
        assert_eq!(p.take(SeqCst), Some(~2));
    }

    // take empties the option; a second take yields None.
    #[test]
    fn option_take() {
        let mut p = AtomicOption::new(~1);

        assert_eq!(p.take(SeqCst), Some(~1));
        assert_eq!(p.take(SeqCst), None);

        let p2 = ~2;
        p.swap(p2, SeqCst);

        assert_eq!(p.take(SeqCst), Some(~2));
    }

    // fill only succeeds when the option is empty; a failed fill returns
    // the rejected value to the caller instead of leaking it.
    #[test]
    fn option_fill() {
        let mut p = AtomicOption::new(~1);
        assert!(p.fill(~2, SeqCst).is_some()); // should fail; shouldn't leak!
        assert_eq!(p.take(SeqCst), Some(~1));

        assert!(p.fill(~2, SeqCst).is_none()); // shouldn't fail
        assert_eq!(p.take(SeqCst), Some(~2));
    }

    // fetch_and returns the old value and stores the conjunction.
    #[test]
    fn bool_and() {
        let mut a = AtomicBool::new(true);
        assert_eq!(a.fetch_and(false, SeqCst),true);
        assert_eq!(a.load(SeqCst),false);
    }

    // Statics initialized from the INIT_* constants must match `new()`.
    static mut S_FLAG : AtomicFlag = INIT_ATOMIC_FLAG;
    static mut S_BOOL : AtomicBool = INIT_ATOMIC_BOOL;
    static mut S_INT : AtomicInt = INIT_ATOMIC_INT;
    static mut S_UINT : AtomicUint = INIT_ATOMIC_UINT;

    #[test]
    fn static_init() {
        unsafe {
            assert!(!S_FLAG.test_and_set(SeqCst));
            assert!(!S_BOOL.load(SeqCst));
            assert!(S_INT.load(SeqCst) == 0);
            assert!(S_UINT.load(SeqCst) == 0);
        }
    }
}
libstd/unstable/atomics.rs:65:23-65:23 -struct- definition:
#[unsafe_no_drop_flag]
pub struct AtomicOption<T> {
references:-283: pub fn empty() -> AtomicOption<T> {
340: impl<T> Drop for AtomicOption<T> {
277: AtomicOption {
285: AtomicOption {
274: impl<T> AtomicOption<T> {
275: pub fn new(p: ~T) -> AtomicOption<T> {
libstd/rt/comm.rs:
581: priv next: UnsafeArc<AtomicOption<StreamChanOne<T>>>
634: priv next_link: UnsafeArc<AtomicOption<PortOne<StreamPortOne<T>>>>
libstd/unstable/sync.rs:
40: unwrapper: AtomicOption<(comm::ChanOne<()>, comm::PortOne<bool>)>,
libstd/unstable/atomics.rs:369:10-369:10 -fn- definition:
#[inline]
pub unsafe fn atomic_swap<T>(dst: &mut T, val: T, order: Ordering) -> T {
references:-265: unsafe { atomic_swap(&mut self.p, ptr, order) }
296: let p = atomic_swap(&mut self.p, val, order);
128: unsafe { atomic_swap(&mut self.v, val, order) > 0 }
227: unsafe { atomic_swap(&mut self.v, val, order) }
189: unsafe { atomic_swap(&mut self.v, val, order) }
libstd/unstable/atomics.rs:50:4-50:4 -struct- definition:
*/
pub struct AtomicUint {
references:-210: impl AtomicUint {
81: pub static INIT_ATOMIC_UINT : AtomicUint = AtomicUint { v: 0 };
212: AtomicUint { v:v }
81: pub static INIT_ATOMIC_UINT : AtomicUint = AtomicUint { v: 0 };
211: pub fn new(v: uint) -> AtomicUint {
libstd/rt/kill.rs:
172: struct KillFlag(AtomicUint);
197: unkillable: AtomicUint,
libstd/rt/comm.rs:
45: state: AtomicUint,
libstd/unstable/sync.rs:
35: count: AtomicUint,
libstd/unstable/atomics.rs:36:4-36:4 -struct- definition:
*/
pub struct AtomicBool {
references:-108: pub fn new(v: bool) -> AtomicBool {
107: impl AtomicBool {
79: pub static INIT_ATOMIC_BOOL : AtomicBool = AtomicBool { v: 0 };
109: AtomicBool { v: if v { 1 } else { 0 } }
79: pub static INIT_ATOMIC_BOOL : AtomicBool = AtomicBool { v: 0 };
libstd/unstable/atomics.rs:458:10-458:10 -fn- definition:
#[inline]
pub unsafe fn atomic_or<T>(dst: &mut T, val: T, order: Ordering) -> T {
references:-160: unsafe { atomic_or(&mut self.v, val, order) > 0 }
libstd/unstable/atomics.rs:384:10-384:10 -fn- definition:
#[inline]
pub unsafe fn atomic_add<T>(dst: &mut T, val: T, order: Ordering) -> T {
references:-238: unsafe { atomic_add(&mut self.v, val, order) }
200: unsafe { atomic_add(&mut self.v, val, order) }
libstd/unstable/atomics.rs:428:10-428:10 -fn- definition:
#[inline]
pub unsafe fn atomic_and<T>(dst: &mut T, val: T, order: Ordering) -> T {
references:-144: unsafe { atomic_and(&mut self.v, val, order) > 0 }
libstd/unstable/atomics.rs:358:10-358:10 -fn- definition:
#[inline]
pub unsafe fn atomic_load<T>(dst: &T, order:Ordering) -> T {
references:-114: unsafe { atomic_load(&self.v, order) > 0 }
335: unsafe { atomic_load(&self.p, order) == cast::transmute(0) }
217: unsafe { atomic_load(&self.v, order) }
179: unsafe { atomic_load(&self.v, order) }
255: unsafe { atomic_load(&self.p, order) }
libstd/unstable/atomics.rs:43:4-43:4 -struct- definition:
*/
pub struct AtomicInt {
references:-80: pub static INIT_ATOMIC_INT : AtomicInt = AtomicInt { v: 0 };
174: AtomicInt { v:v }
80: pub static INIT_ATOMIC_INT : AtomicInt = AtomicInt { v: 0 };
173: pub fn new(v: int) -> AtomicInt {
172: impl AtomicInt {
libstd/rt/util.rs:
139: static mut EXIT_STATUS: AtomicInt = INIT_ATOMIC_INT;
libstd/unstable/atomics.rs:413:10-413:10 -fn- definition:
#[inline]
pub unsafe fn atomic_compare_and_swap<T>(dst:&mut T, old:T, new:T, order: Ordering) -> T {
references:-103: unsafe { atomic_compare_and_swap(&mut self.v, 0, 1, order) > 0 }
270: unsafe { atomic_compare_and_swap(&mut self.p, old, new, order) }
194: unsafe { atomic_compare_and_swap(&mut self.v, old, new, order) }
322: let oldval = atomic_compare_and_swap(&mut self.p, expected, val, order);
136: unsafe { atomic_compare_and_swap(&mut self.v, old, new, order) > 0 }
232: unsafe { atomic_compare_and_swap(&mut self.v, old, new, order) }
libstd/unstable/atomics.rs:57:4-57:4 -struct- definition:
*/
pub struct AtomicPtr<T> {
references:-249: pub fn new(p: *mut T) -> AtomicPtr<T> {
248: impl<T> AtomicPtr<T> {
250: AtomicPtr { p:p }
libstd/unstable/atomics.rs:69:1-69:1 -enum- definition:
pub enum Ordering {
references:-334: pub fn is_empty(&mut self, order: Ordering) -> bool {
347: pub unsafe fn atomic_store<T>(dst: &mut T, val: T, order:Ordering) {
385: pub unsafe fn atomic_add<T>(dst: &mut T, val: T, order: Ordering) -> T {
292: pub fn swap(&mut self, val: ~T, order: Ordering) -> Option<~T> {
444: pub unsafe fn atomic_nand<T>(dst: &mut T, val: T, order: Ordering) -> T {
221: pub fn store(&mut self, val: uint, order: Ordering) {
459: pub unsafe fn atomic_or<T>(dst: &mut T, val: T, order: Ordering) -> T {
414: pub unsafe fn atomic_compare_and_swap<T>(dst:&mut T, old:T, new:T, order: Ordering) -> T {
102: pub fn test_and_set(&mut self, order: Ordering) -> bool {
125: pub fn swap(&mut self, val: bool, order: Ordering) -> bool {
183: pub fn store(&mut self, val: int, order: Ordering) {
269: pub fn compare_and_swap(&mut self, old: *mut T, new: *mut T, order: Ordering) -> *mut T {
178: pub fn load(&self, order: Ordering) -> int {
370: pub unsafe fn atomic_swap<T>(dst: &mut T, val: T, order: Ordering) -> T {
132: pub fn compare_and_swap(&mut self, old: bool, new: bool, order: Ordering) -> bool {
93: pub fn clear(&mut self, order: Ordering) {
359: pub unsafe fn atomic_load<T>(dst: &T, order:Ordering) -> T {
216: pub fn load(&self, order: Ordering) -> uint {
157: pub fn fetch_or(&mut self, val: bool, order: Ordering) -> bool {
188: pub fn swap(&mut self, val: int, order: Ordering) -> int {
231: pub fn compare_and_swap(&mut self, old: uint, new: uint, order: Ordering) -> uint {
193: pub fn compare_and_swap(&mut self, old: int, new: int, order: Ordering) -> int {
118: pub fn store(&mut self, val: bool, order: Ordering) {
308: pub fn take(&mut self, order: Ordering) -> Option<~T> {
507: pub fn fence(order: Ordering) {
254: pub fn load(&self, order: Ordering) -> *mut T {
400: pub unsafe fn atomic_sub<T>(dst: &mut T, val: T, order: Ordering) -> T {
141: pub fn fetch_and(&mut self, val: bool, order: Ordering) -> bool {
226: pub fn swap(&mut self, val: uint, order: Ordering) -> uint {
243: pub fn fetch_sub(&mut self, val: uint, order: Ordering) -> uint {
(205)(237)(259)(165)(318)(149)(429)(474)(113)(199)(264)libstd/unstable/atomics.rs:399:10-399:10 -fn- definition:
#[inline]
pub unsafe fn atomic_sub<T>(dst: &mut T, val: T, order: Ordering) -> T {
references:-244: unsafe { atomic_sub(&mut self.v, val, order) }
206: unsafe { atomic_sub(&mut self.v, val, order) }
libstd/unstable/atomics.rs:473:10-473:10 -fn- definition:
#[inline]
pub unsafe fn atomic_xor<T>(dst: &mut T, val: T, order: Ordering) -> T {
references:-168: unsafe { atomic_xor(&mut self.v, val, order) > 0 }
libstd/unstable/atomics.rs:346:10-346:10 -fn- definition:
#[inline]
pub unsafe fn atomic_store<T>(dst: &mut T, val: T, order:Ordering) {
references:-260: unsafe { atomic_store(&mut self.p, ptr, order); }
184: unsafe { atomic_store(&mut self.v, val, order); }
222: unsafe { atomic_store(&mut self.v, val, order); }
121: unsafe { atomic_store(&mut self.v, val, order); }
94: unsafe {atomic_store(&mut self.v, 0, order)}
libstd/unstable/atomics.rs:443:10-443:10 -fn- definition:
#[inline]
pub unsafe fn atomic_nand<T>(dst: &mut T, val: T, order: Ordering) -> T {
references:-152: unsafe { atomic_nand(&mut self.v, val, order) > 0 }
libstd/unstable/atomics.rs:29:4-29:4 -struct- definition:
*/
pub struct AtomicFlag {
references:-83: impl AtomicFlag {
78: pub static INIT_ATOMIC_FLAG : AtomicFlag = AtomicFlag { v: 0 };
86: AtomicFlag { v: 0 }
78: pub static INIT_ATOMIC_FLAG : AtomicFlag = AtomicFlag { v: 0 };
85: pub fn new() -> AtomicFlag {