diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs
index 049bf4eb1b0..501f915461a 100644
--- a/src/liballoc/rc.rs
+++ b/src/liballoc/rc.rs
@@ -179,9 +179,9 @@ pub struct Rc<T> {
     _noshare: marker::NoSync
 }

-#[stable]
 impl<T> Rc<T> {
     /// Constructs a new reference-counted pointer.
+    #[stable]
     pub fn new(value: T) -> Rc<T> {
         unsafe {
             Rc {
@@ -200,9 +200,7 @@ impl<T> Rc<T> {
             }
         }
     }
-}

-impl<T> Rc<T> {
     /// Downgrades the reference-counted pointer to a weak reference.
     #[experimental = "Weak pointers may not belong in this module"]
     pub fn downgrade(&self) -> Weak<T> {
diff --git a/src/libcollections/vec.rs b/src/libcollections/vec.rs
index ca9a44a15f2..15e1031f69d 100644
--- a/src/libcollections/vec.rs
+++ b/src/libcollections/vec.rs
@@ -645,7 +645,6 @@ impl<T> Vec<T> {
     /// assert!(vec.capacity() >= 3);
     /// ```
     #[stable]
-    #[unstable = "matches collection reform specification, waiting for dust to settle"]
     pub fn shrink_to_fit(&mut self) {
         if mem::size_of::<T>() == 0 { return }

diff --git a/src/libcore/any.rs b/src/libcore/any.rs
index 021f575b0ac..5511266b4cd 100644
--- a/src/libcore/any.rs
+++ b/src/libcore/any.rs
@@ -88,6 +88,7 @@ use intrinsics::TypeId;
 #[stable]
 pub trait Any: 'static {
     /// Get the `TypeId` of `self`
+    #[stable]
     fn get_type_id(&self) -> TypeId;
 }

@@ -117,7 +118,6 @@ pub trait AnyRefExt<'a> {
 #[stable]
 impl<'a> AnyRefExt<'a> for &'a Any {
     #[inline]
-    #[stable]
     fn is<T: 'static>(self) -> bool {
         // Get TypeId of the type this function is instantiated with
         let t = TypeId::of::<T>();
@@ -130,7 +130,6 @@ impl<'a> AnyRefExt<'a> for &'a Any {
     }

     #[inline]
-    #[unstable = "naming conventions around acquiring references may change"]
     fn downcast_ref<T: 'static>(self) -> Option<&'a T> {
         if self.is::<T>() {
             unsafe {
@@ -159,7 +158,6 @@ pub trait AnyMutRefExt<'a> {
 #[stable]
 impl<'a> AnyMutRefExt<'a> for &'a mut Any {
     #[inline]
-    #[unstable = "naming conventions around acquiring references may change"]
     fn downcast_mut<T: 'static>(self) -> Option<&'a mut T> {
         if self.is::<T>() {
             unsafe {
diff --git a/src/libcore/atomic.rs b/src/libcore/atomic.rs
index 0b1e08a5f43..d25a43ee2fe 100644
--- a/src/libcore/atomic.rs
+++ b/src/libcore/atomic.rs
@@ -58,20 +58,25 @@ pub struct AtomicPtr<T> {
 #[stable]
 pub enum Ordering {
     /// No ordering constraints, only atomic operations
+    #[stable]
     Relaxed,
     /// When coupled with a store, all previous writes become visible
     /// to another thread that performs a load with `Acquire` ordering
     /// on the same value
+    #[stable]
     Release,
     /// When coupled with a load, all subsequent loads will see data
     /// written before a store with `Release` ordering on the same value
     /// in another thread
+    #[stable]
     Acquire,
     /// When coupled with a load, uses `Acquire` ordering, and with a store
     /// `Release` ordering
+    #[stable]
     AcqRel,
     /// Like `AcqRel` with the additional guarantee that all threads see all
     /// sequentially consistent operations in the same order.
+    #[stable]
     SeqCst
 }

@@ -91,10 +96,10 @@ pub const INIT_ATOMIC_UINT: AtomicUint =
 // NB: Needs to be -1 (0b11111111...) to make fetch_nand work correctly
 const UINT_TRUE: uint = -1;

-#[stable]
 impl AtomicBool {
     /// Create a new `AtomicBool`
     #[inline]
+    #[stable]
     pub fn new(v: bool) -> AtomicBool {
         let val = if v { UINT_TRUE } else { 0 };
         AtomicBool { v: UnsafeCell::new(val), nocopy: marker::NoCopy }
@@ -106,6 +111,7 @@ impl AtomicBool {
     ///
     /// Fails if `order` is `Release` or `AcqRel`.
     #[inline]
+    #[stable]
     pub fn load(&self, order: Ordering) -> bool {
         unsafe { atomic_load(self.v.get() as *const uint, order) > 0 }
     }
@@ -116,6 +122,7 @@ impl AtomicBool {
     ///
     /// Fails if `order` is `Acquire` or `AcqRel`.
     #[inline]
+    #[stable]
     pub fn store(&self, val: bool, order: Ordering) {
         let val = if val { UINT_TRUE } else { 0 };

@@ -124,6 +131,7 @@ impl AtomicBool {

     /// Store a value, returning the old value
     #[inline]
+    #[stable]
     pub fn swap(&self, val: bool, order: Ordering) -> bool {
         let val = if val { UINT_TRUE } else { 0 };

@@ -174,6 +182,7 @@ impl AtomicBool {
     /// }
     /// ```
     #[inline]
+    #[stable]
     pub fn compare_and_swap(&self, old: bool, new: bool, order: Ordering) -> bool {
         let old = if old { UINT_TRUE } else { 0 };
         let new = if new { UINT_TRUE } else { 0 };
@@ -205,6 +214,7 @@ impl AtomicBool {
     /// assert_eq!(false, foo.load(SeqCst));
     /// ```
     #[inline]
+    #[stable]
     pub fn fetch_and(&self, val: bool, order: Ordering) -> bool {
         let val = if val { UINT_TRUE } else { 0 };

@@ -236,6 +246,7 @@ impl AtomicBool {
     /// assert_eq!(true, foo.load(SeqCst));
     /// ```
     #[inline]
+    #[stable]
     pub fn fetch_nand(&self, val: bool, order: Ordering) -> bool {
         let val = if val { UINT_TRUE } else { 0 };

@@ -266,6 +277,7 @@ impl AtomicBool {
     /// assert_eq!(false, foo.load(SeqCst));
     /// ```
     #[inline]
+    #[stable]
     pub fn fetch_or(&self, val: bool, order: Ordering) -> bool {
         let val = if val { UINT_TRUE } else { 0 };

@@ -296,6 +308,7 @@ impl AtomicBool {
     /// assert_eq!(false, foo.load(SeqCst));
     /// ```
     #[inline]
+    #[stable]
     pub fn fetch_xor(&self, val: bool, order: Ordering) -> bool {
         let val = if val { UINT_TRUE } else { 0 };

@@ -303,10 +316,10 @@ impl AtomicBool {
     }
 }

-#[stable]
 impl AtomicInt {
     /// Create a new `AtomicInt`
     #[inline]
+    #[stable]
     pub fn new(v: int) -> AtomicInt {
         AtomicInt {v: UnsafeCell::new(v), nocopy: marker::NoCopy}
     }
@@ -317,6 +330,7 @@ impl AtomicInt {
     ///
     /// Fails if `order` is `Release` or `AcqRel`.
     #[inline]
+    #[stable]
     pub fn load(&self, order: Ordering) -> int {
         unsafe { atomic_load(self.v.get() as *const int, order) }
     }
@@ -327,12 +341,14 @@ impl AtomicInt {
     ///
     /// Fails if `order` is `Acquire` or `AcqRel`.
     #[inline]
+    #[stable]
     pub fn store(&self, val: int, order: Ordering) {
         unsafe { atomic_store(self.v.get(), val, order); }
     }

     /// Store a value, returning the old value
     #[inline]
+    #[stable]
     pub fn swap(&self, val: int, order: Ordering) -> int {
         unsafe { atomic_swap(self.v.get(), val, order) }
     }
@@ -343,6 +359,7 @@ impl AtomicInt {
     /// replace the current value with `new`. Return the previous value.
     /// If the return value is equal to `old` then the value was updated.
     #[inline]
+    #[stable]
     pub fn compare_and_swap(&self, old: int, new: int, order: Ordering) -> int {
         unsafe { atomic_compare_and_swap(self.v.get(), old, new, order) }
     }
@@ -359,6 +376,7 @@ impl AtomicInt {
     /// assert_eq!(10, foo.load(SeqCst));
     /// ```
     #[inline]
+    #[stable]
     pub fn fetch_add(&self, val: int, order: Ordering) -> int {
         unsafe { atomic_add(self.v.get(), val, order) }
     }
@@ -375,6 +393,7 @@ impl AtomicInt {
     /// assert_eq!(-10, foo.load(SeqCst));
     /// ```
     #[inline]
+    #[stable]
     pub fn fetch_sub(&self, val: int, order: Ordering) -> int {
         unsafe { atomic_sub(self.v.get(), val, order) }
     }
@@ -390,6 +409,7 @@ impl AtomicInt {
     /// assert_eq!(0b101101, foo.fetch_and(0b110011, SeqCst));
     /// assert_eq!(0b100001, foo.load(SeqCst));
     #[inline]
+    #[stable]
     pub fn fetch_and(&self, val: int, order: Ordering) -> int {
         unsafe { atomic_and(self.v.get(), val, order) }
     }
@@ -405,6 +425,7 @@ impl AtomicInt {
     /// assert_eq!(0b101101, foo.fetch_or(0b110011, SeqCst));
     /// assert_eq!(0b111111, foo.load(SeqCst));
     #[inline]
+    #[stable]
     pub fn fetch_or(&self, val: int, order: Ordering) -> int {
         unsafe { atomic_or(self.v.get(), val, order) }
     }
@@ -420,15 +441,16 @@ impl AtomicInt {
     /// assert_eq!(0b101101, foo.fetch_xor(0b110011, SeqCst));
     /// assert_eq!(0b011110, foo.load(SeqCst));
     #[inline]
+    #[stable]
     pub fn fetch_xor(&self, val: int, order: Ordering) -> int {
         unsafe { atomic_xor(self.v.get(), val, order) }
     }
 }

-#[stable]
 impl AtomicUint {
     /// Create a new `AtomicUint`
     #[inline]
+    #[stable]
     pub fn new(v: uint) -> AtomicUint {
         AtomicUint { v: UnsafeCell::new(v), nocopy: marker::NoCopy }
     }
@@ -439,6 +461,7 @@ impl AtomicUint {
     ///
     /// Fails if `order` is `Release` or `AcqRel`.
     #[inline]
+    #[stable]
     pub fn load(&self, order: Ordering) -> uint {
         unsafe { atomic_load(self.v.get() as *const uint, order) }
     }
@@ -449,12 +472,14 @@ impl AtomicUint {
     ///
     /// Fails if `order` is `Acquire` or `AcqRel`.
     #[inline]
+    #[stable]
     pub fn store(&self, val: uint, order: Ordering) {
         unsafe { atomic_store(self.v.get(), val, order); }
     }

     /// Store a value, returning the old value
     #[inline]
+    #[stable]
     pub fn swap(&self, val: uint, order: Ordering) -> uint {
         unsafe { atomic_swap(self.v.get(), val, order) }
     }
@@ -465,6 +490,7 @@ impl AtomicUint {
     /// replace the current value with `new`. Return the previous value.
     /// If the return value is equal to `old` then the value was updated.
     #[inline]
+    #[stable]
     pub fn compare_and_swap(&self, old: uint, new: uint, order: Ordering) -> uint {
         unsafe { atomic_compare_and_swap(self.v.get(), old, new, order) }
     }
@@ -481,6 +507,7 @@ impl AtomicUint {
     /// assert_eq!(10, foo.load(SeqCst));
     /// ```
     #[inline]
+    #[stable]
     pub fn fetch_add(&self, val: uint, order: Ordering) -> uint {
         unsafe { atomic_add(self.v.get(), val, order) }
     }
@@ -497,6 +524,7 @@ impl AtomicUint {
     /// assert_eq!(0, foo.load(SeqCst));
     /// ```
     #[inline]
+    #[stable]
     pub fn fetch_sub(&self, val: uint, order: Ordering) -> uint {
         unsafe { atomic_sub(self.v.get(), val, order) }
     }
@@ -512,6 +540,7 @@ impl AtomicUint {
     /// assert_eq!(0b101101, foo.fetch_and(0b110011, SeqCst));
     /// assert_eq!(0b100001, foo.load(SeqCst));
     #[inline]
+    #[stable]
     pub fn fetch_and(&self, val: uint, order: Ordering) -> uint {
         unsafe { atomic_and(self.v.get(), val, order) }
     }
@@ -527,6 +556,7 @@ impl AtomicUint {
     /// assert_eq!(0b101101, foo.fetch_or(0b110011, SeqCst));
     /// assert_eq!(0b111111, foo.load(SeqCst));
     #[inline]
+    #[stable]
     pub fn fetch_or(&self, val: uint, order: Ordering) -> uint {
         unsafe { atomic_or(self.v.get(), val, order) }
     }
@@ -542,15 +572,16 @@ impl AtomicUint {
     /// assert_eq!(0b101101, foo.fetch_xor(0b110011, SeqCst));
     /// assert_eq!(0b011110, foo.load(SeqCst));
     #[inline]
+    #[stable]
     pub fn fetch_xor(&self, val: uint, order: Ordering) -> uint {
         unsafe { atomic_xor(self.v.get(), val, order) }
     }
 }

-#[stable]
 impl<T> AtomicPtr<T> {
     /// Create a new `AtomicPtr`
     #[inline]
+    #[stable]
     pub fn new(p: *mut T) -> AtomicPtr<T> {
         AtomicPtr { p: UnsafeCell::new(p as uint), nocopy: marker::NoCopy }
     }
@@ -561,6 +592,7 @@ impl<T> AtomicPtr<T> {
     ///
     /// Fails if `order` is `Release` or `AcqRel`.
     #[inline]
+    #[stable]
     pub fn load(&self, order: Ordering) -> *mut T {
         unsafe {
             atomic_load(self.p.get() as *const *mut T, order) as *mut T
@@ -573,12 +605,14 @@ impl<T> AtomicPtr<T> {
     ///
     /// Fails if `order` is `Acquire` or `AcqRel`.
     #[inline]
+    #[stable]
     pub fn store(&self, ptr: *mut T, order: Ordering) {
         unsafe { atomic_store(self.p.get(), ptr as uint, order); }
     }

     /// Store a value, returning the old value
     #[inline]
+    #[stable]
     pub fn swap(&self, ptr: *mut T, order: Ordering) -> *mut T {
         unsafe { atomic_swap(self.p.get(), ptr as uint, order) as *mut T }
     }
@@ -589,6 +623,7 @@ impl<T> AtomicPtr<T> {
     /// replace the current value with `new`. Return the previous value.
     /// If the return value is equal to `old` then the value was updated.
     #[inline]
+    #[stable]
     pub fn compare_and_swap(&self, old: *mut T, new: *mut T, order: Ordering) -> *mut T {
         unsafe {
             atomic_compare_and_swap(self.p.get(), old as uint,
@@ -609,6 +644,7 @@ unsafe fn atomic_store<T>(dst: *mut T, val: T, order:Ordering) {
 }

 #[inline]
+#[stable]
 unsafe fn atomic_load<T>(dst: *const T, order:Ordering) -> T {
     match order {
         Acquire => intrinsics::atomic_load_acq(dst),
@@ -620,6 +656,7 @@ unsafe fn atomic_load<T>(dst: *const T, order:Ordering) -> T {
 }

 #[inline]
+#[stable]
 unsafe fn atomic_swap<T>(dst: *mut T, val: T, order: Ordering) -> T {
     match order {
         Acquire => intrinsics::atomic_xchg_acq(dst, val),
@@ -632,6 +669,7 @@ unsafe fn atomic_swap<T>(dst: *mut T, val: T, order: Ordering) -> T {

 /// Returns the old value (like __sync_fetch_and_add).
 #[inline]
+#[stable]
 unsafe fn atomic_add<T>(dst: *mut T, val: T, order: Ordering) -> T {
     match order {
         Acquire => intrinsics::atomic_xadd_acq(dst, val),
@@ -644,6 +682,7 @@ unsafe fn atomic_add<T>(dst: *mut T, val: T, order: Ordering) -> T {

 /// Returns the old value (like __sync_fetch_and_sub).
 #[inline]
+#[stable]
 unsafe fn atomic_sub<T>(dst: *mut T, val: T, order: Ordering) -> T {
     match order {
         Acquire => intrinsics::atomic_xsub_acq(dst, val),
@@ -655,6 +694,7 @@ unsafe fn atomic_sub<T>(dst: *mut T, val: T, order: Ordering) -> T {
 }

 #[inline]
+#[stable]
 unsafe fn atomic_compare_and_swap<T>(dst: *mut T, old:T, new:T, order: Ordering) -> T {
     match order {
         Acquire => intrinsics::atomic_cxchg_acq(dst, old, new),
@@ -666,6 +706,7 @@ unsafe fn atomic_compare_and_swap<T>(dst: *mut T, old:T, new:T, order: Ordering)
 }

 #[inline]
+#[stable]
 unsafe fn atomic_and<T>(dst: *mut T, val: T, order: Ordering) -> T {
     match order {
         Acquire => intrinsics::atomic_and_acq(dst, val),
@@ -677,6 +718,7 @@ unsafe fn atomic_and<T>(dst: *mut T, val: T, order: Ordering) -> T {
 }

 #[inline]
+#[stable]
 unsafe fn atomic_nand<T>(dst: *mut T, val: T, order: Ordering) -> T {
     match order {
         Acquire => intrinsics::atomic_nand_acq(dst, val),
@@ -689,6 +731,7 @@ unsafe fn atomic_nand<T>(dst: *mut T, val: T, order: Ordering) -> T {


 #[inline]
+#[stable]
 unsafe fn atomic_or<T>(dst: *mut T, val: T, order: Ordering) -> T {
     match order {
         Acquire => intrinsics::atomic_or_acq(dst, val),
@@ -701,6 +744,7 @@ unsafe fn atomic_or<T>(dst: *mut T, val: T, order: Ordering) -> T {


 #[inline]
+#[stable]
 unsafe fn atomic_xor<T>(dst: *mut T, val: T, order: Ordering) -> T {
     match order {
         Acquire => intrinsics::atomic_xor_acq(dst, val),
diff --git a/src/libcore/cell.rs b/src/libcore/cell.rs
index 9d3fa9deed7..dfef079d100 100644
--- a/src/libcore/cell.rs
+++ b/src/libcore/cell.rs
@@ -168,9 +168,9 @@ pub struct Cell<T> {
     noshare: marker::NoSync,
 }

-#[stable]
 impl<T: Copy> Cell<T> {
     /// Creates a new `Cell` containing the given value.
+    #[stable]
     pub fn new(value: T) -> Cell<T> {
         Cell {
             value: UnsafeCell::new(value),
@@ -180,12 +180,14 @@ impl<T: Copy> Cell<T> {

     /// Returns a copy of the contained value.
     #[inline]
+    #[stable]
     pub fn get(&self) -> T {
         unsafe{ *self.value.get() }
     }

     /// Sets the contained value.
     #[inline]
+    #[stable]
     pub fn set(&self, value: T) {
         unsafe {
             *self.value.get() = value;
diff --git a/src/libcore/option.rs b/src/libcore/option.rs
index 2964a6b6853..d41dbb11adb 100644
--- a/src/libcore/option.rs
+++ b/src/libcore/option.rs
@@ -716,7 +716,6 @@ impl<T> Option<T> {
 impl<T> AsSlice<T> for Option<T> {
     /// Convert from `Option<T>` to `&[T]` (without copying)
     #[inline]
-    #[stable]
     fn as_slice<'a>(&'a self) -> &'a [T] {
         match *self {
             Some(ref x) => slice::ref_slice(x),
@@ -728,6 +727,7 @@ impl<T> AsSlice<T> for Option<T> {
     }
 }

+#[stable]
 impl<T> Default for Option<T> {
     #[inline]
     fn default() -> Option<T> { None }
@@ -772,9 +772,10 @@ impl<A> DoubleEndedIterator<A> for Item<A> {
 impl<A> ExactSize<A> for Item<A> {}

 /////////////////////////////////////////////////////////////////////////////
-// Free functions
+// FromIterator
 /////////////////////////////////////////////////////////////////////////////

+#[stable]
 impl<A, V: FromIterator<A>> FromIterator<Option<A>> for Option<V> {
     /// Takes each element in the `Iterator`: if it is `None`, no further
     /// elements are taken, and the `None` is returned. Should no `None` occur, a
diff --git a/src/libcore/result.rs b/src/libcore/result.rs
index c4cb2987241..6c591fbcc38 100644
--- a/src/libcore/result.rs
+++ b/src/libcore/result.rs
@@ -878,9 +878,10 @@ impl<T> DoubleEndedIterator<T> for Item<T> {
 impl<T> ExactSize<T> for Item<T> {}

 /////////////////////////////////////////////////////////////////////////////
-// Free functions
+// FromIterator
 /////////////////////////////////////////////////////////////////////////////

+#[stable]
 impl<A, E, V: FromIterator<A>> FromIterator<Result<A, E>> for Result<V, E> {
     /// Takes each element in the `Iterator`: if it is an `Err`, no further
     /// elements are taken, and the `Err` is returned. Should no `Err` occur, a
@@ -933,6 +934,10 @@ impl<A, E, V: FromIterator<A>> FromIterator<Result<A, E>> for Result<V, E> {
     }
 }

+/////////////////////////////////////////////////////////////////////////////
+// FromIterator
+/////////////////////////////////////////////////////////////////////////////
+
 /// Perform a fold operation over the result values from an iterator.
 ///
 /// If an `Err` is encountered, it is immediately returned.
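The any.rs hunks above drop the `#[unstable]` markers from `downcast_ref` and `downcast_mut` and move `#[stable]` onto `get_type_id` itself. For readers unfamiliar with the checked-downcast pattern those extension traits provide, here is a minimal sketch in the pre-1.0 dialect the patch targets; the explicit `AnyRefExt` import is an assumption about what has to be in scope in that snapshot, not something the patch shows.

```
use std::any::{Any, AnyRefExt};

let value = 5u;
let as_any = &value as &Any;
// `is::<T>()` compares the erased value's TypeId against T's TypeId;
// `downcast_ref::<T>()` only returns Some when they match.
assert!(as_any.is::<uint>());
assert_eq!(Some(&5u), as_any.downcast_ref::<uint>());
assert!(as_any.downcast_ref::<String>().is_none());
```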
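Among the atomic.rs methods stabilized above, `compare_and_swap` on the integer and pointer types is the one read-modify-write operation whose doc comment carries no example. A hedged sketch of its contract, written against the pre-1.0 `AtomicUint`/`uint` API quoted in the diff; the `std::sync::atomic` path follows the module's own doc examples and is an assumption about the surrounding crate, not part of the patch.

```
use std::sync::atomic::{AtomicUint, SeqCst};

let val = AtomicUint::new(5);
// The current value equals `old` (5), so `new` (10) is stored and the
// previous value is returned.
assert_eq!(5, val.compare_and_swap(5, 10, SeqCst));
assert_eq!(10, val.load(SeqCst));
// Now the current value (10) differs from `old` (5): nothing is stored,
// and the return value not matching `old` signals the failed update.
assert_eq!(10, val.compare_and_swap(5, 20, SeqCst));
assert_eq!(10, val.load(SeqCst));
```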
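Finally, the option.rs and result.rs hunks stabilize the `FromIterator` impls that let `collect()` gather an iterator of `Option`s or `Result`s into a single `Option`/`Result`, stopping at the first `None` or `Err`. A small usage sketch under the same pre-1.0 assumptions (`from_str`, `vec!`, and the integer suffixes are idioms of the period, not part of the patch):

```
// Collecting Options: one failed parse makes the whole result None.
let words = vec!["7", "12", "x"];
let parsed: Option<Vec<int>> = words.iter().map(|s| from_str::<int>(*s)).collect();
assert_eq!(None, parsed);

// Collecting Results: the first Err short-circuits, otherwise Ok(Vec<..>).
let nums = vec![1i, -2i, 3i];
let doubled: Result<Vec<int>, String> = nums.iter().map(|&n| {
    if n >= 0 { Ok(n * 2) } else { Err("negative input".to_string()) }
}).collect();
assert_eq!(Err("negative input".to_string()), doubled);
```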