libs: fix #[stable] inheritance fallout

A recent change turned off inheritance of the #[stable] attribute by default,
but failed to catch all the cases in std that were relying on it. This
patch fixes that fallout.
Aaron Turon 2014-11-14 20:39:41 -08:00
parent 1bf0649544
commit 4caffa8526
7 changed files with 62 additions and 15 deletions
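
The pattern applied throughout the files below is, roughly, the following: under the new rules a `#[stable]` impl block no longer marks the items inside it, so each item that should be stable needs its own attribute. A minimal sketch with a made-up `Counter` type (illustrative only, not code from this commit):

    // Hypothetical example using the 2014-era staged stability attributes.
    pub struct Counter { count: uint }

    #[stable]
    impl Counter {
        /// Before the change this method inherited `#[stable]` from the
        /// surrounding impl; now it must be annotated explicitly.
        #[stable]
        pub fn new() -> Counter { Counter { count: 0 } }

        /// Items that already carried their own attribute are unaffected.
        #[experimental = "naming may change"]
        pub fn bump(&mut self) { self.count += 1; }
    }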


@ -179,9 +179,9 @@ pub struct Rc<T> {
_noshare: marker::NoSync
}
#[stable]
impl<T> Rc<T> {
/// Constructs a new reference-counted pointer.
#[stable]
pub fn new(value: T) -> Rc<T> {
unsafe {
Rc {
@ -200,9 +200,7 @@ impl<T> Rc<T> {
}
}
}
}
impl<T> Rc<T> {
/// Downgrades the reference-counted pointer to a weak reference.
#[experimental = "Weak pointers may not belong in this module"]
pub fn downgrade(&self) -> Weak<T> {
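
For context, a small usage sketch of the `Rc` API touched above (2014-era Rust, illustrative only; not part of the diff):

    use std::rc::Rc;

    fn main() {
        // `Rc::new` is the now explicitly `#[stable]` constructor from the hunk above.
        let strong = Rc::new(5i);
        // `downgrade` remains `#[experimental]` and yields a non-owning `Weak` pointer.
        let weak = strong.downgrade();
        assert_eq!(*strong, 5i);
        assert!(weak.upgrade().is_some());
    }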


@ -645,7 +645,6 @@ impl<T> Vec<T> {
/// assert!(vec.capacity() >= 3);
/// ```
#[stable]
#[unstable = "matches collection reform specification, waiting for dust to settle"]
pub fn shrink_to_fit(&mut self) {
if mem::size_of::<T>() == 0 { return }
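
A minimal sketch of the `shrink_to_fit` behavior documented above (illustrative, not from the commit):

    fn main() {
        let mut vec = Vec::with_capacity(10u);
        vec.push(1i);
        vec.push(2i);
        vec.push(3i);
        // Excess capacity is dropped; the allocator may still round up,
        // hence the `>=` in the doc example above.
        vec.shrink_to_fit();
        assert!(vec.capacity() >= 3);
    }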


@ -88,6 +88,7 @@ use intrinsics::TypeId;
#[stable]
pub trait Any: 'static {
/// Get the `TypeId` of `self`
#[stable]
fn get_type_id(&self) -> TypeId;
}
@ -117,7 +118,6 @@ pub trait AnyRefExt<'a> {
#[stable]
impl<'a> AnyRefExt<'a> for &'a Any {
#[inline]
#[stable]
fn is<T: 'static>(self) -> bool {
// Get TypeId of the type this function is instantiated with
let t = TypeId::of::<T>();
@ -130,7 +130,6 @@ impl<'a> AnyRefExt<'a> for &'a Any {
}
#[inline]
#[unstable = "naming conventions around acquiring references may change"]
fn downcast_ref<T: 'static>(self) -> Option<&'a T> {
if self.is::<T>() {
unsafe {
@ -159,7 +158,6 @@ pub trait AnyMutRefExt<'a> {
#[stable]
impl<'a> AnyMutRefExt<'a> for &'a mut Any {
#[inline]
#[unstable = "naming conventions around acquiring references may change"]
fn downcast_mut<T: 'static>(self) -> Option<&'a mut T> {
if self.is::<T>() {
unsafe {
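
A short usage sketch of the `AnyRefExt` methods annotated above, with a made-up `describe` helper (2014-era Rust, illustrative only):

    use std::any::{Any, AnyRefExt};

    fn describe(value: &Any) {
        // `is` and `downcast_ref` come from the `AnyRefExt` impl for `&Any`.
        if value.is::<uint>() {
            println!("uint: {}", value.downcast_ref::<uint>().unwrap());
        } else {
            println!("not a uint");
        }
    }

    fn main() {
        describe(&5u as &Any);
        describe(&"hello" as &Any);
    }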


@ -58,20 +58,25 @@ pub struct AtomicPtr<T> {
#[stable]
pub enum Ordering {
/// No ordering constraints, only atomic operations
#[stable]
Relaxed,
/// When coupled with a store, all previous writes become visible
/// to another thread that performs a load with `Acquire` ordering
/// on the same value
#[stable]
Release,
/// When coupled with a load, all subsequent loads will see data
/// written before a store with `Release` ordering on the same value
/// in another thread
#[stable]
Acquire,
/// When coupled with a load, uses `Acquire` ordering, and with a store
/// `Release` ordering
#[stable]
AcqRel,
/// Like `AcqRel` with the additional guarantee that all threads see all
/// sequentially consistent operations in the same order.
#[stable]
SeqCst
}
@ -91,10 +96,10 @@ pub const INIT_ATOMIC_UINT: AtomicUint =
// NB: Needs to be -1 (0b11111111...) to make fetch_nand work correctly
const UINT_TRUE: uint = -1;
#[stable]
impl AtomicBool {
/// Create a new `AtomicBool`
#[inline]
#[stable]
pub fn new(v: bool) -> AtomicBool {
let val = if v { UINT_TRUE } else { 0 };
AtomicBool { v: UnsafeCell::new(val), nocopy: marker::NoCopy }
@ -106,6 +111,7 @@ impl AtomicBool {
///
/// Fails if `order` is `Release` or `AcqRel`.
#[inline]
#[stable]
pub fn load(&self, order: Ordering) -> bool {
unsafe { atomic_load(self.v.get() as *const uint, order) > 0 }
}
@ -116,6 +122,7 @@ impl AtomicBool {
///
/// Fails if `order` is `Acquire` or `AcqRel`.
#[inline]
#[stable]
pub fn store(&self, val: bool, order: Ordering) {
let val = if val { UINT_TRUE } else { 0 };
@ -124,6 +131,7 @@ impl AtomicBool {
/// Store a value, returning the old value
#[inline]
#[stable]
pub fn swap(&self, val: bool, order: Ordering) -> bool {
let val = if val { UINT_TRUE } else { 0 };
@ -174,6 +182,7 @@ impl AtomicBool {
/// }
/// ```
#[inline]
#[stable]
pub fn compare_and_swap(&self, old: bool, new: bool, order: Ordering) -> bool {
let old = if old { UINT_TRUE } else { 0 };
let new = if new { UINT_TRUE } else { 0 };
@ -205,6 +214,7 @@ impl AtomicBool {
/// assert_eq!(false, foo.load(SeqCst));
/// ```
#[inline]
#[stable]
pub fn fetch_and(&self, val: bool, order: Ordering) -> bool {
let val = if val { UINT_TRUE } else { 0 };
@ -236,6 +246,7 @@ impl AtomicBool {
/// assert_eq!(true, foo.load(SeqCst));
/// ```
#[inline]
#[stable]
pub fn fetch_nand(&self, val: bool, order: Ordering) -> bool {
let val = if val { UINT_TRUE } else { 0 };
@ -266,6 +277,7 @@ impl AtomicBool {
/// assert_eq!(false, foo.load(SeqCst));
/// ```
#[inline]
#[stable]
pub fn fetch_or(&self, val: bool, order: Ordering) -> bool {
let val = if val { UINT_TRUE } else { 0 };
@ -296,6 +308,7 @@ impl AtomicBool {
/// assert_eq!(false, foo.load(SeqCst));
/// ```
#[inline]
#[stable]
pub fn fetch_xor(&self, val: bool, order: Ordering) -> bool {
let val = if val { UINT_TRUE } else { 0 };
@ -303,10 +316,10 @@ impl AtomicBool {
}
}
#[stable]
impl AtomicInt {
/// Create a new `AtomicInt`
#[inline]
#[stable]
pub fn new(v: int) -> AtomicInt {
AtomicInt {v: UnsafeCell::new(v), nocopy: marker::NoCopy}
}
@ -317,6 +330,7 @@ impl AtomicInt {
///
/// Fails if `order` is `Release` or `AcqRel`.
#[inline]
#[stable]
pub fn load(&self, order: Ordering) -> int {
unsafe { atomic_load(self.v.get() as *const int, order) }
}
@ -327,12 +341,14 @@ impl AtomicInt {
///
/// Fails if `order` is `Acquire` or `AcqRel`.
#[inline]
#[stable]
pub fn store(&self, val: int, order: Ordering) {
unsafe { atomic_store(self.v.get(), val, order); }
}
/// Store a value, returning the old value
#[inline]
#[stable]
pub fn swap(&self, val: int, order: Ordering) -> int {
unsafe { atomic_swap(self.v.get(), val, order) }
}
@ -343,6 +359,7 @@ impl AtomicInt {
/// replace the current value with `new`. Return the previous value.
/// If the return value is equal to `old` then the value was updated.
#[inline]
#[stable]
pub fn compare_and_swap(&self, old: int, new: int, order: Ordering) -> int {
unsafe { atomic_compare_and_swap(self.v.get(), old, new, order) }
}
@ -359,6 +376,7 @@ impl AtomicInt {
/// assert_eq!(10, foo.load(SeqCst));
/// ```
#[inline]
#[stable]
pub fn fetch_add(&self, val: int, order: Ordering) -> int {
unsafe { atomic_add(self.v.get(), val, order) }
}
@ -375,6 +393,7 @@ impl AtomicInt {
/// assert_eq!(-10, foo.load(SeqCst));
/// ```
#[inline]
#[stable]
pub fn fetch_sub(&self, val: int, order: Ordering) -> int {
unsafe { atomic_sub(self.v.get(), val, order) }
}
@ -390,6 +409,7 @@ impl AtomicInt {
/// assert_eq!(0b101101, foo.fetch_and(0b110011, SeqCst));
/// assert_eq!(0b100001, foo.load(SeqCst));
#[inline]
#[stable]
pub fn fetch_and(&self, val: int, order: Ordering) -> int {
unsafe { atomic_and(self.v.get(), val, order) }
}
@ -405,6 +425,7 @@ impl AtomicInt {
/// assert_eq!(0b101101, foo.fetch_or(0b110011, SeqCst));
/// assert_eq!(0b111111, foo.load(SeqCst));
#[inline]
#[stable]
pub fn fetch_or(&self, val: int, order: Ordering) -> int {
unsafe { atomic_or(self.v.get(), val, order) }
}
@ -420,15 +441,16 @@ impl AtomicInt {
/// assert_eq!(0b101101, foo.fetch_xor(0b110011, SeqCst));
/// assert_eq!(0b011110, foo.load(SeqCst));
#[inline]
#[stable]
pub fn fetch_xor(&self, val: int, order: Ordering) -> int {
unsafe { atomic_xor(self.v.get(), val, order) }
}
}
#[stable]
impl AtomicUint {
/// Create a new `AtomicUint`
#[inline]
#[stable]
pub fn new(v: uint) -> AtomicUint {
AtomicUint { v: UnsafeCell::new(v), nocopy: marker::NoCopy }
}
@ -439,6 +461,7 @@ impl AtomicUint {
///
/// Fails if `order` is `Release` or `AcqRel`.
#[inline]
#[stable]
pub fn load(&self, order: Ordering) -> uint {
unsafe { atomic_load(self.v.get() as *const uint, order) }
}
@ -449,12 +472,14 @@ impl AtomicUint {
///
/// Fails if `order` is `Acquire` or `AcqRel`.
#[inline]
#[stable]
pub fn store(&self, val: uint, order: Ordering) {
unsafe { atomic_store(self.v.get(), val, order); }
}
/// Store a value, returning the old value
#[inline]
#[stable]
pub fn swap(&self, val: uint, order: Ordering) -> uint {
unsafe { atomic_swap(self.v.get(), val, order) }
}
@ -465,6 +490,7 @@ impl AtomicUint {
/// replace the current value with `new`. Return the previous value.
/// If the return value is equal to `old` then the value was updated.
#[inline]
#[stable]
pub fn compare_and_swap(&self, old: uint, new: uint, order: Ordering) -> uint {
unsafe { atomic_compare_and_swap(self.v.get(), old, new, order) }
}
@ -481,6 +507,7 @@ impl AtomicUint {
/// assert_eq!(10, foo.load(SeqCst));
/// ```
#[inline]
#[stable]
pub fn fetch_add(&self, val: uint, order: Ordering) -> uint {
unsafe { atomic_add(self.v.get(), val, order) }
}
@ -497,6 +524,7 @@ impl AtomicUint {
/// assert_eq!(0, foo.load(SeqCst));
/// ```
#[inline]
#[stable]
pub fn fetch_sub(&self, val: uint, order: Ordering) -> uint {
unsafe { atomic_sub(self.v.get(), val, order) }
}
@ -512,6 +540,7 @@ impl AtomicUint {
/// assert_eq!(0b101101, foo.fetch_and(0b110011, SeqCst));
/// assert_eq!(0b100001, foo.load(SeqCst));
#[inline]
#[stable]
pub fn fetch_and(&self, val: uint, order: Ordering) -> uint {
unsafe { atomic_and(self.v.get(), val, order) }
}
@ -527,6 +556,7 @@ impl AtomicUint {
/// assert_eq!(0b101101, foo.fetch_or(0b110011, SeqCst));
/// assert_eq!(0b111111, foo.load(SeqCst));
#[inline]
#[stable]
pub fn fetch_or(&self, val: uint, order: Ordering) -> uint {
unsafe { atomic_or(self.v.get(), val, order) }
}
@ -542,15 +572,16 @@ impl AtomicUint {
/// assert_eq!(0b101101, foo.fetch_xor(0b110011, SeqCst));
/// assert_eq!(0b011110, foo.load(SeqCst));
#[inline]
#[stable]
pub fn fetch_xor(&self, val: uint, order: Ordering) -> uint {
unsafe { atomic_xor(self.v.get(), val, order) }
}
}
#[stable]
impl<T> AtomicPtr<T> {
/// Create a new `AtomicPtr`
#[inline]
#[stable]
pub fn new(p: *mut T) -> AtomicPtr<T> {
AtomicPtr { p: UnsafeCell::new(p as uint), nocopy: marker::NoCopy }
}
@ -561,6 +592,7 @@ impl<T> AtomicPtr<T> {
///
/// Fails if `order` is `Release` or `AcqRel`.
#[inline]
#[stable]
pub fn load(&self, order: Ordering) -> *mut T {
unsafe {
atomic_load(self.p.get() as *const *mut T, order) as *mut T
@ -573,12 +605,14 @@ impl<T> AtomicPtr<T> {
///
/// Fails if `order` is `Acquire` or `AcqRel`.
#[inline]
#[stable]
pub fn store(&self, ptr: *mut T, order: Ordering) {
unsafe { atomic_store(self.p.get(), ptr as uint, order); }
}
/// Store a value, returning the old value
#[inline]
#[stable]
pub fn swap(&self, ptr: *mut T, order: Ordering) -> *mut T {
unsafe { atomic_swap(self.p.get(), ptr as uint, order) as *mut T }
}
@ -589,6 +623,7 @@ impl<T> AtomicPtr<T> {
/// replace the current value with `new`. Return the previous value.
/// If the return value is equal to `old` then the value was updated.
#[inline]
#[stable]
pub fn compare_and_swap(&self, old: *mut T, new: *mut T, order: Ordering) -> *mut T {
unsafe {
atomic_compare_and_swap(self.p.get(), old as uint,
@ -609,6 +644,7 @@ unsafe fn atomic_store<T>(dst: *mut T, val: T, order:Ordering) {
}
#[inline]
#[stable]
unsafe fn atomic_load<T>(dst: *const T, order:Ordering) -> T {
match order {
Acquire => intrinsics::atomic_load_acq(dst),
@ -620,6 +656,7 @@ unsafe fn atomic_load<T>(dst: *const T, order:Ordering) -> T {
}
#[inline]
#[stable]
unsafe fn atomic_swap<T>(dst: *mut T, val: T, order: Ordering) -> T {
match order {
Acquire => intrinsics::atomic_xchg_acq(dst, val),
@ -632,6 +669,7 @@ unsafe fn atomic_swap<T>(dst: *mut T, val: T, order: Ordering) -> T {
/// Returns the old value (like __sync_fetch_and_add).
#[inline]
#[stable]
unsafe fn atomic_add<T>(dst: *mut T, val: T, order: Ordering) -> T {
match order {
Acquire => intrinsics::atomic_xadd_acq(dst, val),
@ -644,6 +682,7 @@ unsafe fn atomic_add<T>(dst: *mut T, val: T, order: Ordering) -> T {
/// Returns the old value (like __sync_fetch_and_sub).
#[inline]
#[stable]
unsafe fn atomic_sub<T>(dst: *mut T, val: T, order: Ordering) -> T {
match order {
Acquire => intrinsics::atomic_xsub_acq(dst, val),
@ -655,6 +694,7 @@ unsafe fn atomic_sub<T>(dst: *mut T, val: T, order: Ordering) -> T {
}
#[inline]
#[stable]
unsafe fn atomic_compare_and_swap<T>(dst: *mut T, old:T, new:T, order: Ordering) -> T {
match order {
Acquire => intrinsics::atomic_cxchg_acq(dst, old, new),
@ -666,6 +706,7 @@ unsafe fn atomic_compare_and_swap<T>(dst: *mut T, old:T, new:T, order: Ordering)
}
#[inline]
#[stable]
unsafe fn atomic_and<T>(dst: *mut T, val: T, order: Ordering) -> T {
match order {
Acquire => intrinsics::atomic_and_acq(dst, val),
@ -677,6 +718,7 @@ unsafe fn atomic_and<T>(dst: *mut T, val: T, order: Ordering) -> T {
}
#[inline]
#[stable]
unsafe fn atomic_nand<T>(dst: *mut T, val: T, order: Ordering) -> T {
match order {
Acquire => intrinsics::atomic_nand_acq(dst, val),
@ -689,6 +731,7 @@ unsafe fn atomic_nand<T>(dst: *mut T, val: T, order: Ordering) -> T {
#[inline]
#[stable]
unsafe fn atomic_or<T>(dst: *mut T, val: T, order: Ordering) -> T {
match order {
Acquire => intrinsics::atomic_or_acq(dst, val),
@ -701,6 +744,7 @@ unsafe fn atomic_or<T>(dst: *mut T, val: T, order: Ordering) -> T {
#[inline]
#[stable]
unsafe fn atomic_xor<T>(dst: *mut T, val: T, order: Ordering) -> T {
match order {
Acquire => intrinsics::atomic_xor_acq(dst, val),
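
To ground the atomic API that receives most of the explicit annotations above, a small usage sketch (2014-era Rust, illustrative only; not part of the diff):

    use std::sync::atomic::{AtomicUint, SeqCst};

    fn main() {
        let count = AtomicUint::new(0);
        // `fetch_add` returns the previous value.
        assert_eq!(count.fetch_add(10, SeqCst), 0);
        // `compare_and_swap` only stores the new value when the current
        // value matches `old`, and always returns the previous value.
        assert_eq!(count.compare_and_swap(10, 25, SeqCst), 10);
        assert_eq!(count.load(SeqCst), 25);
    }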


@ -168,9 +168,9 @@ pub struct Cell<T> {
noshare: marker::NoSync,
}
#[stable]
impl<T:Copy> Cell<T> {
/// Creates a new `Cell` containing the given value.
#[stable]
pub fn new(value: T) -> Cell<T> {
Cell {
value: UnsafeCell::new(value),
@ -180,12 +180,14 @@ impl<T:Copy> Cell<T> {
/// Returns a copy of the contained value.
#[inline]
#[stable]
pub fn get(&self) -> T {
unsafe{ *self.value.get() }
}
/// Sets the contained value.
#[inline]
#[stable]
pub fn set(&self, value: T) {
unsafe {
*self.value.get() = value;
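
A minimal sketch of the `Cell` methods annotated above (illustrative, not from the patch):

    use std::cell::Cell;

    fn main() {
        // `Cell` provides interior mutability for `Copy` types via `get`/`set`.
        let c = Cell::new(5u);
        c.set(10);
        assert_eq!(c.get(), 10);
    }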


@ -716,7 +716,6 @@ impl<T: Default> Option<T> {
impl<T> AsSlice<T> for Option<T> {
/// Convert from `Option<T>` to `&[T]` (without copying)
#[inline]
#[stable]
fn as_slice<'a>(&'a self) -> &'a [T] {
match *self {
Some(ref x) => slice::ref_slice(x),
@ -728,6 +727,7 @@ impl<T> AsSlice<T> for Option<T> {
}
}
#[stable]
impl<T> Default for Option<T> {
#[inline]
fn default() -> Option<T> { None }
@ -772,9 +772,10 @@ impl<A> DoubleEndedIterator<A> for Item<A> {
impl<A> ExactSize<A> for Item<A> {}
/////////////////////////////////////////////////////////////////////////////
// Free functions
// FromIterator
/////////////////////////////////////////////////////////////////////////////
#[stable]
impl<A, V: FromIterator<A>> FromIterator<Option<A>> for Option<V> {
/// Takes each element in the `Iterator`: if it is `None`, no further
/// elements are taken, and the `None` is returned. Should no `None` occur, a
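
A brief sketch of the `FromIterator` impl for `Option` shown above: collecting stops at the first `None` (2014-era Rust, illustrative only):

    fn main() {
        let all_some = vec![Some(1i), Some(2i), Some(3i)];
        let collected: Option<Vec<int>> = all_some.into_iter().collect();
        assert_eq!(collected, Some(vec![1i, 2i, 3i]));

        let with_none = vec![Some(1i), None, Some(3i)];
        let collected: Option<Vec<int>> = with_none.into_iter().collect();
        assert_eq!(collected, None);
    }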


@ -878,9 +878,10 @@ impl<A> DoubleEndedIterator<A> for Item<A> {
impl<A> ExactSize<A> for Item<A> {}
/////////////////////////////////////////////////////////////////////////////
// Free functions
// FromIterator
/////////////////////////////////////////////////////////////////////////////
#[stable]
impl<A, E, V: FromIterator<A>> FromIterator<Result<A, E>> for Result<V, E> {
/// Takes each element in the `Iterator`: if it is an `Err`, no further
/// elements are taken, and the `Err` is returned. Should no `Err` occur, a
@ -933,6 +934,10 @@ impl<A, E, V: FromIterator<A>> FromIterator<Result<A, E>> for Result<V, E> {
}
}
/////////////////////////////////////////////////////////////////////////////
// FromIterator
/////////////////////////////////////////////////////////////////////////////
/// Perform a fold operation over the result values from an iterator.
///
/// If an `Err` is encountered, it is immediately returned.
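
For completeness, a usage sketch of the `Result` `FromIterator` impl documented above; collection short-circuits on the first `Err` (2014-era Rust, illustrative only):

    fn main() {
        let oks = vec![Ok(1i), Ok(2i), Ok(3i)];
        let collected: Result<Vec<int>, &str> = oks.into_iter().collect();
        assert_eq!(collected, Ok(vec![1i, 2i, 3i]));

        let with_err = vec![Ok(1i), Err("boom"), Ok(3i)];
        let collected: Result<Vec<int>, &str> = with_err.into_iter().collect();
        assert_eq!(collected, Err("boom"));
    }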