Rollup merge of #66564 - foeb:66219-document-unsafe-sync-cell-str, r=Amanieu
Document unsafe blocks in core::{cell, str, sync}. Split from #66506 (issue #66219). Hopefully doing a chunk at a time is more manageable! r? @RalfJung
commit d3589aa4ed

5 changed files with 112 additions and 35 deletions
@@ -187,8 +187,6 @@
//! ```
//!

// ignore-tidy-undocumented-unsafe

#![stable(feature = "rust1", since = "1.0.0")]

use crate::cmp::Ordering;

@@ -368,6 +366,10 @@ impl<T> Cell<T> {
if ptr::eq(self, other) {
return;
}
// SAFETY: This can be risky if called from separate threads, but `Cell`
// is `!Sync` so this won't happen. This also won't invalidate any
// pointers since `Cell` makes sure nothing else will be pointing into
// either of these `Cell`s.
unsafe {
ptr::swap(self.value.get(), other.value.get());
}

@@ -387,6 +389,8 @@ impl<T> Cell<T> {
/// ```
#[stable(feature = "move_cell", since = "1.17.0")]
pub fn replace(&self, val: T) -> T {
// SAFETY: This can cause data races if called from a separate thread,
// but `Cell` is `!Sync` so this won't happen.
mem::replace(unsafe { &mut *self.value.get() }, val)
}

@@ -423,6 +427,8 @@ impl<T: Copy> Cell<T> {
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn get(&self) -> T {
// SAFETY: This can cause data races if called from a separate thread,
// but `Cell` is `!Sync` so this won't happen.
unsafe { *self.value.get() }
}

@@ -491,6 +497,9 @@ impl<T: ?Sized> Cell<T> {
#[inline]
#[stable(feature = "cell_get_mut", since = "1.11.0")]
pub fn get_mut(&mut self) -> &mut T {
// SAFETY: This can cause data races if called from a separate thread,
// but `Cell` is `!Sync` so this won't happen, and `&mut` guarantees
// unique access.
unsafe { &mut *self.value.get() }
}

@@ -510,6 +519,7 @@ impl<T: ?Sized> Cell<T> {
#[inline]
#[stable(feature = "as_cell", since = "1.37.0")]
pub fn from_mut(t: &mut T) -> &Cell<T> {
// SAFETY: `&mut` ensures unique access.
unsafe { &*(t as *mut T as *const Cell<T>) }
}
}

@@ -553,6 +563,7 @@ impl<T> Cell<[T]> {
/// ```
#[stable(feature = "as_cell", since = "1.37.0")]
pub fn as_slice_of_cells(&self) -> &[Cell<T>] {
// SAFETY: `Cell<T>` has the same memory layout as `T`.
unsafe { &*(self as *const Cell<[T]> as *const [Cell<T>]) }
}
}
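The SAFETY comments above lean on two facts: `Cell` is `!Sync`, and `Cell<T>` has the same memory layout as `T`. A minimal sketch of the public API these comments justify (illustration only, not part of the diff):

```rust
use std::cell::Cell;

fn main() {
    // `swap` and `replace` mutate through a shared reference; this is sound
    // because `Cell` is `!Sync`, so no other thread can observe the value.
    let a = Cell::new(1);
    let b = Cell::new(2);
    a.swap(&b);
    assert_eq!((a.get(), b.get()), (2, 1));

    // `from_mut` + `as_slice_of_cells` rely on `Cell<T>` sharing `T`'s layout.
    let mut arr = [10, 20, 30];
    let slice: &mut [i32] = &mut arr;
    let cells: &[Cell<i32>] = Cell::from_mut(slice).as_slice_of_cells();
    cells[0].set(99);
    assert_eq!(arr[0], 99);
}
```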
@@ -816,6 +827,8 @@ impl<T: ?Sized> RefCell<T> {
#[inline]
pub fn try_borrow(&self) -> Result<Ref<'_, T>, BorrowError> {
match BorrowRef::new(&self.borrow) {
// SAFETY: `BorrowRef` ensures that there is only immutable access
// to the value while borrowed.
Some(b) => Ok(Ref { value: unsafe { &*self.value.get() }, borrow: b }),
None => Err(BorrowError { _private: () }),
}

@@ -891,6 +904,7 @@ impl<T: ?Sized> RefCell<T> {
#[inline]
pub fn try_borrow_mut(&self) -> Result<RefMut<'_, T>, BorrowMutError> {
match BorrowRefMut::new(&self.borrow) {
// SAFETY: `BorrowRef` guarantees unique access.
Some(b) => Ok(RefMut { value: unsafe { &mut *self.value.get() }, borrow: b }),
None => Err(BorrowMutError { _private: () }),
}

@@ -940,6 +954,7 @@ impl<T: ?Sized> RefCell<T> {
#[inline]
#[stable(feature = "cell_get_mut", since = "1.11.0")]
pub fn get_mut(&mut self) -> &mut T {
// SAFETY: `&mut` guarantees unique access.
unsafe { &mut *self.value.get() }
}
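`BorrowRef`/`BorrowRefMut` are what make the raw-pointer dereferences above sound: they enforce the aliasing rules at runtime. A small illustration of that enforcement through the public API (not part of the diff):

```rust
use std::cell::RefCell;

fn main() {
    let cell = RefCell::new(5);

    // While a mutable borrow is live, further borrows are refused instead of
    // handing out aliasing references.
    let m = cell.borrow_mut();
    assert!(cell.try_borrow().is_err());
    assert!(cell.try_borrow_mut().is_err());
    drop(m);

    // Once the borrow flag is cleared, shared borrows may coexist.
    let a = cell.borrow();
    let b = cell.borrow();
    assert_eq!((*a, *b), (5, 5));
}
```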
@@ -3,8 +3,6 @@ use crate::fmt::{self, Write};
use crate::mem;
use crate::str as core_str;

// ignore-tidy-undocumented-unsafe

/// Lossy UTF-8 string.
#[unstable(feature = "str_internals", issue = "none")]
pub struct Utf8Lossy {

@@ -17,6 +15,7 @@ impl Utf8Lossy {
}

pub fn from_bytes(bytes: &[u8]) -> &Utf8Lossy {
// SAFETY: Both use the same memory layout, and UTF-8 correctness isn't required.
unsafe { mem::transmute(bytes) }
}

@@ -60,6 +59,8 @@ impl<'a> Iterator for Utf8LossyChunksIter<'a> {
while i < self.source.len() {
let i_ = i;

// SAFETY: `i` starts at `0`, is less than `self.source.len()`, and
// only increases, so `0 <= i < self.source.len()`.
let byte = unsafe { *self.source.get_unchecked(i) };
i += 1;

@@ -69,6 +70,7 @@ impl<'a> Iterator for Utf8LossyChunksIter<'a> {
macro_rules! error {
() => {{
// SAFETY: We have checked up to `i` that source is valid UTF-8.
unsafe {
let r = Utf8LossyChunk {
valid: core_str::from_utf8_unchecked(&self.source[0..i_]),

@@ -130,6 +132,7 @@ impl<'a> Iterator for Utf8LossyChunksIter<'a> {
}

let r = Utf8LossyChunk {
// SAFETY: We have checked that the entire source is valid UTF-8.
valid: unsafe { core_str::from_utf8_unchecked(self.source) },
broken: &[],
};
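`Utf8Lossy` is an internal (`str_internals`) type, but the valid/broken chunking documented here is what backs the public lossy conversion. A rough illustration of the observable behavior via the public counterpart (assuming the usual replacement-character semantics; not part of the diff):

```rust
fn main() {
    // The truncated sequence \xF0\x90\x80 forms one "broken" chunk and is
    // rendered as a single U+FFFD replacement character.
    let input = b"Hello \xF0\x90\x80World";
    let lossy = String::from_utf8_lossy(input);
    assert_eq!(lossy, "Hello \u{FFFD}World");
}
```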
@@ -1,5 +1,4 @@
// ignore-tidy-filelength
// ignore-tidy-undocumented-unsafe

//! String manipulation.
//!

@@ -341,6 +340,7 @@ impl Utf8Error {
#[stable(feature = "rust1", since = "1.0.0")]
pub fn from_utf8(v: &[u8]) -> Result<&str, Utf8Error> {
run_utf8_validation(v)?;
// SAFETY: Just ran validation.
Ok(unsafe { from_utf8_unchecked(v) })
}

@@ -379,6 +379,7 @@ pub fn from_utf8(v: &[u8]) -> Result<&str, Utf8Error> {
#[stable(feature = "str_mut_extras", since = "1.20.0")]
pub fn from_utf8_mut(v: &mut [u8]) -> Result<&mut str, Utf8Error> {
run_utf8_validation(v)?;
// SAFETY: Just ran validation.
Ok(unsafe { from_utf8_unchecked_mut(v) })
}
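Both functions follow the same pattern: run the validator, then call the `_unchecked` constructor, so invalid bytes never reach the unsafe path. From the caller's side (illustration only):

```rust
use std::str;

fn main() {
    // Valid UTF-8 passes validation and is reinterpreted in place.
    assert_eq!(str::from_utf8(&[240, 159, 146, 150]), Ok("💖"));

    // Invalid UTF-8 is rejected before `from_utf8_unchecked` is ever called.
    assert!(str::from_utf8(&[0, 159, 146, 150]).is_err());
}
```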
@@ -581,7 +582,7 @@ impl<'a> Iterator for Chars<'a> {
#[inline]
fn next(&mut self) -> Option<char> {
next_code_point(&mut self.iter).map(|ch| {
// str invariant says `ch` is a valid Unicode Scalar Value
// SAFETY: `str` invariant says `ch` is a valid Unicode Scalar Value.
unsafe { char::from_u32_unchecked(ch) }
})
}

@@ -628,7 +629,7 @@ impl<'a> DoubleEndedIterator for Chars<'a> {
#[inline]
fn next_back(&mut self) -> Option<char> {
next_code_point_reverse(&mut self.iter).map(|ch| {
// str invariant says `ch` is a valid Unicode Scalar Value
// SAFETY: `str` invariant says `ch` is a valid Unicode Scalar Value.
unsafe { char::from_u32_unchecked(ch) }
})
}

@@ -658,6 +659,7 @@ impl<'a> Chars<'a> {
#[stable(feature = "iter_to_slice", since = "1.4.0")]
#[inline]
pub fn as_str(&self) -> &'a str {
// SAFETY: `Chars` is only made from a str, which guarantees the iter is valid UTF-8.
unsafe { from_utf8_unchecked(self.iter.as_slice()) }
}
}
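`Chars::as_str` is sound because the iterator only ever advances by whole code points, so its remaining byte slice stays valid UTF-8. That is observable from the public API (illustration only):

```rust
fn main() {
    let mut chars = "écho".chars();
    assert_eq!(chars.as_str(), "écho");

    // Consuming one `char` advances the underlying byte iterator past the
    // full two-byte encoding of 'é', never landing inside a code point.
    chars.next();
    assert_eq!(chars.as_str(), "cho");
}
```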
@@ -1102,6 +1104,7 @@ impl<'a, P: Pattern<'a>> SplitInternal<'a, P> {
fn get_end(&mut self) -> Option<&'a str> {
if !self.finished && (self.allow_trailing_empty || self.end - self.start > 0) {
self.finished = true;
// SAFETY: `self.start` and `self.end` always lie on unicode boundaries.
unsafe {
let string = self.matcher.haystack().get_unchecked(self.start..self.end);
Some(string)

@@ -1119,6 +1122,7 @@ impl<'a, P: Pattern<'a>> SplitInternal<'a, P> {
let haystack = self.matcher.haystack();
match self.matcher.next_match() {
// SAFETY: `Searcher` guarantees that `a` and `b` lie on unicode boundaries.
Some((a, b)) => unsafe {
let elt = haystack.get_unchecked(self.start..a);
self.start = b;

@@ -1151,11 +1155,13 @@ impl<'a, P: Pattern<'a>> SplitInternal<'a, P> {
let haystack = self.matcher.haystack();
match self.matcher.next_match_back() {
// SAFETY: `Searcher` guarantees that `a` and `b` lie on unicode boundaries.
Some((a, b)) => unsafe {
let elt = haystack.get_unchecked(b..self.end);
self.end = a;
Some(elt)
},
// SAFETY: `self.start` and `self.end` always lie on unicode boundaries.
None => unsafe {
self.finished = true;
Some(haystack.get_unchecked(self.start..self.end))

@@ -1297,6 +1303,7 @@ impl<'a, P: Pattern<'a>> MatchIndicesInternal<'a, P> {
fn next(&mut self) -> Option<(usize, &'a str)> {
self.0
.next_match()
// SAFETY: `Searcher` guarantees that `start` and `end` lie on unicode boundaries.
.map(|(start, end)| unsafe { (start, self.0.haystack().get_unchecked(start..end)) })
}

@@ -1307,6 +1314,7 @@ impl<'a, P: Pattern<'a>> MatchIndicesInternal<'a, P> {
{
self.0
.next_match_back()
// SAFETY: `Searcher` guarantees that `start` and `end` lie on unicode boundaries.
.map(|(start, end)| unsafe { (start, self.0.haystack().get_unchecked(start..end)) })
}
}

@@ -1348,6 +1356,7 @@ where
impl<'a, P: Pattern<'a>> MatchesInternal<'a, P> {
#[inline]
fn next(&mut self) -> Option<&'a str> {
// SAFETY: `Searcher` guarantees that `start` and `end` lie on unicode boundaries.
self.0.next_match().map(|(a, b)| unsafe {
// Indices are known to be on utf8 boundaries
self.0.haystack().get_unchecked(a..b)

@@ -1359,6 +1368,7 @@ impl<'a, P: Pattern<'a>> MatchesInternal<'a, P> {
where
P::Searcher: ReverseSearcher<'a>,
{
// SAFETY: `Searcher` guarantees that `start` and `end` lie on unicode boundaries.
self.0.next_match_back().map(|(a, b)| unsafe {
// Indices are known to be on utf8 boundaries
self.0.haystack().get_unchecked(a..b)

@@ -1579,6 +1589,10 @@ fn run_utf8_validation(v: &[u8]) -> Result<(), Utf8Error> {
if align != usize::max_value() && align.wrapping_sub(index) % usize_bytes == 0 {
let ptr = v.as_ptr();
while index < blocks_end {
// SAFETY: since `align - index` and `ascii_block_size` are
// multiples of `usize_bytes`, `block = ptr.add(index)` is
// always aligned with a `usize` so it's safe to dereference
// both `block` and `block.offset(1)`.
unsafe {
let block = ptr.add(index) as *const usize;
// break if there is a nonascii byte

@@ -1804,6 +1818,7 @@ mod traits {
&& slice.is_char_boundary(self.start)
&& slice.is_char_boundary(self.end)
{
// SAFETY: just checked that `start` and `end` are on a char boundary.
Some(unsafe { self.get_unchecked(slice) })
} else {
None

@@ -1815,6 +1830,7 @@ mod traits {
&& slice.is_char_boundary(self.start)
&& slice.is_char_boundary(self.end)
{
// SAFETY: just checked that `start` and `end` are on a char boundary.
Some(unsafe { self.get_unchecked_mut(slice) })
} else {
None

@@ -1845,6 +1861,7 @@ mod traits {
&& slice.is_char_boundary(self.start)
&& slice.is_char_boundary(self.end)
{
// SAFETY: just checked that `start` and `end` are on a char boundary.
unsafe { self.get_unchecked_mut(slice) }
} else {
super::slice_error_fail(slice, self.start, self.end)

@@ -1873,6 +1890,7 @@ mod traits {
#[inline]
fn get(self, slice: &str) -> Option<&Self::Output> {
if slice.is_char_boundary(self.end) {
// SAFETY: just checked that `end` is on a char boundary.
Some(unsafe { self.get_unchecked(slice) })
} else {
None

@@ -1881,6 +1899,7 @@ mod traits {
#[inline]
fn get_mut(self, slice: &mut str) -> Option<&mut Self::Output> {
if slice.is_char_boundary(self.end) {
// SAFETY: just checked that `end` is on a char boundary.
Some(unsafe { self.get_unchecked_mut(slice) })
} else {
None

@@ -1903,8 +1922,8 @@ mod traits {
}
#[inline]
fn index_mut(self, slice: &mut str) -> &mut Self::Output {
// is_char_boundary checks that the index is in [0, .len()]
if slice.is_char_boundary(self.end) {
// SAFETY: just checked that `end` is on a char boundary.
unsafe { self.get_unchecked_mut(slice) }
} else {
super::slice_error_fail(slice, 0, self.end)

@@ -1934,6 +1953,7 @@ mod traits {
#[inline]
fn get(self, slice: &str) -> Option<&Self::Output> {
if slice.is_char_boundary(self.start) {
// SAFETY: just checked that `start` is on a char boundary.
Some(unsafe { self.get_unchecked(slice) })
} else {
None

@@ -1942,6 +1962,7 @@ mod traits {
#[inline]
fn get_mut(self, slice: &mut str) -> Option<&mut Self::Output> {
if slice.is_char_boundary(self.start) {
// SAFETY: just checked that `start` is on a char boundary.
Some(unsafe { self.get_unchecked_mut(slice) })
} else {
None

@@ -1966,8 +1987,8 @@ mod traits {
}
#[inline]
fn index_mut(self, slice: &mut str) -> &mut Self::Output {
// is_char_boundary checks that the index is in [0, .len()]
if slice.is_char_boundary(self.start) {
// SAFETY: just checked that `start` is on a char boundary.
unsafe { self.get_unchecked_mut(slice) }
} else {
super::slice_error_fail(slice, self.start, slice.len())

@@ -2238,7 +2259,6 @@ impl str {
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_const_stable(feature = "str_as_bytes", since = "1.32.0")]
#[inline(always)]
// SAFETY: const sound because we transmute two types with the same layout
#[allow(unused_attributes)]
#[allow_internal_unstable(const_fn_union)]
pub const fn as_bytes(&self) -> &[u8] {

@@ -2247,6 +2267,7 @@ impl str {
str: &'a str,
slice: &'a [u8],
}
// SAFETY: const sound because we transmute two types with the same layout
unsafe { Slices { str: self }.slice }
}

@@ -2573,6 +2594,7 @@ impl str {
pub fn split_at(&self, mid: usize) -> (&str, &str) {
// is_char_boundary checks that the index is in [0, .len()]
if self.is_char_boundary(mid) {
// SAFETY: just checked that `mid` is on a char boundary.
unsafe { (self.get_unchecked(0..mid), self.get_unchecked(mid..self.len())) }
} else {
slice_error_fail(self, 0, mid)
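All of these slicing paths share one shape: `is_char_boundary` (which also bounds-checks against the length) guards the `get_unchecked*` call, and the failure branch returns `None` or panics. The observable contract, as a sketch (not part of the diff):

```rust
fn main() {
    let s = "Löwe";

    // Byte index 3 is a char boundary ('L' is 1 byte, 'ö' is 2), so the
    // checked path hands the range to `get_unchecked`.
    assert_eq!(s.split_at(3), ("Lö", "we"));

    // Byte index 2 falls inside 'ö': `get` returns `None` rather than
    // slicing through the middle of a code point.
    assert!(s.get(0..2).is_none());
    assert_eq!(s.get(0..3), Some("Lö"));
}
```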
@@ -2617,6 +2639,7 @@ impl str {
if self.is_char_boundary(mid) {
let len = self.len();
let ptr = self.as_mut_ptr();
// SAFETY: just checked that `mid` is on a char boundary.
unsafe {
(
from_utf8_unchecked_mut(slice::from_raw_parts_mut(ptr, mid)),

@@ -3805,10 +3828,8 @@ impl str {
if let Some((_, b)) = matcher.next_reject_back() {
j = b;
}
unsafe {
// Searcher is known to return valid indices
self.get_unchecked(i..j)
}
// SAFETY: `Searcher` is known to return valid indices.
unsafe { self.get_unchecked(i..j) }
}

/// Returns a string slice with all prefixes that match a pattern

@@ -3844,10 +3865,8 @@ impl str {
if let Some((a, _)) = matcher.next_reject() {
i = a;
}
unsafe {
// Searcher is known to return valid indices
self.get_unchecked(i..self.len())
}
// SAFETY: `Searcher` is known to return valid indices.
unsafe { self.get_unchecked(i..self.len()) }
}

/// Returns a string slice with the prefix removed.

@@ -3878,10 +3897,8 @@ impl str {
"The first search step from Searcher \
must include the first character"
);
unsafe {
// Searcher is known to return valid indices.
Some(self.get_unchecked(len..))
}
// SAFETY: `Searcher` is known to return valid indices.
unsafe { Some(self.get_unchecked(len..)) }
} else {
None
}

@@ -3919,10 +3936,8 @@ impl str {
"The first search step from ReverseSearcher \
must include the last character"
);
unsafe {
// Searcher is known to return valid indices.
Some(self.get_unchecked(..start))
}
// SAFETY: `Searcher` is known to return valid indices.
unsafe { Some(self.get_unchecked(..start)) }
} else {
None
}

@@ -3970,10 +3985,8 @@ impl str {
if let Some((_, b)) = matcher.next_reject_back() {
j = b;
}
unsafe {
// Searcher is known to return valid indices
self.get_unchecked(0..j)
}
// SAFETY: `Searcher` is known to return valid indices.
unsafe { self.get_unchecked(0..j) }
}

/// Returns a string slice with all prefixes that match a pattern

@@ -4166,6 +4179,7 @@ impl str {
/// ```
#[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
pub fn make_ascii_uppercase(&mut self) {
// SAFETY: safe because we transmute two types with the same layout.
let me = unsafe { self.as_bytes_mut() };
me.make_ascii_uppercase()
}

@@ -4191,6 +4205,7 @@ impl str {
/// ```
#[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
pub fn make_ascii_lowercase(&mut self) {
// SAFETY: safe because we transmute two types with the same layout.
let me = unsafe { self.as_bytes_mut() };
me.make_ascii_lowercase()
}
}
|
||||
|
@ -4357,6 +4372,7 @@ impl Default for &str {
|
|||
impl Default for &mut str {
|
||||
/// Creates an empty mutable str
|
||||
fn default() -> Self {
|
||||
// SAFETY: The empty string is valid UTF-8.
|
||||
unsafe { from_utf8_unchecked_mut(&mut []) }
|
||||
}
|
||||
}
|
||||
|
@ -4412,6 +4428,7 @@ impl_fn_for_zst! {
|
|||
|
||||
#[derive(Clone)]
|
||||
struct UnsafeBytesToStr impl<'a> Fn = |bytes: &'a [u8]| -> &'a str {
|
||||
// SAFETY: not safe
|
||||
unsafe { from_utf8_unchecked(bytes) }
|
||||
};
|
||||
}
|
||||
|
|
|
@ -3,8 +3,6 @@
|
|||
//! For more details, see the traits [`Pattern`], [`Searcher`],
|
||||
//! [`ReverseSearcher`], and [`DoubleEndedSearcher`].
|
||||
|
||||
// ignore-tidy-undocumented-unsafe
|
||||
|
||||
#![unstable(
|
||||
feature = "pattern",
|
||||
reason = "API not fully fleshed out and ready to be stabilized",
|
||||
|
@ -271,6 +269,14 @@ unsafe impl<'a> Searcher<'a> for CharSearcher<'a> {
|
|||
#[inline]
|
||||
fn next(&mut self) -> SearchStep {
|
||||
let old_finger = self.finger;
|
||||
// SAFETY: 1-4 guarantee safety of `get_unchecked`
|
||||
// 1. `self.finger` and `self.finger_back` are kept on unicode boundaries
|
||||
// (this is invariant)
|
||||
// 2. `self.finger >= 0` since it starts at 0 and only increases
|
||||
// 3. `self.finger < self.finger_back` because otherwise the char `iter`
|
||||
// would return `SearchStep::Done`
|
||||
// 4. `self.finger` comes before the end of the haystack because `self.finger_back`
|
||||
// starts at the end and only decreases
|
||||
let slice = unsafe { self.haystack.get_unchecked(old_finger..self.finger_back) };
|
||||
let mut iter = slice.chars();
|
||||
let old_len = iter.iter.len();
|
||||
|
@ -293,6 +299,7 @@ unsafe impl<'a> Searcher<'a> for CharSearcher<'a> {
|
|||
// get the haystack after the last character found
|
||||
let bytes = self.haystack.as_bytes().get(self.finger..self.finger_back)?;
|
||||
// the last byte of the utf8 encoded needle
|
||||
// SAFETY: we have an invariant that `utf8_size < 5`
|
||||
let last_byte = unsafe { *self.utf8_encoded.get_unchecked(self.utf8_size - 1) };
|
||||
if let Some(index) = memchr::memchr(last_byte, bytes) {
|
||||
// The new finger is the index of the byte we found,
|
||||
|
@ -336,6 +343,7 @@ unsafe impl<'a> ReverseSearcher<'a> for CharSearcher<'a> {
|
|||
#[inline]
|
||||
fn next_back(&mut self) -> SearchStep {
|
||||
let old_finger = self.finger_back;
|
||||
// SAFETY: see the comment for next() above
|
||||
let slice = unsafe { self.haystack.get_unchecked(self.finger..old_finger) };
|
||||
let mut iter = slice.chars();
|
||||
let old_len = iter.iter.len();
|
||||
|
@ -363,6 +371,7 @@ unsafe impl<'a> ReverseSearcher<'a> for CharSearcher<'a> {
|
|||
return None;
|
||||
};
|
||||
// the last byte of the utf8 encoded needle
|
||||
// SAFETY: we have an invariant that `utf8_size < 5`
|
||||
let last_byte = unsafe { *self.utf8_encoded.get_unchecked(self.utf8_size - 1) };
|
||||
if let Some(index) = memchr::memrchr(last_byte, bytes) {
|
||||
// we searched a slice that was offset by self.finger,
|
||||
|
|
|
@ -112,8 +112,6 @@
|
|||
//! println!("live threads: {}", old_thread_count + 1);
|
||||
//! ```
|
||||
|
||||
// ignore-tidy-undocumented-unsafe
|
||||
|
||||
#![stable(feature = "rust1", since = "1.0.0")]
|
||||
#![cfg_attr(not(target_has_atomic_load_store = "8"), allow(dead_code))]
|
||||
#![cfg_attr(not(target_has_atomic_load_store = "8"), allow(unused_imports))]
|
||||
|
@ -350,6 +348,7 @@ impl AtomicBool {
|
|||
#[inline]
|
||||
#[stable(feature = "atomic_access", since = "1.15.0")]
|
||||
pub fn get_mut(&mut self) -> &mut bool {
|
||||
// SAFETY: the mutable reference guarantees unique ownership.
|
||||
unsafe { &mut *(self.v.get() as *mut bool) }
|
||||
}
|
||||
|
||||
|
@ -400,6 +399,8 @@ impl AtomicBool {
|
|||
#[inline]
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
pub fn load(&self, order: Ordering) -> bool {
|
||||
// SAFETY: any data races are prevented by atomic intrinsics and the raw
|
||||
// pointer passed in is valid because we got it from a reference.
|
||||
unsafe { atomic_load(self.v.get(), order) != 0 }
|
||||
}
|
||||
|
||||
|
@ -432,6 +433,8 @@ impl AtomicBool {
|
|||
#[inline]
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
pub fn store(&self, val: bool, order: Ordering) {
|
||||
// SAFETY: any data races are prevented by atomic intrinsics and the raw
|
||||
// pointer passed in is valid because we got it from a reference.
|
||||
unsafe {
|
||||
atomic_store(self.v.get(), val as u8, order);
|
||||
}
|
||||
|
@ -463,6 +466,7 @@ impl AtomicBool {
|
|||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
#[cfg(target_has_atomic = "8")]
|
||||
pub fn swap(&self, val: bool, order: Ordering) -> bool {
|
||||
// SAFETY: data races are prevented by atomic intrinsics.
|
||||
unsafe { atomic_swap(self.v.get(), val as u8, order) != 0 }
|
||||
}
|
||||
|
||||
|
@ -558,6 +562,7 @@ impl AtomicBool {
|
|||
success: Ordering,
|
||||
failure: Ordering,
|
||||
) -> Result<bool, bool> {
|
||||
// SAFETY: data races are prevented by atomic intrinsics.
|
||||
match unsafe {
|
||||
atomic_compare_exchange(self.v.get(), current as u8, new as u8, success, failure)
|
||||
} {
|
||||
|
@ -615,6 +620,7 @@ impl AtomicBool {
|
|||
success: Ordering,
|
||||
failure: Ordering,
|
||||
) -> Result<bool, bool> {
|
||||
// SAFETY: data races are prevented by atomic intrinsics.
|
||||
match unsafe {
|
||||
atomic_compare_exchange_weak(self.v.get(), current as u8, new as u8, success, failure)
|
||||
} {
|
||||
|
@ -661,6 +667,7 @@ impl AtomicBool {
|
|||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
#[cfg(target_has_atomic = "8")]
|
||||
pub fn fetch_and(&self, val: bool, order: Ordering) -> bool {
|
||||
// SAFETY: data races are prevented by atomic intrinsics.
|
||||
unsafe { atomic_and(self.v.get(), val as u8, order) != 0 }
|
||||
}
|
||||
|
||||
|
@ -756,6 +763,7 @@ impl AtomicBool {
|
|||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
#[cfg(target_has_atomic = "8")]
|
||||
pub fn fetch_or(&self, val: bool, order: Ordering) -> bool {
|
||||
// SAFETY: data races are prevented by atomic intrinsics.
|
||||
unsafe { atomic_or(self.v.get(), val as u8, order) != 0 }
|
||||
}
|
||||
|
||||
|
@ -797,6 +805,7 @@ impl AtomicBool {
|
|||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
#[cfg(target_has_atomic = "8")]
|
||||
pub fn fetch_xor(&self, val: bool, order: Ordering) -> bool {
|
||||
// SAFETY: data races are prevented by atomic intrinsics.
|
||||
unsafe { atomic_xor(self.v.get(), val as u8, order) != 0 }
|
||||
}
|
||||
|
||||
|
@ -872,6 +881,7 @@ impl<T> AtomicPtr<T> {
|
|||
#[inline]
|
||||
#[stable(feature = "atomic_access", since = "1.15.0")]
|
||||
pub fn get_mut(&mut self) -> &mut *mut T {
|
||||
// SAFETY: the mutable reference guarantees unique ownership.
|
||||
unsafe { &mut *self.p.get() }
|
||||
}
|
||||
|
||||
|
@ -923,6 +933,7 @@ impl<T> AtomicPtr<T> {
|
|||
#[inline]
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
pub fn load(&self, order: Ordering) -> *mut T {
|
||||
// SAFETY: data races are prevented by atomic intrinsics.
|
||||
unsafe { atomic_load(self.p.get() as *mut usize, order) as *mut T }
|
||||
}
|
||||
|
||||
|
@ -957,6 +968,7 @@ impl<T> AtomicPtr<T> {
|
|||
#[inline]
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
pub fn store(&self, ptr: *mut T, order: Ordering) {
|
||||
// SAFETY: data races are prevented by atomic intrinsics.
|
||||
unsafe {
|
||||
atomic_store(self.p.get() as *mut usize, ptr as usize, order);
|
||||
}
|
||||
|
@ -990,6 +1002,7 @@ impl<T> AtomicPtr<T> {
|
|||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
#[cfg(target_has_atomic = "ptr")]
|
||||
pub fn swap(&self, ptr: *mut T, order: Ordering) -> *mut T {
|
||||
// SAFETY: data races are prevented by atomic intrinsics.
|
||||
unsafe { atomic_swap(self.p.get() as *mut usize, ptr as usize, order) as *mut T }
|
||||
}
|
||||
|
||||
|
@ -1074,6 +1087,7 @@ impl<T> AtomicPtr<T> {
|
|||
success: Ordering,
|
||||
failure: Ordering,
|
||||
) -> Result<*mut T, *mut T> {
|
||||
// SAFETY: data races are prevented by atomic intrinsics.
|
||||
unsafe {
|
||||
let res = atomic_compare_exchange(
|
||||
self.p.get() as *mut usize,
|
||||
|
@ -1137,6 +1151,7 @@ impl<T> AtomicPtr<T> {
|
|||
success: Ordering,
|
||||
failure: Ordering,
|
||||
) -> Result<*mut T, *mut T> {
|
||||
// SAFETY: data races are prevented by atomic intrinsics.
|
||||
unsafe {
|
||||
let res = atomic_compare_exchange_weak(
|
||||
self.p.get() as *mut usize,
|
||||
|
@ -1290,6 +1305,7 @@ assert_eq!(some_var.load(Ordering::SeqCst), 5);
|
|||
#[inline]
|
||||
#[$stable_access]
|
||||
pub fn get_mut(&mut self) -> &mut $int_type {
|
||||
// SAFETY: the mutable reference guarantees unique ownership.
|
||||
unsafe { &mut *self.v.get() }
|
||||
}
|
||||
}
|
||||
|
@ -1344,6 +1360,7 @@ assert_eq!(some_var.load(Ordering::Relaxed), 5);
|
|||
#[inline]
|
||||
#[$stable]
|
||||
pub fn load(&self, order: Ordering) -> $int_type {
|
||||
// SAFETY: data races are prevented by atomic intrinsics.
|
||||
unsafe { atomic_load(self.v.get(), order) }
|
||||
}
|
||||
}
|
||||
|
@ -1378,6 +1395,7 @@ assert_eq!(some_var.load(Ordering::Relaxed), 10);
|
|||
#[inline]
|
||||
#[$stable]
|
||||
pub fn store(&self, val: $int_type, order: Ordering) {
|
||||
// SAFETY: data races are prevented by atomic intrinsics.
|
||||
unsafe { atomic_store(self.v.get(), val, order); }
|
||||
}
|
||||
}
|
||||
|
@ -1408,6 +1426,7 @@ assert_eq!(some_var.swap(10, Ordering::Relaxed), 5);
|
|||
#[$stable]
|
||||
#[$cfg_cas]
|
||||
pub fn swap(&self, val: $int_type, order: Ordering) -> $int_type {
|
||||
// SAFETY: data races are prevented by atomic intrinsics.
|
||||
unsafe { atomic_swap(self.v.get(), val, order) }
|
||||
}
|
||||
}
|
||||
|
@ -1510,6 +1529,7 @@ assert_eq!(some_var.load(Ordering::Relaxed), 10);
|
|||
new: $int_type,
|
||||
success: Ordering,
|
||||
failure: Ordering) -> Result<$int_type, $int_type> {
|
||||
// SAFETY: data races are prevented by atomic intrinsics.
|
||||
unsafe { atomic_compare_exchange(self.v.get(), current, new, success, failure) }
|
||||
}
|
||||
}
|
||||
|
@ -1562,6 +1582,7 @@ loop {
|
|||
new: $int_type,
|
||||
success: Ordering,
|
||||
failure: Ordering) -> Result<$int_type, $int_type> {
|
||||
// SAFETY: data races are prevented by atomic intrinsics.
|
||||
unsafe {
|
||||
atomic_compare_exchange_weak(self.v.get(), current, new, success, failure)
|
||||
}
|
||||
|
@ -1596,6 +1617,7 @@ assert_eq!(foo.load(Ordering::SeqCst), 10);
|
|||
#[$stable]
|
||||
#[$cfg_cas]
|
||||
pub fn fetch_add(&self, val: $int_type, order: Ordering) -> $int_type {
|
||||
// SAFETY: data races are prevented by atomic intrinsics.
|
||||
unsafe { atomic_add(self.v.get(), val, order) }
|
||||
}
|
||||
}
|
||||
|
@ -1628,6 +1650,7 @@ assert_eq!(foo.load(Ordering::SeqCst), 10);
|
|||
#[$stable]
|
||||
#[$cfg_cas]
|
||||
pub fn fetch_sub(&self, val: $int_type, order: Ordering) -> $int_type {
|
||||
// SAFETY: data races are prevented by atomic intrinsics.
|
||||
unsafe { atomic_sub(self.v.get(), val, order) }
|
||||
}
|
||||
}
|
||||
|
@ -1663,6 +1686,7 @@ assert_eq!(foo.load(Ordering::SeqCst), 0b100001);
|
|||
#[$stable]
|
||||
#[$cfg_cas]
|
||||
pub fn fetch_and(&self, val: $int_type, order: Ordering) -> $int_type {
|
||||
// SAFETY: data races are prevented by atomic intrinsics.
|
||||
unsafe { atomic_and(self.v.get(), val, order) }
|
||||
}
|
||||
}
|
||||
|
@ -1699,6 +1723,7 @@ assert_eq!(foo.load(Ordering::SeqCst), !(0x13 & 0x31));
|
|||
#[$stable_nand]
|
||||
#[$cfg_cas]
|
||||
pub fn fetch_nand(&self, val: $int_type, order: Ordering) -> $int_type {
|
||||
// SAFETY: data races are prevented by atomic intrinsics.
|
||||
unsafe { atomic_nand(self.v.get(), val, order) }
|
||||
}
|
||||
}
|
||||
|
@ -1734,6 +1759,7 @@ assert_eq!(foo.load(Ordering::SeqCst), 0b111111);
|
|||
#[$stable]
|
||||
#[$cfg_cas]
|
||||
pub fn fetch_or(&self, val: $int_type, order: Ordering) -> $int_type {
|
||||
// SAFETY: data races are prevented by atomic intrinsics.
|
||||
unsafe { atomic_or(self.v.get(), val, order) }
|
||||
}
|
||||
}
|
||||
|
@ -1769,6 +1795,7 @@ assert_eq!(foo.load(Ordering::SeqCst), 0b011110);
|
|||
#[$stable]
|
||||
#[$cfg_cas]
|
||||
pub fn fetch_xor(&self, val: $int_type, order: Ordering) -> $int_type {
|
||||
// SAFETY: data races are prevented by atomic intrinsics.
|
||||
unsafe { atomic_xor(self.v.get(), val, order) }
|
||||
}
|
||||
}
|
||||
|
@ -1880,6 +1907,7 @@ assert!(max_foo == 42);
|
|||
issue = "48655")]
|
||||
#[$cfg_cas]
|
||||
pub fn fetch_max(&self, val: $int_type, order: Ordering) -> $int_type {
|
||||
// SAFETY: data races are prevented by atomic intrinsics.
|
||||
unsafe { $max_fn(self.v.get(), val, order) }
|
||||
}
|
||||
}
|
||||
|
@ -1932,6 +1960,7 @@ assert_eq!(min_foo, 12);
|
|||
issue = "48655")]
|
||||
#[$cfg_cas]
|
||||
pub fn fetch_min(&self, val: $int_type, order: Ordering) -> $int_type {
|
||||
// SAFETY: data races are prevented by atomic intrinsics.
|
||||
unsafe { $min_fn(self.v.get(), val, order) }
|
||||
}
|
||||
}
|
||||
|
@ -1960,7 +1989,9 @@ extern {
|
|||
}
|
||||
|
||||
let mut atomic = ", stringify!($atomic_type), "::new(1);
|
||||
unsafe {
|
||||
",
|
||||
// SAFETY: Safe as long as `my_atomic_op` is atomic.
|
||||
"unsafe {
|
||||
my_atomic_op(atomic.as_mut_ptr());
|
||||
}
|
||||
# }
|
||||
|
@ -2526,6 +2557,7 @@ pub fn fence(order: Ordering) {
|
|||
// https://github.com/WebAssembly/tool-conventions/issues/59. We should
|
||||
// follow that discussion and implement a solution when one comes about!
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
// SAFETY: using an atomic fence is safe.
|
||||
unsafe {
|
||||
match order {
|
||||
Acquire => intrinsics::atomic_fence_acq(),
|
||||
|
@ -2613,6 +2645,7 @@ pub fn fence(order: Ordering) {
|
|||
#[inline]
|
||||
#[stable(feature = "compiler_fences", since = "1.21.0")]
|
||||
pub fn compiler_fence(order: Ordering) {
|
||||
// SAFETY: using an atomic fence is safe.
|
||||
unsafe {
|
||||
match order {
|
||||
Acquire => intrinsics::atomic_singlethreadfence_acq(),
|
||||
|
|
Loading…
Add table
Reference in a new issue