revert changes that cast functions to raw pointers, portability hazard
parent 09395f626b
commit b608df8277
3 changed files with 12 additions and 13 deletions

@@ -355,8 +355,8 @@ impl<'a> ArgumentV1<'a> {
         // We are type punning a bit here: USIZE_MARKER only takes an &usize but
         // formatter takes an &Opaque. Rust understandably doesn't think we should compare
         // the function pointers if they don't have the same signature, so we cast to
-        // pointers to convince it that we know what we're doing.
-        if self.formatter as *mut u8 == USIZE_MARKER as *mut u8 {
+        // usizes to tell it that we just want to compare addresses.
+        if self.formatter as usize == USIZE_MARKER as usize {
             // SAFETY: The `formatter` field is only set to USIZE_MARKER if
             // the value is a usize, so this is safe
             Some(unsafe { *(self.value as *const _ as *const usize) })
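
For readers comparing the two forms above, here is a small, self-contained sketch of the pattern being restored. It is not the actual core::fmt code: usize_marker, show, and is_marker are made-up stand-ins for USIZE_MARKER and a formatter. The point it illustrates is that Rust refuses to compare fn pointers with different signatures, so both sides are cast to usize and the check becomes an address comparison.

// Hypothetical marker function, standing in for core::fmt's USIZE_MARKER.
fn usize_marker(_: &usize) -> bool {
    true
}

// A formatter-like function with a different signature.
fn show(_: &str) -> bool {
    false
}

// Casting both fn items to usize erases the signatures, so the two
// addresses can be compared directly.
fn is_marker(f: fn(&str) -> bool) -> bool {
    f as usize == usize_marker as usize
}

fn main() {
    assert!(!is_marker(show));
    println!("show is the marker: {}", is_marker(show));
}

The reverted alternative cast both functions to raw pointers instead; per the commit title, that form is the portability hazard being backed out here.
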

@@ -293,7 +293,7 @@ impl Backtrace {
         if !Backtrace::enabled() {
             return Backtrace { inner: Inner::Disabled };
         }
-        Backtrace::create((Backtrace::capture as *mut ()).addr())
+        Backtrace::create(Backtrace::capture as usize)
     }

     /// Forcibly captures a full backtrace, regardless of environment variable

@@ -308,7 +308,7 @@ impl Backtrace {
     /// parts of code.
     #[inline(never)] // want to make sure there's a frame here to remove
     pub fn force_capture() -> Backtrace {
-        Backtrace::create((Backtrace::force_capture as *mut ()).addr())
+        Backtrace::create(Backtrace::force_capture as usize)
     }

     /// Forcibly captures a disabled backtrace, regardless of environment
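
Both Backtrace hunks restore the same shape: the public entry point's own address is handed to Backtrace::create as a usize, and the #[inline(never)] in the context keeps that frame around so it can be removed from the result. A minimal sketch of that shape, using hypothetical create/force_capture stubs rather than the real std::backtrace internals:

// The real create() resolves frames and trims everything above the given
// address; this stub just shows the value being passed down.
fn create(entry_ip: usize) {
    println!("trim frames above address {:#x}", entry_ip);
}

#[inline(never)] // keep a distinct frame whose address we can hand down
fn force_capture() {
    // `force_capture as usize` takes the function's address as a plain
    // integer, mirroring the line restored by this revert.
    create(force_capture as usize);
}

fn main() {
    force_capture();
}
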

@@ -22,10 +22,9 @@
 // that, we'll just allow that some unix targets don't use this module at all.
 #![allow(dead_code, unused_macros)]

-use crate::ffi::{c_void, CStr};
+use crate::ffi::CStr;
 use crate::marker::PhantomData;
 use crate::mem;
-use crate::ptr;
 use crate::sync::atomic::{self, AtomicUsize, Ordering};

 // We can use true weak linkage on ELF targets.

@@ -130,25 +129,25 @@ impl<F> DlsymWeak<F> {
     // Cold because it should only happen during first-time initialization.
     #[cold]
     unsafe fn initialize(&self) -> Option<F> {
-        assert_eq!(mem::size_of::<F>(), mem::size_of::<*mut ()>());
+        assert_eq!(mem::size_of::<F>(), mem::size_of::<usize>());

         let val = fetch(self.name);
         // This synchronizes with the acquire fence in `get`.
-        self.addr.store(val.addr(), Ordering::Release);
+        self.addr.store(val, Ordering::Release);

-        match val.addr() {
+        match val {
             0 => None,
-            _ => Some(mem::transmute_copy::<*mut c_void, F>(&val)),
+            addr => Some(mem::transmute_copy::<usize, F>(&addr)),
         }
     }
 }

-unsafe fn fetch(name: &str) -> *mut c_void {
+unsafe fn fetch(name: &str) -> usize {
     let name = match CStr::from_bytes_with_nul(name.as_bytes()) {
         Ok(cstr) => cstr,
-        Err(..) => return ptr::null_mut(),
+        Err(..) => return 0,
     };
-    libc::dlsym(libc::RTLD_DEFAULT, name.as_ptr())
+    libc::dlsym(libc::RTLD_DEFAULT, name.as_ptr()) as usize
 }

 #[cfg(not(any(target_os = "linux", target_os = "android")))]
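
Outside std, the dlsym-and-cache pattern that initialize/fetch implement can be sketched with the libc crate (assumed here as a Cargo dependency). This is an illustrative simplification, not the real DlsymWeak: it caches the looked-up symbol address as a plain usize in an AtomicUsize, just as the restored code does, and stops at the raw address instead of transmuting it back into a typed function pointer F.

use std::ffi::CStr;
use std::sync::atomic::{AtomicUsize, Ordering};

// Cached address of the symbol; 0 means "not looked up yet or not found".
static GETPID_ADDR: AtomicUsize = AtomicUsize::new(0);

// Ask the dynamic linker for a symbol and return its address as a usize,
// mirroring the `as usize` cast restored in this commit.
unsafe fn fetch(name: &CStr) -> usize {
    libc::dlsym(libc::RTLD_DEFAULT, name.as_ptr()) as usize
}

unsafe fn getpid_addr() -> Option<usize> {
    match GETPID_ADDR.load(Ordering::Acquire) {
        0 => {
            // First call (or previous miss): query the linker and cache the result.
            let addr = fetch(CStr::from_bytes_with_nul(b"getpid\0").unwrap());
            GETPID_ADDR.store(addr, Ordering::Release);
            if addr == 0 { None } else { Some(addr) }
        }
        addr => Some(addr),
    }
}

fn main() {
    // Unsafe because dlsym is an FFI call; the returned address is only printed.
    let addr = unsafe { getpid_addr() };
    println!("getpid found at {:?}", addr.map(|a| a as *const ()));
}

Unlike the real weak.rs, this sketch re-queries dlsym on every call when the symbol is missing, since 0 doubles as both "uninitialized" and "not found".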