Auto merge of #95524 - oli-obk:cached_stable_hash_cleanups, r=nnethercote
Cached stable hash cleanups. r? `@nnethercote`

Add a sanity assertion in debug mode to check that the cached hashes are actually the ones we would get if we computed the hash each time. Add a new data structure that bundles all the hash-caching work, to make it easier to reuse for different interned data structures.
commit e980c62955
13 changed files with 275 additions and 222 deletions
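For orientation before the diff: the new `WithStableHash<T>` wrapper stores an interned value next to its precomputed stable hash, recomputes the hash when no cached value is present (`Fingerprint::ZERO`), and in debug builds cross-checks the cache against a freshly computed hash. The snippet below is a minimal standalone sketch of that pattern, not the compiler's implementation: it substitutes `std`'s `DefaultHasher` and a plain `u64` for rustc's `StableHasher`/`Fingerprint`, and the names `WithCachedHash`, `new_cached`, `new_uncached`, and `compute_hash` are illustrative only.

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
use std::ops::Deref;

/// Sketch of the caching wrapper: the value plus its (possibly absent) hash.
/// A cached value of 0 plays the role of `Fingerprint::ZERO` ("not cached").
struct WithCachedHash<T> {
    internee: T,
    cached_hash: u64,
}

impl<T> Deref for WithCachedHash<T> {
    type Target = T;
    fn deref(&self) -> &T {
        &self.internee
    }
}

/// Stand-in for the real stable-hashing machinery.
fn compute_hash<T: Hash>(value: &T) -> u64 {
    let mut hasher = DefaultHasher::new();
    value.hash(&mut hasher);
    hasher.finish()
}

impl<T: Hash> WithCachedHash<T> {
    /// Intern with a precomputed hash (the "incremental compilation" path).
    /// Caveat of the sketch: a value whose hash happens to be 0 would be
    /// treated as uncached, mirroring the zero-fingerprint sentinel.
    fn new_cached(internee: T) -> Self {
        let cached_hash = compute_hash(&internee);
        Self { internee, cached_hash }
    }

    /// Intern without caching (the non-incremental path): hash on demand.
    fn new_uncached(internee: T) -> Self {
        Self { internee, cached_hash: 0 }
    }

    /// Mirrors the PR's `hash_stable`: use the cache when present, otherwise
    /// recompute; in debug builds, cross-check the cache against a fresh hash.
    fn stable_hash(&self) -> u64 {
        if self.cached_hash == 0 || cfg!(debug_assertions) {
            let fresh = compute_hash(&self.internee);
            if cfg!(debug_assertions) && self.cached_hash != 0 {
                assert_eq!(fresh, self.cached_hash, "cached hash is stale");
            }
            fresh
        } else {
            self.cached_hash
        }
    }
}

fn main() {
    let cached = WithCachedHash::new_cached("hello".to_string());
    let uncached = WithCachedHash::new_uncached("hello".to_string());
    // Both paths yield the same hash; only the cached one avoids recomputation.
    assert_eq!(cached.stable_hash(), uncached.stable_hash());
    println!("stable hash: {:#x}", cached.stable_hash());
}
```

As in the PR, the zero value doubles as the "no cached hash" sentinel, so callers that never need stable hashing (or that run outside incremental mode) can simply skip caching and pay the recomputation cost on the rare occasions it is needed.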
@@ -4,6 +4,8 @@ use std::hash::{Hash, Hasher};
use std::ops::Deref;
use std::ptr;

use crate::fingerprint::Fingerprint;

mod private {
    #[derive(Clone, Copy, Debug)]
    pub struct PrivateZst;
@@ -108,5 +110,87 @@ where
    }
}

/// A helper trait so that `Interned` things can cache stable hashes reproducibly.
pub trait InternedHashingContext {
    fn with_def_path_and_no_spans(&mut self, f: impl FnOnce(&mut Self));
}

/// A helper type that you can wrap round your own type in order to automatically
/// cache the stable hash on creation and not recompute it whenever the stable hash
/// of the type is computed.
/// This is only done in incremental mode. You can also opt out of caching by using
/// StableHash::ZERO for the hash, in which case the hash gets computed each time.
/// This is useful if you have values that you intern but never (can?) use for stable
/// hashing.
#[derive(Copy, Clone)]
pub struct WithStableHash<T> {
    pub internee: T,
    pub stable_hash: Fingerprint,
}

impl<T: PartialEq> PartialEq for WithStableHash<T> {
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        self.internee.eq(&other.internee)
    }
}

impl<T: Eq> Eq for WithStableHash<T> {}

impl<T: Ord> PartialOrd for WithStableHash<T> {
    fn partial_cmp(&self, other: &WithStableHash<T>) -> Option<Ordering> {
        Some(self.internee.cmp(&other.internee))
    }
}

impl<T: Ord> Ord for WithStableHash<T> {
    fn cmp(&self, other: &WithStableHash<T>) -> Ordering {
        self.internee.cmp(&other.internee)
    }
}

impl<T> Deref for WithStableHash<T> {
    type Target = T;

    #[inline]
    fn deref(&self) -> &T {
        &self.internee
    }
}

impl<T: Hash> Hash for WithStableHash<T> {
    #[inline]
    fn hash<H: Hasher>(&self, s: &mut H) {
        self.internee.hash(s)
    }
}

impl<T: HashStable<CTX>, CTX: InternedHashingContext> HashStable<CTX> for WithStableHash<T> {
    fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) {
        if self.stable_hash == Fingerprint::ZERO || cfg!(debug_assertions) {
            // No cached hash available. This can only mean that incremental is disabled.
            // We don't cache stable hashes in non-incremental mode, because they are used
            // so rarely that the performance actually suffers.

            // We need to build the hash as if we cached it and then hash that hash, as
            // otherwise the hashes will differ between cached and non-cached mode.
            let stable_hash: Fingerprint = {
                let mut hasher = StableHasher::new();
                hcx.with_def_path_and_no_spans(|hcx| self.internee.hash_stable(hcx, &mut hasher));
                hasher.finish()
            };
            if cfg!(debug_assertions) && self.stable_hash != Fingerprint::ZERO {
                assert_eq!(
                    stable_hash, self.stable_hash,
                    "cached stable hash does not match freshly computed stable hash"
                );
            }
            stable_hash.hash_stable(hcx, hasher);
        } else {
            self.stable_hash.hash_stable(hcx, hasher);
        }
    }
}

#[cfg(test)]
mod tests;

@@ -2,7 +2,6 @@ use crate::infer::free_regions::FreeRegionMap;
use crate::infer::{GenericKind, InferCtxt};
use crate::traits::query::OutlivesBound;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::intern::Interned;
use rustc_hir as hir;
use rustc_middle::ty::{self, ReEarlyBound, ReFree, ReVar, Region};
@@ -164,12 +163,6 @@ impl<'a, 'tcx> OutlivesEnvironment<'tcx> {
        for outlives_bound in outlives_bounds {
            debug!("add_outlives_bounds: outlives_bound={:?}", outlives_bound);
            match outlives_bound {
                OutlivesBound::RegionSubRegion(
                    r_a @ (Region(Interned(ReEarlyBound(_), _)) | Region(Interned(ReFree(_), _))),
                    Region(Interned(ReVar(vid_b), _)),
                ) => {
                    infcx.expect("no infcx provided but region vars found").add_given(r_a, *vid_b);
                }
                OutlivesBound::RegionSubParam(r_a, param_b) => {
                    self.region_bound_pairs_accum.push((r_a, GenericKind::Param(param_b)));
                }
@@ -178,17 +171,23 @@ impl<'a, 'tcx> OutlivesEnvironment<'tcx> {
                        .push((r_a, GenericKind::Projection(projection_b)));
                }
                OutlivesBound::RegionSubRegion(r_a, r_b) => {
                    // In principle, we could record (and take
                    // advantage of) every relationship here, but
                    // we are also free not to -- it simply means
                    // strictly less that we can successfully type
                    // check. Right now we only look for things
                    // relationships between free regions. (It may
                    // also be that we should revise our inference
                    // system to be more general and to make use
                    // of *every* relationship that arises here,
                    // but presently we do not.)
                    self.free_region_map.relate_regions(r_a, r_b);
                    if let (ReEarlyBound(_) | ReFree(_), ReVar(vid_b)) = (r_a.kind(), r_b.kind()) {
                        infcx
                            .expect("no infcx provided but region vars found")
                            .add_given(r_a, vid_b);
                    } else {
                        // In principle, we could record (and take
                        // advantage of) every relationship here, but
                        // we are also free not to -- it simply means
                        // strictly less that we can successfully type
                        // check. Right now we only look for things
                        // relationships between free regions. (It may
                        // also be that we should revise our inference
                        // system to be more general and to make use
                        // of *every* relationship that arises here,
                        // but presently we do not.)
                        self.free_region_map.relate_regions(r_a, r_b);
                    }
                }
            }
        }

@@ -87,7 +87,7 @@ macro_rules! arena_types {
            [] hir_id_set: rustc_hir::HirIdSet,

            // Interned types
            [] tys: rustc_middle::ty::TyS<'tcx>,
            [] tys: rustc_data_structures::intern::WithStableHash<rustc_middle::ty::TyS<'tcx>>,
            [] predicates: rustc_middle::ty::PredicateS<'tcx>,
            [] consts: rustc_middle::ty::ConstS<'tcx>,

@@ -26,7 +26,7 @@ use crate::ty::{
use rustc_ast as ast;
use rustc_data_structures::fingerprint::Fingerprint;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_data_structures::intern::Interned;
use rustc_data_structures::intern::{Interned, WithStableHash};
use rustc_data_structures::memmap::Mmap;
use rustc_data_structures::profiling::SelfProfilerRef;
use rustc_data_structures::sharded::{IntoPointer, ShardedHashMap};
@@ -105,7 +105,7 @@ pub struct CtxtInterners<'tcx> {

    // Specifically use a speedy hash algorithm for these hash sets, since
    // they're accessed quite often.
    type_: InternedSet<'tcx, TyS<'tcx>>,
    type_: InternedSet<'tcx, WithStableHash<TyS<'tcx>>>,
    substs: InternedSet<'tcx, InternalSubsts<'tcx>>,
    canonical_var_infos: InternedSet<'tcx, List<CanonicalVarInfo<'tcx>>>,
    region: InternedSet<'tcx, RegionKind>,
@@ -178,10 +178,11 @@ impl<'tcx> CtxtInterners<'tcx> {
                        kind,
                        flags: flags.flags,
                        outer_exclusive_binder: flags.outer_exclusive_binder,
                        stable_hash,
                    };

                    InternedInSet(self.arena.alloc(ty_struct))
                    InternedInSet(
                        self.arena.alloc(WithStableHash { internee: ty_struct, stable_hash }),
                    )
                })
                .0,
        ))
@@ -2048,23 +2049,23 @@ impl<'tcx, T: 'tcx + ?Sized> IntoPointer for InternedInSet<'tcx, T> {
}

#[allow(rustc::usage_of_ty_tykind)]
impl<'tcx> Borrow<TyKind<'tcx>> for InternedInSet<'tcx, TyS<'tcx>> {
impl<'tcx> Borrow<TyKind<'tcx>> for InternedInSet<'tcx, WithStableHash<TyS<'tcx>>> {
    fn borrow<'a>(&'a self) -> &'a TyKind<'tcx> {
        &self.0.kind
    }
}

impl<'tcx> PartialEq for InternedInSet<'tcx, TyS<'tcx>> {
    fn eq(&self, other: &InternedInSet<'tcx, TyS<'tcx>>) -> bool {
impl<'tcx> PartialEq for InternedInSet<'tcx, WithStableHash<TyS<'tcx>>> {
    fn eq(&self, other: &InternedInSet<'tcx, WithStableHash<TyS<'tcx>>>) -> bool {
        // The `Borrow` trait requires that `x.borrow() == y.borrow()` equals
        // `x == y`.
        self.0.kind == other.0.kind
    }
}

impl<'tcx> Eq for InternedInSet<'tcx, TyS<'tcx>> {}
impl<'tcx> Eq for InternedInSet<'tcx, WithStableHash<TyS<'tcx>>> {}

impl<'tcx> Hash for InternedInSet<'tcx, TyS<'tcx>> {
impl<'tcx> Hash for InternedInSet<'tcx, WithStableHash<TyS<'tcx>>> {
    fn hash<H: Hasher>(&self, s: &mut H) {
        // The `Borrow` trait requires that `x.borrow().hash(s) == x.hash(s)`.
        self.0.kind.hash(s)

@@ -5,7 +5,6 @@ use crate::ty::subst::Subst;
use crate::ty::{self, subst::SubstsRef, ReprOptions, Ty, TyCtxt, TypeFoldable};
use rustc_ast as ast;
use rustc_attr as attr;
use rustc_data_structures::intern::Interned;
use rustc_hir as hir;
use rustc_hir::lang_items::LangItem;
use rustc_index::bit_set::BitSet;
@@ -503,42 +502,34 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> {
                }

                // Two non-ZST fields, and they're both scalars.
                (
                    Some((
                        i,
                        &TyAndLayout {
                            layout: Layout(Interned(&LayoutS { abi: Abi::Scalar(a), .. }, _)),
                            ..
                        },
                    )),
                    Some((
                        j,
                        &TyAndLayout {
                            layout: Layout(Interned(&LayoutS { abi: Abi::Scalar(b), .. }, _)),
                            ..
                        },
                    )),
                    None,
                ) => {
                    // Order by the memory placement, not source order.
                    let ((i, a), (j, b)) =
                        if offsets[i] < offsets[j] { ((i, a), (j, b)) } else { ((j, b), (i, a)) };
                    let pair = self.scalar_pair(a, b);
                    let pair_offsets = match pair.fields {
                        FieldsShape::Arbitrary { ref offsets, ref memory_index } => {
                            assert_eq!(memory_index, &[0, 1]);
                            offsets
                (Some((i, a)), Some((j, b)), None) => {
                    match (a.abi, b.abi) {
                        (Abi::Scalar(a), Abi::Scalar(b)) => {
                            // Order by the memory placement, not source order.
                            let ((i, a), (j, b)) = if offsets[i] < offsets[j] {
                                ((i, a), (j, b))
                            } else {
                                ((j, b), (i, a))
                            };
                            let pair = self.scalar_pair(a, b);
                            let pair_offsets = match pair.fields {
                                FieldsShape::Arbitrary { ref offsets, ref memory_index } => {
                                    assert_eq!(memory_index, &[0, 1]);
                                    offsets
                                }
                                _ => bug!(),
                            };
                            if offsets[i] == pair_offsets[0]
                                && offsets[j] == pair_offsets[1]
                                && align == pair.align
                                && size == pair.size
                            {
                                // We can use `ScalarPair` only when it matches our
                                // already computed layout (including `#[repr(C)]`).
                                abi = pair.abi;
                            }
                        }
                        _ => bug!(),
                    };
                    if offsets[i] == pair_offsets[0]
                        && offsets[j] == pair_offsets[1]
                        && align == pair.align
                        && size == pair.size
                    {
                        // We can use `ScalarPair` only when it matches our
                        // already computed layout (including `#[repr(C)]`).
                        abi = pair.abi;
                        _ => {}
                    }
                }

@@ -791,10 +782,7 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> {
        }

        // Extract the number of elements from the layout of the array field:
        let Ok(TyAndLayout {
            layout: Layout(Interned(LayoutS { fields: FieldsShape::Array { count, .. }, .. }, _)),
            ..
        }) = self.layout_of(f0_ty) else {
        let FieldsShape::Array { count, .. } = self.layout_of(f0_ty)?.layout.fields() else {
            return Err(LayoutError::Unknown(ty));
        };

@@ -31,8 +31,8 @@ use crate::ty::util::Discr;
use rustc_ast as ast;
use rustc_attr as attr;
use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap};
use rustc_data_structures::intern::Interned;
use rustc_data_structures::stable_hasher::{HashStable, NodeIdHashingMode, StableHasher};
use rustc_data_structures::intern::{Interned, WithStableHash};
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::tagged_ptr::CopyTaggedPtr;
use rustc_hir as hir;
use rustc_hir::def::{CtorKind, CtorOf, DefKind, Res};
@@ -438,32 +438,36 @@ crate struct TyS<'tcx> {
    /// De Bruijn indices within the type are contained within `0..D`
    /// (exclusive).
    outer_exclusive_binder: ty::DebruijnIndex,

    /// The stable hash of the type. This way hashing of types will not have to work
    /// on the address of the type anymore, but can instead just read this field
    stable_hash: Fingerprint,
}

// `TyS` is used a lot. Make sure it doesn't unintentionally get bigger.
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
static_assert_size!(TyS<'_>, 56);
static_assert_size!(TyS<'_>, 40);

// We are actually storing a stable hash cache next to the type, so let's
// also check the full size
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
static_assert_size!(WithStableHash<TyS<'_>>, 56);

/// Use this rather than `TyS`, whenever possible.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, HashStable)]
#[rustc_diagnostic_item = "Ty"]
#[rustc_pass_by_value]
pub struct Ty<'tcx>(Interned<'tcx, TyS<'tcx>>);
pub struct Ty<'tcx>(Interned<'tcx, WithStableHash<TyS<'tcx>>>);

// Statics only used for internal testing.
pub static BOOL_TY: Ty<'static> = Ty(Interned::new_unchecked(&BOOL_TYS));
static BOOL_TYS: TyS<'static> = TyS {
pub static BOOL_TY: Ty<'static> = Ty(Interned::new_unchecked(&WithStableHash {
    internee: BOOL_TYS,
    stable_hash: Fingerprint::ZERO,
}));
const BOOL_TYS: TyS<'static> = TyS {
    kind: ty::Bool,
    flags: TypeFlags::empty(),
    outer_exclusive_binder: DebruijnIndex::from_usize(0),
    stable_hash: Fingerprint::ZERO,
};

impl<'a, 'tcx> HashStable<StableHashingContext<'a>> for Ty<'tcx> {
impl<'a, 'tcx> HashStable<StableHashingContext<'a>> for TyS<'tcx> {
    #[inline]
    fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
        let TyS {
            kind,
@@ -473,28 +477,9 @@ impl<'a, 'tcx> HashStable<StableHashingContext<'a>> for Ty<'tcx> {
            flags: _,

            outer_exclusive_binder: _,
        } = self;

            stable_hash,
        } = self.0.0;

        if *stable_hash == Fingerprint::ZERO {
            // No cached hash available. This can only mean that incremental is disabled.
            // We don't cache stable hashes in non-incremental mode, because they are used
            // so rarely that the performance actually suffers.

            let stable_hash: Fingerprint = {
                let mut hasher = StableHasher::new();
                hcx.while_hashing_spans(false, |hcx| {
                    hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| {
                        kind.hash_stable(hcx, &mut hasher)
                    })
                });
                hasher.finish()
            };
            stable_hash.hash_stable(hcx, hasher);
        } else {
            stable_hash.hash_stable(hcx, hasher);
        }
        kind.hash_stable(hcx, hasher)
    }
}

@@ -3,7 +3,6 @@ use crate::ty::subst::{GenericArg, GenericArgKind, Subst};
use crate::ty::{self, ConstInt, DefIdTree, ParamConst, ScalarInt, Term, Ty, TyCtxt, TypeFoldable};
use rustc_apfloat::ieee::{Double, Single};
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::intern::Interned;
use rustc_data_structures::sso::SsoHashSet;
use rustc_hir as hir;
use rustc_hir::def::{self, CtorKind, DefKind, Namespace};
@@ -1263,60 +1262,52 @@ pub trait PrettyPrinter<'tcx>:
        let (alloc_id, offset) = ptr.into_parts();
        match ty.kind() {
            // Byte strings (&[u8; N])
            ty::Ref(
                _,
                Ty(Interned(
                    ty::TyS {
                        kind:
                            ty::Array(
                                Ty(Interned(ty::TyS { kind: ty::Uint(ty::UintTy::U8), .. }, _)),
                                ty::Const(Interned(
                                    ty::ConstS {
                                        val: ty::ConstKind::Value(ConstValue::Scalar(int)),
                                        ..
                                    },
                                    _,
                                )),
                            ),
                        ..
                    },
                    _,
                )),
                _,
            ) => match self.tcx().get_global_alloc(alloc_id) {
                Some(GlobalAlloc::Memory(alloc)) => {
                    let len = int.assert_bits(self.tcx().data_layout.pointer_size);
                    let range = AllocRange { start: offset, size: Size::from_bytes(len) };
                    if let Ok(byte_str) = alloc.inner().get_bytes(&self.tcx(), range) {
                        p!(pretty_print_byte_str(byte_str))
                    } else {
                        p!("<too short allocation>")
            ty::Ref(_, inner, _) => {
                if let ty::Array(elem, len) = inner.kind() {
                    if let ty::Uint(ty::UintTy::U8) = elem.kind() {
                        if let ty::ConstKind::Value(ConstValue::Scalar(int)) = len.val() {
                            match self.tcx().get_global_alloc(alloc_id) {
                                Some(GlobalAlloc::Memory(alloc)) => {
                                    let len = int.assert_bits(self.tcx().data_layout.pointer_size);
                                    let range =
                                        AllocRange { start: offset, size: Size::from_bytes(len) };
                                    if let Ok(byte_str) =
                                        alloc.inner().get_bytes(&self.tcx(), range)
                                    {
                                        p!(pretty_print_byte_str(byte_str))
                                    } else {
                                        p!("<too short allocation>")
                                    }
                                }
                                // FIXME: for statics and functions, we could in principle print more detail.
                                Some(GlobalAlloc::Static(def_id)) => {
                                    p!(write("<static({:?})>", def_id))
                                }
                                Some(GlobalAlloc::Function(_)) => p!("<function>"),
                                None => p!("<dangling pointer>"),
                            }
                            return Ok(self);
                        }
                    }
                }
                // FIXME: for statics and functions, we could in principle print more detail.
                Some(GlobalAlloc::Static(def_id)) => p!(write("<static({:?})>", def_id)),
                Some(GlobalAlloc::Function(_)) => p!("<function>"),
                None => p!("<dangling pointer>"),
            },
            }
            ty::FnPtr(_) => {
                // FIXME: We should probably have a helper method to share code with the "Byte strings"
                // printing above (which also has to handle pointers to all sorts of things).
                match self.tcx().get_global_alloc(alloc_id) {
                    Some(GlobalAlloc::Function(instance)) => {
                        self = self.typed_value(
                            |this| this.print_value_path(instance.def_id(), instance.substs),
                            |this| this.print_type(ty),
                            " as ",
                        )?;
                    }
                    _ => self = self.pretty_print_const_pointer(ptr, ty, print_ty)?,
                if let Some(GlobalAlloc::Function(instance)) = self.tcx().get_global_alloc(alloc_id)
                {
                    self = self.typed_value(
                        |this| this.print_value_path(instance.def_id(), instance.substs),
                        |this| this.print_type(ty),
                        " as ",
                    )?;
                    return Ok(self);
                }
            }
            // Any pointer values not covered by a branch above
            _ => {
                self = self.pretty_print_const_pointer(ptr, ty, print_ty)?;
            }
            _ => {}
        }
        // Any pointer values not covered by a branch above
        self = self.pretty_print_const_pointer(ptr, ty, print_ty)?;
        Ok(self)
    }
@@ -1437,28 +1428,31 @@ pub trait PrettyPrinter<'tcx>:

        match (ct, ty.kind()) {
            // Byte/string slices, printed as (byte) string literals.
            (
                ConstValue::Slice { data, start, end },
                ty::Ref(_, Ty(Interned(ty::TyS { kind: ty::Slice(t), .. }, _)), _),
            ) if *t == u8_type => {
                // The `inspect` here is okay since we checked the bounds, and there are
                // no relocations (we have an active slice reference here). We don't use
                // this result to affect interpreter execution.
                let byte_str =
                    data.inner().inspect_with_uninit_and_ptr_outside_interpreter(start..end);
                self.pretty_print_byte_str(byte_str)
            }
            (
                ConstValue::Slice { data, start, end },
                ty::Ref(_, Ty(Interned(ty::TyS { kind: ty::Str, .. }, _)), _),
            ) => {
                // The `inspect` here is okay since we checked the bounds, and there are no
                // relocations (we have an active `str` reference here). We don't use this
                // result to affect interpreter execution.
                let slice =
                    data.inner().inspect_with_uninit_and_ptr_outside_interpreter(start..end);
                p!(write("{:?}", String::from_utf8_lossy(slice)));
                Ok(self)
            (ConstValue::Slice { data, start, end }, ty::Ref(_, inner, _)) => {
                match inner.kind() {
                    ty::Slice(t) => {
                        if *t == u8_type {
                            // The `inspect` here is okay since we checked the bounds, and there are
                            // no relocations (we have an active slice reference here). We don't use
                            // this result to affect interpreter execution.
                            let byte_str = data
                                .inner()
                                .inspect_with_uninit_and_ptr_outside_interpreter(start..end);
                            return self.pretty_print_byte_str(byte_str);
                        }
                    }
                    ty::Str => {
                        // The `inspect` here is okay since we checked the bounds, and there are no
                        // relocations (we have an active `str` reference here). We don't use this
                        // result to affect interpreter execution.
                        let slice = data
                            .inner()
                            .inspect_with_uninit_and_ptr_outside_interpreter(start..end);
                        p!(write("{:?}", String::from_utf8_lossy(slice)));
                        return Ok(self);
                    }
                    _ => {}
                }
            }
            (ConstValue::ByRef { alloc, offset }, ty::Array(t, n)) if *t == u8_type => {
                let n = n.val().try_to_bits(self.tcx().data_layout.pointer_size).unwrap();
@@ -1468,7 +1462,7 @@ pub trait PrettyPrinter<'tcx>:
                let byte_str = alloc.inner().get_bytes(&self.tcx(), range).unwrap();
                p!("*");
                p!(pretty_print_byte_str(byte_str));
                Ok(self)
                return Ok(self);
            }

            // Aggregates, printed as array/tuple/struct/variant construction syntax.
@@ -1545,22 +1539,24 @@ pub trait PrettyPrinter<'tcx>:
                    _ => unreachable!(),
                }

                Ok(self)
                return Ok(self);
            }

            (ConstValue::Scalar(scalar), _) => self.pretty_print_const_scalar(scalar, ty, print_ty),
            (ConstValue::Scalar(scalar), _) => {
                return self.pretty_print_const_scalar(scalar, ty, print_ty);
            }

            // FIXME(oli-obk): also pretty print arrays and other aggregate constants by reading
            // their fields instead of just dumping the memory.
            _ => {
                // fallback
                p!(write("{:?}", ct));
                if print_ty {
                    p!(": ", print(ty));
                }
                Ok(self)
            }
            _ => {}
        }

        // fallback
        p!(write("{:?}", ct));
        if print_ty {
            p!(": ", print(ty));
        }
        Ok(self)
    }
}

@@ -6,7 +6,7 @@ use crate::ty::fold::{FallibleTypeFolder, TypeFoldable, TypeFolder, TypeVisitor}
use crate::ty::sty::{ClosureSubsts, GeneratorSubsts, InlineConstSubsts};
use crate::ty::{self, Lift, List, ParamConst, Ty, TyCtxt};

use rustc_data_structures::intern::Interned;
use rustc_data_structures::intern::{Interned, WithStableHash};
use rustc_hir::def_id::DefId;
use rustc_macros::HashStable;
use rustc_serialize::{self, Decodable, Encodable};
@@ -85,7 +85,7 @@ impl<'tcx> GenericArgKind<'tcx> {
            GenericArgKind::Type(ty) => {
                // Ensure we can use the tag bits.
                assert_eq!(mem::align_of_val(ty.0.0) & TAG_MASK, 0);
                (TYPE_TAG, ty.0.0 as *const ty::TyS<'tcx> as usize)
                (TYPE_TAG, ty.0.0 as *const WithStableHash<ty::TyS<'tcx>> as usize)
            }
            GenericArgKind::Const(ct) => {
                // Ensure we can use the tag bits.
@@ -154,7 +154,7 @@ impl<'tcx> GenericArg<'tcx> {
                &*((ptr & !TAG_MASK) as *const ty::RegionKind),
            ))),
            TYPE_TAG => GenericArgKind::Type(Ty(Interned::new_unchecked(
                &*((ptr & !TAG_MASK) as *const ty::TyS<'tcx>),
                &*((ptr & !TAG_MASK) as *const WithStableHash<ty::TyS<'tcx>>),
            ))),
            CONST_TAG => GenericArgKind::Const(ty::Const(Interned::new_unchecked(
                &*((ptr & !TAG_MASK) as *const ty::ConstS<'tcx>),

@@ -6,14 +6,12 @@ use crate::ty::layout::IntegerExt;
use crate::ty::query::TyCtxtAt;
use crate::ty::subst::{GenericArgKind, Subst, SubstsRef};
use crate::ty::{
    self, Const, DebruijnIndex, DefIdTree, List, ReEarlyBound, Region, Ty, TyCtxt, TyKind::*,
    TypeFoldable,
    self, Const, DebruijnIndex, DefIdTree, List, ReEarlyBound, Ty, TyCtxt, TyKind::*, TypeFoldable,
};
use rustc_apfloat::Float as _;
use rustc_ast as ast;
use rustc_attr::{self as attr, SignedInt, UnsignedInt};
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_data_structures::intern::Interned;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_errors::ErrorGuaranteed;
use rustc_hir as hir;
@@ -423,24 +421,25 @@ impl<'tcx> TyCtxt<'tcx> {
        let result = iter::zip(item_substs, impl_substs)
            .filter(|&(_, k)| {
                match k.unpack() {
                    GenericArgKind::Lifetime(Region(Interned(ReEarlyBound(ref ebr), _))) => {
                        !impl_generics.region_param(ebr, self).pure_wrt_drop
                    }
                    GenericArgKind::Type(Ty(Interned(
                        ty::TyS { kind: ty::Param(ref pt), .. },
                        _,
                    ))) => !impl_generics.type_param(pt, self).pure_wrt_drop,
                    GenericArgKind::Const(Const(Interned(
                        ty::ConstS { val: ty::ConstKind::Param(ref pc), .. },
                        _,
                    ))) => !impl_generics.const_param(pc, self).pure_wrt_drop,
                    GenericArgKind::Lifetime(_)
                    | GenericArgKind::Type(_)
                    | GenericArgKind::Const(_) => {
                        // Not a type, const or region param: this should be reported
                        // as an error.
                        false
                    }
                    GenericArgKind::Lifetime(region) => match region.kind() {
                        ReEarlyBound(ref ebr) => {
                            !impl_generics.region_param(ebr, self).pure_wrt_drop
                        }
                        // Error: not a region param
                        _ => false,
                    },
                    GenericArgKind::Type(ty) => match ty.kind() {
                        ty::Param(ref pt) => !impl_generics.type_param(pt, self).pure_wrt_drop,
                        // Error: not a type param
                        _ => false,
                    },
                    GenericArgKind::Const(ct) => match ct.val() {
                        ty::ConstKind::Param(ref pc) => {
                            !impl_generics.const_param(pc, self).pure_wrt_drop
                        }
                        // Error: not a const param
                        _ => false,
                    },
                }
            })
            .map(|(item_param, _)| item_param)

@@ -231,4 +231,12 @@ impl<'a> rustc_span::HashStableContext for StableHashingContext<'a> {
    }
}

impl<'a> rustc_data_structures::intern::InternedHashingContext for StableHashingContext<'a> {
    fn with_def_path_and_no_spans(&mut self, f: impl FnOnce(&mut Self)) {
        self.while_hashing_spans(false, |hcx| {
            hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| f(hcx))
        });
    }
}

impl<'a> rustc_session::HashStableContext for StableHashingContext<'a> {}

@@ -8,7 +8,6 @@
//! In this case we try to build an abstract representation of this constant using
//! `thir_abstract_const` which can then be checked for structural equality with other
//! generic constants mentioned in the `caller_bounds` of the current environment.
use rustc_data_structures::intern::Interned;
use rustc_errors::ErrorGuaranteed;
use rustc_hir::def::DefKind;
use rustc_index::vec::IndexVec;
@@ -414,14 +413,12 @@ impl<'a, 'tcx> AbstractConstBuilder<'a, 'tcx> {
        self.recurse_build(self.body_id)?;

        for n in self.nodes.iter() {
            if let Node::Leaf(ty::Const(Interned(
                ty::ConstS { val: ty::ConstKind::Unevaluated(ct), ty: _ },
                _,
            ))) = n
            {
                // `AbstractConst`s should not contain any promoteds as they require references which
                // are not allowed.
                assert_eq!(ct.promoted, None);
            if let Node::Leaf(ct) = n {
                if let ty::ConstKind::Unevaluated(ct) = ct.val() {
                    // `AbstractConst`s should not contain any promoteds as they require references which
                    // are not allowed.
                    assert_eq!(ct.promoted, None);
                }
            }
        }

@@ -1,7 +1,6 @@
use crate::check::method::MethodCallee;
use crate::check::{has_expected_num_generic_args, FnCtxt, PlaceOp};
use rustc_ast as ast;
use rustc_data_structures::intern::Interned;
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_infer::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind};
@@ -126,9 +125,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
        ) = index_expr.kind
        {
            match adjusted_ty.kind() {
                ty::Adt(ty::AdtDef(Interned(ty::AdtDefData { did, .. }, _)), _)
                    if self.tcx.is_diagnostic_item(sym::Vec, *did) =>
                {
                ty::Adt(def, _) if self.tcx.is_diagnostic_item(sym::Vec, def.did()) => {
                    return self.negative_index(adjusted_ty, index_expr.span, base_expr);
                }
                ty::Slice(_) | ty::Array(_, _) => {

@@ -1,7 +1,6 @@
use clippy_utils::diagnostics::span_lint_and_help;
use if_chain::if_chain;
use rustc_ast::ast::LitKind;
use rustc_data_structures::intern::Interned;
use rustc_hir::{Expr, ExprKind, PathSegment};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty;
@@ -56,8 +55,8 @@ fn check_case_sensitive_file_extension_comparison(ctx: &LateContext<'_>, expr: &
        ty::Str => {
            return Some(span);
        },
        ty::Adt(ty::AdtDef(Interned(&ty::AdtDefData { did, .. }, _)), _) => {
            if ctx.tcx.is_diagnostic_item(sym::String, did) {
        ty::Adt(def, _) => {
            if ctx.tcx.is_diagnostic_item(sym::String, def.did()) {
                return Some(span);
            }
        },