commit 9669e57ed1
125 changed files with 2461 additions and 806 deletions
@@ -347,7 +347,12 @@ fn codegen_fn_body(fx: &mut FunctionCx<'_, '_, '_>, start_block: Block) {
}
TerminatorKind::Assert { cond, expected, msg, target, cleanup: _ } => {
if !fx.tcx.sess.overflow_checks() {
if let mir::AssertKind::OverflowNeg(_) = *msg {
let overflow_not_to_check = match msg {
AssertKind::OverflowNeg(..) => true,
AssertKind::Overflow(op, ..) => op.is_checkable(),
_ => false,
};
if overflow_not_to_check {
let target = fx.get_block(*target);
fx.bcx.ins().jump(target, &[]);
continue;
@@ -567,15 +572,7 @@ fn codegen_stmt<'tcx>(
let lhs = codegen_operand(fx, &lhs_rhs.0);
let rhs = codegen_operand(fx, &lhs_rhs.1);

let res = if !fx.tcx.sess.overflow_checks() {
let val =
crate::num::codegen_int_binop(fx, bin_op, lhs, rhs).load_scalar(fx);
let is_overflow = fx.bcx.ins().iconst(types::I8, 0);
CValue::by_val_pair(val, is_overflow, lval.layout())
} else {
crate::num::codegen_checked_int_binop(fx, bin_op, lhs, rhs)
};

let res = crate::num::codegen_checked_int_binop(fx, bin_op, lhs, rhs);
lval.write_cvalue(fx, res);
}
Rvalue::UnaryOp(un_op, ref operand) => {
@@ -493,20 +493,6 @@ fn codegen_regular_intrinsic_call<'tcx>(
let res = crate::num::codegen_int_binop(fx, bin_op, x, y);
ret.write_cvalue(fx, res);
}
sym::add_with_overflow | sym::sub_with_overflow | sym::mul_with_overflow => {
intrinsic_args!(fx, args => (x, y); intrinsic);

assert_eq!(x.layout().ty, y.layout().ty);
let bin_op = match intrinsic {
sym::add_with_overflow => BinOp::Add,
sym::sub_with_overflow => BinOp::Sub,
sym::mul_with_overflow => BinOp::Mul,
_ => unreachable!(),
};

let res = crate::num::codegen_checked_int_binop(fx, bin_op, x, y);
ret.write_cvalue(fx, res);
}
sym::saturating_add | sym::saturating_sub => {
intrinsic_args!(fx, args => (lhs, rhs); intrinsic);
@@ -329,7 +329,7 @@ impl<'tcx> LayoutLlvmExt<'tcx> for TyAndLayout<'tcx> {
) -> &'a Type {
// HACK(eddyb) special-case fat pointers until LLVM removes
// pointee types, to avoid bitcasting every `OperandRef::deref`.
match self.ty.kind() {
match *self.ty.kind() {
ty::Ref(..) | ty::RawPtr(_) => {
return self.field(cx, index).llvm_type(cx);
}
@@ -339,6 +339,11 @@ impl<'tcx> LayoutLlvmExt<'tcx> for TyAndLayout<'tcx> {
let ptr_ty = cx.tcx.mk_mut_ptr(self.ty.boxed_ty());
return cx.layout_of(ptr_ty).scalar_pair_element_llvm_type(cx, index, immediate);
}
// `dyn* Trait` has the same ABI as `*mut dyn Trait`
ty::Dynamic(bounds, region, ty::DynStar) => {
let ptr_ty = cx.tcx.mk_mut_ptr(cx.tcx.mk_dynamic(bounds, region, ty::Dyn));
return cx.layout_of(ptr_ty).scalar_pair_element_llvm_type(cx, index, immediate);
}
_ => {}
}
@@ -39,7 +39,7 @@ use rustc_session::Session;
use rustc_span::symbol::sym;
use rustc_span::Symbol;
use rustc_span::{DebuggerVisualizerFile, DebuggerVisualizerType};
use rustc_target::abi::{Align, Size, VariantIdx};
use rustc_target::abi::{Align, VariantIdx};

use std::collections::BTreeSet;
use std::time::{Duration, Instant};
@@ -273,12 +273,13 @@ pub fn cast_to_dyn_star<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
matches!(dst_ty.kind(), ty::Dynamic(_, _, ty::DynStar)),
"destination type must be a dyn*"
);
// FIXME(dyn-star): this is probably not the best way to check if this is
// a pointer, and really we should ensure that the value is a suitable
// pointer earlier in the compilation process.
let src = match src_ty_and_layout.pointee_info_at(bx.cx(), Size::ZERO) {
Some(_) => bx.ptrtoint(src, bx.cx().type_isize()),
None => bx.bitcast(src, bx.type_isize()),
// FIXME(dyn-star): We can remove this when all supported LLVMs use opaque ptrs only.
let unit_ptr = bx.cx().type_ptr_to(bx.cx().type_struct(&[], false));
let src = match bx.cx().type_kind(bx.cx().backend_type(src_ty_and_layout)) {
TypeKind::Pointer => bx.pointercast(src, unit_ptr),
TypeKind::Integer => bx.inttoptr(src, unit_ptr),
// FIXME(dyn-star): We probably have to do a bitcast first, then inttoptr.
kind => bug!("unexpected TypeKind for left-hand side of `dyn*` cast: {kind:?}"),
};
(src, unsized_info(bx, src_ty_and_layout.ty, dst_ty, old_info))
}
@ -452,86 +452,84 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
|
|||
args1 = [place.llval];
|
||||
&args1[..]
|
||||
};
|
||||
let (drop_fn, fn_abi) = match ty.kind() {
|
||||
// FIXME(eddyb) perhaps move some of this logic into
|
||||
// `Instance::resolve_drop_in_place`?
|
||||
ty::Dynamic(_, _, ty::Dyn) => {
|
||||
// IN THIS ARM, WE HAVE:
|
||||
// ty = *mut (dyn Trait)
|
||||
// which is: exists<T> ( *mut T, Vtable<T: Trait> )
|
||||
// args[0] args[1]
|
||||
//
|
||||
// args = ( Data, Vtable )
|
||||
// |
|
||||
// v
|
||||
// /-------\
|
||||
// | ... |
|
||||
// \-------/
|
||||
//
|
||||
let virtual_drop = Instance {
|
||||
def: ty::InstanceDef::Virtual(drop_fn.def_id(), 0),
|
||||
substs: drop_fn.substs,
|
||||
};
|
||||
debug!("ty = {:?}", ty);
|
||||
debug!("drop_fn = {:?}", drop_fn);
|
||||
debug!("args = {:?}", args);
|
||||
let fn_abi = bx.fn_abi_of_instance(virtual_drop, ty::List::empty());
|
||||
let vtable = args[1];
|
||||
// Truncate vtable off of args list
|
||||
args = &args[..1];
|
||||
(
|
||||
meth::VirtualIndex::from_index(ty::COMMON_VTABLE_ENTRIES_DROPINPLACE)
|
||||
.get_fn(bx, vtable, ty, &fn_abi),
|
||||
fn_abi,
|
||||
)
|
||||
}
|
||||
ty::Dynamic(_, _, ty::DynStar) => {
|
||||
// IN THIS ARM, WE HAVE:
|
||||
// ty = *mut (dyn* Trait)
|
||||
// which is: *mut exists<T: sizeof(T) == sizeof(usize)> (T, Vtable<T: Trait>)
|
||||
//
|
||||
// args = [ * ]
|
||||
// |
|
||||
// v
|
||||
// ( Data, Vtable )
|
||||
// |
|
||||
// v
|
||||
// /-------\
|
||||
// | ... |
|
||||
// \-------/
|
||||
//
|
||||
//
|
||||
// WE CAN CONVERT THIS INTO THE ABOVE LOGIC BY DOING
|
||||
//
|
||||
// data = &(*args[0]).0 // gives a pointer to Data above (really the same pointer)
|
||||
// vtable = (*args[0]).1 // loads the vtable out
|
||||
// (data, vtable) // an equivalent Rust `*mut dyn Trait`
|
||||
//
|
||||
// SO THEN WE CAN USE THE ABOVE CODE.
|
||||
let virtual_drop = Instance {
|
||||
def: ty::InstanceDef::Virtual(drop_fn.def_id(), 0),
|
||||
substs: drop_fn.substs,
|
||||
};
|
||||
debug!("ty = {:?}", ty);
|
||||
debug!("drop_fn = {:?}", drop_fn);
|
||||
debug!("args = {:?}", args);
|
||||
let fn_abi = bx.fn_abi_of_instance(virtual_drop, ty::List::empty());
|
||||
let data = args[0];
|
||||
let data_ty = bx.cx().backend_type(place.layout);
|
||||
let vtable_ptr =
|
||||
bx.gep(data_ty, data, &[bx.cx().const_i32(0), bx.cx().const_i32(1)]);
|
||||
let vtable = bx.load(bx.type_i8p(), vtable_ptr, abi::Align::ONE);
|
||||
// Truncate vtable off of args list
|
||||
args = &args[..1];
|
||||
debug!("args' = {:?}", args);
|
||||
(
|
||||
meth::VirtualIndex::from_index(ty::COMMON_VTABLE_ENTRIES_DROPINPLACE)
|
||||
.get_fn(bx, vtable, ty, &fn_abi),
|
||||
fn_abi,
|
||||
)
|
||||
}
|
||||
_ => (bx.get_fn_addr(drop_fn), bx.fn_abi_of_instance(drop_fn, ty::List::empty())),
|
||||
};
|
||||
let (drop_fn, fn_abi) =
|
||||
match ty.kind() {
|
||||
// FIXME(eddyb) perhaps move some of this logic into
|
||||
// `Instance::resolve_drop_in_place`?
|
||||
ty::Dynamic(_, _, ty::Dyn) => {
|
||||
// IN THIS ARM, WE HAVE:
|
||||
// ty = *mut (dyn Trait)
|
||||
// which is: exists<T> ( *mut T, Vtable<T: Trait> )
|
||||
// args[0] args[1]
|
||||
//
|
||||
// args = ( Data, Vtable )
|
||||
// |
|
||||
// v
|
||||
// /-------\
|
||||
// | ... |
|
||||
// \-------/
|
||||
//
|
||||
let virtual_drop = Instance {
|
||||
def: ty::InstanceDef::Virtual(drop_fn.def_id(), 0),
|
||||
substs: drop_fn.substs,
|
||||
};
|
||||
debug!("ty = {:?}", ty);
|
||||
debug!("drop_fn = {:?}", drop_fn);
|
||||
debug!("args = {:?}", args);
|
||||
let fn_abi = bx.fn_abi_of_instance(virtual_drop, ty::List::empty());
|
||||
let vtable = args[1];
|
||||
// Truncate vtable off of args list
|
||||
args = &args[..1];
|
||||
(
|
||||
meth::VirtualIndex::from_index(ty::COMMON_VTABLE_ENTRIES_DROPINPLACE)
|
||||
.get_fn(bx, vtable, ty, &fn_abi),
|
||||
fn_abi,
|
||||
)
|
||||
}
|
||||
ty::Dynamic(_, _, ty::DynStar) => {
|
||||
// IN THIS ARM, WE HAVE:
|
||||
// ty = *mut (dyn* Trait)
|
||||
// which is: *mut exists<T: sizeof(T) == sizeof(usize)> (T, Vtable<T: Trait>)
|
||||
//
|
||||
// args = [ * ]
|
||||
// |
|
||||
// v
|
||||
// ( Data, Vtable )
|
||||
// |
|
||||
// v
|
||||
// /-------\
|
||||
// | ... |
|
||||
// \-------/
|
||||
//
|
||||
//
|
||||
// WE CAN CONVERT THIS INTO THE ABOVE LOGIC BY DOING
|
||||
//
|
||||
// data = &(*args[0]).0 // gives a pointer to Data above (really the same pointer)
|
||||
// vtable = (*args[0]).1 // loads the vtable out
|
||||
// (data, vtable) // an equivalent Rust `*mut dyn Trait`
|
||||
//
|
||||
// SO THEN WE CAN USE THE ABOVE CODE.
|
||||
let virtual_drop = Instance {
|
||||
def: ty::InstanceDef::Virtual(drop_fn.def_id(), 0),
|
||||
substs: drop_fn.substs,
|
||||
};
|
||||
debug!("ty = {:?}", ty);
|
||||
debug!("drop_fn = {:?}", drop_fn);
|
||||
debug!("args = {:?}", args);
|
||||
let fn_abi = bx.fn_abi_of_instance(virtual_drop, ty::List::empty());
|
||||
let meta_ptr = place.project_field(bx, 1);
|
||||
let meta = bx.load_operand(meta_ptr);
|
||||
// Truncate vtable off of args list
|
||||
args = &args[..1];
|
||||
debug!("args' = {:?}", args);
|
||||
(
|
||||
meth::VirtualIndex::from_index(ty::COMMON_VTABLE_ENTRIES_DROPINPLACE)
|
||||
.get_fn(bx, meta.immediate(), ty, &fn_abi),
|
||||
fn_abi,
|
||||
)
|
||||
}
|
||||
_ => (bx.get_fn_addr(drop_fn), bx.fn_abi_of_instance(drop_fn, ty::List::empty())),
|
||||
};
|
||||
helper.do_call(
|
||||
self,
|
||||
bx,
|
||||
|
@@ -565,11 +563,13 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
// with #[rustc_inherit_overflow_checks] and inlined from
// another crate (mostly core::num generic/#[inline] fns),
// while the current crate doesn't use overflow checks.
// NOTE: Unlike binops, negation doesn't have its own
// checked operation, just a comparison with the minimum
// value, so we have to check for the assert message.
if !bx.check_overflow() {
if let AssertKind::OverflowNeg(_) = *msg {
if !bx.cx().check_overflow() {
let overflow_not_to_check = match msg {
AssertKind::OverflowNeg(..) => true,
AssertKind::Overflow(op, ..) => op.is_checkable(),
_ => false,
};
if overflow_not_to_check {
const_cond = Some(expected);
}
}
@@ -218,9 +218,6 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
args[1].val.unaligned_volatile_store(bx, dst);
return;
}
sym::add_with_overflow
| sym::sub_with_overflow
| sym::mul_with_overflow
| sym::unchecked_div
| sym::unchecked_rem
| sym::unchecked_shl
@@ -232,28 +229,6 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
let ty = arg_tys[0];
match int_type_width_signed(ty, bx.tcx()) {
Some((_width, signed)) => match name {
sym::add_with_overflow
| sym::sub_with_overflow
| sym::mul_with_overflow => {
let op = match name {
sym::add_with_overflow => OverflowOp::Add,
sym::sub_with_overflow => OverflowOp::Sub,
sym::mul_with_overflow => OverflowOp::Mul,
_ => bug!(),
};
let (val, overflow) =
bx.checked_binop(op, ty, args[0].immediate(), args[1].immediate());
// Convert `i1` to a `bool`, and write it to the out parameter
let val = bx.from_immediate(val);
let overflow = bx.from_immediate(overflow);

let dest = result.project_field(bx, 0);
bx.store(val, dest.llval, dest.align);
let dest = result.project_field(bx, 1);
bx.store(overflow, dest.llval, dest.align);

return;
}
sym::exact_div => {
if signed {
bx.exactsdiv(args[0].immediate(), args[1].immediate())
@@ -652,15 +652,6 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
rhs: Bx::Value,
input_ty: Ty<'tcx>,
) -> OperandValue<Bx::Value> {
// This case can currently arise only from functions marked
// with #[rustc_inherit_overflow_checks] and inlined from
// another crate (mostly core::num generic/#[inline] fns),
// while the current crate doesn't use overflow checks.
if !bx.cx().check_overflow() {
let val = self.codegen_scalar_binop(bx, op, lhs, rhs, input_ty);
return OperandValue::Pair(val, bx.cx().const_bool(false));
}

let (val, of) = match op {
// These are checked using intrinsics
mir::BinOp::Add | mir::BinOp::Sub | mir::BinOp::Mul => {
@@ -210,19 +210,6 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
let out_val = numeric_intrinsic(intrinsic_name, bits, kind);
self.write_scalar(out_val, dest)?;
}
sym::add_with_overflow | sym::sub_with_overflow | sym::mul_with_overflow => {
let lhs = self.read_immediate(&args[0])?;
let rhs = self.read_immediate(&args[1])?;
let bin_op = match intrinsic_name {
sym::add_with_overflow => BinOp::Add,
sym::sub_with_overflow => BinOp::Sub,
sym::mul_with_overflow => BinOp::Mul,
_ => bug!(),
};
self.binop_with_overflow(
bin_op, /*force_overflow_checks*/ true, &lhs, &rhs, dest,
)?;
}
sym::saturating_add | sym::saturating_sub => {
let l = self.read_immediate(&args[0])?;
let r = self.read_immediate(&args[1])?;
@@ -147,8 +147,9 @@ pub trait Machine<'mir, 'tcx>: Sized {
true
}

/// Whether CheckedBinOp MIR statements should actually check for overflow.
fn checked_binop_checks_overflow(_ecx: &InterpCx<'mir, 'tcx, Self>) -> bool;
/// Whether Assert(OverflowNeg) and Assert(Overflow) MIR terminators should actually
/// check for overflow.
fn ignore_checkable_overflow_assertions(_ecx: &InterpCx<'mir, 'tcx, Self>) -> bool;

/// Entry point for obtaining the MIR of anything that should get evaluated.
/// So not just functions and shims, but also const/static initializers, anonymous
@@ -466,8 +467,8 @@ pub macro compile_time_machine(<$mir: lifetime, $tcx: lifetime>) {
}

#[inline(always)]
fn checked_binop_checks_overflow(_ecx: &InterpCx<$mir, $tcx, Self>) -> bool {
true
fn ignore_checkable_overflow_assertions(_ecx: &InterpCx<$mir, $tcx, Self>) -> bool {
false
}

#[inline(always)]
@@ -10,13 +10,9 @@ use super::{ImmTy, Immediate, InterpCx, Machine, PlaceTy};
impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
/// Applies the binary operation `op` to the two operands and writes a tuple of the result
/// and a boolean signifying the potential overflow to the destination.
///
/// `force_overflow_checks` indicates whether overflow checks should be done even when
/// `tcx.sess.overflow_checks()` is `false`.
pub fn binop_with_overflow(
&mut self,
op: mir::BinOp,
force_overflow_checks: bool,
left: &ImmTy<'tcx, M::Provenance>,
right: &ImmTy<'tcx, M::Provenance>,
dest: &PlaceTy<'tcx, M::Provenance>,
@@ -28,10 +24,6 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
"type mismatch for result of {:?}",
op,
);
// As per https://github.com/rust-lang/rust/pull/98738, we always return `false` in the 2nd
// component when overflow checking is disabled.
let overflowed =
overflowed && (force_overflow_checks || M::checked_binop_checks_overflow(self));
// Write the result to `dest`.
if let Abi::ScalarPair(..) = dest.layout.abi {
// We can use the optimized path and avoid `place_field` (which might do
@@ -185,9 +185,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
let left = self.read_immediate(&self.eval_operand(left, None)?)?;
let layout = binop_right_homogeneous(bin_op).then_some(left.layout);
let right = self.read_immediate(&self.eval_operand(right, layout)?)?;
self.binop_with_overflow(
bin_op, /*force_overflow_checks*/ false, &left, &right, &dest,
)?;
self.binop_with_overflow(bin_op, &left, &right, &dest)?;
}

UnaryOp(un_op, ref operand) => {
@@ -137,8 +137,14 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
}

Assert { ref cond, expected, ref msg, target, cleanup } => {
let ignored = M::ignore_checkable_overflow_assertions(self)
&& match msg {
mir::AssertKind::OverflowNeg(..) => true,
mir::AssertKind::Overflow(op, ..) => op.is_checkable(),
_ => false,
};
let cond_val = self.read_scalar(&self.eval_operand(cond, None)?)?.to_bool()?;
if expected == cond_val {
if ignored || expected == cond_val {
self.go_to_block(target);
} else {
M::assert_panic(self, msg, cleanup)?;
@ -1,10 +1,11 @@
|
|||
use crate::astconv::AstConv;
|
||||
use crate::errors::{ManualImplementation, MissingTypeParams};
|
||||
use rustc_data_structures::fx::FxHashMap;
|
||||
use rustc_errors::{pluralize, struct_span_err, Applicability, ErrorGuaranteed};
|
||||
use rustc_errors::{pluralize, struct_span_err, Applicability, Diagnostic, ErrorGuaranteed};
|
||||
use rustc_hir as hir;
|
||||
use rustc_hir::def_id::DefId;
|
||||
use rustc_middle::ty;
|
||||
use rustc_infer::traits::FulfillmentError;
|
||||
use rustc_middle::ty::{self, Ty};
|
||||
use rustc_session::parse::feature_err;
|
||||
use rustc_span::lev_distance::find_best_match_for_name;
|
||||
use rustc_span::symbol::{sym, Ident};
|
||||
|
@ -221,6 +222,231 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
|
|||
err.emit()
|
||||
}
|
||||
|
||||
pub(crate) fn complain_about_ambiguous_inherent_assoc_type(
|
||||
&self,
|
||||
name: Ident,
|
||||
candidates: Vec<DefId>,
|
||||
span: Span,
|
||||
) -> ErrorGuaranteed {
|
||||
let mut err = struct_span_err!(
|
||||
self.tcx().sess,
|
||||
name.span,
|
||||
E0034,
|
||||
"multiple applicable items in scope"
|
||||
);
|
||||
err.span_label(name.span, format!("multiple `{name}` found"));
|
||||
self.note_ambiguous_inherent_assoc_type(&mut err, candidates, span);
|
||||
err.emit()
|
||||
}
|
||||
|
||||
// FIXME(fmease): Heavily adapted from `rustc_hir_typeck::method::suggest`. Deduplicate.
|
||||
fn note_ambiguous_inherent_assoc_type(
|
||||
&self,
|
||||
err: &mut Diagnostic,
|
||||
candidates: Vec<DefId>,
|
||||
span: Span,
|
||||
) {
|
||||
let tcx = self.tcx();
|
||||
|
||||
// Dynamic limit to avoid hiding just one candidate, which is silly.
|
||||
let limit = if candidates.len() == 5 { 5 } else { 4 };
|
||||
|
||||
for (index, &item) in candidates.iter().take(limit).enumerate() {
|
||||
let impl_ = tcx.impl_of_method(item).unwrap();
|
||||
|
||||
let note_span = if item.is_local() {
|
||||
Some(tcx.def_span(item))
|
||||
} else if impl_.is_local() {
|
||||
Some(tcx.def_span(impl_))
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let title = if candidates.len() > 1 {
|
||||
format!("candidate #{}", index + 1)
|
||||
} else {
|
||||
"the candidate".into()
|
||||
};
|
||||
|
||||
let impl_ty = tcx.at(span).type_of(impl_).subst_identity();
|
||||
let note = format!("{title} is defined in an impl for the type `{impl_ty}`");
|
||||
|
||||
if let Some(span) = note_span {
|
||||
err.span_note(span, &note);
|
||||
} else {
|
||||
err.note(&note);
|
||||
}
|
||||
}
|
||||
if candidates.len() > limit {
|
||||
err.note(&format!("and {} others", candidates.len() - limit));
|
||||
}
|
||||
}
|
||||
|
||||
// FIXME(inherent_associated_types): Find similarly named associated types and suggest them.
|
||||
pub(crate) fn complain_about_inherent_assoc_type_not_found(
|
||||
&self,
|
||||
name: Ident,
|
||||
self_ty: Ty<'tcx>,
|
||||
candidates: Vec<(DefId, (DefId, DefId))>,
|
||||
fulfillment_errors: Vec<FulfillmentError<'tcx>>,
|
||||
span: Span,
|
||||
) -> ErrorGuaranteed {
|
||||
// FIXME(fmease): This was copied in parts from an old version of `rustc_hir_typeck::method::suggest`.
|
||||
// Either
|
||||
// * update this code by applying changes similar to #106702 or by taking a
|
||||
// Vec<(DefId, (DefId, DefId), Option<Vec<FulfillmentError<'tcx>>>)> or
|
||||
// * deduplicate this code across the two crates.
|
||||
|
||||
let tcx = self.tcx();
|
||||
|
||||
let adt_did = self_ty.ty_adt_def().map(|def| def.did());
|
||||
let add_def_label = |err: &mut Diagnostic| {
|
||||
if let Some(did) = adt_did {
|
||||
err.span_label(
|
||||
tcx.def_span(did),
|
||||
format!(
|
||||
"associated item `{name}` not found for this {}",
|
||||
tcx.def_kind(did).descr(did)
|
||||
),
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
if fulfillment_errors.is_empty() {
|
||||
// FIXME(fmease): Copied from `rustc_hir_typeck::method::probe`. Deduplicate.
|
||||
|
||||
let limit = if candidates.len() == 5 { 5 } else { 4 };
|
||||
let type_candidates = candidates
|
||||
.iter()
|
||||
.take(limit)
|
||||
.map(|&(impl_, _)| format!("- `{}`", tcx.at(span).type_of(impl_).subst_identity()))
|
||||
.collect::<Vec<_>>()
|
||||
.join("\n");
|
||||
let additional_types = if candidates.len() > limit {
|
||||
format!("\nand {} more types", candidates.len() - limit)
|
||||
} else {
|
||||
String::new()
|
||||
};
|
||||
|
||||
let mut err = struct_span_err!(
|
||||
tcx.sess,
|
||||
name.span,
|
||||
E0220,
|
||||
"associated type `{name}` not found for `{self_ty}` in the current scope"
|
||||
);
|
||||
err.span_label(name.span, format!("associated item not found in `{self_ty}`"));
|
||||
err.note(&format!(
|
||||
"the associated type was found for\n{type_candidates}{additional_types}",
|
||||
));
|
||||
add_def_label(&mut err);
|
||||
return err.emit();
|
||||
}
|
||||
|
||||
let mut bound_spans = Vec::new();
|
||||
|
||||
let mut bound_span_label = |self_ty: Ty<'_>, obligation: &str, quiet: &str| {
|
||||
let msg = format!(
|
||||
"doesn't satisfy `{}`",
|
||||
if obligation.len() > 50 { quiet } else { obligation }
|
||||
);
|
||||
match &self_ty.kind() {
|
||||
// Point at the type that couldn't satisfy the bound.
|
||||
ty::Adt(def, _) => bound_spans.push((tcx.def_span(def.did()), msg)),
|
||||
// Point at the trait object that couldn't satisfy the bound.
|
||||
ty::Dynamic(preds, _, _) => {
|
||||
for pred in preds.iter() {
|
||||
match pred.skip_binder() {
|
||||
ty::ExistentialPredicate::Trait(tr) => {
|
||||
bound_spans.push((tcx.def_span(tr.def_id), msg.clone()))
|
||||
}
|
||||
ty::ExistentialPredicate::Projection(_)
|
||||
| ty::ExistentialPredicate::AutoTrait(_) => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
// Point at the closure that couldn't satisfy the bound.
|
||||
ty::Closure(def_id, _) => {
|
||||
bound_spans.push((tcx.def_span(*def_id), format!("doesn't satisfy `{quiet}`")))
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
};
|
||||
|
||||
let format_pred = |pred: ty::Predicate<'tcx>| {
|
||||
let bound_predicate = pred.kind();
|
||||
match bound_predicate.skip_binder() {
|
||||
ty::PredicateKind::Clause(ty::Clause::Projection(pred)) => {
|
||||
let pred = bound_predicate.rebind(pred);
|
||||
// `<Foo as Iterator>::Item = String`.
|
||||
let projection_ty = pred.skip_binder().projection_ty;
|
||||
|
||||
let substs_with_infer_self = tcx.mk_substs(
|
||||
std::iter::once(tcx.mk_ty_var(ty::TyVid::from_u32(0)).into())
|
||||
.chain(projection_ty.substs.iter().skip(1)),
|
||||
);
|
||||
|
||||
let quiet_projection_ty =
|
||||
tcx.mk_alias_ty(projection_ty.def_id, substs_with_infer_self);
|
||||
|
||||
let term = pred.skip_binder().term;
|
||||
|
||||
let obligation = format!("{projection_ty} = {term}");
|
||||
let quiet = format!("{quiet_projection_ty} = {term}");
|
||||
|
||||
bound_span_label(projection_ty.self_ty(), &obligation, &quiet);
|
||||
Some((obligation, projection_ty.self_ty()))
|
||||
}
|
||||
ty::PredicateKind::Clause(ty::Clause::Trait(poly_trait_ref)) => {
|
||||
let p = poly_trait_ref.trait_ref;
|
||||
let self_ty = p.self_ty();
|
||||
let path = p.print_only_trait_path();
|
||||
let obligation = format!("{self_ty}: {path}");
|
||||
let quiet = format!("_: {path}");
|
||||
bound_span_label(self_ty, &obligation, &quiet);
|
||||
Some((obligation, self_ty))
|
||||
}
|
||||
_ => None,
|
||||
}
|
||||
};
|
||||
|
||||
// FIXME(fmease): `rustc_hir_typeck::method::suggest` uses a `skip_list` to filter out some bounds.
|
||||
// I would do the same here if it didn't mean more code duplication.
|
||||
let mut bounds: Vec<_> = fulfillment_errors
|
||||
.into_iter()
|
||||
.map(|error| error.root_obligation.predicate)
|
||||
.filter_map(format_pred)
|
||||
.map(|(p, _)| format!("`{}`", p))
|
||||
.collect();
|
||||
bounds.sort();
|
||||
bounds.dedup();
|
||||
|
||||
let mut err = tcx.sess.struct_span_err(
|
||||
name.span,
|
||||
&format!("the associated type `{name}` exists for `{self_ty}`, but its trait bounds were not satisfied")
|
||||
);
|
||||
if !bounds.is_empty() {
|
||||
err.note(&format!(
|
||||
"the following trait bounds were not satisfied:\n{}",
|
||||
bounds.join("\n")
|
||||
));
|
||||
}
|
||||
err.span_label(
|
||||
name.span,
|
||||
format!("associated type cannot be referenced on `{self_ty}` due to unsatisfied trait bounds")
|
||||
);
|
||||
|
||||
bound_spans.sort();
|
||||
bound_spans.dedup();
|
||||
for (span, msg) in bound_spans {
|
||||
if !tcx.sess.source_map().is_span_accessible(span) {
|
||||
continue;
|
||||
}
|
||||
err.span_label(span, &msg);
|
||||
}
|
||||
add_def_label(&mut err);
|
||||
err.emit()
|
||||
}
|
||||
|
||||
/// When there are any missing associated types, emit an E0191 error and attempt to supply a
|
||||
/// reasonable suggestion on how to write it. For the case of multiple associated types in the
|
||||
/// same trait bound have the same name (as they come from different supertraits), we instead
|
||||
|
|
|
@ -27,7 +27,10 @@ use rustc_hir::def::{CtorOf, DefKind, Namespace, Res};
|
|||
use rustc_hir::def_id::{DefId, LocalDefId};
|
||||
use rustc_hir::intravisit::{walk_generics, Visitor as _};
|
||||
use rustc_hir::{GenericArg, GenericArgs, OpaqueTyOrigin};
|
||||
use rustc_infer::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind};
|
||||
use rustc_infer::infer::{InferCtxt, TyCtxtInferExt};
|
||||
use rustc_infer::traits::ObligationCause;
|
||||
use rustc_middle::infer::unify_key::{ConstVariableOrigin, ConstVariableOriginKind};
|
||||
use rustc_middle::middle::stability::AllowUnstable;
|
||||
use rustc_middle::ty::subst::{self, GenericArgKind, InternalSubsts, SubstsRef};
|
||||
use rustc_middle::ty::DynKind;
|
||||
|
@ -39,12 +42,11 @@ use rustc_span::lev_distance::find_best_match_for_name;
|
|||
use rustc_span::symbol::{kw, Ident, Symbol};
|
||||
use rustc_span::{sym, Span, DUMMY_SP};
|
||||
use rustc_target::spec::abi;
|
||||
use rustc_trait_selection::traits;
|
||||
use rustc_trait_selection::traits::astconv_object_safety_violations;
|
||||
use rustc_trait_selection::traits::error_reporting::{
|
||||
report_object_safety_error, suggestions::NextTypeParamName,
|
||||
};
|
||||
use rustc_trait_selection::traits::wf::object_region_bounds;
|
||||
use rustc_trait_selection::traits::{self, astconv_object_safety_violations, ObligationCtxt};
|
||||
|
||||
use smallvec::{smallvec, SmallVec};
|
||||
use std::collections::BTreeSet;
|
||||
|
@ -1944,7 +1946,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
|
|||
Res::Err
|
||||
};
|
||||
|
||||
// Check if we have an enum variant.
|
||||
// Check if we have an enum variant or an inherent associated type.
|
||||
let mut variant_resolution = None;
|
||||
if let Some(adt_def) = self.probe_adt(span, qself_ty) {
|
||||
if adt_def.is_enum() {
|
||||
|
@ -2043,23 +2045,15 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
|
|||
}
|
||||
}
|
||||
|
||||
// see if we can satisfy using an inherent associated type
|
||||
for &impl_ in tcx.inherent_impls(adt_def.did()) {
|
||||
let Some(assoc_ty_did) = self.lookup_assoc_ty(assoc_ident, hir_ref_id, span, impl_) else {
|
||||
continue;
|
||||
};
|
||||
let ty::Adt(_, adt_substs) = qself_ty.kind() else {
|
||||
// FIXME(inherent_associated_types)
|
||||
bug!("unimplemented: non-adt self of inherent assoc ty");
|
||||
};
|
||||
let item_substs = self.create_substs_for_associated_item(
|
||||
span,
|
||||
assoc_ty_did,
|
||||
assoc_segment,
|
||||
adt_substs,
|
||||
);
|
||||
let ty = tcx.type_of(assoc_ty_did).subst(tcx, item_substs);
|
||||
return Ok((ty, DefKind::AssocTy, assoc_ty_did));
|
||||
if let Some((ty, did)) = self.lookup_inherent_assoc_ty(
|
||||
assoc_ident,
|
||||
assoc_segment,
|
||||
adt_def.did(),
|
||||
qself_ty,
|
||||
hir_ref_id,
|
||||
span,
|
||||
)? {
|
||||
return Ok((ty, DefKind::AssocTy, did));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -2202,6 +2196,172 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
|
|||
Ok((ty, DefKind::AssocTy, assoc_ty_did))
|
||||
}
|
||||
|
||||
fn lookup_inherent_assoc_ty(
|
||||
&self,
|
||||
name: Ident,
|
||||
segment: &hir::PathSegment<'_>,
|
||||
adt_did: DefId,
|
||||
self_ty: Ty<'tcx>,
|
||||
block: hir::HirId,
|
||||
span: Span,
|
||||
) -> Result<Option<(Ty<'tcx>, DefId)>, ErrorGuaranteed> {
|
||||
let tcx = self.tcx();
|
||||
|
||||
let candidates: Vec<_> = tcx
|
||||
.inherent_impls(adt_did)
|
||||
.iter()
|
||||
.filter_map(|&impl_| Some((impl_, self.lookup_assoc_ty_unchecked(name, block, impl_)?)))
|
||||
.collect();
|
||||
|
||||
if candidates.is_empty() {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
// In contexts that have no inference context, just make a new one.
|
||||
// We do need a local variable to store it, though.
|
||||
let infcx_;
|
||||
let infcx = match self.infcx() {
|
||||
Some(infcx) => infcx,
|
||||
None => {
|
||||
assert!(!self_ty.needs_infer());
|
||||
infcx_ = tcx.infer_ctxt().ignoring_regions().build();
|
||||
&infcx_
|
||||
}
|
||||
};
|
||||
|
||||
let param_env = tcx.param_env(block.owner.to_def_id());
|
||||
let cause = ObligationCause::misc(span, block.owner.def_id);
|
||||
let mut fulfillment_errors = Vec::new();
|
||||
let mut applicable_candidates: Vec<_> = candidates
|
||||
.iter()
|
||||
.filter_map(|&(impl_, (assoc_item, def_scope))| {
|
||||
infcx.probe(|_| {
|
||||
let ocx = ObligationCtxt::new_in_snapshot(&infcx);
|
||||
|
||||
let impl_ty = tcx.type_of(impl_);
|
||||
let impl_substs = infcx.fresh_item_substs(impl_);
|
||||
let impl_ty = impl_ty.subst(tcx, impl_substs);
|
||||
let impl_ty = ocx.normalize(&cause, param_env, impl_ty);
|
||||
|
||||
// Check that the Self-types can be related.
|
||||
// FIXME(fmease): Should we use `eq` here?
|
||||
ocx.sup(&ObligationCause::dummy(), param_env, impl_ty, self_ty).ok()?;
|
||||
|
||||
// Check whether the impl imposes obligations we have to worry about.
|
||||
let impl_bounds = tcx.predicates_of(impl_);
|
||||
let impl_bounds = impl_bounds.instantiate(tcx, impl_substs);
|
||||
|
||||
let impl_bounds = ocx.normalize(&cause, param_env, impl_bounds);
|
||||
|
||||
let impl_obligations = traits::predicates_for_generics(
|
||||
|_, _| cause.clone(),
|
||||
param_env,
|
||||
impl_bounds,
|
||||
);
|
||||
|
||||
ocx.register_obligations(impl_obligations);
|
||||
|
||||
let mut errors = ocx.select_where_possible();
|
||||
if !errors.is_empty() {
|
||||
fulfillment_errors.append(&mut errors);
|
||||
return None;
|
||||
}
|
||||
|
||||
// FIXME(fmease): Unsolved vars can escape this InferCtxt snapshot.
|
||||
Some((assoc_item, def_scope, infcx.resolve_vars_if_possible(impl_substs)))
|
||||
})
|
||||
})
|
||||
.collect();
|
||||
|
||||
if applicable_candidates.len() > 1 {
|
||||
return Err(self.complain_about_ambiguous_inherent_assoc_type(
|
||||
name,
|
||||
applicable_candidates.into_iter().map(|(candidate, ..)| candidate).collect(),
|
||||
span,
|
||||
));
|
||||
}
|
||||
|
||||
if let Some((assoc_item, def_scope, impl_substs)) = applicable_candidates.pop() {
|
||||
self.check_assoc_ty(assoc_item, name, def_scope, block, span);
|
||||
|
||||
// FIXME(inherent_associated_types): To fully *confirm* the *probed* candidate, we still
|
||||
// need to relate the Self-type with fresh item substs & register region obligations for
|
||||
// regionck to prove/disprove.
|
||||
|
||||
let item_substs =
|
||||
self.create_substs_for_associated_item(span, assoc_item, segment, impl_substs);
|
||||
|
||||
// FIXME(fmease, #106722): Check if the bounds on the parameters of the
|
||||
// associated type hold, if any.
|
||||
let ty = tcx.type_of(assoc_item).subst(tcx, item_substs);
|
||||
|
||||
return Ok(Some((ty, assoc_item)));
|
||||
}
|
||||
|
||||
Err(self.complain_about_inherent_assoc_type_not_found(
|
||||
name,
|
||||
self_ty,
|
||||
candidates,
|
||||
fulfillment_errors,
|
||||
span,
|
||||
))
|
||||
}
|
||||
|
||||
fn lookup_assoc_ty(
|
||||
&self,
|
||||
name: Ident,
|
||||
block: hir::HirId,
|
||||
span: Span,
|
||||
scope: DefId,
|
||||
) -> Option<DefId> {
|
||||
let (item, def_scope) = self.lookup_assoc_ty_unchecked(name, block, scope)?;
|
||||
self.check_assoc_ty(item, name, def_scope, block, span);
|
||||
Some(item)
|
||||
}
|
||||
|
||||
fn lookup_assoc_ty_unchecked(
|
||||
&self,
|
||||
name: Ident,
|
||||
block: hir::HirId,
|
||||
scope: DefId,
|
||||
) -> Option<(DefId, DefId)> {
|
||||
let tcx = self.tcx();
|
||||
let (ident, def_scope) = tcx.adjust_ident_and_get_scope(name, scope, block);
|
||||
|
||||
// We have already adjusted the item name above, so compare with `ident.normalize_to_macros_2_0()` instead
|
||||
// of calling `find_by_name_and_kind`.
|
||||
let item = tcx.associated_items(scope).in_definition_order().find(|i| {
|
||||
i.kind.namespace() == Namespace::TypeNS
|
||||
&& i.ident(tcx).normalize_to_macros_2_0() == ident
|
||||
})?;
|
||||
|
||||
Some((item.def_id, def_scope))
|
||||
}
|
||||
|
||||
fn check_assoc_ty(
|
||||
&self,
|
||||
item: DefId,
|
||||
name: Ident,
|
||||
def_scope: DefId,
|
||||
block: hir::HirId,
|
||||
span: Span,
|
||||
) {
|
||||
let tcx = self.tcx();
|
||||
let kind = DefKind::AssocTy;
|
||||
|
||||
if !tcx.visibility(item).is_accessible_from(def_scope, tcx) {
|
||||
let kind = kind.descr(item);
|
||||
let msg = format!("{kind} `{name}` is private");
|
||||
let def_span = tcx.def_span(item);
|
||||
tcx.sess
|
||||
.struct_span_err_with_code(span, &msg, rustc_errors::error_code!(E0624))
|
||||
.span_label(span, &format!("private {kind}"))
|
||||
.span_label(def_span, &format!("{kind} defined here"))
|
||||
.emit();
|
||||
}
|
||||
tcx.check_stability(item, Some(block), span, None);
|
||||
}
|
||||
|
||||
fn probe_traits_that_match_assoc_ty(
|
||||
&self,
|
||||
qself_ty: Ty<'tcx>,
|
||||
|
@ -2255,39 +2415,6 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
|
|||
.collect()
|
||||
}
|
||||
|
||||
fn lookup_assoc_ty(
|
||||
&self,
|
||||
ident: Ident,
|
||||
block: hir::HirId,
|
||||
span: Span,
|
||||
scope: DefId,
|
||||
) -> Option<DefId> {
|
||||
let tcx = self.tcx();
|
||||
let (ident, def_scope) = tcx.adjust_ident_and_get_scope(ident, scope, block);
|
||||
|
||||
// We have already adjusted the item name above, so compare with `ident.normalize_to_macros_2_0()` instead
|
||||
// of calling `find_by_name_and_kind`.
|
||||
let item = tcx.associated_items(scope).in_definition_order().find(|i| {
|
||||
i.kind.namespace() == Namespace::TypeNS
|
||||
&& i.ident(tcx).normalize_to_macros_2_0() == ident
|
||||
})?;
|
||||
|
||||
let kind = DefKind::AssocTy;
|
||||
if !item.visibility(tcx).is_accessible_from(def_scope, tcx) {
|
||||
let kind = kind.descr(item.def_id);
|
||||
let msg = format!("{kind} `{ident}` is private");
|
||||
let def_span = self.tcx().def_span(item.def_id);
|
||||
tcx.sess
|
||||
.struct_span_err_with_code(span, &msg, rustc_errors::error_code!(E0624))
|
||||
.span_label(span, &format!("private {kind}"))
|
||||
.span_label(def_span, &format!("{kind} defined here"))
|
||||
.emit();
|
||||
}
|
||||
tcx.check_stability(item.def_id, Some(block), span, None);
|
||||
|
||||
Some(item.def_id)
|
||||
}
|
||||
|
||||
fn qpath_to_ty(
|
||||
&self,
|
||||
span: Span,
|
||||
|
@ -3375,3 +3502,36 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub trait InferCtxtExt<'tcx> {
|
||||
fn fresh_item_substs(&self, def_id: DefId) -> SubstsRef<'tcx>;
|
||||
}
|
||||
|
||||
impl<'tcx> InferCtxtExt<'tcx> for InferCtxt<'tcx> {
|
||||
fn fresh_item_substs(&self, def_id: DefId) -> SubstsRef<'tcx> {
|
||||
InternalSubsts::for_item(self.tcx, def_id, |param, _| match param.kind {
|
||||
GenericParamDefKind::Lifetime => self.tcx.lifetimes.re_erased.into(),
|
||||
GenericParamDefKind::Type { .. } => self
|
||||
.next_ty_var(TypeVariableOrigin {
|
||||
kind: TypeVariableOriginKind::SubstitutionPlaceholder,
|
||||
span: self.tcx.def_span(def_id),
|
||||
})
|
||||
.into(),
|
||||
GenericParamDefKind::Const { .. } => {
|
||||
let span = self.tcx.def_span(def_id);
|
||||
let origin = ConstVariableOrigin {
|
||||
kind: ConstVariableOriginKind::SubstitutionPlaceholder,
|
||||
span,
|
||||
};
|
||||
self.next_const_var(
|
||||
self.tcx
|
||||
.type_of(param.def_id)
|
||||
.no_bound_vars()
|
||||
.expect("const parameter types cannot be generic"),
|
||||
origin,
|
||||
)
|
||||
.into()
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -1599,7 +1599,7 @@ fn check_return_position_impl_trait_in_trait_bounds<'tcx>(
{
for arg in fn_output.walk() {
if let ty::GenericArgKind::Type(ty) = arg.unpack()
&& let ty::Alias(ty::Projection, proj) = ty.kind()
&& let ty::Alias(ty::Opaque, proj) = ty.kind()
&& tcx.def_kind(proj.def_id) == DefKind::ImplTraitPlaceholder
&& tcx.impl_trait_in_trait_parent(proj.def_id) == fn_def_id.to_def_id()
{
@ -9,12 +9,11 @@ use rustc_data_structures::fx::FxHashSet;
|
|||
use rustc_errors::Applicability;
|
||||
use rustc_hir as hir;
|
||||
use rustc_hir::def::DefKind;
|
||||
use rustc_hir_analysis::astconv::InferCtxtExt as _;
|
||||
use rustc_hir_analysis::autoderef::{self, Autoderef};
|
||||
use rustc_infer::infer::canonical::OriginalQueryValues;
|
||||
use rustc_infer::infer::canonical::{Canonical, QueryResponse};
|
||||
use rustc_infer::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind};
|
||||
use rustc_infer::infer::{self, InferOk, TyCtxtInferExt};
|
||||
use rustc_middle::infer::unify_key::{ConstVariableOrigin, ConstVariableOriginKind};
|
||||
use rustc_middle::middle::stability;
|
||||
use rustc_middle::ty::fast_reject::{simplify_type, TreatParams};
|
||||
use rustc_middle::ty::AssocItem;
|
||||
|
@ -1941,33 +1940,6 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
|
|||
(self.tcx.type_of(impl_def_id), self.fresh_item_substs(impl_def_id))
|
||||
}
|
||||
|
||||
fn fresh_item_substs(&self, def_id: DefId) -> SubstsRef<'tcx> {
|
||||
InternalSubsts::for_item(self.tcx, def_id, |param, _| match param.kind {
|
||||
GenericParamDefKind::Lifetime => self.tcx.lifetimes.re_erased.into(),
|
||||
GenericParamDefKind::Type { .. } => self
|
||||
.next_ty_var(TypeVariableOrigin {
|
||||
kind: TypeVariableOriginKind::SubstitutionPlaceholder,
|
||||
span: self.tcx.def_span(def_id),
|
||||
})
|
||||
.into(),
|
||||
GenericParamDefKind::Const { .. } => {
|
||||
let span = self.tcx.def_span(def_id);
|
||||
let origin = ConstVariableOrigin {
|
||||
kind: ConstVariableOriginKind::SubstitutionPlaceholder,
|
||||
span,
|
||||
};
|
||||
self.next_const_var(
|
||||
self.tcx
|
||||
.type_of(param.def_id)
|
||||
.no_bound_vars()
|
||||
.expect("const parameter types cannot be generic"),
|
||||
origin,
|
||||
)
|
||||
.into()
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/// Replaces late-bound-regions bound by `value` with `'static` using
|
||||
/// `ty::erase_late_bound_regions`.
|
||||
///
|
||||
|
|
|
@@ -418,10 +418,15 @@ impl<'cx, 'tcx> TypeFolder<TyCtxt<'tcx>> for Canonicalizer<'cx, 'tcx> {
bug!("encountered a fresh type during canonicalization")
}

ty::Placeholder(placeholder) => self.canonicalize_ty_var(
CanonicalVarInfo { kind: CanonicalVarKind::PlaceholderTy(placeholder) },
t,
),
ty::Placeholder(mut placeholder) => {
if !self.canonicalize_mode.preserve_universes() {
placeholder.universe = ty::UniverseIndex::ROOT;
}
self.canonicalize_ty_var(
CanonicalVarInfo { kind: CanonicalVarKind::PlaceholderTy(placeholder) },
t,
)
}

ty::Bound(debruijn, _) => {
if debruijn >= self.binder_index {
@@ -671,6 +671,12 @@ pub enum TerminatorKind<'tcx> {
/// as parameters, and `None` for the destination. Keep in mind that the `cleanup` path is not
/// necessarily executed even in the case of a panic, for example in `-C panic=abort`. If the
/// assertion does not fail, execution continues at the specified basic block.
///
/// When overflow checking is disabled and this is run-time MIR (as opposed to compile-time MIR
/// that is used for CTFE), the following variants of this terminator behave as `goto target`:
/// - `OverflowNeg(..)`,
/// - `Overflow(op, ..)` if op is a "checkable" operation (add, sub, mul, shl, shr, but NOT
/// div or rem).
Assert {
cond: Operand<'tcx>,
expected: bool,
@@ -1103,10 +1109,6 @@ pub enum Rvalue<'tcx> {

/// Same as `BinaryOp`, but yields `(T, bool)` with a `bool` indicating an error condition.
///
/// When overflow checking is disabled and we are generating run-time code, the error condition
/// is false. Otherwise, and always during CTFE, the error condition is determined as described
/// below.
///
/// For addition, subtraction, and multiplication on integers the error condition is set when
/// the infinite precision result would be unequal to the actual result.
///
@ -217,23 +217,21 @@ impl<'tcx> ConstKind<'tcx> {
|
|||
// Note that we erase regions *before* calling `with_reveal_all_normalized`,
|
||||
// so that we don't try to invoke this query with
|
||||
// any region variables.
|
||||
let param_env_and = tcx
|
||||
.erase_regions(param_env)
|
||||
.with_reveal_all_normalized(tcx)
|
||||
.and(tcx.erase_regions(unevaluated));
|
||||
|
||||
// HACK(eddyb) when the query key would contain inference variables,
|
||||
// attempt using identity substs and `ParamEnv` instead, that will succeed
|
||||
// when the expression doesn't depend on any parameters.
|
||||
// FIXME(eddyb, skinny121) pass `InferCtxt` into here when it's available, so that
|
||||
// we can call `infcx.const_eval_resolve` which handles inference variables.
|
||||
let param_env_and = if param_env_and.needs_infer() {
|
||||
let param_env_and = if (param_env, unevaluated).has_non_region_infer() {
|
||||
tcx.param_env(unevaluated.def.did).and(ty::UnevaluatedConst {
|
||||
def: unevaluated.def,
|
||||
substs: InternalSubsts::identity_for_item(tcx, unevaluated.def.did),
|
||||
})
|
||||
} else {
|
||||
param_env_and
|
||||
tcx.erase_regions(param_env)
|
||||
.with_reveal_all_normalized(tcx)
|
||||
.and(tcx.erase_regions(unevaluated))
|
||||
};
|
||||
|
||||
// FIXME(eddyb) maybe the `const_eval_*` methods should take
|
||||
|
|
|
@@ -770,7 +770,7 @@ where

ty::Dynamic(_, _, ty::DynStar) => {
if i == 0 {
TyMaybeWithLayout::Ty(tcx.types.usize)
TyMaybeWithLayout::Ty(tcx.mk_mut_ptr(tcx.types.unit))
} else if i == 1 {
// FIXME(dyn-star) same FIXME as above applies here too
TyMaybeWithLayout::Ty(
@ -735,7 +735,10 @@ pub trait PrettyPrinter<'tcx>:
|
|||
p!(print(data))
|
||||
}
|
||||
}
|
||||
ty::Placeholder(placeholder) => p!(write("Placeholder({:?})", placeholder)),
|
||||
ty::Placeholder(placeholder) => match placeholder.name {
|
||||
ty::BoundTyKind::Anon(_) => p!(write("Placeholder({:?})", placeholder)),
|
||||
ty::BoundTyKind::Param(_, name) => p!(write("{}", name)),
|
||||
},
|
||||
ty::Alias(ty::Opaque, ty::AliasTy { def_id, substs, .. }) => {
|
||||
// We use verbose printing in 'NO_QUERIES' mode, to
|
||||
// avoid needing to call `predicates_of`. This should
|
||||
|
|
|
@ -4,7 +4,7 @@ use crate::middle::codegen_fn_attrs::CodegenFnAttrFlags;
|
|||
use crate::mir;
|
||||
use crate::ty::layout::IntegerExt;
|
||||
use crate::ty::{
|
||||
self, ir::TypeFolder, DefIdTree, FallibleTypeFolder, Ty, TyCtxt, TypeFoldable,
|
||||
self, ir::TypeFolder, DefIdTree, FallibleTypeFolder, ToPredicate, Ty, TyCtxt, TypeFoldable,
|
||||
TypeSuperFoldable,
|
||||
};
|
||||
use crate::ty::{GenericArgKind, SubstsRef};
|
||||
|
@ -865,6 +865,26 @@ impl<'tcx> TypeFolder<TyCtxt<'tcx>> for OpaqueTypeExpander<'tcx> {
|
|||
}
|
||||
t
|
||||
}
|
||||
|
||||
fn fold_predicate(&mut self, p: ty::Predicate<'tcx>) -> ty::Predicate<'tcx> {
|
||||
if let ty::PredicateKind::Clause(clause) = p.kind().skip_binder()
|
||||
&& let ty::Clause::Projection(projection_pred) = clause
|
||||
{
|
||||
p.kind()
|
||||
.rebind(ty::ProjectionPredicate {
|
||||
projection_ty: projection_pred.projection_ty.fold_with(self),
|
||||
// Don't fold the term on the RHS of the projection predicate.
|
||||
// This is because for default trait methods with RPITITs, we
|
||||
// install a `NormalizesTo(Projection(RPITIT) -> Opaque(RPITIT))`
|
||||
// predicate, which would trivially cause a cycle when we do
|
||||
// anything that requires `ParamEnv::with_reveal_all_normalized`.
|
||||
term: projection_pred.term,
|
||||
})
|
||||
.to_predicate(self.tcx)
|
||||
} else {
|
||||
p.super_fold_with(self)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'tcx> Ty<'tcx> {
|
||||
|
|
|
@ -15,7 +15,7 @@ use rustc_middle::mir::visit::{
|
|||
};
|
||||
use rustc_middle::mir::{
|
||||
BasicBlock, BinOp, Body, Constant, ConstantKind, Local, LocalDecl, LocalKind, Location,
|
||||
Operand, Place, Rvalue, SourceInfo, Statement, StatementKind, Terminator, TerminatorKind, UnOp,
|
||||
Operand, Place, Rvalue, SourceInfo, Statement, StatementKind, Terminator, TerminatorKind,
|
||||
RETURN_PLACE,
|
||||
};
|
||||
use rustc_middle::ty::layout::{LayoutError, LayoutOf, LayoutOfHelpers, TyAndLayout};
|
||||
|
@ -503,55 +503,6 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
|
|||
}
|
||||
}
|
||||
|
||||
fn check_unary_op(&mut self, op: UnOp, arg: &Operand<'tcx>) -> Option<()> {
|
||||
if self.use_ecx(|this| {
|
||||
let val = this.ecx.read_immediate(&this.ecx.eval_operand(arg, None)?)?;
|
||||
let (_res, overflow, _ty) = this.ecx.overflowing_unary_op(op, &val)?;
|
||||
Ok(overflow)
|
||||
})? {
|
||||
// `AssertKind` only has an `OverflowNeg` variant, so make sure that is
|
||||
// appropriate to use.
|
||||
assert_eq!(op, UnOp::Neg, "Neg is the only UnOp that can overflow");
|
||||
return None;
|
||||
}
|
||||
|
||||
Some(())
|
||||
}
|
||||
|
||||
fn check_binary_op(
|
||||
&mut self,
|
||||
op: BinOp,
|
||||
left: &Operand<'tcx>,
|
||||
right: &Operand<'tcx>,
|
||||
) -> Option<()> {
|
||||
let r = self.use_ecx(|this| this.ecx.read_immediate(&this.ecx.eval_operand(right, None)?));
|
||||
let l = self.use_ecx(|this| this.ecx.read_immediate(&this.ecx.eval_operand(left, None)?));
|
||||
// Check for exceeding shifts *even if* we cannot evaluate the LHS.
|
||||
if matches!(op, BinOp::Shr | BinOp::Shl) {
|
||||
let r = r.clone()?;
|
||||
// We need the type of the LHS. We cannot use `place_layout` as that is the type
|
||||
// of the result, which for checked binops is not the same!
|
||||
let left_ty = left.ty(self.local_decls, self.tcx);
|
||||
let left_size = self.ecx.layout_of(left_ty).ok()?.size;
|
||||
let right_size = r.layout.size;
|
||||
let r_bits = r.to_scalar().to_bits(right_size).ok();
|
||||
if r_bits.map_or(false, |b| b >= left_size.bits() as u128) {
|
||||
return None;
|
||||
}
|
||||
}
|
||||
|
||||
if let (Some(l), Some(r)) = (&l, &r) {
|
||||
// The remaining operators are handled through `overflowing_binary_op`.
|
||||
if self.use_ecx(|this| {
|
||||
let (_res, overflow, _ty) = this.ecx.overflowing_binary_op(op, l, r)?;
|
||||
Ok(overflow)
|
||||
})? {
|
||||
return None;
|
||||
}
|
||||
}
|
||||
Some(())
|
||||
}
|
||||
|
||||
fn propagate_operand(&mut self, operand: &mut Operand<'tcx>) {
|
||||
match *operand {
|
||||
Operand::Copy(l) | Operand::Move(l) => {
|
||||
|
@ -587,28 +538,6 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
|
|||
// 2. Working around bugs in other parts of the compiler
|
||||
// - In this case, we'll return `None` from this function to stop evaluation.
|
||||
match rvalue {
|
||||
// Additional checking: give lints to the user if an overflow would occur.
|
||||
// We do this here and not in the `Assert` terminator as that terminator is
|
||||
// only sometimes emitted (overflow checks can be disabled), but we want to always
|
||||
// lint.
|
||||
Rvalue::UnaryOp(op, arg) => {
|
||||
trace!("checking UnaryOp(op = {:?}, arg = {:?})", op, arg);
|
||||
self.check_unary_op(*op, arg)?;
|
||||
}
|
||||
Rvalue::BinaryOp(op, box (left, right)) => {
|
||||
trace!("checking BinaryOp(op = {:?}, left = {:?}, right = {:?})", op, left, right);
|
||||
self.check_binary_op(*op, left, right)?;
|
||||
}
|
||||
Rvalue::CheckedBinaryOp(op, box (left, right)) => {
|
||||
trace!(
|
||||
"checking CheckedBinaryOp(op = {:?}, left = {:?}, right = {:?})",
|
||||
op,
|
||||
left,
|
||||
right
|
||||
);
|
||||
self.check_binary_op(*op, left, right)?;
|
||||
}
|
||||
|
||||
// Do not try creating references (#67862)
|
||||
Rvalue::AddressOf(_, place) | Rvalue::Ref(_, _, place) => {
|
||||
trace!("skipping AddressOf | Ref for {:?}", place);
|
||||
|
@ -638,7 +567,10 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
|
|||
| Rvalue::Cast(..)
|
||||
| Rvalue::ShallowInitBox(..)
|
||||
| Rvalue::Discriminant(..)
|
||||
| Rvalue::NullaryOp(..) => {}
|
||||
| Rvalue::NullaryOp(..)
|
||||
| Rvalue::UnaryOp(..)
|
||||
| Rvalue::BinaryOp(..)
|
||||
| Rvalue::CheckedBinaryOp(..) => {}
|
||||
}
|
||||
|
||||
// FIXME we need to revisit this for #67176
|
||||
|
@ -1079,31 +1011,18 @@ impl<'tcx> MutVisitor<'tcx> for ConstPropagator<'_, 'tcx> {
|
|||
// Do NOT early return in this function, it does some crucial fixup of the state at the end!
|
||||
match &mut terminator.kind {
|
||||
TerminatorKind::Assert { expected, ref mut cond, .. } => {
|
||||
if let Some(ref value) = self.eval_operand(&cond) {
|
||||
trace!("assertion on {:?} should be {:?}", value, expected);
|
||||
let expected = Scalar::from_bool(*expected);
|
||||
if let Some(ref value) = self.eval_operand(&cond)
|
||||
// FIXME should be used use_ecx rather than a local match... but we have
|
||||
// quite a few of these read_scalar/read_immediate that need fixing.
|
||||
if let Ok(value_const) = self.ecx.read_scalar(&value) {
|
||||
if expected != value_const {
|
||||
// Poison all places this operand references so that further code
|
||||
// doesn't use the invalid value
|
||||
match cond {
|
||||
Operand::Move(ref place) | Operand::Copy(ref place) => {
|
||||
Self::remove_const(&mut self.ecx, place.local);
|
||||
}
|
||||
Operand::Constant(_) => {}
|
||||
}
|
||||
} else {
|
||||
if self.should_const_prop(value) {
|
||||
*cond = self.operand_from_scalar(
|
||||
value_const,
|
||||
self.tcx.types.bool,
|
||||
source_info.span,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
&& let Ok(value_const) = self.ecx.read_scalar(&value)
|
||||
&& self.should_const_prop(value)
|
||||
{
|
||||
trace!("assertion on {:?} should be {:?}", value, expected);
|
||||
*cond = self.operand_from_scalar(
|
||||
value_const,
|
||||
self.tcx.types.bool,
|
||||
source_info.span,
|
||||
);
|
||||
}
|
||||
}
|
||||
TerminatorKind::SwitchInt { ref mut discr, .. } => {
|
||||
|
|
|
@@ -180,12 +180,7 @@ impl<'tcx> ValueAnalysis<'tcx> for ConstAnalysis<'_, 'tcx> {
let overflow = match overflow {
FlatSet::Top => FlatSet::Top,
FlatSet::Elem(overflow) => {
if overflow {
// Overflow cannot be reliably propagated. See: https://github.com/rust-lang/rust/pull/101168#issuecomment-1288091446
FlatSet::Top
} else {
self.wrap_scalar(Scalar::from_bool(false), self.tcx.types.bool)
}
self.wrap_scalar(Scalar::from_bool(overflow), self.tcx.types.bool)
}
FlatSet::Bottom => FlatSet::Bottom,
};
@ -107,9 +107,29 @@ impl<'tcx> MirPass<'tcx> for LowerIntrinsics {
|
|||
}
|
||||
}
|
||||
sym::add_with_overflow | sym::sub_with_overflow | sym::mul_with_overflow => {
|
||||
// The checked binary operations are not suitable target for lowering here,
|
||||
// since their semantics depend on the value of overflow-checks flag used
|
||||
// during codegen. Issue #35310.
|
||||
if let Some(target) = *target {
|
||||
let lhs;
|
||||
let rhs;
|
||||
{
|
||||
let mut args = args.drain(..);
|
||||
lhs = args.next().unwrap();
|
||||
rhs = args.next().unwrap();
|
||||
}
|
||||
let bin_op = match intrinsic_name {
|
||||
sym::add_with_overflow => BinOp::Add,
|
||||
sym::sub_with_overflow => BinOp::Sub,
|
||||
sym::mul_with_overflow => BinOp::Mul,
|
||||
_ => bug!("unexpected intrinsic"),
|
||||
};
|
||||
block.statements.push(Statement {
|
||||
source_info: terminator.source_info,
|
||||
kind: StatementKind::Assign(Box::new((
|
||||
*destination,
|
||||
Rvalue::CheckedBinaryOp(bin_op, Box::new((lhs, rhs))),
|
||||
))),
|
||||
});
|
||||
terminator.kind = TerminatorKind::Goto { target };
|
||||
}
|
||||
}
|
||||
sym::size_of | sym::min_align_of => {
|
||||
if let Some(target) = *target {
|
||||
|
|
|
@@ -3,6 +3,7 @@
#![feature(hash_raw_entry)]
#![feature(min_specialization)]
#![feature(extern_types)]
#![feature(let_chains)]
#![allow(rustc::potential_query_instability)]
#![deny(rustc::untranslatable_diagnostic)]
#![deny(rustc::diagnostic_outside_of_impl)]
@ -2,7 +2,7 @@
|
|||
//! generate the actual methods on tcx which find and execute the provider,
|
||||
//! manage the caches, and so forth.
|
||||
|
||||
use crate::dep_graph::{DepContext, DepKind, DepNode, DepNodeIndex};
|
||||
use crate::dep_graph::{DepContext, DepKind, DepNode, DepNodeIndex, DepNodeParams};
|
||||
use crate::ich::StableHashingContext;
|
||||
use crate::query::caches::QueryCache;
|
||||
use crate::query::job::{report_cycle, QueryInfo, QueryJob, QueryJobId, QueryJobInfo};
|
||||
|
@ -408,10 +408,27 @@ where
|
|||
|
||||
// Fast path for when incr. comp. is off.
|
||||
if !dep_graph.is_fully_enabled() {
|
||||
// Fingerprint the key, just to assert that it doesn't
|
||||
// have anything we don't consider hashable
|
||||
if cfg!(debug_assertions) {
|
||||
let _ = key.to_fingerprint(*qcx.dep_context());
|
||||
}
|
||||
|
||||
let prof_timer = qcx.dep_context().profiler().query_provider();
|
||||
let result = qcx.start_query(job_id, Q::DEPTH_LIMIT, None, || Q::compute(qcx, key));
|
||||
let dep_node_index = dep_graph.next_virtual_depnode_index();
|
||||
prof_timer.finish_with_query_invocation_id(dep_node_index.into());
|
||||
|
||||
// Similarly, fingerprint the result to assert that
|
||||
// it doesn't have anything not considered hashable.
|
||||
if cfg!(debug_assertions)
|
||||
&& let Some(hash_result) = Q::HASH_RESULT
|
||||
{
|
||||
qcx.dep_context().with_stable_hashing_context(|mut hcx| {
|
||||
hash_result(&mut hcx, &result);
|
||||
});
|
||||
}
|
||||
|
||||
return (result, dep_node_index);
|
||||
}
|
||||
|
||||
|
|
|
@ -4211,7 +4211,8 @@ impl<'a: 'ast, 'b, 'ast, 'tcx> LateResolutionVisitor<'a, 'b, 'ast, 'tcx> {
if let Some(res) = res
&& let Some(def_id) = res.opt_def_id()
&& !def_id.is_local()
&& self.r.session.crate_types().contains(&CrateType::ProcMacro) {
&& self.r.session.crate_types().contains(&CrateType::ProcMacro)
&& matches!(self.r.session.opts.resolve_doc_links, ResolveDocLinks::ExportedMetadata) {
// Encoding foreign def ids in proc macro crate metadata will ICE.
return None;
}

@ -4281,6 +4282,10 @@ impl<'a: 'ast, 'b, 'ast, 'tcx> LateResolutionVisitor<'a, 'b, 'ast, 'tcx> {
.filter_map(|tr| {
if !tr.def_id.is_local()
&& self.r.session.crate_types().contains(&CrateType::ProcMacro)
&& matches!(
self.r.session.opts.resolve_doc_links,
ResolveDocLinks::ExportedMetadata
)
{
// Encoding foreign def ids in proc macro crate metadata will ICE.
return None;
@ -1,11 +1,10 @@
use pulldown_cmark::{BrokenLink, Event, Options, Parser, Tag};
use pulldown_cmark::{BrokenLink, Event, LinkType, Options, Parser, Tag};
use rustc_ast as ast;
use rustc_ast::util::comments::beautify_doc_string;
use rustc_data_structures::fx::FxHashMap;
use rustc_span::def_id::DefId;
use rustc_span::symbol::{kw, Symbol};
use rustc_span::Span;
use std::cell::RefCell;
use std::{cmp, mem};

#[derive(Clone, Copy, PartialEq, Eq, Debug)]

@ -340,6 +339,7 @@ pub fn inner_docs(attrs: &[ast::Attribute]) -> bool {
fn preprocess_link(link: &str) -> String {
let link = link.replace('`', "");
let link = link.split('#').next().unwrap();
let link = link.trim();
let link = link.rsplit('@').next().unwrap();
let link = link.strip_suffix("()").unwrap_or(link);
let link = link.strip_suffix("{}").unwrap_or(link);
@ -348,22 +348,37 @@ fn preprocess_link(link: &str) -> String {
|
|||
strip_generics_from_path(link).unwrap_or_else(|_| link.to_string())
|
||||
}
|
||||
|
||||
/// Keep inline and reference links `[]`,
|
||||
/// but skip autolinks `<>` which we never consider to be intra-doc links.
|
||||
pub fn may_be_doc_link(link_type: LinkType) -> bool {
|
||||
match link_type {
|
||||
LinkType::Inline
|
||||
| LinkType::Reference
|
||||
| LinkType::ReferenceUnknown
|
||||
| LinkType::Collapsed
|
||||
| LinkType::CollapsedUnknown
|
||||
| LinkType::Shortcut
|
||||
| LinkType::ShortcutUnknown => true,
|
||||
LinkType::Autolink | LinkType::Email => false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Simplified version of `preprocessed_markdown_links` from rustdoc.
|
||||
/// Must return at least the same links as it, but may add some more links on top of that.
|
||||
pub(crate) fn attrs_to_preprocessed_links(attrs: &[ast::Attribute]) -> Vec<String> {
|
||||
let (doc_fragments, _) = attrs_to_doc_fragments(attrs.iter().map(|attr| (attr, None)), true);
|
||||
let doc = prepare_to_doc_link_resolution(&doc_fragments).into_values().next().unwrap();
|
||||
|
||||
let links = RefCell::new(Vec::new());
|
||||
let mut callback = |link: BrokenLink<'_>| {
|
||||
links.borrow_mut().push(preprocess_link(&link.reference));
|
||||
None
|
||||
};
|
||||
for event in Parser::new_with_broken_link_callback(&doc, main_body_opts(), Some(&mut callback))
|
||||
{
|
||||
if let Event::Start(Tag::Link(_, dest, _)) = event {
|
||||
links.borrow_mut().push(preprocess_link(&dest));
|
||||
Parser::new_with_broken_link_callback(
|
||||
&doc,
|
||||
main_body_opts(),
|
||||
Some(&mut |link: BrokenLink<'_>| Some((link.reference, "".into()))),
|
||||
)
|
||||
.filter_map(|event| match event {
|
||||
Event::Start(Tag::Link(link_type, dest, _)) if may_be_doc_link(link_type) => {
|
||||
Some(preprocess_link(&dest))
|
||||
}
|
||||
}
|
||||
links.into_inner()
|
||||
_ => None,
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
|
|
@ -90,18 +90,22 @@ pub(super) trait GoalKind<'tcx>: TypeFoldable<'tcx> + Copy + Eq {
|
|||
|
||||
fn trait_def_id(self, tcx: TyCtxt<'tcx>) -> DefId;
|
||||
|
||||
// Consider a clause, which consists of a "assumption" and some "requirements",
|
||||
// to satisfy a goal. If the requirements hold, then attempt to satisfy our
|
||||
// goal by equating it with the assumption.
|
||||
fn consider_implied_clause(
|
||||
ecx: &mut EvalCtxt<'_, 'tcx>,
|
||||
goal: Goal<'tcx, Self>,
|
||||
assumption: ty::Predicate<'tcx>,
|
||||
requirements: impl IntoIterator<Item = Goal<'tcx, ty::Predicate<'tcx>>>,
|
||||
) -> QueryResult<'tcx>;
|
||||
|
||||
fn consider_impl_candidate(
|
||||
ecx: &mut EvalCtxt<'_, 'tcx>,
|
||||
goal: Goal<'tcx, Self>,
|
||||
impl_def_id: DefId,
|
||||
) -> QueryResult<'tcx>;
|
||||
|
||||
fn consider_assumption(
|
||||
ecx: &mut EvalCtxt<'_, 'tcx>,
|
||||
goal: Goal<'tcx, Self>,
|
||||
assumption: ty::Predicate<'tcx>,
|
||||
) -> QueryResult<'tcx>;
|
||||
|
||||
// A type implements an `auto trait` if its components do as well. These components
|
||||
// are given by built-in rules from [`instantiate_constituent_tys_for_auto_trait`].
|
||||
fn consider_auto_trait_candidate(
|
||||
|
@ -355,7 +359,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
|
|||
candidates: &mut Vec<Candidate<'tcx>>,
|
||||
) {
|
||||
for (i, assumption) in goal.param_env.caller_bounds().iter().enumerate() {
|
||||
match G::consider_assumption(self, goal, assumption) {
|
||||
match G::consider_implied_clause(self, goal, assumption, []) {
|
||||
Ok(result) => {
|
||||
candidates.push(Candidate { source: CandidateSource::ParamEnv(i), result })
|
||||
}
|
||||
|
@ -402,7 +406,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
|
|||
|
||||
for assumption in self.tcx().item_bounds(alias_ty.def_id).subst(self.tcx(), alias_ty.substs)
|
||||
{
|
||||
match G::consider_assumption(self, goal, assumption) {
|
||||
match G::consider_implied_clause(self, goal, assumption, []) {
|
||||
Ok(result) => {
|
||||
candidates.push(Candidate { source: CandidateSource::AliasBound, result })
|
||||
}
|
||||
|
@ -452,7 +456,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
|
|||
for assumption in
|
||||
elaborate_predicates(tcx, bounds.iter().map(|bound| bound.with_self_ty(tcx, self_ty)))
|
||||
{
|
||||
match G::consider_assumption(self, goal, assumption.predicate) {
|
||||
match G::consider_implied_clause(self, goal, assumption.predicate, []) {
|
||||
Ok(result) => {
|
||||
candidates.push(Candidate { source: CandidateSource::BuiltinImpl, result })
|
||||
}
|
||||
|
|
|
@ -168,6 +168,37 @@ impl<'tcx> assembly::GoalKind<'tcx> for ProjectionPredicate<'tcx> {
|
|||
self.trait_def_id(tcx)
|
||||
}
|
||||
|
||||
fn consider_implied_clause(
|
||||
ecx: &mut EvalCtxt<'_, 'tcx>,
|
||||
goal: Goal<'tcx, Self>,
|
||||
assumption: ty::Predicate<'tcx>,
|
||||
requirements: impl IntoIterator<Item = Goal<'tcx, ty::Predicate<'tcx>>>,
|
||||
) -> QueryResult<'tcx> {
|
||||
if let Some(poly_projection_pred) = assumption.to_opt_poly_projection_pred()
|
||||
&& poly_projection_pred.projection_def_id() == goal.predicate.def_id()
|
||||
{
|
||||
ecx.infcx.probe(|_| {
|
||||
let assumption_projection_pred =
|
||||
ecx.infcx.instantiate_binder_with_infer(poly_projection_pred);
|
||||
let mut nested_goals = ecx.infcx.eq(
|
||||
goal.param_env,
|
||||
goal.predicate.projection_ty,
|
||||
assumption_projection_pred.projection_ty,
|
||||
)?;
|
||||
nested_goals.extend(requirements);
|
||||
let subst_certainty = ecx.evaluate_all(nested_goals)?;
|
||||
|
||||
ecx.eq_term_and_make_canonical_response(
|
||||
goal,
|
||||
subst_certainty,
|
||||
assumption_projection_pred.term,
|
||||
)
|
||||
})
|
||||
} else {
|
||||
Err(NoSolution)
|
||||
}
|
||||
}
|
||||
|
||||
fn consider_impl_candidate(
|
||||
ecx: &mut EvalCtxt<'_, 'tcx>,
|
||||
goal: Goal<'tcx, ProjectionPredicate<'tcx>>,
|
||||
|
@ -260,35 +291,6 @@ impl<'tcx> assembly::GoalKind<'tcx> for ProjectionPredicate<'tcx> {
|
|||
})
|
||||
}
|
||||
|
||||
fn consider_assumption(
|
||||
ecx: &mut EvalCtxt<'_, 'tcx>,
|
||||
goal: Goal<'tcx, Self>,
|
||||
assumption: ty::Predicate<'tcx>,
|
||||
) -> QueryResult<'tcx> {
|
||||
if let Some(poly_projection_pred) = assumption.to_opt_poly_projection_pred()
|
||||
&& poly_projection_pred.projection_def_id() == goal.predicate.def_id()
|
||||
{
|
||||
ecx.infcx.probe(|_| {
|
||||
let assumption_projection_pred =
|
||||
ecx.infcx.instantiate_binder_with_infer(poly_projection_pred);
|
||||
let nested_goals = ecx.infcx.eq(
|
||||
goal.param_env,
|
||||
goal.predicate.projection_ty,
|
||||
assumption_projection_pred.projection_ty,
|
||||
)?;
|
||||
let subst_certainty = ecx.evaluate_all(nested_goals)?;
|
||||
|
||||
ecx.eq_term_and_make_canonical_response(
|
||||
goal,
|
||||
subst_certainty,
|
||||
assumption_projection_pred.term,
|
||||
)
|
||||
})
|
||||
} else {
|
||||
Err(NoSolution)
|
||||
}
|
||||
}
|
||||
|
||||
fn consider_auto_trait_candidate(
|
||||
_ecx: &mut EvalCtxt<'_, 'tcx>,
|
||||
goal: Goal<'tcx, Self>,
|
||||
|
@ -329,25 +331,28 @@ impl<'tcx> assembly::GoalKind<'tcx> for ProjectionPredicate<'tcx> {
|
|||
goal: Goal<'tcx, Self>,
|
||||
goal_kind: ty::ClosureKind,
|
||||
) -> QueryResult<'tcx> {
|
||||
if let Some(tupled_inputs_and_output) =
|
||||
structural_traits::extract_tupled_inputs_and_output_from_callable(
|
||||
ecx.tcx(),
|
||||
goal.predicate.self_ty(),
|
||||
goal_kind,
|
||||
)?
|
||||
{
|
||||
let pred = tupled_inputs_and_output
|
||||
.map_bound(|(inputs, output)| ty::ProjectionPredicate {
|
||||
projection_ty: ecx
|
||||
.tcx()
|
||||
.mk_alias_ty(goal.predicate.def_id(), [goal.predicate.self_ty(), inputs]),
|
||||
term: output.into(),
|
||||
})
|
||||
.to_predicate(ecx.tcx());
|
||||
Self::consider_assumption(ecx, goal, pred)
|
||||
} else {
|
||||
ecx.make_canonical_response(Certainty::AMBIGUOUS)
|
||||
}
|
||||
let tcx = ecx.tcx();
|
||||
let Some(tupled_inputs_and_output) =
|
||||
structural_traits::extract_tupled_inputs_and_output_from_callable(
|
||||
tcx,
|
||||
goal.predicate.self_ty(),
|
||||
goal_kind,
|
||||
)? else {
|
||||
return ecx.make_canonical_response(Certainty::AMBIGUOUS);
|
||||
};
|
||||
let output_is_sized_pred = tupled_inputs_and_output
|
||||
.map_bound(|(_, output)| tcx.at(DUMMY_SP).mk_trait_ref(LangItem::Sized, [output]));
|
||||
|
||||
let pred = tupled_inputs_and_output
|
||||
.map_bound(|(inputs, output)| ty::ProjectionPredicate {
|
||||
projection_ty: tcx
|
||||
.mk_alias_ty(goal.predicate.def_id(), [goal.predicate.self_ty(), inputs]),
|
||||
term: output.into(),
|
||||
})
|
||||
.to_predicate(tcx);
|
||||
// A built-in `Fn` impl only holds if the output is sized.
|
||||
// (FIXME: technically we only need to check this if the type is a fn ptr...)
|
||||
Self::consider_implied_clause(ecx, goal, pred, [goal.with(tcx, output_is_sized_pred)])
|
||||
}
|
||||
|
||||
fn consider_builtin_tuple_candidate(
|
||||
|
@ -466,7 +471,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for ProjectionPredicate<'tcx> {
|
|||
|
||||
let term = substs.as_generator().return_ty().into();
|
||||
|
||||
Self::consider_assumption(
|
||||
Self::consider_implied_clause(
|
||||
ecx,
|
||||
goal,
|
||||
ty::Binder::dummy(ty::ProjectionPredicate {
|
||||
|
@ -474,6 +479,9 @@ impl<'tcx> assembly::GoalKind<'tcx> for ProjectionPredicate<'tcx> {
|
|||
term,
|
||||
})
|
||||
.to_predicate(tcx),
|
||||
// Technically, we need to check that the future type is Sized,
|
||||
// but that's already proven by the generator being WF.
|
||||
[],
|
||||
)
|
||||
}
|
||||
|
||||
|
@ -503,7 +511,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for ProjectionPredicate<'tcx> {
|
|||
bug!("unexpected associated item `<{self_ty} as Generator>::{name}`")
|
||||
};
|
||||
|
||||
Self::consider_assumption(
|
||||
Self::consider_implied_clause(
|
||||
ecx,
|
||||
goal,
|
||||
ty::Binder::dummy(ty::ProjectionPredicate {
|
||||
|
@ -513,6 +521,9 @@ impl<'tcx> assembly::GoalKind<'tcx> for ProjectionPredicate<'tcx> {
|
|||
term,
|
||||
})
|
||||
.to_predicate(tcx),
|
||||
// Technically, we need to check that the future type is Sized,
|
||||
// but that's already proven by the generator being WF.
|
||||
[],
|
||||
)
|
||||
}
|
||||
|
||||
|
|
|
@ -6,6 +6,7 @@ use super::assembly;
|
|||
use super::infcx_ext::InferCtxtExt;
|
||||
use super::{CanonicalResponse, Certainty, EvalCtxt, Goal, QueryResult};
|
||||
use rustc_hir::def_id::DefId;
|
||||
use rustc_hir::LangItem;
|
||||
use rustc_infer::infer::InferCtxt;
|
||||
use rustc_infer::traits::query::NoSolution;
|
||||
use rustc_infer::traits::util::supertraits;
|
||||
|
@ -61,10 +62,11 @@ impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> {
|
|||
})
|
||||
}
|
||||
|
||||
fn consider_assumption(
|
||||
fn consider_implied_clause(
|
||||
ecx: &mut EvalCtxt<'_, 'tcx>,
|
||||
goal: Goal<'tcx, Self>,
|
||||
assumption: ty::Predicate<'tcx>,
|
||||
requirements: impl IntoIterator<Item = Goal<'tcx, ty::Predicate<'tcx>>>,
|
||||
) -> QueryResult<'tcx> {
|
||||
if let Some(poly_trait_pred) = assumption.to_opt_poly_trait_pred()
|
||||
&& poly_trait_pred.def_id() == goal.predicate.def_id()
|
||||
|
@ -73,11 +75,12 @@ impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> {
|
|||
ecx.infcx.probe(|_| {
|
||||
let assumption_trait_pred =
|
||||
ecx.infcx.instantiate_binder_with_infer(poly_trait_pred);
|
||||
let nested_goals = ecx.infcx.eq(
|
||||
let mut nested_goals = ecx.infcx.eq(
|
||||
goal.param_env,
|
||||
goal.predicate.trait_ref,
|
||||
assumption_trait_pred.trait_ref,
|
||||
)?;
|
||||
nested_goals.extend(requirements);
|
||||
ecx.evaluate_all_and_make_canonical_response(nested_goals)
|
||||
})
|
||||
} else {
|
||||
|
@ -173,23 +176,26 @@ impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> {
|
|||
goal: Goal<'tcx, Self>,
|
||||
goal_kind: ty::ClosureKind,
|
||||
) -> QueryResult<'tcx> {
|
||||
if let Some(tupled_inputs_and_output) =
|
||||
let tcx = ecx.tcx();
|
||||
let Some(tupled_inputs_and_output) =
|
||||
structural_traits::extract_tupled_inputs_and_output_from_callable(
|
||||
ecx.tcx(),
|
||||
tcx,
|
||||
goal.predicate.self_ty(),
|
||||
goal_kind,
|
||||
)?
|
||||
{
|
||||
let pred = tupled_inputs_and_output
|
||||
.map_bound(|(inputs, _)| {
|
||||
ecx.tcx()
|
||||
.mk_trait_ref(goal.predicate.def_id(), [goal.predicate.self_ty(), inputs])
|
||||
})
|
||||
.to_predicate(ecx.tcx());
|
||||
Self::consider_assumption(ecx, goal, pred)
|
||||
} else {
|
||||
ecx.make_canonical_response(Certainty::AMBIGUOUS)
|
||||
}
|
||||
)? else {
|
||||
return ecx.make_canonical_response(Certainty::AMBIGUOUS);
|
||||
};
|
||||
let output_is_sized_pred = tupled_inputs_and_output
|
||||
.map_bound(|(_, output)| tcx.at(DUMMY_SP).mk_trait_ref(LangItem::Sized, [output]));
|
||||
|
||||
let pred = tupled_inputs_and_output
|
||||
.map_bound(|(inputs, _)| {
|
||||
tcx.mk_trait_ref(goal.predicate.def_id(), [goal.predicate.self_ty(), inputs])
|
||||
})
|
||||
.to_predicate(tcx);
|
||||
// A built-in `Fn` impl only holds if the output is sized.
|
||||
// (FIXME: technically we only need to check this if the type is a fn ptr...)
|
||||
Self::consider_implied_clause(ecx, goal, pred, [goal.with(tcx, output_is_sized_pred)])
|
||||
}
|
||||
|
||||
fn consider_builtin_tuple_candidate(
|
||||
|
@ -225,6 +231,8 @@ impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> {
|
|||
}
|
||||
|
||||
// Async generator unconditionally implement `Future`
|
||||
// Technically, we need to check that the future output type is Sized,
|
||||
// but that's already proven by the generator being WF.
|
||||
ecx.make_canonical_response(Certainty::Yes)
|
||||
}
|
||||
|
||||
|
@ -244,13 +252,16 @@ impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> {
|
|||
}
|
||||
|
||||
let generator = substs.as_generator();
|
||||
Self::consider_assumption(
|
||||
Self::consider_implied_clause(
|
||||
ecx,
|
||||
goal,
|
||||
ty::Binder::dummy(
|
||||
tcx.mk_trait_ref(goal.predicate.def_id(), [self_ty, generator.resume_ty()]),
|
||||
)
|
||||
.to_predicate(tcx),
|
||||
// Technically, we need to check that the generator types are Sized,
|
||||
// but that's already proven by the generator being WF.
|
||||
[],
|
||||
)
|
||||
}
|
||||
|
||||
|
|
|
@ -90,15 +90,7 @@ enum ProjectionCandidate<'tcx> {
|
|||
/// From an "impl" (or a "pseudo-impl" returned by select)
|
||||
Select(Selection<'tcx>),
|
||||
|
||||
ImplTraitInTrait(ImplTraitInTraitCandidate<'tcx>),
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Debug)]
|
||||
enum ImplTraitInTraitCandidate<'tcx> {
|
||||
// The `impl Trait` from a trait function's default body
|
||||
Trait,
|
||||
// A concrete type provided from a trait's `impl Trait` from an impl
|
||||
Impl(ImplSourceUserDefinedData<'tcx, PredicateObligation<'tcx>>),
|
||||
ImplTraitInTrait(ImplSourceUserDefinedData<'tcx, PredicateObligation<'tcx>>),
|
||||
}
|
||||
|
||||
enum ProjectionCandidateSet<'tcx> {
|
||||
|
@ -1292,17 +1284,6 @@ fn assemble_candidate_for_impl_trait_in_trait<'cx, 'tcx>(
|
|||
let tcx = selcx.tcx();
|
||||
if tcx.def_kind(obligation.predicate.def_id) == DefKind::ImplTraitPlaceholder {
|
||||
let trait_fn_def_id = tcx.impl_trait_in_trait_parent(obligation.predicate.def_id);
|
||||
// If we are trying to project an RPITIT with trait's default `Self` parameter,
|
||||
// then we must be within a default trait body.
|
||||
if obligation.predicate.self_ty()
|
||||
== ty::InternalSubsts::identity_for_item(tcx, obligation.predicate.def_id).type_at(0)
|
||||
&& tcx.associated_item(trait_fn_def_id).defaultness(tcx).has_value()
|
||||
{
|
||||
candidate_set.push_candidate(ProjectionCandidate::ImplTraitInTrait(
|
||||
ImplTraitInTraitCandidate::Trait,
|
||||
));
|
||||
return;
|
||||
}
|
||||
|
||||
let trait_def_id = tcx.parent(trait_fn_def_id);
|
||||
let trait_substs =
|
||||
|
@ -1313,9 +1294,7 @@ fn assemble_candidate_for_impl_trait_in_trait<'cx, 'tcx>(
|
|||
let _ = selcx.infcx.commit_if_ok(|_| {
|
||||
match selcx.select(&obligation.with(tcx, trait_predicate)) {
|
||||
Ok(Some(super::ImplSource::UserDefined(data))) => {
|
||||
candidate_set.push_candidate(ProjectionCandidate::ImplTraitInTrait(
|
||||
ImplTraitInTraitCandidate::Impl(data),
|
||||
));
|
||||
candidate_set.push_candidate(ProjectionCandidate::ImplTraitInTrait(data));
|
||||
Ok(())
|
||||
}
|
||||
Ok(None) => {
|
||||
|
@ -1777,18 +1756,9 @@ fn confirm_candidate<'cx, 'tcx>(
|
|||
ProjectionCandidate::Select(impl_source) => {
|
||||
confirm_select_candidate(selcx, obligation, impl_source)
|
||||
}
|
||||
ProjectionCandidate::ImplTraitInTrait(ImplTraitInTraitCandidate::Impl(data)) => {
|
||||
ProjectionCandidate::ImplTraitInTrait(data) => {
|
||||
confirm_impl_trait_in_trait_candidate(selcx, obligation, data)
|
||||
}
|
||||
// If we're projecting an RPITIT for a default trait body, that's just
|
||||
// the same def-id, but as an opaque type (with regular RPIT semantics).
|
||||
ProjectionCandidate::ImplTraitInTrait(ImplTraitInTraitCandidate::Trait) => Progress {
|
||||
term: selcx
|
||||
.tcx()
|
||||
.mk_opaque(obligation.predicate.def_id, obligation.predicate.substs)
|
||||
.into(),
|
||||
obligations: vec![],
|
||||
},
|
||||
};
|
||||
|
||||
// When checking for cycle during evaluation, we compare predicates with
|
||||
|
|
|
@ -2148,12 +2148,13 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
|||
}))
|
||||
}
|
||||
|
||||
ty::Alias(..) | ty::Param(_) => None,
|
||||
ty::Alias(..) | ty::Param(_) | ty::Placeholder(..) => None,
|
||||
ty::Infer(ty::TyVar(_)) => Ambiguous,
|
||||
|
||||
ty::Placeholder(..)
|
||||
| ty::Bound(..)
|
||||
| ty::Infer(ty::FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_)) => {
|
||||
// We can make this an ICE if/once we actually instantiate the trait obligation.
|
||||
ty::Bound(..) => None,
|
||||
|
||||
ty::Infer(ty::FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_)) => {
|
||||
bug!("asked to assemble builtin bounds of unexpected type: {:?}", self_ty);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -193,7 +193,7 @@ fn layout_of_uncached<'tcx>(
|
|||
}
|
||||
|
||||
ty::Dynamic(_, _, ty::DynStar) => {
|
||||
let mut data = scalar_unit(Int(dl.ptr_sized_integer(), false));
|
||||
let mut data = scalar_unit(Pointer(AddressSpace::DATA));
|
||||
data.valid_range_mut().start = 0;
|
||||
let mut vtable = scalar_unit(Pointer(AddressSpace::DATA));
|
||||
vtable.valid_range_mut().start = 1;
|
||||
|
|
|
@ -1,8 +1,12 @@
|
|||
use rustc_data_structures::fx::FxIndexSet;
|
||||
use rustc_data_structures::fx::{FxHashSet, FxIndexSet};
|
||||
use rustc_hir as hir;
|
||||
use rustc_hir::def::DefKind;
|
||||
use rustc_index::bit_set::BitSet;
|
||||
#[cfg(not(bootstrap))]
|
||||
use rustc_middle::ty::ir::TypeVisitable;
|
||||
use rustc_middle::ty::{
|
||||
self, Binder, EarlyBinder, Predicate, PredicateKind, ToPredicate, Ty, TyCtxt,
|
||||
self, ir::TypeVisitor, Binder, EarlyBinder, Predicate, PredicateKind, ToPredicate, Ty, TyCtxt,
|
||||
TypeSuperVisitable,
|
||||
};
|
||||
use rustc_session::config::TraitSolver;
|
||||
use rustc_span::def_id::{DefId, CRATE_DEF_ID};
|
||||
|
@ -136,6 +140,19 @@ fn param_env(tcx: TyCtxt<'_>, def_id: DefId) -> ty::ParamEnv<'_> {
|
|||
predicates.extend(environment);
|
||||
}
|
||||
|
||||
if tcx.def_kind(def_id) == DefKind::AssocFn
|
||||
&& tcx.associated_item(def_id).container == ty::AssocItemContainer::TraitContainer
|
||||
{
|
||||
let sig = tcx.fn_sig(def_id).subst_identity();
|
||||
sig.visit_with(&mut ImplTraitInTraitFinder {
|
||||
tcx,
|
||||
fn_def_id: def_id,
|
||||
bound_vars: sig.bound_vars(),
|
||||
predicates: &mut predicates,
|
||||
seen: FxHashSet::default(),
|
||||
});
|
||||
}
|
||||
|
||||
let local_did = def_id.as_local();
|
||||
let hir_id = local_did.map(|def_id| tcx.hir().local_def_id_to_hir_id(def_id));
|
||||
|
||||
|
@ -222,6 +239,46 @@ fn param_env(tcx: TyCtxt<'_>, def_id: DefId) -> ty::ParamEnv<'_> {
|
|||
traits::normalize_param_env_or_error(tcx, unnormalized_env, cause)
|
||||
}
|
||||
|
||||
/// Walk through a function type, gathering all RPITITs and installing a
|
||||
/// `NormalizesTo(Projection(RPITIT) -> Opaque(RPITIT))` predicate into the
|
||||
/// predicates list. This allows us to observe that an RPITIT projects to
|
||||
/// its corresponding opaque within the body of a default-body trait method.
|
||||
struct ImplTraitInTraitFinder<'a, 'tcx> {
|
||||
tcx: TyCtxt<'tcx>,
|
||||
predicates: &'a mut Vec<Predicate<'tcx>>,
|
||||
fn_def_id: DefId,
|
||||
bound_vars: &'tcx ty::List<ty::BoundVariableKind>,
|
||||
seen: FxHashSet<DefId>,
|
||||
}
|
||||
|
||||
impl<'tcx> TypeVisitor<TyCtxt<'tcx>> for ImplTraitInTraitFinder<'_, 'tcx> {
|
||||
fn visit_ty(&mut self, ty: Ty<'tcx>) -> std::ops::ControlFlow<Self::BreakTy> {
|
||||
if let ty::Alias(ty::Projection, alias_ty) = *ty.kind()
|
||||
&& self.tcx.def_kind(alias_ty.def_id) == DefKind::ImplTraitPlaceholder
|
||||
&& self.tcx.impl_trait_in_trait_parent(alias_ty.def_id) == self.fn_def_id
|
||||
&& self.seen.insert(alias_ty.def_id)
|
||||
{
|
||||
self.predicates.push(
|
||||
ty::Binder::bind_with_vars(
|
||||
ty::ProjectionPredicate {
|
||||
projection_ty: alias_ty,
|
||||
term: self.tcx.mk_alias(ty::Opaque, alias_ty).into(),
|
||||
},
|
||||
self.bound_vars,
|
||||
)
|
||||
.to_predicate(self.tcx),
|
||||
);
|
||||
|
||||
for bound in self.tcx.item_bounds(alias_ty.def_id).subst_iter(self.tcx, alias_ty.substs)
|
||||
{
|
||||
bound.visit_with(self);
|
||||
}
|
||||
}
|
||||
|
||||
ty.super_visit_with(self)
|
||||
}
|
||||
}
|
||||
|
||||
/// Elaborate the environment.
|
||||
///
|
||||
/// Collect a list of `Predicate`'s used for building the `ParamEnv`. Adds `TypeWellFormedFromEnv`'s
|
||||
|
|
|
@ -26,11 +26,9 @@ pub enum DynKind {
Dyn,
/// A sized `dyn* Trait` object
///
/// These objects are represented as a `(data, vtable)` pair where `data` is a ptr-sized value
/// (often a pointer to the real object, but not necessarily) and `vtable` is a pointer to
/// the vtable for `dyn* Trait`. The representation is essentially the same as `&dyn Trait`
/// or similar, but the drop function included in the vtable is responsible for freeing the
/// underlying storage if needed. This allows a `dyn*` object to be treated agnostically with
/// These objects are represented as a `(data, vtable)` pair where `data` is a value of some
/// ptr-sized and ptr-aligned dynamically determined type `T` and `vtable` is a pointer to the
/// vtable of `impl T for Trait`. This allows a `dyn*` object to be treated agnostically with
/// respect to whether it points to a `Box<T>`, `Rc<T>`, etc.
DynStar,
}
@ -1,4 +1,8 @@
|
|||
use std::collections::VecDeque;
|
||||
use core::iter::Iterator;
|
||||
use std::{
|
||||
collections::{vec_deque, VecDeque},
|
||||
mem,
|
||||
};
|
||||
use test::{black_box, Bencher};
|
||||
|
||||
#[bench]
|
||||
|
@ -53,6 +57,146 @@ fn bench_try_fold(b: &mut Bencher) {
|
|||
b.iter(|| black_box(ring.iter().try_fold(0, |a, b| Some(a + b))))
|
||||
}
|
||||
|
||||
/// does the memory bookkeeping to reuse the buffer of the Vec between iterations.
|
||||
/// `setup` must not modify its argument's length or capacity. `g` must not move out of its argument.
|
||||
fn into_iter_helper<
|
||||
T: Copy,
|
||||
F: FnOnce(&mut VecDeque<T>),
|
||||
G: FnOnce(&mut vec_deque::IntoIter<T>),
|
||||
>(
|
||||
v: &mut Vec<T>,
|
||||
setup: F,
|
||||
g: G,
|
||||
) {
|
||||
let ptr = v.as_mut_ptr();
|
||||
let len = v.len();
|
||||
// ensure that the vec is full, to make sure that any wrapping from the deque doesn't
|
||||
// access uninitialized memory.
|
||||
assert_eq!(v.len(), v.capacity());
|
||||
|
||||
let mut deque = VecDeque::from(mem::take(v));
|
||||
setup(&mut deque);
|
||||
|
||||
let mut it = deque.into_iter();
|
||||
g(&mut it);
|
||||
|
||||
mem::forget(it);
|
||||
|
||||
// SAFETY: the provided functions are not allowed to modify the allocation, so the buffer is still alive.
|
||||
// len and capacity are accurate due to the above assertion.
|
||||
// All the elements in the buffer are still valid, because of `T: Copy` which implies `T: !Drop`.
|
||||
mem::forget(mem::replace(v, unsafe { Vec::from_raw_parts(ptr, len, len) }));
|
||||
}
|
||||
|
||||
#[bench]
|
||||
fn bench_into_iter(b: &mut Bencher) {
|
||||
let len = 1024;
|
||||
// we reuse this allocation for every run
|
||||
let mut vec: Vec<usize> = (0..len).collect();
|
||||
vec.shrink_to_fit();
|
||||
|
||||
b.iter(|| {
|
||||
let mut sum = 0;
|
||||
into_iter_helper(
|
||||
&mut vec,
|
||||
|_| {},
|
||||
|it| {
|
||||
for i in it {
|
||||
sum += i;
|
||||
}
|
||||
},
|
||||
);
|
||||
black_box(sum);
|
||||
|
||||
let mut sum = 0;
|
||||
// rotating a full deque doesn't move any memory.
|
||||
into_iter_helper(
|
||||
&mut vec,
|
||||
|d| d.rotate_left(len / 2),
|
||||
|it| {
|
||||
for i in it {
|
||||
sum += i;
|
||||
}
|
||||
},
|
||||
);
|
||||
black_box(sum);
|
||||
});
|
||||
}
|
||||
|
||||
#[bench]
|
||||
fn bench_into_iter_fold(b: &mut Bencher) {
|
||||
let len = 1024;
|
||||
|
||||
// because `fold` takes ownership of the iterator,
|
||||
// we can't prevent it from dropping the memory,
|
||||
// so we have to bite the bullet and reallocate
|
||||
// for every iteration.
|
||||
b.iter(|| {
|
||||
let deque: VecDeque<usize> = (0..len).collect();
|
||||
assert_eq!(deque.len(), deque.capacity());
|
||||
let sum = deque.into_iter().fold(0, |a, b| a + b);
|
||||
black_box(sum);
|
||||
|
||||
// rotating a full deque doesn't move any memory.
|
||||
let mut deque: VecDeque<usize> = (0..len).collect();
|
||||
assert_eq!(deque.len(), deque.capacity());
|
||||
deque.rotate_left(len / 2);
|
||||
let sum = deque.into_iter().fold(0, |a, b| a + b);
|
||||
black_box(sum);
|
||||
});
|
||||
}
|
||||
|
||||
#[bench]
|
||||
fn bench_into_iter_try_fold(b: &mut Bencher) {
|
||||
let len = 1024;
|
||||
// we reuse this allocation for every run
|
||||
let mut vec: Vec<usize> = (0..len).collect();
|
||||
vec.shrink_to_fit();
|
||||
|
||||
// Iterator::any uses Iterator::try_fold under the hood
|
||||
b.iter(|| {
|
||||
let mut b = false;
|
||||
into_iter_helper(&mut vec, |_| {}, |it| b = it.any(|i| i == len - 1));
|
||||
black_box(b);
|
||||
|
||||
into_iter_helper(&mut vec, |d| d.rotate_left(len / 2), |it| b = it.any(|i| i == len - 1));
|
||||
black_box(b);
|
||||
});
|
||||
}
|
||||
|
||||
#[bench]
|
||||
fn bench_into_iter_next_chunk(b: &mut Bencher) {
|
||||
let len = 1024;
|
||||
// we reuse this allocation for every run
|
||||
let mut vec: Vec<usize> = (0..len).collect();
|
||||
vec.shrink_to_fit();
|
||||
|
||||
b.iter(|| {
|
||||
let mut buf = [0; 64];
|
||||
into_iter_helper(
|
||||
&mut vec,
|
||||
|_| {},
|
||||
|it| {
|
||||
while let Ok(a) = it.next_chunk() {
|
||||
buf = a;
|
||||
}
|
||||
},
|
||||
);
|
||||
black_box(buf);
|
||||
|
||||
into_iter_helper(
|
||||
&mut vec,
|
||||
|d| d.rotate_left(len / 2),
|
||||
|it| {
|
||||
while let Ok(a) = it.next_chunk() {
|
||||
buf = a;
|
||||
}
|
||||
},
|
||||
);
|
||||
black_box(buf);
|
||||
});
|
||||
}
|
||||
|
||||
#[bench]
|
||||
fn bench_from_array_1000(b: &mut Bencher) {
|
||||
const N: usize = 1000;
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
use core::fmt;
|
||||
use core::iter::{FusedIterator, TrustedLen};
|
||||
use core::{array, fmt, mem::MaybeUninit, ops::Try, ptr};
|
||||
|
||||
use crate::alloc::{Allocator, Global};
|
||||
|
||||
|
@ -52,6 +52,126 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
|
|||
let len = self.inner.len();
|
||||
(len, Some(len))
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn advance_by(&mut self, n: usize) -> Result<(), usize> {
|
||||
if self.inner.len < n {
|
||||
let len = self.inner.len;
|
||||
self.inner.clear();
|
||||
Err(len)
|
||||
} else {
|
||||
self.inner.drain(..n);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn count(self) -> usize {
|
||||
self.inner.len
|
||||
}
|
||||
|
||||
fn try_fold<B, F, R>(&mut self, mut init: B, mut f: F) -> R
|
||||
where
|
||||
F: FnMut(B, Self::Item) -> R,
|
||||
R: Try<Output = B>,
|
||||
{
|
||||
struct Guard<'a, T, A: Allocator> {
|
||||
deque: &'a mut VecDeque<T, A>,
|
||||
// `consumed <= deque.len` always holds.
|
||||
consumed: usize,
|
||||
}
|
||||
|
||||
impl<'a, T, A: Allocator> Drop for Guard<'a, T, A> {
|
||||
fn drop(&mut self) {
|
||||
self.deque.len -= self.consumed;
|
||||
self.deque.head = self.deque.to_physical_idx(self.consumed);
|
||||
}
|
||||
}
|
||||
|
||||
let mut guard = Guard { deque: &mut self.inner, consumed: 0 };
|
||||
|
||||
let (head, tail) = guard.deque.as_slices();
|
||||
|
||||
init = head
|
||||
.iter()
|
||||
.map(|elem| {
|
||||
guard.consumed += 1;
|
||||
// SAFETY: Because we incremented `guard.consumed`, the
|
||||
// deque effectively forgot the element, so we can take
|
||||
// ownership
|
||||
unsafe { ptr::read(elem) }
|
||||
})
|
||||
.try_fold(init, &mut f)?;
|
||||
|
||||
tail.iter()
|
||||
.map(|elem| {
|
||||
guard.consumed += 1;
|
||||
// SAFETY: Same as above.
|
||||
unsafe { ptr::read(elem) }
|
||||
})
|
||||
.try_fold(init, &mut f)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn fold<B, F>(mut self, init: B, mut f: F) -> B
|
||||
where
|
||||
F: FnMut(B, Self::Item) -> B,
|
||||
{
|
||||
match self.try_fold(init, |b, item| Ok::<B, !>(f(b, item))) {
|
||||
Ok(b) => b,
|
||||
Err(e) => match e {},
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn last(mut self) -> Option<Self::Item> {
|
||||
self.inner.pop_back()
|
||||
}
|
||||
|
||||
fn next_chunk<const N: usize>(
|
||||
&mut self,
|
||||
) -> Result<[Self::Item; N], array::IntoIter<Self::Item, N>> {
|
||||
let mut raw_arr = MaybeUninit::uninit_array();
|
||||
let raw_arr_ptr = raw_arr.as_mut_ptr().cast();
|
||||
let (head, tail) = self.inner.as_slices();
|
||||
|
||||
if head.len() >= N {
|
||||
// SAFETY: By manually adjusting the head and length of the deque, we effectively
|
||||
// make it forget the first `N` elements, so taking ownership of them is safe.
|
||||
unsafe { ptr::copy_nonoverlapping(head.as_ptr(), raw_arr_ptr, N) };
|
||||
self.inner.head = self.inner.to_physical_idx(N);
|
||||
self.inner.len -= N;
|
||||
// SAFETY: We initialized the entire array with items from `head`
|
||||
return Ok(unsafe { raw_arr.transpose().assume_init() });
|
||||
}
|
||||
|
||||
// SAFETY: Same argument as above.
|
||||
unsafe { ptr::copy_nonoverlapping(head.as_ptr(), raw_arr_ptr, head.len()) };
|
||||
let remaining = N - head.len();
|
||||
|
||||
if tail.len() >= remaining {
|
||||
// SAFETY: Same argument as above.
|
||||
unsafe {
|
||||
ptr::copy_nonoverlapping(tail.as_ptr(), raw_arr_ptr.add(head.len()), remaining)
|
||||
};
|
||||
self.inner.head = self.inner.to_physical_idx(N);
|
||||
self.inner.len -= N;
|
||||
// SAFETY: We initialized the entire array with items from `head` and `tail`
|
||||
Ok(unsafe { raw_arr.transpose().assume_init() })
|
||||
} else {
|
||||
// SAFETY: Same argument as above.
|
||||
unsafe {
|
||||
ptr::copy_nonoverlapping(tail.as_ptr(), raw_arr_ptr.add(head.len()), tail.len())
|
||||
};
|
||||
let init = head.len() + tail.len();
|
||||
// We completely drained all the deques elements.
|
||||
self.inner.head = 0;
|
||||
self.inner.len = 0;
|
||||
// SAFETY: We copied all elements from both slices to the beginning of the array, so
|
||||
// the given range is initialized.
|
||||
Err(unsafe { array::IntoIter::new_unchecked(raw_arr, 0..init) })
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
|
@ -60,10 +180,73 @@ impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
|
|||
fn next_back(&mut self) -> Option<T> {
|
||||
self.inner.pop_back()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn advance_back_by(&mut self, n: usize) -> Result<(), usize> {
|
||||
let len = self.inner.len;
|
||||
if len >= n {
|
||||
self.inner.truncate(len - n);
|
||||
Ok(())
|
||||
} else {
|
||||
self.inner.clear();
|
||||
Err(len)
|
||||
}
|
||||
}
|
||||
|
||||
fn try_rfold<B, F, R>(&mut self, mut init: B, mut f: F) -> R
|
||||
where
|
||||
F: FnMut(B, Self::Item) -> R,
|
||||
R: Try<Output = B>,
|
||||
{
|
||||
struct Guard<'a, T, A: Allocator> {
|
||||
deque: &'a mut VecDeque<T, A>,
|
||||
// `consumed <= deque.len` always holds.
|
||||
consumed: usize,
|
||||
}
|
||||
|
||||
impl<'a, T, A: Allocator> Drop for Guard<'a, T, A> {
|
||||
fn drop(&mut self) {
|
||||
self.deque.len -= self.consumed;
|
||||
}
|
||||
}
|
||||
|
||||
let mut guard = Guard { deque: &mut self.inner, consumed: 0 };
|
||||
|
||||
let (head, tail) = guard.deque.as_slices();
|
||||
|
||||
init = tail
|
||||
.iter()
|
||||
.map(|elem| {
|
||||
guard.consumed += 1;
|
||||
// SAFETY: See `try_fold`'s safety comment.
|
||||
unsafe { ptr::read(elem) }
|
||||
})
|
||||
.try_rfold(init, &mut f)?;
|
||||
|
||||
head.iter()
|
||||
.map(|elem| {
|
||||
guard.consumed += 1;
|
||||
// SAFETY: Same as above.
|
||||
unsafe { ptr::read(elem) }
|
||||
})
|
||||
.try_rfold(init, &mut f)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn rfold<B, F>(mut self, init: B, mut f: F) -> B
|
||||
where
|
||||
F: FnMut(B, Self::Item) -> B,
|
||||
{
|
||||
match self.try_rfold(init, |b, item| Ok::<B, !>(f(b, item))) {
|
||||
Ok(b) => b,
|
||||
Err(e) => match e {},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
impl<T, A: Allocator> ExactSizeIterator for IntoIter<T, A> {
|
||||
#[inline]
|
||||
fn is_empty(&self) -> bool {
|
||||
self.inner.is_empty()
|
||||
}
|
||||
|
|
|
@ -525,8 +525,6 @@ impl<T, E> Result<T, E> {
|
|||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// Basic usage:
|
||||
///
|
||||
/// ```
|
||||
/// let x: Result<i32, &str> = Ok(-3);
|
||||
/// assert_eq!(x.is_ok(), true);
|
||||
|
@ -572,8 +570,6 @@ impl<T, E> Result<T, E> {
|
|||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// Basic usage:
|
||||
///
|
||||
/// ```
|
||||
/// let x: Result<i32, &str> = Ok(-3);
|
||||
/// assert_eq!(x.is_err(), false);
|
||||
|
@ -627,8 +623,6 @@ impl<T, E> Result<T, E> {
|
|||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// Basic usage:
|
||||
///
|
||||
/// ```
|
||||
/// let x: Result<u32, &str> = Ok(2);
|
||||
/// assert_eq!(x.ok(), Some(2));
|
||||
|
@ -658,8 +652,6 @@ impl<T, E> Result<T, E> {
|
|||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// Basic usage:
|
||||
///
|
||||
/// ```
|
||||
/// let x: Result<u32, &str> = Ok(2);
|
||||
/// assert_eq!(x.err(), None);
|
||||
|
@ -693,8 +685,6 @@ impl<T, E> Result<T, E> {
|
|||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// Basic usage:
|
||||
///
|
||||
/// ```
|
||||
/// let x: Result<u32, &str> = Ok(2);
|
||||
/// assert_eq!(x.as_ref(), Ok(&2));
|
||||
|
@ -716,8 +706,6 @@ impl<T, E> Result<T, E> {
|
|||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// Basic usage:
|
||||
///
|
||||
/// ```
|
||||
/// fn mutate(r: &mut Result<i32, i32>) {
|
||||
/// match r.as_mut() {
|
||||
|
@ -812,8 +800,6 @@ impl<T, E> Result<T, E> {
|
|||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// Basic usage:
|
||||
///
|
||||
/// ```
|
||||
/// let k = 21;
|
||||
///
|
||||
|
@ -841,8 +827,6 @@ impl<T, E> Result<T, E> {
|
|||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// Basic usage:
|
||||
///
|
||||
/// ```
|
||||
/// fn stringify(x: u32) -> String { format!("error code: {x}") }
|
||||
///
|
||||
|
@ -968,8 +952,6 @@ impl<T, E> Result<T, E> {
|
|||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// Basic usage:
|
||||
///
|
||||
/// ```
|
||||
/// let x: Result<u32, &str> = Ok(7);
|
||||
/// assert_eq!(x.iter().next(), Some(&7));
|
||||
|
@ -989,8 +971,6 @@ impl<T, E> Result<T, E> {
|
|||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// Basic usage:
|
||||
///
|
||||
/// ```
|
||||
/// let mut x: Result<u32, &str> = Ok(7);
|
||||
/// match x.iter_mut().next() {
|
||||
|
@ -1031,8 +1011,6 @@ impl<T, E> Result<T, E> {
|
|||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// Basic usage:
|
||||
///
|
||||
/// ```should_panic
|
||||
/// let x: Result<u32, &str> = Err("emergency failure");
|
||||
/// x.expect("Testing expect"); // panics with `Testing expect: emergency failure`
|
||||
|
@ -1160,8 +1138,6 @@ impl<T, E> Result<T, E> {
|
|||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// Basic usage:
|
||||
///
|
||||
/// ```should_panic
|
||||
/// let x: Result<u32, &str> = Ok(10);
|
||||
/// x.expect_err("Testing expect_err"); // panics with `Testing expect_err: 10`
|
||||
|
@ -1222,8 +1198,6 @@ impl<T, E> Result<T, E> {
|
|||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// Basic usage:
|
||||
///
|
||||
/// ```
|
||||
/// # #![feature(never_type)]
|
||||
/// # #![feature(unwrap_infallible)]
|
||||
|
@ -1259,8 +1233,6 @@ impl<T, E> Result<T, E> {
|
|||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// Basic usage:
|
||||
///
|
||||
/// ```
|
||||
/// # #![feature(never_type)]
|
||||
/// # #![feature(unwrap_infallible)]
|
||||
|
@ -1298,8 +1270,6 @@ impl<T, E> Result<T, E> {
|
|||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// Basic usage:
|
||||
///
|
||||
/// ```
|
||||
/// let x: Result<u32, &str> = Ok(2);
|
||||
/// let y: Result<&str, &str> = Err("late error");
|
||||
|
@ -1383,8 +1353,6 @@ impl<T, E> Result<T, E> {
|
|||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// Basic usage:
|
||||
///
|
||||
/// ```
|
||||
/// let x: Result<u32, &str> = Ok(2);
|
||||
/// let y: Result<u32, &str> = Err("late error");
|
||||
|
@ -1426,8 +1394,6 @@ impl<T, E> Result<T, E> {
|
|||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// Basic usage:
|
||||
///
|
||||
/// ```
|
||||
/// fn sq(x: u32) -> Result<u32, u32> { Ok(x * x) }
|
||||
/// fn err(x: u32) -> Result<u32, u32> { Err(x) }
|
||||
|
@ -1456,8 +1422,6 @@ impl<T, E> Result<T, E> {
|
|||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// Basic usage:
|
||||
///
|
||||
/// ```
|
||||
/// let default = 2;
|
||||
/// let x: Result<u32, &str> = Ok(9);
|
||||
|
@ -1487,8 +1451,6 @@ impl<T, E> Result<T, E> {
|
|||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// Basic usage:
|
||||
///
|
||||
/// ```
|
||||
/// fn count(x: &str) -> usize { x.len() }
|
||||
///
|
||||
|
@ -1752,8 +1714,6 @@ impl<T, E> Result<Result<T, E>, E> {
|
|||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// Basic usage:
|
||||
///
|
||||
/// ```
|
||||
/// #![feature(result_flattening)]
|
||||
/// let x: Result<Result<&'static str, u32>, u32> = Ok(Ok("hello"));
|
||||
|
@ -1842,8 +1802,6 @@ impl<T, E> IntoIterator for Result<T, E> {
|
|||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// Basic usage:
|
||||
///
|
||||
/// ```
|
||||
/// let x: Result<u32, &str> = Ok(5);
|
||||
/// let v: Vec<u32> = x.into_iter().collect();
|
||||
|
|
|
@ -2730,8 +2730,10 @@ impl<T> [T] {
/// This reordering has the additional property that any value at position `i < index` will be
/// less than or equal to any value at a position `j > index`. Additionally, this reordering is
/// unstable (i.e. any number of equal elements may end up at position `index`), in-place
/// (i.e. does not allocate), and *O*(*n*) worst-case. This function is also known as "kth
/// element" in other libraries. It returns a triplet of the following from the reordered slice:
/// (i.e. does not allocate), and *O*(*n*) on average. The worst-case performance is *O*(*n* log *n*).
/// This function is also known as "kth element" in other libraries.
///
/// It returns a triplet of the following from the reordered slice:
/// the subslice prior to `index`, the element at `index`, and the subslice after `index`;
/// accordingly, the values in those two subslices will respectively all be less-than-or-equal-to
/// and greater-than-or-equal-to the value of the element at `index`.
@ -2777,8 +2779,11 @@ impl<T> [T] {
/// This reordering has the additional property that any value at position `i < index` will be
/// less than or equal to any value at a position `j > index` using the comparator function.
/// Additionally, this reordering is unstable (i.e. any number of equal elements may end up at
/// position `index`), in-place (i.e. does not allocate), and *O*(*n*) worst-case. This function
/// is also known as "kth element" in other libraries. It returns a triplet of the following from
/// position `index`), in-place (i.e. does not allocate), and *O*(*n*) on average.
/// The worst-case performance is *O*(*n* log *n*). This function is also known as
/// "kth element" in other libraries.
///
/// It returns a triplet of the following from
/// the slice reordered according to the provided comparator function: the subslice prior to
/// `index`, the element at `index`, and the subslice after `index`; accordingly, the values in
/// those two subslices will respectively all be less-than-or-equal-to and greater-than-or-equal-to
@ -2829,8 +2834,11 @@ impl<T> [T] {
/// This reordering has the additional property that any value at position `i < index` will be
/// less than or equal to any value at a position `j > index` using the key extraction function.
/// Additionally, this reordering is unstable (i.e. any number of equal elements may end up at
/// position `index`), in-place (i.e. does not allocate), and *O*(*n*) worst-case. This function
/// is also known as "kth element" in other libraries. It returns a triplet of the following from
/// position `index`), in-place (i.e. does not allocate), and *O*(*n*) on average.
/// The worst-case performance is *O*(*n* log *n*).
/// This function is also known as "kth element" in other libraries.
///
/// It returns a triplet of the following from
/// the slice reordered according to the provided key extraction function: the subslice prior to
/// `index`, the element at `index`, and the subslice after `index`; accordingly, the values in
/// those two subslices will respectively all be less-than-or-equal-to and greater-than-or-equal-to
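A minimal usage sketch (not part of the commit) of the behavior these docs describe, using the stable `select_nth_unstable` API:

fn main() {
    let mut v = [-5i32, 4, 1, -3, 2];
    // Reorder so that the element with sorted rank 2 lands at index 2.
    let (lesser, median, greater) = v.select_nth_unstable(2);
    assert_eq!(*median, 1);
    // Everything before the pivot is <= it, everything after is >= it.
    assert!(lesser.iter().all(|&x| x <= 1));
    assert!(greater.iter().all(|&x| x >= 1));
}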
@ -284,6 +284,10 @@ impl<'a> Read for &'a FileDesc {
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
(**self).read(buf)
}

fn read_buf(&mut self, cursor: BorrowedCursor<'_>) -> io::Result<()> {
(**self).read_buf(cursor)
}
}

impl AsInner<OwnedFd> for FileDesc {
@ -124,8 +124,10 @@
//!
//! ## Stack size
//!
//! The default stack size is platform-dependent and subject to change. Currently it is 2MB on all
//! Tier-1 platforms. There are two ways to manually specify the stack size for spawned threads:
//! The default stack size is platform-dependent and subject to change.
//! Currently, it is 2 MiB on all Tier-1 platforms.
//!
//! There are two ways to manually specify the stack size for spawned threads:
//!
//! * Build the thread with [`Builder`] and pass the desired stack size to [`Builder::stack_size`].
//! * Set the `RUST_MIN_STACK` environment variable to an integer representing the desired stack
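A minimal sketch (not part of the commit) of the first option listed above, using the stable `std::thread::Builder` API; the 4 MiB size is an arbitrary example value:

use std::thread;

fn main() {
    // Spawn a thread with an explicit 4 MiB stack instead of the platform default.
    let builder = thread::Builder::new().stack_size(4 * 1024 * 1024);
    let handle = builder.spawn(|| "ran with a larger stack").unwrap();
    println!("{}", handle.join().unwrap());
}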
@ -1315,15 +1315,6 @@ impl Config {
|
|||
} else {
|
||||
RustfmtState::Unavailable
|
||||
};
|
||||
} else {
|
||||
// If using a system toolchain for bootstrapping, see if that has rustfmt available.
|
||||
let host = config.build;
|
||||
let rustfmt_path = config.initial_rustc.with_file_name(exe("rustfmt", host));
|
||||
let bin_root = config.out.join(host.triple).join("stage0");
|
||||
if !rustfmt_path.starts_with(&bin_root) {
|
||||
// Using a system-provided toolchain; we shouldn't download rustfmt.
|
||||
*config.initial_rustfmt.borrow_mut() = RustfmtState::SystemToolchain(rustfmt_path);
|
||||
}
|
||||
}
|
||||
|
||||
// Now that we've reached the end of our configuration, infer the
|
||||
|
|
|
@ -29,6 +29,7 @@ pub enum Profile {
|
|||
static SETTINGS_HASHES: &[&str] = &[
|
||||
"ea67e259dedf60d4429b6c349a564ffcd1563cf41c920a856d1f5b16b4701ac8",
|
||||
"56e7bf011c71c5d81e0bf42e84938111847a810eee69d906bba494ea90b51922",
|
||||
"af1b5efe196aed007577899db9dae15d6dbc923d6fa42fa0934e68617ba9bbe0",
|
||||
];
|
||||
static VSCODE_SETTINGS: &str = include_str!("../etc/vscode_settings.json");
|
||||
|
||||
|
|
|
@ -435,6 +435,10 @@ impl Step for Rustfmt {
|
|||
&[],
|
||||
);
|
||||
|
||||
if !builder.fail_fast {
|
||||
cargo.arg("--no-fail-fast");
|
||||
}
|
||||
|
||||
let dir = testdir(builder, compiler.host);
|
||||
t!(fs::create_dir_all(&dir));
|
||||
cargo.env("RUSTFMT_TEST_DIR", dir);
|
||||
|
@ -615,6 +619,10 @@ impl Step for Miri {
|
|||
);
|
||||
cargo.add_rustc_lib_path(builder, compiler);
|
||||
|
||||
if !builder.fail_fast {
|
||||
cargo.arg("--no-fail-fast");
|
||||
}
|
||||
|
||||
// miri tests need to know about the stage sysroot
|
||||
cargo.env("MIRI_SYSROOT", &miri_sysroot);
|
||||
cargo.env("MIRI_HOST_SYSROOT", sysroot);
|
||||
|
@ -746,6 +754,10 @@ impl Step for Clippy {
|
|||
&[],
|
||||
);
|
||||
|
||||
if !builder.fail_fast {
|
||||
cargo.arg("--no-fail-fast");
|
||||
}
|
||||
|
||||
cargo.env("RUSTC_TEST_SUITE", builder.rustc(compiler));
|
||||
cargo.env("RUSTC_LIB_PATH", builder.rustc_libdir(compiler));
|
||||
let host_libs = builder.stage_out(compiler, Mode::ToolRustc).join(builder.cargo_dir());
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
{
|
||||
"rust-analyzer.check.invocationLocation": "root",
|
||||
"rust-analyzer.check.invocationStrategy": "once",
|
||||
"rust-analyzer.checkOnSave.overrideCommand": [
|
||||
"rust-analyzer.check.overrideCommand": [
|
||||
"python3",
|
||||
"x.py",
|
||||
"check",
|
||||
|
@ -23,6 +23,6 @@
|
|||
"check",
|
||||
"--json-output"
|
||||
],
|
||||
"rust-analyzer.cargo.sysroot": "./build/host/stage0-sysroot",
|
||||
"rust-analyzer.cargo.sysrootSrc": "./library",
|
||||
"rust-analyzer.rustc.source": "./Cargo.toml"
|
||||
}
|
||||
|
|
|
@ -287,6 +287,16 @@ impl<'a, 'tcx> DocFolder for CacheBuilder<'a, 'tcx> {
|
|||
} else {
|
||||
let last = self.cache.parent_stack.last().expect("parent_stack is empty 2");
|
||||
let did = match &*last {
|
||||
ParentStackItem::Impl {
|
||||
// impl Trait for &T { fn method(self); }
|
||||
//
|
||||
// When generating a function index with the above shape, we want it
|
||||
// associated with `T`, not with the primitive reference type. It should
|
||||
// show up as `T::method`, rather than `reference::method`, in the search
|
||||
// results page.
|
||||
for_: clean::Type::BorrowedRef { type_, .. },
|
||||
..
|
||||
} => type_.def_id(&self.cache),
|
||||
ParentStackItem::Impl { for_, .. } => for_.def_id(&self.cache),
|
||||
ParentStackItem::Type(item_id) => item_id.as_def_id(),
|
||||
};
|
||||
|
|
|
@ -29,12 +29,12 @@ use rustc_data_structures::fx::FxHashMap;
|
|||
use rustc_hir::def_id::DefId;
|
||||
use rustc_middle::ty::TyCtxt;
|
||||
pub(crate) use rustc_resolve::rustdoc::main_body_opts;
|
||||
use rustc_resolve::rustdoc::may_be_doc_link;
|
||||
use rustc_span::edition::Edition;
|
||||
use rustc_span::{Span, Symbol};
|
||||
|
||||
use once_cell::sync::Lazy;
|
||||
use std::borrow::Cow;
|
||||
use std::cell::RefCell;
|
||||
use std::collections::VecDeque;
|
||||
use std::default::Default;
|
||||
use std::fmt::Write;
|
||||
|
@ -1226,14 +1226,12 @@ pub(crate) struct MarkdownLink {
|
|||
|
||||
pub(crate) fn markdown_links<R>(
|
||||
md: &str,
|
||||
filter_map: impl Fn(MarkdownLink) -> Option<R>,
|
||||
preprocess_link: impl Fn(MarkdownLink) -> Option<R>,
|
||||
) -> Vec<R> {
|
||||
if md.is_empty() {
|
||||
return vec![];
|
||||
}
|
||||
|
||||
let links = RefCell::new(vec![]);
|
||||
|
||||
// FIXME: remove this function once pulldown_cmark can provide spans for link definitions.
|
||||
let locate = |s: &str, fallback: Range<usize>| unsafe {
|
||||
let s_start = s.as_ptr();
|
||||
|
@ -1265,46 +1263,23 @@ pub(crate) fn markdown_links<R>(
|
|||
}
|
||||
};
|
||||
|
||||
let mut push = |link: BrokenLink<'_>| {
|
||||
let span = span_for_link(&link.reference, link.span);
|
||||
filter_map(MarkdownLink {
|
||||
kind: LinkType::ShortcutUnknown,
|
||||
link: link.reference.to_string(),
|
||||
range: span,
|
||||
})
|
||||
.map(|link| links.borrow_mut().push(link));
|
||||
None
|
||||
};
|
||||
let p = Parser::new_with_broken_link_callback(md, main_body_opts(), Some(&mut push))
|
||||
.into_offset_iter();
|
||||
|
||||
// There's no need to thread an IdMap through to here because
|
||||
// the IDs generated aren't going to be emitted anywhere.
|
||||
let mut ids = IdMap::new();
|
||||
let iter = Footnotes::new(HeadingLinks::new(p, None, &mut ids, HeadingOffset::H1));
|
||||
|
||||
for ev in iter {
|
||||
if let Event::Start(Tag::Link(
|
||||
// `<>` links cannot be intra-doc links so we skip them.
|
||||
kind @ (LinkType::Inline
|
||||
| LinkType::Reference
|
||||
| LinkType::ReferenceUnknown
|
||||
| LinkType::Collapsed
|
||||
| LinkType::CollapsedUnknown
|
||||
| LinkType::Shortcut
|
||||
| LinkType::ShortcutUnknown),
|
||||
dest,
|
||||
_,
|
||||
)) = ev.0
|
||||
{
|
||||
debug!("found link: {dest}");
|
||||
let span = span_for_link(&dest, ev.1);
|
||||
filter_map(MarkdownLink { kind, link: dest.into_string(), range: span })
|
||||
.map(|link| links.borrow_mut().push(link));
|
||||
Parser::new_with_broken_link_callback(
|
||||
md,
|
||||
main_body_opts(),
|
||||
Some(&mut |link: BrokenLink<'_>| Some((link.reference, "".into()))),
|
||||
)
|
||||
.into_offset_iter()
|
||||
.filter_map(|(event, span)| match event {
|
||||
Event::Start(Tag::Link(link_type, dest, _)) if may_be_doc_link(link_type) => {
|
||||
preprocess_link(MarkdownLink {
|
||||
kind: link_type,
|
||||
range: span_for_link(&dest, span),
|
||||
link: dest.into_string(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
links.into_inner()
|
||||
_ => None,
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
|
|
|
@ -39,10 +39,10 @@ use crate::html::{highlight, static_files};
|
|||
use askama::Template;
|
||||
use itertools::Itertools;
|
||||
|
||||
const ITEM_TABLE_OPEN: &str = "<div class=\"item-table\">";
|
||||
const ITEM_TABLE_CLOSE: &str = "</div>";
|
||||
const ITEM_TABLE_ROW_OPEN: &str = "<div class=\"item-row\">";
|
||||
const ITEM_TABLE_ROW_CLOSE: &str = "</div>";
|
||||
const ITEM_TABLE_OPEN: &str = "<ul class=\"item-table\">";
|
||||
const ITEM_TABLE_CLOSE: &str = "</ul>";
|
||||
const ITEM_TABLE_ROW_OPEN: &str = "<li>";
|
||||
const ITEM_TABLE_ROW_CLOSE: &str = "</li>";
|
||||
|
||||
// A component in a `use` path, like `string` in std::string::ToString
|
||||
struct PathComponent {
|
||||
|
@ -338,14 +338,14 @@ fn item_module(w: &mut Buffer, cx: &mut Context<'_>, item: &clean::Item, items:
|
|||
match *src {
|
||||
Some(src) => write!(
|
||||
w,
|
||||
"<div class=\"item-left\"><code>{}extern crate {} as {};",
|
||||
"<div class=\"item-name\"><code>{}extern crate {} as {};",
|
||||
visibility_print_with_space(myitem.visibility(tcx), myitem.item_id, cx),
|
||||
anchor(myitem.item_id.expect_def_id(), src, cx),
|
||||
myitem.name.unwrap(),
|
||||
),
|
||||
None => write!(
|
||||
w,
|
||||
"<div class=\"item-left\"><code>{}extern crate {};",
|
||||
"<div class=\"item-name\"><code>{}extern crate {};",
|
||||
visibility_print_with_space(myitem.visibility(tcx), myitem.item_id, cx),
|
||||
anchor(myitem.item_id.expect_def_id(), myitem.name.unwrap(), cx),
|
||||
),
|
||||
|
@ -384,11 +384,11 @@ fn item_module(w: &mut Buffer, cx: &mut Context<'_>, item: &clean::Item, items:
|
|||
let (stab_tags_before, stab_tags_after) = if stab_tags.is_empty() {
|
||||
("", "")
|
||||
} else {
|
||||
("<div class=\"item-right docblock-short\">", "</div>")
|
||||
("<div class=\"desc docblock-short\">", "</div>")
|
||||
};
|
||||
write!(
|
||||
w,
|
||||
"<div class=\"item-left\"{id}>\
|
||||
"<div class=\"item-name\"{id}>\
|
||||
<code>{vis}{imp}</code>\
|
||||
</div>\
|
||||
{stab_tags_before}{stab_tags}{stab_tags_after}",
|
||||
|
@ -426,11 +426,11 @@ fn item_module(w: &mut Buffer, cx: &mut Context<'_>, item: &clean::Item, items:
|
|||
let (docs_before, docs_after) = if docs.is_empty() {
|
||||
("", "")
|
||||
} else {
|
||||
("<div class=\"item-right docblock-short\">", "</div>")
|
||||
("<div class=\"desc docblock-short\">", "</div>")
|
||||
};
|
||||
write!(
|
||||
w,
|
||||
"<div class=\"item-left\">\
|
||||
"<div class=\"item-name\">\
|
||||
<a class=\"{class}\" href=\"{href}\" title=\"{title}\">{name}</a>\
|
||||
{visibility_emoji}\
|
||||
{unsafety_flag}\
|
||||
|
|
|
@ -201,7 +201,7 @@ h1, h2, h3, h4, h5, h6,
|
|||
.mobile-topbar,
|
||||
.search-input,
|
||||
.search-results .result-name,
|
||||
.item-left > a,
|
||||
.item-name > a,
|
||||
.out-of-band,
|
||||
span.since,
|
||||
a.srclink,
|
||||
|
@ -750,14 +750,16 @@ table,
|
|||
|
||||
.item-table {
|
||||
display: table;
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
}
|
||||
.item-row {
|
||||
.item-table > li {
|
||||
display: table-row;
|
||||
}
|
||||
.item-left, .item-right {
|
||||
.item-table > li > div {
|
||||
display: table-cell;
|
||||
}
|
||||
.item-left {
|
||||
.item-table > li > .item-name {
|
||||
padding-right: 1.25rem;
|
||||
}
|
||||
|
||||
|
@ -962,7 +964,7 @@ so that we can apply CSS-filters to change the arrow color in themes */
|
|||
padding: 3px;
|
||||
margin-bottom: 5px;
|
||||
}
|
||||
.item-left .stab {
|
||||
.item-name .stab {
|
||||
margin-left: 0.3125em;
|
||||
}
|
||||
.stab {
|
||||
|
@ -1695,7 +1697,7 @@ in storage.js
|
|||
}
|
||||
|
||||
/* Display an alternating layout on tablets and phones */
|
||||
.item-table, .item-row, .item-left, .item-right,
|
||||
.item-table, .item-row, .item-table > li, .item-table > li > div,
|
||||
.search-results > a, .search-results > a > div {
|
||||
display: block;
|
||||
}
|
||||
|
@ -1704,7 +1706,7 @@ in storage.js
|
|||
.search-results > a {
|
||||
padding: 5px 0px;
|
||||
}
|
||||
.search-results > a > div.desc, .item-right {
|
||||
.search-results > a > div.desc, .item-table > li > div.desc {
|
||||
padding-left: 2em;
|
||||
}
|
||||
|
||||
|
|
|
@ -38,7 +38,7 @@ impl JsonRenderer<'_> {
|
|||
Some(UrlFragment::UserWritten(_)) | None => *page_id,
|
||||
};
|
||||
|
||||
(link.clone(), from_item_id(id.into(), self.tcx))
|
||||
(link.clone(), id_from_item_default(id.into(), self.tcx))
|
||||
})
|
||||
.collect();
|
||||
let docs = item.attrs.collapsed_doc_value();
|
||||
|
@ -50,7 +50,8 @@ impl JsonRenderer<'_> {
|
|||
.collect();
|
||||
let span = item.span(self.tcx);
|
||||
let visibility = item.visibility(self.tcx);
|
||||
let clean::Item { name, attrs: _, kind: _, item_id, cfg: _, .. } = item;
|
||||
let clean::Item { name, item_id, .. } = item;
|
||||
let id = id_from_item(&item, self.tcx);
|
||||
let inner = match *item.kind {
|
||||
clean::KeywordItem => return None,
|
||||
clean::StrippedItem(ref inner) => {
|
||||
|
@ -69,7 +70,7 @@ impl JsonRenderer<'_> {
|
|||
_ => from_clean_item(item, self.tcx),
|
||||
};
|
||||
Some(Item {
|
||||
id: from_item_id_with_name(item_id, self.tcx, name),
|
||||
id,
|
||||
crate_id: item_id.krate().as_u32(),
|
||||
name: name.map(|sym| sym.to_string()),
|
||||
span: span.and_then(|span| self.convert_span(span)),
|
||||
|
@ -107,7 +108,7 @@ impl JsonRenderer<'_> {
|
|||
Some(ty::Visibility::Public) => Visibility::Public,
|
||||
Some(ty::Visibility::Restricted(did)) if did.is_crate_root() => Visibility::Crate,
|
||||
Some(ty::Visibility::Restricted(did)) => Visibility::Restricted {
|
||||
parent: from_item_id(did.into(), self.tcx),
|
||||
parent: id_from_item_default(did.into(), self.tcx),
|
||||
path: self.tcx.def_path(did).to_string_no_crate_verbose(),
|
||||
},
|
||||
}
|
||||
|
@ -204,21 +205,42 @@ impl FromWithTcx<clean::TypeBindingKind> for TypeBindingKind {
|
|||
}
|
||||
}
|
||||
|
||||
/// It generates an ID as follows:
|
||||
///
|
||||
/// `CRATE_ID:ITEM_ID[:NAME_ID]` (if there is no name, NAME_ID is not generated).
|
||||
pub(crate) fn from_item_id(item_id: ItemId, tcx: TyCtxt<'_>) -> Id {
|
||||
from_item_id_with_name(item_id, tcx, None)
|
||||
#[inline]
|
||||
pub(crate) fn id_from_item_default(item_id: ItemId, tcx: TyCtxt<'_>) -> Id {
|
||||
id_from_item_inner(item_id, tcx, None, None)
|
||||
}
|
||||
|
||||
// FIXME: this function (and appending the name at the end of the ID) should be removed when
|
||||
// reexports are not inlined anymore for json format. It should be done in #93518.
|
||||
pub(crate) fn from_item_id_with_name(item_id: ItemId, tcx: TyCtxt<'_>, name: Option<Symbol>) -> Id {
|
||||
struct DisplayDefId<'a>(DefId, TyCtxt<'a>, Option<Symbol>);
|
||||
/// It generates an ID as follows:
|
||||
///
|
||||
/// `CRATE_ID:ITEM_ID[:NAME_ID][-EXTRA]`:
|
||||
/// * If there is no `name`, `NAME_ID` is not generated.
|
||||
/// * If there is no `extra`, `EXTRA` is not generated.
|
||||
///
|
||||
/// * `name` is the item's name if available (it's not for impl blocks for example).
|
||||
/// * `extra` is used for reexports: it contains the ID of the reexported item. It is used to allow
|
||||
/// to have items with the same name but different types to both appear in the generated JSON.
|
||||
pub(crate) fn id_from_item_inner(
|
||||
item_id: ItemId,
|
||||
tcx: TyCtxt<'_>,
|
||||
name: Option<Symbol>,
|
||||
extra: Option<&Id>,
|
||||
) -> Id {
|
||||
struct DisplayDefId<'a, 'b>(DefId, TyCtxt<'a>, Option<&'b Id>, Option<Symbol>);
|
||||
|
||||
impl<'a> fmt::Display for DisplayDefId<'a> {
|
||||
impl<'a, 'b> fmt::Display for DisplayDefId<'a, 'b> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
let DisplayDefId(def_id, tcx, name) = self;
|
||||
let DisplayDefId(def_id, tcx, extra, name) = self;
|
||||
// We need this workaround because primitive types' DefId actually refers to
|
||||
// their parent module, which isn't present in the output JSON items. So
|
||||
// instead, we directly get the primitive symbol and convert it to u32 to
|
||||
// generate the ID.
|
||||
let s;
|
||||
let extra = if let Some(e) = extra {
|
||||
s = format!("-{}", e.0);
|
||||
&s
|
||||
} else {
|
||||
""
|
||||
};
|
||||
let name = match name {
|
||||
Some(name) => format!(":{}", name.as_u32()),
|
||||
None => {
|
||||
|
@ -240,18 +262,33 @@ pub(crate) fn from_item_id_with_name(item_id: ItemId, tcx: TyCtxt<'_>, name: Opt
|
|||
}
|
||||
}
|
||||
};
|
||||
write!(f, "{}:{}{}", self.0.krate.as_u32(), u32::from(self.0.index), name)
|
||||
write!(f, "{}:{}{name}{extra}", def_id.krate.as_u32(), u32::from(def_id.index))
|
||||
}
|
||||
}
|
||||
|
||||
match item_id {
|
||||
ItemId::DefId(did) => Id(format!("{}", DisplayDefId(did, tcx, name))),
|
||||
ItemId::Blanket { for_, impl_id } => {
|
||||
Id(format!("b:{}-{}", DisplayDefId(impl_id, tcx, None), DisplayDefId(for_, tcx, name)))
|
||||
}
|
||||
ItemId::Auto { for_, trait_ } => {
|
||||
Id(format!("a:{}-{}", DisplayDefId(trait_, tcx, None), DisplayDefId(for_, tcx, name)))
|
||||
ItemId::DefId(did) => Id(format!("{}", DisplayDefId(did, tcx, extra, name))),
|
||||
ItemId::Blanket { for_, impl_id } => Id(format!(
|
||||
"b:{}-{}",
|
||||
DisplayDefId(impl_id, tcx, None, None),
|
||||
DisplayDefId(for_, tcx, extra, name)
|
||||
)),
|
||||
ItemId::Auto { for_, trait_ } => Id(format!(
|
||||
"a:{}-{}",
|
||||
DisplayDefId(trait_, tcx, None, None),
|
||||
DisplayDefId(for_, tcx, extra, name)
|
||||
)),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn id_from_item(item: &clean::Item, tcx: TyCtxt<'_>) -> Id {
|
||||
match *item.kind {
|
||||
clean::ItemKind::ImportItem(ref import) => {
|
||||
let extra =
|
||||
import.source.did.map(ItemId::from).map(|i| id_from_item_inner(i, tcx, None, None));
|
||||
id_from_item_inner(item.item_id, tcx, item.name, extra.as_ref())
|
||||
}
|
||||
_ => id_from_item_inner(item.item_id, tcx, item.name, None),
|
||||
}
|
||||
}
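A small illustration of the ID strings this scheme produces may help; every index below is made up for illustration and is not real rustdoc output:
// Hypothetical IDs, following the `CRATE_ID:ITEM_ID[:NAME_ID][-EXTRA]` format documented above.
fn main() {
    let plain_item = "0:1423:87";       // CRATE_ID:ITEM_ID:NAME_ID
    let impl_block = "0:1424";          // impl blocks have no name, so no NAME_ID
    let reexport = "0:1500:87-0:1423";  // EXTRA = ID of the item being reexported
    let blanket = "b:0:200-0:1423:87";  // blanket impls keep their `b:` prefix
    println!("{plain_item} {impl_block} {reexport} {blanket}");
}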
|
||||
|
||||
|
@ -525,7 +562,7 @@ impl FromWithTcx<clean::Path> for Path {
|
|||
fn from_tcx(path: clean::Path, tcx: TyCtxt<'_>) -> Path {
|
||||
Path {
|
||||
name: path.whole_name(),
|
||||
id: from_item_id(path.def_id().into(), tcx),
|
||||
id: id_from_item_default(path.def_id().into(), tcx),
|
||||
args: path.segments.last().map(|args| Box::new(args.clone().args.into_tcx(tcx))),
|
||||
}
|
||||
}
|
||||
|
@ -702,7 +739,7 @@ impl FromWithTcx<clean::Import> for Import {
|
|||
Import {
|
||||
source: import.source.path.whole_name(),
|
||||
name,
|
||||
id: import.source.did.map(ItemId::from).map(|i| from_item_id(i, tcx)),
|
||||
id: import.source.did.map(ItemId::from).map(|i| id_from_item_default(i, tcx)),
|
||||
glob,
|
||||
}
|
||||
}
|
||||
|
@ -791,7 +828,7 @@ fn ids(items: impl IntoIterator<Item = clean::Item>, tcx: TyCtxt<'_>) -> Vec<Id>
|
|||
items
|
||||
.into_iter()
|
||||
.filter(|x| !x.is_stripped() && !x.is_keyword())
|
||||
.map(|i| from_item_id_with_name(i.item_id, tcx, i.name))
|
||||
.map(|i| id_from_item(&i, tcx))
|
||||
.collect()
|
||||
}
|
||||
|
||||
|
@ -801,12 +838,10 @@ fn ids_keeping_stripped(
|
|||
) -> Vec<Option<Id>> {
|
||||
items
|
||||
.into_iter()
|
||||
.map(|i| {
|
||||
if !i.is_stripped() && !i.is_keyword() {
|
||||
Some(from_item_id_with_name(i.item_id, tcx, i.name))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.map(
|
||||
|i| {
|
||||
if !i.is_stripped() && !i.is_keyword() { Some(id_from_item(&i, tcx)) } else { None }
|
||||
},
|
||||
)
|
||||
.collect()
|
||||
}
|
||||
|
|
|
@ -28,7 +28,7 @@ use crate::docfs::PathError;
|
|||
use crate::error::Error;
|
||||
use crate::formats::cache::Cache;
|
||||
use crate::formats::FormatRenderer;
|
||||
use crate::json::conversions::{from_item_id, from_item_id_with_name, IntoWithTcx};
|
||||
use crate::json::conversions::{id_from_item, id_from_item_default, IntoWithTcx};
|
||||
use crate::{clean, try_err};
|
||||
|
||||
#[derive(Clone)]
|
||||
|
@ -58,7 +58,7 @@ impl<'tcx> JsonRenderer<'tcx> {
|
|||
.map(|i| {
|
||||
let item = &i.impl_item;
|
||||
self.item(item.clone()).unwrap();
|
||||
from_item_id_with_name(item.item_id, self.tcx, item.name)
|
||||
id_from_item(&item, self.tcx)
|
||||
})
|
||||
.collect()
|
||||
})
|
||||
|
@ -89,7 +89,7 @@ impl<'tcx> JsonRenderer<'tcx> {
|
|||
|
||||
if item.item_id.is_local() || is_primitive_impl {
|
||||
self.item(item.clone()).unwrap();
|
||||
Some(from_item_id_with_name(item.item_id, self.tcx, item.name))
|
||||
Some(id_from_item(&item, self.tcx))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
|
@ -150,7 +150,6 @@ impl<'tcx> FormatRenderer<'tcx> for JsonRenderer<'tcx> {
|
|||
// Flatten items that recursively store other items
|
||||
item.kind.inner_items().for_each(|i| self.item(i.clone()).unwrap());
|
||||
|
||||
let name = item.name;
|
||||
let item_id = item.item_id;
|
||||
if let Some(mut new_item) = self.convert_item(item) {
|
||||
let can_be_ignored = match new_item.inner {
|
||||
|
@ -193,10 +192,7 @@ impl<'tcx> FormatRenderer<'tcx> for JsonRenderer<'tcx> {
|
|||
| types::ItemEnum::Macro(_)
|
||||
| types::ItemEnum::ProcMacro(_) => false,
|
||||
};
|
||||
let removed = self
|
||||
.index
|
||||
.borrow_mut()
|
||||
.insert(from_item_id_with_name(item_id, self.tcx, name), new_item.clone());
|
||||
let removed = self.index.borrow_mut().insert(new_item.id.clone(), new_item.clone());
|
||||
|
||||
// FIXME(adotinthevoid): Currently, the index is duplicated. This is a sanity check
|
||||
// to make sure the items are unique. The main place this happens is when an item, is
|
||||
|
@ -207,6 +203,7 @@ impl<'tcx> FormatRenderer<'tcx> for JsonRenderer<'tcx> {
|
|||
if !can_be_ignored {
|
||||
assert_eq!(old_item, new_item);
|
||||
}
|
||||
trace!("replaced {:?}\nwith {:?}", old_item, new_item);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -246,7 +243,7 @@ impl<'tcx> FormatRenderer<'tcx> for JsonRenderer<'tcx> {
|
|||
.chain(&self.cache.external_paths)
|
||||
.map(|(&k, &(ref path, kind))| {
|
||||
(
|
||||
from_item_id(k.into(), self.tcx),
|
||||
id_from_item_default(k.into(), self.tcx),
|
||||
types::ItemSummary {
|
||||
crate_id: k.krate.as_u32(),
|
||||
path: path.iter().map(|s| s.to_string()).collect(),
|
||||
|
|
|
@ -884,7 +884,8 @@ fn preprocess_link(
|
|||
let mut parts = stripped.split('#');
|
||||
|
||||
let link = parts.next().unwrap();
|
||||
if link.trim().is_empty() {
|
||||
let link = link.trim();
|
||||
if link.is_empty() {
|
||||
// This is an anchor to an element of the current page, nothing to do in here!
|
||||
return None;
|
||||
}
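A tiny sketch of the case this early trim handles (hypothetical input, not taken from the test suite):
// The part before '#' is whitespace-only, so after trimming it counts as a
// same-page anchor and the link is skipped instead of being resolved.
fn main() {
    let stripped = "  #method.len";
    let link = stripped.split('#').next().unwrap().trim();
    assert!(link.is_empty());
}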
|
||||
|
@ -897,7 +898,7 @@ fn preprocess_link(
|
|||
// Parse and strip the disambiguator from the link, if present.
|
||||
let (disambiguator, path_str, link_text) = match Disambiguator::from_str(link) {
|
||||
Ok(Some((d, path, link_text))) => (Some(d), path.trim(), link_text.trim()),
|
||||
Ok(None) => (None, link.trim(), link.trim()),
|
||||
Ok(None) => (None, link, link),
|
||||
Err((err_msg, relative_range)) => {
|
||||
// Only report error if we would not have ignored this link. See issue #83859.
|
||||
if !should_ignore_link_with_disambiguators(link) {
|
||||
|
|
|
@ -1 +1 @@
|
|||
3eb5c4581a386b13c414e8c8bd73846ef37236d1
|
||||
7e253a7fb2e2e050021fed32da6fa2ec7bcea0fb
|
||||
|
|
|
@ -821,8 +821,8 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for MiriMachine<'mir, 'tcx> {
|
|||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn checked_binop_checks_overflow(ecx: &MiriInterpCx<'mir, 'tcx>) -> bool {
|
||||
ecx.tcx.sess.overflow_checks()
|
||||
fn ignore_checkable_overflow_assertions(ecx: &MiriInterpCx<'mir, 'tcx>) -> bool {
|
||||
!ecx.tcx.sess.overflow_checks()
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
// compile-flags: -O -C no-prepopulate-passes
|
||||
|
||||
#![crate_type = "lib"]
|
||||
#![feature(dyn_star)]
|
||||
|
||||
use std::mem::MaybeUninit;
|
||||
use std::num::NonZeroU64;
|
||||
|
@ -279,3 +280,11 @@ pub fn enum_id_1(x: Option<Result<u16, u16>>) -> Option<Result<u16, u16>> {
|
|||
pub fn enum_id_2(x: Option<u8>) -> Option<u8> {
|
||||
x
|
||||
}
|
||||
|
||||
// CHECK: { {{\{\}\*|ptr}}, {{.+}} } @dyn_star({{\{\}\*|ptr}} noundef %x.0, {{.+}} noalias noundef readonly align {{.*}} dereferenceable({{.*}}) %x.1)
|
||||
// Expect an ABI something like `{ {}*, [3 x i64]* }`, but that's hard to match on generically,
|
||||
// so do like the `trait_box` test and just match on `{{.+}}` for the vtable.
|
||||
#[no_mangle]
|
||||
pub fn dyn_star(x: dyn* Drop) -> dyn* Drop {
|
||||
x
|
||||
}
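For readers unfamiliar with `dyn*`, a minimal usage sketch (nightly-only and experimental at the time of this commit; the trait and value are arbitrary choices, not from the test):
#![feature(dyn_star)]
#![allow(incomplete_features)]

use std::fmt::Debug;

fn main() {
    // A `dyn*` value carries a pointer-sized payload plus a vtable and is passed
    // by value, which is why it shares its ABI with a wide pointer.
    let x: dyn* Debug = 42usize;
    println!("{x:?}");
}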
|
||||
|
|
tests/codegen/inherit_overflow.rs (new file, 14 lines)
|
@ -0,0 +1,14 @@
|
|||
// compile-flags: -Zmir-enable-passes=+Inline,+ConstProp --crate-type lib
|
||||
// revisions: ASSERT NOASSERT
|
||||
//[ASSERT] compile-flags: -Coverflow-checks=on
|
||||
//[NOASSERT] compile-flags: -Coverflow-checks=off
|
||||
|
||||
// CHECK-LABEL: define{{.*}} @assertion
|
||||
// ASSERT: call void @_ZN4core9panicking5panic17h
|
||||
// NOASSERT: ret i8 0
|
||||
#[no_mangle]
|
||||
pub fn assertion() -> u8 {
|
||||
// Optimized MIR will replace this `CheckedBinaryOp` by `const (0, true)`.
|
||||
// Verify that codegen does or does not emit the panic.
|
||||
<u8 as std::ops::Add>::add(255, 1)
|
||||
}
|
|
@ -24,9 +24,10 @@
|
|||
StorageLive(_3); // scope 1 at $DIR/bad_op_div_by_zero.rs:+2:18: +2:19
|
||||
- _3 = _1; // scope 1 at $DIR/bad_op_div_by_zero.rs:+2:18: +2:19
|
||||
- _4 = Eq(_3, const 0_i32); // scope 1 at $DIR/bad_op_div_by_zero.rs:+2:14: +2:19
|
||||
- assert(!move _4, "attempt to divide `{}` by zero", const 1_i32) -> bb1; // scope 1 at $DIR/bad_op_div_by_zero.rs:+2:14: +2:19
|
||||
+ _3 = const 0_i32; // scope 1 at $DIR/bad_op_div_by_zero.rs:+2:18: +2:19
|
||||
+ _4 = const true; // scope 1 at $DIR/bad_op_div_by_zero.rs:+2:14: +2:19
|
||||
assert(!move _4, "attempt to divide `{}` by zero", const 1_i32) -> bb1; // scope 1 at $DIR/bad_op_div_by_zero.rs:+2:14: +2:19
|
||||
+ assert(!const true, "attempt to divide `{}` by zero", const 1_i32) -> bb1; // scope 1 at $DIR/bad_op_div_by_zero.rs:+2:14: +2:19
|
||||
}
|
||||
|
||||
bb1: {
|
||||
|
|
|
@ -0,0 +1,39 @@
|
|||
- // MIR for `main` before ConstProp
|
||||
+ // MIR for `main` after ConstProp
|
||||
|
||||
fn main() -> () {
|
||||
let mut _0: (); // return place in scope 0 at $DIR/inherit_overflow.rs:+0:11: +0:11
|
||||
let mut _1: u8; // in scope 0 at $DIR/inherit_overflow.rs:+3:13: +3:47
|
||||
let mut _2: u8; // in scope 0 at $DIR/inherit_overflow.rs:+3:13: +3:47
|
||||
let mut _3: u8; // in scope 0 at $DIR/inherit_overflow.rs:+3:13: +3:47
|
||||
scope 1 {
|
||||
}
|
||||
scope 2 (inlined <u8 as Add>::add) { // at $DIR/inherit_overflow.rs:8:13: 8:47
|
||||
debug self => _2; // in scope 2 at $SRC_DIR/core/src/ops/arith.rs:LL:COL
|
||||
debug other => _3; // in scope 2 at $SRC_DIR/core/src/ops/arith.rs:LL:COL
|
||||
let mut _4: (u8, bool); // in scope 2 at $SRC_DIR/core/src/ops/arith.rs:LL:COL
|
||||
}
|
||||
|
||||
bb0: {
|
||||
StorageLive(_1); // scope 0 at $DIR/inherit_overflow.rs:+3:13: +3:47
|
||||
StorageLive(_2); // scope 0 at $DIR/inherit_overflow.rs:+3:13: +3:47
|
||||
_2 = const u8::MAX; // scope 0 at $DIR/inherit_overflow.rs:+3:13: +3:47
|
||||
StorageLive(_3); // scope 0 at $DIR/inherit_overflow.rs:+3:13: +3:47
|
||||
_3 = const 1_u8; // scope 0 at $DIR/inherit_overflow.rs:+3:13: +3:47
|
||||
- _4 = CheckedAdd(_2, _3); // scope 2 at $SRC_DIR/core/src/ops/arith.rs:LL:COL
|
||||
- assert(!move (_4.1: bool), "attempt to compute `{} + {}`, which would overflow", _2, _3) -> bb1; // scope 2 at $SRC_DIR/core/src/ops/arith.rs:LL:COL
|
||||
+ _4 = const (0_u8, true); // scope 2 at $SRC_DIR/core/src/ops/arith.rs:LL:COL
|
||||
+ assert(!const true, "attempt to compute `{} + {}`, which would overflow", _2, _3) -> bb1; // scope 2 at $SRC_DIR/core/src/ops/arith.rs:LL:COL
|
||||
}
|
||||
|
||||
bb1: {
|
||||
- _1 = move (_4.0: u8); // scope 2 at $SRC_DIR/core/src/ops/arith.rs:LL:COL
|
||||
+ _1 = const 0_u8; // scope 2 at $SRC_DIR/core/src/ops/arith.rs:LL:COL
|
||||
StorageDead(_3); // scope 0 at $DIR/inherit_overflow.rs:+3:13: +3:47
|
||||
StorageDead(_2); // scope 0 at $DIR/inherit_overflow.rs:+3:13: +3:47
|
||||
StorageDead(_1); // scope 0 at $DIR/inherit_overflow.rs:+3:47: +3:48
|
||||
_0 = const (); // scope 0 at $DIR/inherit_overflow.rs:+0:11: +4:2
|
||||
return; // scope 0 at $DIR/inherit_overflow.rs:+4:2: +4:2
|
||||
}
|
||||
}
|
||||
|
tests/mir-opt/const_prop/inherit_overflow.rs (new file, 9 lines)
|
@ -0,0 +1,9 @@
|
|||
// unit-test: ConstProp
|
||||
// compile-flags: -Zmir-enable-passes=+Inline
|
||||
|
||||
// EMIT_MIR inherit_overflow.main.ConstProp.diff
|
||||
fn main() {
|
||||
// After inlining, this will contain a `CheckedBinaryOp`.
|
||||
// Propagating the overflow is ok as codegen will just skip emitting the panic.
|
||||
let _ = <u8 as std::ops::Add>::add(255, 1);
|
||||
}
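Roughly what the propagated program amounts to; an illustrative analogue, not actual compiler output:
fn main() {
    // ConstProp folds `u8::MAX + 1` into this pair; whether the overflow panic
    // is emitted at all is decided later, by codegen, from -C overflow-checks.
    let (value, overflowed) = (0_u8, true);
    let _ = (value, overflowed);
}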
|
|
@ -61,7 +61,7 @@
|
|||
- assert(!move (_10.1: bool), "attempt to compute `{} + {}`, which would overflow", move _9, const 1_i32) -> bb2; // scope 4 at $DIR/checked.rs:+6:13: +6:18
|
||||
+ _9 = const i32::MAX; // scope 4 at $DIR/checked.rs:+6:13: +6:14
|
||||
+ _10 = CheckedAdd(const i32::MAX, const 1_i32); // scope 4 at $DIR/checked.rs:+6:13: +6:18
|
||||
+ assert(!move (_10.1: bool), "attempt to compute `{} + {}`, which would overflow", const i32::MAX, const 1_i32) -> bb2; // scope 4 at $DIR/checked.rs:+6:13: +6:18
|
||||
+ assert(!const true, "attempt to compute `{} + {}`, which would overflow", const i32::MAX, const 1_i32) -> bb2; // scope 4 at $DIR/checked.rs:+6:13: +6:18
|
||||
}
|
||||
|
||||
bb2: {
|
||||
|
|
|
@ -5,26 +5,34 @@
|
|||
let mut _0: (); // return place in scope 0 at $DIR/inherit_overflow.rs:+0:11: +0:11
|
||||
let mut _1: u8; // in scope 0 at $DIR/inherit_overflow.rs:+3:13: +3:47
|
||||
let mut _2: u8; // in scope 0 at $DIR/inherit_overflow.rs:+3:13: +3:47
|
||||
let mut _3: u8; // in scope 0 at $DIR/inherit_overflow.rs:+3:13: +3:47
|
||||
scope 1 {
|
||||
}
|
||||
scope 2 (inlined <u8 as Add>::add) { // at $DIR/inherit_overflow.rs:7:13: 7:47
|
||||
debug self => _1; // in scope 2 at $SRC_DIR/core/src/ops/arith.rs:LL:COL
|
||||
debug other => _2; // in scope 2 at $SRC_DIR/core/src/ops/arith.rs:LL:COL
|
||||
let mut _3: (u8, bool); // in scope 2 at $SRC_DIR/core/src/ops/arith.rs:LL:COL
|
||||
scope 2 (inlined <u8 as Add>::add) { // at $DIR/inherit_overflow.rs:8:13: 8:47
|
||||
debug self => _2; // in scope 2 at $SRC_DIR/core/src/ops/arith.rs:LL:COL
|
||||
debug other => _3; // in scope 2 at $SRC_DIR/core/src/ops/arith.rs:LL:COL
|
||||
let mut _4: (u8, bool); // in scope 2 at $SRC_DIR/core/src/ops/arith.rs:LL:COL
|
||||
}
|
||||
|
||||
bb0: {
|
||||
StorageLive(_1); // scope 0 at $DIR/inherit_overflow.rs:+3:13: +3:47
|
||||
_1 = const u8::MAX; // scope 0 at $DIR/inherit_overflow.rs:+3:13: +3:47
|
||||
StorageLive(_2); // scope 0 at $DIR/inherit_overflow.rs:+3:13: +3:47
|
||||
_2 = const 1_u8; // scope 0 at $DIR/inherit_overflow.rs:+3:13: +3:47
|
||||
_3 = CheckedAdd(const u8::MAX, const 1_u8); // scope 2 at $SRC_DIR/core/src/ops/arith.rs:LL:COL
|
||||
assert(!move (_3.1: bool), "attempt to compute `{} + {}`, which would overflow", const u8::MAX, const 1_u8) -> bb1; // scope 2 at $SRC_DIR/core/src/ops/arith.rs:LL:COL
|
||||
_2 = const u8::MAX; // scope 0 at $DIR/inherit_overflow.rs:+3:13: +3:47
|
||||
StorageLive(_3); // scope 0 at $DIR/inherit_overflow.rs:+3:13: +3:47
|
||||
_3 = const 1_u8; // scope 0 at $DIR/inherit_overflow.rs:+3:13: +3:47
|
||||
- _4 = CheckedAdd(_2, _3); // scope 2 at $SRC_DIR/core/src/ops/arith.rs:LL:COL
|
||||
- assert(!move (_4.1: bool), "attempt to compute `{} + {}`, which would overflow", _2, _3) -> bb1; // scope 2 at $SRC_DIR/core/src/ops/arith.rs:LL:COL
|
||||
+ _4 = CheckedAdd(const u8::MAX, const 1_u8); // scope 2 at $SRC_DIR/core/src/ops/arith.rs:LL:COL
|
||||
+ assert(!const true, "attempt to compute `{} + {}`, which would overflow", const u8::MAX, const 1_u8) -> bb1; // scope 2 at $SRC_DIR/core/src/ops/arith.rs:LL:COL
|
||||
}
|
||||
|
||||
bb1: {
|
||||
- _1 = move (_4.0: u8); // scope 2 at $SRC_DIR/core/src/ops/arith.rs:LL:COL
|
||||
+ _1 = const 0_u8; // scope 2 at $SRC_DIR/core/src/ops/arith.rs:LL:COL
|
||||
StorageDead(_3); // scope 0 at $DIR/inherit_overflow.rs:+3:13: +3:47
|
||||
StorageDead(_2); // scope 0 at $DIR/inherit_overflow.rs:+3:13: +3:47
|
||||
StorageDead(_1); // scope 0 at $DIR/inherit_overflow.rs:+3:13: +3:47
|
||||
StorageDead(_1); // scope 0 at $DIR/inherit_overflow.rs:+3:47: +3:48
|
||||
_0 = const (); // scope 0 at $DIR/inherit_overflow.rs:+0:11: +4:2
|
||||
return; // scope 0 at $DIR/inherit_overflow.rs:+4:2: +4:2
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,8 +1,9 @@
|
|||
// compile-flags: -Zunsound-mir-opts
|
||||
// unit-test: DataflowConstProp
|
||||
// compile-flags: -Zmir-enable-passes=+Inline
|
||||
|
||||
// EMIT_MIR inherit_overflow.main.DataflowConstProp.diff
|
||||
fn main() {
|
||||
// After inlining, this will contain a `CheckedBinaryOp`. The overflow
|
||||
// must be ignored by the constant propagation to avoid triggering a panic.
|
||||
// After inlining, this will contain a `CheckedBinaryOp`.
|
||||
// Propagating the overflow is ok as codegen will just skip emitting the panic.
|
||||
let _ = <u8 as std::ops::Add>::add(255, 1);
|
||||
}
|
||||
|
|
|
@ -72,3 +72,10 @@ pub fn assume() {
|
|||
std::intrinsics::assume(true);
|
||||
}
|
||||
}
|
||||
|
||||
// EMIT_MIR lower_intrinsics.with_overflow.LowerIntrinsics.diff
|
||||
pub fn with_overflow(a: i32, b: i32) {
|
||||
let _x = core::intrinsics::add_with_overflow(a, b);
|
||||
let _y = core::intrinsics::sub_with_overflow(a, b);
|
||||
let _z = core::intrinsics::mul_with_overflow(a, b);
|
||||
}
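For context, the stable counterparts of these intrinsics are the `overflowing_*` methods, which the standard library builds on the same intrinsics and which therefore lower to the same CheckedAdd/CheckedSub/CheckedMul MIR (illustrative sketch, not part of the test):
// Purely illustrative; each call returns a (result, overflowed) pair.
pub fn with_overflow_stable(a: i32, b: i32) {
    let _x: (i32, bool) = a.overflowing_add(b);
    let _y: (i32, bool) = a.overflowing_sub(b);
    let _z: (i32, bool) = a.overflowing_mul(b);
}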
|
||||
|
|
|
@ -0,0 +1,83 @@
|
|||
- // MIR for `with_overflow` before LowerIntrinsics
|
||||
+ // MIR for `with_overflow` after LowerIntrinsics
|
||||
|
||||
fn with_overflow(_1: i32, _2: i32) -> () {
|
||||
debug a => _1; // in scope 0 at $DIR/lower_intrinsics.rs:+0:22: +0:23
|
||||
debug b => _2; // in scope 0 at $DIR/lower_intrinsics.rs:+0:30: +0:31
|
||||
let mut _0: (); // return place in scope 0 at $DIR/lower_intrinsics.rs:+0:38: +0:38
|
||||
let _3: (i32, bool); // in scope 0 at $DIR/lower_intrinsics.rs:+1:9: +1:11
|
||||
let mut _4: i32; // in scope 0 at $DIR/lower_intrinsics.rs:+1:50: +1:51
|
||||
let mut _5: i32; // in scope 0 at $DIR/lower_intrinsics.rs:+1:53: +1:54
|
||||
let mut _7: i32; // in scope 0 at $DIR/lower_intrinsics.rs:+2:50: +2:51
|
||||
let mut _8: i32; // in scope 0 at $DIR/lower_intrinsics.rs:+2:53: +2:54
|
||||
let mut _10: i32; // in scope 0 at $DIR/lower_intrinsics.rs:+3:50: +3:51
|
||||
let mut _11: i32; // in scope 0 at $DIR/lower_intrinsics.rs:+3:53: +3:54
|
||||
scope 1 {
|
||||
debug _x => _3; // in scope 1 at $DIR/lower_intrinsics.rs:+1:9: +1:11
|
||||
let _6: (i32, bool); // in scope 1 at $DIR/lower_intrinsics.rs:+2:9: +2:11
|
||||
scope 2 {
|
||||
debug _y => _6; // in scope 2 at $DIR/lower_intrinsics.rs:+2:9: +2:11
|
||||
let _9: (i32, bool); // in scope 2 at $DIR/lower_intrinsics.rs:+3:9: +3:11
|
||||
scope 3 {
|
||||
debug _z => _9; // in scope 3 at $DIR/lower_intrinsics.rs:+3:9: +3:11
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
bb0: {
|
||||
StorageLive(_3); // scope 0 at $DIR/lower_intrinsics.rs:+1:9: +1:11
|
||||
StorageLive(_4); // scope 0 at $DIR/lower_intrinsics.rs:+1:50: +1:51
|
||||
_4 = _1; // scope 0 at $DIR/lower_intrinsics.rs:+1:50: +1:51
|
||||
StorageLive(_5); // scope 0 at $DIR/lower_intrinsics.rs:+1:53: +1:54
|
||||
_5 = _2; // scope 0 at $DIR/lower_intrinsics.rs:+1:53: +1:54
|
||||
- _3 = add_with_overflow::<i32>(move _4, move _5) -> bb1; // scope 0 at $DIR/lower_intrinsics.rs:+1:14: +1:55
|
||||
- // mir::Constant
|
||||
- // + span: $DIR/lower_intrinsics.rs:78:14: 78:49
|
||||
- // + literal: Const { ty: extern "rust-intrinsic" fn(i32, i32) -> (i32, bool) {add_with_overflow::<i32>}, val: Value(<ZST>) }
|
||||
+ _3 = CheckedAdd(move _4, move _5); // scope 0 at $DIR/lower_intrinsics.rs:+1:14: +1:55
|
||||
+ goto -> bb1; // scope 0 at $DIR/lower_intrinsics.rs:+1:14: +1:55
|
||||
}
|
||||
|
||||
bb1: {
|
||||
StorageDead(_5); // scope 0 at $DIR/lower_intrinsics.rs:+1:54: +1:55
|
||||
StorageDead(_4); // scope 0 at $DIR/lower_intrinsics.rs:+1:54: +1:55
|
||||
StorageLive(_6); // scope 1 at $DIR/lower_intrinsics.rs:+2:9: +2:11
|
||||
StorageLive(_7); // scope 1 at $DIR/lower_intrinsics.rs:+2:50: +2:51
|
||||
_7 = _1; // scope 1 at $DIR/lower_intrinsics.rs:+2:50: +2:51
|
||||
StorageLive(_8); // scope 1 at $DIR/lower_intrinsics.rs:+2:53: +2:54
|
||||
_8 = _2; // scope 1 at $DIR/lower_intrinsics.rs:+2:53: +2:54
|
||||
- _6 = sub_with_overflow::<i32>(move _7, move _8) -> bb2; // scope 1 at $DIR/lower_intrinsics.rs:+2:14: +2:55
|
||||
- // mir::Constant
|
||||
- // + span: $DIR/lower_intrinsics.rs:79:14: 79:49
|
||||
- // + literal: Const { ty: extern "rust-intrinsic" fn(i32, i32) -> (i32, bool) {sub_with_overflow::<i32>}, val: Value(<ZST>) }
|
||||
+ _6 = CheckedSub(move _7, move _8); // scope 1 at $DIR/lower_intrinsics.rs:+2:14: +2:55
|
||||
+ goto -> bb2; // scope 1 at $DIR/lower_intrinsics.rs:+2:14: +2:55
|
||||
}
|
||||
|
||||
bb2: {
|
||||
StorageDead(_8); // scope 1 at $DIR/lower_intrinsics.rs:+2:54: +2:55
|
||||
StorageDead(_7); // scope 1 at $DIR/lower_intrinsics.rs:+2:54: +2:55
|
||||
StorageLive(_9); // scope 2 at $DIR/lower_intrinsics.rs:+3:9: +3:11
|
||||
StorageLive(_10); // scope 2 at $DIR/lower_intrinsics.rs:+3:50: +3:51
|
||||
_10 = _1; // scope 2 at $DIR/lower_intrinsics.rs:+3:50: +3:51
|
||||
StorageLive(_11); // scope 2 at $DIR/lower_intrinsics.rs:+3:53: +3:54
|
||||
_11 = _2; // scope 2 at $DIR/lower_intrinsics.rs:+3:53: +3:54
|
||||
- _9 = mul_with_overflow::<i32>(move _10, move _11) -> bb3; // scope 2 at $DIR/lower_intrinsics.rs:+3:14: +3:55
|
||||
- // mir::Constant
|
||||
- // + span: $DIR/lower_intrinsics.rs:80:14: 80:49
|
||||
- // + literal: Const { ty: extern "rust-intrinsic" fn(i32, i32) -> (i32, bool) {mul_with_overflow::<i32>}, val: Value(<ZST>) }
|
||||
+ _9 = CheckedMul(move _10, move _11); // scope 2 at $DIR/lower_intrinsics.rs:+3:14: +3:55
|
||||
+ goto -> bb3; // scope 2 at $DIR/lower_intrinsics.rs:+3:14: +3:55
|
||||
}
|
||||
|
||||
bb3: {
|
||||
StorageDead(_11); // scope 2 at $DIR/lower_intrinsics.rs:+3:54: +3:55
|
||||
StorageDead(_10); // scope 2 at $DIR/lower_intrinsics.rs:+3:54: +3:55
|
||||
_0 = const (); // scope 0 at $DIR/lower_intrinsics.rs:+0:38: +4:2
|
||||
StorageDead(_9); // scope 2 at $DIR/lower_intrinsics.rs:+4:1: +4:2
|
||||
StorageDead(_6); // scope 1 at $DIR/lower_intrinsics.rs:+4:1: +4:2
|
||||
StorageDead(_3); // scope 0 at $DIR/lower_intrinsics.rs:+4:1: +4:2
|
||||
return; // scope 0 at $DIR/lower_intrinsics.rs:+4:2: +4:2
|
||||
}
|
||||
}
|
||||
|
|
@ -3,7 +3,7 @@
|
|||
goto: "file://" + |DOC_PATH| + "/test_docs/huge_amount_of_consts/index.html"
|
||||
|
||||
compare-elements-position-near-false: (
|
||||
"//*[@class='item-table']//div[last()-1]",
|
||||
"//*[@class='item-table']//div[last()-3]",
|
||||
"//ul[@class='item-table']/li[last()-1]",
|
||||
"//ul[@class='item-table']/li[last()-3]",
|
||||
{"y": 12},
|
||||
)
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
// This test ensures that <table> elements aren't display in items summary.
|
||||
goto: "file://" + |DOC_PATH| + "/lib2/summary_table/index.html"
|
||||
// We check that we picked the right item first.
|
||||
assert-text: (".item-table .item-left", "Foo")
|
||||
assert-text: (".item-table .item-name", "Foo")
|
||||
// Then we check that its summary is empty.
|
||||
assert-false: ".item-table .item-right"
|
||||
assert-false: ".item-table .desc"
|
||||
|
|
|
@ -9,31 +9,31 @@ assert: (".stab.portability")
|
|||
|
||||
// make sure that deprecated and portability have the right colors
|
||||
assert-css: (
|
||||
".item-table .item-left .stab.deprecated",
|
||||
".item-table .item-name .stab.deprecated",
|
||||
{ "background-color": "rgb(255, 245, 214)" },
|
||||
)
|
||||
assert-css: (
|
||||
".item-table .item-left .stab.portability",
|
||||
".item-table .item-name .stab.portability",
|
||||
{ "background-color": "rgb(255, 245, 214)" },
|
||||
)
|
||||
|
||||
// table like view
|
||||
assert-css: (".item-right.docblock-short", { "padding-left": "0px" })
|
||||
assert-css: (".desc.docblock-short", { "padding-left": "0px" })
|
||||
compare-elements-position-near: (
|
||||
"//*[@class='item-left']//a[text()='replaced_function']",
|
||||
".item-left .stab.deprecated",
|
||||
"//*[@class='item-name']//a[text()='replaced_function']",
|
||||
".item-name .stab.deprecated",
|
||||
{"y": 2},
|
||||
)
|
||||
compare-elements-position: (
|
||||
".item-left .stab.deprecated",
|
||||
".item-left .stab.portability",
|
||||
".item-name .stab.deprecated",
|
||||
".item-name .stab.portability",
|
||||
("y"),
|
||||
)
|
||||
|
||||
// Ensure no wrap
|
||||
compare-elements-position: (
|
||||
"//*[@class='item-left']//a[text()='replaced_function']/..",
|
||||
"//*[@class='item-right docblock-short'][text()='a thing with a label']",
|
||||
"//*[@class='item-name']//a[text()='replaced_function']/..",
|
||||
"//*[@class='desc docblock-short'][text()='a thing with a label']",
|
||||
("y"),
|
||||
)
|
||||
|
||||
|
@ -41,26 +41,26 @@ compare-elements-position: (
|
|||
// Mobile view
|
||||
size: (600, 600)
|
||||
// staggered layout with 2em spacing
|
||||
assert-css: (".item-right.docblock-short", { "padding-left": "32px" })
|
||||
assert-css: (".desc.docblock-short", { "padding-left": "32px" })
|
||||
compare-elements-position-near: (
|
||||
"//*[@class='item-left']//a[text()='replaced_function']",
|
||||
".item-left .stab.deprecated",
|
||||
"//*[@class='item-name']//a[text()='replaced_function']",
|
||||
".item-name .stab.deprecated",
|
||||
{"y": 2},
|
||||
)
|
||||
compare-elements-position: (
|
||||
".item-left .stab.deprecated",
|
||||
".item-left .stab.portability",
|
||||
".item-name .stab.deprecated",
|
||||
".item-name .stab.portability",
|
||||
("y"),
|
||||
)
|
||||
|
||||
// Ensure wrap
|
||||
compare-elements-position-false: (
|
||||
"//*[@class='item-left']//a[text()='replaced_function']/..",
|
||||
"//*[@class='item-right docblock-short'][text()='a thing with a label']",
|
||||
"//*[@class='item-name']//a[text()='replaced_function']/..",
|
||||
"//*[@class='desc docblock-short'][text()='a thing with a label']",
|
||||
("y"),
|
||||
)
|
||||
compare-elements-position-false: (
|
||||
".item-left .stab.deprecated",
|
||||
"//*[@class='item-right docblock-short'][text()='a thing with a label']",
|
||||
".item-name .stab.deprecated",
|
||||
"//*[@class='desc docblock-short'][text()='a thing with a label']",
|
||||
("y"),
|
||||
)
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
// This test checks that the correct font is used on module items (in index.html pages).
|
||||
goto: "file://" + |DOC_PATH| + "/test_docs/index.html"
|
||||
assert-css: (
|
||||
".item-table .item-left > a",
|
||||
".item-table .item-name > a",
|
||||
{"font-family": '"Fira Sans", Arial, NanumBarunGothic, sans-serif'},
|
||||
ALL,
|
||||
)
|
||||
|
@ -13,55 +13,55 @@ assert-css: (
|
|||
|
||||
// modules
|
||||
assert-css: (
|
||||
"#modules + .item-table .item-left a",
|
||||
"#modules + .item-table .item-name a",
|
||||
{"font-family": '"Fira Sans", Arial, NanumBarunGothic, sans-serif'},
|
||||
)
|
||||
assert-css: (
|
||||
"#modules + .item-table .item-right.docblock-short",
|
||||
"#modules + .item-table .desc.docblock-short",
|
||||
{"font-family": '"Source Serif 4", NanumBarunGothic, serif'},
|
||||
)
|
||||
// structs
|
||||
assert-css: (
|
||||
"#structs + .item-table .item-left a",
|
||||
"#structs + .item-table .item-name a",
|
||||
{"font-family": '"Fira Sans", Arial, NanumBarunGothic, sans-serif'},
|
||||
)
|
||||
assert-css: (
|
||||
"#structs + .item-table .item-right.docblock-short",
|
||||
"#structs + .item-table .desc.docblock-short",
|
||||
{"font-family": '"Source Serif 4", NanumBarunGothic, serif'},
|
||||
)
|
||||
// enums
|
||||
assert-css: (
|
||||
"#enums + .item-table .item-left a",
|
||||
"#enums + .item-table .item-name a",
|
||||
{"font-family": '"Fira Sans", Arial, NanumBarunGothic, sans-serif'},
|
||||
)
|
||||
assert-css: (
|
||||
"#enums + .item-table .item-right.docblock-short",
|
||||
"#enums + .item-table .desc.docblock-short",
|
||||
{"font-family": '"Source Serif 4", NanumBarunGothic, serif'},
|
||||
)
|
||||
// traits
|
||||
assert-css: (
|
||||
"#traits + .item-table .item-left a",
|
||||
"#traits + .item-table .item-name a",
|
||||
{"font-family": '"Fira Sans", Arial, NanumBarunGothic, sans-serif'},
|
||||
)
|
||||
assert-css: (
|
||||
"#traits + .item-table .item-right.docblock-short",
|
||||
"#traits + .item-table .desc.docblock-short",
|
||||
{"font-family": '"Source Serif 4", NanumBarunGothic, serif'},
|
||||
)
|
||||
// functions
|
||||
assert-css: (
|
||||
"#functions + .item-table .item-left a",
|
||||
"#functions + .item-table .item-name a",
|
||||
{"font-family": '"Fira Sans", Arial, NanumBarunGothic, sans-serif'},
|
||||
)
|
||||
assert-css: (
|
||||
"#functions + .item-table .item-right.docblock-short",
|
||||
"#functions + .item-table .desc.docblock-short",
|
||||
{"font-family": '"Source Serif 4", NanumBarunGothic, serif'},
|
||||
)
|
||||
// keywords
|
||||
assert-css: (
|
||||
"#keywords + .item-table .item-left a",
|
||||
"#keywords + .item-table .item-name a",
|
||||
{"font-family": '"Fira Sans", Arial, NanumBarunGothic, sans-serif'},
|
||||
)
|
||||
assert-css: (
|
||||
"#keywords + .item-table .item-right.docblock-short",
|
||||
"#keywords + .item-table .desc.docblock-short",
|
||||
{"font-family": '"Source Serif 4", NanumBarunGothic, serif'},
|
||||
)
|
||||
|
|
|
@ -70,8 +70,8 @@ assert-text: (".sidebar-elems section ul > li:nth-child(8)", "Functions")
|
|||
assert-text: (".sidebar-elems section ul > li:nth-child(9)", "Type Definitions")
|
||||
assert-text: (".sidebar-elems section ul > li:nth-child(10)", "Unions")
|
||||
assert-text: (".sidebar-elems section ul > li:nth-child(11)", "Keywords")
|
||||
assert-text: ("#structs + .item-table .item-left > a", "Foo")
|
||||
click: "#structs + .item-table .item-left > a"
|
||||
assert-text: ("#structs + .item-table .item-name > a", "Foo")
|
||||
click: "#structs + .item-table .item-name > a"
|
||||
|
||||
// PAGE: struct.Foo.html
|
||||
assert-count: (".sidebar .location", 1)
|
||||
|
@ -103,8 +103,8 @@ assert-text: (".sidebar-elems > section ul.block > li:nth-child(2)", "Structs")
|
|||
assert-text: (".sidebar-elems > section ul.block > li:nth-child(3)", "Traits")
|
||||
assert-text: (".sidebar-elems > section ul.block > li:nth-child(4)", "Functions")
|
||||
assert-text: (".sidebar-elems > section ul.block > li:nth-child(5)", "Type Definitions")
|
||||
assert-text: ("#functions + .item-table .item-left > a", "foobar")
|
||||
click: "#functions + .item-table .item-left > a"
|
||||
assert-text: ("#functions + .item-table .item-name > a", "foobar")
|
||||
click: "#functions + .item-table .item-name > a"
|
||||
|
||||
// PAGE: fn.foobar.html
|
||||
// In items containing no items (like functions or constants) and in modules, we have no
|
||||
|
@ -127,7 +127,7 @@ assert-text: (".sidebar > .location", "Module sub_sub_module")
|
|||
// We check that we don't have the crate list.
|
||||
assert-false: ".sidebar-elems .crate"
|
||||
assert-text: (".sidebar-elems > section ul > li:nth-child(1)", "Functions")
|
||||
assert-text: ("#functions + .item-table .item-left > a", "foo")
|
||||
assert-text: ("#functions + .item-table .item-name > a", "foo")
|
||||
|
||||
// Links to trait implementations in the sidebar should not wrap even if they are long.
|
||||
goto: "file://" + |DOC_PATH| + "/lib2/struct.HasALongTraitWithParams.html"
|
||||
|
|
|
@ -19,7 +19,7 @@ define-function: (
|
|||
local-storage: {"rustdoc-theme": |theme|, "rustdoc-use-system-theme": "false"}
|
||||
// We reload the page so the local storage settings are being used.
|
||||
reload:
|
||||
assert-css: (".item-left sup", {"color": |color|})
|
||||
assert-css: (".item-name sup", {"color": |color|})
|
||||
},
|
||||
)
|
||||
|
||||
|
|
tests/rustdoc-js-std/reference-shrink.js (new file, 8 lines)
|
@ -0,0 +1,8 @@
|
|||
// exact-check
|
||||
|
||||
const QUERY = 'reference::shrink';
|
||||
|
||||
const EXPECTED = {
|
||||
// avoid including the method that's not going to be in the HTML
|
||||
'others': [],
|
||||
};
|
tests/rustdoc-json/reexport/same_name_different_types.rs (new file, 25 lines)
|
@ -0,0 +1,25 @@
|
|||
// Regression test for <https://github.com/rust-lang/rust/issues/107677>.
|
||||
|
||||
#![feature(no_core)]
|
||||
#![no_core]
|
||||
|
||||
pub mod nested {
|
||||
// @set foo_struct = "$.index[*][?(@.docs == 'Foo the struct')].id"
|
||||
|
||||
/// Foo the struct
|
||||
pub struct Foo {}
|
||||
|
||||
// @set foo_fn = "$.index[*][?(@.docs == 'Foo the function')].id"
|
||||
|
||||
#[allow(non_snake_case)]
|
||||
/// Foo the function
|
||||
pub fn Foo() {}
|
||||
}
|
||||
|
||||
// @ismany "$.index[*][?(@.inner.name == 'Foo' && @.kind == 'import')].inner.id" $foo_fn $foo_struct
|
||||
// @ismany "$.index[*][?(@.inner.name == 'Bar' && @.kind == 'import')].inner.id" $foo_fn $foo_struct
|
||||
|
||||
// @count "$.index[*][?(@.inner.name == 'Foo' && @.kind == 'import')]" 2
|
||||
pub use nested::Foo;
|
||||
// @count "$.index[*][?(@.inner.name == 'Bar' && @.kind == 'import')]" 2
|
||||
pub use Foo as Bar;
|
tests/rustdoc-ui/intra-doc/proc-macro-doc.rs (new file, 27 lines)
|
@ -0,0 +1,27 @@
|
|||
// check-pass
|
||||
// force-host
|
||||
// no-prefer-dynamic
|
||||
// compile-flags: --crate-type proc-macro
|
||||
|
||||
#![deny(rustdoc::broken_intra_doc_links)]
|
||||
|
||||
extern crate proc_macro;
|
||||
use proc_macro::*;
|
||||
|
||||
/// [`Unpin`]
|
||||
#[proc_macro_derive(F)]
|
||||
pub fn derive_(t: proc_macro::TokenStream) -> proc_macro::TokenStream {
|
||||
t
|
||||
}
|
||||
|
||||
/// [`Vec`]
|
||||
#[proc_macro_attribute]
|
||||
pub fn attr(t: proc_macro::TokenStream, _: proc_macro::TokenStream) -> proc_macro::TokenStream {
|
||||
t
|
||||
}
|
||||
|
||||
/// [`std::fs::File`]
|
||||
#[proc_macro]
|
||||
pub fn func(t: proc_macro::TokenStream) -> proc_macro::TokenStream {
|
||||
t
|
||||
}
|
|
@ -20,22 +20,6 @@ LL | //! Linking to [foo@banana] and [`bar@banana!()`].
|
|||
|
|
||||
= note: see https://doc.rust-lang.org/$CHANNEL/rustdoc/write-documentation/linking-to-items-by-name.html#namespaces-and-disambiguators for more info about disambiguators
|
||||
|
||||
error: unknown disambiguator `foo`
|
||||
--> $DIR/unknown-disambiguator.rs:10:34
|
||||
|
|
||||
LL | //! And with weird backticks: [``foo@hello``] [foo`@`hello].
|
||||
| ^^^
|
||||
|
|
||||
= note: see https://doc.rust-lang.org/$CHANNEL/rustdoc/write-documentation/linking-to-items-by-name.html#namespaces-and-disambiguators for more info about disambiguators
|
||||
|
||||
error: unknown disambiguator `foo`
|
||||
--> $DIR/unknown-disambiguator.rs:10:48
|
||||
|
|
||||
LL | //! And with weird backticks: [``foo@hello``] [foo`@`hello].
|
||||
| ^^^
|
||||
|
|
||||
= note: see https://doc.rust-lang.org/$CHANNEL/rustdoc/write-documentation/linking-to-items-by-name.html#namespaces-and-disambiguators for more info about disambiguators
|
||||
|
||||
error: unknown disambiguator ``
|
||||
--> $DIR/unknown-disambiguator.rs:7:31
|
||||
|
|
||||
|
@ -52,5 +36,21 @@ LL | //! And to [no disambiguator](@nectarine) and [another](@apricot!()).
|
|||
|
|
||||
= note: see https://doc.rust-lang.org/$CHANNEL/rustdoc/write-documentation/linking-to-items-by-name.html#namespaces-and-disambiguators for more info about disambiguators
|
||||
|
||||
error: unknown disambiguator `foo`
|
||||
--> $DIR/unknown-disambiguator.rs:10:34
|
||||
|
|
||||
LL | //! And with weird backticks: [``foo@hello``] [foo`@`hello].
|
||||
| ^^^
|
||||
|
|
||||
= note: see https://doc.rust-lang.org/$CHANNEL/rustdoc/write-documentation/linking-to-items-by-name.html#namespaces-and-disambiguators for more info about disambiguators
|
||||
|
||||
error: unknown disambiguator `foo`
|
||||
--> $DIR/unknown-disambiguator.rs:10:48
|
||||
|
|
||||
LL | //! And with weird backticks: [``foo@hello``] [foo`@`hello].
|
||||
| ^^^
|
||||
|
|
||||
= note: see https://doc.rust-lang.org/$CHANNEL/rustdoc/write-documentation/linking-to-items-by-name.html#namespaces-and-disambiguators for more info about disambiguators
|
||||
|
||||
error: aborting due to 6 previous errors
|
||||
|
||||
|
|
|
@ -5,8 +5,8 @@
|
|||
#![no_core]
|
||||
|
||||
// @has 'foo/index.html'
|
||||
// @has - '//*[@class="item-left"]/*[@class="stab portability"]' 'foobar'
|
||||
// @has - '//*[@class="item-left"]/*[@class="stab portability"]' 'bar'
|
||||
// @has - '//*[@class="item-name"]/*[@class="stab portability"]' 'foobar'
|
||||
// @has - '//*[@class="item-name"]/*[@class="stab portability"]' 'bar'
|
||||
|
||||
#[doc(cfg(feature = "foobar"))]
|
||||
mod imp_priv {
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
// @has deprecated/index.html '//*[@class="item-left"]/span[@class="stab deprecated"]' \
|
||||
// @has deprecated/index.html '//*[@class="item-name"]/span[@class="stab deprecated"]' \
|
||||
// 'Deprecated'
|
||||
// @has - '//*[@class="item-right docblock-short"]' 'Deprecated docs'
|
||||
// @has - '//*[@class="desc docblock-short"]' 'Deprecated docs'
|
||||
|
||||
// @has deprecated/struct.S.html '//*[@class="stab deprecated"]' \
|
||||
// 'Deprecated since 1.0.0: text'
|
||||
|
@ -8,7 +8,7 @@
|
|||
#[deprecated(since = "1.0.0", note = "text")]
|
||||
pub struct S;
|
||||
|
||||
// @matches deprecated/index.html '//*[@class="item-right docblock-short"]' '^Docs'
|
||||
// @matches deprecated/index.html '//*[@class="desc docblock-short"]' '^Docs'
|
||||
/// Docs
|
||||
pub struct T;
|
||||
|
||||
|
|
|
@ -12,7 +12,7 @@ pub struct Portable;
|
|||
// @has doc_cfg/unix_only/index.html \
|
||||
// '//*[@id="main-content"]/*[@class="item-info"]/*[@class="stab portability"]' \
|
||||
// 'Available on Unix only.'
|
||||
// @matches - '//*[@class="item-left"]//*[@class="stab portability"]' '\AARM\Z'
|
||||
// @matches - '//*[@class="item-name"]//*[@class="stab portability"]' '\AARM\Z'
|
||||
// @count - '//*[@class="stab portability"]' 2
|
||||
#[doc(cfg(unix))]
|
||||
pub mod unix_only {
|
||||
|
@ -42,7 +42,7 @@ pub mod unix_only {
|
|||
// @has doc_cfg/wasi_only/index.html \
|
||||
// '//*[@id="main-content"]/*[@class="item-info"]/*[@class="stab portability"]' \
|
||||
// 'Available on WASI only.'
|
||||
// @matches - '//*[@class="item-left"]//*[@class="stab portability"]' '\AWebAssembly\Z'
|
||||
// @matches - '//*[@class="item-name"]//*[@class="stab portability"]' '\AWebAssembly\Z'
|
||||
// @count - '//*[@class="stab portability"]' 2
|
||||
#[doc(cfg(target_os = "wasi"))]
|
||||
pub mod wasi_only {
|
||||
|
@ -74,7 +74,7 @@ pub mod wasi_only {
|
|||
|
||||
// the portability header is different on the module view versus the full view
|
||||
// @has doc_cfg/index.html
|
||||
// @matches - '//*[@class="item-left"]//*[@class="stab portability"]' '\Aavx\Z'
|
||||
// @matches - '//*[@class="item-name"]//*[@class="stab portability"]' '\Aavx\Z'
|
||||
|
||||
// @has doc_cfg/fn.uses_target_feature.html
|
||||
// @has - '//*[@id="main-content"]/*[@class="item-info"]/*[@class="stab portability"]' \
|
||||
|
|
|
@ -2,8 +2,8 @@
|
|||
#![feature(doc_cfg)]
|
||||
|
||||
// @has 'foo/index.html'
|
||||
// @matches '-' '//*[@class="item-left"]//*[@class="stab portability"]' '^sync$'
|
||||
// @has '-' '//*[@class="item-left"]//*[@class="stab portability"]/@title' 'Available on crate feature `sync` only'
|
||||
// @matches '-' '//*[@class="item-name"]//*[@class="stab portability"]' '^sync$'
|
||||
// @has '-' '//*[@class="item-name"]//*[@class="stab portability"]/@title' 'Available on crate feature `sync` only'
|
||||
|
||||
// @has 'foo/struct.Foo.html'
|
||||
// @has '-' '//*[@class="stab portability"]' 'sync'
|
||||
|
|
|
@ -15,6 +15,6 @@ mod sub4 {
|
|||
pub use sub4::inner::*;
|
||||
|
||||
// @has 'foo/index.html'
|
||||
// @has - '//div[@class="item-right docblock-short"]' '1'
|
||||
// @!has - '//div[@class="item-right docblock-short"]' '0'
|
||||
// @has - '//div[@class="desc docblock-short"]' '1'
|
||||
// @!has - '//div[@class="desc docblock-short"]' '0'
|
||||
fn main() { assert_eq!(X, 1); }
|
||||
|
|
|
@ -1,17 +1,17 @@
|
|||
// @has 'glob_shadowing/index.html'
|
||||
// @count - '//div[@class="item-left"]' 6
|
||||
// @!has - '//div[@class="item-right docblock-short"]' 'sub1::describe'
|
||||
// @has - '//div[@class="item-right docblock-short"]' 'sub2::describe'
|
||||
// @count - '//div[@class="item-name"]' 6
|
||||
// @!has - '//div[@class="desc docblock-short"]' 'sub1::describe'
|
||||
// @has - '//div[@class="desc docblock-short"]' 'sub2::describe'
|
||||
|
||||
// @!has - '//div[@class="item-right docblock-short"]' 'sub1::describe2'
|
||||
// @!has - '//div[@class="desc docblock-short"]' 'sub1::describe2'
|
||||
|
||||
// @!has - '//div[@class="item-right docblock-short"]' 'sub1::prelude'
|
||||
// @has - '//div[@class="item-right docblock-short"]' 'mod::prelude'
|
||||
// @!has - '//div[@class="desc docblock-short"]' 'sub1::prelude'
|
||||
// @has - '//div[@class="desc docblock-short"]' 'mod::prelude'
|
||||
|
||||
// @has - '//div[@class="item-right docblock-short"]' 'sub1::Foo (struct)'
|
||||
// @has - '//div[@class="item-right docblock-short"]' 'mod::Foo (function)'
|
||||
// @has - '//div[@class="desc docblock-short"]' 'sub1::Foo (struct)'
|
||||
// @has - '//div[@class="desc docblock-short"]' 'mod::Foo (function)'
|
||||
|
||||
// @has - '//div[@class="item-right docblock-short"]' 'sub4::inner::X'
|
||||
// @has - '//div[@class="desc docblock-short"]' 'sub4::inner::X'
|
||||
|
||||
// @has 'glob_shadowing/fn.describe.html'
|
||||
// @has - '//div[@class="docblock"]' 'sub2::describe'
|
||||
|
|
|
@ -6,9 +6,9 @@
|
|||
|
||||
extern crate macros;
|
||||
|
||||
// @has foo/index.html '//*[@class="item-left"]/span[@class="stab deprecated"]' \
|
||||
// @has foo/index.html '//*[@class="item-name"]/span[@class="stab deprecated"]' \
|
||||
// Deprecated
|
||||
// @has - '//*[@class="item-left"]/span[@class="stab unstable"]' \
|
||||
// @has - '//*[@class="item-name"]/span[@class="stab unstable"]' \
|
||||
// Experimental
|
||||
|
||||
// @has foo/macro.my_macro.html
|
||||
|
|
|
@ -3,12 +3,12 @@
|
|||
// Check that the unstable marker is not added for "rustc_private".
|
||||
|
||||
// @!matches internal/index.html \
|
||||
// '//*[@class="item-right docblock-short"]/span[@class="stab unstable"]' \
|
||||
// '//*[@class="desc docblock-short"]/span[@class="stab unstable"]' \
|
||||
// ''
|
||||
// @!matches internal/index.html \
|
||||
// '//*[@class="item-right docblock-short"]/span[@class="stab internal"]' \
|
||||
// '//*[@class="desc docblock-short"]/span[@class="stab internal"]' \
|
||||
// ''
|
||||
// @matches - '//*[@class="item-right docblock-short"]' 'Docs'
|
||||
// @matches - '//*[@class="desc docblock-short"]' 'Docs'
|
||||
|
||||
// @!has internal/struct.S.html '//*[@class="stab unstable"]' ''
|
||||
// @!has internal/struct.S.html '//*[@class="stab internal"]' ''
|
||||
|
|
tests/rustdoc/issue-107995.rs (new file, 28 lines)
|
@ -0,0 +1,28 @@
|
|||
// Regression test for <https://github.com/rust-lang/rust/issues/107995>.
|
||||
|
||||
#![crate_name = "foo"]
|
||||
|
||||
// @has 'foo/fn.foo.html'
|
||||
// @has - '//*[@class="docblock"]//a[@href="fn.bar.html"]' 'bar`'
|
||||
/// A foo, see also [ bar`]
|
||||
pub fn foo() {}
|
||||
|
||||
// @has 'foo/fn.bar.html'
|
||||
// @has - '//*[@class="docblock"]' 'line Path line'
|
||||
// @has - '//*[@class="docblock"]//a[@href="struct.Path.html"]' 'Path'
|
||||
#[doc = "line ["]
|
||||
#[doc = "Path"]
|
||||
#[doc = "] line"]
|
||||
pub fn bar() {}
|
||||
|
||||
// @has 'foo/fn.another.html'
|
||||
// @has - '//*[@class="docblock"]//a[@href="struct.Path.html"]' 'Path'
|
||||
/// [ `Path`]
|
||||
pub fn another() {}
|
||||
|
||||
// @has 'foo/fn.last.html'
|
||||
// @has - '//*[@class="docblock"]//a[@href="struct.Path.html"]' 'Path'
|
||||
/// [ Path`]
|
||||
pub fn last() {}
|
||||
|
||||
pub struct Path;
|
|
@ -2,11 +2,11 @@
|
|||
#![doc(issue_tracker_base_url = "https://issue_url/")]
|
||||
#![unstable(feature = "test", issue = "32374")]
|
||||
|
||||
// @matches issue_32374/index.html '//*[@class="item-left"]/span[@class="stab deprecated"]' \
|
||||
// @matches issue_32374/index.html '//*[@class="item-name"]/span[@class="stab deprecated"]' \
|
||||
// 'Deprecated'
|
||||
// @matches issue_32374/index.html '//*[@class="item-left"]/span[@class="stab unstable"]' \
|
||||
// @matches issue_32374/index.html '//*[@class="item-name"]/span[@class="stab unstable"]' \
|
||||
// 'Experimental'
|
||||
// @matches issue_32374/index.html '//*[@class="item-right docblock-short"]/text()' 'Docs'
|
||||
// @matches issue_32374/index.html '//*[@class="desc docblock-short"]/text()' 'Docs'
|
||||
|
||||
// @has issue_32374/struct.T.html '//*[@class="stab deprecated"]/span' '👎'
|
||||
// @has issue_32374/struct.T.html '//*[@class="stab deprecated"]/span' \
|
||||
|
|
|
@ -1,3 +1,3 @@
|
|||
// @has 'issue_46377/index.html' '//*[@class="item-right docblock-short"]' 'Check out this struct!'
|
||||
// @has 'issue_46377/index.html' '//*[@class="desc docblock-short"]' 'Check out this struct!'
|
||||
/// # Check out this struct!
|
||||
pub struct SomeStruct;
|
||||
|
|
|
@ -29,8 +29,8 @@ pub mod subone {
|
|||
// @has - '//section[@id="main-content"]/details/div[@class="docblock"]//a[@href="../fn.foo.html"]' 'foo'
|
||||
// @has - '//section[@id="main-content"]/details/div[@class="docblock"]//a[@href="../fn.bar.html"]' 'bar'
|
||||
// Though there should be such links later
|
||||
// @has - '//section[@id="main-content"]/div[@class="item-table"]//div[@class="item-left"]/a[@class="fn"][@href="fn.foo.html"]' 'foo'
|
||||
// @has - '//section[@id="main-content"]/div[@class="item-table"]//div[@class="item-left"]/a[@class="fn"][@href="fn.bar.html"]' 'bar'
|
||||
// @has - '//section[@id="main-content"]/ul[@class="item-table"]//div[@class="item-name"]/a[@class="fn"][@href="fn.foo.html"]' 'foo'
|
||||
// @has - '//section[@id="main-content"]/ul[@class="item-table"]//div[@class="item-name"]/a[@class="fn"][@href="fn.bar.html"]' 'bar'
|
||||
/// See either [foo] or [bar].
|
||||
pub mod subtwo {
|
||||
|
||||
|
@ -68,8 +68,8 @@ pub mod subthree {
|
|||
// Next we go *deeper* - In order to ensure it's not just "this or parent"
|
||||
// we test `crate::` and a `super::super::...` chain
|
||||
// @has issue_55364/subfour/subfive/subsix/subseven/subeight/index.html
|
||||
// @has - '//section[@id="main-content"]/div[@class="item-table"]//div[@class="item-right docblock-short"]//a[@href="../../../../../subone/fn.foo.html"]' 'other foo'
|
||||
// @has - '//section[@id="main-content"]/div[@class="item-table"]//div[@class="item-right docblock-short"]//a[@href="../../../../../subtwo/fn.bar.html"]' 'other bar'
|
||||
// @has - '//section[@id="main-content"]/ul[@class="item-table"]//div[@class="desc docblock-short"]//a[@href="../../../../../subone/fn.foo.html"]' 'other foo'
|
||||
// @has - '//section[@id="main-content"]/ul[@class="item-table"]//div[@class="desc docblock-short"]//a[@href="../../../../../subtwo/fn.bar.html"]' 'other bar'
|
||||
pub mod subfour {
|
||||
pub mod subfive {
|
||||
pub mod subsix {
|
||||
|
|
|
@ -1,2 +1,2 @@
|
|||
// @has issue_95873/index.html "//*[@class='item-left']" "pub use ::std as x;"
|
||||
// @has issue_95873/index.html "//*[@class='item-name']" "pub use ::std as x;"
|
||||
pub use ::std as x;
|
||||
|
|
|
@ -8,13 +8,13 @@ extern crate reexport_check;
|
|||
#[allow(deprecated, deprecated_in_future)]
|
||||
pub use std::i32;
|
||||
// @!has 'foo/index.html' '//code' 'pub use self::string::String;'
|
||||
// @has 'foo/index.html' '//div[@class="item-left"]' 'String'
|
||||
// @has 'foo/index.html' '//div[@class="item-name"]' 'String'
|
||||
pub use std::string::String;
|
||||
|
||||
// i32 is deprecated, String is not
|
||||
// @count 'foo/index.html' '//span[@class="stab deprecated"]' 1
|
||||
|
||||
// @has 'foo/index.html' '//div[@class="item-right docblock-short"]' 'Docs in original'
|
||||
// @has 'foo/index.html' '//div[@class="desc docblock-short"]' 'Docs in original'
|
||||
// this is a no-op, but shows what happens if there's an attribute that isn't a doc-comment
|
||||
#[doc(inline)]
|
||||
pub use reexport_check::S;
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
#![crate_name = "foo"]
|
||||
|
||||
// @count foo/index.html '//*[@class="item-right docblock-short"]' 0
|
||||
// @count foo/index.html '//*[@class="desc docblock-short"]' 0
|
||||
|
||||
/// ```
|
||||
/// let x = 12;
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
#![crate_name = "foo"]
|
||||
|
||||
// @has foo/index.html '//*[@class="item-right docblock-short"]' 'fooo'
|
||||
// @!has foo/index.html '//*[@class="item-right docblock-short"]/h1' 'fooo'
|
||||
// @has foo/index.html '//*[@class="desc docblock-short"]' 'fooo'
|
||||
// @!has foo/index.html '//*[@class="desc docblock-short"]/h1' 'fooo'
|
||||
// @has foo/fn.foo.html '//h2[@id="fooo"]/a[@href="#fooo"]' 'fooo'
|
||||
|
||||
/// # fooo
|
||||
|
@ -9,8 +9,8 @@
|
|||
/// foo
|
||||
pub fn foo() {}
|
||||
|
||||
// @has foo/index.html '//*[@class="item-right docblock-short"]' 'mooood'
|
||||
// @!has foo/index.html '//*[@class="item-right docblock-short"]/h2' 'mooood'
|
||||
// @has foo/index.html '//*[@class="desc docblock-short"]' 'mooood'
|
||||
// @!has foo/index.html '//*[@class="desc docblock-short"]/h2' 'mooood'
|
||||
// @has foo/foo/index.html '//h3[@id="mooood"]/a[@href="#mooood"]' 'mooood'
|
||||
|
||||
/// ## mooood
|
||||
|
@ -18,7 +18,7 @@ pub fn foo() {}
|
|||
/// foo mod
|
||||
pub mod foo {}
|
||||
|
||||
// @has foo/index.html '//*[@class="item-right docblock-short"]/a[@href=\
|
||||
// @has foo/index.html '//*[@class="desc docblock-short"]/a[@href=\
|
||||
// "https://nougat.world"]/code' 'nougat'
|
||||
|
||||
/// [`nougat`](https://nougat.world)
|
||||
|
|
tests/ui/associated-inherent-types/ambiguity.rs (new file, 16 lines)
|
@ -0,0 +1,16 @@
|
|||
#![feature(inherent_associated_types)]
|
||||
#![allow(incomplete_features)]
|
||||
|
||||
struct Wrapper<T>(T);
|
||||
|
||||
impl Wrapper<i32> {
|
||||
type Foo = i32;
|
||||
}
|
||||
|
||||
impl Wrapper<()> {
|
||||
type Foo = ();
|
||||
}
|
||||
|
||||
fn main() {
|
||||
let _: Wrapper<_>::Foo = (); //~ ERROR multiple applicable items in scope
|
||||
}
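A sketch of how the ambiguity can be avoided, assuming the same unstable feature; naming the generic argument leaves exactly one applicable impl:
#![feature(inherent_associated_types)]
#![allow(incomplete_features)]

struct Wrapper<T>(T);

impl Wrapper<i32> { type Foo = i32; }
impl Wrapper<()> { type Foo = (); }

fn main() {
    let _: Wrapper<i32>::Foo = 0; // unambiguously the impl for Wrapper<i32>
    let _: Wrapper<()>::Foo = (); // unambiguously the impl for Wrapper<()>
}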
|
tests/ui/associated-inherent-types/ambiguity.stderr (new file, 20 lines)
|
@ -0,0 +1,20 @@
|
|||
error[E0034]: multiple applicable items in scope
|
||||
--> $DIR/ambiguity.rs:15:24
|
||||
|
|
||||
LL | let _: Wrapper<_>::Foo = ();
|
||||
| ^^^ multiple `Foo` found
|
||||
|
|
||||
note: candidate #1 is defined in an impl for the type `Wrapper<i32>`
|
||||
--> $DIR/ambiguity.rs:7:5
|
||||
|
|
||||
LL | type Foo = i32;
|
||||
| ^^^^^^^^
|
||||
note: candidate #2 is defined in an impl for the type `Wrapper<()>`
|
||||
--> $DIR/ambiguity.rs:11:5
|
||||
|
|
||||
LL | type Foo = ();
|
||||
| ^^^^^^^^
|
||||
|
||||
error: aborting due to previous error
|
||||
|
||||
For more information about this error, try `rustc --explain E0034`.
|
tests/ui/associated-inherent-types/bugs/ice-substitution.rs (new file, 23 lines)
|
@ -0,0 +1,23 @@
|
|||
// known-bug: unknown
|
||||
// failure-status: 101
|
||||
// normalize-stderr-test "note: .*\n\n" -> ""
|
||||
// normalize-stderr-test "thread 'rustc' panicked.*\n" -> ""
|
||||
// rustc-env:RUST_BACKTRACE=0
|
||||
|
||||
// FIXME: I presume a type variable that couldn't be solved by `resolve_vars_if_possible`
|
||||
// escapes the InferCtxt snapshot.
|
||||
|
||||
#![feature(inherent_associated_types)]
|
||||
#![allow(incomplete_features)]
|
||||
|
||||
struct Cont<T>(T);
|
||||
|
||||
impl<T: Copy> Cont<T> {
|
||||
type Out = Vec<T>;
|
||||
}
|
||||
|
||||
pub fn weird<T: Copy>(x: T) {
|
||||
let _: Cont<_>::Out = vec![true];
|
||||
}
|
||||
|
||||
fn main() {}
|
|
@ -0,0 +1,6 @@
|
|||
error: the compiler unexpectedly panicked. this is a bug.
|
||||
|
||||
query stack during panic:
|
||||
#0 [typeck] type-checking `weird`
|
||||
#1 [typeck_item_bodies] type-checking all item bodies
|
||||
end of query stack
|
tests/ui/associated-inherent-types/bugs/inference-fail.rs (new file, 15 lines)
|
@ -0,0 +1,15 @@
|
|||
// known-bug: unknown
|
||||
|
||||
#![feature(inherent_associated_types)]
|
||||
#![allow(incomplete_features)]
|
||||
|
||||
struct S<T>(T);
|
||||
|
||||
impl S<()> {
|
||||
type P = i128;
|
||||
}
|
||||
|
||||
fn main() {
|
||||
// We fail to infer `_ == ()` here.
|
||||
let _: S<_>::P;
|
||||
}
|
|
@ -0,0 +1,9 @@
|
|||
error[E0282]: type annotations needed
|
||||
--> $DIR/inference-fail.rs:14:14
|
||||
|
|
||||
LL | let _: S<_>::P;
|
||||
| ^ cannot infer type
|
||||
|
||||
error: aborting due to previous error
|
||||
|
||||
For more information about this error, try `rustc --explain E0282`.
|
tests/ui/associated-inherent-types/bugs/lack-of-regionck.rs (new file, 19 lines)
|
@ -0,0 +1,19 @@
|
|||
// known-bug: unknown
|
||||
// check-pass
|
||||
|
||||
// We currently don't region-check inherent associated type projections at all.
|
||||
|
||||
#![feature(inherent_associated_types)]
|
||||
#![allow(incomplete_features, dead_code)]
|
||||
|
||||
struct S<T>(T);
|
||||
|
||||
impl S<&'static ()> {
|
||||
type T = ();
|
||||
}
|
||||
|
||||
fn usr<'a>() {
|
||||
let _: S::<&'a ()>::T; // this should *fail* but it doesn't!
|
||||
}
|
||||
|
||||
fn main() {}
|
|
@ -0,0 +1,41 @@
|
|||
// check-pass
|
||||
|
||||
#![feature(inherent_associated_types)]
|
||||
#![allow(incomplete_features)]
|
||||
|
||||
// Check that inherent associated types are dispatched on the concrete Self type.
|
||||
|
||||
struct Select<T>(T);
|
||||
|
||||
impl Select<u8> {
|
||||
type Projection = ();
|
||||
}
|
||||
|
||||
impl Select<String> {
|
||||
type Projection = bool;
|
||||
}
|
||||
|
||||
struct Choose<T>(T);
|
||||
struct NonCopy;
|
||||
|
||||
impl<T: Copy> Choose<T> {
|
||||
type Result = Vec<T>;
|
||||
}
|
||||
|
||||
impl Choose<NonCopy> {
|
||||
type Result = ();
|
||||
}
|
||||
|
||||
fn main() {
|
||||
let _: Select<String>::Projection = false;
|
||||
let _: Select<u8>::Projection = ();
|
||||
|
||||
let _: Choose<NonCopy>::Result = ();
|
||||
let _: Choose<bool>::Result = vec![true];
|
||||
}
|
||||
|
||||
// Test if we use the correct `ParamEnv` when proving obligations.
|
||||
|
||||
pub fn parameterized<T: Copy>(x: T) {
|
||||
let _: Choose<T>::Result = vec![x];
|
||||
}
|
Some files were not shown because too many files have changed in this diff.