Rollup merge of #77739 - est31:remove_unused_code, r=petrochenkov,varkor
Remove unused code

Rustc has a built-in lint for detecting unused code inside a crate, but when an item is marked `pub`, it is never flagged as unused, even if nothing in the entire workspace uses it. Therefore, I've built [warnalyzer](https://github.com/est31/warnalyzer) to detect unused items in a cross-crate setting; this PR removes the unused items it reported. Closes https://github.com/est31/warnalyzer/issues/2
Commit 022d20759b
46 changed files with 8 additions and 684 deletions
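To make the limitation described above concrete, here is a minimal sketch of the cross-crate case (the crate layout and item names are hypothetical, not taken from this PR): the built-in `dead_code` lint fires for an unused private item, but it stays silent for a `pub` item that nothing in the workspace uses, because an external crate could in principle still call it. That is the gap warnalyzer is meant to cover.

```rust
// Hypothetical library crate `mylib` (src/lib.rs) inside a workspace.

// Private and never called anywhere: rustc warns via the `dead_code` lint.
fn private_helper() -> u32 {
    41
}

// `pub` and never called by `mylib` itself nor by any other workspace crate:
// rustc stays silent, since some external crate could still use it.
// A cross-crate tool such as warnalyzer is needed to flag this.
pub fn public_helper() -> u32 {
    42
}

// An item that genuinely is used elsewhere in the workspace.
pub fn used_entry_point() -> u32 {
    1
}
```

Running `cargo check` on such a library warns only about the private helper; detecting the unused `pub` item requires whole-workspace analysis.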
@@ -167,13 +167,6 @@ pub enum GenericArgs {
 }
 
 impl GenericArgs {
-    pub fn is_parenthesized(&self) -> bool {
-        match *self {
-            Parenthesized(..) => true,
-            _ => false,
-        }
-    }
-
     pub fn is_angle_bracketed(&self) -> bool {
         match *self {
             AngleBracketed(..) => true,
@@ -857,13 +850,6 @@ impl BinOpKind {
         }
     }
 
-    pub fn is_shift(&self) -> bool {
-        match *self {
-            BinOpKind::Shl | BinOpKind::Shr => true,
-            _ => false,
-        }
-    }
-
     pub fn is_comparison(&self) -> bool {
         use BinOpKind::*;
         // Note for developers: please keep this as is;
@@ -873,11 +859,6 @@ impl BinOpKind {
             And | Or | Add | Sub | Mul | Div | Rem | BitXor | BitAnd | BitOr | Shl | Shr => false,
         }
     }
-
-    /// Returns `true` if the binary operator takes its arguments by value
-    pub fn is_by_value(&self) -> bool {
-        !self.is_comparison()
-    }
 }
 
 pub type BinOp = Spanned<BinOpKind>;
@@ -896,14 +877,6 @@ pub enum UnOp {
 }
 
 impl UnOp {
-    /// Returns `true` if the unary operator takes its argument by value
-    pub fn is_by_value(u: UnOp) -> bool {
-        match u {
-            UnOp::Neg | UnOp::Not => true,
-            _ => false,
-        }
-    }
-
     pub fn to_string(op: UnOp) -> &'static str {
         match op {
             UnOp::Deref => "*",
@@ -1753,13 +1726,6 @@ impl IntTy {
         }
     }
 
-    pub fn val_to_string(&self, val: i128) -> String {
-        // Cast to a `u128` so we can correctly print `INT128_MIN`. All integral types
-        // are parsed as `u128`, so we wouldn't want to print an extra negative
-        // sign.
-        format!("{}{}", val as u128, self.name_str())
-    }
-
     pub fn bit_width(&self) -> Option<u64> {
         Some(match *self {
             IntTy::Isize => return None,
@@ -1818,10 +1784,6 @@ impl UintTy {
         }
     }
 
-    pub fn val_to_string(&self, val: u128) -> String {
-        format!("{}{}", val, self.name_str())
-    }
-
     pub fn bit_width(&self) -> Option<u64> {
         Some(match *self {
             UintTy::Usize => return None,
@@ -101,11 +101,6 @@ impl NestedMetaItem {
         self.meta_item().is_some()
     }
 
-    /// Returns `true` if the variant is `Literal`.
-    pub fn is_literal(&self) -> bool {
-        self.literal().is_some()
-    }
-
     /// Returns `true` if `self` is a `MetaItem` and the meta item is a word.
     pub fn is_word(&self) -> bool {
         self.meta_item().map_or(false, |meta_item| meta_item.is_word())
@@ -232,10 +227,6 @@ impl MetaItem {
     pub fn is_value_str(&self) -> bool {
         self.value_str().is_some()
     }
-
-    pub fn is_meta_item_list(&self) -> bool {
-        self.meta_item_list().is_some()
-    }
 }
 
 impl AttrItem {
@@ -54,16 +54,6 @@ pub enum DelimToken {
     NoDelim,
 }
 
-impl DelimToken {
-    pub fn len(self) -> usize {
-        if self == NoDelim { 0 } else { 1 }
-    }
-
-    pub fn is_empty(self) -> bool {
-        self == NoDelim
-    }
-}
-
 #[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
 pub enum LitKind {
     Bool, // AST only, must never appear in a `Token`
@@ -295,12 +295,6 @@ impl TokenStream {
                 .collect(),
         ))
     }
-
-    pub fn map<F: FnMut(TokenTree) -> TokenTree>(self, mut f: F) -> TokenStream {
-        TokenStream(Lrc::new(
-            self.0.iter().map(|(tree, is_joint)| (f(tree.clone()), *is_joint)).collect(),
-        ))
-    }
 }
 
 // 99.5%+ of the time we have 1 or 2 elements in this vector.
@@ -231,7 +231,6 @@ impl AssocOp {
     }
 }
 
-pub const PREC_RESET: i8 = -100;
 pub const PREC_CLOSURE: i8 = -40;
 pub const PREC_JUMP: i8 = -30;
 pub const PREC_RANGE: i8 = -10;
@@ -118,11 +118,6 @@ pub fn SetUnnamedAddress(global: &'a Value, unnamed: UnnamedAddr) {
     }
 }
 
-pub fn set_thread_local(global: &'a Value, is_thread_local: bool) {
-    unsafe {
-        LLVMSetThreadLocal(global, is_thread_local as Bool);
-    }
-}
 pub fn set_thread_local_mode(global: &'a Value, mode: ThreadLocalMode) {
     unsafe {
         LLVMSetThreadLocalMode(global, mode);
@@ -1519,8 +1519,6 @@ fn start_executing_work<B: ExtraBackendMethods>(
     }
 }
 
-pub const CODEGEN_WORKER_ID: usize = usize::MAX;
-
 /// `FatalError` is explicitly not `Send`.
 #[must_use]
 pub struct WorkerFatalError;
@@ -479,8 +479,6 @@ fn get_argc_argv<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
     }
 }
 
-pub const CODEGEN_WORKER_ID: usize = usize::MAX;
-
 pub fn codegen_crate<B: ExtraBackendMethods>(
     backend: B,
     tcx: TyCtxt<'tcx>,
@@ -14,12 +14,6 @@ pub struct WorkQueue<T: Idx> {
 }
 
 impl<T: Idx> WorkQueue<T> {
-    /// Creates a new work queue with all the elements from (0..len).
-    #[inline]
-    pub fn with_all(len: usize) -> Self {
-        WorkQueue { deque: (0..len).map(T::new).collect(), set: BitSet::new_filled(len) }
-    }
-
     /// Creates a new work queue that starts empty, where elements range from (0..len).
     #[inline]
     pub fn with_none(len: usize) -> Self {
@@ -121,11 +121,6 @@ impl Diagnostic {
         self.level == Level::Cancelled
     }
 
-    /// Set the sorting span.
-    pub fn set_sort_span(&mut self, sp: Span) {
-        self.sort_span = sp;
-    }
-
     /// Adds a span/label to be included in the resulting snippet.
     ///
     /// This is pushed onto the [`MultiSpan`] that was created when the diagnostic
@@ -535,14 +530,6 @@ impl Diagnostic {
         &self.message
     }
 
-    /// Used by a lint. Copies over all details *but* the "main
-    /// message".
-    pub fn copy_details_not_message(&mut self, from: &Diagnostic) {
-        self.span = from.span.clone();
-        self.code = from.code.clone();
-        self.children.extend(from.children.iter().cloned())
-    }
-
     /// Convenience function for internal use, clients should use one of the
     /// public methods above.
     pub fn sub(
@@ -510,8 +510,6 @@ impl Emitter for SilentEmitter {
     fn emit_diagnostic(&mut self, _: &Diagnostic) {}
 }
 
-/// Maximum number of lines we will print for each error; arbitrary.
-pub const MAX_HIGHLIGHT_LINES: usize = 6;
 /// Maximum number of lines we will print for a multiline suggestion; arbitrary.
 ///
 /// This should be replaced with a more involved mechanism to output multiline suggestions that
@@ -148,17 +148,6 @@ impl Annotatable {
         }
     }
 
-    pub fn map_item_or<F, G>(self, mut f: F, mut or: G) -> Annotatable
-    where
-        F: FnMut(P<ast::Item>) -> P<ast::Item>,
-        G: FnMut(Annotatable) -> Annotatable,
-    {
-        match self {
-            Annotatable::Item(i) => Annotatable::Item(f(i)),
-            _ => or(self),
-        }
-    }
-
     pub fn expect_trait_item(self) -> P<ast::AssocItem> {
         match self {
             Annotatable::TraitItem(i) => i,
@@ -1052,9 +1041,6 @@ impl<'a> ExtCtxt<'a> {
             .chain(components.iter().map(|&s| Ident::with_dummy_span(s)))
             .collect()
     }
-    pub fn name_of(&self, st: &str) -> Symbol {
-        Symbol::intern(st)
-    }
 
     pub fn check_unused_macros(&mut self) {
         self.resolver.check_unused_macros();
@@ -139,24 +139,6 @@ impl<'a> ExtCtxt<'a> {
         ast::Lifetime { id: ast::DUMMY_NODE_ID, ident: ident.with_span_pos(span) }
     }
 
-    pub fn lifetime_def(
-        &self,
-        span: Span,
-        ident: Ident,
-        attrs: Vec<ast::Attribute>,
-        bounds: ast::GenericBounds,
-    ) -> ast::GenericParam {
-        let lifetime = self.lifetime(span, ident);
-        ast::GenericParam {
-            ident: lifetime.ident,
-            id: lifetime.id,
-            attrs: attrs.into(),
-            bounds,
-            kind: ast::GenericParamKind::Lifetime,
-            is_placeholder: false,
-        }
-    }
-
     pub fn stmt_expr(&self, expr: P<ast::Expr>) -> ast::Stmt {
         ast::Stmt {
             id: ast::DUMMY_NODE_ID,
@@ -465,24 +447,6 @@ impl<'a> ExtCtxt<'a> {
         self.pat_tuple_struct(span, path, vec![pat])
     }
 
-    pub fn pat_none(&self, span: Span) -> P<ast::Pat> {
-        let some = self.std_path(&[sym::option, sym::Option, sym::None]);
-        let path = self.path_global(span, some);
-        self.pat_path(span, path)
-    }
-
-    pub fn pat_ok(&self, span: Span, pat: P<ast::Pat>) -> P<ast::Pat> {
-        let some = self.std_path(&[sym::result, sym::Result, sym::Ok]);
-        let path = self.path_global(span, some);
-        self.pat_tuple_struct(span, path, vec![pat])
-    }
-
-    pub fn pat_err(&self, span: Span, pat: P<ast::Pat>) -> P<ast::Pat> {
-        let some = self.std_path(&[sym::result, sym::Result, sym::Err]);
-        let path = self.path_global(span, some);
-        self.pat_tuple_struct(span, path, vec![pat])
-    }
-
     pub fn arm(&self, span: Span, pat: P<ast::Pat>, expr: P<ast::Expr>) -> ast::Arm {
         ast::Arm {
             attrs: vec![],
@@ -514,26 +478,6 @@ impl<'a> ExtCtxt<'a> {
         self.expr(span, ast::ExprKind::If(cond, self.block_expr(then), els))
     }
 
-    pub fn lambda_fn_decl(
-        &self,
-        span: Span,
-        fn_decl: P<ast::FnDecl>,
-        body: P<ast::Expr>,
-        fn_decl_span: Span,
-    ) -> P<ast::Expr> {
-        self.expr(
-            span,
-            ast::ExprKind::Closure(
-                ast::CaptureBy::Ref,
-                ast::Async::No,
-                ast::Movability::Movable,
-                fn_decl,
-                body,
-                fn_decl_span,
-            ),
-        )
-    }
-
     pub fn lambda(&self, span: Span, ids: Vec<Ident>, body: P<ast::Expr>) -> P<ast::Expr> {
         let fn_decl = self.fn_decl(
             ids.iter().map(|id| self.param(span, *id, self.ty(span, ast::TyKind::Infer))).collect(),
@@ -610,47 +554,6 @@ impl<'a> ExtCtxt<'a> {
         })
     }
 
-    pub fn variant(&self, span: Span, ident: Ident, tys: Vec<P<ast::Ty>>) -> ast::Variant {
-        let vis_span = span.shrink_to_lo();
-        let fields: Vec<_> = tys
-            .into_iter()
-            .map(|ty| ast::StructField {
-                span: ty.span,
-                ty,
-                ident: None,
-                vis: ast::Visibility {
-                    span: vis_span,
-                    kind: ast::VisibilityKind::Inherited,
-                    tokens: None,
-                },
-                attrs: Vec::new(),
-                id: ast::DUMMY_NODE_ID,
-                is_placeholder: false,
-            })
-            .collect();
-
-        let vdata = if fields.is_empty() {
-            ast::VariantData::Unit(ast::DUMMY_NODE_ID)
-        } else {
-            ast::VariantData::Tuple(fields, ast::DUMMY_NODE_ID)
-        };
-
-        ast::Variant {
-            attrs: Vec::new(),
-            data: vdata,
-            disr_expr: None,
-            id: ast::DUMMY_NODE_ID,
-            ident,
-            vis: ast::Visibility {
-                span: vis_span,
-                kind: ast::VisibilityKind::Inherited,
-                tokens: None,
-            },
-            span,
-            is_placeholder: false,
-        }
-    }
-
     pub fn item_static(
         &self,
         span: Span,
@@ -75,33 +75,6 @@ pub fn link_or_copy<P: AsRef<Path>, Q: AsRef<Path>>(p: P, q: Q) -> io::Result<Li
     }
 }
 
-#[derive(Debug)]
-pub enum RenameOrCopyRemove {
-    Rename,
-    CopyRemove,
-}
-
-/// Rename `p` into `q`, preferring to use `rename` if possible.
-/// If `rename` fails (rename may fail for reasons such as crossing
-/// filesystem), fallback to copy & remove
-pub fn rename_or_copy_remove<P: AsRef<Path>, Q: AsRef<Path>>(
-    p: P,
-    q: Q,
-) -> io::Result<RenameOrCopyRemove> {
-    let p = p.as_ref();
-    let q = q.as_ref();
-    match fs::rename(p, q) {
-        Ok(()) => Ok(RenameOrCopyRemove::Rename),
-        Err(_) => match fs::copy(p, q) {
-            Ok(_) => {
-                fs::remove_file(p)?;
-                Ok(RenameOrCopyRemove::CopyRemove)
-            }
-            Err(e) => Err(e),
-        },
-    }
-}
-
 #[cfg(unix)]
 pub fn path_to_c_string(p: &Path) -> CString {
     use std::ffi::OsStr;
@@ -188,10 +188,6 @@ pub struct DefPath {
 }
 
 impl DefPath {
-    pub fn is_local(&self) -> bool {
-        self.krate == LOCAL_CRATE
-    }
-
     pub fn make<FN>(krate: CrateNum, start_index: DefIndex, mut get_key: FN) -> DefPath
     where
        FN: FnMut(DefIndex) -> DefKey,
@@ -3,7 +3,6 @@ use crate::def_id::DefId;
 crate use crate::hir_id::HirId;
 use crate::{itemlikevisit, LangItem};
 
-use rustc_ast::node_id::NodeMap;
 use rustc_ast::util::parser::ExprPrecedence;
 use rustc_ast::{self as ast, CrateSugar, LlvmAsmDialect};
 use rustc_ast::{AttrVec, Attribute, FloatTy, IntTy, Label, LitKind, StrStyle, UintTy};
@@ -306,10 +305,6 @@ impl GenericArgs<'_> {
         Self { args: &[], bindings: &[], parenthesized: false }
     }
 
-    pub fn is_empty(&self) -> bool {
-        self.args.is_empty() && self.bindings.is_empty() && !self.parenthesized
-    }
-
     pub fn inputs(&self) -> &[Ty<'_>] {
         if self.parenthesized {
             for arg in self.args {
@@ -467,23 +462,6 @@ impl Generics<'hir> {
         }
     }
 
-    pub fn own_counts(&self) -> GenericParamCount {
-        // We could cache this as a property of `GenericParamCount`, but
-        // the aim is to refactor this away entirely eventually and the
-        // presence of this method will be a constant reminder.
-        let mut own_counts: GenericParamCount = Default::default();
-
-        for param in self.params {
-            match param.kind {
-                GenericParamKind::Lifetime { .. } => own_counts.lifetimes += 1,
-                GenericParamKind::Type { .. } => own_counts.types += 1,
-                GenericParamKind::Const { .. } => own_counts.consts += 1,
-            };
-        }
-
-        own_counts
-    }
-
     pub fn get_named(&self, name: Symbol) -> Option<&GenericParam<'_>> {
         for param in self.params {
             if name == param.name.ident().name {
@@ -2679,8 +2657,6 @@ pub struct Upvar {
     pub span: Span,
 }
 
-pub type CaptureModeMap = NodeMap<CaptureBy>;
-
 // The TraitCandidate's import_ids is empty if the trait is defined in the same module, and
 // has length > 0 if the trait is found through an chain of imports, starting with the
 // import/use statement in the scope where the trait is used.
@@ -2766,32 +2742,4 @@ impl<'hir> Node<'hir> {
             _ => None,
         }
     }
-
-    pub fn hir_id(&self) -> Option<HirId> {
-        match self {
-            Node::Item(Item { hir_id, .. })
-            | Node::ForeignItem(ForeignItem { hir_id, .. })
-            | Node::TraitItem(TraitItem { hir_id, .. })
-            | Node::ImplItem(ImplItem { hir_id, .. })
-            | Node::Field(StructField { hir_id, .. })
-            | Node::AnonConst(AnonConst { hir_id, .. })
-            | Node::Expr(Expr { hir_id, .. })
-            | Node::Stmt(Stmt { hir_id, .. })
-            | Node::Ty(Ty { hir_id, .. })
-            | Node::Binding(Pat { hir_id, .. })
-            | Node::Pat(Pat { hir_id, .. })
-            | Node::Arm(Arm { hir_id, .. })
-            | Node::Block(Block { hir_id, .. })
-            | Node::Local(Local { hir_id, .. })
-            | Node::MacroDef(MacroDef { hir_id, .. })
-            | Node::Lifetime(Lifetime { hir_id, .. })
-            | Node::Param(Param { hir_id, .. })
-            | Node::GenericParam(GenericParam { hir_id, .. }) => Some(*hir_id),
-            Node::TraitRef(TraitRef { hir_ref_id, .. }) => Some(*hir_ref_id),
-            Node::PathSegment(PathSegment { hir_id, .. }) => *hir_id,
-            Node::Variant(Variant { id, .. }) => Some(*id),
-            Node::Ctor(variant) => variant.ctor_hir_id(),
-            Node::Crate(_) | Node::Visibility(_) => None,
-        }
-    }
 }
@@ -45,5 +45,3 @@ pub const CRATE_HIR_ID: HirId = HirId {
     owner: LocalDefId { local_def_index: CRATE_DEF_INDEX },
     local_id: ItemLocalId::from_u32(0),
 };
-
-pub const DUMMY_ITEM_LOCAL_ID: ItemLocalId = ItemLocalId::MAX;
@@ -58,25 +58,6 @@ impl<T: ExactSizeIterator> EnumerateAndAdjustIterator for T {
 }
 
 impl hir::Pat<'_> {
-    pub fn is_refutable(&self) -> bool {
-        match self.kind {
-            PatKind::Lit(_)
-            | PatKind::Range(..)
-            | PatKind::Path(hir::QPath::Resolved(Some(..), _) | hir::QPath::TypeRelative(..)) => {
-                true
-            }
-
-            PatKind::Path(hir::QPath::Resolved(_, ref path))
-            | PatKind::TupleStruct(hir::QPath::Resolved(_, ref path), ..)
-            | PatKind::Struct(hir::QPath::Resolved(_, ref path), ..) => match path.res {
-                Res::Def(DefKind::Variant, _) => true,
-                _ => false,
-            },
-            PatKind::Slice(..) => true,
-            _ => false,
-        }
-    }
-
     /// Call `f` on every "binding" in a pattern, e.g., on `a` in
     /// `match foo() { Some(a) => (), None => () }`
     pub fn each_binding(&self, mut f: impl FnMut(hir::BindingAnnotation, HirId, Span, Ident)) {
@@ -117,15 +98,6 @@ impl hir::Pat<'_> {
         })
     }
 
-    /// Checks if the pattern contains any patterns that bind something to
-    /// an ident or wildcard, e.g., `foo`, or `Foo(_)`, `foo @ Bar(..)`,
-    pub fn contains_bindings_or_wild(&self) -> bool {
-        self.satisfies(|p| match p.kind {
-            PatKind::Binding(..) | PatKind::Wild => true,
-            _ => false,
-        })
-    }
-
     /// Checks if the pattern satisfies the given predicate on some sub-pattern.
     fn satisfies(&self, pred: impl Fn(&hir::Pat<'_>) -> bool) -> bool {
         let mut satisfies = false;
@@ -44,9 +44,6 @@ pub trait PpAnn {
     fn nested(&self, _state: &mut State<'_>, _nested: Nested) {}
     fn pre(&self, _state: &mut State<'_>, _node: AnnNode<'_>) {}
     fn post(&self, _state: &mut State<'_>, _node: AnnNode<'_>) {}
-    fn try_fetch_item(&self, _: hir::HirId) -> Option<&hir::Item<'_>> {
-        None
-    }
 }
 
 pub struct NoAnn;
@@ -54,9 +51,6 @@ impl PpAnn for NoAnn {}
 pub const NO_ANN: &dyn PpAnn = &NoAnn;
 
 impl PpAnn for hir::Crate<'_> {
-    fn try_fetch_item(&self, item: hir::HirId) -> Option<&hir::Item<'_>> {
-        Some(self.item(item))
-    }
     fn nested(&self, state: &mut State<'_>, nested: Nested) {
         match nested {
             Nested::Item(id) => state.print_item(self.item(id.id)),
@@ -113,13 +113,6 @@ impl Default for RegionckMode {
 }
 
 impl RegionckMode {
-    pub fn suppressed(self) -> bool {
-        match self {
-            Self::Solve => false,
-            Self::Erase { suppress_errors } => suppress_errors,
-        }
-    }
-
     /// Indicates that the MIR borrowck will repeat these region
     /// checks, so we should ignore errors if NLL is (unconditionally)
     /// enabled.
@@ -420,15 +413,6 @@ pub enum SubregionOrigin<'tcx> {
 #[cfg(target_arch = "x86_64")]
 static_assert_size!(SubregionOrigin<'_>, 32);
 
-/// Places that type/region parameters can appear.
-#[derive(Clone, Copy, Debug)]
-pub enum ParameterOrigin {
-    Path, // foo::bar
-    MethodCall, // foo.bar() <-- parameters on impl providing bar()
-    OverloadedOperator, // a + b when overloaded
-    OverloadedDeref, // *a when overloaded
-}
-
 /// Times when we replace late-bound regions with variables:
 #[derive(Clone, Copy, Debug)]
 pub enum LateBoundRegionConversionTime {
@@ -508,21 +492,6 @@ pub enum NLLRegionVariableOrigin {
     },
 }
 
-impl NLLRegionVariableOrigin {
-    pub fn is_universal(self) -> bool {
-        match self {
-            NLLRegionVariableOrigin::FreeRegion => true,
-            NLLRegionVariableOrigin::Placeholder(..) => true,
-            NLLRegionVariableOrigin::Existential { .. } => false,
-            NLLRegionVariableOrigin::RootEmptyRegion => false,
-        }
-    }
-
-    pub fn is_existential(self) -> bool {
-        !self.is_universal()
-    }
-}
-
 // FIXME(eddyb) investigate overlap between this and `TyOrConstInferVar`.
 #[derive(Copy, Clone, Debug)]
 pub enum FixupError<'tcx> {
@@ -28,7 +28,6 @@ use rustc_data_structures::fx::FxHashMap;
 use rustc_middle::ty::error::TypeError;
 use rustc_middle::ty::fold::{TypeFoldable, TypeVisitor};
 use rustc_middle::ty::relate::{self, Relate, RelateResult, TypeRelation};
-use rustc_middle::ty::subst::GenericArg;
 use rustc_middle::ty::{self, InferConst, Ty, TyCtxt};
 use std::fmt::Debug;
 
@@ -119,12 +118,6 @@ pub trait TypeRelatingDelegate<'tcx> {
     fn forbid_inference_vars() -> bool;
 }
 
-#[derive(Clone, Debug)]
-struct ScopesAndKind<'tcx> {
-    scopes: Vec<BoundRegionScope<'tcx>>,
-    kind: GenericArg<'tcx>,
-}
-
 #[derive(Clone, Debug, Default)]
 struct BoundRegionScope<'tcx> {
     map: FxHashMap<ty::BoundRegion, ty::Region<'tcx>>,
@@ -59,9 +59,7 @@ pub type TraitObligation<'tcx> = Obligation<'tcx, ty::PolyTraitPredicate<'tcx>>;
 #[cfg(target_arch = "x86_64")]
 static_assert_size!(PredicateObligation<'_>, 32);
 
-pub type Obligations<'tcx, O> = Vec<Obligation<'tcx, O>>;
 pub type PredicateObligations<'tcx> = Vec<PredicateObligation<'tcx>>;
-pub type TraitObligations<'tcx> = Vec<TraitObligation<'tcx>>;
 
 pub type Selection<'tcx> = ImplSource<'tcx, PredicateObligation<'tcx>>;
 
@@ -711,10 +711,6 @@ impl<'tcx> LateContext<'tcx> {
         }
     }
 
-    pub fn current_lint_root(&self) -> hir::HirId {
-        self.last_node_with_lint_attrs
-    }
-
     /// Check if a `DefId`'s path matches the given absolute type path usage.
     ///
     /// Anonymous scopes such as `extern` imports are matched with `kw::Invalid`;
@@ -313,27 +313,6 @@ impl<'a, 'tcx> TyDecoder<'tcx> for DecodeContext<'a, 'tcx> {
         Ok(ty)
     }
 
-    fn cached_predicate_for_shorthand<F>(
-        &mut self,
-        shorthand: usize,
-        or_insert_with: F,
-    ) -> Result<ty::Predicate<'tcx>, Self::Error>
-    where
-        F: FnOnce(&mut Self) -> Result<ty::Predicate<'tcx>, Self::Error>,
-    {
-        let tcx = self.tcx();
-
-        let key = ty::CReaderCacheKey { cnum: self.cdata().cnum, pos: shorthand };
-
-        if let Some(&pred) = tcx.pred_rcache.borrow().get(&key) {
-            return Ok(pred);
-        }
-
-        let pred = or_insert_with(self)?;
-        tcx.pred_rcache.borrow_mut().insert(key, pred);
-        Ok(pred)
-    }
-
     fn with_position<F, R>(&mut self, pos: usize, f: F) -> R
     where
         F: FnOnce(&mut Self) -> R,
@@ -3,7 +3,6 @@
 //! which are available for use externally when compiled as a library.
 
 use rustc_data_structures::fx::FxHashMap;
-use rustc_hir::def_id::DefIdSet;
 use rustc_hir::HirId;
 use rustc_macros::HashStable;
 use std::fmt;
@@ -59,7 +58,3 @@ impl<Id: Hash + Eq + fmt::Debug> fmt::Debug for AccessLevels<Id> {
         fmt::Debug::fmt(&self.map, f)
     }
 }
-
-/// A set containing all exported definitions from external crates.
-/// The set does not contain any entries from local crates.
-pub type ExternalExports = DefIdSet;
@@ -56,15 +56,6 @@ impl<'tcx> ConstValue<'tcx> {
         }
     }
 
-    pub fn try_to_str_slice(&self) -> Option<&'tcx str> {
-        if let ConstValue::Slice { data, start, end } = *self {
-            std::str::from_utf8(data.inspect_with_uninit_and_ptr_outside_interpreter(start..end))
-                .ok()
-        } else {
-            None
-        }
-    }
-
     pub fn try_to_bits(&self, size: Size) -> Option<u128> {
         self.try_to_scalar()?.to_bits(size).ok()
     }
@@ -3,7 +3,7 @@
 //! [rustc dev guide]: https://rustc-dev-guide.rust-lang.org/mir/index.html
 
 use crate::mir::coverage::{CodeRegion, CoverageKind};
-use crate::mir::interpret::{Allocation, ConstValue, GlobalAlloc, Scalar};
+use crate::mir::interpret::{Allocation, GlobalAlloc, Scalar};
 use crate::mir::visit::MirVisitable;
 use crate::ty::adjustment::PointerCast;
 use crate::ty::codec::{TyDecoder, TyEncoder};
@@ -460,17 +460,6 @@ impl<'tcx> Body<'tcx> {
         }
     }
 
-    /// Checks if `sub` is a sub scope of `sup`
-    pub fn is_sub_scope(&self, mut sub: SourceScope, sup: SourceScope) -> bool {
-        while sub != sup {
-            match self.source_scopes[sub].parent_scope {
-                None => return false,
-                Some(p) => sub = p,
-            }
-        }
-        true
-    }
-
     /// Returns the return type; it always return first element from `local_decls` array.
     #[inline]
     pub fn return_ty(&self) -> Ty<'tcx> {
@@ -1978,45 +1967,6 @@ impl<'tcx> Operand<'tcx> {
         })
     }
 
-    /// Convenience helper to make a `Scalar` from the given `Operand`, assuming that `Operand`
-    /// wraps a constant literal value. Panics if this is not the case.
-    pub fn scalar_from_const(operand: &Operand<'tcx>) -> Scalar {
-        match operand {
-            Operand::Constant(constant) => match constant.literal.val.try_to_scalar() {
-                Some(scalar) => scalar,
-                _ => panic!("{:?}: Scalar value expected", constant.literal.val),
-            },
-            _ => panic!("{:?}: Constant expected", operand),
-        }
-    }
-
-    /// Convenience helper to make a literal-like constant from a given `&str` slice.
-    /// Since this is used to synthesize MIR, assumes `user_ty` is None.
-    pub fn const_from_str(tcx: TyCtxt<'tcx>, val: &str, span: Span) -> Operand<'tcx> {
-        let tcx = tcx;
-        let allocation = Allocation::from_byte_aligned_bytes(val.as_bytes());
-        let allocation = tcx.intern_const_alloc(allocation);
-        let const_val = ConstValue::Slice { data: allocation, start: 0, end: val.len() };
-        let ty = tcx.mk_imm_ref(tcx.lifetimes.re_erased, tcx.types.str_);
-        Operand::Constant(box Constant {
-            span,
-            user_ty: None,
-            literal: ty::Const::from_value(tcx, const_val, ty),
-        })
-    }
-
-    /// Convenience helper to make a `ConstValue` from the given `Operand`, assuming that `Operand`
-    /// wraps a constant value (such as a `&str` slice). Panics if this is not the case.
-    pub fn value_from_const(operand: &Operand<'tcx>) -> ConstValue<'tcx> {
-        match operand {
-            Operand::Constant(constant) => match constant.literal.val.try_to_value() {
-                Some(const_value) => const_value,
-                _ => panic!("{:?}: ConstValue expected", constant.literal.val),
-            },
-            _ => panic!("{:?}: Constant expected", operand),
-        }
-    }
-
     pub fn to_copy(&self) -> Self {
         match *self {
             Operand::Copy(_) | Operand::Constant(_) => self.clone(),
@@ -2413,10 +2363,6 @@ impl<'tcx> UserTypeProjections {
         self.contents.is_empty()
     }
 
-    pub fn from_projections(projs: impl Iterator<Item = (UserTypeProjection, Span)>) -> Self {
-        UserTypeProjections { contents: projs.collect() }
-    }
-
     pub fn projections_and_spans(
         &self,
     ) -> impl Iterator<Item = &(UserTypeProjection, Span)> + ExactSizeIterator {
@@ -413,18 +413,6 @@ pub struct CoverageInfo {
 /// For more information on why this is needed, consider looking
 /// at the docs for `WithOptConstParam` itself.
 impl<'tcx> TyCtxt<'tcx> {
-    #[inline]
-    pub fn mir_borrowck_opt_const_arg(
-        self,
-        def: ty::WithOptConstParam<LocalDefId>,
-    ) -> &'tcx BorrowCheckResult<'tcx> {
-        if let Some(param_did) = def.const_param_did {
-            self.mir_borrowck_const_arg((def.did, param_did))
-        } else {
-            self.mir_borrowck(def.did)
-        }
-    }
-
     #[inline]
     pub fn mir_const_qualif_opt_const_arg(
         self,
@@ -1186,16 +1186,6 @@ impl PlaceContext {
         )
     }
 
-    /// Returns `true` if this place context represents a storage live marker.
-    pub fn is_storage_live_marker(&self) -> bool {
-        matches!(self, PlaceContext::NonUse(NonUseContext::StorageLive))
-    }
-
-    /// Returns `true` if this place context represents a storage dead marker.
-    pub fn is_storage_dead_marker(&self) -> bool {
-        matches!(self, PlaceContext::NonUse(NonUseContext::StorageDead))
-    }
-
     /// Returns `true` if this place context represents a use that potentially changes the value.
     pub fn is_mutating_use(&self) -> bool {
         matches!(self, PlaceContext::MutatingUse(..))
@@ -182,14 +182,6 @@ pub trait TyDecoder<'tcx>: Decoder {
     where
         F: FnOnce(&mut Self) -> Result<Ty<'tcx>, Self::Error>;
 
-    fn cached_predicate_for_shorthand<F>(
-        &mut self,
-        shorthand: usize,
-        or_insert_with: F,
-    ) -> Result<ty::Predicate<'tcx>, Self::Error>
-    where
-        F: FnOnce(&mut Self) -> Result<ty::Predicate<'tcx>, Self::Error>;
-
     fn with_position<F, R>(&mut self, pos: usize, f: F) -> R
     where
         F: FnOnce(&mut Self) -> R;
@@ -534,10 +534,6 @@ impl<'tcx> TypeckResults<'tcx> {
         self.node_type(pat.hir_id)
     }
 
-    pub fn pat_ty_opt(&self, pat: &hir::Pat<'_>) -> Option<Ty<'tcx>> {
-        self.node_type_opt(pat.hir_id)
-    }
-
     // Returns the type of an expression as a monotype.
     //
     // NB (1): This is the PRE-ADJUSTMENT TYPE for the expression. That is, in
@@ -97,9 +97,6 @@ pub trait TypeFoldable<'tcx>: fmt::Debug + Clone {
     fn has_infer_types_or_consts(&self) -> bool {
         self.has_type_flags(TypeFlags::HAS_TY_INFER | TypeFlags::HAS_CT_INFER)
     }
-    fn has_infer_consts(&self) -> bool {
-        self.has_type_flags(TypeFlags::HAS_CT_INFER)
-    }
     fn needs_infer(&self) -> bool {
        self.has_type_flags(TypeFlags::NEEDS_INFER)
     }
@@ -113,9 +110,6 @@ pub trait TypeFoldable<'tcx>: fmt::Debug + Clone {
     fn needs_subst(&self) -> bool {
         self.has_type_flags(TypeFlags::NEEDS_SUBST)
     }
-    fn has_re_placeholders(&self) -> bool {
-        self.has_type_flags(TypeFlags::HAS_RE_PLACEHOLDER)
-    }
     /// "Free" regions in this context means that it has any region
     /// that is not (a) erased or (b) late-bound.
     fn has_free_regions(&self) -> bool {
@@ -719,21 +713,15 @@ impl<'tcx> TyCtxt<'tcx> {
 // vars. See comment on `shift_vars_through_binders` method in
 // `subst.rs` for more details.
 
-enum Direction {
-    In,
-    Out,
-}
-
 struct Shifter<'tcx> {
     tcx: TyCtxt<'tcx>,
     current_index: ty::DebruijnIndex,
     amount: u32,
-    direction: Direction,
 }
 
 impl Shifter<'tcx> {
-    pub fn new(tcx: TyCtxt<'tcx>, amount: u32, direction: Direction) -> Self {
-        Shifter { tcx, current_index: ty::INNERMOST, amount, direction }
+    pub fn new(tcx: TyCtxt<'tcx>, amount: u32) -> Self {
+        Shifter { tcx, current_index: ty::INNERMOST, amount }
     }
 }
 
@@ -755,13 +743,7 @@ impl TypeFolder<'tcx> for Shifter<'tcx> {
             if self.amount == 0 || debruijn < self.current_index {
                 r
             } else {
-                let debruijn = match self.direction {
-                    Direction::In => debruijn.shifted_in(self.amount),
-                    Direction::Out => {
-                        assert!(debruijn.as_u32() >= self.amount);
-                        debruijn.shifted_out(self.amount)
-                    }
-                };
+                let debruijn = debruijn.shifted_in(self.amount);
                 let shifted = ty::ReLateBound(debruijn, br);
                 self.tcx.mk_region(shifted)
             }
@@ -776,13 +758,7 @@ impl TypeFolder<'tcx> for Shifter<'tcx> {
             if self.amount == 0 || debruijn < self.current_index {
                 ty
             } else {
-                let debruijn = match self.direction {
-                    Direction::In => debruijn.shifted_in(self.amount),
-                    Direction::Out => {
-                        assert!(debruijn.as_u32() >= self.amount);
-                        debruijn.shifted_out(self.amount)
-                    }
-                };
+                let debruijn = debruijn.shifted_in(self.amount);
                 self.tcx.mk_ty(ty::Bound(debruijn, bound_ty))
             }
         }
@@ -796,13 +772,7 @@ impl TypeFolder<'tcx> for Shifter<'tcx> {
             if self.amount == 0 || debruijn < self.current_index {
                 ct
             } else {
-                let debruijn = match self.direction {
-                    Direction::In => debruijn.shifted_in(self.amount),
-                    Direction::Out => {
-                        assert!(debruijn.as_u32() >= self.amount);
-                        debruijn.shifted_out(self.amount)
-                    }
-                };
+                let debruijn = debruijn.shifted_in(self.amount);
                 self.tcx.mk_const(ty::Const { val: ty::ConstKind::Bound(debruijn, bound_ct), ty })
             }
         } else {
@@ -830,16 +800,7 @@ where
 {
     debug!("shift_vars(value={:?}, amount={})", value, amount);
 
-    value.fold_with(&mut Shifter::new(tcx, amount, Direction::In))
-}
-
-pub fn shift_out_vars<'tcx, T>(tcx: TyCtxt<'tcx>, value: &T, amount: u32) -> T
-where
-    T: TypeFoldable<'tcx>,
-{
-    debug!("shift_out_vars(value={:?}, amount={})", value, amount);
-
-    value.fold_with(&mut Shifter::new(tcx, amount, Direction::Out))
+    value.fold_with(&mut Shifter::new(tcx, amount))
 }
 
 /// An "escaping var" is a bound var whose binder is not part of `t`. A bound var can be a
@@ -104,14 +104,6 @@ impl<'tcx> TyCtxt<'tcx> {
     // ```
         ty.uninhabited_from(self, param_env).contains(self, module)
     }
-
-    pub fn is_ty_uninhabited_from_any_module(
-        self,
-        ty: Ty<'tcx>,
-        param_env: ty::ParamEnv<'tcx>,
-    ) -> bool {
-        !ty.uninhabited_from(self, param_env).is_empty()
-    }
 }
 
 impl<'tcx> AdtDef {
@@ -7,7 +7,6 @@ pub use self::Variance::*;
 
 use crate::hir::exports::ExportMap;
 use crate::ich::StableHashingContext;
-use crate::infer::canonical::Canonical;
 use crate::middle::cstore::CrateStoreDyn;
 use crate::middle::resolve_lifetime::ObjectLifetimeDefault;
 use crate::mir::interpret::ErrorHandled;
@@ -656,8 +655,6 @@ impl<'a, 'tcx> HashStable<StableHashingContext<'a>> for TyS<'tcx> {
 #[rustc_diagnostic_item = "Ty"]
 pub type Ty<'tcx> = &'tcx TyS<'tcx>;
 
-pub type CanonicalTy<'tcx> = Canonical<'tcx, Ty<'tcx>>;
-
 #[derive(Clone, Copy, PartialEq, Eq, Hash, TyEncodable, TyDecodable, HashStable)]
 pub struct UpvarPath {
     pub hir_id: hir::HirId,
@@ -767,10 +764,6 @@ pub enum IntVarValue {
 pub struct FloatVarValue(pub ast::FloatTy);
 
 impl ty::EarlyBoundRegion {
-    pub fn to_bound_region(&self) -> ty::BoundRegion {
-        ty::BoundRegion::BrNamed(self.def_id, self.name)
-    }
-
     /// Does this early bound region have a name? Early bound regions normally
     /// always have names except when using anonymous lifetimes (`'_`).
     pub fn has_name(&self) -> bool {
@@ -821,14 +814,6 @@ impl GenericParamDef {
             bug!("cannot convert a non-lifetime parameter def to an early bound region")
         }
     }
-
-    pub fn to_bound_region(&self) -> ty::BoundRegion {
-        if let GenericParamDefKind::Lifetime = self.kind {
-            self.to_early_bound_region_data().to_bound_region()
-        } else {
-            bug!("cannot convert a non-lifetime parameter def to an early bound region")
-        }
-    }
 }
 
 #[derive(Default)]
@@ -1003,22 +988,6 @@ impl<'tcx> GenericPredicates<'tcx> {
         instantiated.predicates.extend(self.predicates.iter().map(|(p, _)| p));
         instantiated.spans.extend(self.predicates.iter().map(|(_, s)| s));
     }
-
-    pub fn instantiate_supertrait(
-        &self,
-        tcx: TyCtxt<'tcx>,
-        poly_trait_ref: &ty::PolyTraitRef<'tcx>,
-    ) -> InstantiatedPredicates<'tcx> {
-        assert_eq!(self.parent, None);
-        InstantiatedPredicates {
-            predicates: self
-                .predicates
-                .iter()
-                .map(|(pred, _)| pred.subst_supertrait(tcx, poly_trait_ref))
-                .collect(),
-            spans: self.predicates.iter().map(|(_, sp)| *sp).collect(),
-        }
-    }
 }
 
 #[derive(Debug)]
@@ -1303,7 +1272,6 @@ impl<'tcx> PolyTraitPredicate<'tcx> {
 #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, TyEncodable, TyDecodable)]
 #[derive(HashStable, TypeFoldable)]
 pub struct OutlivesPredicate<A, B>(pub A, pub B); // `A: B`
-pub type PolyOutlivesPredicate<A, B> = ty::Binder<OutlivesPredicate<A, B>>;
 pub type RegionOutlivesPredicate<'tcx> = OutlivesPredicate<ty::Region<'tcx>, ty::Region<'tcx>>;
 pub type TypeOutlivesPredicate<'tcx> = OutlivesPredicate<Ty<'tcx>, ty::Region<'tcx>>;
 pub type PolyRegionOutlivesPredicate<'tcx> = ty::Binder<RegionOutlivesPredicate<'tcx>>;
@@ -601,29 +601,6 @@ impl<'a, 'tcx> TyDecoder<'tcx> for CacheDecoder<'a, 'tcx> {
         Ok(ty)
     }
 
-    fn cached_predicate_for_shorthand<F>(
-        &mut self,
-        shorthand: usize,
-        or_insert_with: F,
-    ) -> Result<ty::Predicate<'tcx>, Self::Error>
-    where
-        F: FnOnce(&mut Self) -> Result<ty::Predicate<'tcx>, Self::Error>,
-    {
-        let tcx = self.tcx();
-
-        let cache_key =
-            ty::CReaderCacheKey { cnum: CrateNum::ReservedForIncrCompCache, pos: shorthand };
-
-        if let Some(&pred) = tcx.pred_rcache.borrow().get(&cache_key) {
-            return Ok(pred);
-        }
-
-        let pred = or_insert_with(self)?;
-        // This may overwrite the entry, but it should overwrite with the same value.
-        tcx.pred_rcache.borrow_mut().insert_same(cache_key, pred);
-        Ok(pred)
-    }
-
     fn with_position<F, R>(&mut self, pos: usize, f: F) -> R
     where
         F: FnOnce(&mut Self) -> R,
@@ -1,6 +1,5 @@
 // Type substitutions.
 
-use crate::infer::canonical::Canonical;
 use crate::ty::codec::{TyDecoder, TyEncoder};
 use crate::ty::fold::{TypeFoldable, TypeFolder, TypeVisitor};
 use crate::ty::sty::{ClosureSubsts, GeneratorSubsts};
@@ -648,8 +647,6 @@ impl<'a, 'tcx> SubstFolder<'a, 'tcx> {
     }
 }
 
-pub type CanonicalUserSubsts<'tcx> = Canonical<'tcx, UserSubsts<'tcx>>;
-
 /// Stores the user-given substs to reach some fully qualified path
 /// (e.g., `<T>::Item` or `<T as Trait>::Item`).
 #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, TyEncodable, TyDecodable)]
@@ -62,15 +62,6 @@ where
         let blocks = mir::traversal::reachable(body);
         visit_results(body, blocks.map(|(bb, _)| bb), self, vis)
     }
-
-    pub fn visit_in_rpo_with(
-        &self,
-        body: &'mir mir::Body<'tcx>,
-        vis: &mut impl ResultsVisitor<'mir, 'tcx, FlowState = A::Domain>,
-    ) {
-        let blocks = mir::traversal::reverse_postorder(body);
-        visit_results(body, blocks.map(|(bb, _)| bb), self, vis)
-    }
 }
 
 /// A solver for dataflow problems.
@@ -114,16 +114,6 @@ pub fn new_parser_from_file<'a>(sess: &'a ParseSess, path: &Path, sp: Option<Spa
     source_file_to_parser(sess, file_to_source_file(sess, path, sp))
 }

-/// Creates a new parser, returning buffered diagnostics if the file doesn't exist,
-/// or from lexing the initial token stream.
-pub fn maybe_new_parser_from_file<'a>(
-    sess: &'a ParseSess,
-    path: &Path,
-) -> Result<Parser<'a>, Vec<Diagnostic>> {
-    let file = try_file_to_source_file(sess, path, None).map_err(|db| vec![db])?;
-    maybe_source_file_to_parser(sess, file)
-}
-
 /// Given a `source_file` and config, returns a parser.
 fn source_file_to_parser(sess: &ParseSess, source_file: Lrc<SourceFile>) -> Parser<'_> {
     panictry_buffer!(&sess.span_diagnostic, maybe_source_file_to_parser(sess, source_file))

@@ -146,12 +136,6 @@ fn maybe_source_file_to_parser(
     Ok(parser)
 }

-// Must preserve old name for now, because `quote!` from the *existing*
-// compiler expands into it.
-pub fn new_parser_from_tts(sess: &ParseSess, tts: Vec<TokenTree>) -> Parser<'_> {
-    stream_to_parser(sess, tts.into_iter().collect(), crate::MACRO_ARGUMENTS)
-}
-
 // Base abstractions

 /// Given a session and a path and an optional span (for error reporting),
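The removed parser entry points mirror the crate's split between fallible constructors that return buffered diagnostics (`maybe_new_parser_from_file`) and infallible wrappers that report and abort via `panictry_buffer!`. A rough sketch of that split using plain standard-library types; `try_build`, `build_or_abort`, and the `String` diagnostics are illustrative, not the rustc API.

```rust
/// Fallible constructor: on failure, hand buffered diagnostics back to the caller
/// so it can decide how to report them (mirrors `maybe_new_parser_from_file`).
fn try_build(path: &str) -> Result<Vec<String>, Vec<String>> {
    if path.is_empty() {
        return Err(vec!["no input file given".to_string()]);
    }
    Ok(vec![format!("tokens from {}", path)])
}

/// Infallible wrapper: emit every buffered diagnostic, then abort
/// (mirrors the `panictry_buffer!` pattern used by `source_file_to_parser`).
fn build_or_abort(path: &str) -> Vec<String> {
    match try_build(path) {
        Ok(parser) => parser,
        Err(diags) => {
            for d in &diags {
                eprintln!("error: {}", d);
            }
            panic!("aborting due to {} previous error(s)", diags.len());
        }
    }
}

fn main() {
    println!("{:?}", build_or_abort("lib.rs"));
    println!("{:?}", try_build("").unwrap_err());
}
```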
@@ -165,10 +165,6 @@ impl WorkProductId {
         cgu_name.hash(&mut hasher);
         WorkProductId { hash: hasher.finish() }
     }
-
-    pub fn from_fingerprint(fingerprint: Fingerprint) -> WorkProductId {
-        WorkProductId { hash: fingerprint }
-    }
 }

 impl<HCX> HashStable<HCX> for WorkProductId {
@@ -402,11 +402,6 @@ impl<K: DepKind> DepGraph<K> {
         self.data.as_ref().unwrap().previous.fingerprint_of(dep_node)
     }

-    #[inline]
-    pub fn prev_dep_node_index_of(&self, dep_node: &DepNode<K>) -> SerializedDepNodeIndex {
-        self.data.as_ref().unwrap().previous.node_to_index(dep_node)
-    }
-
     /// Checks whether a previous work product exists for `v` and, if
     /// so, return the path that leads to it. Used to skip doing work.
     pub fn previous_work_product(&self, v: &WorkProductId) -> Option<WorkProduct> {
@@ -1,7 +1,5 @@
 use rustc_data_structures::fx::FxHashMap;
-use rustc_data_structures::graph::implementation::{
-    Direction, Graph, NodeIndex, INCOMING, OUTGOING,
-};
+use rustc_data_structures::graph::implementation::{Direction, Graph, NodeIndex, INCOMING};

 use super::{DepKind, DepNode};

@@ -52,23 +50,8 @@ impl<K: DepKind> DepGraphQuery<K> {
         }
     }

-    /// All nodes reachable from `node`. In other words, things that
-    /// will have to be recomputed if `node` changes.
-    pub fn transitive_successors(&self, node: &DepNode<K>) -> Vec<&DepNode<K>> {
-        self.reachable_nodes(node, OUTGOING)
-    }
-
     /// All nodes that can reach `node`.
     pub fn transitive_predecessors(&self, node: &DepNode<K>) -> Vec<&DepNode<K>> {
         self.reachable_nodes(node, INCOMING)
     }
-
-    /// Just the outgoing edges from `node`.
-    pub fn immediate_successors(&self, node: &DepNode<K>) -> Vec<&DepNode<K>> {
-        if let Some(&index) = self.indices.get(&node) {
-            self.graph.successor_nodes(index).map(|s| self.graph.node_data(s)).collect()
-        } else {
-            vec![]
-        }
-    }
 }
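Both the removed `transitive_successors` and the surviving `transitive_predecessors` are thin wrappers around `reachable_nodes`, differing only in whether they follow `OUTGOING` or `INCOMING` edges. A rough, self-contained sketch of that directional reachability walk over a plain edge list; the `Direction` enum and `u32` node ids here are simplified stand-ins for the rustc graph types.

```rust
use std::collections::{HashMap, HashSet, VecDeque};

#[derive(Clone, Copy)]
enum Direction {
    Outgoing, // follow edges forward: transitive successors
    Incoming, // follow edges backward: transitive predecessors
}

/// Breadth-first reachability in the chosen direction, the same idea the removed
/// `transitive_successors` expressed as `reachable_nodes(node, OUTGOING)`.
fn reachable_nodes(edges: &[(u32, u32)], start: u32, dir: Direction) -> HashSet<u32> {
    let mut adj: HashMap<u32, Vec<u32>> = HashMap::new();
    for &(from, to) in edges {
        match dir {
            Direction::Outgoing => adj.entry(from).or_default().push(to),
            Direction::Incoming => adj.entry(to).or_default().push(from),
        }
    }

    let mut seen = HashSet::from([start]);
    let mut queue = VecDeque::from([start]);
    while let Some(node) = queue.pop_front() {
        for &next in adj.get(&node).into_iter().flatten() {
            if seen.insert(next) {
                queue.push_back(next);
            }
        }
    }
    seen
}

fn main() {
    let edges = [(1, 2), (2, 3), (4, 2)];
    // Everything that must be recomputed if node 1 changes: {1, 2, 3}.
    println!("{:?}", reachable_nodes(&edges, 1, Direction::Outgoing));
    // Everything that can reach node 3: {1, 2, 3, 4}.
    println!("{:?}", reachable_nodes(&edges, 3, Direction::Incoming));
}
```

The outgoing walk matches the removed doc comment: it collects everything that would have to be recomputed if `node` changes.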
@@ -1586,5 +1586,3 @@ pub fn early_warn(output: config::ErrorOutputType, msg: &str) {
     let handler = rustc_errors::Handler::with_emitter(true, None, emitter);
     handler.struct_warn(msg).emit();
 }
-
-pub type CompileResult = Result<(), ErrorReported>;
@@ -619,14 +619,6 @@ impl SyntaxContext {
         HygieneData::with(|data| data.outer_mark(self))
     }

-    #[inline]
-    pub fn outer_mark_with_data(self) -> (ExpnId, Transparency, ExpnData) {
-        HygieneData::with(|data| {
-            let (expn_id, transparency) = data.outer_mark(self);
-            (expn_id, transparency, data.expn_data(expn_id).clone())
-        })
-    }
-
     pub fn dollar_crate_name(self) -> Symbol {
         HygieneData::with(|data| data.syntax_context_data[self.0 as usize].dollar_crate_name)
     }
@@ -223,12 +223,6 @@ impl FileName {
     }
 }

-pub fn quote_expansion_source_code(src: &str) -> FileName {
-    let mut hasher = StableHasher::new();
-    src.hash(&mut hasher);
-    FileName::QuoteExpansion(hasher.finish())
-}
-
 pub fn macro_expansion_source_code(src: &str) -> FileName {
     let mut hasher = StableHasher::new();
     src.hash(&mut hasher);
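The removed `quote_expansion_source_code` is a sibling of the kept `macro_expansion_source_code`: both hash the expansion's source text and wrap the digest in a `FileName` variant, so synthetic "files" get a deterministic identity. A rough standard-library analogue of that hashing step; `DefaultHasher` is only for illustration and, unlike rustc's `StableHasher`, is not stable across processes.

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

/// Simplified analogue of the removed helper: derive an identifier for a synthetic
/// "file" from a hash of the expansion's source text.
fn expansion_file_id(src: &str) -> u64 {
    let mut hasher = DefaultHasher::new();
    src.hash(&mut hasher);
    hasher.finish()
}

fn main() {
    let id = expansion_file_id("fn generated() {}");
    println!("quote-expansion file id: {:#x}", id);
}
```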
@@ -451,15 +451,6 @@ impl SourceMap {
         }
     }

-    /// Returns a new `Span` covering the start and end `BytePos`s of the file containing the given
-    /// `pos`. This can be used to quickly determine if another `BytePos` or `Span` is from the same
-    /// file.
-    pub fn lookup_file_span(&self, pos: BytePos) -> Span {
-        let idx = self.lookup_source_file_idx(pos);
-        let SourceFile { start_pos, end_pos, .. } = *(*self.files.borrow().source_files)[idx];
-        Span::with_root_ctxt(start_pos, end_pos)
-    }
-
     /// Returns `Some(span)`, a union of the LHS and RHS span. The LHS must precede the RHS. If
     /// there are gaps between LHS and RHS, the resulting union will cross these gaps.
     /// For this to work,
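The removed `lookup_file_span` answers "which file's range contains this byte position?", which let callers cheaply test whether two positions come from the same file. A rough, self-contained sketch under the assumption that files occupy sorted, non-overlapping ranges of a single position space; `FileSpan` and this `lookup_file_span` are simplified stand-ins, not the actual `SourceMap` internals.

```rust
/// Files laid out back to back in one global byte-position space, as in a source map.
struct FileSpan {
    start_pos: u32,
    end_pos: u32,
}

/// Simplified version of the removed lookup: binary-search the sorted start positions
/// to find the file containing `pos`, then return that file's full range.
/// Assumes `files` is sorted by `start_pos` and `pos` falls inside some file.
fn lookup_file_span(files: &[FileSpan], pos: u32) -> (u32, u32) {
    let idx = files.partition_point(|f| f.start_pos <= pos) - 1;
    (files[idx].start_pos, files[idx].end_pos)
}

fn main() {
    let files = [
        FileSpan { start_pos: 0, end_pos: 99 },
        FileSpan { start_pos: 100, end_pos: 249 },
    ];
    assert_eq!(lookup_file_span(&files, 150), (100, 249));
    // Two positions fall in the same file exactly when their file spans are equal.
    assert_eq!(lookup_file_span(&files, 150), lookup_file_span(&files, 200));
    println!("ok");
}
```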
@@ -290,10 +290,6 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
         self.infcx.tcx
     }

-    pub fn closure_typer(&self) -> &'cx InferCtxt<'cx, 'tcx> {
-        self.infcx
-    }
-
     ///////////////////////////////////////////////////////////////////////////
     // Selection
     //