Auto merge of #64864 - Centril:rollup-pxz6tw3, r=Centril

Rollup of 14 pull requests

Successful merges:

 - #64703 (Docs: slice elements are equidistant)
 - #64745 (Include message on tests that should panic but do not)
 - #64781 (Remove stray references to the old global tcx)
 - #64794 (Remove unused DepTrackingMap)
 - #64802 (Account for tail expressions when pointing at return type)
 - #64809 (hir: Disallow `target_feature` on constants)
 - #64815 (Fix div_duration() marked as stable by mistake)
 - #64818 (update rtpSpawn's parameter types (its prototype has been updated in libc))
 - #64830 (Thou shallt not `.abort_if_errors()`)
 - #64836 (Stabilize map_get_key_value feature)
 - #64845 (pin.rs: fix links to primitives in documentation)
 - #64847 (Upgrade env_logger to 0.7)
 - #64851 (Add mailmap entry for Dustin Bensing by request)
 - #64859 (check_match: improve diagnostics for `let A = 2;` with `const A: i32 = 3`)

Failed merges:

r? @ghost
Commit f3c8eba643 by bors, 2019-09-28 03:38:48 +00:00
80 changed files with 569 additions and 600 deletions

View file

@ -69,6 +69,7 @@ David Manescu <david.manescu@gmail.com> <dman2626@uni.sydney.edu.au>
David Ross <daboross@daboross.net>
Derek Chiang <derekchiang93@gmail.com> Derek Chiang (Enchi Jiang) <derekchiang93@gmail.com>
Diggory Hardy <diggory.hardy@gmail.com> Diggory Hardy <github@dhardy.name>
Dustin Bensing <dustin.bensing@googlemail.com>
Dylan Braithwaite <dylanbraithwaite1@gmail.com> <mail@dylanb.me>
Dzmitry Malyshau <kvarkus@gmail.com>
E. Dunham <edunham@mozilla.com> edunham <edunham@mozilla.com>

View file

@ -534,7 +534,7 @@ name = "compiletest"
version = "0.0.0"
dependencies = [
"diff",
"env_logger 0.6.2",
"env_logger 0.7.0",
"getopts",
"lazy_static 1.3.0",
"libc",
@ -3409,7 +3409,7 @@ dependencies = [
name = "rustc_driver"
version = "0.0.0"
dependencies = [
"env_logger 0.6.2",
"env_logger 0.7.0",
"graphviz",
"lazy_static 1.3.0",
"log",

View file

@ -47,8 +47,6 @@ Stabilized APIs
- [`<*mut T>::cast`]
- [`Duration::as_secs_f32`]
- [`Duration::as_secs_f64`]
- [`Duration::div_duration_f32`]
- [`Duration::div_duration_f64`]
- [`Duration::div_f32`]
- [`Duration::div_f64`]
- [`Duration::from_secs_f32`]
@ -100,8 +98,6 @@ Compatibility Notes
[`<*mut T>::cast`]: https://doc.rust-lang.org/std/primitive.pointer.html#method.cast
[`Duration::as_secs_f32`]: https://doc.rust-lang.org/std/time/struct.Duration.html#method.as_secs_f32
[`Duration::as_secs_f64`]: https://doc.rust-lang.org/std/time/struct.Duration.html#method.as_secs_f64
[`Duration::div_duration_f32`]: https://doc.rust-lang.org/std/time/struct.Duration.html#method.div_duration_f32
[`Duration::div_duration_f64`]: https://doc.rust-lang.org/std/time/struct.Duration.html#method.div_duration_f64
[`Duration::div_f32`]: https://doc.rust-lang.org/std/time/struct.Duration.html#method.div_f32
[`Duration::div_f64`]: https://doc.rust-lang.org/std/time/struct.Duration.html#method.div_f64
[`Duration::from_secs_f32`]: https://doc.rust-lang.org/std/time/struct.Duration.html#method.from_secs_f32
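
These release-note entries are removed because `div_duration_f32`/`div_duration_f64` were listed as stabilized by mistake and remain nightly-only (#64815). A minimal sketch contrasting the still-unstable Duration-by-Duration division with the stable `div_f32` listed above; the `div_duration` feature name and nightly requirement are assumptions based on the state of std at this commit:

```rust
// Sketch only: dividing two Durations still needs a nightly compiler
// and the (assumed) `div_duration` feature gate at this point.
#![feature(div_duration)]

use std::time::Duration;

fn main() {
    let total = Duration::from_secs(10);
    let slice = Duration::from_secs(4);

    // Unstable: Duration / Duration -> f32 ratio.
    let ratio: f32 = total.div_duration_f32(slice);

    // Stable (listed under "Stabilized APIs" above): Duration / f32 -> Duration.
    let half: Duration = total.div_f32(2.0);

    println!("ratio = {}, half = {:?}", ratio, half);
}
```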

View file

@ -580,7 +580,6 @@ impl<K: Ord, V> BTreeMap<K, V> {
/// # Examples
///
/// ```
/// #![feature(map_get_key_value)]
/// use std::collections::BTreeMap;
///
/// let mut map = BTreeMap::new();
@ -588,7 +587,7 @@ impl<K: Ord, V> BTreeMap<K, V> {
/// assert_eq!(map.get_key_value(&1), Some((&1, &"a")));
/// assert_eq!(map.get_key_value(&2), None);
/// ```
#[unstable(feature = "map_get_key_value", issue = "49347")]
#[stable(feature = "map_get_key_value", since = "1.40.0")]
pub fn get_key_value<Q: ?Sized>(&self, k: &Q) -> Option<(&K, &V)>
where K: Borrow<Q>,
Q: Ord
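
With `map_get_key_value` stabilized (#64836), the `#![feature(...)]` line is dropped from the doc test and callers on stable can use the method directly. A minimal usage sketch mirroring the doc example above:

```rust
use std::collections::BTreeMap;

fn main() {
    let mut map = BTreeMap::new();
    map.insert(1, "a");

    // Returns both the stored key and the value, with no feature gate needed.
    assert_eq!(map.get_key_value(&1), Some((&1, &"a")));
    assert_eq!(map.get_key_value(&2), None);
}
```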

View file

@ -369,6 +369,8 @@
//! [drop-guarantee]: #drop-guarantee
//! [`poll`]: ../../std/future/trait.Future.html#tymethod.poll
//! [`Pin::get_unchecked_mut`]: struct.Pin.html#method.get_unchecked_mut
//! [`bool`]: ../../std/primitive.bool.html
//! [`i32`]: ../../std/primitive.i32.html
#![stable(feature = "pin", since = "1.33.0")]

View file

@ -1,87 +0,0 @@
use rustc_data_structures::fx::FxHashMap;
use std::cell::RefCell;
use std::hash::Hash;
use std::marker::PhantomData;
use crate::util::common::MemoizationMap;
use super::{DepKind, DepNodeIndex, DepGraph};
/// A DepTrackingMap offers a subset of the `Map` API and ensures that
/// we make calls to `read` and `write` as appropriate. We key the
/// maps with a unique type for brevity.
pub struct DepTrackingMap<M: DepTrackingMapConfig> {
phantom: PhantomData<M>,
graph: DepGraph,
map: FxHashMap<M::Key, (M::Value, DepNodeIndex)>,
}
pub trait DepTrackingMapConfig {
type Key: Eq + Hash + Clone;
type Value: Clone;
fn to_dep_kind() -> DepKind;
}
impl<M: DepTrackingMapConfig> DepTrackingMap<M> {
pub fn new(graph: DepGraph) -> DepTrackingMap<M> {
DepTrackingMap {
phantom: PhantomData,
graph,
map: Default::default(),
}
}
}
impl<M: DepTrackingMapConfig> MemoizationMap for RefCell<DepTrackingMap<M>> {
type Key = M::Key;
type Value = M::Value;
/// Memoizes an entry in the dep-tracking-map. If the entry is not
/// already present, then `op` will be executed to compute its value.
/// The resulting dependency graph looks like this:
///
/// [op] -> Map(key) -> CurrentTask
///
/// Here, `[op]` represents whatever nodes `op` reads in the
/// course of execution; `Map(key)` represents the node for this
/// map, and `CurrentTask` represents the current task when
/// `memoize` is invoked.
///
/// **Important:** when `op` is invoked, the current task will be
/// switched to `Map(key)`. Therefore, if `op` makes use of any
/// HIR nodes or shared state accessed through its closure
/// environment, it must explicitly register a read of that
/// state. As an example, see `type_of_item` in `collect`,
/// which looks something like this:
///
/// ```
/// fn type_of_item(..., item: &hir::Item) -> Ty<'tcx> {
/// let item_def_id = ccx.tcx.hir().local_def_id(it.hir_id);
/// ccx.tcx.item_types.memoized(item_def_id, || {
/// ccx.tcx.dep_graph.read(DepNode::Hir(item_def_id)); // (*)
/// compute_type_of_item(ccx, item)
/// });
/// }
/// ```
///
/// The key is the line marked `(*)`: the closure implicitly
/// accesses the body of the item `item`, so we register a read
/// from `Hir(item_def_id)`.
fn memoize<OP>(&self, key: M::Key, op: OP) -> M::Value
where OP: FnOnce() -> M::Value
{
let graph;
{
let this = self.borrow();
if let Some(&(ref result, dep_node)) = this.map.get(&key) {
this.graph.read_index(dep_node);
return result.clone();
}
graph = this.graph.clone();
}
let (result, dep_node) = graph.with_anon_task(M::to_dep_kind(), op);
self.borrow_mut().map.insert(key, (result.clone(), dep_node));
graph.read_index(dep_node);
result
}
}
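
The deleted `DepTrackingMap` wrapped a keyed cache so that every hit or miss also registered a read of the corresponding dep-graph node. As an illustration only (standalone types, not the compiler's), the plain memoization pattern it layered that tracking on top of looks roughly like this:

```rust
use std::cell::RefCell;
use std::collections::HashMap;
use std::hash::Hash;

/// A tiny interior-mutability memo map; `DepTrackingMap` additionally
/// recorded a dep-graph read/write around each lookup and insertion.
struct MemoMap<K, V> {
    map: RefCell<HashMap<K, V>>,
}

impl<K: Eq + Hash + Clone, V: Clone> MemoMap<K, V> {
    fn new() -> Self {
        MemoMap { map: RefCell::new(HashMap::new()) }
    }

    /// Returns the cached value for `key`, computing it with `op` on a miss.
    fn memoize(&self, key: K, op: impl FnOnce() -> V) -> V {
        if let Some(v) = self.map.borrow().get(&key) {
            return v.clone();
        }
        let v = op();
        self.map.borrow_mut().insert(key, v.clone());
        v
    }
}

fn main() {
    let cache = MemoMap::new();
    assert_eq!(cache.memoize("answer", || 42), 42);
    // The second call hits the cache; the closure is not re-run.
    assert_eq!(cache.memoize("answer", || unreachable!()), 42);
}
```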

View file

@ -590,7 +590,7 @@ impl DepGraph {
// mark it as green by recursively marking all of its
// dependencies green.
self.try_mark_previous_green(
tcx.global_tcx(),
tcx,
data,
prev_index,
&dep_node

View file

@ -1,6 +1,5 @@
pub mod debug;
mod dep_node;
mod dep_tracking_map;
mod graph;
mod prev;
mod query;
@ -8,7 +7,6 @@ mod safe;
mod serialized;
pub mod cgu_reuse_tracker;
pub use self::dep_tracking_map::{DepTrackingMap, DepTrackingMapConfig};
pub use self::dep_node::{DepNode, DepKind, DepConstructor, WorkProductId, RecoverKey, label_strs};
pub use self::graph::{DepGraph, WorkProduct, DepNodeIndex, DepNodeColor, TaskDeps, hash_result};
pub use self::graph::WorkProductFileKind;

View file

@ -93,30 +93,35 @@ struct CheckAttrVisitor<'tcx> {
impl CheckAttrVisitor<'tcx> {
/// Checks any attribute.
fn check_attributes(&self, item: &hir::Item, target: Target) {
if target == Target::Fn || target == Target::Const {
self.tcx.codegen_fn_attrs(self.tcx.hir().local_def_id(item.hir_id));
} else if let Some(a) = item.attrs.iter().find(|a| a.check_name(sym::target_feature)) {
self.tcx.sess.struct_span_err(a.span, "attribute should be applied to a function")
.span_label(item.span, "not a function")
.emit();
}
let mut is_valid = true;
for attr in &item.attrs {
if attr.check_name(sym::inline) {
is_valid &= if attr.check_name(sym::inline) {
self.check_inline(attr, &item.span, target)
} else if attr.check_name(sym::non_exhaustive) {
self.check_non_exhaustive(attr, item, target)
} else if attr.check_name(sym::marker) {
self.check_marker(attr, item, target)
} else if attr.check_name(sym::target_feature) {
self.check_target_feature(attr, item, target)
} else {
true
};
}
if !is_valid {
return;
}
if target == Target::Fn {
self.tcx.codegen_fn_attrs(self.tcx.hir().local_def_id(item.hir_id));
}
self.check_repr(item, target);
self.check_used(item, target);
}
/// Checks if an `#[inline]` is applied to a function or a closure.
fn check_inline(&self, attr: &hir::Attribute, span: &Span, target: Target) {
/// Checks if an `#[inline]` is applied to a function or a closure. Returns `true` if valid.
fn check_inline(&self, attr: &hir::Attribute, span: &Span, target: Target) -> bool {
if target != Target::Fn && target != Target::Closure {
struct_span_err!(self.tcx.sess,
attr.span,
@ -124,13 +129,21 @@ impl CheckAttrVisitor<'tcx> {
"attribute should be applied to function or closure")
.span_label(*span, "not a function or closure")
.emit();
false
} else {
true
}
}
/// Checks if the `#[non_exhaustive]` attribute on an `item` is valid.
fn check_non_exhaustive(&self, attr: &hir::Attribute, item: &hir::Item, target: Target) {
/// Checks if the `#[non_exhaustive]` attribute on an `item` is valid. Returns `true` if valid.
fn check_non_exhaustive(
&self,
attr: &hir::Attribute,
item: &hir::Item,
target: Target,
) -> bool {
match target {
Target::Struct | Target::Enum => { /* Valid */ },
Target::Struct | Target::Enum => true,
_ => {
struct_span_err!(self.tcx.sess,
attr.span,
@ -138,25 +151,44 @@ impl CheckAttrVisitor<'tcx> {
"attribute can only be applied to a struct or enum")
.span_label(item.span, "not a struct or enum")
.emit();
return;
false
}
}
}
/// Checks if the `#[marker]` attribute on an `item` is valid.
fn check_marker(&self, attr: &hir::Attribute, item: &hir::Item, target: Target) {
/// Checks if the `#[marker]` attribute on an `item` is valid. Returns `true` if valid.
fn check_marker(&self, attr: &hir::Attribute, item: &hir::Item, target: Target) -> bool {
match target {
Target::Trait => { /* Valid */ },
Target::Trait => true,
_ => {
self.tcx.sess
.struct_span_err(attr.span, "attribute can only be applied to a trait")
.span_label(item.span, "not a trait")
.emit();
return;
false
}
}
}
/// Checks if the `#[target_feature]` attribute on `item` is valid. Returns `true` if valid.
fn check_target_feature(
&self,
attr: &hir::Attribute,
item: &hir::Item,
target: Target,
) -> bool {
match target {
Target::Fn => true,
_ => {
self.tcx.sess
.struct_span_err(attr.span, "attribute should be applied to a function")
.span_label(item.span, "not a function")
.emit();
false
},
}
}
/// Checks if the `#[repr]` attributes on `item` are valid.
fn check_repr(&self, item: &hir::Item, target: Target) {
// Extract the names of all repr hints, e.g., [foo, bar, align] for:
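
The new `check_target_feature` arm means `#[target_feature]` is now rejected on anything that is not a function (#64809), instead of slipping through to `codegen_fn_attrs`. A minimal sketch of code this change reports, assuming an x86 target for the feature name:

```rust
// Sketch: with this change, `#[target_feature]` on a constant is rejected
// with "attribute should be applied to a function".
#[target_feature(enable = "sse2")] //~ ERROR attribute should be applied to a function
const BAD: u32 = 0; //~ NOTE not a function

// Still accepted: an `unsafe fn` is a valid target for the attribute.
#[target_feature(enable = "sse2")]
unsafe fn fine() {}

fn main() {}
```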

View file

@ -705,7 +705,6 @@ impl LoweringContext<'_> {
E0628,
"generators cannot have explicit parameters"
);
self.sess.abort_if_errors();
}
Some(match movability {
Movability::Movable => hir::GeneratorMovability::Movable,
@ -998,7 +997,7 @@ impl LoweringContext<'_> {
E0727,
"`async` generators are not yet supported",
);
self.sess.abort_if_errors();
return hir::ExprKind::Err;
},
None => self.generator_kind = Some(hir::GeneratorKind::Gen),
}

View file

@ -23,8 +23,6 @@ use syntax::source_map::Spanned;
use syntax::ext::base::MacroKind;
use syntax_pos::{Span, DUMMY_SP};
use std::result::Result::Err;
pub mod blocks;
mod collector;
mod def_collector;
@ -183,6 +181,44 @@ pub struct Map<'hir> {
hir_to_node_id: FxHashMap<HirId, NodeId>,
}
struct ParentHirIterator<'map> {
current_id: HirId,
map: &'map Map<'map>,
}
impl<'map> ParentHirIterator<'map> {
fn new(current_id: HirId, map: &'map Map<'map>) -> ParentHirIterator<'map> {
ParentHirIterator {
current_id,
map,
}
}
}
impl<'map> Iterator for ParentHirIterator<'map> {
type Item = (HirId, Node<'map>);
fn next(&mut self) -> Option<Self::Item> {
if self.current_id == CRATE_HIR_ID {
return None;
}
loop { // There are nodes that do not have entries, so we need to skip them.
let parent_id = self.map.get_parent_node(self.current_id);
if parent_id == self.current_id {
self.current_id = CRATE_HIR_ID;
return None;
}
self.current_id = parent_id;
if let Some(entry) = self.map.find_entry(parent_id) {
return Some((parent_id, entry.node));
}
// If this `HirId` doesn't have an `Entry`, skip it and look for its `parent_id`.
}
}
}
impl<'hir> Map<'hir> {
#[inline]
fn lookup(&self, id: HirId) -> Option<&Entry<'hir>> {
@ -682,45 +718,6 @@ impl<'hir> Map<'hir> {
}
}
/// If there is some error when walking the parents (e.g., a node does not
/// have a parent in the map or a node can't be found), then we return the
/// last good `HirId` we found. Note that reaching the crate root (`id == 0`),
/// is not an error, since items in the crate module have the crate root as
/// parent.
fn walk_parent_nodes<F, F2>(&self,
start_id: HirId,
found: F,
bail_early: F2)
-> Result<HirId, HirId>
where F: Fn(&Node<'hir>) -> bool, F2: Fn(&Node<'hir>) -> bool
{
let mut id = start_id;
loop {
let parent_id = self.get_parent_node(id);
if parent_id == CRATE_HIR_ID {
return Ok(CRATE_HIR_ID);
}
if parent_id == id {
return Err(id);
}
if let Some(entry) = self.find_entry(parent_id) {
if let Node::Crate = entry.node {
return Err(id);
}
if found(&entry.node) {
return Ok(parent_id);
} else if bail_early(&entry.node) {
return Err(parent_id);
}
id = parent_id;
} else {
return Err(id);
}
}
}
/// Retrieves the `HirId` for `id`'s enclosing method, unless there's a
/// `while` or `loop` before reaching it, as block tail returns are not
/// available in them.
@ -744,29 +741,46 @@ impl<'hir> Map<'hir> {
/// }
/// ```
pub fn get_return_block(&self, id: HirId) -> Option<HirId> {
let match_fn = |node: &Node<'_>| {
match *node {
let mut iter = ParentHirIterator::new(id, &self).peekable();
let mut ignore_tail = false;
if let Some(entry) = self.find_entry(id) {
if let Node::Expr(Expr { kind: ExprKind::Ret(_), .. }) = entry.node {
// When dealing with `return` statements, we don't care about climbing only tail
// expressions.
ignore_tail = true;
}
}
while let Some((hir_id, node)) = iter.next() {
if let (Some((_, next_node)), false) = (iter.peek(), ignore_tail) {
match next_node {
Node::Block(Block { expr: None, .. }) => return None,
Node::Block(Block { expr: Some(expr), .. }) => {
if hir_id != expr.hir_id {
// The current node is not the tail expression of its parent.
return None;
}
}
_ => {}
}
}
match node {
Node::Item(_) |
Node::ForeignItem(_) |
Node::TraitItem(_) |
Node::Expr(Expr { kind: ExprKind::Closure(..), ..}) |
Node::ImplItem(_) => true,
_ => false,
}
};
let match_non_returning_block = |node: &Node<'_>| {
match *node {
Node::ImplItem(_) => return Some(hir_id),
Node::Expr(ref expr) => {
match expr.kind {
ExprKind::Loop(..) | ExprKind::Ret(..) => true,
_ => false,
// Ignore `return`s on the first iteration
ExprKind::Loop(..) | ExprKind::Ret(..) => return None,
_ => {}
}
}
_ => false,
Node::Local(_) => return None,
_ => {}
}
};
self.walk_parent_nodes(id, match_fn, match_non_returning_block).ok()
}
None
}
/// Retrieves the `HirId` for `id`'s parent item, or `id` itself if no
@ -774,17 +788,18 @@ impl<'hir> Map<'hir> {
/// in the HIR which is recorded by the map and is an item, either an item
/// in a module, trait, or impl.
pub fn get_parent_item(&self, hir_id: HirId) -> HirId {
match self.walk_parent_nodes(hir_id, |node| match *node {
for (hir_id, node) in ParentHirIterator::new(hir_id, &self) {
match node {
Node::Crate |
Node::Item(_) |
Node::ForeignItem(_) |
Node::TraitItem(_) |
Node::ImplItem(_) => true,
_ => false,
}, |_| false) {
Ok(id) => id,
Err(id) => id,
Node::ImplItem(_) => return hir_id,
_ => {}
}
}
hir_id
}
/// Returns the `DefId` of `id`'s nearest module parent, or `id` itself if no
/// module parent is in this map.
@ -795,18 +810,18 @@ impl<'hir> Map<'hir> {
/// Returns the `HirId` of `id`'s nearest module parent, or `id` itself if no
/// module parent is in this map.
pub fn get_module_parent_node(&self, hir_id: HirId) -> HirId {
match self.walk_parent_nodes(hir_id, |node| match *node {
Node::Item(&Item { kind: ItemKind::Mod(_), .. }) => true,
_ => false,
}, |_| false) {
Ok(id) => id,
Err(id) => id,
for (hir_id, node) in ParentHirIterator::new(hir_id, &self) {
if let Node::Item(&Item { kind: ItemKind::Mod(_), .. }) = node {
return hir_id;
}
}
CRATE_HIR_ID
}
/// Returns the nearest enclosing scope. A scope is roughly an item or block.
pub fn get_enclosing_scope(&self, hir_id: HirId) -> Option<HirId> {
self.walk_parent_nodes(hir_id, |node| match *node {
for (hir_id, node) in ParentHirIterator::new(hir_id, &self) {
if match node {
Node::Item(i) => {
match i.kind {
ItemKind::Fn(..)
@ -839,16 +854,20 @@ impl<'hir> Map<'hir> {
},
Node::Block(_) => true,
_ => false,
}, |_| false).ok()
} {
return Some(hir_id);
}
}
None
}
/// Returns the defining scope for an opaque type definition.
pub fn get_defining_scope(&self, id: HirId) -> Option<HirId> {
pub fn get_defining_scope(&self, id: HirId) -> HirId {
let mut scope = id;
loop {
scope = self.get_enclosing_scope(scope)?;
scope = self.get_enclosing_scope(scope).unwrap_or(CRATE_HIR_ID);
if scope == CRATE_HIR_ID {
return Some(CRATE_HIR_ID);
return CRATE_HIR_ID;
}
match self.get(scope) {
Node::Item(i) => {
@ -861,7 +880,7 @@ impl<'hir> Map<'hir> {
_ => break,
}
}
Some(scope)
scope
}
pub fn get_parent_did(&self, id: HirId) -> DefId {
@ -1064,6 +1083,14 @@ impl<'hir> Map<'hir> {
self.as_local_hir_id(id).map(|id| self.span(id))
}
pub fn res_span(&self, res: Res) -> Option<Span> {
match res {
Res::Err => None,
Res::Local(id) => Some(self.span(id)),
res => self.span_if_local(res.opt_def_id()?),
}
}
pub fn node_to_string(&self, id: HirId) -> String {
hir_id_to_string(self, id, true)
}
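
The new `ParentHirIterator` replaces the `walk_parent_nodes` closure-callback API with an ordinary iterator over `(HirId, Node)` pairs that skips ids without entries and stops at the crate root. As a standalone illustration of the same pattern (hypothetical `u32` ids and parent table, not the compiler's types):

```rust
use std::collections::HashMap;

const ROOT: u32 = 0;

/// Walks a parent table upward, yielding each ancestor that has an entry,
/// skipping ids without one, and stopping at the root -- the same shape as
/// the HIR map's `ParentHirIterator`.
struct ParentIter<'a> {
    current: u32,
    parents: &'a HashMap<u32, u32>,     // child -> parent
    entries: &'a HashMap<u32, &'a str>, // ids that actually have a node
}

impl<'a> Iterator for ParentIter<'a> {
    type Item = (u32, &'a str);

    fn next(&mut self) -> Option<Self::Item> {
        if self.current == ROOT {
            return None;
        }
        loop {
            let parent = *self.parents.get(&self.current).unwrap_or(&ROOT);
            if parent == self.current {
                self.current = ROOT;
                return None;
            }
            self.current = parent;
            if let Some(&node) = self.entries.get(&parent) {
                return Some((parent, node));
            }
            // No entry for this id: keep climbing.
        }
    }
}

fn main() {
    let parents: HashMap<u32, u32> = HashMap::from([(3, 2), (2, 1), (1, ROOT)]);
    let entries: HashMap<u32, &str> = HashMap::from([(1, "item"), (3, "expr")]);
    let found: Vec<_> =
        ParentIter { current: 3, parents: &parents, entries: &entries }.collect();
    // Id 2 has no entry, so it is skipped; the walk ends at the root.
    assert_eq!(found, vec![(1, "item")]);
}
```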

View file

@ -1552,7 +1552,7 @@ pub enum ExprKind {
/// Thus, `x.foo::<Bar, Baz>(a, b, c, d)` is represented as
/// `ExprKind::MethodCall(PathSegment { foo, [Bar, Baz] }, [x, a, b, c, d])`.
MethodCall(P<PathSegment>, Span, HirVec<Expr>),
/// A tuple (e.g., `(a, b, c ,d)`).
/// A tuple (e.g., `(a, b, c, d)`).
Tup(HirVec<Expr>),
/// A binary operation (e.g., `a + b`, `a * b`).
Binary(BinOp, P<Expr>, P<Expr>),

View file

@ -1462,7 +1462,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
// type-checking closure types are in local tables only.
if !self.in_progress_tables.is_some() || !ty.has_closure_types() {
if !(param_env, ty).has_local_value() {
return ty.is_copy_modulo_regions(self.tcx.global_tcx(), param_env, span);
return ty.is_copy_modulo_regions(self.tcx, param_env, span);
}
}

View file

@ -561,15 +561,13 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
def_id, instantiated_ty
);
let gcx = self.tcx.global_tcx();
// Use substs to build up a reverse map from regions to their
// identity mappings. This is necessary because of `impl
// Trait` lifetimes are computed by replacing existing
// lifetimes with 'static and remapping only those used in the
// `impl Trait` return type, resulting in the parameters
// shifting.
let id_substs = InternalSubsts::identity_for_item(gcx, def_id);
let id_substs = InternalSubsts::identity_for_item(self.tcx, def_id);
let map: FxHashMap<GenericArg<'tcx>, GenericArg<'tcx>> = opaque_defn
.substs
.iter()
@ -854,7 +852,7 @@ impl TypeFolder<'tcx> for ReverseMapper<'tcx> {
)
.emit();
self.tcx().global_tcx().mk_region(ty::ReStatic)
self.tcx().mk_region(ty::ReStatic)
},
}
}
@ -1215,7 +1213,7 @@ pub fn may_define_opaque_type(
let mut hir_id = tcx.hir().as_local_hir_id(def_id).unwrap();
// Named opaque types can be defined by any siblings or children of siblings.
let scope = tcx.hir().get_defining_scope(opaque_hir_id).expect("could not get defining scope");
let scope = tcx.hir().get_defining_scope(opaque_hir_id);
// We walk up the node tree until we hit the root or the scope of the opaque type.
while hir_id != scope && hir_id != hir::CRATE_HIR_ID {
hir_id = tcx.hir().get_parent_item(hir_id);

View file

@ -82,7 +82,7 @@ impl ExprVisitor<'tcx> {
// Special-case transmutting from `typeof(function)` and
// `Option<typeof(function)>` to present a clearer error.
let from = unpack_option_like(self.tcx.global_tcx(), from);
let from = unpack_option_like(self.tcx, from);
if let (&ty::FnDef(..), SizeSkeleton::Known(size_to)) = (&from.kind, sk_to) {
if size_to == Pointer.size(&self.tcx) {
struct_span_err!(self.tcx.sess, span, E0591,

View file

@ -749,7 +749,7 @@ impl<'a, 'tcx> MemCategorizationContext<'a, 'tcx> {
.unwrap_or(ty::ClosureKind::LATTICE_BOTTOM),
None =>
closure_substs.closure_kind(closure_def_id, self.tcx.global_tcx()),
closure_substs.closure_kind(closure_def_id, self.tcx),
}
}
_ => span_bug!(span, "unexpected type for fn in mem_categorization: {:?}", ty),

View file

@ -1504,7 +1504,7 @@ impl<'tcx> TerminatorKind<'tcx> {
Goto { .. } => vec!["".into()],
SwitchInt { ref values, switch_ty, .. } => ty::tls::with(|tcx| {
let param_env = ty::ParamEnv::empty();
let switch_ty = tcx.lift_to_global(&switch_ty).unwrap();
let switch_ty = tcx.lift(&switch_ty).unwrap();
let size = tcx.layout_of(param_env.and(switch_ty)).unwrap().size;
values
.iter()

View file

@ -108,7 +108,7 @@ impl TraitEngine<'tcx> for FulfillmentContext<'tcx> {
goal: obligation.goal.predicate,
}, &mut orig_values);
match infcx.tcx.global_tcx().evaluate_goal(canonical_goal) {
match infcx.tcx.evaluate_goal(canonical_goal) {
Ok(response) => {
if response.is_proven() {
making_progress = true;

View file

@ -3,12 +3,10 @@
// seems likely that they should eventually be merged into more
// general routines.
use crate::dep_graph::{DepKind, DepTrackingMapConfig};
use std::marker::PhantomData;
use crate::infer::InferCtxt;
use crate::traits::{FulfillmentContext, Obligation, ObligationCause, SelectionContext,
TraitEngine, Vtable};
use crate::ty::{self, Ty, TyCtxt};
use crate::ty::{self, TyCtxt};
use crate::ty::subst::{Subst, SubstsRef};
use crate::ty::fold::TypeFoldable;
@ -100,33 +98,8 @@ impl<'tcx> TyCtxt<'tcx> {
}
}
// Implement DepTrackingMapConfig for `trait_cache`
pub struct TraitSelectionCache<'tcx> {
data: PhantomData<&'tcx ()>
}
impl<'tcx> DepTrackingMapConfig for TraitSelectionCache<'tcx> {
type Key = (ty::ParamEnv<'tcx>, ty::PolyTraitRef<'tcx>);
type Value = Vtable<'tcx, ()>;
fn to_dep_kind() -> DepKind {
DepKind::TraitSelect
}
}
// # Global Cache
pub struct ProjectionCache<'tcx> {
data: PhantomData<&'tcx ()>,
}
impl<'tcx> DepTrackingMapConfig for ProjectionCache<'tcx> {
type Key = Ty<'tcx>;
type Value = Ty<'tcx>;
fn to_dep_kind() -> DepKind {
DepKind::TraitSelect
}
}
impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
/// Finishes processes any obligations that remain in the
/// fulfillment context, and then returns the result with all type

View file

@ -497,7 +497,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
4
};
let normalize = |candidate| self.tcx.global_tcx().infer_ctxt().enter(|ref infcx| {
let normalize = |candidate| self.tcx.infer_ctxt().enter(|ref infcx| {
let normalized = infcx
.at(&ObligationCause::dummy(), ty::ParamEnv::empty())
.normalize(candidate)
@ -783,8 +783,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
}
ty::Predicate::ObjectSafe(trait_def_id) => {
let violations = self.tcx.global_tcx()
.object_safety_violations(trait_def_id);
let violations = self.tcx.object_safety_violations(trait_def_id);
if let Some(err) = self.tcx.report_object_safety_error(
span,
trait_def_id,
@ -920,7 +919,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
}
TraitNotObjectSafe(did) => {
let violations = self.tcx.global_tcx().object_safety_violations(did);
let violations = self.tcx.object_safety_violations(did);
if let Some(err) = self.tcx.report_object_safety_error(span, did, violations) {
err
} else {

View file

@ -495,7 +495,7 @@ impl<'a, 'b, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'b, 'tcx> {
} else {
if !substs.has_local_value() {
let instance = ty::Instance::resolve(
self.selcx.tcx().global_tcx(),
self.selcx.tcx(),
obligation.param_env,
def_id,
substs,

View file

@ -40,12 +40,11 @@ impl<'cx, 'tcx> At<'cx, 'tcx> {
};
}
let gcx = tcx.global_tcx();
let mut orig_values = OriginalQueryValues::default();
let c_ty = self.infcx.canonicalize_query(&self.param_env.and(ty), &mut orig_values);
let span = self.cause.span;
debug!("c_ty = {:?}", c_ty);
if let Ok(result) = &gcx.dropck_outlives(c_ty) {
if let Ok(result) = &tcx.dropck_outlives(c_ty) {
if result.is_proven() {
if let Ok(InferOk { value, obligations }) =
self.infcx.instantiate_query_response_and_region_obligations(

View file

@ -50,7 +50,7 @@ impl<'cx, 'tcx> InferCtxt<'cx, 'tcx> {
// Run canonical query. If overflow occurs, rerun from scratch but this time
// in standard trait query mode so that overflow is handled appropriately
// within `SelectionContext`.
self.tcx.global_tcx().evaluate_obligation(c_pred)
self.tcx.evaluate_obligation(c_pred)
}
// Helper function that canonicalizes and runs the query. If an

View file

@ -141,7 +141,7 @@ impl<'cx, 'tcx> TypeFolder<'tcx> for QueryNormalizer<'cx, 'tcx> {
// binder). It would be better to normalize in a
// binding-aware fashion.
let gcx = self.infcx.tcx.global_tcx();
let tcx = self.infcx.tcx;
let mut orig_values = OriginalQueryValues::default();
// HACK(matthewjasper) `'static` is special-cased in selection,
@ -150,7 +150,7 @@ impl<'cx, 'tcx> TypeFolder<'tcx> for QueryNormalizer<'cx, 'tcx> {
&self.param_env.and(*data), &mut orig_values);
debug!("QueryNormalizer: c_data = {:#?}", c_data);
debug!("QueryNormalizer: orig_values = {:#?}", orig_values);
match gcx.normalize_projection_ty(c_data) {
match tcx.normalize_projection_ty(c_data) {
Ok(result) => {
// We don't expect ambiguity.
if result.is_ambiguous() {

View file

@ -97,7 +97,7 @@ impl<'cx, 'tcx> InferCtxt<'cx, 'tcx> {
let mut orig_values = OriginalQueryValues::default();
let key = self.canonicalize_query(&param_env.and(ty), &mut orig_values);
let result = match self.tcx.global_tcx().implied_outlives_bounds(key) {
let result = match self.tcx.implied_outlives_bounds(key) {
Ok(r) => r,
Err(NoSolution) => {
self.tcx.sess.delay_span_bug(

View file

@ -1,4 +1,4 @@
use crate::infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResponse, QueryResponse};
use crate::infer::canonical::{Canonicalized, CanonicalizedQueryResponse};
use crate::traits::query::Fallible;
use crate::hir::def_id::DefId;
use crate::ty::{ParamEnvAnd, Ty, TyCtxt};
@ -37,12 +37,6 @@ impl<'tcx> super::QueryTypeOp<'tcx> for AscribeUserType<'tcx> {
) -> Fallible<CanonicalizedQueryResponse<'tcx, ()>> {
tcx.type_op_ascribe_user_type(canonicalized)
}
fn shrink_to_tcx_lifetime(
v: &'a CanonicalizedQueryResponse<'tcx, ()>,
) -> &'a Canonical<'tcx, QueryResponse<'tcx, ()>> {
v
}
}
BraceStructTypeFoldableImpl! {

View file

@ -1,4 +1,4 @@
use crate::infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResponse, QueryResponse};
use crate::infer::canonical::{Canonicalized, CanonicalizedQueryResponse};
use crate::traits::query::Fallible;
use crate::ty::{ParamEnvAnd, Ty, TyCtxt};
@ -34,12 +34,6 @@ impl<'tcx> super::QueryTypeOp<'tcx> for Eq<'tcx> {
) -> Fallible<CanonicalizedQueryResponse<'tcx, ()>> {
tcx.type_op_eq(canonicalized)
}
fn shrink_to_tcx_lifetime(
v: &'a CanonicalizedQueryResponse<'tcx, ()>,
) -> &'a Canonical<'tcx, QueryResponse<'tcx, ()>> {
v
}
}
BraceStructTypeFoldableImpl! {

View file

@ -1,4 +1,4 @@
use crate::infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResponse, QueryResponse};
use crate::infer::canonical::{Canonicalized, CanonicalizedQueryResponse};
use crate::traits::query::outlives_bounds::OutlivesBound;
use crate::traits::query::Fallible;
use crate::ty::{ParamEnvAnd, Ty, TyCtxt};
@ -38,12 +38,6 @@ impl<'tcx> super::QueryTypeOp<'tcx> for ImpliedOutlivesBounds<'tcx> {
tcx.implied_outlives_bounds(canonicalized)
}
fn shrink_to_tcx_lifetime(
v: &'a CanonicalizedQueryResponse<'tcx, Self::QueryResponse>,
) -> &'a Canonical<'tcx, QueryResponse<'tcx, Self::QueryResponse>> {
v
}
}
BraceStructTypeFoldableImpl! {

View file

@ -1,6 +1,6 @@
use crate::infer::canonical::{
Canonical, Canonicalized, CanonicalizedQueryResponse, OriginalQueryValues,
QueryRegionConstraints, QueryResponse,
Canonicalized, CanonicalizedQueryResponse, OriginalQueryValues,
QueryRegionConstraints,
};
use crate::infer::{InferCtxt, InferOk};
use std::fmt;
@ -66,22 +66,6 @@ pub trait QueryTypeOp<'tcx>: fmt::Debug + Sized + TypeFoldable<'tcx> + 'tcx {
canonicalized: Canonicalized<'tcx, ParamEnvAnd<'tcx, Self>>,
) -> Fallible<CanonicalizedQueryResponse<'tcx, Self::QueryResponse>>;
/// Casts a lifted query result (which is in the gcx lifetime)
/// into the tcx lifetime. This is always just an identity cast,
/// but the generic code doesn't realize it -- put another way, in
/// the generic code, we have a `Lifted<'tcx, Self::QueryResponse>`
/// and we want to convert that to a `Self::QueryResponse`. This is
/// not a priori valid, so we can't do it -- but in practice, it
/// is always a no-op (e.g., the lifted form of a type,
/// `Ty<'tcx>`, is a subtype of `Ty<'tcx>`). So we have to push
/// the operation into the impls that know more specifically what
/// `QueryResponse` is. This operation would (maybe) be nicer with
/// something like HKTs or GATs, since then we could make
/// `QueryResponse` parametric and `'tcx` and `'tcx` etc.
fn shrink_to_tcx_lifetime(
lifted_query_result: &'a CanonicalizedQueryResponse<'tcx, Self::QueryResponse>,
) -> &'a Canonical<'tcx, QueryResponse<'tcx, Self::QueryResponse>>;
fn fully_perform_into(
query_key: ParamEnvAnd<'tcx, Self>,
infcx: &InferCtxt<'_, 'tcx>,
@ -99,7 +83,6 @@ pub trait QueryTypeOp<'tcx>: fmt::Debug + Sized + TypeFoldable<'tcx> + 'tcx {
let canonical_self =
infcx.canonicalize_hr_query_hack(&query_key, &mut canonical_var_values);
let canonical_result = Self::perform_query(infcx.tcx, canonical_self)?;
let canonical_result = Self::shrink_to_tcx_lifetime(&canonical_result);
let param_env = query_key.param_env;

View file

@ -1,4 +1,4 @@
use crate::infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResponse, QueryResponse};
use crate::infer::canonical::{Canonicalized, CanonicalizedQueryResponse};
use std::fmt;
use crate::traits::query::Fallible;
use crate::ty::fold::TypeFoldable;
@ -38,12 +38,6 @@ where
) -> Fallible<CanonicalizedQueryResponse<'tcx, Self::QueryResponse>> {
T::type_op_method(tcx, canonicalized)
}
fn shrink_to_tcx_lifetime(
v: &'a CanonicalizedQueryResponse<'tcx, T>,
) -> &'a Canonical<'tcx, QueryResponse<'tcx, T>> {
T::shrink_to_tcx_lifetime(v)
}
}
pub trait Normalizable<'tcx>: fmt::Debug + TypeFoldable<'tcx> + Lift<'tcx> + Copy {
@ -51,12 +45,6 @@ pub trait Normalizable<'tcx>: fmt::Debug + TypeFoldable<'tcx> + Lift<'tcx> + Cop
tcx: TyCtxt<'tcx>,
canonicalized: Canonicalized<'tcx, ParamEnvAnd<'tcx, Normalize<Self>>>,
) -> Fallible<CanonicalizedQueryResponse<'tcx, Self>>;
/// Converts from the `'tcx` (lifted) form of `Self` into the `tcx`
/// form of `Self`.
fn shrink_to_tcx_lifetime(
v: &'a CanonicalizedQueryResponse<'tcx, Self>,
) -> &'a Canonical<'tcx, QueryResponse<'tcx, Self>>;
}
impl Normalizable<'tcx> for Ty<'tcx> {
@ -66,12 +54,6 @@ impl Normalizable<'tcx> for Ty<'tcx> {
) -> Fallible<CanonicalizedQueryResponse<'tcx, Self>> {
tcx.type_op_normalize_ty(canonicalized)
}
fn shrink_to_tcx_lifetime(
v: &'a CanonicalizedQueryResponse<'tcx, Self>,
) -> &'a Canonical<'tcx, QueryResponse<'tcx, Self>> {
v
}
}
impl Normalizable<'tcx> for ty::Predicate<'tcx> {
@ -81,12 +63,6 @@ impl Normalizable<'tcx> for ty::Predicate<'tcx> {
) -> Fallible<CanonicalizedQueryResponse<'tcx, Self>> {
tcx.type_op_normalize_predicate(canonicalized)
}
fn shrink_to_tcx_lifetime(
v: &'a CanonicalizedQueryResponse<'tcx, Self>,
) -> &'a Canonical<'tcx, QueryResponse<'tcx, Self>> {
v
}
}
impl Normalizable<'tcx> for ty::PolyFnSig<'tcx> {
@ -96,12 +72,6 @@ impl Normalizable<'tcx> for ty::PolyFnSig<'tcx> {
) -> Fallible<CanonicalizedQueryResponse<'tcx, Self>> {
tcx.type_op_normalize_poly_fn_sig(canonicalized)
}
fn shrink_to_tcx_lifetime(
v: &'a CanonicalizedQueryResponse<'tcx, Self>,
) -> &'a Canonical<'tcx, QueryResponse<'tcx, Self>> {
v
}
}
impl Normalizable<'tcx> for ty::FnSig<'tcx> {
@ -111,12 +81,6 @@ impl Normalizable<'tcx> for ty::FnSig<'tcx> {
) -> Fallible<CanonicalizedQueryResponse<'tcx, Self>> {
tcx.type_op_normalize_fn_sig(canonicalized)
}
fn shrink_to_tcx_lifetime(
v: &'a CanonicalizedQueryResponse<'tcx, Self>,
) -> &'a Canonical<'tcx, QueryResponse<'tcx, Self>> {
v
}
}
BraceStructTypeFoldableImpl! {

View file

@ -1,4 +1,4 @@
use crate::infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResponse, QueryResponse};
use crate::infer::canonical::{Canonicalized, CanonicalizedQueryResponse};
use crate::traits::query::dropck_outlives::trivial_dropck_outlives;
use crate::traits::query::dropck_outlives::DropckOutlivesResult;
use crate::traits::query::Fallible;
@ -53,12 +53,6 @@ impl super::QueryTypeOp<'tcx> for DropckOutlives<'tcx> {
tcx.dropck_outlives(canonicalized)
}
fn shrink_to_tcx_lifetime(
lifted_query_result: &'a CanonicalizedQueryResponse<'tcx, Self::QueryResponse>,
) -> &'a Canonical<'tcx, QueryResponse<'tcx, Self::QueryResponse>> {
lifted_query_result
}
}
BraceStructTypeFoldableImpl! {

View file

@ -1,4 +1,4 @@
use crate::infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResponse, QueryResponse};
use crate::infer::canonical::{Canonicalized, CanonicalizedQueryResponse};
use crate::traits::query::Fallible;
use crate::ty::{ParamEnvAnd, Predicate, TyCtxt};
@ -43,12 +43,6 @@ impl<'tcx> super::QueryTypeOp<'tcx> for ProvePredicate<'tcx> {
) -> Fallible<CanonicalizedQueryResponse<'tcx, ()>> {
tcx.type_op_prove_predicate(canonicalized)
}
fn shrink_to_tcx_lifetime(
v: &'a CanonicalizedQueryResponse<'tcx, ()>,
) -> &'a Canonical<'tcx, QueryResponse<'tcx, ()>> {
v
}
}
BraceStructTypeFoldableImpl! {

View file

@ -1,4 +1,4 @@
use crate::infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResponse, QueryResponse};
use crate::infer::canonical::{Canonicalized, CanonicalizedQueryResponse};
use crate::traits::query::Fallible;
use crate::ty::{ParamEnvAnd, Ty, TyCtxt};
@ -34,12 +34,6 @@ impl<'tcx> super::QueryTypeOp<'tcx> for Subtype<'tcx> {
) -> Fallible<CanonicalizedQueryResponse<'tcx, ()>> {
tcx.type_op_subtype(canonicalized)
}
fn shrink_to_tcx_lifetime(
v: &'a CanonicalizedQueryResponse<'tcx, ()>,
) -> &'a Canonical<'tcx, QueryResponse<'tcx, ()>> {
v
}
}
BraceStructTypeFoldableImpl! {

View file

@ -2491,7 +2491,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
if other.evaluation.must_apply_modulo_regions() {
match victim.candidate {
ImplCandidate(victim_def) => {
let tcx = self.tcx().global_tcx();
let tcx = self.tcx();
return tcx.specializes((other_def, victim_def))
|| tcx.impls_are_allowed_to_overlap(
other_def, victim_def).is_some();

View file

@ -162,7 +162,6 @@ impl<'tcx> Children {
}
};
let tcx = tcx.global_tcx();
let (le, ge) = traits::overlapping_impls(
tcx,
possible_sibling,

View file

@ -661,8 +661,7 @@ impl<'tcx> TyCtxt<'tcx> {
}
}
None => {
self.global_tcx()
.impl_defaultness(node_item_def_id)
self.impl_defaultness(node_item_def_id)
.is_default()
}
}

View file

@ -1067,14 +1067,6 @@ pub struct GlobalCtxt<'tcx> {
}
impl<'tcx> TyCtxt<'tcx> {
/// Gets the global `TyCtxt`.
#[inline]
pub fn global_tcx(self) -> TyCtxt<'tcx> {
TyCtxt {
gcx: self.gcx,
}
}
#[inline(always)]
pub fn hir(self) -> &'tcx hir_map::Map<'tcx> {
&self.hir_map
@ -1156,11 +1148,6 @@ impl<'tcx> TyCtxt<'tcx> {
value.lift_to_tcx(self)
}
/// Like lift, but only tries in the global tcx.
pub fn lift_to_global<T: ?Sized + Lift<'tcx>>(self, value: &T) -> Option<T::Lifted> {
value.lift_to_tcx(self.global_tcx())
}
/// Creates a type context and call the closure with a `TyCtxt` reference
/// to the context. The closure enforces that the type context and any interned
/// value (types, substs, etc.) can only be used while `ty::tls` has a valid
@ -1432,7 +1419,7 @@ impl<'tcx> TyCtxt<'tcx> {
-> Result<(), E::Error>
where E: ty::codec::TyEncoder
{
self.queries.on_disk_cache.serialize(self.global_tcx(), encoder)
self.queries.on_disk_cache.serialize(self, encoder)
}
/// If `true`, we should use the MIR-based borrowck, but also
@ -1600,7 +1587,7 @@ impl<'tcx> GlobalCtxt<'tcx> {
let tcx = TyCtxt {
gcx: self,
};
ty::tls::with_related_context(tcx.global_tcx(), |icx| {
ty::tls::with_related_context(tcx, |icx| {
let new_icx = ty::tls::ImplicitCtxt {
tcx,
query: icx.query.clone(),
@ -2425,7 +2412,7 @@ impl<'tcx> TyCtxt<'tcx> {
#[inline]
pub fn mk_array(self, ty: Ty<'tcx>, n: u64) -> Ty<'tcx> {
self.mk_ty(Array(ty, ty::Const::from_usize(self.global_tcx(), n)))
self.mk_ty(Array(ty, ty::Const::from_usize(self, n)))
}
#[inline]
@ -2640,7 +2627,7 @@ impl<'tcx> TyCtxt<'tcx> {
if ts.len() == 0 {
List::empty()
} else {
self.global_tcx()._intern_canonical_var_infos(ts)
self._intern_canonical_var_infos(ts)
}
}

View file

@ -193,7 +193,7 @@ impl<'tcx> ty::TyS<'tcx> {
ty::Adt(def, _) => format!("{} `{}`", def.descr(), tcx.def_path_str(def.did)).into(),
ty::Foreign(def_id) => format!("extern type `{}`", tcx.def_path_str(def_id)).into(),
ty::Array(_, n) => {
let n = tcx.lift_to_global(&n).unwrap();
let n = tcx.lift(&n).unwrap();
match n.try_eval_usize(tcx, ty::ParamEnv::empty()) {
Some(n) => {
format!("array of {} element{}", n, pluralise!(n)).into()

View file

@ -210,7 +210,7 @@ impl<'tcx> Instance<'tcx> {
}
pub fn mono(tcx: TyCtxt<'tcx>, def_id: DefId) -> Instance<'tcx> {
Instance::new(def_id, tcx.global_tcx().empty_substs_for_def_id(def_id))
Instance::new(def_id, tcx.empty_substs_for_def_id(def_id))
}
#[inline]

View file

@ -1883,7 +1883,7 @@ impl<'tcx> HasDataLayout for TyCtxt<'tcx> {
impl<'tcx> HasTyCtxt<'tcx> for TyCtxt<'tcx> {
fn tcx(&self) -> TyCtxt<'tcx> {
self.global_tcx()
*self
}
}
@ -2003,7 +2003,7 @@ impl TyCtxt<'tcx> {
pub fn layout_of(self, param_env_and_ty: ty::ParamEnvAnd<'tcx, Ty<'tcx>>)
-> Result<TyLayout<'tcx>, LayoutError<'tcx>> {
let cx = LayoutCx {
tcx: self.global_tcx(),
tcx: self,
param_env: param_env_and_ty.param_env
};
cx.layout_of(param_env_and_ty.value)
@ -2017,7 +2017,7 @@ impl ty::query::TyCtxtAt<'tcx> {
pub fn layout_of(self, param_env_and_ty: ty::ParamEnvAnd<'tcx, Ty<'tcx>>)
-> Result<TyLayout<'tcx>, LayoutError<'tcx>> {
let cx = LayoutCx {
tcx: self.global_tcx().at(self.span),
tcx: self.at(self.span),
param_env: param_env_and_ty.param_env
};
cx.layout_of(param_env_and_ty.value)

View file

@ -2378,7 +2378,7 @@ impl<'tcx> AdtDef {
pub fn eval_explicit_discr(&self, tcx: TyCtxt<'tcx>, expr_did: DefId) -> Option<Discr<'tcx>> {
let param_env = tcx.param_env(expr_did);
let repr_type = self.repr.discr_type();
let substs = InternalSubsts::identity_for_item(tcx.global_tcx(), expr_did);
let substs = InternalSubsts::identity_for_item(tcx, expr_did);
let instance = ty::Instance::new(expr_did, substs);
let cid = GlobalId {
instance,
@ -2387,7 +2387,7 @@ impl<'tcx> AdtDef {
match tcx.const_eval(param_env.and(cid)) {
Ok(val) => {
// FIXME: Find the right type and use it instead of `val.ty` here
if let Some(b) = val.try_eval_bits(tcx.global_tcx(), param_env, val.ty) {
if let Some(b) = val.try_eval_bits(tcx, param_env, val.ty) {
trace!("discriminants: {} ({:?})", b, repr_type);
Some(Discr {
val: b,
@ -2423,7 +2423,7 @@ impl<'tcx> AdtDef {
tcx: TyCtxt<'tcx>,
) -> impl Iterator<Item = (VariantIdx, Discr<'tcx>)> + Captures<'tcx> {
let repr_type = self.repr.discr_type();
let initial = repr_type.initial_discriminant(tcx.global_tcx());
let initial = repr_type.initial_discriminant(tcx);
let mut prev_discr = None::<Discr<'tcx>>;
self.variants.iter_enumerated().map(move |(i, v)| {
let mut discr = prev_discr.map_or(initial, |d| d.wrap_incr(tcx));
@ -2457,7 +2457,7 @@ impl<'tcx> AdtDef {
let (val, offset) = self.discriminant_def_for_variant(variant_index);
let explicit_value = val
.and_then(|expr_did| self.eval_explicit_discr(tcx, expr_did))
.unwrap_or_else(|| self.repr.discr_type().initial_discriminant(tcx.global_tcx()));
.unwrap_or_else(|| self.repr.discr_type().initial_discriminant(tcx));
explicit_value.checked_add(tcx, offset as u128).0
}

View file

@ -917,7 +917,7 @@ pub trait PrettyPrinter<'tcx>:
let min = 1u128 << (bit_size - 1);
let max = min - 1;
let ty = self.tcx().lift_to_global(&ct.ty).unwrap();
let ty = self.tcx().lift(&ct.ty).unwrap();
let size = self.tcx().layout_of(ty::ParamEnv::empty().and(ty))
.unwrap()
.size;

View file

@ -265,7 +265,7 @@ impl<'tcx> TyCtxt<'tcx> {
tls::with_related_context(self, move |current_icx| {
// Update the `ImplicitCtxt` to point to our new query job.
let new_icx = tls::ImplicitCtxt {
tcx: self.global_tcx(),
tcx: self,
query: Some(job),
diagnostics,
layout_depth: current_icx.layout_depth,
@ -274,7 +274,7 @@ impl<'tcx> TyCtxt<'tcx> {
// Use the `ImplicitCtxt` while we execute the query.
tls::enter_context(&new_icx, |_| {
compute(self.global_tcx())
compute(self)
})
})
}
@ -384,7 +384,7 @@ impl<'tcx> TyCtxt<'tcx> {
let ((result, dep_node_index), diagnostics) = with_diagnostics(|diagnostics| {
self.start_query(job.job.clone(), diagnostics, |tcx| {
tcx.dep_graph.with_anon_task(Q::dep_kind(), || {
Q::compute(tcx.global_tcx(), key)
Q::compute(tcx, key)
})
})
});
@ -445,10 +445,10 @@ impl<'tcx> TyCtxt<'tcx> {
debug_assert!(self.dep_graph.is_green(dep_node));
// First we try to load the result from the on-disk cache.
let result = if Q::cache_on_disk(self.global_tcx(), key.clone(), None) &&
let result = if Q::cache_on_disk(self, key.clone(), None) &&
self.sess.opts.debugging_opts.incremental_queries {
self.sess.profiler(|p| p.incremental_load_result_start(Q::NAME));
let result = Q::try_load_from_disk(self.global_tcx(), prev_dep_node_index);
let result = Q::try_load_from_disk(self, prev_dep_node_index);
self.sess.profiler(|p| p.incremental_load_result_end(Q::NAME));
// We always expect to find a cached result for things that
@ -643,7 +643,7 @@ impl<'tcx> TyCtxt<'tcx> {
macro_rules! handle_cycle_error {
([][$tcx: expr, $error:expr]) => {{
$tcx.report_cycle($error).emit();
Value::from_cycle_error($tcx.global_tcx())
Value::from_cycle_error($tcx)
}};
([fatal_cycle$(, $modifiers:ident)*][$tcx:expr, $error:expr]) => {{
$tcx.report_cycle($error).emit();
@ -652,7 +652,7 @@ macro_rules! handle_cycle_error {
}};
([cycle_delay_bug$(, $modifiers:ident)*][$tcx:expr, $error:expr]) => {{
$tcx.report_cycle($error).delay_as_bug();
Value::from_cycle_error($tcx.global_tcx())
Value::from_cycle_error($tcx)
}};
([$other:ident$(, $modifiers:ident)*][$($args:tt)*]) => {
handle_cycle_error!([$($modifiers),*][$($args)*])
@ -999,7 +999,7 @@ macro_rules! define_queries_inner {
// would be missing appropriate entries in `providers`.
.unwrap_or(&tcx.queries.fallback_extern_providers)
.$name;
provider(tcx.global_tcx(), key)
provider(tcx, key)
})
}

View file

@ -1,10 +1,9 @@
#![allow(non_camel_case_types)]
use rustc_data_structures::{fx::FxHashMap, sync::Lock};
use rustc_data_structures::sync::Lock;
use std::cell::{RefCell, Cell};
use std::cell::Cell;
use std::fmt::Debug;
use std::hash::Hash;
use std::time::{Duration, Instant};
use std::sync::mpsc::{Sender};
@ -279,39 +278,3 @@ pub fn indenter() -> Indenter {
debug!(">>");
Indenter { _cannot_construct_outside_of_this_module: () }
}
pub trait MemoizationMap {
type Key: Clone;
type Value: Clone;
/// If `key` is present in the map, return the value,
/// otherwise invoke `op` and store the value in the map.
///
/// N.B., if the receiver is a `DepTrackingMap`, special care is
/// needed in the `op` to ensure that the correct edges are
/// added into the dep graph. See the `DepTrackingMap` impl for
/// more details!
fn memoize<OP>(&self, key: Self::Key, op: OP) -> Self::Value
where OP: FnOnce() -> Self::Value;
}
impl<K, V> MemoizationMap for RefCell<FxHashMap<K,V>>
where K: Hash+Eq+Clone, V: Clone
{
type Key = K;
type Value = V;
fn memoize<OP>(&self, key: K, op: OP) -> V
where OP: FnOnce() -> V
{
let result = self.borrow().get(&key).cloned();
match result {
Some(result) => result,
None => {
let result = op();
self.borrow_mut().insert(key, result.clone());
result
}
}
}
}

View file

@ -13,7 +13,7 @@ crate-type = ["dylib"]
graphviz = { path = "../libgraphviz" }
lazy_static = "1.0"
log = "0.4"
env_logger = { version = "0.6", default-features = false }
env_logger = { version = "0.7", default-features = false }
rustc = { path = "../librustc" }
rustc_target = { path = "../librustc_target" }
rustc_data_structures = { path = "../librustc_data_structures" }
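
Alongside the Cargo.lock changes earlier, `rustc_driver`'s dependency moves from env_logger 0.6 to 0.7 (#64847), still with default features disabled. The basic initialization entry point is unchanged across that bump; a minimal usage sketch for an ordinary binary, assuming `log` and `env_logger` as dependencies:

```rust
// Sketch: env_logger 0.7 keeps the simple `init` entry point, so a version
// bump like this one normally needs no code changes on the caller's side.
fn main() {
    env_logger::init(); // honours RUST_LOG, e.g. RUST_LOG=debug
    log::info!("logger initialised");
}
```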

View file

@ -442,8 +442,8 @@ pub fn rustc_queries(input: TokenStream) -> TokenStream {
.map(|c| c.is_green())
.unwrap_or(false));
let key = RecoverKey::recover(tcx.global_tcx(), self).unwrap();
if queries::#name::cache_on_disk(tcx.global_tcx(), key, None) {
let key = RecoverKey::recover(tcx, self).unwrap();
if queries::#name::cache_on_disk(tcx, key, None) {
let _ = tcx.#name(key);
}
}

View file

@ -621,7 +621,7 @@ impl<'cx, 'tcx> DataflowResultsConsumer<'cx, 'tcx> for MirBorrowckCtxt<'cx, 'tcx
target: _,
unwind: _,
} => {
let gcx = self.infcx.tcx.global_tcx();
let tcx = self.infcx.tcx;
// Compute the type with accurate region information.
let drop_place_ty = drop_place.ty(self.body, self.infcx.tcx);
@ -629,10 +629,10 @@ impl<'cx, 'tcx> DataflowResultsConsumer<'cx, 'tcx> for MirBorrowckCtxt<'cx, 'tcx
// Erase the regions.
let drop_place_ty = self.infcx.tcx.erase_regions(&drop_place_ty).ty;
// "Lift" into the gcx -- once regions are erased, this type should be in the
// "Lift" into the tcx -- once regions are erased, this type should be in the
// global arenas; this "lift" operation basically just asserts that is true, but
// that is useful later.
gcx.lift_to_global(&drop_place_ty).unwrap();
tcx.lift(&drop_place_ty).unwrap();
debug!("visit_terminator_drop \
loc: {:?} term: {:?} drop_place: {:?} drop_place_ty: {:?} span: {:?}",

View file

@ -1894,9 +1894,8 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
// Erase the regions from `ty` to get a global type. The
// `Sized` bound in no way depends on precise regions, so this
// shouldn't affect `is_sized`.
let gcx = tcx.global_tcx();
let erased_ty = tcx.erase_regions(&ty);
if !erased_ty.is_sized(gcx.at(span), self.param_env) {
if !erased_ty.is_sized(tcx.at(span), self.param_env) {
// in current MIR construction, all non-control-flow rvalue
// expressions evaluate through `as_temp` or `into` a return
// slot or local, so to find all unsized rvalues it is enough

View file

@ -521,9 +521,8 @@ impl<'cx, 'tcx> UniversalRegionsBuilder<'cx, 'tcx> {
defining_ty: DefiningTy<'tcx>,
) -> UniversalRegionIndices<'tcx> {
let tcx = self.infcx.tcx;
let gcx = tcx.global_tcx();
let closure_base_def_id = tcx.closure_base_def_id(self.mir_def_id);
let identity_substs = InternalSubsts::identity_for_item(gcx, closure_base_def_id);
let identity_substs = InternalSubsts::identity_for_item(tcx, closure_base_def_id);
let fr_substs = match defining_ty {
DefiningTy::Closure(_, ClosureSubsts { ref substs })
| DefiningTy::Generator(_, GeneratorSubsts { ref substs }, _) => {
@ -542,7 +541,7 @@ impl<'cx, 'tcx> UniversalRegionsBuilder<'cx, 'tcx> {
DefiningTy::FnDef(_, substs) | DefiningTy::Const(_, substs) => substs,
};
let global_mapping = iter::once((gcx.lifetimes.re_static, fr_static));
let global_mapping = iter::once((tcx.lifetimes.re_static, fr_static));
let subst_mapping = identity_substs
.regions()
.zip(fr_substs.regions().map(|r| r.to_region_vid()));

View file

@ -148,9 +148,8 @@ pub(crate) fn on_all_drop_children_bits<'tcx, F>(
let ty = place.ty(body, tcx).ty;
debug!("on_all_drop_children_bits({:?}, {:?} : {:?})", path, place, ty);
let gcx = tcx.global_tcx();
let erased_ty = tcx.erase_regions(&ty);
if erased_ty.needs_drop(gcx, ctxt.param_env) {
if erased_ty.needs_drop(tcx, ctxt.param_env) {
each_child(child);
} else {
debug!("on_all_drop_children_bits - skipping")

View file

@ -543,9 +543,9 @@ fn make_mirror_unadjusted<'a, 'tcx>(
// Now comes the rote stuff:
hir::ExprKind::Repeat(ref v, ref count) => {
let def_id = cx.tcx.hir().local_def_id(count.hir_id);
let substs = InternalSubsts::identity_for_item(cx.tcx.global_tcx(), def_id);
let substs = InternalSubsts::identity_for_item(cx.tcx, def_id);
let instance = ty::Instance::resolve(
cx.tcx.global_tcx(),
cx.tcx,
cx.param_env,
def_id,
substs,

View file

@ -83,7 +83,7 @@ impl<'a, 'tcx> Cx<'a, 'tcx> {
infcx,
root_lint_level: src_id,
param_env: tcx.param_env(src_def_id),
identity_substs: InternalSubsts::identity_for_item(tcx.global_tcx(), src_def_id),
identity_substs: InternalSubsts::identity_for_item(tcx, src_def_id),
region_scope_tree: tcx.region_scope_tree(src_def_id),
tables,
constness,
@ -154,12 +154,11 @@ impl<'a, 'tcx> Cx<'a, 'tcx> {
}
pub fn pattern_from_hir(&mut self, p: &hir::Pat) -> Pat<'tcx> {
let tcx = self.tcx.global_tcx();
let p = match tcx.hir().get(p.hir_id) {
let p = match self.tcx.hir().get(p.hir_id) {
Node::Pat(p) | Node::Binding(p) => p,
node => bug!("pattern became {:?}", node)
};
Pat::from_hir(tcx, self.param_env.and(self.identity_substs), self.tables(), p)
Pat::from_hir(self.tcx, self.param_env.and(self.identity_substs), self.tables(), p)
}
pub fn trait_method(&mut self,
@ -187,7 +186,7 @@ impl<'a, 'tcx> Cx<'a, 'tcx> {
}
pub fn needs_drop(&mut self, ty: Ty<'tcx>) -> bool {
ty.needs_drop(self.tcx.global_tcx(), self.param_env)
ty.needs_drop(self.tcx, self.param_env)
}
pub fn tcx(&self) -> TyCtxt<'tcx> {

View file

@ -266,20 +266,51 @@ impl<'tcx> MatchVisitor<'_, 'tcx> {
"refutable pattern in {}: {} not covered",
origin, joined_patterns
);
err.span_label(pat.span, match &pat.kind {
match &pat.kind {
hir::PatKind::Path(hir::QPath::Resolved(None, path))
if path.segments.len() == 1 && path.segments[0].args.is_none() => {
format!("interpreted as {} {} pattern, not new variable",
path.res.article(), path.res.descr())
if path.segments.len() == 1 && path.segments[0].args.is_none() =>
{
const_not_var(&mut err, cx.tcx, pat, path);
}
_ => pattern_not_convered_label(&witnesses, &joined_patterns),
});
_ => {
err.span_label(
pat.span,
pattern_not_covered_label(&witnesses, &joined_patterns),
);
}
}
adt_defined_here(cx, &mut err, pattern_ty, &witnesses);
err.emit();
});
}
}
/// A path pattern was interpreted as a constant, not a new variable.
/// This caused an irrefutable match failure in e.g. `let`.
fn const_not_var(err: &mut DiagnosticBuilder<'_>, tcx: TyCtxt<'_>, pat: &Pat, path: &hir::Path) {
let descr = path.res.descr();
err.span_label(pat.span, format!(
"interpreted as {} {} pattern, not a new variable",
path.res.article(),
descr,
));
err.span_suggestion(
pat.span,
"introduce a variable instead",
format!("{}_var", path.segments[0].ident).to_lowercase(),
// Cannot use `MachineApplicable` as it's not really *always* correct
// because there may be such an identifier in scope or the user maybe
// really wanted to match against the constant. This is quite unlikely however.
Applicability::MaybeIncorrect,
);
if let Some(span) = tcx.hir().res_span(path.res) {
err.span_label(span, format!("{} defined here", descr));
}
}
fn check_for_bindings_named_same_as_variants(cx: &MatchVisitor<'_, '_>, pat: &Pat) {
pat.walk(|p| {
if let hir::PatKind::Binding(_, _, ident, None) = p.kind {
@ -445,7 +476,7 @@ fn check_exhaustive<'tcx>(
cx.tcx.sess, sp,
format!("non-exhaustive patterns: {} not covered", joined_patterns),
);
err.span_label(sp, pattern_not_convered_label(&witnesses, &joined_patterns));
err.span_label(sp, pattern_not_covered_label(&witnesses, &joined_patterns));
adt_defined_here(cx, &mut err, scrut_ty, &witnesses);
err.help(
"ensure that all possible cases are being handled, \
@ -471,7 +502,7 @@ fn joined_uncovered_patterns(witnesses: &[super::Pat<'_>]) -> String {
}
}
fn pattern_not_convered_label(witnesses: &[super::Pat<'_>], joined_patterns: &str) -> String {
fn pattern_not_covered_label(witnesses: &[super::Pat<'_>], joined_patterns: &str) -> String {
format!("pattern{} {} not covered", rustc_errors::pluralise!(witnesses.len()), joined_patterns)
}
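
The new `const_not_var` helper fires when a single-segment path pattern in a `let` resolves to a constant, e.g. `let A = 2;` with `const A: i32 = 3;` in scope (#64859): it labels the pattern, points at the constant's definition via the new `res_span`, and suggests a lowercase variable name with `MaybeIncorrect` applicability. A minimal program that triggers the improved diagnostic (annotations approximate the wording above):

```rust
// Sketch of the case the new diagnostic targets: the path `A` resolves to a
// constant pattern, not a fresh binding, so the `let` is refutable.
const A: i32 = 3;

fn main() {
    let A = 2;
    //~^ ERROR refutable pattern in local binding
    //~| NOTE interpreted as a constant pattern, not a new variable
    //~| HELP introduce a variable instead: `a_var`
}
```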

View file

@ -79,7 +79,7 @@ fn make_shim<'tcx>(tcx: TyCtxt<'tcx>, instance: ty::InstanceDef<'tcx>) -> &'tcx
}
ty::InstanceDef::ClosureOnceShim { call_once } => {
let fn_mut = tcx.lang_items().fn_mut_trait().unwrap();
let call_mut = tcx.global_tcx()
let call_mut = tcx
.associated_items(fn_mut)
.find(|it| it.kind == ty::AssocKind::Method)
.unwrap().def_id;

View file

@ -474,7 +474,7 @@ impl context::UnificationOps<ChalkArenas<'tcx>, ChalkArenas<'tcx>>
&self,
value: DelayedLiteral<ChalkArenas<'tcx>>,
) -> DelayedLiteral<ChalkArenas<'tcx>> {
match self.infcx.tcx.lift_to_global(&value) {
match self.infcx.tcx.lift(&value) {
Some(literal) => literal,
None => bug!("cannot lift {:?}", value),
}

View file

@ -1269,7 +1269,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
// to avoid ICEs.
for item in &regular_traits {
let object_safety_violations =
tcx.global_tcx().astconv_object_safety_violations(item.trait_ref().def_id());
tcx.astconv_object_safety_violations(item.trait_ref().def_id());
if !object_safety_violations.is_empty() {
tcx.report_object_safety_error(
span,
@ -1368,11 +1368,8 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
span,
format!("associated type `{}` must be specified", assoc_item.ident),
);
if item_def_id.is_local() {
err.span_label(
tcx.def_span(*item_def_id),
format!("`{}` defined here", assoc_item.ident),
);
if let Some(sp) = tcx.hir().span_if_local(*item_def_id) {
err.span_label(sp, format!("`{}` defined here", assoc_item.ident));
}
if suggest {
if let Ok(snippet) = tcx.sess.source_map().span_to_snippet(

View file

@ -351,16 +351,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
err.span_label(call_expr.span, "call expression requires function");
let def_span = match def {
Res::Err => None,
Res::Local(id) => {
Some(self.tcx.hir().span(id))
},
_ => def
.opt_def_id()
.and_then(|did| self.tcx.hir().span_if_local(did)),
};
if let Some(span) = def_span {
if let Some(span) = self.tcx.hir().res_span(def) {
let label = match (unit_variant, inner_callee_path) {
(Some(path), _) => format!("`{}` defined here", path),
(_, Some(hir::QPath::Resolved(_, path))) => format!(

View file

@ -620,8 +620,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
expr: &'tcx hir::Expr
) -> Ty<'tcx> {
if self.ret_coercion.is_none() {
struct_span_err!(self.tcx.sess, expr.span, E0572,
"return statement outside of function body").emit();
struct_span_err!(
self.tcx.sess,
expr.span,
E0572,
"return statement outside of function body",
).emit();
} else if let Some(ref e) = expr_opt {
if self.ret_coercion_span.borrow().is_none() {
*self.ret_coercion_span.borrow_mut() = Some(e.span);
@ -932,9 +936,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
Ok(self.to_const(count, tcx.type_of(count_def_id)))
} else {
let param_env = ty::ParamEnv::empty();
let substs = InternalSubsts::identity_for_item(tcx.global_tcx(), count_def_id);
let substs = InternalSubsts::identity_for_item(tcx, count_def_id);
let instance = ty::Instance::resolve(
tcx.global_tcx(),
tcx,
param_env,
count_def_id,
substs,
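
The reformatted `struct_span_err!` above still emits E0572 when a `return` appears outside any function body; a classic trigger, kept here only as a sketch:

```rust
// Sketch: a `return` in a constant's initializer has no enclosing function
// body, so type checking reports E0572.
const FOO: u32 = return 0; //~ ERROR return statement outside of function body

fn main() {}
```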

View file

@ -48,7 +48,7 @@ impl<'tcx> CheckWfFcxBuilder<'tcx> {
// empty `param_env`.
check_false_global_bounds(&fcx, span, id);
}
let wf_tys = f(&fcx, fcx.tcx.global_tcx());
let wf_tys = f(&fcx, fcx.tcx);
fcx.select_all_obligations_or_error();
fcx.regionck_item(id, span, &wf_tys);
});
@ -366,8 +366,8 @@ fn check_item_type(
) {
debug!("check_item_type: {:?}", item_id);
for_id(tcx, item_id, ty_span).with_fcx(|fcx, gcx| {
let ty = gcx.type_of(gcx.hir().local_def_id(item_id));
for_id(tcx, item_id, ty_span).with_fcx(|fcx, tcx| {
let ty = tcx.type_of(tcx.hir().local_def_id(item_id));
let item_ty = fcx.normalize_associated_types_in(ty_span, &ty);
let mut forbid_unsized = true;

View file

@ -322,29 +322,29 @@ fn visit_implementation_of_dispatch_from_dyn(tcx: TyCtxt<'_>, impl_did: DefId) {
}
}
pub fn coerce_unsized_info<'tcx>(gcx: TyCtxt<'tcx>, impl_did: DefId) -> CoerceUnsizedInfo {
pub fn coerce_unsized_info<'tcx>(tcx: TyCtxt<'tcx>, impl_did: DefId) -> CoerceUnsizedInfo {
debug!("compute_coerce_unsized_info(impl_did={:?})", impl_did);
let coerce_unsized_trait = gcx.lang_items().coerce_unsized_trait().unwrap();
let coerce_unsized_trait = tcx.lang_items().coerce_unsized_trait().unwrap();
let unsize_trait = gcx.lang_items().require(UnsizeTraitLangItem).unwrap_or_else(|err| {
gcx.sess.fatal(&format!("`CoerceUnsized` implementation {}", err));
let unsize_trait = tcx.lang_items().require(UnsizeTraitLangItem).unwrap_or_else(|err| {
tcx.sess.fatal(&format!("`CoerceUnsized` implementation {}", err));
});
// this provider should only get invoked for local def-ids
let impl_hir_id = gcx.hir().as_local_hir_id(impl_did).unwrap_or_else(|| {
let impl_hir_id = tcx.hir().as_local_hir_id(impl_did).unwrap_or_else(|| {
bug!("coerce_unsized_info: invoked for non-local def-id {:?}", impl_did)
});
let source = gcx.type_of(impl_did);
let trait_ref = gcx.impl_trait_ref(impl_did).unwrap();
let source = tcx.type_of(impl_did);
let trait_ref = tcx.impl_trait_ref(impl_did).unwrap();
assert_eq!(trait_ref.def_id, coerce_unsized_trait);
let target = trait_ref.substs.type_at(1);
debug!("visit_implementation_of_coerce_unsized: {:?} -> {:?} (bound)",
source,
target);
let span = gcx.hir().span(impl_hir_id);
let param_env = gcx.param_env(impl_did);
let span = tcx.hir().span(impl_hir_id);
let param_env = tcx.param_env(impl_did);
assert!(!source.has_escaping_bound_vars());
let err_info = CoerceUnsizedInfo { custom_kind: None };
@ -353,7 +353,7 @@ pub fn coerce_unsized_info<'tcx>(gcx: TyCtxt<'tcx>, impl_did: DefId) -> CoerceUn
source,
target);
gcx.infer_ctxt().enter(|infcx| {
tcx.infer_ctxt().enter(|infcx| {
let cause = ObligationCause::misc(span, impl_hir_id);
let check_mutbl = |mt_a: ty::TypeAndMut<'tcx>,
mt_b: ty::TypeAndMut<'tcx>,
@ -372,24 +372,24 @@ pub fn coerce_unsized_info<'tcx>(gcx: TyCtxt<'tcx>, impl_did: DefId) -> CoerceUn
infcx.sub_regions(infer::RelateObjectBound(span), r_b, r_a);
let mt_a = ty::TypeAndMut { ty: ty_a, mutbl: mutbl_a };
let mt_b = ty::TypeAndMut { ty: ty_b, mutbl: mutbl_b };
check_mutbl(mt_a, mt_b, &|ty| gcx.mk_imm_ref(r_b, ty))
check_mutbl(mt_a, mt_b, &|ty| tcx.mk_imm_ref(r_b, ty))
}
(&ty::Ref(_, ty_a, mutbl_a), &ty::RawPtr(mt_b)) => {
let mt_a = ty::TypeAndMut { ty: ty_a, mutbl: mutbl_a };
check_mutbl(mt_a, mt_b, &|ty| gcx.mk_imm_ptr(ty))
check_mutbl(mt_a, mt_b, &|ty| tcx.mk_imm_ptr(ty))
}
(&ty::RawPtr(mt_a), &ty::RawPtr(mt_b)) => {
check_mutbl(mt_a, mt_b, &|ty| gcx.mk_imm_ptr(ty))
check_mutbl(mt_a, mt_b, &|ty| tcx.mk_imm_ptr(ty))
}
(&ty::Adt(def_a, substs_a), &ty::Adt(def_b, substs_b)) if def_a.is_struct() &&
def_b.is_struct() => {
if def_a != def_b {
let source_path = gcx.def_path_str(def_a.did);
let target_path = gcx.def_path_str(def_b.did);
span_err!(gcx.sess,
let source_path = tcx.def_path_str(def_a.did);
let target_path = tcx.def_path_str(def_b.did);
span_err!(tcx.sess,
span,
E0377,
"the trait `CoerceUnsized` may only be implemented \
@ -443,9 +443,9 @@ pub fn coerce_unsized_info<'tcx>(gcx: TyCtxt<'tcx>, impl_did: DefId) -> CoerceUn
let diff_fields = fields.iter()
.enumerate()
.filter_map(|(i, f)| {
let (a, b) = (f.ty(gcx, substs_a), f.ty(gcx, substs_b));
let (a, b) = (f.ty(tcx, substs_a), f.ty(tcx, substs_b));
if gcx.type_of(f.did).is_phantom_data() {
if tcx.type_of(f.did).is_phantom_data() {
// Ignore PhantomData fields
return None;
}
@ -472,7 +472,7 @@ pub fn coerce_unsized_info<'tcx>(gcx: TyCtxt<'tcx>, impl_did: DefId) -> CoerceUn
.collect::<Vec<_>>();
if diff_fields.is_empty() {
span_err!(gcx.sess,
span_err!(tcx.sess,
span,
E0374,
"the trait `CoerceUnsized` may only be implemented \
@ -480,14 +480,14 @@ pub fn coerce_unsized_info<'tcx>(gcx: TyCtxt<'tcx>, impl_did: DefId) -> CoerceUn
being coerced, none found");
return err_info;
} else if diff_fields.len() > 1 {
let item = gcx.hir().expect_item(impl_hir_id);
let item = tcx.hir().expect_item(impl_hir_id);
let span = if let ItemKind::Impl(.., Some(ref t), _, _) = item.kind {
t.path.span
} else {
gcx.hir().span(impl_hir_id)
tcx.hir().span(impl_hir_id)
};
let mut err = struct_span_err!(gcx.sess,
let mut err = struct_span_err!(tcx.sess,
span,
E0375,
"implementing the trait \
@ -514,7 +514,7 @@ pub fn coerce_unsized_info<'tcx>(gcx: TyCtxt<'tcx>, impl_did: DefId) -> CoerceUn
}
_ => {
span_err!(gcx.sess,
span_err!(tcx.sess,
span,
E0376,
"the trait `CoerceUnsized` may only be implemented \
@ -527,7 +527,7 @@ pub fn coerce_unsized_info<'tcx>(gcx: TyCtxt<'tcx>, impl_did: DefId) -> CoerceUn
// Register an obligation for `A: Trait<B>`.
let cause = traits::ObligationCause::misc(span, impl_hir_id);
let predicate = gcx.predicate_for_trait_def(param_env,
let predicate = tcx.predicate_for_trait_def(param_env,
cause,
trait_def_id,
0,

View file
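
The renamed `coerce_unsized_info` above validates user-written `CoerceUnsized` impls (E0374/E0375/E0376/E0377). A minimal sketch of an impl that passes these checks, assuming the nightly `unsize` and `coerce_unsized` features and modeled on the standard-library documentation rather than on this diff:

```rust
#![feature(unsize, coerce_unsized)]

use std::marker::Unsize;
use std::ops::CoerceUnsized;

struct MyBox<T: ?Sized> {
    ptr: *const T,
}

// Exactly one non-PhantomData field changes type (T -> U), which is what
// the check requires before accepting the impl.
impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<MyBox<U>> for MyBox<T> {}

fn main() {
    let data = [1, 2, 3];
    let concrete = MyBox { ptr: &data };   // MyBox<[i32; 3]>
    let slice: MyBox<[i32]> = concrete;    // unsizing coercion enabled by the impl
    let _ = slice.ptr;
}
```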

@ -1717,9 +1717,7 @@ fn find_opaque_ty_constraints(tcx: TyCtxt<'_>, def_id: DefId) -> Ty<'_> {
}
let hir_id = tcx.hir().as_local_hir_id(def_id).unwrap();
let scope = tcx.hir()
.get_defining_scope(hir_id)
.expect("could not get defining scope");
let scope = tcx.hir().get_defining_scope(hir_id);
let mut locator = ConstraintLocator {
def_id,
tcx,

View file

@ -714,7 +714,6 @@ where
/// # Examples
///
/// ```
/// #![feature(map_get_key_value)]
/// use std::collections::HashMap;
///
/// let mut map = HashMap::new();
@ -722,7 +721,7 @@ where
/// assert_eq!(map.get_key_value(&1), Some((&1, &"a")));
/// assert_eq!(map.get_key_value(&2), None);
/// ```
#[unstable(feature = "map_get_key_value", issue = "49347")]
#[stable(feature = "map_get_key_value", since = "1.40.0")]
#[inline]
pub fn get_key_value<Q: ?Sized>(&self, k: &Q) -> Option<(&K, &V)>
where

View file
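
With `map_get_key_value` stabilized above, the method is callable on stable without a feature gate. A small usage sketch, not taken from the diff:

```rust
use std::collections::HashMap;

fn main() {
    let mut map = HashMap::new();
    map.insert("answer", 42);

    // Returns both the stored key and the value; no feature attribute needed.
    match map.get_key_value("answer") {
        Some((key, value)) => println!("{} -> {}", key, value),
        None => println!("missing"),
    }
}
```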

@ -566,7 +566,9 @@ mod prim_array { }
#[doc(alias = "[")]
#[doc(alias = "]")]
#[doc(alias = "[]")]
/// A dynamically-sized view into a contiguous sequence, `[T]`.
/// A dynamically-sized view into a contiguous sequence, `[T]`. Contiguous here
/// means that elements are laid out so that every element is the same
/// distance from its neighbors.
///
/// *[See also the `std::slice` module](slice/index.html).*
///

View file
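
The wording added above can be checked directly: consecutive slice elements sit exactly `size_of::<T>()` bytes apart. A small illustration (my own example, not from the diff):

```rust
use std::mem::size_of;

fn main() {
    let xs: [u16; 4] = [10, 20, 30, 40];
    let s: &[u16] = &xs;

    // Every element is the same distance from its neighbors: one size_of::<u16>().
    for pair in s.windows(2) {
        let a = &pair[0] as *const u16 as usize;
        let b = &pair[1] as *const u16 as usize;
        assert_eq!(b - a, size_of::<u16>());
    }
}
```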

@ -54,8 +54,8 @@ impl Command {
let ret = libc::rtpSpawn(
self.get_argv()[0], // executing program
self.get_argv().as_ptr() as *const _, // argv
*sys::os::environ() as *const *const c_char,
self.get_argv().as_ptr() as *mut *const c_char, // argv
*sys::os::environ() as *mut *const c_char,
100 as c_int, // initial priority
thread::min_stack(), // initial stack size.
0, // options

View file

@ -1546,6 +1546,7 @@ fn calc_result(desc: &TestDesc, task_result: Result<(), Box<dyn Any + Send>>) ->
}
}
}
(&ShouldPanic::Yes, Ok(())) => TrFailedMsg("test did not panic as expected".to_string()),
_ if desc.allow_fail => TrAllowedFail,
_ => TrFailed,
}

View file
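
The new match arm above changes what the harness reports when a `#[should_panic]` test finishes without panicking. A minimal test that would exercise it (illustrative only, not part of the diff):

```rust
// Run under `cargo test`: the harness now reports
// "test did not panic as expected" instead of a bare failure.
#[test]
#[should_panic]
fn finishes_normally() {
    assert_eq!(2 + 2, 4); // completes without panicking, so the test fails
}
```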

@ -2,7 +2,7 @@ use super::*;
use crate::test::{
filter_tests, parse_opts, run_test, DynTestFn, DynTestName, MetricMap, RunIgnored,
ShouldPanic, StaticTestName, TestDesc, TestDescAndFn, TestOpts, TrFailed, TrFailedMsg,
ShouldPanic, StaticTestName, TestDesc, TestDescAndFn, TestOpts, TrFailedMsg,
TrIgnored, TrOk,
};
use std::sync::mpsc::channel;
@ -167,7 +167,7 @@ fn test_should_panic_but_succeeds() {
let (tx, rx) = channel();
run_test(&TestOpts::new(), false, desc, tx, Concurrent::No);
let (_, res, _, _) = rx.recv().unwrap();
assert!(res == TrFailed);
assert!(res == TrFailedMsg("test did not panic as expected".to_string()));
}
fn report_time_test_template(report_time: bool) -> Option<TestExecTime> {

View file

@ -0,0 +1,10 @@
// This test checks that all expected errors occur when there are multiple invalid attributes
// on an item.
#[inline]
//~^ ERROR attribute should be applied to function or closure [E0518]
#[target_feature(enable = "sse2")]
//~^ ERROR attribute should be applied to a function
const FOO: u8 = 0;
fn main() { }

View file

@ -0,0 +1,21 @@
error[E0518]: attribute should be applied to function or closure
--> $DIR/multiple-invalid.rs:4:1
|
LL | #[inline]
| ^^^^^^^^^
...
LL | const FOO: u8 = 0;
| ------------------ not a function or closure
error: attribute should be applied to a function
--> $DIR/multiple-invalid.rs:6:1
|
LL | #[target_feature(enable = "sse2")]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
LL |
LL | const FOO: u8 = 0;
| ------------------ not a function
error: aborting due to 2 previous errors
For more information about this error, try `rustc --explain E0518`.

View file

@ -1,20 +1,38 @@
error[E0005]: refutable pattern in local binding: `0u8..=1u8` and `3u8..=std::u8::MAX` not covered
--> $DIR/const-pattern-irrefutable.rs:12:9
|
LL | const a: u8 = 2;
| ---------------- constant defined here
...
LL | let a = 4;
| ^ interpreted as a constant pattern, not new variable
| ^
| |
| interpreted as a constant pattern, not a new variable
| help: introduce a variable instead: `a_var`
error[E0005]: refutable pattern in local binding: `0u8..=1u8` and `3u8..=std::u8::MAX` not covered
--> $DIR/const-pattern-irrefutable.rs:13:9
|
LL | pub const b: u8 = 2;
| -------------------- constant defined here
...
LL | let c = 4;
| ^ interpreted as a constant pattern, not new variable
| ^
| |
| interpreted as a constant pattern, not a new variable
| help: introduce a variable instead: `c_var`
error[E0005]: refutable pattern in local binding: `0u8..=1u8` and `3u8..=std::u8::MAX` not covered
--> $DIR/const-pattern-irrefutable.rs:14:9
|
LL | pub const d: u8 = 2;
| -------------------- constant defined here
...
LL | let d = 4;
| ^ interpreted as a constant pattern, not new variable
| ^
| |
| interpreted as a constant pattern, not a new variable
| help: introduce a variable instead: `d_var`
error: aborting due to 3 previous errors

View file

@ -2,6 +2,7 @@
fn main() {
let gen = |start| { //~ ERROR generators cannot have explicit parameters
//~^ ERROR type inside generator must be known in this context
yield;
};
}

View file

@ -4,5 +4,18 @@ error[E0628]: generators cannot have explicit parameters
LL | let gen = |start| {
| ^^^^^^^
error: aborting due to previous error
error[E0698]: type inside generator must be known in this context
--> $DIR/no-parameters-on-generators.rs:4:16
|
LL | let gen = |start| {
| ^^^^^ cannot infer type
|
note: the type is part of the generator because of this `yield`
--> $DIR/no-parameters-on-generators.rs:6:9
|
LL | yield;
| ^^^^^
error: aborting due to 2 previous errors
For more information about this error, try `rustc --explain E0698`.

View file

@ -49,9 +49,6 @@ LL | if x == E::V { field } {}
error[E0308]: mismatched types
--> $DIR/struct-literal-variant-in-if.rs:10:20
|
LL | fn test_E(x: E) {
| - help: try adding a return type: `-> bool`
LL | let field = true;
LL | if x == E::V { field } {}
| ^^^^^ expected (), found bool
|

View file

@ -0,0 +1,10 @@
fn main() {
let A = 3;
//~^ ERROR refutable pattern in local binding: `std::i32::MIN..=1i32` and
//~| interpreted as a constant pattern, not a new variable
//~| HELP introduce a variable instead
//~| SUGGESTION a_var
const A: i32 = 2;
//~^ constant defined here
}

View file

@ -0,0 +1,15 @@
error[E0005]: refutable pattern in local binding: `std::i32::MIN..=1i32` and `3i32..=std::i32::MAX` not covered
--> $DIR/const-pat-non-exaustive-let-new-var.rs:2:9
|
LL | let A = 3;
| ^
| |
| interpreted as a constant pattern, not a new variable
| help: introduce a variable instead: `a_var`
...
LL | const A: i32 = 2;
| ----------------- constant defined here
error: aborting due to previous error
For more information about this error, try `rustc --explain E0005`.

View file

@ -1,50 +0,0 @@
error: malformed `target_feature` attribute input
--> $DIR/target-feature-wrong.rs:16:1
|
LL | #[target_feature = "+sse2"]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: must be of the form: `#[target_feature(enable = "name")]`
error: the feature named `foo` is not valid for this target
--> $DIR/target-feature-wrong.rs:18:18
|
LL | #[target_feature(enable = "foo")]
| ^^^^^^^^^^^^^^ `foo` is not valid for this target
error: malformed `target_feature` attribute input
--> $DIR/target-feature-wrong.rs:21:18
|
LL | #[target_feature(bar)]
| ^^^ help: must be of the form: `enable = ".."`
error: malformed `target_feature` attribute input
--> $DIR/target-feature-wrong.rs:23:18
|
LL | #[target_feature(disable = "baz")]
| ^^^^^^^^^^^^^^^ help: must be of the form: `enable = ".."`
error: `#[target_feature(..)]` can only be applied to `unsafe` functions
--> $DIR/target-feature-wrong.rs:27:1
|
LL | #[target_feature(enable = "sse2")]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ can only be applied to `unsafe` functions
...
LL | fn bar() {}
| ----------- not an `unsafe` function
error: attribute should be applied to a function
--> $DIR/target-feature-wrong.rs:33:1
|
LL | #[target_feature(enable = "sse2")]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
LL |
LL | mod another {}
| -------------- not a function
error: cannot use `#[inline(always)]` with `#[target_feature]`
--> $DIR/target-feature-wrong.rs:38:1
|
LL | #[inline(always)]
| ^^^^^^^^^^^^^^^^^
error: aborting due to 7 previous errors

View file

@ -1,5 +1,5 @@
error[E0658]: the target feature `avx512bw` is currently unstable
--> $DIR/target-feature-gate.rs:30:18
--> $DIR/gate.rs:30:18
|
LL | #[target_feature(enable = "avx512bw")]
| ^^^^^^^^^^^^^^^^^^^

View file

@ -35,6 +35,31 @@ fn bar() {}
mod another {}
//~^ NOTE not a function
#[target_feature(enable = "sse2")]
//~^ ERROR attribute should be applied to a function
const FOO: usize = 7;
//~^ NOTE not a function
#[target_feature(enable = "sse2")]
//~^ ERROR attribute should be applied to a function
struct Foo;
//~^ NOTE not a function
#[target_feature(enable = "sse2")]
//~^ ERROR attribute should be applied to a function
enum Bar { }
//~^ NOTE not a function
#[target_feature(enable = "sse2")]
//~^ ERROR attribute should be applied to a function
union Qux { f1: u16, f2: u16 }
//~^ NOTE not a function
#[target_feature(enable = "sse2")]
//~^ ERROR attribute should be applied to a function
trait Baz { }
//~^ NOTE not a function
#[inline(always)]
//~^ ERROR: cannot use `#[inline(always)]`
#[target_feature(enable = "sse2")]

View file

@ -0,0 +1,95 @@
error: malformed `target_feature` attribute input
--> $DIR/invalid-attribute.rs:16:1
|
LL | #[target_feature = "+sse2"]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: must be of the form: `#[target_feature(enable = "name")]`
error: the feature named `foo` is not valid for this target
--> $DIR/invalid-attribute.rs:18:18
|
LL | #[target_feature(enable = "foo")]
| ^^^^^^^^^^^^^^ `foo` is not valid for this target
error: malformed `target_feature` attribute input
--> $DIR/invalid-attribute.rs:21:18
|
LL | #[target_feature(bar)]
| ^^^ help: must be of the form: `enable = ".."`
error: malformed `target_feature` attribute input
--> $DIR/invalid-attribute.rs:23:18
|
LL | #[target_feature(disable = "baz")]
| ^^^^^^^^^^^^^^^ help: must be of the form: `enable = ".."`
error: `#[target_feature(..)]` can only be applied to `unsafe` functions
--> $DIR/invalid-attribute.rs:27:1
|
LL | #[target_feature(enable = "sse2")]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ can only be applied to `unsafe` functions
...
LL | fn bar() {}
| ----------- not an `unsafe` function
error: attribute should be applied to a function
--> $DIR/invalid-attribute.rs:33:1
|
LL | #[target_feature(enable = "sse2")]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
LL |
LL | mod another {}
| -------------- not a function
error: attribute should be applied to a function
--> $DIR/invalid-attribute.rs:38:1
|
LL | #[target_feature(enable = "sse2")]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
LL |
LL | const FOO: usize = 7;
| --------------------- not a function
error: attribute should be applied to a function
--> $DIR/invalid-attribute.rs:43:1
|
LL | #[target_feature(enable = "sse2")]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
LL |
LL | struct Foo;
| ----------- not a function
error: attribute should be applied to a function
--> $DIR/invalid-attribute.rs:48:1
|
LL | #[target_feature(enable = "sse2")]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
LL |
LL | enum Bar { }
| ------------ not a function
error: attribute should be applied to a function
--> $DIR/invalid-attribute.rs:53:1
|
LL | #[target_feature(enable = "sse2")]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
LL |
LL | union Qux { f1: u16, f2: u16 }
| ------------------------------ not a function
error: attribute should be applied to a function
--> $DIR/invalid-attribute.rs:58:1
|
LL | #[target_feature(enable = "sse2")]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
LL |
LL | trait Baz { }
| ------------- not a function
error: cannot use `#[inline(always)]` with `#[target_feature]`
--> $DIR/invalid-attribute.rs:63:1
|
LL | #[inline(always)]
| ^^^^^^^^^^^^^^^^^
error: aborting due to 12 previous errors

View file

@ -6,7 +6,7 @@ edition = "2018"
[dependencies]
diff = "0.1.10"
env_logger = { version = "0.6", default-features = false }
env_logger = { version = "0.7", default-features = false }
getopts = "0.2"
log = "0.4"
regex = "1.0"
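
The dependency bump above moves compiletest to env_logger 0.7; typical initialization code is unaffected. A minimal usage sketch, assuming `env_logger` and `log` as dependencies and the crate's usual `init()` entry point (not taken from this diff):

```rust
fn main() {
    // Reads the RUST_LOG environment variable, e.g. RUST_LOG=debug.
    env_logger::init();
    log::info!("logging initialized");
}
```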