Auto merge of #96931 - JohnTitor:rollup-3um8o4j, r=JohnTitor
Rollup of 7 pull requests

Successful merges:

- #96543 (Remove hacks in `make_token_stream`.)
- #96887 (rustdoc: correct path to type alias methods)
- #96896 (Add regression test for #68408)
- #96900 (Fix js error)
- #96903 (Use lifetimes on type-alias-impl-trait used in function signatures to infer output type lifetimes)
- #96916 (simplify length count)
- #96925 (Fix issue #95151)

Failed merges:

r? `@ghost`
`@rustbot` modify labels: rollup
commit ee6eaabdd4
15 changed files with 92 additions and 58 deletions
@@ -1349,7 +1349,7 @@ impl<'tcx> TypeVisitor<'tcx> for LateBoundRegionsCollector {
     // ignore the inputs to a projection, as they may not appear
     // in the normalized form
     if self.just_constrained {
-        if let ty::Projection(..) | ty::Opaque(..) = t.kind() {
+        if let ty::Projection(..) = t.kind() {
             return ControlFlow::CONTINUE;
         }
     }
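For orientation, the signature pattern this hunk affects is a type-alias-impl-trait used in a function signature. The sketch below mirrors `mod bar` of the `constrain_inputs.rs` test added later in this commit; the comment is my gloss, not part of the test:

```rust
#![feature(type_alias_impl_trait)]

mod bar {
    type Ty<'a> = impl FnOnce() -> &'a str;
    fn defining(s: &str) -> Ty<'_> { move || s }
    // With the change above, the lifetime carried by the opaque `Ty<'_>` input
    // is taken into account when inferring the elided output lifetime of `&str`.
    fn execute(ty: Ty<'_>) -> &str { ty() }
}

fn main() {}
```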
@@ -485,7 +485,7 @@ impl<'a, 'tcx> CoverageSpans<'a, 'tcx> {
     }) {
         let merged_prefix_len = self.curr_original_span.lo() - self.curr().span.lo();
         let after_macro_bang =
-            merged_prefix_len + BytePos(visible_macro.as_str().bytes().count() as u32 + 1);
+            merged_prefix_len + BytePos(visible_macro.as_str().len() as u32 + 1);
         let mut macro_name_cov = self.curr().clone();
         self.curr_mut().span =
             self.curr().span.with_lo(self.curr().span.lo() + after_macro_bang);
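The replacement is behavior-preserving: `str::len` returns the length in bytes, which is exactly what `bytes().count()` computed. A minimal standalone check, not part of the diff:

```rust
fn main() {
    let visible_macro = "println";
    let non_ascii = "assert_né";
    // `str::len` counts bytes, so it always equals `bytes().count()` ...
    assert_eq!(visible_macro.len(), visible_macro.bytes().count());
    assert_eq!(non_ascii.len(), non_ascii.bytes().count());
    // ... whereas `chars().count()` can differ for non-ASCII strings.
    assert_ne!(non_ascii.len(), non_ascii.chars().count());
}
```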
@@ -388,12 +388,6 @@ impl<'a> Parser<'a> {
 /// Converts a flattened iterator of tokens (including open and close delimiter tokens)
 /// into a `TokenStream`, creating a `TokenTree::Delimited` for each matching pair
 /// of open and close delims.
-// FIXME(#67062): Currently, we don't parse `Invisible`-delimited groups correctly,
-// which can cause us to end up with mismatched `Invisible` delimiters in our
-// captured tokens. This function contains several hacks to work around this -
-// essentially, we throw away mismatched `Invisible` delimiters when we encounter them.
-// Once we properly parse `Invisible` delimiters, they can be captured just like any
-// other tokens, and these hacks can be removed.
 fn make_token_stream(
     mut iter: impl Iterator<Item = (FlatToken, Spacing)>,
     break_last_token: bool,
@@ -412,35 +406,10 @@ fn make_token_stream(
     stack.push(FrameData { open_delim_sp: Some((delim, span)), inner: vec![] });
 }
 FlatToken::Token(Token { kind: TokenKind::CloseDelim(delim), span }) => {
-    // HACK: If we encounter a mismatched `Invisible` delimiter at the top
-    // level, just ignore it.
-    if matches!(delim, Delimiter::Invisible)
-        && (stack.len() == 1
-            || !matches!(
-                stack.last_mut().unwrap().open_delim_sp.unwrap().0,
-                Delimiter::Invisible
-            ))
-    {
-        token_and_spacing = iter.next();
-        continue;
-    }
     let frame_data = stack
         .pop()
         .unwrap_or_else(|| panic!("Token stack was empty for token: {:?}", token));

-    // HACK: If our current frame has a mismatched opening `Invisible` delimiter,
-    // merge our current frame with the one above it. That is, transform
-    // `[ { < first second } third ]` into `[ { first second } third ]`
-    if !matches!(delim, Delimiter::Invisible)
-        && matches!(frame_data.open_delim_sp.unwrap().0, Delimiter::Invisible)
-    {
-        stack.last_mut().unwrap().inner.extend(frame_data.inner);
-        // Process our closing delimiter again, this time at the previous
-        // frame in the stack
-        token_and_spacing = Some((token, spacing));
-        continue;
-    }
-
     let (open_delim, open_sp) = frame_data.open_delim_sp.unwrap();
     assert_eq!(
         open_delim, delim,
@@ -472,13 +441,6 @@ fn make_token_stream(
         }
         token_and_spacing = iter.next();
     }
-    // HACK: If we don't have a closing `Invisible` delimiter for our last
-    // frame, merge the frame with the top-level frame. That is,
-    // turn `< first second` into `first second`
-    if stack.len() == 2 && stack[1].open_delim_sp.unwrap().0 == Delimiter::Invisible {
-        let temp_buf = stack.pop().unwrap();
-        stack.last_mut().unwrap().inner.extend(temp_buf.inner);
-    }
     let mut final_buf = stack.pop().expect("Missing final buf!");
     if break_last_token {
         let (last_token, spacing) = final_buf.inner.pop().unwrap();
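For readers skimming the diff: with the hacks gone, `make_token_stream` is a plain stack-based grouping pass — push a frame on every opening delimiter, pop it and wrap its contents on the matching closing delimiter. The sketch below illustrates that idea with toy types of my own; it is not rustc's actual `FlatToken`/`TokenStream` API.

```rust
// Simplified sketch of stack-based delimiter matching, using toy types
// rather than rustc's real token and token-stream data structures.
#[derive(Debug)]
enum Flat {
    Open(char),   // '(', '[', '{'
    Close(char),  // ')', ']', '}'
    Atom(String), // any other token
}

#[derive(Debug)]
enum Tree {
    Delimited(char, Vec<Tree>), // a matched group and its contents
    Atom(String),
}

fn matching(open: char) -> char {
    match open {
        '(' => ')',
        '[' => ']',
        '{' => '}',
        _ => unreachable!(),
    }
}

fn build_trees(iter: impl IntoIterator<Item = Flat>) -> Vec<Tree> {
    // Each frame holds the opening delimiter (None for the top level)
    // and the trees collected so far inside it.
    let mut stack: Vec<(Option<char>, Vec<Tree>)> = vec![(None, Vec::new())];
    for tok in iter {
        match tok {
            Flat::Open(d) => stack.push((Some(d), Vec::new())),
            Flat::Close(close) => {
                let (open, inner) = stack.pop().expect("stack empty on close delim");
                let open = open.expect("close delimiter at top level");
                assert_eq!(matching(open), close, "mismatched delimiters");
                stack.last_mut().unwrap().1.push(Tree::Delimited(open, inner));
            }
            Flat::Atom(s) => stack.last_mut().unwrap().1.push(Tree::Atom(s)),
        }
    }
    let (open, trees) = stack.pop().expect("missing final frame");
    assert!(open.is_none() && stack.is_empty(), "unclosed delimiter");
    trees
}

fn main() {
    // `a ( b c ) d`  =>  [Atom("a"), Delimited('(', [Atom("b"), Atom("c")]), Atom("d")]
    let toks = vec![
        Flat::Atom("a".into()),
        Flat::Open('('),
        Flat::Atom("b".into()),
        Flat::Atom("c".into()),
        Flat::Close(')'),
        Flat::Atom("d".into()),
    ];
    println!("{:?}", build_trees(toks));
}
```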
@@ -2109,8 +2109,7 @@ impl<'a> Parser<'a> {
             brace_depth -= 1;
             continue;
         }
-    } else if self.token == token::Eof || self.eat(&token::CloseDelim(Delimiter::Invisible))
-    {
+    } else if self.token == token::Eof {
         return;
     } else {
         self.bump();
@@ -288,14 +288,7 @@ impl<'a, 'tcx> DocFolder for CacheBuilder<'a, 'tcx> {
     // for where the type was defined. On the other
     // hand, `paths` always has the right
     // information if present.
-    Some(&(
-        ref fqp,
-        ItemType::Trait
-        | ItemType::Struct
-        | ItemType::Union
-        | ItemType::Enum,
-    )) => Some(&fqp[..fqp.len() - 1]),
-    Some(..) => Some(&*self.cache.stack),
+    Some(&(ref fqp, _)) => Some(&fqp[..fqp.len() - 1]),
     None => None,
 };
 ((Some(*last), path), true)
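In both the removed and the surviving arm, `&fqp[..fqp.len() - 1]` takes the parent of a fully qualified path by dropping its final segment. A minimal standalone illustration with hypothetical data, not rustdoc's cache types:

```rust
fn main() {
    // A fully qualified path as segments, e.g. `std::os::unix::io::AsRawFd`.
    let fqp = vec!["std", "os", "unix", "io", "AsRawFd"];
    // Dropping the last segment yields the parent path the item is filed under.
    let parent = &fqp[..fqp.len() - 1];
    assert_eq!(parent.join("::"), "std::os::unix::io");
}
```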
@@ -331,7 +331,6 @@ li {
nav.sub {
    position: relative;
    font-size: 1rem;
    text-transform: uppercase;
}

.sub-container {
@@ -1,5 +1,5 @@
 // From rust:
-/* global search, sourcesIndex */
+/* global sourcesIndex */

 // Local js definitions:
 /* global addClass, getCurrentValue, hasClass, onEachLazy, removeClass, browserSupportsHistoryApi */
@@ -69,7 +69,6 @@ function createDirEntry(elem, parent, fullPath, currentFile, hasFoundFile) {
         files.appendChild(file);
     }
 }
-search.fullPath = fullPath;
 children.appendChild(files);
 parent.appendChild(name);
 parent.appendChild(children);
@@ -15,6 +15,5 @@ assert-css: ("#sidebar-toggle", {"visibility": "visible", "opacity": 1})
assert-css: (".sidebar > *:not(#sidebar-toggle)", {"visibility": "hidden", "opacity": 0})
// Let's expand the sidebar now.
click: "#sidebar-toggle"
// Because of the transition CSS, better wait a second before checking.
// Because of the transition CSS, we check by using `wait-for-css` instead of `assert-css`.
wait-for-css: ("#sidebar-toggle", {"visibility": "visible", "opacity": 1})
assert-css: (".sidebar > *:not(#sidebar-toggle)", {"visibility": "visible", "opacity": 1})
src/test/rustdoc-js-std/asrawfd.js (new file, 14 lines)
@@ -0,0 +1,14 @@
// ignore-order

const QUERY = 'RawFd::as_raw_fd';

const EXPECTED = {
    'others': [
        // Reproduction test for https://github.com/rust-lang/rust/issues/78724
        // Validate that type alias methods get the correct path.
        { 'path': 'std::os::unix::io::AsRawFd', 'name': 'as_raw_fd' },
        { 'path': 'std::os::wasi::io::AsRawFd', 'name': 'as_raw_fd' },
        { 'path': 'std::os::linux::process::PidFd', 'name': 'as_raw_fd' },
        { 'path': 'std::os::unix::io::RawFd', 'name': 'as_raw_fd' },
    ],
};
src/test/rustdoc-js/foreign-type-path.js (new file, 9 lines)
@@ -0,0 +1,9 @@
const QUERY = 'MyForeignType::my_method';

const EXPECTED = {
    'others': [
        // Test case for https://github.com/rust-lang/rust/pull/96887#pullrequestreview-967154358
        // Validates that the parent path for a foreign type method is correct.
        { 'path': 'foreign_type_path::aaaaaaa::MyForeignType', 'name': 'my_method' },
    ],
};
src/test/rustdoc-js/foreign-type-path.rs (new file, 13 lines)
@@ -0,0 +1,13 @@
#![feature(extern_types)]

pub mod aaaaaaa {

    extern {
        pub type MyForeignType;
    }

    impl MyForeignType {
        pub fn my_method() {}
    }

}
src/test/ui/lint/dead-code/issue-68408-false-positive.rs (new file, 22 lines)
@@ -0,0 +1,22 @@
// check-pass

// Make sure we don't have any false positives here.

#![deny(dead_code)]

enum X {
    A { _a: () },
    B { _b: () },
}
impl X {
    fn a() -> X {
        X::A { _a: () }
    }
    fn b() -> Self {
        Self::B { _b: () }
    }
}

fn main() {
    let (_, _) = (X::a(), X::b());
}
src/test/ui/rfc-2091-track-caller/macro-declaration.rs (new file, 10 lines)
@@ -0,0 +1,10 @@
// check-pass

// See https://github.com/rust-lang/rust/issues/95151
#[track_caller]
macro_rules! _foo {
    () => {};
}

fn main() {
}
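For context, `#[track_caller]` is ordinarily applied to functions so that `std::panic::Location::caller()` reports the call site; the regression test above only checks that the attribute on a macro declaration compiles (see the linked issue for the original failure). A minimal sketch of the attribute's usual use on a function, not related to the test itself:

```rust
#[track_caller]
fn whereami() -> &'static std::panic::Location<'static> {
    // Because of #[track_caller], this reports the caller's location,
    // not the location of this line.
    std::panic::Location::caller()
}

fn main() {
    let loc = whereami();
    println!("called from {}:{}", loc.file(), loc.line());
}
```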
src/test/ui/type-alias-impl-trait/constrain_inputs.rs (new file, 17 lines)
@@ -0,0 +1,17 @@
// check-pass

#![feature(type_alias_impl_trait)]

mod foo {
    type Ty<'a> = impl Sized;
    fn defining(s: &str) -> Ty<'_> { s }
    fn execute(ty: Ty<'_>) -> &str { todo!() }
}

mod bar {
    type Ty<'a> = impl FnOnce() -> &'a str;
    fn defining(s: &str) -> Ty<'_> { move || s }
    fn execute(ty: Ty<'_>) -> &str { ty() }
}

fn main() {}
@@ -79,9 +79,7 @@ fn check_keyword<'a, 'b: 'a>(parser: &'a mut Parser<'b>) -> Option<MacroArg> {
     for &keyword in RUST_KW.iter() {
         if parser.token.is_keyword(keyword)
             && parser.look_ahead(1, |t| {
-                t.kind == TokenKind::Eof
-                    || t.kind == TokenKind::Comma
-                    || t.kind == TokenKind::CloseDelim(Delimiter::Invisible)
+                t.kind == TokenKind::Eof || t.kind == TokenKind::Comma
             })
         {
             parser.bump();