Rollup merge of #133537 - GuillaumeGomez:fix-clippy-lints, r=GuillaumeGomez
[rustdoc] Fix new clippy lints

Lots of small things that clippy lints about.

r? `@fmease`
commit 0cad2dcba6
48 changed files with 327 additions and 342 deletions
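Note: most of the changes below are mechanical fixes for a handful of recurring clippy lints: `needless_lifetimes` (elide lifetimes named only once), iterating a query result with `.iter()` instead of `.into_iter()` on a reference, `writeln!` instead of `write!` with a trailing `\n`, `std::mem::take` instead of `std::mem::replace` with a default, and dropping redundant `&`/`&*` borrows. A minimal before/after sketch of the most common pattern, `needless_lifetimes`, using a hypothetical `Wrapper` type that is not part of this diff:

```rust
use std::fmt;

struct Wrapper<'a>(&'a str);

// Before: `impl<'a> fmt::Display for Wrapper<'a>` names a lifetime that is
// used in only one spot; clippy::needless_lifetimes suggests eliding it.
impl fmt::Display for Wrapper<'_> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(self.0)
    }
}
```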
@@ -391,7 +391,7 @@ fn write_with_opt_paren<T: fmt::Display>(
     Ok(())
 }
 
-impl<'a> fmt::Display for Display<'a> {
+impl fmt::Display for Display<'_> {
     fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
         match *self.0 {
             Cfg::Not(ref child) => match **child {
@@ -367,7 +367,7 @@ pub(crate) fn build_impls(
     let tcx = cx.tcx;
 
     // for each implementation of an item represented by `did`, build the clean::Item for that impl
-    for &did in tcx.inherent_impls(did).into_iter() {
+    for &did in tcx.inherent_impls(did).iter() {
         cx.with_param_env(did, |cx| {
             build_impl(cx, did, attrs, ret);
         });
@@ -382,7 +382,7 @@ pub(crate) fn build_impls(
     if tcx.has_attr(did, sym::rustc_has_incoherent_inherent_impls) {
         let type_ =
             if tcx.is_trait(did) { SimplifiedType::Trait(did) } else { SimplifiedType::Adt(did) };
-        for &did in tcx.incoherent_impls(type_).into_iter() {
+        for &did in tcx.incoherent_impls(type_).iter() {
            cx.with_param_env(did, |cx| {
                build_impl(cx, did, attrs, ret);
            });
@@ -2509,16 +2509,14 @@ fn clean_generic_args<'tcx>(
         let args = generic_args
             .args
             .iter()
-            .filter_map(|arg| {
-                Some(match arg {
-                    hir::GenericArg::Lifetime(lt) if !lt.is_anonymous() => {
-                        GenericArg::Lifetime(clean_lifetime(lt, cx))
-                    }
-                    hir::GenericArg::Lifetime(_) => GenericArg::Lifetime(Lifetime::elided()),
-                    hir::GenericArg::Type(ty) => GenericArg::Type(clean_ty(ty, cx)),
-                    hir::GenericArg::Const(ct) => GenericArg::Const(Box::new(clean_const(ct, cx))),
-                    hir::GenericArg::Infer(_inf) => GenericArg::Infer,
-                })
-            })
+            .map(|arg| match arg {
+                hir::GenericArg::Lifetime(lt) if !lt.is_anonymous() => {
+                    GenericArg::Lifetime(clean_lifetime(lt, cx))
+                }
+                hir::GenericArg::Lifetime(_) => GenericArg::Lifetime(Lifetime::elided()),
+                hir::GenericArg::Type(ty) => GenericArg::Type(clean_ty(ty, cx)),
+                hir::GenericArg::Const(ct) => GenericArg::Const(Box::new(clean_const(ct, cx))),
+                hir::GenericArg::Infer(_inf) => GenericArg::Infer,
+            })
             .collect::<Vec<_>>()
             .into();
@@ -264,17 +264,19 @@ impl ExternalCrate {
         // rendering by delegating everything to a hash map.
         let as_primitive = |res: Res<!>| {
             let Res::Def(DefKind::Mod, def_id) = res else { return None };
-            tcx.get_attrs(def_id, sym::rustc_doc_primitive).find_map(|attr| {
-                let attr_value = attr.value_str().expect("syntax should already be validated");
-                let Some(prim) = PrimitiveType::from_symbol(attr_value) else {
-                    span_bug!(
-                        attr.span,
-                        "primitive `{attr_value}` is not a member of `PrimitiveType`"
-                    );
-                };
-
-                Some((def_id, prim))
-            })
+            tcx.get_attrs(def_id, sym::rustc_doc_primitive)
+                .map(|attr| {
+                    let attr_value = attr.value_str().expect("syntax should already be validated");
+                    let Some(prim) = PrimitiveType::from_symbol(attr_value) else {
+                        span_bug!(
+                            attr.span,
+                            "primitive `{attr_value}` is not a member of `PrimitiveType`"
+                        );
+                    };
+
+                    (def_id, prim)
+                })
+                .next()
         };
 
         if root.is_local() {
@@ -339,7 +341,7 @@ pub(crate) struct ItemInner {
 impl std::ops::Deref for Item {
     type Target = ItemInner;
     fn deref(&self) -> &ItemInner {
-        &*self.inner
+        &self.inner
     }
 }
 
@@ -412,7 +414,7 @@ impl Item {
 
     pub(crate) fn span(&self, tcx: TyCtxt<'_>) -> Option<Span> {
         let kind = match &self.kind {
-            ItemKind::StrippedItem(k) => &*k,
+            ItemKind::StrippedItem(k) => k,
             _ => &self.kind,
         };
         match kind {
@@ -1870,7 +1872,7 @@ impl PrimitiveType {
             .get(self)
             .into_iter()
             .flatten()
-            .flat_map(move |&simp| tcx.incoherent_impls(simp).into_iter())
+            .flat_map(move |&simp| tcx.incoherent_impls(simp).iter())
             .copied()
     }
 
@@ -1878,7 +1880,7 @@ impl PrimitiveType {
         Self::simplified_types()
             .values()
             .flatten()
-            .flat_map(move |&simp| tcx.incoherent_impls(simp).into_iter())
+            .flat_map(move |&simp| tcx.incoherent_impls(simp).iter())
             .copied()
     }
 
@@ -178,7 +178,7 @@ impl fmt::Debug for Options {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         struct FmtExterns<'a>(&'a Externs);
 
-        impl<'a> fmt::Debug for FmtExterns<'a> {
+        impl fmt::Debug for FmtExterns<'_> {
             fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                 f.debug_map().entries(self.0.iter()).finish()
             }
@@ -508,7 +508,7 @@ impl Options {
         };
 
         let parts_out_dir =
-            match matches.opt_str("parts-out-dir").map(|p| PathToParts::from_flag(p)).transpose() {
+            match matches.opt_str("parts-out-dir").map(PathToParts::from_flag).transpose() {
                 Ok(parts_out_dir) => parts_out_dir,
                 Err(e) => dcx.fatal(e),
             };
@@ -700,7 +700,7 @@ impl IndividualTestOptions {
     fn new(options: &RustdocOptions, test_id: &Option<String>, test_path: PathBuf) -> Self {
         let outdir = if let Some(ref path) = options.persist_doctests {
             let mut path = path.clone();
-            path.push(&test_id.as_deref().unwrap_or("<doctest>"));
+            path.push(test_id.as_deref().unwrap_or("<doctest>"));
 
             if let Err(err) = std::fs::create_dir_all(&path) {
                 eprintln!("Couldn't create directory for doctest executables: {err}");
@@ -88,7 +88,7 @@ impl<'tcx> HirCollector<'tcx> {
     }
 }
 
-impl<'tcx> HirCollector<'tcx> {
+impl HirCollector<'_> {
     fn visit_testable<F: FnOnce(&mut Self)>(
         &mut self,
         name: String,
@@ -203,7 +203,7 @@ impl Cache {
     }
 }
 
-impl<'a, 'tcx> DocFolder for CacheBuilder<'a, 'tcx> {
+impl DocFolder for CacheBuilder<'_, '_> {
     fn fold_item(&mut self, item: clean::Item) -> Option<clean::Item> {
         if item.item_id.is_local() {
             debug!(
@@ -11,7 +11,7 @@ use unicode_segmentation::UnicodeSegmentation;
 /// string when passed to a format string.
 pub(crate) struct Escape<'a>(pub &'a str);
 
-impl<'a> fmt::Display for Escape<'a> {
+impl fmt::Display for Escape<'_> {
     fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
         // Because the internet is always right, turns out there's not that many
         // characters to escape: http://stackoverflow.com/questions/7381974
@@ -49,7 +49,7 @@ impl<'a> fmt::Display for Escape<'a> {
 /// difference, use [`Escape`].
 pub(crate) struct EscapeBodyText<'a>(pub &'a str);
 
-impl<'a> fmt::Display for EscapeBodyText<'a> {
+impl fmt::Display for EscapeBodyText<'_> {
     fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
         // Because the internet is always right, turns out there's not that many
         // characters to escape: http://stackoverflow.com/questions/7381974
@@ -86,7 +86,7 @@ impl<'a> fmt::Display for EscapeBodyText<'a> {
 /// difference, use [`Escape`].
 pub(crate) struct EscapeBodyTextWithWbr<'a>(pub &'a str);
 
-impl<'a> fmt::Display for EscapeBodyTextWithWbr<'a> {
+impl fmt::Display for EscapeBodyTextWithWbr<'_> {
     fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
         let EscapeBodyTextWithWbr(text) = *self;
         if text.len() < 8 {
@@ -9,6 +9,7 @@
 
 use std::borrow::Cow;
 use std::cell::Cell;
+use std::cmp::Ordering;
 use std::fmt::{self, Display, Write};
 use std::iter::{self, once};
 
@@ -785,16 +786,20 @@ pub(crate) fn href_relative_parts<'fqp>(
             );
         }
     }
-    // e.g. linking to std::sync::atomic from std::sync
-    if relative_to_fqp.len() < fqp.len() {
-        Box::new(fqp[relative_to_fqp.len()..fqp.len()].iter().copied())
-    // e.g. linking to std::sync from std::sync::atomic
-    } else if fqp.len() < relative_to_fqp.len() {
-        let dissimilar_part_count = relative_to_fqp.len() - fqp.len();
-        Box::new(iter::repeat(sym::dotdot).take(dissimilar_part_count))
-    // linking to the same module
-    } else {
-        Box::new(iter::empty())
+    match relative_to_fqp.len().cmp(&fqp.len()) {
+        Ordering::Less => {
+            // e.g. linking to std::sync::atomic from std::sync
+            Box::new(fqp[relative_to_fqp.len()..fqp.len()].iter().copied())
+        }
+        Ordering::Greater => {
+            // e.g. linking to std::sync from std::sync::atomic
+            let dissimilar_part_count = relative_to_fqp.len() - fqp.len();
+            Box::new(iter::repeat(sym::dotdot).take(dissimilar_part_count))
+        }
+        Ordering::Equal => {
+            // linking to the same module
+            Box::new(iter::empty())
+        }
     }
 }
 
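Note: the `href_relative_parts` hunk above replaces a chain of `if`/`else if` length comparisons with a single `match` on `Ordering` (clippy's `comparison_chain` lint). A standalone sketch of the same shape, with hypothetical inputs:

```rust
use std::cmp::Ordering;

// Three-way comparison expressed as one match instead of an if/else chain.
fn describe(a: &[u32], b: &[u32]) -> &'static str {
    match a.len().cmp(&b.len()) {
        Ordering::Less => "a is shorter",
        Ordering::Greater => "a is longer",
        Ordering::Equal => "same length",
    }
}
```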
@@ -1384,7 +1389,7 @@ impl clean::Impl {
             write!(f, ">")?;
         }
     } else {
-        fmt_type(&type_, f, use_absolute, cx)?;
+        fmt_type(type_, f, use_absolute, cx)?;
     }
     Ok(())
 }
@@ -1531,14 +1536,14 @@ impl clean::FnDecl {
                 (None, Some(last_i)) if i != last_i => write!(f, ", ")?,
                 (None, Some(_)) => (),
                 (Some(n), Some(last_i)) if i != last_i => write!(f, ",\n{}", Indent(n + 4))?,
-                (Some(_), Some(_)) => write!(f, ",\n")?,
+                (Some(_), Some(_)) => writeln!(f, ",")?,
             }
         }
 
         if self.c_variadic {
             match line_wrapping_indent {
                 None => write!(f, ", ...")?,
-                Some(n) => write!(f, "{}...\n", Indent(n + 4))?,
+                Some(n) => writeln!(f, "{}...", Indent(n + 4))?,
             };
         }
 
@@ -144,7 +144,7 @@ struct TokenHandler<'a, 'tcx, F: Write> {
     href_context: Option<HrefContext<'a, 'tcx>>,
 }
 
-impl<'a, 'tcx, F: Write> TokenHandler<'a, 'tcx, F> {
+impl<F: Write> TokenHandler<'_, '_, F> {
     fn handle_exit_span(&mut self) {
         // We can't get the last `closing_tags` element using `pop()` because `closing_tags` is
         // being used in `write_pending_elems`.
@@ -207,7 +207,7 @@ impl<'a, 'tcx, F: Write> TokenHandler<'a, 'tcx, F> {
     }
 }
 
-impl<'a, 'tcx, F: Write> Drop for TokenHandler<'a, 'tcx, F> {
+impl<F: Write> Drop for TokenHandler<'_, '_, F> {
     /// When leaving, we need to flush all pending data to not have missing content.
     fn drop(&mut self) {
         if self.pending_exit_span.is_some() {
@@ -1017,7 +1017,7 @@ fn string_without_closing_tag<T: Display>(
                 .ok()
                 .map(|(url, _, _)| url),
             LinkFromSrc::Doc(def_id) => {
-                format::href_with_root_path(*def_id, context, Some(&href_context.root_path))
+                format::href_with_root_path(*def_id, context, Some(href_context.root_path))
                     .ok()
                     .map(|(doc_link, _, _)| doc_link)
             }
@@ -33,7 +33,7 @@ pub(crate) struct Page<'a> {
     pub(crate) rust_logo: bool,
 }
 
-impl<'a> Page<'a> {
+impl Page<'_> {
     pub(crate) fn get_static_root_path(&self) -> String {
         match self.static_root_path {
             Some(s) => s.to_string(),
@@ -77,7 +77,7 @@ impl HtmlWithLimit {
     /// This function will panic if called with a non-alphabetic `tag_name`.
     pub(super) fn open_tag(&mut self, tag_name: &'static str) {
         assert!(
-            tag_name.chars().all(|c| ('a'..='z').contains(&c)),
+            tag_name.chars().all(|c: char| c.is_ascii_lowercase()),
             "tag_name contained non-alphabetic chars: {tag_name:?}",
         );
         self.queued_tags.push(tag_name);
@@ -85,16 +85,15 @@ impl HtmlWithLimit {
 
     /// Close the most recently opened HTML tag.
     pub(super) fn close_tag(&mut self) {
-        match self.unclosed_tags.pop() {
+        if let Some(tag_name) = self.unclosed_tags.pop() {
             // Close the most recently opened tag.
-            Some(tag_name) => write!(self.buf, "</{tag_name}>").unwrap(),
-            // There are valid cases where `close_tag()` is called without
-            // there being any tags to close. For example, this occurs when
-            // a tag is opened after the length limit is exceeded;
-            // `flush_queue()` will never be called, and thus, the tag will
-            // not end up being added to `unclosed_tags`.
-            None => {}
+            write!(self.buf, "</{tag_name}>").unwrap()
         }
+        // There are valid cases where `close_tag()` is called without
+        // there being any tags to close. For example, this occurs when
+        // a tag is opened after the length limit is exceeded;
+        // `flush_queue()` will never be called, and thus, the tag will
+        // not end up being added to `unclosed_tags`.
     }
 
     /// Write all queued tags and add them to the `unclosed_tags` list.
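Note: in the `close_tag` hunk above, a `match` whose `None` arm does nothing becomes `if let Some(..)` (clippy's `single_match`). A minimal sketch with a hypothetical tag stack:

```rust
// `if let` replaces a match whose `None` arm was empty.
fn close_tag(unclosed_tags: &mut Vec<&'static str>, buf: &mut String) {
    if let Some(tag_name) = unclosed_tags.pop() {
        buf.push_str("</");
        buf.push_str(tag_name);
        buf.push('>');
    }
    // When there is no open tag, there is simply nothing to close.
}
```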
@@ -499,7 +499,7 @@ struct HeadingLinks<'a, 'b, 'ids, I> {
     heading_offset: HeadingOffset,
 }
 
-impl<'a, 'b, 'ids, I> HeadingLinks<'a, 'b, 'ids, I> {
+impl<'b, 'ids, I> HeadingLinks<'_, 'b, 'ids, I> {
     fn new(
         iter: I,
         toc: Option<&'b mut TocBuilder>,
@@ -510,9 +510,7 @@ impl<'a, 'b, 'ids, I> HeadingLinks<'a, 'b, 'ids, I> {
     }
 }
 
-impl<'a, 'b, 'ids, I: Iterator<Item = SpannedEvent<'a>>> Iterator
-    for HeadingLinks<'a, 'b, 'ids, I>
-{
+impl<'a, I: Iterator<Item = SpannedEvent<'a>>> Iterator for HeadingLinks<'a, '_, '_, I> {
     type Item = SpannedEvent<'a>;
 
     fn next(&mut self) -> Option<Self::Item> {
@@ -908,7 +906,7 @@ impl<'a, 'tcx> TagIterator<'a, 'tcx> {
     }
 
     fn parse_string(&mut self, start: usize) -> Option<Indices> {
-        while let Some((pos, c)) = self.inner.next() {
+        for (pos, c) in self.inner.by_ref() {
             if c == '"' {
                 return Some(Indices { start: start + 1, end: pos });
             }
@@ -1032,7 +1030,7 @@ impl<'a, 'tcx> TagIterator<'a, 'tcx> {
 
     /// Returns `false` if an error was emitted.
     fn skip_paren_block(&mut self) -> bool {
-        while let Some((_, c)) = self.inner.next() {
+        for (_, c) in self.inner.by_ref() {
             if c == ')' {
                 return true;
             }
@@ -1074,9 +1072,8 @@ impl<'a, 'tcx> TagIterator<'a, 'tcx> {
                 return Some(LangStringToken::LangToken(&self.data[start..pos]));
             }
             return self.next();
-        } else if pos == start && is_leading_char(c) {
-            continue;
-        } else if pos != start && is_bareword_char(c) {
+        } else if (pos == start && is_leading_char(c)) || (pos != start && is_bareword_char(c))
+        {
             continue;
         } else {
             self.emit_error(format!("unexpected character `{c}`"));
@@ -1088,7 +1085,7 @@ impl<'a, 'tcx> TagIterator<'a, 'tcx> {
     }
 }
 
-impl<'a, 'tcx> Iterator for TagIterator<'a, 'tcx> {
+impl<'a> Iterator for TagIterator<'a, '_> {
     type Item = LangStringToken<'a>;
 
     fn next(&mut self) -> Option<Self::Item> {
@@ -1324,7 +1321,7 @@ impl Markdown<'_> {
         let mut replacer = |broken_link: BrokenLink<'_>| {
             links
                 .iter()
-                .find(|link| &*link.original_text == &*broken_link.reference)
+                .find(|link| *link.original_text == *broken_link.reference)
                 .map(|link| (link.href.as_str().into(), link.tooltip.as_str().into()))
         };
 
@@ -1358,7 +1355,7 @@ impl MarkdownWithToc<'_> {
         let mut replacer = |broken_link: BrokenLink<'_>| {
             links
                 .iter()
-                .find(|link| &*link.original_text == &*broken_link.reference)
+                .find(|link| *link.original_text == *broken_link.reference)
                 .map(|link| (link.href.as_str().into(), link.tooltip.as_str().into()))
         };
 
@@ -1428,7 +1425,7 @@ impl MarkdownSummaryLine<'_> {
         let mut replacer = |broken_link: BrokenLink<'_>| {
             links
                 .iter()
-                .find(|link| &*link.original_text == &*broken_link.reference)
+                .find(|link| *link.original_text == *broken_link.reference)
                 .map(|link| (link.href.as_str().into(), link.tooltip.as_str().into()))
         };
 
@@ -1475,7 +1472,7 @@ fn markdown_summary_with_limit(
     let mut replacer = |broken_link: BrokenLink<'_>| {
         link_names
             .iter()
-            .find(|link| &*link.original_text == &*broken_link.reference)
+            .find(|link| *link.original_text == *broken_link.reference)
             .map(|link| (link.href.as_str().into(), link.tooltip.as_str().into()))
     };
 
@@ -1556,7 +1553,7 @@ pub(crate) fn plain_text_summary(md: &str, link_names: &[RenderedLink]) -> Strin
     let mut replacer = |broken_link: BrokenLink<'_>| {
         link_names
             .iter()
-            .find(|link| &*link.original_text == &*broken_link.reference)
+            .find(|link| *link.original_text == *broken_link.reference)
             .map(|link| (link.href.as_str().into(), link.tooltip.as_str().into()))
     };
 
@@ -1751,7 +1748,7 @@ pub(crate) fn markdown_links<'md, R>(
     };
 
     let mut broken_link_callback = |link: BrokenLink<'md>| Some((link.reference, "".into()));
-    let mut event_iter = Parser::new_with_broken_link_callback(
+    let event_iter = Parser::new_with_broken_link_callback(
         md,
         main_body_opts(),
         Some(&mut broken_link_callback),
@@ -1759,7 +1756,7 @@ pub(crate) fn markdown_links<'md, R>(
     .into_offset_iter();
     let mut links = Vec::new();
 
-    while let Some((event, span)) = event_iter.next() {
+    for (event, span) in event_iter {
         match event {
             Event::Start(Tag::Link { link_type, dest_url, .. }) if may_be_doc_link(link_type) => {
                 let range = match link_type {
@@ -1821,7 +1818,7 @@ pub(crate) fn rust_code_blocks(md: &str, extra_info: &ExtraInfo<'_>) -> Vec<Rust
         let lang_string = if syntax.is_empty() {
             Default::default()
         } else {
-            LangString::parse(&*syntax, ErrorCodes::Yes, false, Some(extra_info))
+            LangString::parse(syntax, ErrorCodes::Yes, false, Some(extra_info))
         };
         if !lang_string.rust {
             continue;
@@ -64,7 +64,7 @@ impl<'a, 'b, I: Iterator<Item = SpannedEvent<'a>>> Footnotes<'a, 'b, I> {
     }
 }
 
-impl<'a, 'b, I: Iterator<Item = SpannedEvent<'a>>> Iterator for Footnotes<'a, 'b, I> {
+impl<'a, I: Iterator<Item = SpannedEvent<'a>>> Iterator for Footnotes<'a, '_, I> {
     type Item = SpannedEvent<'a>;
 
     fn next(&mut self) -> Option<Self::Item> {
@@ -200,7 +200,7 @@ impl<'tcx> Context<'tcx> {
         };
         title.push_str(" - Rust");
         let tyname = it.type_();
-        let desc = plain_text_summary(&it.doc_value(), &it.link_names(&self.cache()));
+        let desc = plain_text_summary(&it.doc_value(), &it.link_names(self.cache()));
         let desc = if !desc.is_empty() {
             desc
         } else if it.is_crate() {
@@ -739,7 +739,7 @@ impl<'tcx> FormatRenderer<'tcx> for Context<'tcx> {
             &shared.layout,
             &page,
             "",
-            scrape_examples_help(&*shared),
+            scrape_examples_help(&shared),
             &shared.style_files,
         );
         shared.fs.write(scrape_examples_help_file, v)?;
@@ -779,7 +779,7 @@ impl<'tcx> FormatRenderer<'tcx> for Context<'tcx> {
             self.render_redirect_pages = item.is_stripped();
         }
         let item_name = item.name.unwrap();
-        self.dst.push(&*item_name.as_str());
+        self.dst.push(item_name.as_str());
         self.current.push(item_name);
 
         info!("Recursing into {}", self.dst.display());
@@ -812,7 +812,7 @@ impl<'tcx> FormatRenderer<'tcx> for Context<'tcx> {
             unreachable!()
         };
         let items = self.build_sidebar_items(module);
-        let js_dst = self.dst.join(&format!("sidebar-items{}.js", self.shared.resource_suffix));
+        let js_dst = self.dst.join(format!("sidebar-items{}.js", self.shared.resource_suffix));
         let v = format!("window.SIDEBAR_ITEMS = {};", serde_json::to_string(&items).unwrap());
         self.shared.fs.write(js_dst, v)?;
     }
@@ -151,13 +151,13 @@ impl RenderType {
         string.push('{');
         write_optional_id(self.id, string);
         string.push('{');
-        for generic in &self.generics.as_deref().unwrap_or_default()[..] {
+        for generic in self.generics.as_deref().unwrap_or_default() {
             generic.write_to_string(string);
         }
         string.push('}');
         if self.bindings.is_some() {
             string.push('{');
-            for binding in &self.bindings.as_deref().unwrap_or_default()[..] {
+            for binding in self.bindings.as_deref().unwrap_or_default() {
                 string.push('{');
                 binding.0.write_to_string(string);
                 string.push('{');
@@ -1963,7 +1963,7 @@ fn render_impl(
                 w,
                 "<div class=\"docblock\">{}</div>",
                 Markdown {
-                    content: &*dox,
+                    content: dox,
                     links: &i.impl_item.links(cx),
                     ids: &mut cx.id_map,
                     error_codes: cx.shared.codes,
@@ -2093,9 +2093,7 @@ pub(crate) fn small_url_encode(s: String) -> String {
     // and https://url.spec.whatwg.org/#urlencoded-parsing
     // and https://url.spec.whatwg.org/#url-code-points
     fn dont_escape(c: u8) -> bool {
-        (b'a' <= c && c <= b'z')
-            || (b'A' <= c && c <= b'Z')
-            || (b'0' <= c && c <= b'9')
+        c.is_ascii_alphanumeric()
             || c == b'-'
             || c == b'_'
             || c == b'.'
@@ -2150,7 +2148,7 @@ pub(crate) fn small_url_encode(s: String) -> String {
     }
 }
 
-fn get_id_for_impl<'tcx>(tcx: TyCtxt<'tcx>, impl_id: ItemId) -> String {
+fn get_id_for_impl(tcx: TyCtxt<'_>, impl_id: ItemId) -> String {
     use rustc_middle::ty::print::with_forced_trimmed_paths;
     let (type_, trait_) = match impl_id {
         ItemId::Auto { trait_, for_ } => {
@@ -2381,7 +2379,7 @@ fn collect_paths_for_type(first_ty: clean::Type, cache: &Cache) -> Vec<String> {
         let fqp = cache.exact_paths.get(&did).or_else(get_extern);
 
         if let Some(path) = fqp {
-            out.push(join_with_double_colon(&path));
+            out.push(join_with_double_colon(path));
         }
     };
 
@@ -2462,8 +2460,8 @@ fn render_call_locations<W: fmt::Write>(mut w: W, cx: &mut Context<'_>, item: &c
     };
 
     // Generate the HTML for a single example, being the title and code block
-    let write_example = |mut w: &mut W, (path, call_data): (&PathBuf, &CallData)| -> bool {
-        let contents = match fs::read_to_string(&path) {
+    let write_example = |w: &mut W, (path, call_data): (&PathBuf, &CallData)| -> bool {
+        let contents = match fs::read_to_string(path) {
            Ok(contents) => contents,
            Err(err) => {
                let span = item.span(tcx).map_or(DUMMY_SP, |span| span.inner());
@@ -2532,7 +2530,7 @@ fn render_call_locations<W: fmt::Write>(mut w: W, cx: &mut Context<'_>, item: &c
         decoration_info.insert("highlight", byte_ranges);
 
         sources::print_src(
-            &mut w,
+            w,
             contents_subset,
             file_span,
             cx,
@@ -2581,7 +2579,7 @@ fn render_call_locations<W: fmt::Write>(mut w: W, cx: &mut Context<'_>, item: &c
     // An example may fail to write if its source can't be read for some reason, so this method
     // continues iterating until a write succeeds
     let write_and_skip_failure = |w: &mut W, it: &mut Peekable<_>| {
-        while let Some(example) = it.next() {
+        for example in it.by_ref() {
             if write_example(&mut *w, example) {
                 break;
             }
@@ -23,7 +23,7 @@ impl OrderedJson {
     pub(crate) fn array_sorted<T: Borrow<Self>, I: IntoIterator<Item = T>>(items: I) -> Self {
         let items = items
             .into_iter()
-            .sorted_unstable_by(|a, b| a.borrow().cmp(&b.borrow()))
+            .sorted_unstable_by(|a, b| a.borrow().cmp(b.borrow()))
             .format_with(",", |item, f| f(item.borrow()));
         Self(format!("[{}]", items))
     }
@@ -751,7 +751,7 @@ fn item_trait(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clean:
     }
 
     if !required_methods.is_empty() {
-        write!(w, "    // Required method{}\n", pluralize(required_methods.len()));
+        writeln!(w, "    // Required method{}", pluralize(required_methods.len()));
     }
     for (pos, m) in required_methods.iter().enumerate() {
         render_assoc_item(
@@ -773,7 +773,7 @@ fn item_trait(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clean:
     }
 
     if !provided_methods.is_empty() {
-        write!(w, "    // Provided method{}\n", pluralize(provided_methods.len()));
+        writeln!(w, "    // Provided method{}", pluralize(provided_methods.len()));
     }
     for (pos, m) in provided_methods.iter().enumerate() {
         render_assoc_item(
@@ -940,7 +940,7 @@ fn item_trait(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clean:
         "Dyn Compatibility",
         "dyn-compatibility",
         None,
-        &format!(
+        format!(
             "<div class=\"dyn-compatibility-info\"><p>This trait is <b>not</b> \
              <a href=\"{base}/reference/items/traits.html#object-safety\">dyn compatible</a>.</p>\
              <p><i>In older versions of Rust, dyn compatibility was called \"object safety\", \
@@ -1225,14 +1225,14 @@ fn item_type_alias(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &c
                         w,
                         cx,
                         Some(&t.generics),
-                        &variants,
+                        variants,
                         variants_count,
                         has_stripped_entries,
                         *is_non_exhaustive,
                         enum_def_id,
                     )
                 });
-                item_variants(w, cx, it, &variants, enum_def_id);
+                item_variants(w, cx, it, variants, enum_def_id);
             }
             clean::TypeAliasInnerType::Union { fields } => {
                 wrap_item(w, |w| {
@@ -1824,7 +1824,7 @@ fn item_constant(
         name = it.name.unwrap(),
         generics = generics.print(cx),
         typ = ty.print(cx),
-        where_clause = print_where_clause(&generics, cx, 0, Ending::NoNewline),
+        where_clause = print_where_clause(generics, cx, 0, Ending::NoNewline),
     );
 
     // FIXME: The code below now prints
@@ -2194,7 +2194,7 @@ fn render_union<'a, 'cx: 'a>(
             f.write_str(" ")?;
         }
 
-        write!(f, "{{\n")?;
+        writeln!(f, "{{")?;
         let count_fields =
             fields.iter().filter(|field| matches!(field.kind, clean::StructFieldItem(..))).count();
         let toggle = should_hide_fields(count_fields);
@@ -2204,9 +2204,9 @@ fn render_union<'a, 'cx: 'a>(
 
         for field in fields {
             if let clean::StructFieldItem(ref ty) = field.kind {
-                write!(
+                writeln!(
                     f,
-                    "    {}{}: {},\n",
+                    "    {}{}: {},",
                     visibility_print_with_space(field, cx),
                     field.name.unwrap(),
                     ty.print(cx)
@@ -2215,7 +2215,7 @@ fn render_union<'a, 'cx: 'a>(
         }
 
         if it.has_stripped_entries().unwrap() {
-            write!(f, "    <span class=\"comment\">/* private fields */</span>\n")?;
+            writeln!(f, "    <span class=\"comment\">/* private fields */</span>")?;
         }
         if toggle {
             toggle_close(&mut f);
@@ -2355,7 +2355,7 @@ fn document_non_exhaustive_header(item: &clean::Item) -> &str {
     if item.is_non_exhaustive() { " (Non-exhaustive)" } else { "" }
 }
 
-fn document_non_exhaustive<'a>(item: &'a clean::Item) -> impl fmt::Display + 'a {
+fn document_non_exhaustive(item: &clean::Item) -> impl fmt::Display + '_ {
     display_fn(|f| {
         if item.is_non_exhaustive() {
             write!(
@@ -55,10 +55,10 @@ pub(crate) struct SerializedSearchIndex {
 const DESC_INDEX_SHARD_LEN: usize = 128 * 1024;
 
 /// Builds the search index from the collected metadata
-pub(crate) fn build_index<'tcx>(
+pub(crate) fn build_index(
     krate: &clean::Crate,
     cache: &mut Cache,
-    tcx: TyCtxt<'tcx>,
+    tcx: TyCtxt<'_>,
 ) -> SerializedSearchIndex {
     // Maps from ID to position in the `crate_paths` array.
     let mut itemid_to_pathid = FxHashMap::default();
@@ -126,7 +126,7 @@ pub(crate) fn build_index<'tcx>(
     let mut lastpathid = 0isize;
 
     // First, on function signatures
-    let mut search_index = std::mem::replace(&mut cache.search_index, Vec::new());
+    let mut search_index = std::mem::take(&mut cache.search_index);
     for item in search_index.iter_mut() {
         fn insert_into_map<F: std::hash::Hash + Eq>(
             map: &mut FxHashMap<F, isize>,
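Note: several hunks here and below swap `std::mem::replace(&mut x, Default::default())` for `std::mem::take(&mut x)` (clippy's `mem_replace_with_default`); both move the old value out and leave a default in place. A minimal sketch:

```rust
// Moves the buffer's contents out, leaving an empty Vec behind;
// equivalent to std::mem::replace(buf, Vec::new()).
fn drain(buf: &mut Vec<u8>) -> Vec<u8> {
    std::mem::take(buf)
}
```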
@@ -194,7 +194,7 @@ pub(crate) fn build_index<'tcx>(
         {
             let exact_fqp = exact_paths
                 .get(&defid)
-                .or_else(|| external_paths.get(&defid).map(|&(ref fqp, _)| fqp))
+                .or_else(|| external_paths.get(&defid).map(|(fqp, _)| fqp))
                 // Re-exports only count if the name is exactly the same.
                 // This is a size optimization, since it means we only need
                 // to store the name once (and the path is re-used for everything
@@ -298,7 +298,7 @@ pub(crate) fn build_index<'tcx>(
                 true
             });
         }
-        let Some(id) = ty.id.clone() else {
+        let Some(id) = ty.id else {
             assert!(ty.generics.is_some());
             return;
         };
@@ -372,7 +372,7 @@ pub(crate) fn build_index<'tcx>(
         if let Some(&(ref fqp, short)) = paths.get(&defid) {
             let exact_fqp = exact_paths
                 .get(&defid)
-                .or_else(|| external_paths.get(&defid).map(|&(ref fqp, _)| fqp))
+                .or_else(|| external_paths.get(&defid).map(|(fqp, _)| fqp))
                 .filter(|exact_fqp| {
                     exact_fqp.last() == Some(&item.name) && *exact_fqp != fqp
                 });
@@ -397,7 +397,7 @@ pub(crate) fn build_index<'tcx>(
             // Their parent carries the exact fqp instead.
             let exact_fqp = exact_paths
                 .get(&defid)
-                .or_else(|| external_paths.get(&defid).map(|&(ref fqp, _)| fqp));
+                .or_else(|| external_paths.get(&defid).map(|(fqp, _)| fqp));
             item.exact_path = exact_fqp.and_then(|fqp| {
                 // Re-exports only count if the name is exactly the same.
                 // This is a size optimization, since it means we only need
@@ -426,7 +426,7 @@ pub(crate) fn build_index<'tcx>(
         }
 
         // Omit the parent path if it is same to that of the prior item.
-        if lastpath == &item.path {
+        if lastpath == item.path {
             item.path.clear();
         } else {
             lastpath = &item.path;
@@ -512,7 +512,7 @@ pub(crate) fn build_index<'tcx>(
     }
 }
 
-impl<'a> Serialize for CrateData<'a> {
+impl Serialize for CrateData<'_> {
     fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
     where
         S: Serializer,
@@ -640,7 +640,7 @@ pub(crate) fn build_index<'tcx>(
         let mut last_name = None;
         for (index, item) in self.items.iter().enumerate() {
             let n = item.ty as u8;
-            let c = char::try_from(n + b'A').expect("item types must fit in ASCII");
+            let c = char::from(n + b'A');
             assert!(c <= 'z', "item types must fit within ASCII printables");
             types.push(c);
 
@@ -741,22 +741,22 @@ pub(crate) fn build_index<'tcx>(
         let mut len: usize = 0;
         let mut item_index: u32 = 0;
         for desc in std::iter::once(&crate_doc).chain(crate_items.iter().map(|item| &item.desc)) {
-            if desc == "" {
+            if desc.is_empty() {
                 empty_desc.push(item_index);
                 item_index += 1;
                 continue;
             }
             if set.len() >= DESC_INDEX_SHARD_LEN {
-                result.push((len, std::mem::replace(&mut set, String::new())));
+                result.push((len, std::mem::take(&mut set)));
                 len = 0;
             } else if len != 0 {
                 set.push('\n');
             }
-            set.push_str(&desc);
+            set.push_str(desc);
             len += 1;
             item_index += 1;
         }
-        result.push((len, std::mem::replace(&mut set, String::new())));
+        result.push((len, std::mem::take(&mut set)));
         (empty_desc, result)
     };
 
@@ -792,9 +792,9 @@ pub(crate) fn build_index<'tcx>(
     SerializedSearchIndex { index, desc }
 }
 
-pub(crate) fn get_function_type_for_search<'tcx>(
+pub(crate) fn get_function_type_for_search(
     item: &clean::Item,
-    tcx: TyCtxt<'tcx>,
+    tcx: TyCtxt<'_>,
     impl_generics: Option<&(clean::Type, clean::Generics)>,
     parent: Option<DefId>,
     cache: &Cache,
@@ -861,7 +861,7 @@ fn get_index_type_id(
     match *clean_type {
         clean::Type::Path { ref path, .. } => Some(RenderTypeId::DefId(path.def_id())),
         clean::DynTrait(ref bounds, _) => {
-            bounds.get(0).map(|b| RenderTypeId::DefId(b.trait_.def_id()))
+            bounds.first().map(|b| RenderTypeId::DefId(b.trait_.def_id()))
         }
         clean::Primitive(p) => Some(RenderTypeId::Primitive(p)),
         clean::BorrowedRef { .. } => Some(RenderTypeId::Primitive(clean::PrimitiveType::Reference)),
@@ -953,7 +953,7 @@ fn simplify_fn_type<'a, 'tcx>(
             WherePredicate::BoundPredicate { ty, .. } => *ty == *arg,
             _ => false,
         }) {
-            let bounds = where_pred.get_bounds().unwrap_or_else(|| &[]);
+            let bounds = where_pred.get_bounds().unwrap_or(&[]);
             for bound in bounds.iter() {
                 if let Some(path) = bound.get_trait_path() {
                     let ty = Type::Path { path };
@@ -1043,7 +1043,7 @@ fn simplify_fn_type<'a, 'tcx>(
                     simplify_fn_type(
                         self_,
                         generics,
-                        &ty,
+                        ty,
                         tcx,
                         recurse + 1,
                         &mut ty_generics,
@@ -1058,7 +1058,7 @@ fn simplify_fn_type<'a, 'tcx>(
                     simplify_fn_type(
                         self_,
                         generics,
-                        &ty,
+                        ty,
                         tcx,
                         recurse + 1,
                         &mut ty_generics,
@@ -1074,7 +1074,7 @@ fn simplify_fn_type<'a, 'tcx>(
                     simplify_fn_type(
                         self_,
                         generics,
-                        &ty,
+                        ty,
                         tcx,
                         recurse + 1,
                         &mut ty_generics,
@@ -1117,7 +1117,7 @@ fn simplify_fn_type<'a, 'tcx>(
             );
             let ty_bindings = vec![(RenderTypeId::AssociatedType(sym::Output), ty_output)];
             res.push(RenderType {
-                id: get_index_type_id(&arg, rgen),
+                id: get_index_type_id(arg, rgen),
                 bindings: Some(ty_bindings),
                 generics: Some(ty_generics),
             });
@@ -1134,7 +1134,7 @@ fn simplify_fn_type<'a, 'tcx>(
                     simplify_fn_type(
                         self_,
                         generics,
-                        &type_,
+                        type_,
                         tcx,
                         recurse + 1,
                         &mut ty_generics,
@@ -1249,7 +1249,7 @@ fn simplify_fn_type<'a, 'tcx>(
             }
         }
     }
-    let id = get_index_type_id(&arg, rgen);
+    let id = get_index_type_id(arg, rgen);
     if id.is_some() || !ty_generics.is_empty() {
         res.push(RenderType {
             id,
@@ -1261,11 +1261,11 @@ fn simplify_fn_type<'a, 'tcx>(
     }
 }
 
-fn simplify_fn_constraint<'a, 'tcx>(
+fn simplify_fn_constraint<'a>(
     self_: Option<&'a Type>,
     generics: &Generics,
     constraint: &'a clean::AssocItemConstraint,
-    tcx: TyCtxt<'tcx>,
+    tcx: TyCtxt<'_>,
     recurse: usize,
     res: &mut Vec<(RenderTypeId, Vec<RenderType>)>,
     rgen: &mut FxIndexMap<SimplifiedParam, (isize, Vec<RenderType>)>,
@@ -1347,9 +1347,9 @@ fn simplify_fn_constraint<'a, 'tcx>(
 ///
 /// i.e. `fn foo<A: Display, B: Option<A>>(x: u32, y: B)` will return
 /// `[u32, Display, Option]`.
-fn get_fn_inputs_and_outputs<'tcx>(
+fn get_fn_inputs_and_outputs(
     func: &Function,
-    tcx: TyCtxt<'tcx>,
+    tcx: TyCtxt<'_>,
     impl_or_trait_generics: Option<&(clean::Type, clean::Generics)>,
     cache: &Cache,
 ) -> (Vec<RenderType>, Vec<RenderType>, Vec<Symbol>, Vec<Vec<RenderType>>) {
@@ -25,7 +25,7 @@ pub(crate) fn write_vlqhex_to_string(n: i32, string: &mut String) {
             break;
         }
         shift = shift.wrapping_sub(4);
-        mask = mask >> 4;
+        mask >>= 4;
     }
     // now write the rest
     while shift < 32 {
@@ -33,7 +33,7 @@ pub(crate) fn write_vlqhex_to_string(n: i32, string: &mut String) {
         let hex = char::try_from(if shift == 0 { '`' } else { '@' } as u32 + hexit).unwrap();
         string.push(hex);
         shift = shift.wrapping_sub(4);
-        mask = mask >> 4;
+        mask >>= 4;
     }
 }
 
@@ -64,7 +64,7 @@ impl Container {
             Container::Array(array) => {
                 array.push(value);
                 if array.len() >= 4096 {
-                    let array = std::mem::replace(array, Vec::new());
+                    let array = std::mem::take(array);
                     *self = Container::Bits(Box::new([0; 1024]));
                     for value in array {
                         self.push(value);
@@ -123,7 +123,7 @@ impl Container {
                 if 2 + 4 * r >= 2 * array.len() + 2 {
                     return false;
                 }
-                let array = std::mem::replace(array, Vec::new());
+                let array = std::mem::take(array);
                 *self = Container::Run(Vec::new());
                 for value in array {
                     self.push(value);
@@ -145,7 +145,7 @@ pub(crate) fn write_bitmap_to_bytes(
     let mut keys = Vec::<u16>::new();
     let mut containers = Vec::<Container>::new();
     let mut key: u16;
-    let mut domain_iter = domain.into_iter().copied().peekable();
+    let mut domain_iter = domain.iter().copied().peekable();
     let mut has_run = false;
     while let Some(entry) = domain_iter.next() {
         key = (entry >> 16).try_into().expect("shifted off the top 16 bits, so it should fit");
@@ -236,7 +236,7 @@ pub(crate) fn write_bitmap_to_bytes(
 pub(crate) fn bitmap_to_string(domain: &[u32]) -> String {
     let mut buf = Vec::new();
     let mut strbuf = String::new();
-    write_bitmap_to_bytes(&domain, &mut buf).unwrap();
+    write_bitmap_to_bytes(domain, &mut buf).unwrap();
     BASE64_STANDARD.encode_string(&buf, &mut strbuf);
     strbuf
 }
@@ -44,7 +44,7 @@ pub(super) struct Sidebar<'a> {
     pub(super) path: String,
 }
 
-impl<'a> Sidebar<'a> {
+impl Sidebar<'_> {
     /// Only create a `<section>` if there are any blocks
     /// which should actually be rendered.
     pub fn should_render_blocks(&self) -> bool {
@@ -564,9 +564,9 @@ pub(crate) fn sidebar_module_like(
         .filter(|sec| item_sections_in_use.contains(sec))
         .map(|sec| Link::new(ids.derive(sec.id()), sec.name()))
         .collect();
-    let header = if let Some(first_section) = item_sections.get(0) {
+    let header = if let Some(first_section) = item_sections.first() {
         Link::new(
-            first_section.href.to_owned(),
+            first_section.href.clone(),
             if module_like.is_crate() { "Crate Items" } else { "Module Items" },
         )
     } else {
@@ -84,7 +84,7 @@ impl<F: FileFormat> FromStr for SortedTemplate<F> {
         let offset = offset
             .strip_suffix(F::COMMENT_END)
             .ok_or(Error("last line expected to end with a comment"))?;
-        let offset: Offset = serde_json::from_str(&offset).map_err(|_| {
+        let offset: Offset = serde_json::from_str(offset).map_err(|_| {
             Error("could not find insertion location descriptor object on last line")
         })?;
         let (before, mut s) =
@@ -63,7 +63,7 @@ struct SpanMapVisitor<'tcx> {
     pub(crate) matches: FxHashMap<Span, LinkFromSrc>,
 }
 
-impl<'tcx> SpanMapVisitor<'tcx> {
+impl SpanMapVisitor<'_> {
     /// This function is where we handle `hir::Path` elements and add them into the "span map".
     fn handle_path(&mut self, path: &rustc_hir::Path<'_>) {
         match path.res {
@@ -68,8 +68,8 @@ pub(crate) fn write_shared(
     let _lock = try_err!(flock::Lock::new(&lock_file, true, true, true), &lock_file);
 
     let SerializedSearchIndex { index, desc } =
-        build_index(&krate, &mut Rc::get_mut(&mut cx.shared).unwrap().cache, tcx);
-    write_search_desc(cx, &krate, &desc)?; // does not need to be merged
+        build_index(krate, &mut Rc::get_mut(&mut cx.shared).unwrap().cache, tcx);
+    write_search_desc(cx, krate, &desc)?; // does not need to be merged
 
     let crate_name = krate.name(cx.tcx());
     let crate_name = crate_name.as_str(); // rand
@@ -80,7 +80,7 @@ pub(crate) fn write_shared(
         src_files_js: SourcesPart::get(cx, &crate_name_json)?,
         search_index_js: SearchIndexPart::get(index, &cx.shared.resource_suffix)?,
         all_crates: AllCratesPart::get(crate_name_json.clone(), &cx.shared.resource_suffix)?,
-        crates_index: CratesIndexPart::get(&crate_name, &external_crates)?,
+        crates_index: CratesIndexPart::get(crate_name, &external_crates)?,
         trait_impl: TraitAliasPart::get(cx, &crate_name_json)?,
         type_impl: TypeAliasPart::get(cx, krate, &crate_name_json)?,
     };
@@ -112,7 +112,7 @@ pub(crate) fn write_shared(
         md_opts.output = cx.dst.clone();
         md_opts.external_html = cx.shared.layout.external_html.clone();
         try_err!(
-            crate::markdown::render_and_write(&index_page, md_opts, cx.shared.edition()),
+            crate::markdown::render_and_write(index_page, md_opts, cx.shared.edition()),
            &index_page
        );
    }
@@ -158,13 +158,13 @@ fn write_rendered_cross_crate_info(
     let m = &opt.should_merge;
     if opt.emit.is_empty() || opt.emit.contains(&EmitType::InvocationSpecific) {
         if include_sources {
-            write_rendered_cci::<SourcesPart, _>(SourcesPart::blank, dst, &crates, m)?;
+            write_rendered_cci::<SourcesPart, _>(SourcesPart::blank, dst, crates, m)?;
         }
-        write_rendered_cci::<SearchIndexPart, _>(SearchIndexPart::blank, dst, &crates, m)?;
-        write_rendered_cci::<AllCratesPart, _>(AllCratesPart::blank, dst, &crates, m)?;
+        write_rendered_cci::<SearchIndexPart, _>(SearchIndexPart::blank, dst, crates, m)?;
+        write_rendered_cci::<AllCratesPart, _>(AllCratesPart::blank, dst, crates, m)?;
     }
-    write_rendered_cci::<TraitAliasPart, _>(TraitAliasPart::blank, dst, &crates, m)?;
-    write_rendered_cci::<TypeAliasPart, _>(TypeAliasPart::blank, dst, &crates, m)?;
+    write_rendered_cci::<TraitAliasPart, _>(TraitAliasPart::blank, dst, crates, m)?;
+    write_rendered_cci::<TypeAliasPart, _>(TypeAliasPart::blank, dst, crates, m)?;
     Ok(())
 }
 
@@ -234,7 +234,7 @@ fn write_search_desc(
         &cx.shared.resource_suffix,
     );
     let path = path.join(filename);
-    let part = OrderedJson::serialize(&part).unwrap();
+    let part = OrderedJson::serialize(part).unwrap();
     let part = format!("searchState.loadedDescShard({encoded_crate_name}, {i}, {part})");
     create_parents(&path)?;
     try_err!(fs::write(&path, part), &path);
@@ -261,7 +261,7 @@ impl CrateInfo {
             .iter()
             .map(|parts_path| {
                 let path = &parts_path.0;
-                let parts = try_err!(fs::read(&path), &path);
+                let parts = try_err!(fs::read(path), &path);
                 let parts: CrateInfo = try_err!(serde_json::from_slice(&parts), &path);
                 Ok::<_, Error>(parts)
             })
@@ -439,7 +439,7 @@ impl CratesIndexPart {
         const DELIMITER: &str = "\u{FFFC}"; // users are being naughty if they have this
         let content =
             format!("<h1>List of all crates</h1><ul class=\"all-items\">{DELIMITER}</ul>");
-        let template = layout::render(layout, &page, "", content, &style_files);
+        let template = layout::render(layout, &page, "", content, style_files);
         match SortedTemplate::from_template(&template, DELIMITER) {
             Ok(template) => template,
             Err(e) => panic!(
@@ -534,7 +534,7 @@ impl Hierarchy {
     }
 
     fn add_path(self: &Rc<Self>, path: &Path) {
-        let mut h = Rc::clone(&self);
+        let mut h = Rc::clone(self);
         let mut elems = path
             .components()
             .filter_map(|s| match s {
@@ -606,7 +606,7 @@ impl TypeAliasPart {
             cache,
             cx,
         };
-        DocVisitor::visit_crate(&mut type_impl_collector, &krate);
+        DocVisitor::visit_crate(&mut type_impl_collector, krate);
         let cx = type_impl_collector.cx;
         let aliased_types = type_impl_collector.aliased_types;
         for aliased_type in aliased_types.values() {
@@ -623,7 +623,7 @@ impl TypeAliasPart {
             // render_impl will filter out "impossible-to-call" methods
             // to make that functionality work here, it needs to be called with
            // each type alias, and if it gives a different result, split the impl
-            for &(type_alias_fqp, ref type_alias_item) in type_aliases {
+            for &(type_alias_fqp, type_alias_item) in type_aliases {
                 let mut buf = Buffer::html();
                 cx.id_map = Default::default();
                 cx.deref_id_map = Default::default();
@@ -643,8 +643,8 @@ impl TypeAliasPart {
                 super::render_impl(
                     &mut buf,
                     cx,
-                    *impl_,
-                    &type_alias_item,
+                    impl_,
+                    type_alias_item,
                     assoc_link,
                     RenderMode::Normal,
                     None,
@@ -680,7 +680,7 @@ impl TypeAliasPart {
                 path.push(component.as_str());
             }
             let aliased_item_type = aliased_type.target_type;
-            path.push(&format!(
+            path.push(format!(
                 "{aliased_item_type}.{}.js",
                 aliased_type.target_fqp[aliased_type.target_fqp.len() - 1]
             ));
@@ -781,7 +781,7 @@ impl TraitAliasPart {
             for component in &remote_path[..remote_path.len() - 1] {
                 path.push(component.as_str());
            }
-            path.push(&format!("{remote_item_type}.{}.js", remote_path[remote_path.len() - 1]));
+            path.push(format!("{remote_item_type}.{}.js", remote_path[remote_path.len() - 1]));
 
             let part = OrderedJson::array_sorted(
                 implementors
@@ -865,7 +865,7 @@ struct AliasedTypeImpl<'cache, 'item> {
     type_aliases: Vec<(&'cache [Symbol], &'item Item)>,
 }
 
-impl<'cx, 'cache, 'item> DocVisitor<'item> for TypeImplCollector<'cx, 'cache, 'item> {
+impl<'item> DocVisitor<'item> for TypeImplCollector<'_, '_, 'item> {
     fn visit_item(&mut self, it: &'item Item) {
         self.visit_item_recur(it);
         let cache = self.cache;
@@ -963,15 +963,13 @@ fn get_path_parts<T: CciPart>(
     crates_info: &[CrateInfo],
 ) -> FxIndexMap<PathBuf, Vec<String>> {
     let mut templates: FxIndexMap<PathBuf, Vec<String>> = FxIndexMap::default();
-    crates_info
-        .iter()
-        .map(|crate_info| T::from_crate_info(crate_info).parts.iter())
-        .flatten()
-        .for_each(|(path, part)| {
-            let path = dst.join(&path);
+    crates_info.iter().flat_map(|crate_info| T::from_crate_info(crate_info).parts.iter()).for_each(
+        |(path, part)| {
+            let path = dst.join(path);
             let part = part.to_string();
             templates.entry(path).or_default().push(part);
-        });
+        },
+    );
     templates
 }
 
@@ -994,10 +992,10 @@ where
     if !should_merge.read_rendered_cci {
         return Ok(make_blank());
    }
-    match fs::read_to_string(&path) {
+    match fs::read_to_string(path) {
         Ok(template) => Ok(try_err!(SortedTemplate::from_str(&template), &path)),
         Err(e) if e.kind() == io::ErrorKind::NotFound => Ok(make_blank()),
-        Err(e) => Err(Error::new(e, &path)),
+        Err(e) => Err(Error::new(e, path)),
     }
 }
 
@@ -35,8 +35,8 @@ pub(crate) fn render(cx: &mut Context<'_>, krate: &clean::Crate) -> Result<(), E
     Ok(())
 }
 
-pub(crate) fn collect_local_sources<'tcx>(
-    tcx: TyCtxt<'tcx>,
+pub(crate) fn collect_local_sources(
+    tcx: TyCtxt<'_>,
     src_root: &Path,
     krate: &clean::Crate,
 ) -> FxIndexMap<PathBuf, String> {
@@ -80,7 +80,7 @@ impl LocalSourcesCollector<'_, '_> {
 
         let href = RefCell::new(PathBuf::new());
         clean_path(
-            &self.src_root,
+            self.src_root,
            &p,
            |component| {
                href.borrow_mut().push(component);
@@ -57,7 +57,7 @@ pub(crate) fn suffix_path(filename: &str, suffix: &str) -> PathBuf {
 
 pub(crate) fn static_filename(filename: &str, sha256: &str) -> PathBuf {
     let filename = filename.rsplit('/').next().unwrap();
-    suffix_path(filename, &sha256)
+    suffix_path(filename, sha256)
 }
 
 macro_rules! static_files {
@@ -73,7 +73,7 @@ impl<'tcx> JsonRenderer<'tcx> {
                 .map(|i| {
                     let item = &i.impl_item;
                     self.item(item.clone()).unwrap();
-                    self.id_from_item(&item)
+                    self.id_from_item(item)
                 })
                 .collect()
         })
@@ -104,7 +104,7 @@ impl<'tcx> JsonRenderer<'tcx> {
 
                 if item.item_id.is_local() || is_primitive_impl {
                     self.item(item.clone()).unwrap();
-                    Some(self.id_from_item(&item))
+                    Some(self.id_from_item(item))
                 } else {
                     None
                 }
@@ -223,7 +223,7 @@ impl<'tcx> FormatRenderer<'tcx> for JsonRenderer<'tcx> {
             | types::ItemEnum::Macro(_)
             | types::ItemEnum::ProcMacro(_) => false,
         };
-        let removed = self.index.borrow_mut().insert(new_item.id.clone(), new_item.clone());
+        let removed = self.index.borrow_mut().insert(new_item.id, new_item.clone());
 
         // FIXME(adotinthevoid): Currently, the index is duplicated. This is a sanity check
         // to make sure the items are unique. The main place this happens is when an item, is
@@ -289,7 +289,7 @@ impl<'tcx> FormatRenderer<'tcx> for JsonRenderer<'tcx> {
             format_version: types::FORMAT_VERSION,
         };
         if let Some(ref out_dir) = self.out_dir {
-            try_err!(create_dir_all(&out_dir), out_dir);
+            try_err!(create_dir_all(out_dir), out_dir);
 
             let mut p = out_dir.clone();
             p.push(output_crate.index.get(&output_crate.root).unwrap().name.clone().unwrap());
@@ -222,7 +222,7 @@ pub(crate) static RUSTDOC_LINTS: Lazy<Vec<&'static Lint>> = Lazy::new(|| {
 });
 
 pub(crate) fn register_lints(_sess: &Session, lint_store: &mut LintStore) {
-    lint_store.register_lints(&**RUSTDOC_LINTS);
+    lint_store.register_lints(&RUSTDOC_LINTS);
     lint_store.register_group(
         true,
         "rustdoc::all",
@@ -118,7 +118,7 @@ fn limit_filename_len(filename: String) -> String {
     }
 }
 
-impl<'a, 'b> CoverageCalculator<'a, 'b> {
+impl CoverageCalculator<'_, '_> {
     fn to_json(&self) -> String {
         serde_json::to_string(
             &self
@@ -188,7 +188,7 @@ impl<'a, 'b> CoverageCalculator<'a, 'b> {
     }
 }
 
-impl<'a, 'b> DocVisitor<'_> for CoverageCalculator<'a, 'b> {
+impl DocVisitor<'_> for CoverageCalculator<'_, '_> {
     fn visit_item(&mut self, i: &clean::Item) {
         if !i.item_id.is_local() {
             // non-local items are skipped because they can be out of the users control,
@@ -34,7 +34,7 @@ pub(crate) fn check_doc_test_visibility(krate: Crate, cx: &mut DocContext<'_>) -
     krate
 }
 
-impl<'a, 'tcx> DocVisitor<'_> for DocTestVisibilityLinter<'a, 'tcx> {
+impl DocVisitor<'_> for DocTestVisibilityLinter<'_, '_> {
     fn visit_item(&mut self, item: &Item) {
         look_for_tests(self.cx, &item.doc_value(), item);
 
@@ -106,7 +106,7 @@ pub(crate) fn should_have_doc_example(cx: &DocContext<'_>, item: &clean::Item) -
     level != lint::Level::Allow || matches!(source, LintLevelSource::Default)
 }
 
-pub(crate) fn look_for_tests<'tcx>(cx: &DocContext<'tcx>, dox: &str, item: &Item) {
+pub(crate) fn look_for_tests(cx: &DocContext<'_>, dox: &str, item: &Item) {
     let Some(hir_id) = DocContext::as_local_hir_id(cx.tcx, item.item_id) else {
         // If non-local, no need to check anything.
         return;
@ -53,12 +53,12 @@ pub(crate) fn collect_intra_doc_links<'a, 'tcx>(
|
|||
(krate, collector)
|
||||
}
|
||||
|
||||
fn filter_assoc_items_by_name_and_namespace<'a>(
|
||||
tcx: TyCtxt<'a>,
|
||||
fn filter_assoc_items_by_name_and_namespace(
|
||||
tcx: TyCtxt<'_>,
|
||||
assoc_items_of: DefId,
|
||||
ident: Ident,
|
||||
ns: Namespace,
|
||||
) -> impl Iterator<Item = &'a ty::AssocItem> + 'a {
|
||||
) -> impl Iterator<Item = &ty::AssocItem> + '_ {
|
||||
tcx.associated_items(assoc_items_of).filter_by_name_unhygienic(ident.name).filter(move |item| {
|
||||
item.kind.namespace() == ns && tcx.hygienic_eq(ident, item.ident(tcx), assoc_items_of)
|
||||
})
|
||||
|
@ -232,7 +232,7 @@ impl UrlFragment {
|
|||
s.push_str(kind);
|
||||
s.push_str(tcx.item_name(def_id).as_str());
|
||||
}
|
||||
UrlFragment::UserWritten(raw) => s.push_str(&raw),
|
||||
UrlFragment::UserWritten(raw) => s.push_str(raw),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -307,7 +307,7 @@ pub(crate) struct AmbiguousLinks {
|
|||
resolved: Vec<(Res, Option<UrlFragment>)>,
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
|
||||
impl<'tcx> LinkCollector<'_, 'tcx> {
|
||||
/// Given a full link, parse it as an [enum struct variant].
|
||||
///
|
||||
/// In particular, this will return an error whenever there aren't three
|
||||
|
@ -339,7 +339,7 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
|
|||
// If there's no third component, we saw `[a::b]` before and it failed to resolve.
|
||||
// So there's no partial res.
|
||||
let path = split.next().ok_or_else(no_res)?;
|
||||
let ty_res = self.resolve_path(&path, TypeNS, item_id, module_id).ok_or_else(no_res)?;
|
||||
let ty_res = self.resolve_path(path, TypeNS, item_id, module_id).ok_or_else(no_res)?;
|
||||
|
||||
match ty_res {
|
||||
Res::Def(DefKind::Enum, did) => match tcx.type_of(did).instantiate_identity().kind() {
|
||||
|
@ -628,7 +628,7 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
|
|||
.map(|item| (root_res, item.def_id))
|
||||
.collect::<Vec<_>>()
|
||||
})
|
||||
.unwrap_or(Vec::new())
|
||||
.unwrap_or_default()
|
||||
}
|
||||
}
|
||||
Res::Def(DefKind::TyAlias, did) => {
|
||||
|
@ -693,7 +693,7 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
|
|||
// Checks if item_name belongs to `impl SomeItem`
|
||||
let mut assoc_items: Vec<_> = tcx
|
||||
.inherent_impls(did)
|
||||
.into_iter()
|
||||
.iter()
|
||||
.flat_map(|&imp| {
|
||||
filter_assoc_items_by_name_and_namespace(
|
||||
tcx,
|
||||
|
@ -878,7 +878,7 @@ fn is_derive_trait_collision<T>(ns: &PerNS<Result<Vec<(Res, T)>, ResolutionFailu
|
|||
}
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> DocVisitor<'_> for LinkCollector<'a, 'tcx> {
|
||||
impl DocVisitor<'_> for LinkCollector<'_, '_> {
|
||||
fn visit_item(&mut self, item: &Item) {
|
||||
self.resolve_links(item);
|
||||
self.visit_item_recur(item)
|
||||
|
@ -1152,7 +1152,7 @@ impl LinkCollector<'_, '_> {
|
|||
}
|
||||
|
||||
cache.paths.get(&did).is_some()
|
||||
|| cache.external_paths.get(&did).is_some()
|
||||
|| cache.external_paths.contains_key(&did)
|
||||
|| !did.is_local()
|
||||
}
|
||||
|
||||
|
@@ -1271,7 +1271,7 @@ impl LinkCollector<'_, '_> {
}

res.def_id(self.cx.tcx).map(|page_id| ItemLink {
link: Box::<str>::from(&*diag_info.ori_link),
link: Box::<str>::from(diag_info.ori_link),
link_text: link_text.clone(),
page_id,
fragment,

@@ -1293,7 +1293,7 @@ impl LinkCollector<'_, '_> {

let page_id = clean::register_res(self.cx, rustc_hir::def::Res::Def(kind, id));
Some(ItemLink {
link: Box::<str>::from(&*diag_info.ori_link),
link: Box::<str>::from(diag_info.ori_link),
link_text: link_text.clone(),
page_id,
fragment,

@@ -1387,7 +1387,7 @@ impl LinkCollector<'_, '_> {
)
.unwrap_or_else(|| item.attr_span(self.cx.tcx));
rustc_session::parse::feature_err(
&self.cx.tcx.sess,
self.cx.tcx.sess,
sym::intra_doc_pointers,
span,
"linking to associated items of raw pointers is experimental",

@@ -1414,7 +1414,7 @@ impl LinkCollector<'_, '_> {

// FIXME: it would be nice to check that the feature gate was enabled in the original crate, not just ignore it altogether.
// However I'm not sure how to check that across crates.
if let Some(candidate) = candidates.get(0)
if let Some(candidate) = candidates.first()
&& candidate.0 == Res::Primitive(PrimitiveType::RawPointer)
&& key.path_str.contains("::")
// We only want to check this if this is an associated item.
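
`candidates.get(0)` to `candidates.first()` is clippy's `get_first` lint; `first()` says what it means. A minimal sketch:

fn main() {
    let candidates = vec!["raw_pointer", "slice"];
    // Before: candidates.get(0)
    if let Some(first) = candidates.first() {
        assert_eq!(*first, "raw_pointer");
    }
}
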
@@ -1493,7 +1493,7 @@ impl LinkCollector<'_, '_> {
}
}
resolution_failure(self, diag, path_str, disambiguator, smallvec![err]);
return vec![];
vec![]
}
}
}
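
Dropping `return` on the last expression of a function or block is clippy's `needless_return`. A minimal sketch:

fn lookup(found: bool) -> Vec<u32> {
    if found {
        vec![1, 2, 3]
    } else {
        // Before: `return vec![];` in tail position; the keyword is
        // redundant there.
        vec![]
    }
}

fn main() {
    assert!(lookup(false).is_empty());
}
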
@@ -1509,15 +1509,12 @@ impl LinkCollector<'_, '_> {
type_ns: candidate(TypeNS),
value_ns: candidate(ValueNS).and_then(|v_res| {
for (res, _) in v_res.iter() {
match res {
// Constructors are picked up in the type namespace.
Res::Def(DefKind::Ctor(..), _) => {
return Err(ResolutionFailure::WrongNamespace {
res: *res,
expected_ns: TypeNS,
});
}
_ => {}
// Constructors are picked up in the type namespace.
if let Res::Def(DefKind::Ctor(..), _) = res {
return Err(ResolutionFailure::WrongNamespace {
res: *res,
expected_ns: TypeNS,
});
}
}
Ok(v_res)
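
A `match` with one interesting arm plus `_ => {}` collapses into `if let` (clippy's `single_match`), which is what this hunk does. A sketch with a hypothetical `Res` enum standing in for the compiler's:

enum Res {
    Ctor(u32),
    Other,
}

fn check(res: &Res) -> Result<(), String> {
    // Before:
    // match res {
    //     Res::Ctor(..) => return Err(...),
    //     _ => {}
    // }
    if let Res::Ctor(id) = res {
        return Err(format!("constructor {id} is picked up in the type namespace"));
    }
    Ok(())
}

fn main() {
    assert!(check(&Res::Other).is_ok());
    assert!(check(&Res::Ctor(0)).is_err());
}
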
@@ -1536,7 +1533,7 @@ impl LinkCollector<'_, '_> {
disambiguator,
candidates.into_iter().filter_map(|res| res.err()).collect(),
);
return vec![];
vec![]
} else if len == 1 {
candidates.into_iter().filter_map(|res| res.ok()).flatten().collect::<Vec<_>>()
} else {

@@ -1850,7 +1847,7 @@ fn report_diagnostic(
(sp, MarkdownLinkRange::Destination(md_range))
}
MarkdownLinkRange::WholeLink(md_range) => (
source_span_for_markdown_range(tcx, dox, &md_range, &item.attrs.doc_strings),
source_span_for_markdown_range(tcx, dox, md_range, &item.attrs.doc_strings),
link_range.clone(),
),
};

@@ -1985,8 +1982,7 @@ fn resolution_failure(
.tcx
.resolutions(())
.all_macro_rules
.get(&Symbol::intern(path_str))
.is_some()
.contains_key(&Symbol::intern(path_str))
{
diag.note(format!(
"`macro_rules` named `{path_str}` exists in this crate, \

@@ -229,7 +229,7 @@ struct SyntheticImplCollector<'a, 'tcx> {
impls: Vec<Item>,
}

impl<'a, 'tcx> DocVisitor<'_> for SyntheticImplCollector<'a, 'tcx> {
impl DocVisitor<'_> for SyntheticImplCollector<'_, '_> {
fn visit_item(&mut self, i: &Item) {
if i.is_struct() || i.is_enum() || i.is_union() {
// FIXME(eddyb) is this `doc(hidden)` check needed?

@@ -256,7 +256,7 @@ impl<'cache> ItemAndAliasCollector<'cache> {
}
}

impl<'cache> DocVisitor<'_> for ItemAndAliasCollector<'cache> {
impl DocVisitor<'_> for ItemAndAliasCollector<'_> {
fn visit_item(&mut self, i: &Item) {
self.items.insert(i.item_id);

@@ -276,7 +276,7 @@ struct BadImplStripper<'a> {
cache: &'a Cache,
}

impl<'a> BadImplStripper<'a> {
impl BadImplStripper<'_> {
fn keep_impl(&self, ty: &Type, is_deref: bool) -> bool {
if let Generic(_) = ty {
// keep impls made on generics

@@ -25,7 +25,7 @@ pub(crate) fn run_lints(krate: Crate, cx: &mut DocContext<'_>) -> Crate {
krate
}

impl<'a, 'tcx> DocVisitor<'_> for Linter<'a, 'tcx> {
impl DocVisitor<'_> for Linter<'_, '_> {
fn visit_item(&mut self, item: &Item) {
let Some(hir_id) = DocContext::as_local_hir_id(self.cx.tcx, item.item_id) else {
// If non-local, no need to check anything.

@@ -34,7 +34,7 @@ impl<'a, 'tcx> DocVisitor<'_> for Linter<'a, 'tcx> {
let dox = item.doc_value();
if !dox.is_empty() {
let may_have_link = dox.contains(&[':', '['][..]);
let may_have_block_comment_or_html = dox.contains(&['<', '>']);
let may_have_block_comment_or_html = dox.contains(['<', '>']);
// ~~~rust
// // This is a real, supported commonmark syntax for block code
// ~~~
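
`dox.contains(&['<', '>'])` to `dox.contains(['<', '>'])` is clippy's `needless_borrows_for_generic_args`: a `[char; 2]` already satisfies the `Pattern` bound, so borrowing it first is unnecessary. A minimal sketch:

fn main() {
    let dox = "Vec<T> in a doc comment";
    // Before: dox.contains(&['<', '>'])
    assert!(dox.contains(['<', '>']));
}
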
@@ -18,7 +18,7 @@ use crate::html::markdown::main_body_opts;

pub(super) fn visit_item(cx: &DocContext<'_>, item: &Item, hir_id: HirId, dox: &str) {
let report_diag = |cx: &DocContext<'_>, msg: &'static str, range: Range<usize>| {
let sp = source_span_for_markdown_range(cx.tcx, &dox, &range, &item.attrs.doc_strings)
let sp = source_span_for_markdown_range(cx.tcx, dox, &range, &item.attrs.doc_strings)
.unwrap_or_else(|| item.attr_span(cx.tcx));
cx.tcx.node_span_lint(crate::lint::BARE_URLS, hir_id, sp, |lint| {
lint.primary_message(msg)

@@ -34,14 +34,14 @@ pub(super) fn visit_item(cx: &DocContext<'_>, item: &Item, hir_id: HirId, dox: &
});
};

let mut p = Parser::new_ext(&dox, main_body_opts()).into_offset_iter();
let mut p = Parser::new_ext(dox, main_body_opts()).into_offset_iter();

while let Some((event, range)) = p.next() {
match event {
Event::Text(s) => find_raw_urls(cx, &s, range, &report_diag),
// We don't want to check the text inside code blocks or links.
Event::Start(tag @ (Tag::CodeBlock(_) | Tag::Link { .. })) => {
while let Some((event, _)) = p.next() {
for (event, _) in p.by_ref() {
match event {
Event::End(end)
if mem::discriminant(&end) == mem::discriminant(&tag.to_end()) =>
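
The inner `while let Some(x) = p.next()` becomes `for x in p.by_ref()`; that is clippy's `while_let_on_iterator`. The `by_ref()` matters because the surrounding code keeps using the same iterator afterwards. A minimal sketch:

fn main() {
    let mut chars = "abc,def".chars();
    // Before: while let Some(c) = chars.next() { ... }
    for c in chars.by_ref() {
        if c == ',' {
            break;
        }
    }
    // The loop only borrowed `chars`, so it is still usable here.
    assert_eq!(chars.as_str(), "def");
}
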
@@ -150,7 +150,7 @@ impl Translate for BufferEmitter {
}

fn fallback_fluent_bundle(&self) -> &rustc_errors::FluentBundle {
&**self.fallback_bundle
&self.fallback_bundle
}
}

@@ -15,7 +15,7 @@ use crate::html::markdown::main_body_opts;
pub(crate) fn visit_item(cx: &DocContext<'_>, item: &Item, hir_id: HirId, dox: &str) {
let tcx = cx.tcx;
let report_diag = |msg: String, range: &Range<usize>, is_open_tag: bool| {
let sp = match source_span_for_markdown_range(tcx, &dox, range, &item.attrs.doc_strings) {
let sp = match source_span_for_markdown_range(tcx, dox, range, &item.attrs.doc_strings) {
Some(sp) => sp,
None => item.attr_span(tcx),
};

@@ -30,7 +30,7 @@ pub(crate) fn visit_item(cx: &DocContext<'_>, item: &Item, hir_id: HirId, dox: &
let mut generics_end = range.end;
if let Some(Some(mut generics_start)) = (is_open_tag
&& dox[..generics_end].ends_with('>'))
.then(|| extract_path_backwards(&dox, range.start))
.then(|| extract_path_backwards(dox, range.start))
{
while generics_start != 0
&& generics_end < dox.len()

@@ -39,19 +39,19 @@ pub(crate) fn visit_item(cx: &DocContext<'_>, item: &Item, hir_id: HirId, dox: &
{
generics_end += 1;
generics_start -= 1;
if let Some(new_start) = extract_path_backwards(&dox, generics_start) {
if let Some(new_start) = extract_path_backwards(dox, generics_start) {
generics_start = new_start;
}
if let Some(new_end) = extract_path_forward(&dox, generics_end) {
if let Some(new_end) = extract_path_forward(dox, generics_end) {
generics_end = new_end;
}
}
if let Some(new_end) = extract_path_forward(&dox, generics_end) {
if let Some(new_end) = extract_path_forward(dox, generics_end) {
generics_end = new_end;
}
let generics_sp = match source_span_for_markdown_range(
tcx,
&dox,
dox,
&(generics_start..generics_end),
&item.attrs.doc_strings,
) {

@@ -125,7 +125,7 @@ pub(crate) fn visit_item(cx: &DocContext<'_>, item: &Item, hir_id: HirId, dox: &
}
};

let p = Parser::new_with_broken_link_callback(&dox, main_body_opts(), Some(&mut replacer))
let p = Parser::new_with_broken_link_callback(dox, main_body_opts(), Some(&mut replacer))
.into_offset_iter();

for (event, range) in p {

@@ -233,7 +233,7 @@ fn extract_path_forward(text: &str, start_pos: usize) -> Option<usize> {
break;
}
}
while let Some(c) = chars.next() {
for c in chars {
if is_id_continue(c) {
current_pos += c.len_utf8();
} else {

@@ -35,12 +35,12 @@ pub(crate) fn visit_item(cx: &DocContext<'_>, item: &Item, hir_id: HirId) {
}
}

fn check_redundant_explicit_link_for_did<'md>(
fn check_redundant_explicit_link_for_did(
cx: &DocContext<'_>,
item: &Item,
did: DefId,
hir_id: HirId,
doc: &'md str,
doc: &str,
) {
let Some(local_item_id) = did.as_local() else {
return;
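
Dropping `'md` here is the function-signature side of `needless_lifetimes`: when the elision rules would infer the same lifetimes anyway, naming them is noise. A sketch with a hypothetical `excerpt` helper, not rustdoc's:

// Before: fn excerpt<'md>(doc: &'md str) -> &'md str { ... }
fn excerpt(doc: &str) -> &str {
    doc.lines().next().unwrap_or("")
}

fn main() {
    assert_eq!(excerpt("first\nsecond"), "first");
}
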
@@ -71,7 +71,7 @@ fn check_redundant_explicit_link_for_did<'md>(
return;
};

check_redundant_explicit_link(cx, item, hir_id, &doc, &resolutions);
check_redundant_explicit_link(cx, item, hir_id, doc, resolutions);
}

fn check_redundant_explicit_link<'md>(

@@ -90,60 +90,52 @@ fn check_redundant_explicit_link<'md>(
.into_offset_iter();

while let Some((event, link_range)) = offset_iter.next() {
match event {
Event::Start(Tag::Link { link_type, dest_url, .. }) => {
let link_data = collect_link_data(&mut offset_iter);
if let Event::Start(Tag::Link { link_type, dest_url, .. }) = event {
let link_data = collect_link_data(&mut offset_iter);

if let Some(resolvable_link) = link_data.resolvable_link.as_ref() {
if &link_data.display_link.replace('`', "") != resolvable_link {
// Skips if display link does not match to actual
// resolvable link, usually happens if display link
// has several segments, e.g.
// [this is just an `Option`](Option)
continue;
}
}

let explicit_link = dest_url.to_string();
let display_link = link_data.resolvable_link.clone()?;

if explicit_link.ends_with(&display_link) || display_link.ends_with(&explicit_link)
{
match link_type {
LinkType::Inline | LinkType::ReferenceUnknown => {
check_inline_or_reference_unknown_redundancy(
cx,
item,
hir_id,
doc,
resolutions,
link_range,
dest_url.to_string(),
link_data,
if link_type == LinkType::Inline {
(b'(', b')')
} else {
(b'[', b']')
},
);
}
LinkType::Reference => {
check_reference_redundancy(
cx,
item,
hir_id,
doc,
resolutions,
link_range,
&dest_url,
link_data,
);
}
_ => {}
}
if let Some(resolvable_link) = link_data.resolvable_link.as_ref() {
if &link_data.display_link.replace('`', "") != resolvable_link {
// Skips if display link does not match to actual
// resolvable link, usually happens if display link
// has several segments, e.g.
// [this is just an `Option`](Option)
continue;
}
}

let explicit_link = dest_url.to_string();
let display_link = link_data.resolvable_link.clone()?;

if explicit_link.ends_with(&display_link) || display_link.ends_with(&explicit_link) {
match link_type {
LinkType::Inline | LinkType::ReferenceUnknown => {
check_inline_or_reference_unknown_redundancy(
cx,
item,
hir_id,
doc,
resolutions,
link_range,
dest_url.to_string(),
link_data,
if link_type == LinkType::Inline { (b'(', b')') } else { (b'[', b']') },
);
}
LinkType::Reference => {
check_reference_redundancy(
cx,
item,
hir_id,
doc,
resolutions,
link_range,
&dest_url,
link_data,
);
}
_ => {}
}
}
_ => {}
}
}
@@ -169,18 +161,18 @@ fn check_inline_or_reference_unknown_redundancy(

if dest_res == display_res {
let link_span =
source_span_for_markdown_range(cx.tcx, &doc, &link_range, &item.attrs.doc_strings)
source_span_for_markdown_range(cx.tcx, doc, &link_range, &item.attrs.doc_strings)
.unwrap_or(item.attr_span(cx.tcx));
let explicit_span = source_span_for_markdown_range(
cx.tcx,
&doc,
doc,
&offset_explicit_range(doc, link_range, open, close),
&item.attrs.doc_strings,
)?;
let display_span = source_span_for_markdown_range(
cx.tcx,
&doc,
&resolvable_link_range,
doc,
resolvable_link_range,
&item.attrs.doc_strings,
)?;

@@ -210,27 +202,27 @@ fn check_reference_redundancy(
let (resolvable_link, resolvable_link_range) =
(&link_data.resolvable_link?, &link_data.resolvable_link_range?);
let (dest_res, display_res) =
(find_resolution(resolutions, &dest)?, find_resolution(resolutions, resolvable_link)?);
(find_resolution(resolutions, dest)?, find_resolution(resolutions, resolvable_link)?);

if dest_res == display_res {
let link_span =
source_span_for_markdown_range(cx.tcx, &doc, &link_range, &item.attrs.doc_strings)
source_span_for_markdown_range(cx.tcx, doc, &link_range, &item.attrs.doc_strings)
.unwrap_or(item.attr_span(cx.tcx));
let explicit_span = source_span_for_markdown_range(
cx.tcx,
&doc,
doc,
&offset_explicit_range(doc, link_range.clone(), b'[', b']'),
&item.attrs.doc_strings,
)?;
let display_span = source_span_for_markdown_range(
cx.tcx,
&doc,
&resolvable_link_range,
doc,
resolvable_link_range,
&item.attrs.doc_strings,
)?;
let def_span = source_span_for_markdown_range(
cx.tcx,
&doc,
doc,
&offset_reference_def_range(doc, dest, link_range),
&item.attrs.doc_strings,
)?;

@@ -263,7 +255,7 @@ fn collect_link_data<'input, F: BrokenLinkCallback<'input>>(
let mut display_link = String::new();
let mut is_resolvable = true;

while let Some((event, range)) = offset_iter.next() {
for (event, range) in offset_iter.by_ref() {
match event {
Event::Text(code) => {
let code = code.to_string();
@@ -22,7 +22,7 @@ pub(crate) fn visit_item(cx: &DocContext<'_>, item: &Item, hir_id: HirId, dox: &
.find(|link| *link.original_text == *broken_link.reference)
.map(|link| ((*link.href).into(), (*link.new_text).into()))
};
let parser = Parser::new_with_broken_link_callback(&dox, main_body_opts(), Some(&mut replacer))
let parser = Parser::new_with_broken_link_callback(dox, main_body_opts(), Some(&mut replacer))
.into_offset_iter();

let mut element_stack = Vec::new();

@@ -44,7 +44,7 @@ pub(crate) fn visit_item(cx: &DocContext<'_>, item: &Item, hir_id: HirId, dox: &
// use the span of the entire attribute as a fallback.
let span = source_span_for_markdown_range(
tcx,
&dox,
dox,
&(backtick_index..backtick_index + 1),
&item.attrs.doc_strings,
)

@@ -61,12 +61,12 @@ pub(crate) fn visit_item(cx: &DocContext<'_>, item: &Item, hir_id: HirId, dox: &
// "foo` `bar`" -> "`foo` `bar`"
if let Some(suggest_index) =
clamp_start(guess, &element.suggestible_ranges)
&& can_suggest_backtick(&dox, suggest_index)
&& can_suggest_backtick(dox, suggest_index)
{
suggest_insertion(
cx,
item,
&dox,
dox,
lint,
suggest_index,
'`',

@@ -80,11 +80,11 @@ pub(crate) fn visit_item(cx: &DocContext<'_>, item: &Item, hir_id: HirId, dox: &
// Don't `clamp_end` here, because the suggestion is guaranteed to be inside
// an inline code node and we intentionally "break" the inline code here.
let suggest_index = guess;
if can_suggest_backtick(&dox, suggest_index) {
if can_suggest_backtick(dox, suggest_index) {
suggest_insertion(
cx,
item,
&dox,
dox,
lint,
suggest_index,
'`',

@@ -98,15 +98,15 @@ pub(crate) fn visit_item(cx: &DocContext<'_>, item: &Item, hir_id: HirId, dox: &
if !element.prev_code_guess.is_confident() {
// "`foo` bar`" -> "`foo` `bar`"
if let Some(guess) =
guess_start_of_code(&dox, element.element_range.start..backtick_index)
guess_start_of_code(dox, element.element_range.start..backtick_index)
&& let Some(suggest_index) =
clamp_start(guess, &element.suggestible_ranges)
&& can_suggest_backtick(&dox, suggest_index)
&& can_suggest_backtick(dox, suggest_index)
{
suggest_insertion(
cx,
item,
&dox,
dox,
lint,
suggest_index,
'`',

@@ -120,16 +120,16 @@ pub(crate) fn visit_item(cx: &DocContext<'_>, item: &Item, hir_id: HirId, dox: &
// if we already suggested opening backtick. For example:
// "foo`." -> "`foo`." or "foo`s" -> "`foo`s".
if let Some(guess) =
guess_end_of_code(&dox, backtick_index + 1..element.element_range.end)
guess_end_of_code(dox, backtick_index + 1..element.element_range.end)
&& let Some(suggest_index) =
clamp_end(guess, &element.suggestible_ranges)
&& can_suggest_backtick(&dox, suggest_index)
&& can_suggest_backtick(dox, suggest_index)
&& (!help_emitted || suggest_index - backtick_index > 2)
{
suggest_insertion(
cx,
item,
&dox,
dox,
lint,
suggest_index,
'`',

@@ -148,7 +148,7 @@ pub(crate) fn visit_item(cx: &DocContext<'_>, item: &Item, hir_id: HirId, dox: &
suggest_insertion(
cx,
item,
&dox,
dox,
lint,
backtick_index,
'\\',

@@ -177,13 +177,13 @@ pub(crate) fn visit_item(cx: &DocContext<'_>, item: &Item, hir_id: HirId, dox: &
let is_confident = text_inside.starts_with(char::is_whitespace)
|| text_inside.ends_with(char::is_whitespace);

if let Some(guess) = guess_end_of_code(&dox, range_inside) {
if let Some(guess) = guess_end_of_code(dox, range_inside) {
// Find earlier end of code.
element.prev_code_guess = PrevCodeGuess::End { guess, is_confident };
} else {
// Find alternate start of code.
let range_before = element.element_range.start..event_range.start;
if let Some(guess) = guess_start_of_code(&dox, range_before) {
if let Some(guess) = guess_start_of_code(dox, range_before) {
element.prev_code_guess = PrevCodeGuess::Start { guess, is_confident };
}
}

@@ -421,7 +421,7 @@ fn suggest_insertion(

if let Some(span) = source_span_for_markdown_range(
cx.tcx,
&dox,
dox,
&(insert_index..insert_index),
&item.attrs.doc_strings,
) {
@@ -49,8 +49,8 @@ pub(crate) fn visit_item(cx: &DocContext<'_>, item: &Item, hir_id: HirId, dox: &
| cmarkn::Options::ENABLE_TASKLISTS
| cmarkn::Options::ENABLE_SMART_PUNCTUATION
}
let mut parser_new = cmarkn::Parser::new_ext(&dox, main_body_opts_new()).into_offset_iter();
while let Some((event, span)) = parser_new.next() {
let parser_new = cmarkn::Parser::new_ext(dox, main_body_opts_new()).into_offset_iter();
for (event, span) in parser_new {
if let cmarkn::Event::Start(cmarkn::Tag::BlockQuote(_)) = event {
if !dox[span.clone()].starts_with("> ") {
spaceless_block_quotes.insert(span.start);

@@ -71,8 +71,8 @@ pub(crate) fn visit_item(cx: &DocContext<'_>, item: &Item, hir_id: HirId, dox: &
| cmarko::Options::ENABLE_TASKLISTS
| cmarko::Options::ENABLE_SMART_PUNCTUATION
}
let mut parser_old = cmarko::Parser::new_ext(&dox, main_body_opts_old()).into_offset_iter();
while let Some((event, span)) = parser_old.next() {
let parser_old = cmarko::Parser::new_ext(dox, main_body_opts_old()).into_offset_iter();
for (event, span) in parser_old {
if let cmarko::Event::Start(cmarko::Tag::BlockQuote) = event {
if !dox[span.clone()].starts_with("> ") {
spaceless_block_quotes.remove(&span.start);

@@ -88,13 +88,13 @@ pub(crate) fn visit_item(cx: &DocContext<'_>, item: &Item, hir_id: HirId, dox: &

for start in spaceless_block_quotes {
let (span, precise) =
source_span_for_markdown_range(tcx, &dox, &(start..start + 1), &item.attrs.doc_strings)
source_span_for_markdown_range(tcx, dox, &(start..start + 1), &item.attrs.doc_strings)
.map(|span| (span, true))
.unwrap_or_else(|| (item.attr_span(tcx), false));

tcx.node_span_lint(crate::lint::UNPORTABLE_MARKDOWN, hir_id, span, |lint| {
lint.primary_message("unportable markdown");
lint.help(format!("confusing block quote with no space after the `>` marker"));
lint.help("confusing block quote with no space after the `>` marker".to_string());
if precise {
lint.span_suggestion(
span.shrink_to_hi(),
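
`format!` with no interpolation is an allocation with extra steps; clippy's `useless_format` suggests `.to_string()` (or a `&'static str` where the API allows one). A minimal sketch:

fn main() {
    // Before: format!("confusing block quote ...")
    let help = "confusing block quote with no space after the `>` marker".to_string();
    assert!(help.starts_with("confusing"));
}
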
@@ -113,7 +113,7 @@ pub(crate) fn visit_item(cx: &DocContext<'_>, item: &Item, hir_id: HirId, dox: &
}
for (_caret, span) in missing_footnote_references {
let (ref_span, precise) =
source_span_for_markdown_range(tcx, &dox, &span, &item.attrs.doc_strings)
source_span_for_markdown_range(tcx, dox, &span, &item.attrs.doc_strings)
.map(|span| (span, true))
.unwrap_or_else(|| (item.attr_span(tcx), false));

@@ -27,7 +27,7 @@ struct CfgPropagator<'a, 'tcx> {
cx: &'a mut DocContext<'tcx>,
}

impl<'a, 'tcx> CfgPropagator<'a, 'tcx> {
impl CfgPropagator<'_, '_> {
// Some items need to merge their attributes with their parents' otherwise a few of them
// (mostly `cfg` ones) will be missing.
fn merge_with_parent_attributes(&mut self, item: &mut Item) {

@@ -65,7 +65,7 @@ impl<'a, 'tcx> CfgPropagator<'a, 'tcx> {
}
}

impl<'a, 'tcx> DocFolder for CfgPropagator<'a, 'tcx> {
impl DocFolder for CfgPropagator<'_, '_> {
fn fold_item(&mut self, mut item: Item) -> Option<Item> {
let old_parent_cfg = self.parent_cfg.clone();

@@ -30,7 +30,7 @@ struct StabilityPropagator<'a, 'tcx> {
cx: &'a mut DocContext<'tcx>,
}

impl<'a, 'tcx> DocFolder for StabilityPropagator<'a, 'tcx> {
impl DocFolder for StabilityPropagator<'_, '_> {
fn fold_item(&mut self, mut item: Item) -> Option<Item> {
let parent_stability = self.parent_stability;

@@ -21,7 +21,7 @@ struct AliasedNonLocalStripper<'tcx> {
tcx: TyCtxt<'tcx>,
}

impl<'tcx> DocFolder for AliasedNonLocalStripper<'tcx> {
impl DocFolder for AliasedNonLocalStripper<'_> {
fn fold_item(&mut self, i: Item) -> Option<Item> {
Some(match i.kind {
clean::TypeAliasItem(..) => {

@@ -39,7 +39,7 @@ struct NonLocalStripper<'tcx> {
tcx: TyCtxt<'tcx>,
}

impl<'tcx> DocFolder for NonLocalStripper<'tcx> {
impl DocFolder for NonLocalStripper<'_> {
fn fold_item(&mut self, i: Item) -> Option<Item> {
// If not local, we want to respect the original visibility of
// the field and not the one given by the user for the currrent crate.

@@ -50,7 +50,7 @@ impl<'tcx> DocFolder for NonLocalStripper<'tcx> {
{
if i.is_doc_hidden()
// Default to *not* stripping items with inherited visibility.
|| i.visibility(self.tcx).map_or(false, |viz| viz != Visibility::Public)
|| i.visibility(self.tcx).is_some_and(|viz| viz != Visibility::Public)
{
return Some(strip_item(i));
}
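
`map_or(false, |x| cond)` on an `Option` reads better as `is_some_and(|x| cond)` (stable since Rust 1.70); that is the clippy suggestion behind the hunk above (likely `unnecessary_map_or`). A minimal sketch:

fn main() {
    let visibility: Option<&str> = Some("crate");
    // Before: visibility.map_or(false, |v| v != "public")
    let restricted = visibility.is_some_and(|v| v != "public");
    assert!(restricted);
}
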
@@ -57,7 +57,7 @@ struct Stripper<'a, 'tcx> {
last_reexport: Option<LocalDefId>,
}

impl<'a, 'tcx> Stripper<'a, 'tcx> {
impl Stripper<'_, '_> {
fn set_last_reexport_then_fold_item(&mut self, i: Item) -> Item {
let prev_from_reexport = self.last_reexport;
if i.inline_stmt_id.is_some() {

@@ -86,7 +86,7 @@ impl<'a, 'tcx> Stripper<'a, 'tcx> {
}
}

impl<'a, 'tcx> DocFolder for Stripper<'a, 'tcx> {
impl DocFolder for Stripper<'_, '_> {
fn fold_item(&mut self, i: Item) -> Option<Item> {
let has_doc_hidden = i.is_doc_hidden();
let is_impl_or_exported_macro = match i.kind {

@@ -37,7 +37,7 @@ fn is_item_reachable(
}
}

impl<'a, 'tcx> DocFolder for Stripper<'a, 'tcx> {
impl DocFolder for Stripper<'_, '_> {
fn fold_item(&mut self, i: Item) -> Option<Item> {
match i.kind {
clean::StrippedItem(..) => {

@@ -171,7 +171,7 @@ pub(crate) struct ImplStripper<'a, 'tcx> {
pub(crate) document_hidden: bool,
}

impl<'a> ImplStripper<'a, '_> {
impl ImplStripper<'_, '_> {
#[inline]
fn should_keep_impl(&self, item: &Item, for_def_id: DefId) -> bool {
if !for_def_id.is_local() || self.retained.contains(&for_def_id.into()) {

@@ -193,7 +193,7 @@ impl<'a> ImplStripper<'a, '_> {
}
}

impl<'a> DocFolder for ImplStripper<'a, '_> {
impl DocFolder for ImplStripper<'_, '_> {
fn fold_item(&mut self, i: Item) -> Option<Item> {
if let clean::ImplItem(ref imp) = i.kind {
// Impl blocks can be skipped if they are: empty; not a trait impl; and have no

@@ -259,7 +259,7 @@ pub(crate) struct ImportStripper<'tcx> {
pub(crate) document_hidden: bool,
}

impl<'tcx> ImportStripper<'tcx> {
impl ImportStripper<'_> {
fn import_should_be_hidden(&self, i: &Item, imp: &clean::Import) -> bool {
if self.is_json_output {
// FIXME: This should be handled the same way as for HTML output.

@@ -270,11 +270,11 @@ impl<'tcx> ImportStripper<'tcx> {
}
}

impl<'tcx> DocFolder for ImportStripper<'tcx> {
impl DocFolder for ImportStripper<'_> {
fn fold_item(&mut self, i: Item) -> Option<Item> {
match &i.kind {
clean::ImportItem(imp)
if !self.document_hidden && self.import_should_be_hidden(&i, &imp) =>
if !self.document_hidden && self.import_should_be_hidden(&i, imp) =>
{
None
}

@@ -54,7 +54,7 @@ fn skip_comment(iter: &mut Peekable<Chars<'_>>) {

/// Skips a line comment (`//`).
fn skip_line_comment(iter: &mut Peekable<Chars<'_>>) {
while let Some(c) = iter.next() {
for c in iter.by_ref() {
if c == '\n' {
break;
}

@@ -55,7 +55,7 @@ pub(crate) trait DocVisitor<'a>: Sized {
/// Don't override!
fn visit_item_recur(&mut self, item: &'a Item) {
match &item.kind {
StrippedItem(i) => self.visit_inner_recur(&*i),
StrippedItem(i) => self.visit_inner_recur(i),
_ => self.visit_inner_recur(&item.kind),
}
}

@@ -312,7 +312,7 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
Node::Item(_) if is_bang_macro && !please_inline && renamed.is_some() && is_hidden => {
return false;
}
Node::Item(&hir::Item { kind: hir::ItemKind::Mod(ref m), .. }) if glob => {
Node::Item(&hir::Item { kind: hir::ItemKind::Mod(m), .. }) if glob => {
let prev = mem::replace(&mut self.inlining, true);
for &i in m.item_ids {
let i = tcx.hir().item(i);
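
The `ref` removals in this file work because the matched value is already behind a reference, so the binding is a reference with or without the keyword. A sketch with a hypothetical enum standing in for `hir::ItemKind`:

enum Kind {
    Named(String),
    Anon,
}

fn describe(kind: &Kind) -> usize {
    match kind {
        // Before: Kind::Named(ref name). Match ergonomics already bind
        // `name` as `&String` here, so `ref` is redundant.
        Kind::Named(name) => name.len(),
        Kind::Anon => 0,
    }
}

fn main() {
    assert_eq!(describe(&Kind::Named("m".into())), 1);
    assert_eq!(describe(&Kind::Anon), 0);
}
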
@@ -476,7 +476,7 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
self.add_to_current_mod(item, renamed, import_id);
}
}
hir::ItemKind::Macro(ref macro_def, _) => {
hir::ItemKind::Macro(macro_def, _) => {
// `#[macro_export] macro_rules!` items are handled separately in `visit()`,
// above, since they need to be documented at the module top level. Accordingly,
// we only want to handle macros if one of three conditions holds:

@@ -496,7 +496,7 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
self.add_to_current_mod(item, renamed, import_id);
}
}
hir::ItemKind::Mod(ref m) => {
hir::ItemKind::Mod(m) => {
self.enter_mod(item.owner_id.def_id, m, name, renamed, import_id);
}
hir::ItemKind::Fn(..)

@@ -560,7 +560,7 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {

// We need to implement this visitor so it'll go everywhere and retrieve items we're interested in
// such as impl blocks in const blocks.
impl<'a, 'tcx> Visitor<'tcx> for RustdocVisitor<'a, 'tcx> {
impl<'tcx> Visitor<'tcx> for RustdocVisitor<'_, 'tcx> {
type NestedFilter = nested_filter::All;

fn nested_visit_map(&mut self) -> Self::Map {