Fix fallout in rustdoc
parent 4c98e1bc59
commit 0b9e26f390
2 changed files with 10 additions and 12 deletions
@@ -27,7 +27,7 @@ use std::io;
 use std::io::prelude::*;

 use syntax::codemap::CodeMap;
-use syntax::parse::lexer::{self, Reader, TokenAndSpan};
+use syntax::parse::lexer::{self, TokenAndSpan};
 use syntax::parse::token;
 use syntax::parse;
 use syntax_pos::Span;
@@ -42,8 +42,7 @@ pub fn render_with_highlighting(src: &str, class: Option<&str>, id: Option<&str>
     let mut out = Vec::new();
     write_header(class, id, &mut out).unwrap();

-    let mut classifier = Classifier::new(lexer::StringReader::new(&sess.span_diagnostic, fm),
-                                         sess.codemap());
+    let mut classifier = Classifier::new(lexer::StringReader::new(&sess, fm), sess.codemap());
     if let Err(_) = classifier.write_source(&mut out) {
         return format!("<pre>{}</pre>", src);
     }
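
The substantive change in this hunk is the StringReader::new signature: the reader now borrows the whole ParseSess rather than just its span_diagnostic handle, which also lets the two-line Classifier::new(...) call collapse onto one line. A minimal sketch of the updated call site, with parse::ParseSess::new() assumed from the surrounding file rather than shown in this diff:

    // Sketch only; ParseSess::new() is an assumption, the rest mirrors the hunk above.
    let sess = parse::ParseSess::new();
    let fm = sess.codemap().new_filemap("<stdin>".to_string(), None, src.to_string());

    // Before: lexer::StringReader::new(&sess.span_diagnostic, fm)
    // After:  the reader borrows the whole ParseSess, so its diagnostics handle
    //         and its codemap travel together.
    let mut classifier = Classifier::new(lexer::StringReader::new(&sess, fm), sess.codemap());

The same one-line replacement is repeated for render_inner_with_highlighting in the next hunk.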
@@ -63,8 +62,7 @@ pub fn render_inner_with_highlighting(src: &str) -> io::Result<String> {
     let fm = sess.codemap().new_filemap("<stdin>".to_string(), None, src.to_string());

     let mut out = Vec::new();
-    let mut classifier = Classifier::new(lexer::StringReader::new(&sess.span_diagnostic, fm),
-                                         sess.codemap());
+    let mut classifier = Classifier::new(lexer::StringReader::new(&sess, fm), sess.codemap());
     classifier.write_source(&mut out)?;

     Ok(String::from_utf8_lossy(&out).into_owned())
@@ -185,10 +183,10 @@ impl<'a> Classifier<'a> {
             Ok(tas) => tas,
             Err(_) => {
                 self.lexer.emit_fatal_errors();
-                self.lexer.span_diagnostic.struct_warn("Backing out of syntax highlighting")
-                    .note("You probably did not intend to render this \
-                           as a rust code-block")
+                self.lexer.sess.span_diagnostic
+                    .struct_warn("Backing out of syntax highlighting")
+                    .note("You probably did not intend to render this as a rust code-block")
                     .emit();
                 return Err(io::Error::new(io::ErrorKind::Other, ""));
             }
         };
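
With the lexer now holding a ParseSess, rustdoc reaches the diagnostics handler through self.lexer.sess.span_diagnostic instead of a dedicated span_diagnostic field, and the warning text is joined onto a single line. The builder chain itself is unchanged; restated in isolation (everything here is taken from the hunk above, only the field access differs from the old code):

    // Before: self.lexer.span_diagnostic.struct_warn(...)
    // After:  the handler is reached through the stored session.
    self.lexer.sess.span_diagnostic
        .struct_warn("Backing out of syntax highlighting")
        .note("You probably did not intend to render this as a rust code-block")
        .emit();

The remaining hunks are in the second changed file, the StringReader impl in the lexer itself.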
@@ -111,7 +111,7 @@ impl<'a> StringReader<'a> {
         }
     }
     /// Return the next token. EFFECT: advances the string_reader.
-    fn try_next_token(&mut self) -> Result<TokenAndSpan, ()> {
+    pub fn try_next_token(&mut self) -> Result<TokenAndSpan, ()> {
         assert!(self.fatal_errs.is_empty());
         let ret_val = TokenAndSpan {
             tok: replace(&mut self.peek_tok, token::Underscore),
@@ -123,13 +123,13 @@ impl<'a> StringReader<'a> {
     fn fatal(&self, m: &str) -> FatalError {
         self.fatal_span(self.peek_span, m)
     }
-    fn emit_fatal_errors(&mut self) {
+    pub fn emit_fatal_errors(&mut self) {
         for err in &mut self.fatal_errs {
             err.emit();
         }
         self.fatal_errs.clear();
     }
-    fn peek(&self) -> TokenAndSpan {
+    pub fn peek(&self) -> TokenAndSpan {
         // FIXME(pcwalton): Bad copy!
         TokenAndSpan {
             tok: self.peek_tok.clone(),
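
The lexer-side changes are purely about visibility: try_next_token, emit_fatal_errors, and peek become pub, presumably because rustdoc's Classifier now calls them as inherent methods after the Reader trait import was dropped in the first hunk. A rough sketch of such an out-of-crate consumer loop, assuming lexer is a lexer::StringReader (the loop shape is illustrative, not code from this commit):

    // Illustrative only: drive the reader with the newly public methods.
    loop {
        let tas = match lexer.try_next_token() {
            Ok(tas) => tas,
            Err(_) => {
                // Flush buffered fatal diagnostics before bailing out.
                lexer.emit_fatal_errors();
                break;
            }
        };
        if tas.tok == token::Eof {
            break;
        }
        // ... classify tas.tok / tas.sp and write highlighted output ...
    }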