libsyntax: Remove all uses of {:?}.
parent 38aca17c47
commit 26e547af5d

10 changed files with 27 additions and 27 deletions
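For context on why the format strings change and why several types gain a `Show` derive: in the pre-1.0 compiler of this era, `{:?}` was rendered through the reflection-based `debug` crate, while `{}` went through the `Show` trait; dropping `{:?}` is what lets the build stop depending on `debug` in the Makefile hunk below. A minimal sketch of the same distinction in present-day Rust, where the analogous pair is `Debug` for `{:?}` and `Display` for `{}` (the `Token` type and its `kind` field here are purely illustrative, not part of this commit):

use std::fmt;

// Illustrative type only; not taken from the diff below.
#[derive(Debug)] // lets `{:?}` work (modern stand-in for the old reflective formatter)
struct Token {
    kind: &'static str,
}

// `{}` needs an explicit Display impl, the rough analogue of the
// `#[deriving(Show)]` attributes added in this commit.
impl fmt::Display for Token {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.kind)
    }
}

fn main() {
    let t = Token { kind: "POUND" };
    println!("self.token={:?}", t); // debug-style output: Token { kind: "POUND" }
    println!("self.token={}", t);   // display-style output: POUND
}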
@@ -51,7 +51,7 @@
 TARGET_CRATES := libc std green native flate arena glob term semver \
                  uuid serialize sync getopts collections num test time rand \
-                 url log regex graphviz core rbml rlibc alloc debug rustrt \
+                 url log regex graphviz core rbml rlibc alloc rustrt \
                  unicode
 HOST_CRATES := syntax rustc rustdoc fourcc hexfloat regex_macros fmt_macros \
                rustc_llvm rustc_back
@@ -63,20 +63,19 @@ DEPS_libc := core
 DEPS_rlibc := core
 DEPS_unicode := core
 DEPS_alloc := core libc native:jemalloc
-DEPS_debug := std
 DEPS_rustrt := alloc core libc collections native:rustrt_native
 DEPS_std := core libc rand alloc collections rustrt sync unicode \
             native:rust_builtin native:backtrace
 DEPS_graphviz := std
 DEPS_green := std native:context_switch
 DEPS_native := std
-DEPS_syntax := std term serialize log fmt_macros debug arena libc
+DEPS_syntax := std term serialize log fmt_macros arena libc
 DEPS_rustc := syntax flate arena serialize getopts rbml \
-              time log graphviz debug rustc_llvm rustc_back
+              time log graphviz rustc_llvm rustc_back
 DEPS_rustc_llvm := native:rustllvm libc std
 DEPS_rustc_back := std syntax rustc_llvm flate log libc
 DEPS_rustdoc := rustc native:hoedown serialize getopts \
-                test time debug
+                test time
 DEPS_flate := std native:miniz
 DEPS_arena := std
 DEPS_graphviz := std
@@ -81,7 +81,7 @@ impl PartialEq for Ident {
         // one example and its non-hygienic counterpart would be:
         // syntax::parse::token::mtwt_token_eq
         // syntax::ext::tt::macro_parser::token_name_eq
-        fail!("not allowed to compare these idents: {:?}, {:?}. \
+        fail!("not allowed to compare these idents: {}, {}. \
               Probably related to issue \\#6993", self, other);
     }
 }
@@ -96,6 +96,7 @@ pub fn path_to_string<PI: Iterator<PathElem>>(mut path: PI) -> String {
     }).to_string()
 }

+#[deriving(Show)]
 pub enum Node<'ast> {
     NodeItem(&'ast Item),
     NodeForeignItem(&'ast ForeignItem),
@@ -387,7 +388,7 @@ impl<'ast> Map<'ast> {
                 PathName(ident.name)
             }
             MethMac(_) => {
-                fail!("no path elem for {:?}", node)
+                fail!("no path elem for {}", node)
             }
         }
     }
@@ -401,13 +402,13 @@ impl<'ast> Map<'ast> {
                     MethDecl(ident, _, _, _, _, _, _, _) => {
                         PathName(ident.name)
                     }
-                    MethMac(_) => fail!("no path elem for {:?}", node),
+                    MethMac(_) => fail!("no path elem for {}", node),
                 }
             }
             TypeTraitItem(ref m) => PathName(m.ident.name),
         },
         NodeVariant(v) => PathName(v.node.name.name),
-        _ => fail!("no path elem for {:?}", node)
+        _ => fail!("no path elem for {}", node)
     }
 }

@@ -308,7 +308,7 @@ pub fn empty_generics() -> Generics {
 // ______________________________________________________________________
 // Enumerating the IDs which appear in an AST

-#[deriving(Encodable, Decodable)]
+#[deriving(Encodable, Decodable, Show)]
 pub struct IdRange {
     pub min: NodeId,
     pub max: NodeId,
@@ -1700,11 +1700,11 @@ foo_module!()
         }).enumerate() {
             if mtwt::resolve(v.segments.get(0).identifier) != resolved_binding {
                 println!("uh oh, xx binding didn't match xx varref:");
-                println!("this is xx varref \\# {:?}",idx);
-                println!("binding: {:?}",cxbind);
-                println!("resolves to: {:?}",resolved_binding);
-                println!("varref: {:?}",v.segments.get(0).identifier);
-                println!("resolves to: {:?}",
+                println!("this is xx varref \\# {}", idx);
+                println!("binding: {}", cxbind);
+                println!("resolves to: {}", resolved_binding);
+                println!("varref: {}", v.segments.get(0).identifier);
+                println!("resolves to: {}",
                          mtwt::resolve(v.segments.get(0).identifier));
                 mtwt::with_sctable(|x| mtwt::display_sctable(x));
             }
@@ -38,7 +38,7 @@ pub struct SCTable {
     rename_memo: RefCell<HashMap<(SyntaxContext,Ident,Name),SyntaxContext>>,
 }

-#[deriving(PartialEq, Encodable, Decodable, Hash)]
+#[deriving(PartialEq, Encodable, Decodable, Hash, Show)]
 pub enum SyntaxContext_ {
     EmptyCtxt,
     Mark (Mrk,SyntaxContext),
@@ -129,7 +129,7 @@ fn new_sctable_internal() -> SCTable {
 pub fn display_sctable(table: &SCTable) {
     error!("SC table:");
     for (idx,val) in table.table.borrow().iter().enumerate() {
-        error!("{:4u} : {:?}",idx,val);
+        error!("{:4u} : {}",idx,val);
     }
 }

@@ -1389,7 +1389,7 @@ mod test {
             let a_val = $a;
             let b_val = $b;
             if !(pred_val(a_val.as_slice(),b_val.as_slice())) {
-                fail!("expected args satisfying {}, got {:?} and {:?}",
+                fail!("expected args satisfying {}, got {} and {}",
                       $predname, a_val, b_val);
             }
         }
@@ -63,7 +63,7 @@ impl<'a> ParserAttr for Parser<'a> {
     /// If permit_inner is true, then a leading `!` indicates an inner
     /// attribute
     fn parse_attribute(&mut self, permit_inner: bool) -> ast::Attribute {
-        debug!("parse_attributes: permit_inner={:?} self.token={:?}",
+        debug!("parse_attributes: permit_inner={} self.token={}",
               permit_inner, self.token);
        let (span, value, mut style) = match self.token {
            token::POUND => {
@@ -823,19 +823,19 @@ mod test {
                             }
                         },
                         _ => {
-                            error!("failing value 3: {:?}",first_set);
+                            error!("failing value 3: {}",first_set);
                             assert_eq!("wrong 3","correct")
                         }
                     }
                 },
                 _ => {
-                    error!("failing value 2: {:?}",delim_elts);
+                    error!("failing value 2: {}",delim_elts);
                     assert_eq!("wrong","correct");
                 }
             }
         },
         _ => {
-            error!("failing value: {:?}",tts);
+            error!("failing value: {}",tts);
             assert_eq!("wrong 1","correct");
         }
     }
@@ -492,7 +492,7 @@ impl<'a> Parser<'a> {
     /// followed by some token from the set edible + inedible. Recover
     /// from anticipated input errors, discarding erroneous characters.
     pub fn commit_expr(&mut self, e: &Expr, edible: &[token::Token], inedible: &[token::Token]) {
-        debug!("commit_expr {:?}", e);
+        debug!("commit_expr {}", e);
         match e.node {
             ExprPath(..) => {
                 // might be unit-struct construction; check for recoverableinput error.
@@ -1535,7 +1535,7 @@ impl<'a> Parser<'a> {
             // TYPE TO BE INFERRED
             TyInfer
         } else {
-            let msg = format!("expected type, found token {:?}", self.token);
+            let msg = format!("expected type, found token {}", self.token);
             self.fatal(msg.as_slice());
         };

@@ -1591,7 +1591,7 @@ impl<'a> Parser<'a> {
     /// identifier names.
     pub fn parse_arg_general(&mut self, require_name: bool) -> Arg {
         let pat = if require_name || self.is_named_argument() {
-            debug!("parse_arg_general parse_pat (require_name:{:?})",
+            debug!("parse_arg_general parse_pat (require_name:{})",
                    require_name);
             let pat = self.parse_pat();

@@ -1882,7 +1882,7 @@ impl<'a> Parser<'a> {
                 token::BINOP(token::SHR) => { return res; }
                 _ => {
                     let msg = format!("expected `,` or `>` after lifetime \
-                                       name, got: {:?}",
+                                       name, got: {}",
                                       self.token);
                     self.fatal(msg.as_slice());
                 }
@@ -4711,7 +4711,7 @@ impl<'a> Parser<'a> {
                 attrs = attrs_remaining.clone().append(attrs.as_slice());
                 first = false;
             }
-            debug!("parse_mod_items: parse_item_or_view_item(attrs={:?})",
+            debug!("parse_mod_items: parse_item_or_view_item(attrs={})",
                    attrs);
             match self.parse_item_or_view_item(attrs,
                                                true /* macros allowed */) {