Auto merge of #23682 - tamird:DRY-is-empty, r=alexcrichton
r? @alexcrichton
commit 288809c8f3
104 changed files with 262 additions and 259 deletions
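For readers skimming the diff below, the whole change applies one idiom: wherever the code compared `len()` against zero, it now calls `is_empty()` directly. A minimal sketch of the before/after follows; the `items` vector is hypothetical and not taken from the patch itself.

```rust
// Illustrative only: demonstrates the rewrite applied across the tree,
// not code from the patch.
fn main() {
    let items: Vec<u32> = Vec::new();

    // The pattern being replaced throughout this commit:
    if items.len() == 0 {
        println!("empty (checked via len() == 0)");
    }

    // The form the commit rewrites it to:
    if items.is_empty() {
        println!("empty (checked via is_empty())");
    }
}
```

`is_empty()` states the intent directly, which is the point of the DRY cleanup in this PR.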
@@ -368,7 +368,7 @@ pub fn make_metrics_test_closure(config: &Config, testfile: &Path) -> test::Test
 fn extract_gdb_version(full_version_line: Option<String>) -> Option<String> {
 match full_version_line {
 Some(ref full_version_line)
-if full_version_line.trim().len() > 0 => {
+if !full_version_line.trim().is_empty() => {
 let full_version_line = full_version_line.trim();

 // used to be a regex "(^|[^0-9])([0-9]\.[0-9])([^0-9]|$)"
@@ -408,7 +408,7 @@ fn extract_lldb_version(full_version_line: Option<String>) -> Option<String> {

 match full_version_line {
 Some(ref full_version_line)
-if full_version_line.trim().len() > 0 => {
+if !full_version_line.trim().is_empty() => {
 let full_version_line = full_version_line.trim();

 for (pos, l) in full_version_line.char_indices() {
@@ -426,7 +426,7 @@ fn extract_lldb_version(full_version_line: Option<String>) -> Option<String> {
 let vers = full_version_line[pos + 5..].chars().take_while(|c| {
 c.is_digit(10)
 }).collect::<String>();
-if vers.len() > 0 { return Some(vers) }
+if !vers.is_empty() { return Some(vers) }
 }
 println!("Could not extract LLDB version from line '{}'",
 full_version_line);
@@ -864,7 +864,7 @@ fn check_debugger_output(debugger_run_result: &ProcRes, check_lines: &[String])
 }
 first = false;
 }
-if !failed && rest.len() == 0 {
+if !failed && rest.is_empty() {
 i += 1;
 }
 if i == num_check_lines {
@@ -1662,7 +1662,7 @@ fn _arm_push_aux_shared_library(config: &Config, testfile: &Path) {
 // codegen tests (vs. clang)

 fn append_suffix_to_stem(p: &Path, suffix: &str) -> PathBuf {
-if suffix.len() == 0 {
+if suffix.is_empty() {
 p.to_path_buf()
 } else {
 let mut stem = p.file_stem().unwrap().to_os_string();
@@ -3788,7 +3788,7 @@ its type parameters are types:

 ```ignore
 fn map<A: Clone, B: Clone>(f: |A| -> B, xs: &[A]) -> Vec<B> {
-if xs.len() == 0 {
+if xs.is_empty() {
 return vec![];
 }
 let first: B = f(xs[0].clone());
@@ -692,7 +692,7 @@ mod stack {
 // We've reached the root, so no matter what, we're done. We manually
 // access the root via the tree itself to avoid creating any dangling
 // pointers.
-if self.map.root.len() == 0 && !self.map.root.is_leaf() {
+if self.map.root.is_empty() && !self.map.root.is_leaf() {
 // We've emptied out the root, so make its only child the new root.
 // If it's a leaf, we just let it become empty.
 self.map.depth -= 1;
@@ -585,6 +585,9 @@ impl <K, V> Node<K, V> {
 self._len
 }

+/// Does the node not contain any key-value pairs
+pub fn is_empty(&self) -> bool { self.len() == 0 }
+
 /// How many key-value pairs the node can fit
 pub fn capacity(&self) -> usize {
 self._capacity
@@ -1097,7 +1100,7 @@ impl<K, V> Node<K, V> {
 /// When a node has no keys or values and only a single edge, extract that edge.
 pub fn hoist_lone_child(&mut self) {
 // Necessary for correctness, but in a private module
-debug_assert!(self.len() == 0);
+debug_assert!(self.is_empty());
 debug_assert!(!self.is_leaf());

 unsafe {
@@ -1225,7 +1228,7 @@ impl<K, V> Node<K, V> {
 /// because we have one too many, and our parent now has one too few
 fn split(&mut self) -> (K, V, Node<K, V>) {
 // Necessary for correctness, but in a private function
-debug_assert!(self.len() > 0);
+debug_assert!(!self.is_empty());

 let mut right = if self.is_leaf() {
 Node::new_leaf(self.capacity())
@@ -227,7 +227,7 @@ impl CharExt for char {
 #[inline]
 pub fn encode_utf8_raw(code: u32, dst: &mut [u8]) -> Option<usize> {
 // Marked #[inline] to allow llvm optimizing it away
-if code < MAX_ONE_B && dst.len() >= 1 {
+if code < MAX_ONE_B && !dst.is_empty() {
 dst[0] = code as u8;
 Some(1)
 } else if code < MAX_TWO_B && dst.len() >= 2 {
@@ -258,7 +258,7 @@ pub fn encode_utf8_raw(code: u32, dst: &mut [u8]) -> Option<usize> {
 #[inline]
 pub fn encode_utf16_raw(mut ch: u32, dst: &mut [u16]) -> Option<usize> {
 // Marked #[inline] to allow llvm optimizing it away
-if (ch & 0xFFFF) == ch && dst.len() >= 1 {
+if (ch & 0xFFFF) == ch && !dst.is_empty() {
 // The BMP falls through (assuming non-surrogate, as it should)
 dst[0] = ch as u16;
 Some(1)
@@ -204,7 +204,7 @@ impl<T> SliceExt for [T] {

 #[inline]
 fn first(&self) -> Option<&T> {
-if self.len() == 0 { None } else { Some(&self[0]) }
+if self.is_empty() { None } else { Some(&self[0]) }
 }

 #[inline]
@@ -217,7 +217,7 @@ impl<T> SliceExt for [T] {

 #[inline]
 fn last(&self) -> Option<&T> {
-if self.len() == 0 { None } else { Some(&self[self.len() - 1]) }
+if self.is_empty() { None } else { Some(&self[self.len() - 1]) }
 }

 #[inline]
@@ -296,7 +296,7 @@ impl<T> SliceExt for [T] {

 #[inline]
 fn first_mut(&mut self) -> Option<&mut T> {
-if self.len() == 0 { None } else { Some(&mut self[0]) }
+if self.is_empty() { None } else { Some(&mut self[0]) }
 }

 #[inline]
@@ -1306,7 +1306,7 @@ impl<'a, T> Iterator for Chunks<'a, T> {

 #[inline]
 fn next(&mut self) -> Option<&'a [T]> {
-if self.v.len() == 0 {
+if self.v.is_empty() {
 None
 } else {
 let chunksz = cmp::min(self.v.len(), self.size);
@@ -1318,7 +1318,7 @@ impl<'a, T> Iterator for Chunks<'a, T> {

 #[inline]
 fn size_hint(&self) -> (usize, Option<usize>) {
-if self.v.len() == 0 {
+if self.v.is_empty() {
 (0, Some(0))
 } else {
 let n = self.v.len() / self.size;
@@ -1333,7 +1333,7 @@ impl<'a, T> Iterator for Chunks<'a, T> {
 impl<'a, T> DoubleEndedIterator for Chunks<'a, T> {
 #[inline]
 fn next_back(&mut self) -> Option<&'a [T]> {
-if self.v.len() == 0 {
+if self.v.is_empty() {
 None
 } else {
 let remainder = self.v.len() % self.size;
@@ -1384,7 +1384,7 @@ impl<'a, T> Iterator for ChunksMut<'a, T> {

 #[inline]
 fn next(&mut self) -> Option<&'a mut [T]> {
-if self.v.len() == 0 {
+if self.v.is_empty() {
 None
 } else {
 let sz = cmp::min(self.v.len(), self.chunk_size);
@@ -1397,7 +1397,7 @@ impl<'a, T> Iterator for ChunksMut<'a, T> {

 #[inline]
 fn size_hint(&self) -> (usize, Option<usize>) {
-if self.v.len() == 0 {
+if self.v.is_empty() {
 (0, Some(0))
 } else {
 let n = self.v.len() / self.chunk_size;
@@ -1412,7 +1412,7 @@ impl<'a, T> Iterator for ChunksMut<'a, T> {
 impl<'a, T> DoubleEndedIterator for ChunksMut<'a, T> {
 #[inline]
 fn next_back(&mut self) -> Option<&'a mut [T]> {
-if self.v.len() == 0 {
+if self.v.is_empty() {
 None
 } else {
 let remainder = self.v.len() % self.chunk_size;
@@ -1119,7 +1119,7 @@ enum OldSearcher {
 impl OldSearcher {
 #[allow(dead_code)]
 fn new(haystack: &[u8], needle: &[u8]) -> OldSearcher {
-if needle.len() == 0 {
+if needle.is_empty() {
 // Handle specially
 unimplemented!()
 // FIXME: Tune this.
@@ -457,7 +457,7 @@ fn str_search_step<F, G>(mut m: &mut StrSearcher,
 {
 if m.state.done() {
 SearchStep::Done
-} else if m.needle.len() == 0 && m.start <= m.end {
+} else if m.needle.is_empty() && m.start <= m.end {
 // Case for needle == ""
 if let State::Reject(a, b) = m.state.take() {
 SearchStep::Reject(a, b)
@@ -371,7 +371,7 @@ impl<'a> Parser<'a> {
 None => {
 let tmp = self.cur.clone();
 match self.word() {
-word if word.len() > 0 => {
+word if !word.is_empty() => {
 if self.consume('$') {
 CountIsName(word)
 } else {
@@ -463,7 +463,7 @@ mod tests {
 fn musterr(s: &str) {
 let mut p = Parser::new(s);
 p.next();
-assert!(p.errors.len() != 0);
+assert!(!p.errors.is_empty());
 }

 #[test]
@@ -804,7 +804,7 @@ fn format_option(opt: &OptGroup) -> String {
 }

 // Use short_name is possible, but fallback to long_name.
-if opt.short_name.len() > 0 {
+if !opt.short_name.is_empty() {
 line.push('-');
 line.push_str(&opt.short_name[..]);
 } else {
@@ -45,7 +45,7 @@ pub fn parse_logging_spec(spec: &str) -> (Vec<LogDirective>, Option<String>) {
 return (dirs, None);
 }
 mods.map(|m| { for s in m.split(',') {
-if s.len() == 0 { continue }
+if s.is_empty() { continue }
 let mut parts = s.split('=');
 let (log_level, name) = match (parts.next(), parts.next().map(|s| s.trim()), parts.next()) {
 (Some(part0), None, None) => {
@@ -80,7 +80,7 @@ pub fn validate_crate_name(sess: Option<&Session>, s: &str, sp: Option<Span>) {
 (None, Some(sess)) => sess.err(s),
 }
 };
-if s.len() == 0 {
+if s.is_empty() {
 say("crate name must not be empty");
 }
 for c in s.chars() {
@@ -767,7 +767,7 @@ pub fn get_enum_variants<'tcx>(intr: Rc<IdentInterner>, cdata: Cmd, id: ast::Nod
 get_type(cdata, field_ty.id.node, tcx).ty
 })
 .collect();
-let arg_names = if arg_names.len() == 0 { None } else { Some(arg_names) };
+let arg_names = if arg_names.is_empty() { None } else { Some(arg_names) };

 (None, arg_tys, arg_names)
 }
@@ -1383,7 +1383,7 @@ pub fn get_dylib_dependency_formats(cdata: Cmd)

 debug!("found dylib deps: {}", formats.as_str_slice());
 for spec in formats.as_str_slice().split(',') {
-if spec.len() == 0 { continue }
+if spec.is_empty() { continue }
 let cnum = spec.split(':').nth(0).unwrap();
 let link = spec.split(':').nth(1).unwrap();
 let cnum: ast::CrateNum = cnum.parse().unwrap();
@@ -469,7 +469,7 @@ fn each_auxiliary_node_id<F>(item: &ast::Item, callback: F) -> bool where
 ast::ItemStruct(ref struct_def, _) => {
 // If this is a newtype struct, return the constructor.
 match struct_def.ctor_id {
-Some(ctor_id) if struct_def.fields.len() > 0 &&
+Some(ctor_id) if !struct_def.fields.is_empty() &&
 struct_def.fields[0].node.kind.is_unnamed() => {
 continue_ = callback(ctor_id);
 }
@@ -1751,7 +1751,7 @@ fn encode_codemap(ecx: &EncodeContext, rbml_w: &mut Encoder) {

 for filemap in &codemap.files.borrow()[..] {

-if filemap.lines.borrow().len() == 0 || filemap.is_imported() {
+if filemap.lines.borrow().is_empty() || filemap.is_imported() {
 // No need to export empty filemaps, as they can't contain spans
 // that need translation.
 // Also no need to re-export imported filemaps, as any downstream
@@ -307,13 +307,13 @@ impl<'a> Context<'a> {
 }

 pub fn report_load_errs(&mut self) {
-let message = if self.rejected_via_hash.len() > 0 {
+let message = if !self.rejected_via_hash.is_empty() {
 format!("found possibly newer version of crate `{}`",
 self.ident)
-} else if self.rejected_via_triple.len() > 0 {
+} else if !self.rejected_via_triple.is_empty() {
 format!("couldn't find crate `{}` with expected target triple {}",
 self.ident, self.triple)
-} else if self.rejected_via_kind.len() > 0 {
+} else if !self.rejected_via_kind.is_empty() {
 format!("found staticlib `{}` instead of rlib or dylib", self.ident)
 } else {
 format!("can't find crate for `{}`", self.ident)
@@ -325,7 +325,7 @@ impl<'a> Context<'a> {
 };
 self.sess.span_err(self.span, &message[..]);

-if self.rejected_via_triple.len() > 0 {
+if !self.rejected_via_triple.is_empty() {
 let mismatches = self.rejected_via_triple.iter();
 for (i, &CrateMismatch{ ref path, ref got }) in mismatches.enumerate() {
 self.sess.fileline_note(self.span,
@@ -333,7 +333,7 @@ impl<'a> Context<'a> {
 self.ident, i+1, got, path.display()));
 }
 }
-if self.rejected_via_hash.len() > 0 {
+if !self.rejected_via_hash.is_empty() {
 self.sess.span_note(self.span, "perhaps this crate needs \
 to be recompiled?");
 let mismatches = self.rejected_via_hash.iter();
@@ -353,7 +353,7 @@ impl<'a> Context<'a> {
 }
 }
 }
-if self.rejected_via_kind.len() > 0 {
+if !self.rejected_via_kind.is_empty() {
 self.sess.fileline_help(self.span, "please recompile this crate using \
 --crate-type lib");
 let mismatches = self.rejected_via_kind.iter();
@@ -517,7 +517,7 @@ impl<'a> Context<'a> {
 // library's metadata sections. In theory we should
 // read both, but reading dylib metadata is quite
 // slow.
-if m.len() == 0 {
+if m.is_empty() {
 return None
 } else if m.len() == 1 {
 return Some(m.into_iter().next().unwrap())
@@ -239,7 +239,7 @@ fn check_for_bindings_named_the_same_as_variants(cx: &MatchCheckCtxt, pat: &Pat)
 if let Some(DefLocal(_)) = def {
 if ty::enum_variants(cx.tcx, def_id).iter().any(|variant|
 token::get_name(variant.name) == token::get_name(ident.node.name)
-&& variant.args.len() == 0
+&& variant.args.is_empty()
 ) {
 span_warn!(cx.tcx.sess, p.span, E0170,
 "pattern binding `{}` is named the same as one \
@@ -636,19 +636,19 @@ fn is_useful(cx: &MatchCheckCtxt,
 -> Usefulness {
 let &Matrix(ref rows) = matrix;
 debug!("{:?}", matrix);
-if rows.len() == 0 {
+if rows.is_empty() {
 return match witness {
 ConstructWitness => UsefulWithWitness(vec!()),
 LeaveOutWitness => Useful
 };
 }
-if rows[0].len() == 0 {
+if rows[0].is_empty() {
 return NotUseful;
 }
 assert!(rows.iter().all(|r| r.len() == v.len()));
 let real_pat = match rows.iter().find(|r| (*r)[0].id != DUMMY_NODE_ID) {
 Some(r) => raw_pat(r[0]),
-None if v.len() == 0 => return NotUseful,
+None if v.is_empty() => return NotUseful,
 None => v[0]
 };
 let left_ty = if real_pat.id == DUMMY_NODE_ID {
@@ -182,7 +182,7 @@ impl<'a, 'tcx> MarkSymbolVisitor<'a, 'tcx> {

 fn mark_live_symbols(&mut self) {
 let mut scanned = HashSet::new();
-while self.worklist.len() > 0 {
+while !self.worklist.is_empty() {
 let id = self.worklist.pop().unwrap();
 if scanned.contains(&id) {
 continue
@@ -966,7 +966,7 @@ impl<'a, 'tcx> Rebuilder<'a, 'tcx> {
 fn pick_lifetime(&self,
 region_names: &HashSet<ast::Name>)
 -> (ast::Lifetime, FreshOrKept) {
-if region_names.len() > 0 {
+if !region_names.is_empty() {
 // It's not necessary to convert the set of region names to a
 // vector of string and then sort them. However, it makes the
 // choice of lifetime name deterministic and thus easier to test.
@@ -1241,7 +1241,7 @@ impl<'a, 'tcx> Rebuilder<'a, 'tcx> {
 let lifetimes =
 path.segments.last().unwrap().parameters.lifetimes();
 let mut insert = Vec::new();
-if lifetimes.len() == 0 {
+if lifetimes.is_empty() {
 let anon = self.cur_anon.get();
 for (i, a) in (anon..anon+expected).enumerate() {
 if anon_nums.contains(&a) {
@@ -1361,7 +1361,7 @@ impl<'a, 'tcx> Rebuilder<'a, 'tcx> {

 ast::AngleBracketedParameters(ref data) => {
 let mut new_lts = Vec::new();
-if data.lifetimes.len() == 0 {
+if data.lifetimes.is_empty() {
 // traverse once to see if there's a need to insert lifetime
 let need_insert = (0..expected).any(|i| {
 indexes.contains(&i)
@@ -88,7 +88,7 @@ pub fn maybe_print_constraints_for<'a, 'tcx>(region_vars: &RegionVarBindings<'a,
 Err(_) => "/tmp/constraints.node%.dot".to_string(),
 };

-if output_template.len() == 0 {
+if output_template.is_empty() {
 tcx.sess.bug("empty string provided as RUST_REGION_GRAPH");
 }

@@ -246,7 +246,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> {
 }

 fn in_snapshot(&self) -> bool {
-self.undo_log.borrow().len() > 0
+!self.undo_log.borrow().is_empty()
 }

 pub fn start_snapshot(&self) -> RegionSnapshot {
@@ -716,7 +716,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> {
 None => {
 // Vanilla 'break' or 'loop', so use the enclosing
 // loop scope
-if self.loop_scope.len() == 0 {
+if self.loop_scope.is_empty() {
 self.ir.tcx.sess.span_bug(sp, "break outside loop");
 } else {
 *self.loop_scope.last().unwrap()
@@ -1527,7 +1527,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> {
 // for nil return types, it is ok to not return a value expl.
 } else {
 let ends_with_stmt = match body.expr {
-None if body.stmts.len() > 0 =>
+None if !body.stmts.is_empty() =>
 match body.stmts.first().unwrap().node {
 ast::StmtSemi(ref e, _) => {
 ty::expr_ty(self.ir.tcx, &**e) == t_ret
@@ -1586,7 +1586,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> {

 fn should_warn(&self, var: Variable) -> Option<String> {
 let name = self.ir.variable_name(var);
-if name.len() == 0 || name.as_bytes()[0] == ('_' as u8) {
+if name.is_empty() || name.as_bytes()[0] == ('_' as u8) {
 None
 } else {
 Some(name)
@@ -227,7 +227,7 @@ impl<'a, 'v> Visitor<'v> for LifetimeContext<'a> {
 ref bounds,
 ref bound_lifetimes,
 .. }) => {
-if bound_lifetimes.len() > 0 {
+if !bound_lifetimes.is_empty() {
 self.trait_ref_hack = true;
 let result = self.with(LateScope(bound_lifetimes, self.scope),
 |old_scope, this| {
@@ -267,7 +267,7 @@ impl<'a, 'v> Visitor<'v> for LifetimeContext<'a> {
 _modifier: &ast::TraitBoundModifier) {
 debug!("visit_poly_trait_ref trait_ref={:?}", trait_ref);

-if !self.trait_ref_hack || trait_ref.bound_lifetimes.len() > 0 {
+if !self.trait_ref_hack || !trait_ref.bound_lifetimes.is_empty() {
 if self.trait_ref_hack {
 println!("{:?}", trait_ref.span);
 span_err!(self.sess, trait_ref.span, E0316,
@@ -361,7 +361,7 @@ impl<T> VecPerParamSpace<T> {
 pub fn get_self<'a>(&'a self) -> Option<&'a T> {
 let v = self.get_slice(SelfSpace);
 assert!(v.len() <= 1);
-if v.len() == 0 { None } else { Some(&v[0]) }
+if v.is_empty() { None } else { Some(&v[0]) }
 }

 pub fn len(&self, space: ParamSpace) -> usize {
@@ -298,7 +298,7 @@ impl<'tcx> FulfillmentContext<'tcx> {
 self.predicates.len(),
 errors.len());

-if errors.len() == 0 {
+if errors.is_empty() {
 Ok(())
 } else {
 Err(errors)
@@ -698,7 +698,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
 // is checked for in `evaluate_stack` (and hence users
 // who might care about this case, like coherence, should use
 // that function).
-if candidates.len() == 0 {
+if candidates.is_empty() {
 return Err(Unimplemented);
 }

@@ -873,7 +873,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
 try!(self.assemble_candidates_from_caller_bounds(stack, &mut candidates));
 // Default implementations have lower priority, so we only
 // consider triggering a default if there is no other impl that can apply.
-if candidates.vec.len() == 0 {
+if candidates.vec.is_empty() {
 try!(self.assemble_candidates_from_default_impls(obligation, &mut candidates));
 }
 debug!("candidate list size: {}", candidates.vec.len());
@@ -3036,7 +3036,7 @@ pub fn mk_trait<'tcx>(cx: &ctxt<'tcx>,
 }

 fn bound_list_is_sorted(bounds: &[ty::PolyProjectionPredicate]) -> bool {
-bounds.len() == 0 ||
+bounds.is_empty() ||
 bounds[1..].iter().enumerate().all(
 |(index, bound)| bounds[index].sort_key() <= bound.sort_key())
 }
@@ -3665,7 +3665,7 @@ pub fn type_contents<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>) -> TypeContents {
 res = res | TC::OwnsDtor;
 }

-if variants.len() != 0 {
+if !variants.is_empty() {
 let repr_hints = lookup_repr_hints(cx, did);
 if repr_hints.len() > 1 {
 // this is an error later on, but this type isn't safe
@@ -3687,7 +3687,7 @@ pub fn type_contents<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>) -> TypeContents {
 if variants.len() == 2 {
 let mut data_idx = 0;

-if variants[0].args.len() == 0 {
+if variants[0].args.is_empty() {
 data_idx = 1;
 }

@@ -4200,10 +4200,10 @@ pub fn type_is_c_like_enum(cx: &ctxt, ty: Ty) -> bool {
 match ty.sty {
 ty_enum(did, _) => {
 let variants = enum_variants(cx, did);
-if variants.len() == 0 {
+if variants.is_empty() {
 false
 } else {
-variants.iter().all(|v| v.args.len() == 0)
+variants.iter().all(|v| v.args.is_empty())
 }
 }
 _ => false
@@ -4654,7 +4654,7 @@ pub fn expr_kind(tcx: &ctxt, expr: &ast::Expr) -> ExprKind {
 match resolve_expr(tcx, expr) {
 def::DefVariant(tid, vid, _) => {
 let variant_info = enum_variant_with_id(tcx, tid, vid);
-if variant_info.args.len() > 0 {
+if !variant_info.args.is_empty() {
 // N-ary variant.
 RvalueDatumExpr
 } else {
@@ -5259,7 +5259,7 @@ impl<'tcx> VariantInfo<'tcx> {

 match ast_variant.node.kind {
 ast::TupleVariantKind(ref args) => {
-let arg_tys = if args.len() > 0 {
+let arg_tys = if !args.is_empty() {
 // the regions in the argument types come from the
 // enum def'n, and hence will all be early bound
 ty::no_late_bound_regions(cx, &ty_fn_args(ctor_ty)).unwrap()
@@ -5280,7 +5280,7 @@ impl<'tcx> VariantInfo<'tcx> {
 ast::StructVariantKind(ref struct_def) => {
 let fields: &[StructField] = &struct_def.fields;

-assert!(fields.len() > 0);
+assert!(!fields.is_empty());

 let arg_tys = struct_def.fields.iter()
 .map(|field| node_id_to_type(cx, field.node.id)).collect();
@@ -544,7 +544,7 @@ pub fn super_relate_tys<'a,'tcx:'a,R>(relation: &mut R,
 .map(|(a, b)| relation.relate(a, b))
 .collect::<Result<_, _>>());
 Ok(ty::mk_tup(tcx, ts))
-} else if as_.len() != 0 && bs.len() != 0 {
+} else if !(as_.is_empty() || bs.is_empty()) {
 Err(ty::terr_tuple_size(
 expected_found(relation, &as_.len(), &bs.len())))
 } else {
@@ -902,7 +902,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
 };
 output_types.sort();
 output_types.dedup();
-if output_types.len() == 0 {
+if output_types.is_empty() {
 output_types.push(OutputTypeExe);
 }

@@ -558,7 +558,7 @@ pub fn parameterized<'tcx,GG>(cx: &ctxt<'tcx>,
 &strs[0][..]
 },
 tail)
-} else if strs.len() > 0 {
+} else if !strs.is_empty() {
 format!("{}<{}>", base, strs.connect(", "))
 } else {
 format!("{}", base)
@@ -1269,7 +1269,7 @@ impl<'tcx, T> UserString<'tcx> for ty::Binder<T>
 let names: Vec<_> = names.iter().map(|s| &s[..]).collect();

 let value_str = unbound_value.user_string(tcx);
-if names.len() == 0 {
+if names.is_empty() {
 value_str
 } else {
 format!("for<{}> {}", names.connect(","), value_str)
@@ -50,7 +50,7 @@ impl TempDir {
 let mut rng = thread_rng();
 for _ in 0..NUM_RETRIES {
 let suffix: String = rng.gen_ascii_chars().take(NUM_RAND_CHARS).collect();
-let leaf = if prefix.len() > 0 {
+let leaf = if !prefix.is_empty() {
 format!("{}.{}", prefix, suffix)
 } else {
 // If we're given an empty string for a prefix, then creating a
@@ -887,9 +887,9 @@ pub fn collect_crate_types(session: &Session,
 // command line, then reuse the empty `base` Vec to hold the types that
 // will be found in crate attributes.
 let mut base = session.opts.crate_types.clone();
-if base.len() == 0 {
+if base.is_empty() {
 base.extend(attr_types.into_iter());
-if base.len() == 0 {
+if base.is_empty() {
 base.push(link::default_output_for_target(session));
 }
 base.sort();
@@ -425,7 +425,7 @@ impl RustcDefaultCalls {
 odir: &Option<PathBuf>,
 ofile: &Option<PathBuf>)
 -> Compilation {
-if sess.opts.prints.len() == 0 {
+if sess.opts.prints.is_empty() {
 return Compilation::Continue;
 }

@@ -676,7 +676,7 @@ fn print_flowgraph<W: Write>(variants: Vec<borrowck_dot::Variant>,
 };

 match code {
-_ if variants.len() == 0 => {
+_ if variants.is_empty() => {
 let r = dot::render(&lcfg, &mut out);
 return expand_err_details(r);
 }
@@ -778,7 +778,7 @@ impl NonCamelCaseTypes {

 // start with a non-lowercase letter rather than non-uppercase
 // ones (some scripts don't have a concept of upper/lowercase)
-ident.len() > 0 && !ident.char_at(0).is_lowercase() && !ident.contains('_')
+!ident.is_empty() && !ident.char_at(0).is_lowercase() && !ident.contains('_')
 }

 fn to_camel_case(s: &str) -> String {
@@ -1900,7 +1900,7 @@ impl LintPass for UnconditionalRecursion {
 // doesn't return (e.g. calls a `-> !` function or `loop { /*
 // no break */ }`) shouldn't be linted unless it actually
 // recurs.
-if !reached_exit_without_self_call && self_call_spans.len() > 0 {
+if !reached_exit_without_self_call && !self_call_spans.is_empty() {
 cx.span_lint(UNCONDITIONAL_RECURSION, sp,
 "function cannot return without recurring");

@@ -1055,7 +1055,7 @@ impl<'a, 'tcx> SanePrivacyVisitor<'a, 'tcx> {
 let check_inherited = |sp: Span, vis: ast::Visibility, note: &str| {
 if vis != ast::Inherited {
 tcx.sess.span_err(sp, "unnecessary visibility qualifier");
-if note.len() > 0 {
+if !note.is_empty() {
 tcx.sess.span_note(sp, note);
 }
 }
@@ -767,7 +767,7 @@ impl<'a, 'b:'a, 'tcx:'b> GraphBuilder<'a, 'b, 'tcx> {
 f.name
 }).collect::<Vec<_>>();

-if fields.len() == 0 {
+if fields.is_empty() {
 child_name_bindings.define_value(def, DUMMY_SP, modifiers);
 }

@@ -2172,7 +2172,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
 // check that all of the arms in an or-pattern have exactly the
 // same set of bindings, with the same binding modes for each.
 fn check_consistent_bindings(&mut self, arm: &Arm) {
-if arm.pats.len() == 0 {
+if arm.pats.is_empty() {
 return
 }
 let map_0 = self.binding_mode_map(&*arm.pats[0]);
@@ -3072,7 +3072,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
 }
 }

-if values.len() > 0 &&
+if !values.is_empty() &&
 values[smallest] != usize::MAX &&
 values[smallest] < name.len() + 2 &&
 values[smallest] <= max_distance &&
@@ -3228,7 +3228,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
 format!("to call `{}::{}`", path_str, path_name)
 };

-if msg.len() > 0 {
+if !msg.is_empty() {
 msg = format!(". Did you mean {}?", msg)
 }

@@ -3522,7 +3522,7 @@ fn module_to_string(module: &Module) -> String {
 }
 collect_mod(&mut names, module);

-if names.len() == 0 {
+if names.is_empty() {
 return "???".to_string();
 }
 names_to_string(&names.into_iter().rev().collect::<Vec<ast::Name>>())
@@ -304,7 +304,7 @@ impl<'a, 'b:'a, 'tcx:'b> ImportResolver<'a, 'b, 'tcx> {
 module_to_string(&*module_));

 // First, resolve the module path for the directive, if necessary.
-let container = if module_path.len() == 0 {
+let container = if module_path.is_empty() {
 // Use the crate root.
 Some((self.resolver.graph_root.get_module(), LastMod(AllPublic)))
 } else {
@@ -269,7 +269,7 @@ pub fn sanitize(s: &str) -> String {
 }

 // Underscore-qualify anything that didn't start as an ident.
-if result.len() > 0 &&
+if !result.is_empty() &&
 result.as_bytes()[0] != '_' as u8 &&
 ! (result.as_bytes()[0] as char).is_xid_start() {
 return format!("_{}", &result[..]);
@@ -463,7 +463,7 @@ pub fn filename_for_input(sess: &Session,
 }
 config::CrateTypeExecutable => {
 let suffix = &sess.target.target.options.exe_suffix;
-if suffix.len() == 0 {
+if suffix.is_empty() {
 out_filename.to_path_buf()
 } else {
 out_filename.with_extension(&suffix[1..])
@@ -263,7 +263,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {

 fn process_formals(&mut self, formals: &Vec<ast::Arg>, qualname: &str) {
 for arg in formals {
-assert!(self.collected_paths.len() == 0 && !self.collecting);
+assert!(self.collected_paths.is_empty() && !self.collecting);
 self.collecting = true;
 self.visit_pat(&*arg.pat);
 self.collecting = false;
@@ -1119,7 +1119,7 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> {
 let glob_map = glob_map.as_ref().unwrap();
 if glob_map.contains_key(&item.id) {
 for n in glob_map.get(&item.id).unwrap() {
-if name_string.len() > 0 {
+if !name_string.is_empty() {
 name_string.push_str(", ");
 }
 name_string.push_str(n.as_str());
@@ -1394,7 +1394,7 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> {
 }

 fn visit_arm(&mut self, arm: &ast::Arm) {
-assert!(self.collected_paths.len() == 0 && !self.collecting);
+assert!(self.collected_paths.is_empty() && !self.collecting);
 self.collecting = true;
 for pattern in &arm.pats {
 // collect paths from the arm's patterns
@@ -1462,7 +1462,7 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> {

 // The local could declare multiple new vars, we must walk the
 // pattern and collect them all.
-assert!(self.collected_paths.len() == 0 && !self.collecting);
+assert!(self.collected_paths.is_empty() && !self.collecting);
 self.collecting = true;
 self.visit_pat(&*l.pat);
 self.collecting = false;
@@ -988,7 +988,7 @@ fn compile_submatch<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
 let _indenter = indenter();
 let _icx = push_ctxt("match::compile_submatch");
 let mut bcx = bcx;
-if m.len() == 0 {
+if m.is_empty() {
 if chk.is_fallible() {
 chk.handle_fail(bcx);
 }
@@ -1112,7 +1112,7 @@ fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
 let mut kind = NoBranch;
 let mut test_val = val;
 debug!("test_val={}", bcx.val_to_string(test_val));
-if opts.len() > 0 {
+if !opts.is_empty() {
 match opts[0] {
 ConstantValue(..) | ConstantRange(..) => {
 test_val = load_if_immediate(bcx, val, left_ty);
@@ -1152,7 +1152,7 @@ fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
 };

 let defaults = enter_default(else_cx, dm, m, col, val);
-let exhaustive = chk.is_infallible() && defaults.len() == 0;
+let exhaustive = chk.is_infallible() && defaults.is_empty();
 let len = opts.len();

 // Compile subtrees for each option
@@ -235,7 +235,7 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,

 let dtor = ty::ty_dtor(cx.tcx(), def_id).has_drop_flag();

-if cases.len() == 0 {
+if cases.is_empty() {
 // Uninhabitable; represent as unit
 // (Typechecking will reject discriminant-sizing attrs.)
 assert_eq!(hint, attr::ReprAny);
@@ -244,7 +244,7 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
 dtor_to_init_u8(dtor));
 }

-if !dtor && cases.iter().all(|c| c.tys.len() == 0) {
+if !dtor && cases.iter().all(|c| c.tys.is_empty()) {
 // All bodies empty -> intlike
 let discrs: Vec<u64> = cases.iter().map(|c| c.discr).collect();
 let bounds = IntBounds {
@@ -2392,7 +2392,7 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef {
 ccx.sess().bug("struct variant kind unexpected in get_item_val")
 }
 };
-assert!(args.len() != 0);
+assert!(!args.is_empty());
 let ty = ty::node_id_to_type(ccx.tcx(), id);
 let parent = ccx.tcx().map.get_parent(id);
 let enm = ccx.tcx().map.expect_item(parent);
@@ -70,7 +70,7 @@ trait ClassList {

 impl ClassList for [RegClass] {
 fn is_pass_byval(&self) -> bool {
-if self.len() == 0 { return false; }
+if self.is_empty() { return false; }

 let class = self[0];
 class == Memory
@@ -79,7 +79,7 @@ impl ClassList for [RegClass] {
 }

 fn is_ret_bysret(&self) -> bool {
-if self.len() == 0 { return false; }
+if self.is_empty() { return false; }

 self[0] == Memory
 }
@@ -184,7 +184,7 @@ fn trans<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, expr: &ast::Expr)
 bcx.fcx.param_substs);

 // Nullary variants are not callable
-assert!(vinfo.args.len() > 0);
+assert!(!vinfo.args.is_empty());

 Callee {
 bcx: bcx,
@@ -495,7 +495,7 @@ pub fn trans_fn_ref_with_substs<'a, 'tcx>(

 match map_node {
 ast_map::NodeVariant(v) => match v.node.kind {
-ast::TupleVariantKind(ref args) => args.len() > 0,
+ast::TupleVariantKind(ref args) => !args.is_empty(),
 _ => false
 },
 ast_map::NodeStructCtor(_) => true,
@@ -781,7 +781,7 @@ fn const_expr_unadjusted<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
 let vinfo = ty::enum_variant_with_id(cx.tcx(),
 enum_did,
 variant_did);
-if vinfo.args.len() > 0 {
+if !vinfo.args.is_empty() {
 // N-ary variant.
 expr::trans_def_fn_unadjusted(cx, e, def, param_substs).val
 } else {
@@ -532,7 +532,7 @@ impl<'tcx> TypeMap<'tcx> {
 // Maybe check that there is no self type here.

 let tps = substs.types.get_slice(subst::TypeSpace);
-if tps.len() > 0 {
+if !tps.is_empty() {
 output.push('<');

 for &type_parameter in tps {
@@ -1102,7 +1102,7 @@ pub fn get_cleanup_debug_loc_for_ast_node<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
 if let Ok(code_snippet) = code_snippet {
 let bytes = code_snippet.as_bytes();

-if bytes.len() > 0 && &bytes[bytes.len()-1..] == b"}" {
+if !bytes.is_empty() && &bytes[bytes.len()-1..] == b"}" {
 cleanup_span = Span {
 lo: node_span.hi - codemap::BytePos(1),
 hi: node_span.hi,
@@ -2017,7 +2017,7 @@ struct StructMemberDescriptionFactory<'tcx> {
 impl<'tcx> StructMemberDescriptionFactory<'tcx> {
 fn create_member_descriptions<'a>(&self, cx: &CrateContext<'a, 'tcx>)
 -> Vec<MemberDescription> {
-if self.fields.len() == 0 {
+if self.fields.is_empty() {
 return Vec::new();
 }

@@ -2210,7 +2210,7 @@ impl<'tcx> EnumMemberDescriptionFactory<'tcx> {
 adt::Univariant(ref struct_def, _) => {
 assert!(self.variants.len() <= 1);

-if self.variants.len() == 0 {
+if self.variants.is_empty() {
 vec![]
 } else {
 let (variant_type_metadata,
@@ -3834,7 +3834,7 @@ fn push_debuginfo_type_name<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
 output.push_str("fn(");

 let sig = ty::erase_late_bound_regions(cx.tcx(), sig);
-if sig.inputs.len() > 0 {
+if !sig.inputs.is_empty() {
 for &parameter_type in &sig.inputs {
 push_debuginfo_type_name(cx, parameter_type, true, output);
 output.push_str(", ");
@@ -3844,7 +3844,7 @@ fn push_debuginfo_type_name<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
 }

 if sig.variadic {
-if sig.inputs.len() > 0 {
+if !sig.inputs.is_empty() {
 output.push_str(", ...");
 } else {
 output.push_str("...");
@@ -1135,7 +1135,7 @@ fn trans_def_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
 match def {
 def::DefVariant(tid, vid, _) => {
 let variant_info = ty::enum_variant_with_id(bcx.tcx(), tid, vid);
-if variant_info.args.len() > 0 {
+if !variant_info.args.is_empty() {
 // N-ary variant.
 let llfn = callee::trans_fn_ref(bcx.ccx(), vid,
 ExprId(ref_expr.id),
@@ -83,7 +83,7 @@ pub fn trans_impl(ccx: &CrateContext,
 for impl_item in impl_items {
 match impl_item.node {
 ast::MethodImplItem(ref sig, ref body) => {
-if sig.generics.ty_params.len() == 0 {
+if sig.generics.ty_params.is_empty() {
 let trans_everywhere = attr::requests_inline(&impl_item.attrs);
 for (ref ccx, is_origin) in ccx.maybe_iter(trans_everywhere) {
 let llfn = get_item_val(ccx, impl_item.id);
@@ -69,7 +69,7 @@ pub fn untuple_arguments_if_necessary<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
 return inputs.iter().cloned().collect()
 }

-if inputs.len() == 0 {
+if inputs.is_empty() {
 return Vec::new()
 }

@@ -1125,7 +1125,7 @@ fn one_bound_for_assoc_type<'tcx>(tcx: &ty::ctxt<'tcx>,
 span: Span)
 -> Result<ty::PolyTraitRef<'tcx>, ErrorReported>
 {
-if bounds.len() == 0 {
+if bounds.is_empty() {
 span_err!(tcx.sess, span, E0220,
 "associated type `{}` not found for `{}`",
 assoc_name,
@@ -2025,7 +2025,7 @@ fn compute_object_lifetime_bound<'tcx>(
 "only a single explicit lifetime bound is permitted");
 }

-if explicit_region_bounds.len() != 0 {
+if !explicit_region_bounds.is_empty() {
 // Explicitly specified region bound. Use that.
 let r = explicit_region_bounds[0];
 return ast_region_to_region(tcx, r);
@@ -2042,7 +2042,7 @@ fn compute_object_lifetime_bound<'tcx>(

 // If there are no derived region bounds, then report back that we
 // can find no region bound.
-if derived_region_bounds.len() == 0 {
+if derived_region_bounds.is_empty() {
 match rscope.object_lifetime_default(span) {
 Some(r) => { return r; }
 None => {
@@ -2130,11 +2130,11 @@ pub fn partition_bounds<'a>(tcx: &ty::ctxt,
 &mut builtin_bounds) {
 let segments = &b.trait_ref.path.segments;
 let parameters = &segments[segments.len() - 1].parameters;
-if parameters.types().len() > 0 {
+if !parameters.types().is_empty() {
 check_type_argument_count(tcx, b.trait_ref.path.span,
 parameters.types().len(), 0, 0);
 }
-if parameters.lifetimes().len() > 0 {
+if !parameters.lifetimes().is_empty() {
 report_lifetime_number_error(tcx, b.trait_ref.path.span,
 parameters.lifetimes().len(), 0);
 }
@@ -594,7 +594,7 @@ pub fn check_pat_enum<'a, 'tcx>(pcx: &pat_ctxt<'a, 'tcx>,
 for (subpat, arg_ty) in subpats.iter().zip(arg_tys.iter()) {
 check_pat(pcx, &**subpat, *arg_ty);
 }
-} else if arg_tys.len() == 0 {
+} else if arg_tys.is_empty() {
 span_err!(tcx.sess, pat.span, E0024,
 "this pattern has {} field{}, but the corresponding {} has no fields",
 subpats.len(), if subpats.len() == 1 {""} else {"s"}, kind_name);
@@ -67,7 +67,7 @@ pub fn report_error<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
 }
 }

-if static_sources.len() > 0 {
+if !static_sources.is_empty() {
 fcx.tcx().sess.fileline_note(
 span,
 "found defined static methods, maybe a `self` is missing?");
@@ -200,7 +200,7 @@ fn suggest_traits_to_import<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
 })
 .collect::<Vec<_>>();

-if candidates.len() > 0 {
+if !candidates.is_empty() {
 // sort from most relevant to least relevant
 candidates.sort_by(|a, b| a.cmp(b).reverse());
 candidates.dedup();
@@ -4258,7 +4258,7 @@ pub fn instantiate_path<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
 //
 // The first step then is to categorize the segments appropriately.

-assert!(segments.len() >= 1);
+assert!(!segments.is_empty());

 let mut ufcs_method = None;
 let mut segment_spaces: Vec<_>;
@@ -4480,7 +4480,7 @@ pub fn instantiate_path<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
 }
 }

-if data.bindings.len() > 0 {
+if !data.bindings.is_empty() {
 span_err!(fcx.tcx().sess, data.bindings[0].span, E0182,
 "unexpected binding of associated item in expression path \
 (only allowed in type paths)");
@@ -4719,7 +4719,7 @@ pub fn check_bounds_are_used<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
 tps.len(), ppaux::ty_to_string(ccx.tcx, ty));

 // make a vector of booleans initially false, set to true when used
-if tps.len() == 0 { return; }
+if tps.is_empty() { return; }
 let mut tps_used: Vec<_> = repeat(false).take(tps.len()).collect();

 ty::walk_ty(ty, |t| {
@@ -174,7 +174,7 @@ impl<'ccx, 'tcx> CheckTypeWellFormedVisitor<'ccx, 'tcx> {
 }

 // For DST, all intermediate types must be sized.
-if variant.fields.len() > 0 {
+if !variant.fields.is_empty() {
 for field in variant.fields.init() {
 fcx.register_builtin_bound(
 field.ty,
@@ -658,7 +658,7 @@ fn enum_variants<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
 enum_def.variants.iter()
 .map(|variant| {
 match variant.node.kind {
-ast::TupleVariantKind(ref args) if args.len() > 0 => {
+ast::TupleVariantKind(ref args) if !args.is_empty() => {
 let ctor_ty = ty::node_id_to_type(fcx.tcx(), variant.node.id);

 // the regions in the argument types come from the
@@ -387,7 +387,7 @@ impl<'a, 'tcx> CoherenceChecker<'a, 'tcx> {

 for &impl_did in &*trait_impls.borrow() {
 let items = impl_items.get(&impl_did).unwrap();
-if items.len() < 1 {
+if items.is_empty() {
 // We'll error out later. For now, just don't ICE.
 continue;
 }
@@ -576,7 +576,7 @@ fn get_enum_variant_types<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
 // Nullary enum constructors get turned into constants; n-ary enum
 // constructors get turned into functions.
 let result_ty = match variant.node.kind {
-ast::TupleVariantKind(ref args) if args.len() > 0 => {
+ast::TupleVariantKind(ref args) if !args.is_empty() => {
 let rs = ExplicitRscope;
 let input_tys: Vec<_> = args.iter().map(|va| icx.to_ty(&rs, &*va.ty)).collect();
 ty::mk_ctor_fn(tcx, variant_def_id, &input_tys, enum_scheme.ty)
@@ -1035,7 +1035,7 @@ fn convert_struct<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
 match struct_def.ctor_id {
 None => {}
 Some(ctor_id) => {
-if struct_def.fields.len() == 0 {
+if struct_def.fields.is_empty() {
 // Enum-like.
 write_ty_to_tcx(tcx, ctor_id, selfty);

@@ -1893,7 +1893,7 @@ fn compute_object_lifetime_default<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>,
 .flat_map(|predicate| {
 match *predicate {
 ast::WherePredicate::BoundPredicate(ref data) => {
-if data.bound_lifetimes.len() == 0 &&
+if data.bound_lifetimes.is_empty() &&
 is_param(ccx.tcx, &data.bounded_ty, param_id)
 {
 from_bounds(ccx, &data.bounds).into_iter()
@@ -712,7 +712,7 @@ impl<'tcx> Clean<Option<Vec<TyParamBound>>> for subst::Substs<'tcx> {
 trait_: t.clean(cx),
 lifetimes: vec![]
 }, ast::TraitBoundModifier::None)));
-if v.len() > 0 {Some(v)} else {None}
+if !v.is_empty() {Some(v)} else {None}
 }
 }

@@ -1820,7 +1820,7 @@ impl<'tcx> Clean<Item> for ty::VariantInfo<'tcx> {
 fn clean(&self, cx: &DocContext) -> Item {
 // use syntax::parse::token::special_idents::unnamed_field;
 let kind = match self.arg_names.as_ref().map(|s| &**s) {
-None | Some([]) if self.args.len() == 0 => CLikeVariant,
+None | Some([]) if self.args.is_empty() => CLikeVariant,
 None | Some([]) => {
 TupleVariant(self.args.clean(cx))
 }
@@ -1874,7 +1874,7 @@ impl Clean<VariantKind> for ast::VariantKind {
 fn clean(&self, cx: &DocContext) -> VariantKind {
 match self {
 &ast::TupleVariantKind(ref args) => {
-if args.len() == 0 {
+if args.is_empty() {
 CLikeVariant
 } else {
 TupleVariant(args.iter().map(|x| x.ty.clean(cx)).collect())
@@ -94,7 +94,7 @@ impl<'a> fmt::Display for TyParamBounds<'a> {

 impl fmt::Display for clean::Generics {
 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-if self.lifetimes.len() == 0 && self.type_params.len() == 0 { return Ok(()) }
+if self.lifetimes.is_empty() && self.type_params.is_empty() { return Ok(()) }
 try!(f.write_str("<"));

 for (i, life) in self.lifetimes.iter().enumerate() {
@@ -104,8 +104,8 @@ impl fmt::Display for clean::Generics {
 try!(write!(f, "{}", *life));
 }

-if self.type_params.len() > 0 {
-if self.lifetimes.len() > 0 {
+if !self.type_params.is_empty() {
+if !self.lifetimes.is_empty() {
 try!(f.write_str(", "));
 }
 for (i, tp) in self.type_params.iter().enumerate() {
@@ -114,7 +114,7 @@ impl fmt::Display for clean::Generics {
 }
 try!(f.write_str(&tp.name));

-if tp.bounds.len() > 0 {
+if !tp.bounds.is_empty() {
 try!(write!(f, ": {}", TyParamBounds(&tp.bounds)));
 }

@@ -132,7 +132,7 @@ impl fmt::Display for clean::Generics {
 impl<'a> fmt::Display for WhereClause<'a> {
 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
 let &WhereClause(gens) = self;
-if gens.where_predicates.len() == 0 {
+if gens.where_predicates.is_empty() {
 return Ok(());
 }
 try!(f.write_str(" <span class='where'>where "));
@@ -175,7 +175,7 @@ impl fmt::Display for clean::Lifetime {

 impl fmt::Display for clean::PolyTrait {
 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-if self.lifetimes.len() > 0 {
+if !self.lifetimes.is_empty() {
 try!(f.write_str("for<"));
 for (i, lt) in self.lifetimes.iter().enumerate() {
 if i > 0 {
@@ -212,7 +212,7 @@ impl fmt::Display for clean::PathParameters {
 clean::PathParameters::AngleBracketed {
 ref lifetimes, ref types, ref bindings
 } => {
-if lifetimes.len() > 0 || types.len() > 0 || bindings.len() > 0 {
+if !lifetimes.is_empty() || !types.is_empty() || !bindings.is_empty() {
 try!(f.write_str("<"));
 let mut comma = false;
 for lifetime in lifetimes {
@@ -541,7 +541,7 @@ impl fmt::Display for clean::Arguments {
 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
 for (i, input) in self.values.iter().enumerate() {
 if i > 0 { try!(write!(f, ", ")); }
-if input.name.len() > 0 {
+if !input.name.is_empty() {
 try!(write!(f, "{}: ", input.name));
 }
 try!(write!(f, "{}", input.type_));
@@ -585,8 +585,8 @@ impl<'a> fmt::Display for Method<'a> {
 }
 }
 for (i, input) in d.inputs.values.iter().enumerate() {
-if i > 0 || args.len() > 0 { args.push_str(", "); }
-if input.name.len() > 0 {
+if i > 0 || !args.is_empty() { args.push_str(", "); }
+if !input.name.is_empty() {
 args.push_str(&format!("{}: ", input.name));
 }
 args.push_str(&format!("{}", input.type_));
@@ -734,7 +734,7 @@ impl<'a> fmt::Display for ConciseStability<'a> {
 };
 write!(f, "<a class='stability {lvl}' title='{lvl}{colon}{reason}'></a>",
 lvl = Escape(&*lvl),
-colon = if stability.reason.len() > 0 { ": " } else { "" },
+colon = if !stability.reason.is_empty() { ": " } else { "" },
 reason = Escape(&*stability.reason))
 }
 None => {
@@ -130,7 +130,7 @@ r##"<!DOCTYPE html>
 content = *t,
 root_path = page.root_path,
 ty = page.ty,
-logo = if layout.logo.len() == 0 {
+logo = if layout.logo.is_empty() {
 "".to_string()
 } else {
 format!("<a href='{}{}/index.html'>\
@@ -141,7 +141,7 @@ r##"<!DOCTYPE html>
 title = page.title,
 description = page.description,
 keywords = page.keywords,
-favicon = if layout.favicon.len() == 0 {
+favicon = if layout.favicon.is_empty() {
 "".to_string()
 } else {
 format!(r#"<link rel="shortcut icon" href="{}">"#, layout.favicon)
@@ -152,7 +152,7 @@ r##"<!DOCTYPE html>
 sidebar = *sidebar,
 krate = layout.krate,
 play_url = layout.playground_url,
-play_js = if layout.playground_url.len() == 0 {
+play_js = if layout.playground_url.is_empty() {
 "".to_string()
 } else {
 format!(r#"<script src="{}playpen.js"></script>"#, page.root_path)
@@ -305,7 +305,7 @@ pub fn render(w: &mut fmt::Formatter, s: &str, print_toc: bool) -> fmt::Result {
 let text = format!(r##"<h{lvl} id="{id}" class='section-header'><a
 href="#{id}">{sec}{}</a></h{lvl}>"##,
 s, lvl = level, id = id,
-sec = if sec.len() == 0 {
+sec = if sec.is_empty() {
 sec.to_string()
 } else {
 format!("{} ", sec)
@@ -491,7 +491,7 @@ impl<'a> fmt::Display for Markdown<'a> {
 fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
 let Markdown(md) = *self;
 // This is actually common enough to special-case
-if md.len() == 0 { return Ok(()) }
+if md.is_empty() { return Ok(()) }
 render(fmt, md, false)
 }
 }
@@ -912,7 +912,7 @@ impl DocFolder for Cache {
 false)
 }
 clean::MethodItem(..) => {
-if self.parent_stack.len() == 0 {
+if self.parent_stack.is_empty() {
 ((None, None), false)
 } else {
 let last = self.parent_stack.last().unwrap();
@@ -973,7 +973,7 @@ impl DocFolder for Cache {
 // Keep track of the fully qualified path for this item.
 let pushed = if item.name.is_some() {
 let n = item.name.as_ref().unwrap();
-if n.len() > 0 {
+if !n.is_empty() {
 self.stack.push(n.to_string());
 true
 } else { false }
@@ -1115,7 +1115,7 @@ impl Context {
 fn recurse<T, F>(&mut self, s: String, f: F) -> T where
 F: FnOnce(&mut Context) -> T,
 {
-if s.len() == 0 {
+if s.is_empty() {
 panic!("Unexpected empty destination: {:?}", self.current);
 }
 let prev = self.dst.clone();
@@ -1208,7 +1208,7 @@ impl Context {

 let mut title = cx.current.connect("::");
 if pushname {
-if title.len() > 0 {
+if !title.is_empty() {
 title.push_str("::");
 }
 title.push_str(it.name.as_ref().unwrap());
@@ -1343,7 +1343,7 @@ impl Context {
 fn ignore_private_item(&self, it: &clean::Item) -> bool {
 match it.inner {
 clean::ModuleItem(ref m) => {
-(m.items.len() == 0 &&
+(m.items.is_empty() &&
 it.doc_value().is_none() &&
 it.visibility != Some(ast::Public)) ||
 (self.passes.contains("strip-private") && it.visibility != Some(ast::Public))
@@ -1690,7 +1690,7 @@ struct Initializer<'a>(&'a str);
 impl<'a> fmt::Display for Initializer<'a> {
 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
 let Initializer(s) = *self;
-if s.len() == 0 { return Ok(()); }
+if s.is_empty() { return Ok(()); }
 try!(write!(f, "<code> = </code>"));
 write!(f, "<code>{}</code>", s)
 }
@@ -1736,8 +1736,8 @@ fn item_function(w: &mut fmt::Formatter, it: &clean::Item,
 fn item_trait(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item,
 t: &clean::Trait) -> fmt::Result {
 let mut bounds = String::new();
-if t.bounds.len() > 0 {
-if bounds.len() > 0 {
+if !t.bounds.is_empty() {
+if !bounds.is_empty() {
 bounds.push(' ');
 }
 bounds.push_str(": ");
@@ -1766,7 +1766,7 @@ fn item_trait(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item,
 match m.inner { clean::MethodItem(_) => true, _ => false }
 }).collect::<Vec<_>>();

-if t.items.len() == 0 {
+if t.items.is_empty() {
 try!(write!(w, "{{ }}"));
 } else {
 try!(write!(w, "{{\n"));
@@ -1775,7 +1775,7 @@ fn item_trait(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item,
 try!(render_method(w, t, MethodLink::Anchor));
 try!(write!(w, ";\n"));
 }
-if types.len() > 0 && required.len() > 0 {
+if !types.is_empty() && !required.is_empty() {
 try!(w.write_str("\n"));
 }
 for m in &required {
@@ -1783,7 +1783,7 @@ fn item_trait(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item,
 try!(render_method(w, m, MethodLink::Anchor));
 try!(write!(w, ";\n"));
 }
-if required.len() > 0 && provided.len() > 0 {
+if !required.is_empty() && !provided.is_empty() {
 try!(w.write_str("\n"));
 }
 for m in &provided {
@@ -1810,7 +1810,7 @@ fn item_trait(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item,
 Ok(())
 }

-if types.len() > 0 {
+if !types.is_empty() {
 try!(write!(w, "
 <h2 id='associated-types'>Associated Types</h2>
 <div class='methods'>
@@ -1822,7 +1822,7 @@ fn item_trait(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item,
 }

 // Output the documentation for each function individually
-if required.len() > 0 {
+if !required.is_empty() {
 try!(write!(w, "
 <h2 id='required-methods'>Required Methods</h2>
 <div class='methods'>
@@ -1832,7 +1832,7 @@ fn item_trait(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item,
 }
 try!(write!(w, "</div>"));
 }
-if provided.len() > 0 {
+if !provided.is_empty() {
 try!(write!(w, "
 <h2 id='provided-methods'>Provided Methods</h2>
 <div class='methods'>
@@ -1882,7 +1882,7 @@ fn assoc_type(w: &mut fmt::Formatter, it: &clean::Item,
 default: &Option<clean::Type>)
 -> fmt::Result {
 try!(write!(w, "type {}", it.name.as_ref().unwrap()));
-if bounds.len() > 0 {
+if !bounds.is_empty() {
 try!(write!(w, ": {}", TyParamBounds(bounds)))
 }
 if let Some(ref default) = *default {
@@ -1986,7 +1986,7 @@ fn item_enum(w: &mut fmt::Formatter, it: &clean::Item,
 it.name.as_ref().unwrap(),
 e.generics,
 WhereClause(&e.generics)));
-if e.variants.len() == 0 && !e.variants_stripped {
+if e.variants.is_empty() && !e.variants_stripped {
 try!(write!(w, " {{}}"));
 } else {
 try!(write!(w, " {{\n"));
@@ -2031,7 +2031,7 @@ fn item_enum(w: &mut fmt::Formatter, it: &clean::Item,
 try!(write!(w, "</pre>"));

 try!(document(w, it));
-if e.variants.len() > 0 {
+if !e.variants.is_empty() {
 try!(write!(w, "<h2 class='variants'>Variants</h2>\n<table>"));
 for variant in &e.variants {
 try!(write!(w, "<tr><td id='variant.{name}'>{stab}<code>{name}</code></td><td>",
@ -2170,13 +2170,13 @@ fn render_methods(w: &mut fmt::Formatter, it: &clean::Item) -> fmt::Result {
|
|||
};
|
||||
let (non_trait, traits): (Vec<_>, _) = v.into_iter()
|
||||
.partition(|i| i.impl_.trait_.is_none());
|
||||
if non_trait.len() > 0 {
|
||||
if !non_trait.is_empty() {
|
||||
try!(write!(w, "<h2 id='methods'>Methods</h2>"));
|
||||
for i in &non_trait {
|
||||
try!(render_impl(w, i, MethodLink::Anchor));
|
||||
}
|
||||
}
|
||||
if traits.len() > 0 {
|
||||
if !traits.is_empty() {
|
||||
try!(write!(w, "<h2 id='implementations'>Trait \
|
||||
Implementations</h2>"));
|
||||
let (derived, manual): (Vec<_>, _) = traits.into_iter()
|
||||
|
@ -2185,7 +2185,7 @@ fn render_methods(w: &mut fmt::Formatter, it: &clean::Item) -> fmt::Result {
|
|||
let did = i.trait_did().unwrap();
|
||||
try!(render_impl(w, i, MethodLink::GotoSource(did)));
|
||||
}
|
||||
if derived.len() > 0 {
|
||||
if !derived.is_empty() {
|
||||
try!(write!(w, "<h3 id='derived_implementations'>\
|
||||
Derived Implementations \
|
||||
</h3>"));
|
||||
|
|
|
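Every hunk above applies the same mechanical rule: a length comparison against zero becomes a call to is_empty, negated where the original tested > 0 or != 0. A tiny stand-alone sketch of the equivalence, with a purely illustrative vector that is not taken from the patch:

    fn main() {
        let names: Vec<&str> = Vec::new();
        // The two forms are equivalent; the commit prefers the second,
        // which states the intent directly.
        assert_eq!(names.len() == 0, names.is_empty());
        assert_eq!(names.len() > 0, !names.is_empty());
    }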
@@ -219,7 +219,7 @@ pub fn main_args(args: &[String]) -> isize {
return 0;
}

if matches.free.len() == 0 {
if matches.free.is_empty() {
println!("expected an input file to act on");
return 1;
} if matches.free.len() > 1 {

@@ -74,7 +74,7 @@ pub fn render(input: &str, mut output: PathBuf, matches: &getopts::Matches,
};

let (metadata, text) = extract_leading_metadata(&input_str);
if metadata.len() == 0 {
if metadata.is_empty() {
let _ = writeln!(&mut io::stderr(),
"invalid markdown file: expecting initial line with `% ...TITLE...`");
return 5;

@@ -215,9 +215,9 @@ impl<'a> fold::DocFolder for Stripper<'a> {
match i.inner {
// emptied modules/impls have no need to exist
clean::ModuleItem(ref m)
if m.items.len() == 0 &&
if m.items.is_empty() &&
i.doc_value().is_none() => None,
clean::ImplItem(ref i) if i.items.len() == 0 => None,
clean::ImplItem(ref i) if i.items.is_empty() => None,
_ => {
self.retained.insert(i.def_id.node);
Some(i)

@@ -294,7 +294,7 @@ pub fn collapse_docs(krate: clean::Crate) -> plugins::PluginResult {
&clean::NameValue(ref x, _) if "doc" == *x => false,
_ => true
}).cloned().collect();
if docstr.len() > 0 {
if !docstr.is_empty() {
a.push(clean::NameValue("doc".to_string(), docstr));
}
i.attrs = a;

@@ -350,7 +350,7 @@ pub fn unindent(s: &str) -> String {
}
});

if lines.len() >= 1 {
if !lines.is_empty() {
let mut unindented = vec![ lines[0].trim().to_string() ];
unindented.push_all(&lines.tail().iter().map(|&line| {
if line.chars().all(|c| c.is_whitespace()) {

@@ -409,7 +409,7 @@ impl Collector {
impl DocFolder for Collector {
fn fold_item(&mut self, item: clean::Item) -> Option<clean::Item> {
let pushed = match item.name {
Some(ref name) if name.len() == 0 => false,
Some(ref name) if name.is_empty() => false,
Some(ref name) => { self.names.push(name.to_string()); true }
None => false
};

@@ -175,7 +175,7 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
please_inline)
}).collect::<Vec<ast::PathListItem>>();

if mine.len() == 0 {
if mine.is_empty() {
None
} else {
Some(ast::ViewPathList(p, mine))

@@ -3837,7 +3837,7 @@ mod tests {
let mut stack = Stack::new();

assert!(stack.is_empty());
assert!(stack.len() == 0);
assert!(stack.is_empty());
assert!(!stack.last_is_index());

stack.push_index(0);

@@ -430,7 +430,7 @@ impl<T, S> HashSet<T, S>
/// assert!(!v.is_empty());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn is_empty(&self) -> bool { self.map.len() == 0 }
pub fn is_empty(&self) -> bool { self.map.is_empty() }

/// Clears the set, returning all elements in an iterator.
#[inline]

@@ -349,7 +349,7 @@ pub trait Write {
/// This function will return the first error that `write` returns.
#[stable(feature = "rust1", since = "1.0.0")]
fn write_all(&mut self, mut buf: &[u8]) -> Result<()> {
while buf.len() > 0 {
while !buf.is_empty() {
match self.write(buf) {
Ok(0) => return Err(Error::new(ErrorKind::WriteZero,
"failed to write whole buffer")),
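The Write::write_all hunk just above shows the loop shape this commit standardizes: keep writing while the unwritten tail of the buffer is non-empty, rather than comparing its length to zero. A minimal, self-contained sketch of that shape over an in-memory writer (the helper name write_fully is illustrative, not part of the patch, and this is not the std implementation itself):

    use std::io::{self, Write};

    // Drain `buf` by repeatedly writing the non-empty remainder.
    fn write_fully<W: Write>(w: &mut W, mut buf: &[u8]) -> io::Result<()> {
        while !buf.is_empty() {
            let n = w.write(buf)?;
            if n == 0 {
                return Err(io::Error::new(io::ErrorKind::WriteZero,
                                          "failed to write whole buffer"));
            }
            buf = &buf[n..];
        }
        Ok(())
    }

    fn main() -> io::Result<()> {
        let mut out = Vec::new();
        write_fully(&mut out, b"hello")?;
        assert_eq!(out, b"hello");
        Ok(())
    }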
@@ -218,7 +218,7 @@ mod platform {
return Some(DeviceNS(u8_slice_as_os_str(slice)));
}
match parse_two_comps(path, is_sep_byte) {
Some((server, share)) if server.len() > 0 && share.len() > 0 => {
Some((server, share)) if !server.is_empty() && !share.is_empty() => {
// \\server\share
return Some(UNC(u8_slice_as_os_str(server),
u8_slice_as_os_str(share)));

@@ -401,7 +401,7 @@ unsafe fn u8_slice_as_os_str(s: &[u8]) -> &OsStr {
/// Says whether the first byte after the prefix is a separator.
fn has_physical_root(s: &[u8], prefix: Option<Prefix>) -> bool {
let path = if let Some(p) = prefix { &s[p.len()..] } else { s };
path.len() > 0 && is_sep_byte(path[0])
!path.is_empty() && is_sep_byte(path[0])
}

// basic workhorse for splitting stem and extension

@@ -810,7 +810,7 @@ impl<'a> Iterator for Components<'a> {
State::StartDir => {
self.front = State::Body;
if self.has_physical_root {
debug_assert!(self.path.len() > 0);
debug_assert!(!self.path.is_empty());
self.path = &self.path[1..];
return Some(Component::RootDir)
} else if let Some(p) = self.prefix {

@@ -818,7 +818,7 @@ impl<'a> Iterator for Components<'a> {
return Some(Component::RootDir)
}
} else if self.include_cur_dir() {
debug_assert!(self.path.len() > 0);
debug_assert!(!self.path.is_empty());
self.path = &self.path[1..];
return Some(Component::CurDir)
}

@@ -1055,7 +1055,7 @@ impl PathBuf {
};

let extension = extension.as_ref();
if os_str_as_u8_slice(extension).len() > 0 {
if !os_str_as_u8_slice(extension).is_empty() {
stem.push(".");
stem.push(extension);
}

@@ -51,7 +51,7 @@ impl<R: Read> Rng for ReaderRng<R> {
unsafe { *(bytes.as_ptr() as *const u64) }
}
fn fill_bytes(&mut self, mut v: &mut [u8]) {
while v.len() > 0 {
while !v.is_empty() {
let t = v;
match self.reader.read(t) {
Ok(0) => panic!("ReaderRng.fill_bytes: EOF reached"),

@@ -76,7 +76,7 @@ pub fn demangle(writer: &mut Write, s: &str) -> io::Result<()> {
try!(writer.write_all(s.as_bytes()));
} else {
let mut first = true;
while inner.len() > 0 {
while !inner.is_empty() {
if !first {
try!(writer.write_all(b"::"));
} else {

@@ -89,7 +89,7 @@ pub fn demangle(writer: &mut Write, s: &str) -> io::Result<()> {
let i: usize = inner[.. (inner.len() - rest.len())].parse().unwrap();
inner = &rest[i..];
rest = &rest[..i];
while rest.len() > 0 {
while !rest.is_empty() {
if rest.starts_with("$") {
macro_rules! demangle {
($($pat:expr, => $demangled:expr),*) => ({

@@ -381,7 +381,7 @@ fn make_command_line(prog: &OsStr, args: &[OsString]) -> Vec<u16> {
// it will be dropped entirely when parsed on the other end.
let arg_bytes = &arg.as_inner().inner.as_inner();
let quote = arg_bytes.iter().any(|c| *c == b' ' || *c == b'\t')
|| arg_bytes.len() == 0;
|| arg_bytes.is_empty();
if quote {
cmd.push('"' as u16);
}

@@ -429,14 +429,14 @@ pub struct Generics {
}

impl Generics {
pub fn is_parameterized(&self) -> bool {
self.lifetimes.len() + self.ty_params.len() > 0
}
pub fn is_lt_parameterized(&self) -> bool {
self.lifetimes.len() > 0
!self.lifetimes.is_empty()
}
pub fn is_type_parameterized(&self) -> bool {
self.ty_params.len() > 0
!self.ty_params.is_empty()
}
pub fn is_parameterized(&self) -> bool {
self.is_lt_parameterized() || self.is_type_parameterized()
}
}
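The Generics hunk above is the one spot where the patch goes beyond a one-line swap: is_parameterized is re-expressed in terms of the two narrower predicates instead of summing lengths. A stand-alone sketch of that shape, using a hypothetical stand-in for the AST type:

    // Hypothetical stand-in for the real ast::Generics; only the fields
    // needed to show how the three predicates relate.
    struct Generics {
        lifetimes: Vec<String>,
        ty_params: Vec<String>,
    }

    impl Generics {
        fn is_lt_parameterized(&self) -> bool {
            !self.lifetimes.is_empty()
        }
        fn is_type_parameterized(&self) -> bool {
            !self.ty_params.is_empty()
        }
        // Composed from the two helpers rather than comparing a summed length to zero.
        fn is_parameterized(&self) -> bool {
            self.is_lt_parameterized() || self.is_type_parameterized()
        }
    }

    fn main() {
        let g = Generics { lifetimes: vec![], ty_params: vec!["T".to_string()] };
        assert!(g.is_parameterized());
        assert!(!g.is_lt_parameterized());
    }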
@@ -360,7 +360,7 @@ impl Encodable for FileMap {
// store the length
try! { s.emit_u32(lines.len() as u32) };

if lines.len() > 0 {
if !lines.is_empty() {
// In order to preserve some space, we exploit the fact that
// the lines list is sorted and individual lines are
// probably not that long. Because of that we can store lines

@@ -569,7 +569,7 @@ impl CodeMap {
// accidentally overflowing into the next filemap in case the last byte
// of span is also the last byte of filemap, which leads to incorrect
// results from CodeMap.span_to_*.
if src.len() > 0 && !src.ends_with("\n") {
if !src.is_empty() && !src.ends_with("\n") {
src.push('\n');
}

@@ -652,7 +652,7 @@ impl CodeMap {
}

pub fn span_to_string(&self, sp: Span) -> String {
if self.files.borrow().len() == 0 && sp == DUMMY_SP {
if self.files.borrow().is_empty() && sp == DUMMY_SP {
return "no-location".to_string();
}

@@ -808,7 +808,7 @@ impl CodeMap {
loop {
let lines = files[a].lines.borrow();
let lines = lines;
if lines.len() > 0 {
if !lines.is_empty() {
break;
}
if a == 0 {

@@ -90,7 +90,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
p.token != token::Colon &&
p.token != token::ModSep {

if outputs.len() != 0 {
if !outputs.is_empty() {
panictry!(p.eat(&token::Comma));
}

@@ -130,7 +130,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
p.token != token::Colon &&
p.token != token::ModSep {

if inputs.len() != 0 {
if !inputs.is_empty() {
panictry!(p.eat(&token::Comma));
}

@@ -154,7 +154,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
p.token != token::Colon &&
p.token != token::ModSep {

if clobs.len() != 0 {
if !clobs.is_empty() {
panictry!(p.eat(&token::Comma));
}
@@ -774,7 +774,7 @@ pub fn check_zero_tts(cx: &ExtCtxt,
sp: Span,
tts: &[ast::TokenTree],
name: &str) {
if tts.len() != 0 {
if !tts.is_empty() {
cx.span_err(sp, &format!("{} takes no arguments", name));
}
}

@@ -89,7 +89,7 @@ fn cs_clone(
}
}

if all_fields.len() >= 1 && all_fields[0].name.is_none() {
if !all_fields.is_empty() && all_fields[0].name.is_none() {
// enum-like
let subcalls = all_fields.iter().map(subcall).collect();
let path = cx.expr_path(ctor_path);

@@ -240,7 +240,7 @@ fn encodable_substructure(cx: &mut ExtCtxt, trait_span: Span,
let encoder = cx.expr_ident(trait_span, blkarg);
let emit_variant_arg = cx.ident_of("emit_enum_variant_arg");
let mut stmts = Vec::new();
if fields.len() > 0 {
if !fields.is_empty() {
let last = fields.len() - 1;
for (i, &FieldInfo { ref self_, span, .. }) in fields.iter().enumerate() {
let enc = cx.expr_method_call(span, self_.clone(),

@@ -912,7 +912,7 @@ impl<'a> MethodDef<'a> {
}

// transpose raw_fields
let fields = if raw_fields.len() > 0 {
let fields = if !raw_fields.is_empty() {
let mut raw_fields = raw_fields.into_iter().map(|v| v.into_iter());
let first_field = raw_fields.next().unwrap();
let mut other_fields: Vec<vec::IntoIter<(Span, Option<Ident>, P<Expr>)>>

@@ -1248,7 +1248,7 @@ impl<'a> MethodDef<'a> {

match_arms.push(catch_all_match_arm);

} else if variants.len() == 0 {
} else if variants.is_empty() {
// As an additional wrinkle, For a zero-variant enum A,
// currently the compiler
// will accept `fn (a: &Self) { match *a { } }`

@@ -65,7 +65,7 @@ pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenT
pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
-> Box<base::MacResult+'cx> {
let mut exprs = match get_exprs_from_tts(cx, sp, tts) {
Some(ref exprs) if exprs.len() == 0 => {
Some(ref exprs) if exprs.is_empty() => {
cx.span_err(sp, "env! takes 1 or 2 arguments");
return DummyResult::expr(sp);
}

@@ -841,7 +841,7 @@ fn expand_non_macro_stmt(Spanned {node, span: stmt_span}: Stmt, fld: &mut MacroE
fn expand_arm(arm: ast::Arm, fld: &mut MacroExpander) -> ast::Arm {
// expand pats... they might contain macro uses:
let expanded_pats = arm.pats.move_map(|pat| fld.fold_pat(pat));
if expanded_pats.len() == 0 {
if expanded_pats.is_empty() {
panic!("encountered match arm with 0 patterns");
}
// all of the pats must have the same set of bindings, so use the

@@ -1887,7 +1887,7 @@ mod test {
let binding_name = mtwt::resolve(bindings[binding_idx]);
let binding_marks = mtwt::marksof(bindings[binding_idx].ctxt, invalid_name);
// shouldmatch can't name varrefs that don't exist:
assert!((shouldmatch.len() == 0) ||
assert!((shouldmatch.is_empty()) ||
(varrefs.len() > *shouldmatch.iter().max().unwrap()));
for (idx,varref) in varrefs.iter().enumerate() {
let print_hygiene_debug_info = || {

@@ -688,7 +688,7 @@ pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt, sp: Span,
loop {
match parser.next() {
Some(piece) => {
if parser.errors.len() > 0 { break }
if !parser.errors.is_empty() { break }
cx.verify_piece(&piece);
match cx.trans_piece(&piece) {
Some(piece) => {

@@ -266,7 +266,7 @@ pub fn outer_mark(ctxt: SyntaxContext) -> Mrk {
/// Push a name... unless it matches the one on top, in which
/// case pop and discard (so two of the same marks cancel)
fn xor_push(marks: &mut Vec<Mrk>, mark: Mrk) {
if (marks.len() > 0) && (*marks.last().unwrap() == mark) {
if (!marks.is_empty()) && (*marks.last().unwrap() == mark) {
marks.pop().unwrap();
} else {
marks.push(mark);

@@ -457,7 +457,7 @@ pub fn parse(sess: &ParseSess,
return Failure(sp, "unexpected end of macro invocation".to_string());
}
} else {
if (bb_eis.len() > 0 && next_eis.len() > 0)
if (!bb_eis.is_empty() && !next_eis.is_empty())
|| bb_eis.len() > 1 {
let nts = bb_eis.iter().map(|ei| {
match ei.top_elts.get_tt(ei.idx) {

@@ -472,12 +472,12 @@ pub fn parse(sess: &ParseSess,
"local ambiguity: multiple parsing options: \
built-in NTs {} or {} other options.",
nts, next_eis.len()).to_string());
} else if bb_eis.len() == 0 && next_eis.len() == 0 {
} else if bb_eis.is_empty() && next_eis.is_empty() {
return Failure(sp, format!("no rules expected the token `{}`",
pprust::token_to_string(&tok)).to_string());
} else if next_eis.len() > 0 {
} else if !next_eis.is_empty() {
/* Now process the next token */
while next_eis.len() > 0 {
while !next_eis.is_empty() {
cur_eis.push(next_eis.pop().unwrap());
}
rdr.next_token();

@@ -504,7 +504,7 @@ pub fn parse(sess: &ParseSess,
}
}

assert!(cur_eis.len() > 0);
assert!(!cur_eis.is_empty());
}
}
@@ -63,7 +63,7 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String {
let mut i = 0;
let mut j = lines.len();
// first line of all-stars should be omitted
if lines.len() > 0 &&
if !lines.is_empty() &&
lines[0].chars().all(|c| c == '*') {
i += 1;
}

@@ -294,7 +294,7 @@ fn read_block_comment(rdr: &mut StringReader,
}
}
}
if curr_line.len() != 0 {
if !curr_line.is_empty() {
trim_whitespace_prefix_and_push_line(&mut lines,
curr_line,
col);

@@ -449,7 +449,7 @@ impl<'a> Parser<'a> {
(format!("expected one of {}, found `{}`",
expect,
actual))
} else if expected.len() == 0 {
} else if expected.is_empty() {
(format!("unexpected token: `{}`",
actual))
} else {

@@ -1244,7 +1244,7 @@ impl<'a> Parser<'a> {

// In type grammar, `+` is treated like a binary operator,
// and hence both L and R side are required.
if bounds.len() == 0 {
if bounds.is_empty() {
let last_span = self.last_span;
self.span_err(last_span,
"at least one type parameter bound \

@@ -2191,7 +2191,7 @@ impl<'a> Parser<'a> {
&[token::CloseDelim(token::Brace)]));
}

if fields.len() == 0 && base.is_none() {
if fields.is_empty() && base.is_none() {
let last_span = self.last_span;
self.span_err(last_span,
"structure literal must either \

@@ -2254,7 +2254,7 @@ impl<'a> Parser<'a> {
(Vec::new(), Vec::new(), Vec::new())
};

if bindings.len() > 0 {
if !bindings.is_empty() {
let last_span = self.last_span;
self.span_err(last_span, "type bindings are only permitted on trait paths");
}

@@ -3024,7 +3024,7 @@ impl<'a> Parser<'a> {
try!(self.expect(&token::Comma));

if self.token == token::CloseDelim(token::Bracket)
&& (before_slice || after.len() != 0) {
&& (before_slice || !after.is_empty()) {
break
}
}

@@ -3914,7 +3914,7 @@ impl<'a> Parser<'a> {
let hi = self.span.hi;
let span = mk_sp(lo, hi);

if bounds.len() == 0 {
if bounds.is_empty() {
self.span_err(span,
"each predicate in a `where` clause must have \
at least one bound in it");

@@ -4572,7 +4572,7 @@ impl<'a> Parser<'a> {
fields.push(try!(self.parse_struct_decl_field(true)));
}

if fields.len() == 0 {
if fields.is_empty() {
return Err(self.fatal(&format!("unit-like struct definition should be \
written as `struct {};`",
token::get_ident(class_name.clone()))));

@@ -4611,7 +4611,7 @@ impl<'a> Parser<'a> {
Ok(spanned(lo, p.span.hi, struct_field_))
}));

if fields.len() == 0 {
if fields.is_empty() {
return Err(self.fatal(&format!("unit-like struct definition should be \
written as `struct {};`",
token::get_ident(class_name.clone()))));

@@ -5023,7 +5023,7 @@ impl<'a> Parser<'a> {
all_nullary = false;
let start_span = self.span;
let struct_def = try!(self.parse_struct_def());
if struct_def.fields.len() == 0 {
if struct_def.fields.is_empty() {
self.span_err(start_span,
&format!("unit-like struct variant should be written \
without braces, as `{},`",
@@ -565,7 +565,7 @@ impl<'a> Printer<'a> {
Token::End => {
debug!("print End -> pop End");
let print_stack = &mut self.print_stack;
assert!((print_stack.len() != 0));
assert!((!print_stack.is_empty()));
print_stack.pop().unwrap();
Ok(())
}

@@ -1656,7 +1656,7 @@ impl<'a> State<'a> {
try!(self.print_expr(&*args[0]));
try!(word(&mut self.s, "."));
try!(self.print_ident(ident.node));
if tys.len() > 0 {
if !tys.is_empty() {
try!(word(&mut self.s, "::<"));
try!(self.commasep(Inconsistent, tys,
|s, ty| s.print_type(&**ty)));

@@ -1956,7 +1956,7 @@ impl<'a> State<'a> {
options.push("intel");
}

if options.len() > 0 {
if !options.is_empty() {
try!(space(&mut self.s));
try!(self.word_space(":"));
try!(self.commasep(Inconsistent, &*options,

@@ -2214,7 +2214,7 @@ impl<'a> State<'a> {
},
|f| f.node.pat.span));
if etc {
if fields.len() != 0 { try!(self.word_space(",")); }
if !fields.is_empty() { try!(self.word_space(",")); }
try!(word(&mut self.s, ".."));
}
try!(space(&mut self.s));

@@ -2546,7 +2546,7 @@ impl<'a> State<'a> {

pub fn print_where_clause(&mut self, where_clause: &ast::WhereClause)
-> io::Result<()> {
if where_clause.predicates.len() == 0 {
if where_clause.predicates.is_empty() {
return Ok(())
}

@@ -2727,7 +2727,7 @@ impl<'a> State<'a> {
opt_explicit_self: Option<&ast::ExplicitSelf_>)
-> io::Result<()> {
try!(self.ibox(indent_unit));
if generics.lifetimes.len() > 0 || generics.ty_params.len() > 0 {
if !generics.lifetimes.is_empty() || !generics.ty_params.is_empty() {
try!(word(&mut self.s, "for"));
try!(self.print_generics(generics));
}
@@ -123,7 +123,7 @@ pub fn expand(cap: &[u8], params: &[Param], vars: &mut Variables)
Percent => {
match cur {
'%' => { output.push(c); state = Nothing },
'c' => if stack.len() > 0 {
'c' => if !stack.is_empty() {
match stack.pop().unwrap() {
// if c is 0, use 0200 (128) for ncurses compatibility
Number(c) => {

@@ -141,7 +141,7 @@ pub fn expand(cap: &[u8], params: &[Param], vars: &mut Variables)
'g' => state = GetVar,
'\'' => state = CharConstant,
'{' => state = IntConstant(0),
'l' => if stack.len() > 0 {
'l' => if !stack.is_empty() {
match stack.pop().unwrap() {
Words(s) => stack.push(Number(s.len() as isize)),
_ => return Err("a non-str was used with %l".to_string())

@@ -231,14 +231,14 @@ pub fn expand(cap: &[u8], params: &[Param], vars: &mut Variables)
_ => return Err("non-numbers on stack with logical or".to_string())
}
} else { return Err("stack is empty".to_string()) },
'!' => if stack.len() > 0 {
'!' => if !stack.is_empty() {
match stack.pop().unwrap() {
Number(0) => stack.push(Number(1)),
Number(_) => stack.push(Number(0)),
_ => return Err("non-number on stack with logical not".to_string())
}
} else { return Err("stack is empty".to_string()) },
'~' => if stack.len() > 0 {
'~' => if !stack.is_empty() {
match stack.pop().unwrap() {
Number(x) => stack.push(Number(!x)),
_ => return Err("non-number on stack with %~".to_string())

@@ -253,7 +253,7 @@ pub fn expand(cap: &[u8], params: &[Param], vars: &mut Variables)
},

// printf-style support for %doxXs
'd'|'o'|'x'|'X'|'s' => if stack.len() > 0 {
'd'|'o'|'x'|'X'|'s' => if !stack.is_empty() {
let flags = Flags::new();
let res = format(stack.pop().unwrap(), FormatOp::from_char(cur), flags);
if res.is_err() { return res }

@@ -278,7 +278,7 @@ pub fn expand(cap: &[u8], params: &[Param], vars: &mut Variables)

// conditionals
'?' => (),
't' => if stack.len() > 0 {
't' => if !stack.is_empty() {
match stack.pop().unwrap() {
Number(0) => state = SeekIfElse(0),
Number(_) => (),

@@ -303,12 +303,12 @@ pub fn expand(cap: &[u8], params: &[Param], vars: &mut Variables)
},
SetVar => {
if cur >= 'A' && cur <= 'Z' {
if stack.len() > 0 {
if !stack.is_empty() {
let idx = (cur as u8) - b'A';
vars.sta[idx as usize] = stack.pop().unwrap();
} else { return Err("stack is empty".to_string()) }
} else if cur >= 'a' && cur <= 'z' {
if stack.len() > 0 {
if !stack.is_empty() {
let idx = (cur as u8) - b'a';
vars.dyn[idx as usize] = stack.pop().unwrap();
} else { return Err("stack is empty".to_string()) }

@@ -352,7 +352,7 @@ pub fn expand(cap: &[u8], params: &[Param], vars: &mut Variables)
FormatPattern(ref mut flags, ref mut fstate) => {
old_state = Nothing;
match (*fstate, cur) {
(_,'d')|(_,'o')|(_,'x')|(_,'X')|(_,'s') => if stack.len() > 0 {
(_,'d')|(_,'o')|(_,'x')|(_,'X')|(_,'s') => if !stack.is_empty() {
let res = format(stack.pop().unwrap(), FormatOp::from_char(cur), *flags);
if res.is_err() { return res }
output.push_all(&res.unwrap());
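The terminfo hunks above all guard a stack pop the same way: pop only when the stack is non-empty, otherwise report that the stack is empty. A minimal sketch of that guard in isolation (pop_number and the i64 stack are illustrative, not taken from the patch):

    // Pop the top number if there is one; otherwise surface the same
    // kind of error string the terminfo expander uses.
    fn pop_number(stack: &mut Vec<i64>) -> Result<i64, String> {
        if !stack.is_empty() {
            Ok(stack.pop().unwrap())
        } else {
            Err("stack is empty".to_string())
        }
    }

    fn main() {
        let mut stack = vec![1, 2, 3];
        assert_eq!(pop_number(&mut stack), Ok(3));
        stack.clear();
        assert!(pop_number(&mut stack).is_err());
    }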
@@ -20,7 +20,7 @@ use std::path::PathBuf;
/// Return path to database entry for `term`
#[allow(deprecated)]
pub fn get_dbpath_for_term(term: &str) -> Option<Box<PathBuf>> {
if term.len() == 0 {
if term.is_empty() {
return None;
}

@@ -373,7 +373,7 @@ pub fn parse_opts(args: &[String]) -> Option<OptRes> {

if matches.opt_present("h") { usage(&args[0]); return None; }

let filter = if matches.free.len() > 0 {
let filter = if !matches.free.is_empty() {
Some(matches.free[0].clone())
} else {
None

@@ -588,14 +588,14 @@ impl<T: Write> ConsoleTestState<T> {
let mut fail_out = String::new();
for &(ref f, ref stdout) in &self.failures {
failures.push(f.name.to_string());
if stdout.len() > 0 {
if !stdout.is_empty() {
fail_out.push_str(&format!("---- {} stdout ----\n\t", f.name));
let output = String::from_utf8_lossy(stdout);
fail_out.push_str(&output);
fail_out.push_str("\n");
}
}
if fail_out.len() > 0 {
if !fail_out.is_empty() {
try!(self.write_plain("\n"));
try!(self.write_plain(&fail_out));
}

@@ -196,17 +196,17 @@ impl<T: Float + FromPrimitive> Stats<T> for [T] {
}

fn min(&self) -> T {
assert!(self.len() != 0);
assert!(!self.is_empty());
self.iter().fold(self[0], |p, q| p.min(*q))
}

fn max(&self) -> T {
assert!(self.len() != 0);
assert!(!self.is_empty());
self.iter().fold(self[0], |p, q| p.max(*q))
}

fn mean(&self) -> T {
assert!(self.len() != 0);
assert!(!self.is_empty());
self.sum() / FromPrimitive::from_usize(self.len()).unwrap()
}

@@ -284,7 +284,7 @@ impl<T: Float + FromPrimitive> Stats<T> for [T] {
// linear interpolation. If samples are not sorted, return nonsensical value.
fn percentile_of_sorted<T: Float + FromPrimitive>(sorted_samples: &[T],
pct: T) -> T {
assert!(sorted_samples.len() != 0);
assert!(!sorted_samples.is_empty());
if sorted_samples.len() == 1 {
return sorted_samples[0];
}
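In the Stats hunks, the emptiness check guards a fold that seeds itself with the first element, so the assertion is what keeps self[0] in bounds. A free-standing sketch of that guard-then-fold shape (min_of and the f64 slice are illustrative, not from the patch):

    // Smallest element of a non-empty slice; panics via the assert on an
    // empty one, just like the guarded methods above.
    fn min_of(samples: &[f64]) -> f64 {
        assert!(!samples.is_empty());
        samples.iter().fold(samples[0], |acc, &x| acc.min(x))
    }

    fn main() {
        assert_eq!(min_of(&[3.0, 1.0, 2.0]), 1.0);
    }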
@@ -157,7 +157,7 @@ impl<'a> Iterator for Graphemes<'a> {
#[inline]
fn next(&mut self) -> Option<&'a str> {
use tables::grapheme as gr;
if self.string.len() == 0 {
if self.string.is_empty() {
return None;
}

@@ -257,7 +257,7 @@ impl<'a> DoubleEndedIterator for Graphemes<'a> {
#[inline]
fn next_back(&mut self) -> Option<&'a str> {
use tables::grapheme as gr;
if self.string.len() == 0 {
if self.string.is_empty() {
return None;
}

@@ -43,7 +43,7 @@ fn maybe_run_test<F>(argv: &[String], name: String, test: F) where F: FnOnce() {

if env::var_os("RUST_BENCH").is_some() {
run_test = true
} else if argv.len() > 0 {
} else if !argv.is_empty() {
run_test = argv.iter().any(|x| x == &"all".to_string()) || argv.iter().any(|x| x == &name)
}

@@ -60,7 +60,7 @@ fn shift_push() {
let mut v1 = repeat(1).take(30000).collect::<Vec<_>>();
let mut v2 = Vec::new();

while v1.len() > 0 {
while !v1.is_empty() {
v2.push(v1.remove(0));
}
}

@@ -184,7 +184,7 @@ fn main() {
for line in rdr.lines() {
let line = line.unwrap().trim().to_string();

if line.len() == 0 { continue; }
if line.is_empty() { continue; }

match (line.as_bytes()[0] as char, proc_mode) {

@@ -252,7 +252,7 @@ fn generate_frequencies(mut input: &[u8], frame: usize) -> Table {
}
frequencies.lookup(code, BumpCallback);

while input.len() != 0 && input[0] != ('>' as u8) {
while !input.is_empty() && input[0] != ('>' as u8) {
code = code.rotate(input[0], frame);
frequencies.lookup(code, BumpCallback);
input = &input[1..];

@@ -196,7 +196,7 @@ fn shift_mut_ref<'a, T>(r: &mut &'a mut [T]) -> Option<&'a mut T> {
use std::mem;
use std::raw::Repr;

if r.len() == 0 { return None }
if r.is_empty() { return None }
unsafe {
let mut raw = r.repr();
let ret = raw.data as *mut T;
Some files were not shown because too many files have changed in this diff.