Merge pull request #8244 from thestinger/for
make `for` parse as `foreach` does. r=huonw; bors is acting up, and this has been run through the try bots.
commit deddb009f0
68 changed files with 791 additions and 847 deletions
doc
src
    compiletest
    libextra
    librustc
        metadata
        middle
            astencode.rs
            borrowck
            check_match.rs, dataflow.rs, graph.rs, kind.rs, lang_items.rs,
            lint.rs, moves.rs, pat_util.rs, reachable.rs, resolve.rs, trans,
            ty.rs, typeck
        util
    librusti
    librustpkg
    libstd
    libsyntax
    test
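Before this change, `for` was sugar for calling a higher-order function with a `&fn(...) -> bool` closure (an "internal" iterator), while the transitional `foreach` keyword drove external iterators through the `Iterator` trait. This commit makes `for` parse exactly as `foreach` does, so the tutorials and every remaining internal-iterator call site are updated below. As a rough sketch of the three forms, in the pre-1.0 dialect this commit targets (it will not compile with modern Rust; `xs` is only an illustrative local):

~~~
let xs = [2, 3, 5];

// Old `for`: sugar over a `&fn(T) -> bool` callback; the compiler inserted
// the `true`/`false` continuation values for you.
for xs.iter().advance |x| {
    println(x.to_str())
}

// Transitional `foreach`: drives any external Iterator.
foreach x in xs.iter() {
    println(x.to_str())
}

// After this commit, plain `for` accepts the `foreach` grammar.
for x in xs.iter() {
    println(x.to_str())
}
~~~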
@@ -164,19 +164,18 @@ dropped when they become unnecessary.

 ## For loops

-The `foreach` keyword is transitional, and is going to replace the current
-obsolete `for` loop.
+The `for` keyword can be used as sugar for iterating through any iterator:

 ~~~
 let xs = [2, 3, 5, 7, 11, 13, 17];

 // print out all the elements in the vector
-foreach x in xs.iter() {
+for x in xs.iter() {
     println(x.to_str())
 }

 // print out all but the first 3 elements in the vector
-foreach x in xs.iter().skip(3) {
+for x in xs.iter().skip(3) {
     println(x.to_str())
 }
 ~~~

@@ -192,7 +191,7 @@ let ys = ["foo", "bar", "baz", "foobar"];
 let mut it = xs.iter().zip(ys.iter());

 // print out the pairs of elements up to (&3, &"baz")
-foreach (x, y) in it {
+for (x, y) in it {
     printfln!("%d %s", *x, *y);

     if *x == 3 {

@@ -229,7 +228,7 @@ impl<A, T: Iterator<A>> FromIterator<A, T> for ~[A] {
     pub fn from_iterator(iterator: &mut T) -> ~[A] {
         let (lower, _) = iterator.size_hint();
         let mut xs = with_capacity(lower);
-        foreach x in iterator {
+        for x in iterator {
             xs.push(x);
         }
         xs

@@ -300,7 +299,7 @@ printfln!("%?", it.next()); // prints `Some(&2)`
 printfln!("%?", it.next_back()); // prints `Some(&6)`

 // prints `5`, `4` and `3`
-foreach &x in it.invert() {
+for &x in it.invert() {
     printfln!("%?", x)
 }
 ~~~

@@ -319,7 +318,7 @@ let mut it = xs.iter().chain_(ys.iter()).transform(|&x| x * 2);
 printfln!("%?", it.next()); // prints `Some(2)`

 // prints `16`, `14`, `12`, `10`, `8`, `6`, `4`
-foreach x in it.invert() {
+for x in it.invert() {
     printfln!("%?", x);
 }
 ~~~

@@ -567,11 +567,6 @@ loop {
 This code prints out a weird sequence of numbers and stops as soon as
 it finds one that can be divided by five.

-Rust also has a `for` construct. It's different from C's `for` and it works
-best when iterating over collections. See the section on [closures](#closures)
-to find out how to use `for` and higher-order functions for enumerating
-elements of a collection.
-
 # Data structures

 ## Structs

@@ -1397,8 +1392,8 @@ assert!(crayons.len() == 3);
 assert!(!crayons.is_empty());

 // Iterate over a vector, obtaining a pointer to each element
-// (`for` is explained in the next section)
-foreach crayon in crayons.iter() {
+// (`for` is explained in the container/iterator tutorial)
+for crayon in crayons.iter() {
     let delicious_crayon_wax = unwrap_crayon(*crayon);
     eat_crayon_wax(delicious_crayon_wax);
 }

@@ -1749,7 +1744,7 @@ of `vector`:
 ~~~~
 fn map<T, U>(vector: &[T], function: &fn(v: &T) -> U) -> ~[U] {
     let mut accumulator = ~[];
-    foreach element in vector.iter() {
+    for element in vector.iter() {
         accumulator.push(function(element));
     }
     return accumulator;

@@ -2027,7 +2022,7 @@ generic types.
 ~~~~
 # trait Printable { fn print(&self); }
 fn print_all<T: Printable>(printable_things: ~[T]) {
-    foreach thing in printable_things.iter() {
+    for thing in printable_things.iter() {
         thing.print();
     }
 }

@@ -2073,7 +2068,7 @@ However, consider this function:
 trait Drawable { fn draw(&self); }

 fn draw_all<T: Drawable>(shapes: ~[T]) {
-    foreach shape in shapes.iter() { shape.draw(); }
+    for shape in shapes.iter() { shape.draw(); }
 }
 # let c: Circle = new_circle();
 # draw_all(~[c]);

@@ -2088,7 +2083,7 @@ an _object_.
 ~~~~
 # trait Drawable { fn draw(&self); }
 fn draw_all(shapes: &[@Drawable]) {
-    foreach shape in shapes.iter() { shape.draw(); }
+    for shape in shapes.iter() { shape.draw(); }
 }
 ~~~~
@@ -41,7 +41,7 @@ pub fn load_props(testfile: &Path) -> TestProps {
     let mut pp_exact = None;
     let mut debugger_cmds = ~[];
     let mut check_lines = ~[];
-    for iter_header(testfile) |ln| {
+    do iter_header(testfile) |ln| {
         match parse_error_pattern(ln) {
             Some(ep) => error_patterns.push(ep),
             None => ()

@@ -74,6 +74,8 @@ pub fn load_props(testfile: &Path) -> TestProps {
             Some(cl) => check_lines.push(cl),
             None => ()
         };
+
+        true
     };
     return TestProps {
         error_patterns: error_patterns,

@@ -87,17 +89,19 @@ pub fn load_props(testfile: &Path) -> TestProps {
 }

 pub fn is_test_ignored(config: &config, testfile: &Path) -> bool {
-    for iter_header(testfile) |ln| {
-        if parse_name_directive(ln, "xfail-test") { return true; }
-        if parse_name_directive(ln, xfail_target()) { return true; }
-        if config.mode == common::mode_pretty &&
-            parse_name_directive(ln, "xfail-pretty") { return true; }
-    };
-    return false;
-
     fn xfail_target() -> ~str {
         ~"xfail-" + os::SYSNAME
     }
+
+    let val = do iter_header(testfile) |ln| {
+        if parse_name_directive(ln, "xfail-test") { false }
+        else if parse_name_directive(ln, xfail_target()) { false }
+        else if config.mode == common::mode_pretty &&
+            parse_name_directive(ln, "xfail-pretty") { false }
+        else { true }
+    };
+
+    !val
 }

 fn iter_header(testfile: &Path, it: &fn(~str) -> bool) -> bool {

@@ -109,7 +113,7 @@ fn iter_header(testfile: &Path, it: &fn(~str) -> bool) -> bool {
         // module or function. This doesn't seem to be an optimization
         // with a warm page cache. Maybe with a cold one.
         if ln.starts_with("fn") || ln.starts_with("mod") {
-            return false;
+            return true;
         } else { if !(it(ln)) { return false; } }
     }
     return true;
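The compiletest changes above and the library changes that follow all make the same mechanical transformation: call sites that used the old `for` with a `&fn(...) -> bool` callback switch to a `do` expression, and the closure now has to return `true` to keep iterating (or `false` to stop), since the compiler no longer inserts that value. A minimal sketch of the pattern, again in the pre-1.0 dialect and with an invented `each_even` helper standing in for functions like `iter_header` or `uint::range_step`:

~~~
// Hypothetical internal iterator: calls `it` once per item and stops early
// as soon as the callback returns false.
fn each_even(limit: uint, it: &fn(uint) -> bool) -> bool {
    let mut i = 0;
    while i < limit {
        if !it(i) { return false; }
        i += 2;
    }
    true
}

// Before this commit: `for each_even(10) |n| { println(n.to_str()) }`
// After: spell the call with `do` and return the continuation flag yourself.
do each_even(10) |n| {
    println(n.to_str());
    true
};
~~~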
@@ -206,13 +206,14 @@ impl BigBitv {
     #[inline]
     pub fn equals(&self, b: &BigBitv, nbits: uint) -> bool {
         let len = b.storage.len();
-        for uint::iterate(0, len) |i| {
+        do uint::iterate(0, len) |i| {
             let mask = big_mask(nbits, i);
             if mask & self.storage[i] != mask & b.storage[i] {
-                return false;
+                false
+            } else {
+                true
             }
         }
-        return true;
     }
 }

@@ -358,7 +359,7 @@ impl Bitv {
     pub fn clear(&mut self) {
         match self.rep {
             Small(ref mut b) => b.clear(),
-            Big(ref mut s) => for s.each_storage() |w| { *w = 0u }
+            Big(ref mut s) => { do s.each_storage() |w| { *w = 0u; true }; }
         }
     }

@@ -367,7 +368,8 @@ impl Bitv {
     pub fn set_all(&mut self) {
         match self.rep {
             Small(ref mut b) => b.set_all(),
-            Big(ref mut s) => for s.each_storage() |w| { *w = !0u } }
+            Big(ref mut s) => { do s.each_storage() |w| { *w = !0u; true }; }
+        }
     }

     /// Invert all bits

@@ -375,7 +377,8 @@ impl Bitv {
     pub fn negate(&mut self) {
         match self.rep {
             Small(ref mut b) => b.negate(),
-            Big(ref mut s) => for s.each_storage() |w| { *w = !*w } }
+            Big(ref mut s) => { do s.each_storage() |w| { *w = !*w; true }; }
+        }
     }

     /**

@@ -718,11 +721,11 @@ impl BitvSet {
     }

     pub fn difference(&self, other: &BitvSet, f: &fn(&uint) -> bool) -> bool {
-        for self.common_iter(other).advance |(i, w1, w2)| {
+        foreach (i, w1, w2) in self.common_iter(other) {
             if !iterate_bits(i, w1 & !w2, |b| f(&b)) {
-                return false;
+                return false
             }
-        }
+        };
         /* everything we have that they don't also shows up */
         self.outlier_iter(other).advance(|(mine, i, w)|
             !mine || iterate_bits(i, w, |b| f(&b))

@@ -731,11 +734,11 @@ impl BitvSet {

     pub fn symmetric_difference(&self, other: &BitvSet,
                                 f: &fn(&uint) -> bool) -> bool {
-        for self.common_iter(other).advance |(i, w1, w2)| {
+        foreach (i, w1, w2) in self.common_iter(other) {
             if !iterate_bits(i, w1 ^ w2, |b| f(&b)) {
-                return false;
+                return false
             }
-        }
+        };
         self.outlier_iter(other).advance(|(_, i, w)| iterate_bits(i, w, |b| f(&b)))
     }

@@ -744,11 +747,11 @@ impl BitvSet {
     }

     pub fn union(&self, other: &BitvSet, f: &fn(&uint) -> bool) -> bool {
-        for self.common_iter(other).advance |(i, w1, w2)| {
+        foreach (i, w1, w2) in self.common_iter(other) {
             if !iterate_bits(i, w1 | w2, |b| f(&b)) {
-                return false;
+                return false
             }
-        }
+        };
         self.outlier_iter(other).advance(|(_, i, w)| iterate_bits(i, w, |b| f(&b)))
     }
 }

@@ -758,12 +761,12 @@ impl cmp::Eq for BitvSet {
         if self.size != other.size {
             return false;
         }
-        for self.common_iter(other).advance |(_, w1, w2)| {
+        foreach (_, w1, w2) in self.common_iter(other) {
             if w1 != w2 {
                 return false;
             }
         }
-        for self.outlier_iter(other).advance |(_, _, w)| {
+        foreach (_, _, w) in self.outlier_iter(other) {
             if w != 0 {
                 return false;
             }

@@ -798,7 +801,7 @@ impl Set<uint> for BitvSet {
     }

     fn is_subset(&self, other: &BitvSet) -> bool {
-        for self.common_iter(other).advance |(_, w1, w2)| {
+        foreach (_, w1, w2) in self.common_iter(other) {
             if w1 & w2 != w1 {
                 return false;
             }

@@ -806,7 +809,7 @@ impl Set<uint> for BitvSet {
         /* If anything is not ours, then everything is not ours so we're
            definitely a subset in that case. Otherwise if there's any stray
            ones that 'other' doesn't have, we're not a subset. */
-        for self.outlier_iter(other).advance |(mine, _, w)| {
+        foreach (mine, _, w) in self.outlier_iter(other) {
             if !mine {
                 return true;
             } else if w != 0 {
@@ -112,7 +112,7 @@ impl Engine512State {

         // Putting the message schedule inside the same loop as the round calculations allows for
         // the compiler to generate better code.
-        for uint::range_step(0, 64, 8) |t| {
+        do uint::range_step(0, 64, 8) |t| {
             schedule_round!(t + 16);
             schedule_round!(t + 17);
             schedule_round!(t + 18);

@@ -130,9 +130,10 @@ impl Engine512State {
             sha2_round!(d, e, f, g, h, a, b, c, K64, t + 5);
             sha2_round!(c, d, e, f, g, h, a, b, K64, t + 6);
             sha2_round!(b, c, d, e, f, g, h, a, K64, t + 7);
-        }
+            true
+        };

-        for uint::range_step(64, 80, 8) |t| {
+        do uint::range_step(64, 80, 8) |t| {
             sha2_round!(a, b, c, d, e, f, g, h, K64, t);
             sha2_round!(h, a, b, c, d, e, f, g, K64, t + 1);
             sha2_round!(g, h, a, b, c, d, e, f, K64, t + 2);

@@ -141,7 +142,8 @@ impl Engine512State {
             sha2_round!(d, e, f, g, h, a, b, c, K64, t + 5);
             sha2_round!(c, d, e, f, g, h, a, b, K64, t + 6);
             sha2_round!(b, c, d, e, f, g, h, a, K64, t + 7);
-        }
+            true
+        };

         self.H0 += a;
         self.H1 += b;

@@ -507,7 +509,7 @@ impl Engine256State {

         // Putting the message schedule inside the same loop as the round calculations allows for
         // the compiler to generate better code.
-        for uint::range_step(0, 48, 8) |t| {
+        do uint::range_step(0, 48, 8) |t| {
             schedule_round!(t + 16);
             schedule_round!(t + 17);
             schedule_round!(t + 18);

@@ -525,9 +527,10 @@ impl Engine256State {
             sha2_round!(d, e, f, g, h, a, b, c, K32, t + 5);
             sha2_round!(c, d, e, f, g, h, a, b, K32, t + 6);
             sha2_round!(b, c, d, e, f, g, h, a, K32, t + 7);
-        }
+            true
+        };

-        for uint::range_step(48, 64, 8) |t| {
+        do uint::range_step(48, 64, 8) |t| {
             sha2_round!(a, b, c, d, e, f, g, h, K32, t);
             sha2_round!(h, a, b, c, d, e, f, g, K32, t + 1);
             sha2_round!(g, h, a, b, c, d, e, f, K32, t + 2);

@@ -536,7 +539,8 @@ impl Engine256State {
             sha2_round!(d, e, f, g, h, a, b, c, K32, t + 5);
             sha2_round!(c, d, e, f, g, h, a, b, K32, t + 6);
             sha2_round!(b, c, d, e, f, g, h, a, K32, t + 7);
-        }
+            true
+        };

         self.H0 += a;
         self.H1 += b;
@@ -505,9 +505,10 @@ mod test {
         }

         let mut read_lines = ~[];
-        for input_vec(filenames) |line| {
+        do input_vec(filenames) |line| {
             read_lines.push(line.to_owned());
-        }
+            true
+        };
         assert_eq!(read_lines, all_lines);
     }

@@ -523,13 +524,14 @@ mod test {
            make_file(filename.get_ref(), contents);
         }

-        for input_vec_state(filenames) |line, state| {
+        do input_vec_state(filenames) |line, state| {
             let nums: ~[&str] = line.split_iter(' ').collect();
             let file_num = uint::from_str(nums[0]).get();
             let line_num = uint::from_str(nums[1]).get();
             assert_eq!(line_num, state.line_num_file);
             assert_eq!(file_num * 3 + line_num, state.line_num);
-        }
+            true
+        };
     }

     #[test]

@@ -543,7 +545,7 @@ mod test {
         make_file(filenames[2].get_ref(), [~"3", ~"4"]);

         let mut count = 0;
-        for input_vec_state(filenames.clone()) |line, state| {
+        do input_vec_state(filenames.clone()) |line, state| {
             let expected_path = match line {
                 "1" | "2" => filenames[0].clone(),
                 "3" | "4" => filenames[2].clone(),

@@ -551,7 +553,8 @@ mod test {
             };
             assert_eq!(state.current_path.clone(), expected_path);
             count += 1;
-        }
+            true
+        };
         assert_eq!(count, 4);
     }

@@ -570,9 +573,10 @@ mod test {
         wr.write_str("3\n4");

         let mut lines = ~[];
-        for input_vec(~[f1, f2]) |line| {
+        do input_vec(~[f1, f2]) |line| {
             lines.push(line.to_owned());
-        }
+            true
+        };
         assert_eq!(lines, ~[~"1", ~"2", ~"3", ~"4"]);
     }

@@ -610,8 +614,9 @@ mod test {
     #[test]
     #[should_fail]
     fn test_input_vec_missing_file() {
-        for input_vec(pathify([~"this/file/doesnt/exist"], true)) |line| {
+        do input_vec(pathify([~"this/file/doesnt/exist"], true)) |line| {
             println(line);
-        }
+            true
+        };
     }
 }
@@ -155,13 +155,12 @@ impl<V> SmallIntMap<V> {

     /// Visit all key-value pairs in reverse order
     pub fn each_reverse<'a>(&'a self, it: &fn(uint, &'a V) -> bool) -> bool {
-        for uint::range_rev(self.v.len(), 0) |i| {
+        do uint::range_rev(self.v.len(), 0) |i| {
             match self.v[i] {
-                Some(ref elt) => if !it(i, elt) { return false; },
-                None => ()
+                Some(ref elt) => it(i, elt),
+                None => true
             }
         }
-        return true;
     }

     pub fn get<'a>(&'a self, key: &uint) -> &'a V {
@@ -148,7 +148,7 @@ struct crate_hash {
 pub fn get_dep_hashes(cstore: &CStore) -> ~[@str] {
     let mut result = ~[];

-    for cstore.extern_mod_crate_map.each_value |&cnum| {
+    foreach (_, &cnum) in cstore.extern_mod_crate_map.iter() {
         let cdata = cstore::get_crate_data(cstore, cnum);
         let hash = decoder::get_crate_hash(cdata.data);
         let vers = decoder::get_crate_vers(cdata.data);
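The metadata decoder changes that follow repeat one more variant of the rewrite: loops that used to do an early `return value;` from inside the callback now capture the value in a mutable local, return `false` from the closure to stop the walk, and yield the local afterwards (see `lookup_hash`, `item_method_sort`, and `item_parent_item` below). A sketch of that shape, with invented names and in the same pre-1.0 dialect:

~~~
// Hypothetical walker in the style of `reader::tagged_docs`.
fn each_item(xs: &[int], it: &fn(int) -> bool) -> bool {
    for x in xs.iter() {
        if !it(*x) { return false; }
    }
    true
}

// An early `return Some(..)` out of the old `for` body becomes this shape.
fn first_negative(xs: &[int]) -> Option<int> {
    let mut ret = None;
    do each_item(xs) |x| {
        if x < 0 {
            ret = Some(x);
            false   // stop iterating
        } else {
            true    // keep going
        }
    };
    ret
}
~~~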
@@ -60,13 +60,18 @@ fn lookup_hash(d: ebml::Doc, eq_fn: &fn(x:&[u8]) -> bool, hash: uint) ->
     let tagged_doc = reader::doc_at(d.data, pos);

     let belt = tag_index_buckets_bucket_elt;
-    for reader::tagged_docs(tagged_doc.doc, belt) |elt| {
+
+    let mut ret = None;
+    do reader::tagged_docs(tagged_doc.doc, belt) |elt| {
         let pos = io::u64_from_be_bytes(*elt.data, elt.start, 4u) as uint;
         if eq_fn(elt.data.slice(elt.start + 4u, elt.end)) {
-            return Some(reader::doc_at(d.data, pos).doc);
+            ret = Some(reader::doc_at(d.data, pos).doc);
+            false
+        } else {
+            true
         }
     };
-    None
+    ret
 }

 pub type GetCrateDataCb<'self> = &'self fn(ast::CrateNum) -> cmd;

@@ -160,10 +165,12 @@ fn item_visibility(item: ebml::Doc) -> ast::visibility {
 }

 fn item_method_sort(item: ebml::Doc) -> char {
-    for reader::tagged_docs(item, tag_item_trait_method_sort) |doc| {
-        return doc.as_str_slice()[0] as char;
-    }
-    return 'r';
+    let mut ret = 'r';
+    do reader::tagged_docs(item, tag_item_trait_method_sort) |doc| {
+        ret = doc.as_str_slice()[0] as char;
+        false
+    };
+    ret
 }

 fn item_symbol(item: ebml::Doc) -> ~str {

@@ -171,10 +178,12 @@ fn item_symbol(item: ebml::Doc) -> ~str {
 }

 fn item_parent_item(d: ebml::Doc) -> Option<ast::def_id> {
-    for reader::tagged_docs(d, tag_items_data_parent_item) |did| {
-        return Some(reader::with_doc_data(did, parse_def_id));
-    }
-    None
+    let mut ret = None;
+    do reader::tagged_docs(d, tag_items_data_parent_item) |did| {
+        ret = Some(reader::with_doc_data(did, parse_def_id));
+        false
+    };
+    ret
 }

 fn item_reqd_and_translated_parent_item(cnum: ast::CrateNum,

@@ -195,12 +204,7 @@ fn get_provided_source(d: ebml::Doc, cdata: cmd) -> Option<ast::def_id> {
 }

 fn each_reexport(d: ebml::Doc, f: &fn(ebml::Doc) -> bool) -> bool {
-    for reader::tagged_docs(d, tag_items_data_item_reexport) |reexport_doc| {
-        if !f(reexport_doc) {
-            return false;
-        }
-    }
-    return true;
+    reader::tagged_docs(d, tag_items_data_item_reexport, f)
 }

 fn variant_disr_val(d: ebml::Doc) -> Option<uint> {

@@ -250,12 +254,13 @@ fn item_ty_param_defs(item: ebml::Doc, tcx: ty::ctxt, cdata: cmd,
                       tag: uint)
                       -> @~[ty::TypeParameterDef] {
     let mut bounds = ~[];
-    for reader::tagged_docs(item, tag) |p| {
+    do reader::tagged_docs(item, tag) |p| {
         let bd = parse_type_param_def_data(
             *p.data, p.start, cdata.cnum, tcx,
             |_, did| translate_def_id(cdata, did));
         bounds.push(bd);
-    }
+        true
+    };
     @bounds
 }

@@ -276,9 +281,10 @@ fn item_ty_param_count(item: ebml::Doc) -> uint {
 fn enum_variant_ids(item: ebml::Doc, cdata: cmd) -> ~[ast::def_id] {
     let mut ids: ~[ast::def_id] = ~[];
     let v = tag_items_data_item_variant;
-    for reader::tagged_docs(item, v) |p| {
+    do reader::tagged_docs(item, v) |p| {
         let ext = reader::with_doc_data(p, parse_def_id);
         ids.push(ast::def_id { crate: cdata.cnum, node: ext.node });
+        true
     };
     return ids;
 }

@@ -290,7 +296,7 @@ fn item_path(item_doc: ebml::Doc) -> ast_map::path {
     let len = reader::doc_as_u32(len_doc) as uint;

     let mut result = vec::with_capacity(len);
-    for reader::docs(path_doc) |tag, elt_doc| {
+    do reader::docs(path_doc) |tag, elt_doc| {
         if tag == tag_path_elt_mod {
             let str = elt_doc.as_str_slice();
             result.push(ast_map::path_mod(token::str_to_ident(str)));

@@ -300,7 +306,8 @@ fn item_path(item_doc: ebml::Doc) -> ast_map::path {
         } else {
             // ignore tag_path_len element
         }
-    }
+        true
+    };

     return result;
 }

@@ -432,13 +439,13 @@ pub fn get_impl_method(intr: @ident_interner, cdata: cmd, id: ast::NodeId,
                        name: ast::ident) -> Option<ast::def_id> {
     let items = reader::get_doc(reader::Doc(cdata.data), tag_items);
     let mut found = None;
-    for reader::tagged_docs(find_item(id, items), tag_item_impl_method)
-        |mid| {
-            let m_did = reader::with_doc_data(mid, parse_def_id);
-            if item_name(intr, find_item(m_did.node, items)) == name {
-                found = Some(translate_def_id(cdata, m_did));
-            }
+    do reader::tagged_docs(find_item(id, items), tag_item_impl_method) |mid| {
+        let m_did = reader::with_doc_data(mid, parse_def_id);
+        if item_name(intr, find_item(m_did.node, items)) == name {
+            found = Some(translate_def_id(cdata, m_did));
         }
+        true
+    };
     found
 }

@@ -465,18 +472,15 @@ fn def_like_to_def(def_like: def_like) -> ast::def {
 pub fn each_lang_item(cdata: cmd, f: &fn(ast::NodeId, uint) -> bool) -> bool {
     let root = reader::Doc(cdata.data);
     let lang_items = reader::get_doc(root, tag_lang_items);
-    for reader::tagged_docs(lang_items, tag_lang_items_item) |item_doc| {
+    do reader::tagged_docs(lang_items, tag_lang_items_item) |item_doc| {
         let id_doc = reader::get_doc(item_doc, tag_lang_items_item_id);
         let id = reader::doc_as_u32(id_doc) as uint;
         let node_id_doc = reader::get_doc(item_doc,
                                           tag_lang_items_item_node_id);
         let node_id = reader::doc_as_u32(node_id_doc) as ast::NodeId;

-        if !f(node_id, id) {
-            return false;
-        }
+        f(node_id, id)
     }
-    return true;
 }

 struct EachItemContext<'self> {

@@ -581,7 +585,7 @@ impl<'self> EachItemContext<'self> {
         let mut continue = true;

         // Iterate over all children.
-        for reader::tagged_docs(item_doc, tag_mod_child) |child_info_doc| {
+        do reader::tagged_docs(item_doc, tag_mod_child) |child_info_doc| {
             let child_def_id = reader::with_doc_data(child_info_doc,
                                                      parse_def_id);
             let child_def_id = translate_def_id(self.cdata, child_def_id);

@@ -621,20 +625,17 @@ impl<'self> EachItemContext<'self> {
                     continue = self.process_item_and_pop_name(child_item_doc,
                                                               child_def_id,
                                                               old_len);
-
-                    if !continue {
-                        break
-                    }
                 }
             }
-        }
+            continue
+        };

         if !continue {
             return false
         }

         // Iterate over reexports.
-        for each_reexport(item_doc) |reexport_doc| {
+        do each_reexport(item_doc) |reexport_doc| {
             let def_id_doc = reader::get_doc(
                 reexport_doc,
                 tag_items_data_item_reexport_def_id);

@@ -678,10 +679,8 @@ impl<'self> EachItemContext<'self> {
                 }
             }

-            if !continue {
-                break
-            }
-        }
+            continue
+        };

         continue
     }

@@ -824,10 +823,11 @@ fn get_explicit_self(item: ebml::Doc) -> ast::explicit_self_ {
 fn item_impl_methods(intr: @ident_interner, cdata: cmd, item: ebml::Doc,
                      tcx: ty::ctxt) -> ~[@ty::Method] {
     let mut rslt = ~[];
-    for reader::tagged_docs(item, tag_item_impl_method) |doc| {
+    do reader::tagged_docs(item, tag_item_impl_method) |doc| {
         let m_did = reader::with_doc_data(doc, parse_def_id);
         rslt.push(@get_method(intr, cdata, m_did.node, tcx));
-    }
+        true
+    };

     rslt
 }

@@ -896,9 +896,10 @@ pub fn get_trait_method_def_ids(cdata: cmd,
     let data = cdata.data;
     let item = lookup_item(id, data);
     let mut result = ~[];
-    for reader::tagged_docs(item, tag_item_trait_method) |mth| {
+    do reader::tagged_docs(item, tag_item_trait_method) |mth| {
         result.push(item_def_id(mth, cdata));
-    }
+        true
+    };
     result
 }

@@ -909,14 +910,15 @@ pub fn get_provided_trait_methods(intr: @ident_interner, cdata: cmd,
     let item = lookup_item(id, data);
     let mut result = ~[];

-    for reader::tagged_docs(item, tag_item_trait_method) |mth_id| {
+    do reader::tagged_docs(item, tag_item_trait_method) |mth_id| {
         let did = item_def_id(mth_id, cdata);
         let mth = lookup_item(did.node, data);

-        if item_method_sort(mth) != 'p' { loop; }
-
-        result.push(@get_method(intr, cdata, did.node, tcx));
-    }
+        if item_method_sort(mth) == 'p' {
+            result.push(@get_method(intr, cdata, did.node, tcx));
+        }
+        true
+    };

     return result;
 }

@@ -926,9 +928,10 @@ pub fn get_supertraits(cdata: cmd, id: ast::NodeId, tcx: ty::ctxt)
                     -> ~[@ty::TraitRef] {
     let mut results = ~[];
     let item_doc = lookup_item(id, cdata.data);
-    for reader::tagged_docs(item_doc, tag_item_super_trait_ref) |trait_doc| {
+    do reader::tagged_docs(item_doc, tag_item_super_trait_ref) |trait_doc| {
         results.push(@doc_trait_ref(trait_doc, tcx, cdata));
-    }
+        true
+    };
     return results;
 }

@@ -939,11 +942,13 @@ pub fn get_type_name_if_impl(cdata: cmd,
         return None;
     }

-    for reader::tagged_docs(item, tag_item_impl_type_basename) |doc| {
-        return Some(token::str_to_ident(doc.as_str_slice()));
-    }
+    let mut ret = None;
+    do reader::tagged_docs(item, tag_item_impl_type_basename) |doc| {
+        ret = Some(token::str_to_ident(doc.as_str_slice()));
+        false
+    };

-    return None;
+    ret
 }

 pub fn get_static_methods_if_impl(intr: @ident_interner,

@@ -956,14 +961,17 @@ pub fn get_static_methods_if_impl(intr: @ident_interner,
     }

     // If this impl implements a trait, don't consider it.
-    for reader::tagged_docs(item, tag_item_trait_ref) |_doc| {
-        return None;
-    }
+    let ret = do reader::tagged_docs(item, tag_item_trait_ref) |_doc| {
+        false
+    };
+
+    if !ret { return None }

     let mut impl_method_ids = ~[];
-    for reader::tagged_docs(item, tag_item_impl_method) |impl_method_doc| {
+    do reader::tagged_docs(item, tag_item_impl_method) |impl_method_doc| {
         impl_method_ids.push(reader::with_doc_data(impl_method_doc, parse_def_id));
-    }
+        true
+    };

     let mut static_impl_methods = ~[];
     foreach impl_method_id in impl_method_ids.iter() {

@@ -996,11 +1004,13 @@ pub fn get_item_attrs(cdata: cmd,
                       f: &fn(~[@ast::MetaItem])) {

     let item = lookup_item(node_id, cdata.data);
-    for reader::tagged_docs(item, tag_attributes) |attributes| {
-        for reader::tagged_docs(attributes, tag_attribute) |attribute| {
+    do reader::tagged_docs(item, tag_attributes) |attributes| {
+        do reader::tagged_docs(attributes, tag_attribute) |attribute| {
             f(get_meta_items(attribute));
-        }
-    }
+            true
+        };
+        true
+    };
 }

 fn struct_field_family_to_visibility(family: Family) -> ast::visibility {

@@ -1017,7 +1027,7 @@ pub fn get_struct_fields(intr: @ident_interner, cdata: cmd, id: ast::NodeId)
     let data = cdata.data;
     let item = lookup_item(id, data);
     let mut result = ~[];
-    for reader::tagged_docs(item, tag_item_field) |an_item| {
+    do reader::tagged_docs(item, tag_item_field) |an_item| {
         let f = item_family(an_item);
         if f == PublicField || f == PrivateField || f == InheritedField {
             let name = item_name(intr, an_item);

@@ -1028,15 +1038,17 @@ pub fn get_struct_fields(intr: @ident_interner, cdata: cmd, id: ast::NodeId)
                 struct_field_family_to_visibility(f),
             });
         }
-    }
-    for reader::tagged_docs(item, tag_item_unnamed_field) |an_item| {
+        true
+    };
+    do reader::tagged_docs(item, tag_item_unnamed_field) |an_item| {
         let did = item_def_id(an_item, cdata);
         result.push(ty::field_ty {
             ident: special_idents::unnamed_field,
             id: did,
             vis: ast::inherited,
         });
-    }
+        true
+    };
     result
 }

@@ -1102,12 +1114,13 @@ fn item_family_to_str(fam: Family) -> ~str {

 fn get_meta_items(md: ebml::Doc) -> ~[@ast::MetaItem] {
     let mut items: ~[@ast::MetaItem] = ~[];
-    for reader::tagged_docs(md, tag_meta_item_word) |meta_item_doc| {
+    do reader::tagged_docs(md, tag_meta_item_word) |meta_item_doc| {
         let nd = reader::get_doc(meta_item_doc, tag_meta_item_name);
         let n = nd.as_str_slice().to_managed();
         items.push(attr::mk_word_item(n));
+        true
     };
-    for reader::tagged_docs(md, tag_meta_item_name_value) |meta_item_doc| {
+    do reader::tagged_docs(md, tag_meta_item_name_value) |meta_item_doc| {
         let nd = reader::get_doc(meta_item_doc, tag_meta_item_name);
         let vd = reader::get_doc(meta_item_doc, tag_meta_item_value);
         let n = nd.as_str_slice().to_managed();

@@ -1115,12 +1128,14 @@ fn get_meta_items(md: ebml::Doc) -> ~[@ast::MetaItem] {
         // FIXME (#623): Should be able to decode MetaNameValue variants,
         // but currently the encoder just drops them
         items.push(attr::mk_name_value_item_str(n, v));
+        true
     };
-    for reader::tagged_docs(md, tag_meta_item_list) |meta_item_doc| {
+    do reader::tagged_docs(md, tag_meta_item_list) |meta_item_doc| {
         let nd = reader::get_doc(meta_item_doc, tag_meta_item_name);
         let n = nd.as_str_slice().to_managed();
         let subitems = get_meta_items(meta_item_doc);
         items.push(attr::mk_list_item(n, subitems));
+        true
     };
     return items;
 }

@@ -1129,7 +1144,7 @@ fn get_attributes(md: ebml::Doc) -> ~[ast::Attribute] {
     let mut attrs: ~[ast::Attribute] = ~[];
     match reader::maybe_get_doc(md, tag_attributes) {
         option::Some(attrs_d) => {
-            for reader::tagged_docs(attrs_d, tag_attribute) |attr_doc| {
+            do reader::tagged_docs(attrs_d, tag_attribute) |attr_doc| {
                 let meta_items = get_meta_items(attr_doc);
                 // Currently it's only possible to have a single meta item on
                 // an attribute

@@ -1144,6 +1159,7 @@ fn get_attributes(md: ebml::Doc) -> ~[ast::Attribute] {
                     },
                     span: codemap::dummy_sp()
                 });
+                true
             };
         }
         option::None => ()

@@ -1193,12 +1209,13 @@ pub fn get_crate_deps(data: @~[u8]) -> ~[crate_dep] {
         let d = reader::get_doc(doc, tag_);
         d.as_str_slice().to_managed()
     }
-    for reader::tagged_docs(depsdoc, tag_crate_dep) |depdoc| {
+    do reader::tagged_docs(depsdoc, tag_crate_dep) |depdoc| {
         deps.push(crate_dep {cnum: crate_num,
                   name: token::str_to_ident(docstr(depdoc, tag_crate_dep_name)),
                   vers: docstr(depdoc, tag_crate_dep_vers),
                   hash: docstr(depdoc, tag_crate_dep_hash)});
         crate_num += 1;
+        true
     };
     return deps;
 }

@@ -1235,7 +1252,7 @@ pub fn get_crate_vers(data: @~[u8]) -> @str {
 fn iter_crate_items(intr: @ident_interner, cdata: cmd,
                     get_crate_data: GetCrateDataCb,
                     proc: &fn(path: &str, ast::def_id)) {
-    for each_path(intr, cdata, get_crate_data) |path_string, def_like, _| {
+    do each_path(intr, cdata, get_crate_data) |path_string, def_like, _| {
         match def_like {
             dl_impl(*) | dl_field => {}
             dl_def(def) => {

@@ -1243,7 +1260,8 @@ fn iter_crate_items(intr: @ident_interner, cdata: cmd,
                      ast_util::def_id_of_def(def))
             }
         }
-    }
+        true
+    };
 }

 pub fn list_crate_metadata(intr: @ident_interner, bytes: @~[u8],

@@ -1273,8 +1291,9 @@ pub fn translate_def_id(cdata: cmd, did: ast::def_id) -> ast::def_id {
 pub fn get_link_args_for_crate(cdata: cmd) -> ~[~str] {
     let link_args = reader::get_doc(reader::Doc(cdata.data), tag_link_args);
     let mut result = ~[];
-    for reader::tagged_docs(link_args, tag_link_args_arg) |arg_doc| {
+    do reader::tagged_docs(link_args, tag_link_args_arg) |arg_doc| {
         result.push(arg_doc.as_str());
-    }
+        true
+    };
     result
 }
@@ -558,11 +558,12 @@ fn encode_info_for_mod(ecx: &EncodeContext,
         ebml_w.wr_str(def_to_str(local_def(item.id)));
         ebml_w.end_tag();

-        for each_auxiliary_node_id(*item) |auxiliary_node_id| {
+        do each_auxiliary_node_id(*item) |auxiliary_node_id| {
             ebml_w.start_tag(tag_mod_child);
             ebml_w.wr_str(def_to_str(local_def(auxiliary_node_id)));
             ebml_w.end_tag();
-        }
+            true
+        };

         match item.node {
             item_impl(*) => {

@@ -1377,9 +1378,7 @@ fn synthesize_crate_attrs(ecx: &EncodeContext,

     let mut meta_items = ~[name_item, vers_item];

-    for items.iter()
-        .filter(|mi| "name" != mi.name() && "vers" != mi.name())
-        .advance |&mi| {
+    foreach &mi in items.iter().filter(|mi| "name" != mi.name() && "vers" != mi.name()) {
         meta_items.push(mi);
     }
     let link_item = attr::mk_list_item(@"link", meta_items);

@@ -1454,26 +1453,24 @@ fn encode_crate_deps(ecx: &EncodeContext,
 fn encode_lang_items(ecx: &EncodeContext, ebml_w: &mut writer::Encoder) {
     ebml_w.start_tag(tag_lang_items);

-    for ecx.tcx.lang_items.each_item |def_id, i| {
-        let def_id = match def_id {
-            Some(id) => id, None => { loop }
-        };
-        if def_id.crate != LOCAL_CRATE {
-            loop;
-        }
-
-        ebml_w.start_tag(tag_lang_items_item);
-
-        ebml_w.start_tag(tag_lang_items_item_id);
-        ebml_w.writer.write_be_u32(i as u32);
-        ebml_w.end_tag();   // tag_lang_items_item_id
-
-        ebml_w.start_tag(tag_lang_items_item_node_id);
-        ebml_w.writer.write_be_u32(def_id.node as u32);
-        ebml_w.end_tag();   // tag_lang_items_item_node_id
-
-        ebml_w.end_tag();   // tag_lang_items_item
-    }
+    do ecx.tcx.lang_items.each_item |def_id, i| {
+        foreach id in def_id.iter() {
+            if id.crate == LOCAL_CRATE {
+                ebml_w.start_tag(tag_lang_items_item);
+
+                ebml_w.start_tag(tag_lang_items_item_id);
+                ebml_w.writer.write_be_u32(i as u32);
+                ebml_w.end_tag();   // tag_lang_items_item_id
+
+                ebml_w.start_tag(tag_lang_items_item_node_id);
+                ebml_w.writer.write_be_u32(id.node as u32);
+                ebml_w.end_tag();   // tag_lang_items_item_node_id
+
+                ebml_w.end_tag();   // tag_lang_items_item
+            }
+        }
+        true
+    };

     ebml_w.end_tag();   // tag_lang_items
 }

@@ -1501,11 +1498,12 @@ fn encode_misc_info(ecx: &EncodeContext,
         ebml_w.wr_str(def_to_str(local_def(item.id)));
         ebml_w.end_tag();

-        for each_auxiliary_node_id(item) |auxiliary_node_id| {
+        do each_auxiliary_node_id(item) |auxiliary_node_id| {
             ebml_w.start_tag(tag_mod_child);
             ebml_w.wr_str(def_to_str(local_def(auxiliary_node_id)));
             ebml_w.end_tag();
-        }
+            true
+        };
     }

     // Encode reexports for the root module.
@@ -88,7 +88,7 @@ pub fn mk_filesearch(maybe_sysroot: &Option<@Path>,

 pub fn search<T>(filesearch: @FileSearch, pick: pick<T>) -> Option<T> {
     let mut rslt = None;
-    for filesearch.for_each_lib_search_path() |lib_search_path| {
+    do filesearch.for_each_lib_search_path() |lib_search_path| {
         debug!("searching %s", lib_search_path.to_str());
         let r = os::list_dir_path(lib_search_path);
         foreach path in r.iter() {

@@ -102,8 +102,8 @@ pub fn search<T>(filesearch: @FileSearch, pick: pick<T>) -> Option<T> {
                 debug!("rejected %s", path.to_str());
             }
         }
-        if rslt.is_some() { break; }
-    }
+        rslt.is_none()
+    };
     return rslt;
 }
@@ -371,10 +371,11 @@ fn enc_purity(w: @io::Writer, p: purity) {

 fn enc_abi_set(w: @io::Writer, abis: AbiSet) {
     w.write_char('[');
-    for abis.each |abi| {
+    do abis.each |abi| {
         w.write_str(abi.name());
         w.write_char(',');
-    }
+        true
+    };
     w.write_char(']')
 }

@@ -412,14 +413,15 @@ fn enc_fn_sig(w: @io::Writer, cx: @ctxt, fsig: &ty::FnSig) {
 }

 fn enc_bounds(w: @io::Writer, cx: @ctxt, bs: &ty::ParamBounds) {
-    for bs.builtin_bounds.each |bound| {
+    do bs.builtin_bounds.each |bound| {
         match bound {
             ty::BoundSend => w.write_char('S'),
             ty::BoundFreeze => w.write_char('K'),
             ty::BoundStatic => w.write_char('O'),
             ty::BoundSized => w.write_char('Z'),
         }
-    }
+        true
+    };

     foreach &tp in bs.trait_bounds.iter() {
         w.write_char('I');
@@ -1139,7 +1139,7 @@ fn decode_side_tables(xcx: @ExtendedDecodeContext,
                       ast_doc: ebml::Doc) {
     let dcx = xcx.dcx;
     let tbl_doc = ast_doc.get(c::tag_table as uint);
-    for reader::docs(tbl_doc) |tag, entry_doc| {
+    do reader::docs(tbl_doc) |tag, entry_doc| {
         let id0 = entry_doc.get(c::tag_table_id as uint).as_int();
         let id = xcx.tr_id(id0);

@@ -1218,7 +1218,8 @@ fn decode_side_tables(xcx: @ExtendedDecodeContext,
         }

         debug!(">< Side table doc loaded");
-    }
+        true
+    };
 }

 // ______________________________________________________________________
@@ -83,13 +83,10 @@ impl<'self> CheckLoanCtxt<'self> {
         //! are issued for future scopes and thus they may have been
         //! *issued* but not yet be in effect.

-        for self.dfcx_loans.each_bit_on_entry_frozen(scope_id) |loan_index| {
+        do self.dfcx_loans.each_bit_on_entry_frozen(scope_id) |loan_index| {
             let loan = &self.all_loans[loan_index];
-            if !op(loan) {
-                return false;
-            }
+            op(loan)
         }
-        return true;
     }

     pub fn each_in_scope_loan(&self,

@@ -100,14 +97,13 @@ impl<'self> CheckLoanCtxt<'self> {
         //! currently in scope.

         let region_maps = self.tcx().region_maps;
-        for self.each_issued_loan(scope_id) |loan| {
+        do self.each_issued_loan(scope_id) |loan| {
             if region_maps.is_subscope_of(scope_id, loan.kill_scope) {
-                if !op(loan) {
-                    return false;
-                }
+                op(loan)
+            } else {
+                true
             }
         }
-        return true;
     }

     pub fn each_in_scope_restriction(&self,

@@ -118,16 +114,18 @@ impl<'self> CheckLoanCtxt<'self> {
         //! Iterates through all the in-scope restrictions for the
         //! given `loan_path`

-        for self.each_in_scope_loan(scope_id) |loan| {
+        do self.each_in_scope_loan(scope_id) |loan| {
+            let mut ret = true;
             foreach restr in loan.restrictions.iter() {
                 if restr.loan_path == loan_path {
                     if !op(loan, restr) {
-                        return false;
+                        ret = false;
+                        break;
                     }
                 }
             }
+            ret
         }
-        return true;
     }

     pub fn loans_generated_by(&self, scope_id: ast::NodeId) -> ~[uint] {

@@ -135,9 +133,10 @@ impl<'self> CheckLoanCtxt<'self> {
         //! we encounter `scope_id`.

         let mut result = ~[];
-        for self.dfcx_loans.each_gen_bit_frozen(scope_id) |loan_index| {
+        do self.dfcx_loans.each_gen_bit_frozen(scope_id) |loan_index| {
             result.push(loan_index);
-        }
+            true
+        };
         return result;
     }

@@ -152,12 +151,13 @@ impl<'self> CheckLoanCtxt<'self> {
         let new_loan_indices = self.loans_generated_by(scope_id);
         debug!("new_loan_indices = %?", new_loan_indices);

-        for self.each_issued_loan(scope_id) |issued_loan| {
+        do self.each_issued_loan(scope_id) |issued_loan| {
             foreach &new_loan_index in new_loan_indices.iter() {
                 let new_loan = &self.all_loans[new_loan_index];
                 self.report_error_if_loans_conflict(issued_loan, new_loan);
             }
-        }
+            true
+        };

         foreach i in range(0u, new_loan_indices.len()) {
             let old_loan = &self.all_loans[new_loan_indices[i]];

@@ -268,15 +268,15 @@ impl<'self> CheckLoanCtxt<'self> {

         debug!("check_if_path_is_moved(id=%?, use_kind=%?, lp=%s)",
                id, use_kind, lp.repr(self.bccx.tcx));
-        for self.move_data.each_move_of(id, lp) |move, moved_lp| {
+        do self.move_data.each_move_of(id, lp) |move, moved_lp| {
             self.bccx.report_use_of_moved_value(
                 span,
                 use_kind,
                 lp,
                 move,
                 moved_lp);
-            return;
-        }
+            false
+        };
     }

     pub fn check_assignment(&self, expr: @ast::expr) {

@@ -308,13 +308,13 @@ impl<'self> CheckLoanCtxt<'self> {
         if self.is_local_variable(cmt) {
             assert!(cmt.mutbl.is_immutable()); // no "const" locals
             let lp = opt_loan_path(cmt).get();
-            for self.move_data.each_assignment_of(expr.id, lp) |assign| {
+            do self.move_data.each_assignment_of(expr.id, lp) |assign| {
                 self.bccx.report_reassigned_immutable_variable(
                     expr.span,
                     lp,
                     assign);
-                return;
-            }
+                false
+            };
             return;
         }

@@ -462,14 +462,18 @@ impl<'self> CheckLoanCtxt<'self> {
         // `RESTR_MUTATE` restriction whenever the contents of an
         // owned pointer are borrowed, and hence while `v[*]` is not
        // restricted from being written, `v` is.
-        for this.each_in_scope_restriction(expr.id, loan_path)
+        let cont = do this.each_in_scope_restriction(expr.id, loan_path)
             |loan, restr|
{
|
{
|
||||||
if restr.set.intersects(RESTR_MUTATE) {
|
if restr.set.intersects(RESTR_MUTATE) {
|
||||||
this.report_illegal_mutation(expr, loan_path, loan);
|
this.report_illegal_mutation(expr, loan_path, loan);
|
||||||
return false;
|
false
|
||||||
|
} else {
|
||||||
|
true
|
||||||
}
|
}
|
||||||
}
|
};
|
||||||
|
|
||||||
|
if !cont { return false }
|
||||||
|
|
||||||
// The previous code handled assignments to paths that
|
// The previous code handled assignments to paths that
|
||||||
// have been restricted. This covers paths that have been
|
// have been restricted. This covers paths that have been
|
||||||
|
@ -525,12 +529,16 @@ impl<'self> CheckLoanCtxt<'self> {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check for a non-const loan of `loan_path`
|
// Check for a non-const loan of `loan_path`
|
||||||
for this.each_in_scope_loan(expr.id) |loan| {
|
let cont = do this.each_in_scope_loan(expr.id) |loan| {
|
||||||
if loan.loan_path == loan_path && loan.mutbl != m_const {
|
if loan.loan_path == loan_path && loan.mutbl != m_const {
|
||||||
this.report_illegal_mutation(expr, full_loan_path, loan);
|
this.report_illegal_mutation(expr, full_loan_path, loan);
|
||||||
return false;
|
false
|
||||||
|
} else {
|
||||||
|
true
|
||||||
}
|
}
|
||||||
}
|
};
|
||||||
|
|
||||||
|
if !cont { return false }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -563,7 +571,7 @@ impl<'self> CheckLoanCtxt<'self> {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn check_move_out_from_id(&self, id: ast::NodeId, span: span) {
|
fn check_move_out_from_id(&self, id: ast::NodeId, span: span) {
|
||||||
for self.move_data.each_path_moved_by(id) |_, move_path| {
|
do self.move_data.each_path_moved_by(id) |_, move_path| {
|
||||||
match self.analyze_move_out_from(id, move_path) {
|
match self.analyze_move_out_from(id, move_path) {
|
||||||
MoveOk => {}
|
MoveOk => {}
|
||||||
MoveWhileBorrowed(loan_path, loan_span) => {
|
MoveWhileBorrowed(loan_path, loan_span) => {
|
||||||
|
@ -578,7 +586,8 @@ impl<'self> CheckLoanCtxt<'self> {
|
||||||
self.bccx.loan_path_to_str(loan_path)));
|
self.bccx.loan_path_to_str(loan_path)));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
true
|
||||||
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn analyze_move_out_from(&self,
|
pub fn analyze_move_out_from(&self,
|
||||||
|
@ -589,13 +598,16 @@ impl<'self> CheckLoanCtxt<'self> {
|
||||||
|
|
||||||
// FIXME(#4384) inadequare if/when we permit `move a.b`
|
// FIXME(#4384) inadequare if/when we permit `move a.b`
|
||||||
|
|
||||||
// check for a conflicting loan:
|
let mut ret = MoveOk;
|
||||||
for self.each_in_scope_restriction(expr_id, move_path) |loan, _| {
|
|
||||||
// Any restriction prevents moves.
|
|
||||||
return MoveWhileBorrowed(loan.loan_path, loan.span);
|
|
||||||
}
|
|
||||||
|
|
||||||
MoveOk
|
// check for a conflicting loan:
|
||||||
|
do self.each_in_scope_restriction(expr_id, move_path) |loan, _| {
|
||||||
|
// Any restriction prevents moves.
|
||||||
|
ret = MoveWhileBorrowed(loan.loan_path, loan.span);
|
||||||
|
false
|
||||||
|
};
|
||||||
|
|
||||||
|
ret
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn check_call(&self,
|
pub fn check_call(&self,
|
||||||
|
|
|
@ -277,9 +277,10 @@ impl MoveData {
|
||||||
|
|
||||||
match self.path_map.find_copy(&lp) {
|
match self.path_map.find_copy(&lp) {
|
||||||
Some(index) => {
|
Some(index) => {
|
||||||
for self.each_base_path(index) |p| {
|
do self.each_base_path(index) |p| {
|
||||||
result.push(p);
|
result.push(p);
|
||||||
}
|
true
|
||||||
|
};
|
||||||
}
|
}
|
||||||
None => {
|
None => {
|
||||||
match *lp {
|
match *lp {
|
||||||
|
@ -446,25 +447,29 @@ impl MoveData {
|
||||||
fn each_applicable_move(&self,
|
fn each_applicable_move(&self,
|
||||||
index0: MovePathIndex,
|
index0: MovePathIndex,
|
||||||
f: &fn(MoveIndex) -> bool) -> bool {
|
f: &fn(MoveIndex) -> bool) -> bool {
|
||||||
for self.each_extending_path(index0) |index| {
|
let mut ret = true;
|
||||||
|
do self.each_extending_path(index0) |index| {
|
||||||
let mut p = self.path(index).first_move;
|
let mut p = self.path(index).first_move;
|
||||||
while p != InvalidMoveIndex {
|
while p != InvalidMoveIndex {
|
||||||
if !f(p) {
|
if !f(p) {
|
||||||
return false;
|
ret = false;
|
||||||
|
break;
|
||||||
}
|
}
|
||||||
p = self.move(p).next_move;
|
p = self.move(p).next_move;
|
||||||
}
|
}
|
||||||
}
|
ret
|
||||||
return true;
|
};
|
||||||
|
ret
|
||||||
}
|
}
|
||||||
|
|
||||||
fn kill_moves(&self,
|
fn kill_moves(&self,
|
||||||
path: MovePathIndex,
|
path: MovePathIndex,
|
||||||
kill_id: ast::NodeId,
|
kill_id: ast::NodeId,
|
||||||
dfcx_moves: &mut MoveDataFlow) {
|
dfcx_moves: &mut MoveDataFlow) {
|
||||||
for self.each_applicable_move(path) |move_index| {
|
do self.each_applicable_move(path) |move_index| {
|
||||||
dfcx_moves.add_kill(kill_id, *move_index);
|
dfcx_moves.add_kill(kill_id, *move_index);
|
||||||
}
|
true
|
||||||
|
};
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -506,14 +511,11 @@ impl FlowedMoveData {
|
||||||
* Iterates through each path moved by `id`
|
* Iterates through each path moved by `id`
|
||||||
*/
|
*/
|
||||||
|
|
||||||
for self.dfcx_moves.each_gen_bit_frozen(id) |index| {
|
do self.dfcx_moves.each_gen_bit_frozen(id) |index| {
|
||||||
let move = &self.move_data.moves[index];
|
let move = &self.move_data.moves[index];
|
||||||
let moved_path = move.path;
|
let moved_path = move.path;
|
||||||
if !f(move, self.move_data.path(moved_path).loan_path) {
|
f(move, self.move_data.path(moved_path).loan_path)
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
return true;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn each_move_of(&self,
|
pub fn each_move_of(&self,
|
||||||
|
@ -545,31 +547,33 @@ impl FlowedMoveData {
|
||||||
|
|
||||||
let opt_loan_path_index = self.move_data.existing_move_path(loan_path);
|
let opt_loan_path_index = self.move_data.existing_move_path(loan_path);
|
||||||
|
|
||||||
for self.dfcx_moves.each_bit_on_entry_frozen(id) |index| {
|
let mut ret = true;
|
||||||
|
|
||||||
|
do self.dfcx_moves.each_bit_on_entry_frozen(id) |index| {
|
||||||
let move = &self.move_data.moves[index];
|
let move = &self.move_data.moves[index];
|
||||||
let moved_path = move.path;
|
let moved_path = move.path;
|
||||||
if base_indices.iter().any(|x| x == &moved_path) {
|
if base_indices.iter().any(|x| x == &moved_path) {
|
||||||
// Scenario 1 or 2: `loan_path` or some base path of
|
// Scenario 1 or 2: `loan_path` or some base path of
|
||||||
// `loan_path` was moved.
|
// `loan_path` was moved.
|
||||||
if !f(move, self.move_data.path(moved_path).loan_path) {
|
if !f(move, self.move_data.path(moved_path).loan_path) {
|
||||||
return false;
|
ret = false;
|
||||||
}
|
}
|
||||||
loop;
|
} else {
|
||||||
}
|
foreach &loan_path_index in opt_loan_path_index.iter() {
|
||||||
|
let cont = do self.move_data.each_base_path(moved_path) |p| {
|
||||||
foreach &loan_path_index in opt_loan_path_index.iter() {
|
if p == loan_path_index {
|
||||||
for self.move_data.each_base_path(moved_path) |p| {
|
// Scenario 3: some extension of `loan_path`
|
||||||
if p == loan_path_index {
|
// was moved
|
||||||
// Scenario 3: some extension of `loan_path`
|
f(move, self.move_data.path(moved_path).loan_path)
|
||||||
// was moved
|
} else {
|
||||||
if !f(move, self.move_data.path(moved_path).loan_path) {
|
true
|
||||||
return false;
|
|
||||||
}
|
}
|
||||||
}
|
};
|
||||||
|
if !cont { ret = false; break }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
ret
|
||||||
}
|
}
|
||||||
return true;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn is_assignee(&self,
|
pub fn is_assignee(&self,
|
||||||
|
@ -601,13 +605,14 @@ impl FlowedMoveData {
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
for self.dfcx_assign.each_bit_on_entry_frozen(id) |index| {
|
do self.dfcx_assign.each_bit_on_entry_frozen(id) |index| {
|
||||||
let assignment = &self.move_data.var_assignments[index];
|
let assignment = &self.move_data.var_assignments[index];
|
||||||
if assignment.path == loan_path_index && !f(assignment) {
|
if assignment.path == loan_path_index && !f(assignment) {
|
||||||
return false;
|
false
|
||||||
|
} else {
|
||||||
|
true
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return true;
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -118,12 +118,13 @@ pub fn check_arms(cx: &MatchCheckCtxt, arms: &[arm]) {
|
||||||
_ => false
|
_ => false
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
for walk_pat(*pat) |p| {
|
do walk_pat(*pat) |p| {
|
||||||
if pat_matches_nan(p) {
|
if pat_matches_nan(p) {
|
||||||
cx.tcx.sess.span_warn(p.span, "unmatchable NaN in pattern, \
|
cx.tcx.sess.span_warn(p.span, "unmatchable NaN in pattern, \
|
||||||
use the is_NaN method in a guard instead");
|
use the is_NaN method in a guard instead");
|
||||||
}
|
}
|
||||||
}
|
true
|
||||||
|
};
|
||||||
|
|
||||||
let v = ~[*pat];
|
let v = ~[*pat];
|
||||||
match is_useful(cx, &seen, v) {
|
match is_useful(cx, &seen, v) {
|
||||||
|
@ -873,7 +874,7 @@ pub fn check_legality_of_move_bindings(cx: &MatchCheckCtxt,
|
||||||
|
|
||||||
if !any_by_move { return; } // pointless micro-optimization
|
if !any_by_move { return; } // pointless micro-optimization
|
||||||
foreach pat in pats.iter() {
|
foreach pat in pats.iter() {
|
||||||
for walk_pat(*pat) |p| {
|
do walk_pat(*pat) |p| {
|
||||||
if pat_is_binding(def_map, p) {
|
if pat_is_binding(def_map, p) {
|
||||||
match p.node {
|
match p.node {
|
||||||
pat_ident(_, _, sub) => {
|
pat_ident(_, _, sub) => {
|
||||||
|
@ -890,6 +891,7 @@ pub fn check_legality_of_move_bindings(cx: &MatchCheckCtxt,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
true
|
||||||
|
};
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -880,11 +880,12 @@ impl<'self, O:DataFlowOperator> PropagationContext<'self, O> {
|
||||||
debug!("DataFlowContext::walk_pat(pat=%s, in_out=%s)",
|
debug!("DataFlowContext::walk_pat(pat=%s, in_out=%s)",
|
||||||
pat.repr(self.dfcx.tcx), bits_to_str(reslice(in_out)));
|
pat.repr(self.dfcx.tcx), bits_to_str(reslice(in_out)));
|
||||||
|
|
||||||
for ast_util::walk_pat(pat) |p| {
|
do ast_util::walk_pat(pat) |p| {
|
||||||
debug!(" p.id=%? in_out=%s", p.id, bits_to_str(reslice(in_out)));
|
debug!(" p.id=%? in_out=%s", p.id, bits_to_str(reslice(in_out)));
|
||||||
self.merge_with_entry_set(p.id, in_out);
|
self.merge_with_entry_set(p.id, in_out);
|
||||||
self.dfcx.apply_gen_kill(p.id, in_out);
|
self.dfcx.apply_gen_kill(p.id, in_out);
|
||||||
}
|
true
|
||||||
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
fn walk_pat_alternatives(&mut self,
|
fn walk_pat_alternatives(&mut self,
|
||||||
|
|
|
@ -314,20 +314,22 @@ mod test {
|
||||||
fn each_node() {
|
fn each_node() {
|
||||||
let graph = create_graph();
|
let graph = create_graph();
|
||||||
let expected = ["A", "B", "C", "D", "E", "F"];
|
let expected = ["A", "B", "C", "D", "E", "F"];
|
||||||
for graph.each_node |idx, node| {
|
do graph.each_node |idx, node| {
|
||||||
assert_eq!(&expected[*idx], graph.node_data(idx));
|
assert_eq!(&expected[*idx], graph.node_data(idx));
|
||||||
assert_eq!(expected[*idx], node.data);
|
assert_eq!(expected[*idx], node.data);
|
||||||
}
|
true
|
||||||
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn each_edge() {
|
fn each_edge() {
|
||||||
let graph = create_graph();
|
let graph = create_graph();
|
||||||
let expected = ["AB", "BC", "BD", "DE", "EC", "FB"];
|
let expected = ["AB", "BC", "BD", "DE", "EC", "FB"];
|
||||||
for graph.each_edge |idx, edge| {
|
do graph.each_edge |idx, edge| {
|
||||||
assert_eq!(&expected[*idx], graph.edge_data(idx));
|
assert_eq!(&expected[*idx], graph.edge_data(idx));
|
||||||
assert_eq!(expected[*idx], edge.data);
|
assert_eq!(expected[*idx], edge.data);
|
||||||
}
|
true
|
||||||
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
fn test_adjacent_edges<N:Eq,E:Eq>(graph: &Graph<N,E>,
|
fn test_adjacent_edges<N:Eq,E:Eq>(graph: &Graph<N,E>,
|
||||||
|
@ -338,7 +340,7 @@ mod test {
|
||||||
assert_eq!(graph.node_data(start_index), &start_data);
|
assert_eq!(graph.node_data(start_index), &start_data);
|
||||||
|
|
||||||
let mut counter = 0;
|
let mut counter = 0;
|
||||||
for graph.each_incoming_edge(start_index) |edge_index, edge| {
|
do graph.each_incoming_edge(start_index) |edge_index, edge| {
|
||||||
assert_eq!(graph.edge_data(edge_index), &edge.data);
|
assert_eq!(graph.edge_data(edge_index), &edge.data);
|
||||||
assert!(counter < expected_incoming.len());
|
assert!(counter < expected_incoming.len());
|
||||||
debug!("counter=%? expected=%? edge_index=%? edge=%?",
|
debug!("counter=%? expected=%? edge_index=%? edge=%?",
|
||||||
|
@ -351,11 +353,12 @@ mod test {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
counter += 1;
|
counter += 1;
|
||||||
}
|
true
|
||||||
|
};
|
||||||
assert_eq!(counter, expected_incoming.len());
|
assert_eq!(counter, expected_incoming.len());
|
||||||
|
|
||||||
let mut counter = 0;
|
let mut counter = 0;
|
||||||
for graph.each_outgoing_edge(start_index) |edge_index, edge| {
|
do graph.each_outgoing_edge(start_index) |edge_index, edge| {
|
||||||
assert_eq!(graph.edge_data(edge_index), &edge.data);
|
assert_eq!(graph.edge_data(edge_index), &edge.data);
|
||||||
assert!(counter < expected_outgoing.len());
|
assert!(counter < expected_outgoing.len());
|
||||||
debug!("counter=%? expected=%? edge_index=%? edge=%?",
|
debug!("counter=%? expected=%? edge_index=%? edge=%?",
|
||||||
|
@ -368,7 +371,8 @@ mod test {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
counter += 1;
|
counter += 1;
|
||||||
}
|
true
|
||||||
|
};
|
||||||
assert_eq!(counter, expected_outgoing.len());
|
assert_eq!(counter, expected_outgoing.len());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -338,11 +338,12 @@ pub fn check_builtin_bounds(cx: Context, ty: ty::t, bounds: ty::BuiltinBounds,
|
||||||
{
|
{
|
||||||
let kind = ty::type_contents(cx.tcx, ty);
|
let kind = ty::type_contents(cx.tcx, ty);
|
||||||
let mut missing = ty::EmptyBuiltinBounds();
|
let mut missing = ty::EmptyBuiltinBounds();
|
||||||
for bounds.each |bound| {
|
do bounds.each |bound| {
|
||||||
if !kind.meets_bound(cx.tcx, bound) {
|
if !kind.meets_bound(cx.tcx, bound) {
|
||||||
missing.add(bound);
|
missing.add(bound);
|
||||||
}
|
}
|
||||||
}
|
true
|
||||||
|
};
|
||||||
if !missing.is_empty() {
|
if !missing.is_empty() {
|
||||||
any_missing(missing);
|
any_missing(missing);
|
||||||
}
|
}
|
||||||
|
|
|
@ -428,11 +428,12 @@ impl<'self> LanguageItemCollector<'self> {
|
||||||
pub fn collect_external_language_items(&mut self) {
|
pub fn collect_external_language_items(&mut self) {
|
||||||
let crate_store = self.session.cstore;
|
let crate_store = self.session.cstore;
|
||||||
do iter_crate_data(crate_store) |crate_number, _crate_metadata| {
|
do iter_crate_data(crate_store) |crate_number, _crate_metadata| {
|
||||||
for each_lang_item(crate_store, crate_number)
|
do each_lang_item(crate_store, crate_number)
|
||||||
|node_id, item_index| {
|
|node_id, item_index| {
|
||||||
let def_id = def_id { crate: crate_number, node: node_id };
|
let def_id = def_id { crate: crate_number, node: node_id };
|
||||||
self.collect_item(item_index, def_id);
|
self.collect_item(item_index, def_id);
|
||||||
}
|
true
|
||||||
|
};
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -76,7 +76,6 @@ pub enum lint {
|
||||||
unused_imports,
|
unused_imports,
|
||||||
unnecessary_qualification,
|
unnecessary_qualification,
|
||||||
while_true,
|
while_true,
|
||||||
deprecated_for_loop,
|
|
||||||
path_statement,
|
path_statement,
|
||||||
unrecognized_lint,
|
unrecognized_lint,
|
||||||
non_camel_case_types,
|
non_camel_case_types,
|
||||||
|
@ -168,13 +167,6 @@ static lint_table: &'static [(&'static str, LintSpec)] = &[
|
||||||
default: warn
|
default: warn
|
||||||
}),
|
}),
|
||||||
|
|
||||||
("deprecated_for_loop",
|
|
||||||
LintSpec {
|
|
||||||
lint: deprecated_for_loop,
|
|
||||||
desc: "recommend using `foreach` or `do` instead of `for`",
|
|
||||||
default: allow
|
|
||||||
}),
|
|
||||||
|
|
||||||
("path_statement",
|
("path_statement",
|
||||||
LintSpec {
|
LintSpec {
|
||||||
lint: path_statement,
|
lint: path_statement,
|
||||||
|
@ -414,35 +406,33 @@ impl Context {
|
||||||
// of what we changed so we can roll everything back after invoking the
|
// of what we changed so we can roll everything back after invoking the
|
||||||
// specified closure
|
// specified closure
|
||||||
let mut pushed = 0u;
|
let mut pushed = 0u;
|
||||||
for each_lint(self.tcx.sess, attrs) |meta, level, lintname| {
|
do each_lint(self.tcx.sess, attrs) |meta, level, lintname| {
|
||||||
let lint = match self.dict.find_equiv(&lintname) {
|
match self.dict.find_equiv(&lintname) {
|
||||||
None => {
|
None => {
|
||||||
self.span_lint(
|
self.span_lint(
|
||||||
unrecognized_lint,
|
unrecognized_lint,
|
||||||
meta.span,
|
meta.span,
|
||||||
fmt!("unknown `%s` attribute: `%s`",
|
fmt!("unknown `%s` attribute: `%s`",
|
||||||
level_to_str(level), lintname));
|
level_to_str(level), lintname));
|
||||||
loop
|
}
|
||||||
}
|
Some(lint) => {
|
||||||
Some(lint) => { lint.lint }
|
let lint = lint.lint;
|
||||||
};
|
let now = self.get_level(lint);
|
||||||
|
if now == forbid && level != forbid {
|
||||||
let now = self.get_level(lint);
|
self.tcx.sess.span_err(meta.span,
|
||||||
if now == forbid && level != forbid {
|
fmt!("%s(%s) overruled by outer forbid(%s)",
|
||||||
self.tcx.sess.span_err(meta.span,
|
level_to_str(level),
|
||||||
fmt!("%s(%s) overruled by outer forbid(%s)",
|
lintname, lintname));
|
||||||
level_to_str(level),
|
} else if now != level {
|
||||||
lintname, lintname));
|
let src = self.get_source(lint);
|
||||||
loop;
|
self.lint_stack.push((lint, now, src));
|
||||||
|
pushed += 1;
|
||||||
|
self.set_level(lint, level, Node(meta.span));
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
true
|
||||||
if now != level {
|
};
|
||||||
let src = self.get_source(lint);
|
|
||||||
self.lint_stack.push((lint, now, src));
|
|
||||||
pushed += 1;
|
|
||||||
self.set_level(lint, level, Node(meta.span));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// detect doc(hidden)
|
// detect doc(hidden)
|
||||||
let mut doc_hidden = do attrs.iter().any |attr| {
|
let mut doc_hidden = do attrs.iter().any |attr| {
|
||||||
|
@ -496,7 +486,7 @@ impl Context {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Crate(c) => {
|
Crate(c) => {
|
||||||
for self.visitors.iter().advance |visitor| {
|
foreach visitor in self.visitors.iter() {
|
||||||
match *visitor {
|
match *visitor {
|
||||||
OldVisitor(_, stopping) => {
|
OldVisitor(_, stopping) => {
|
||||||
oldvisit::visit_crate(c, (self, stopping))
|
oldvisit::visit_crate(c, (self, stopping))
|
||||||
|
@ -615,24 +605,6 @@ fn lint_while_true() -> oldvisit::vt<@mut Context> {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
fn lint_deprecated_for_loop() -> oldvisit::vt<@mut Context> {
|
|
||||||
oldvisit::mk_vt(@oldvisit::Visitor {
|
|
||||||
visit_expr: |e, (cx, vt): (@mut Context, oldvisit::vt<@mut Context>)| {
|
|
||||||
match e.node {
|
|
||||||
ast::expr_call(_, _, ast::ForSugar) |
|
|
||||||
ast::expr_method_call(_, _, _, _, _, ast::ForSugar) => {
|
|
||||||
cx.span_lint(deprecated_for_loop, e.span,
|
|
||||||
"`for` is deprecated; use `foreach <pat> in \
|
|
||||||
<iterator>` or `do`")
|
|
||||||
}
|
|
||||||
_ => {}
|
|
||||||
}
|
|
||||||
oldvisit::visit_expr(e, (cx, vt));
|
|
||||||
},
|
|
||||||
.. *oldvisit::default_visitor()
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
fn lint_type_limits() -> oldvisit::vt<@mut Context> {
|
fn lint_type_limits() -> oldvisit::vt<@mut Context> {
|
||||||
fn is_valid<T:cmp::Ord>(binop: ast::binop, v: T,
|
fn is_valid<T:cmp::Ord>(binop: ast::binop, v: T,
|
||||||
min: T, max: T) -> bool {
|
min: T, max: T) -> bool {
|
||||||
|
@ -1165,7 +1137,7 @@ pub fn check_crate(tcx: ty::ctxt, crate: @ast::Crate) {
|
||||||
};
|
};
|
||||||
|
|
||||||
// Install defaults.
|
// Install defaults.
|
||||||
for cx.dict.each_value |spec| {
|
foreach (_, spec) in cx.dict.iter() {
|
||||||
cx.set_level(spec.lint, spec.default, Default);
|
cx.set_level(spec.lint, spec.default, Default);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1176,7 +1148,6 @@ pub fn check_crate(tcx: ty::ctxt, crate: @ast::Crate) {
|
||||||
|
|
||||||
// Register each of the lint passes with the context
|
// Register each of the lint passes with the context
|
||||||
cx.add_oldvisit_lint(lint_while_true());
|
cx.add_oldvisit_lint(lint_while_true());
|
||||||
cx.add_oldvisit_lint(lint_deprecated_for_loop());
|
|
||||||
cx.add_oldvisit_lint(lint_path_statement());
|
cx.add_oldvisit_lint(lint_path_statement());
|
||||||
cx.add_oldvisit_lint(lint_heap());
|
cx.add_oldvisit_lint(lint_heap());
|
||||||
cx.add_oldvisit_lint(lint_type_limits());
|
cx.add_oldvisit_lint(lint_type_limits());
|
||||||
|
|
|
@ -654,16 +654,21 @@ impl VisitContext {
|
||||||
moves_map: MovesMap,
|
moves_map: MovesMap,
|
||||||
arms: &[arm])
|
arms: &[arm])
|
||||||
-> Option<@pat> {
|
-> Option<@pat> {
|
||||||
|
let mut ret = None;
|
||||||
foreach arm in arms.iter() {
|
foreach arm in arms.iter() {
|
||||||
foreach &pat in arm.pats.iter() {
|
foreach &pat in arm.pats.iter() {
|
||||||
for ast_util::walk_pat(pat) |p| {
|
let cont = do ast_util::walk_pat(pat) |p| {
|
||||||
if moves_map.contains(&p.id) {
|
if moves_map.contains(&p.id) {
|
||||||
return Some(p);
|
ret = Some(p);
|
||||||
|
false
|
||||||
|
} else {
|
||||||
|
true
|
||||||
}
|
}
|
||||||
}
|
};
|
||||||
|
if !cont { return ret }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return None;
|
ret
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn compute_captures(&self, fn_expr_id: NodeId) -> @[CaptureVar] {
|
pub fn compute_captures(&self, fn_expr_id: NodeId) -> @[CaptureVar] {
|
||||||
|
|
|
@ -72,14 +72,15 @@ pub fn pat_is_binding_or_wild(dm: resolve::DefMap, pat: @pat) -> bool {
|
||||||
|
|
||||||
pub fn pat_bindings(dm: resolve::DefMap, pat: @pat,
|
pub fn pat_bindings(dm: resolve::DefMap, pat: @pat,
|
||||||
it: &fn(binding_mode, NodeId, span, &Path)) {
|
it: &fn(binding_mode, NodeId, span, &Path)) {
|
||||||
for walk_pat(pat) |p| {
|
do walk_pat(pat) |p| {
|
||||||
match p.node {
|
match p.node {
|
||||||
pat_ident(binding_mode, ref pth, _) if pat_is_binding(dm, p) => {
|
pat_ident(binding_mode, ref pth, _) if pat_is_binding(dm, p) => {
|
||||||
it(binding_mode, p.id, p.span, pth);
|
it(binding_mode, p.id, p.span, pth);
|
||||||
}
|
}
|
||||||
_ => {}
|
_ => {}
|
||||||
}
|
}
|
||||||
}
|
true
|
||||||
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn pat_binding_ids(dm: resolve::DefMap, pat: @pat) -> ~[NodeId] {
|
pub fn pat_binding_ids(dm: resolve::DefMap, pat: @pat) -> ~[NodeId] {
|
||||||
|
|
|
@ -15,8 +15,6 @@
|
||||||
// makes all other generics or inline functions that it references
|
// makes all other generics or inline functions that it references
|
||||||
// reachable as well.
|
// reachable as well.
|
||||||
|
|
||||||
use std::iterator::IteratorUtil;
|
|
||||||
|
|
||||||
use middle::ty;
|
use middle::ty;
|
||||||
use middle::typeck;
|
use middle::typeck;
|
||||||
|
|
||||||
|
@ -392,8 +390,7 @@ impl ReachableContext {
|
||||||
// this properly would result in the necessity of computing *type*
|
// this properly would result in the necessity of computing *type*
|
||||||
// reachability, which might result in a compile time loss.
|
// reachability, which might result in a compile time loss.
|
||||||
fn mark_destructors_reachable(&self) {
|
fn mark_destructors_reachable(&self) {
|
||||||
for self.tcx.destructor_for_type.iter().advance
|
foreach (_, destructor_def_id) in self.tcx.destructor_for_type.iter() {
|
||||||
|(_, destructor_def_id)| {
|
|
||||||
if destructor_def_id.crate == LOCAL_CRATE {
|
if destructor_def_id.crate == LOCAL_CRATE {
|
||||||
self.reachable_symbols.insert(destructor_def_id.node);
|
self.reachable_symbols.insert(destructor_def_id.node);
|
||||||
}
|
}
|
||||||
|
|
|
@ -1726,7 +1726,7 @@ impl Resolver {
|
||||||
let mut modules = HashMap::new();
|
let mut modules = HashMap::new();
|
||||||
|
|
||||||
// Create all the items reachable by paths.
|
// Create all the items reachable by paths.
|
||||||
for each_path(self.session.cstore, root.def_id.get().crate)
|
do each_path(self.session.cstore, root.def_id.get().crate)
|
||||||
|path_string, def_like, visibility| {
|
|path_string, def_like, visibility| {
|
||||||
|
|
||||||
debug!("(building reduced graph for external crate) found path \
|
debug!("(building reduced graph for external crate) found path \
|
||||||
|
@ -1893,7 +1893,8 @@ impl Resolver {
|
||||||
ignoring field");
|
ignoring field");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
true
|
||||||
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Creates and adds an import directive to the given module.
|
/// Creates and adds an import directive to the given module.
|
||||||
|
@ -1992,7 +1993,7 @@ impl Resolver {
|
||||||
self.module_to_str(module_));
|
self.module_to_str(module_));
|
||||||
self.resolve_imports_for_module(module_);
|
self.resolve_imports_for_module(module_);
|
||||||
|
|
||||||
for module_.children.each_value |&child_node| {
|
foreach (_, &child_node) in module_.children.iter() {
|
||||||
match child_node.get_module_if_available() {
|
match child_node.get_module_if_available() {
|
||||||
None => {
|
None => {
|
||||||
// Nothing to do.
|
// Nothing to do.
|
||||||
|
@ -2003,7 +2004,7 @@ impl Resolver {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
for module_.anonymous_children.each_value |&child_module| {
|
foreach (_, &child_module) in module_.anonymous_children.iter() {
|
||||||
self.resolve_imports_for_module_subtree(child_module);
|
self.resolve_imports_for_module_subtree(child_module);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -2446,8 +2447,7 @@ impl Resolver {
|
||||||
assert_eq!(containing_module.glob_count, 0);
|
assert_eq!(containing_module.glob_count, 0);
|
||||||
|
|
||||||
// Add all resolved imports from the containing module.
|
// Add all resolved imports from the containing module.
|
||||||
for containing_module.import_resolutions.iter().advance
|
foreach (ident, target_import_resolution) in containing_module.import_resolutions.iter() {
|
||||||
|(ident, target_import_resolution)| {
|
|
||||||
|
|
||||||
debug!("(resolving glob import) writing module resolution \
|
debug!("(resolving glob import) writing module resolution \
|
||||||
%? into `%s`",
|
%? into `%s`",
|
||||||
|
@ -2536,8 +2536,7 @@ impl Resolver {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Add external module children from the containing module.
|
// Add external module children from the containing module.
|
||||||
for containing_module.external_module_children.iter().advance
|
foreach (&ident, module) in containing_module.external_module_children.iter() {
|
||||||
|(&ident, module)| {
|
|
||||||
let name_bindings =
|
let name_bindings =
|
||||||
@mut Resolver::create_name_bindings_from_module(*module);
|
@mut Resolver::create_name_bindings_from_module(*module);
|
||||||
merge_import_resolution(ident, name_bindings);
|
merge_import_resolution(ident, name_bindings);
|
||||||
|
@ -3109,7 +3108,7 @@ impl Resolver {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Descend into children and anonymous children.
|
// Descend into children and anonymous children.
|
||||||
for module_.children.each_value |&child_node| {
|
foreach (_, &child_node) in module_.children.iter() {
|
||||||
match child_node.get_module_if_available() {
|
match child_node.get_module_if_available() {
|
||||||
None => {
|
None => {
|
||||||
// Continue.
|
// Continue.
|
||||||
|
@ -3120,7 +3119,7 @@ impl Resolver {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
for module_.anonymous_children.each_value |&module_| {
|
foreach (_, &module_) in module_.anonymous_children.iter() {
|
||||||
self.report_unresolved_imports(module_);
|
self.report_unresolved_imports(module_);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -3168,7 +3167,7 @@ impl Resolver {
|
||||||
|
|
||||||
self.record_exports_for_module(module_);
|
self.record_exports_for_module(module_);
|
||||||
|
|
||||||
for module_.children.each_value |&child_name_bindings| {
|
foreach (_, &child_name_bindings) in module_.children.iter() {
|
||||||
match child_name_bindings.get_module_if_available() {
|
match child_name_bindings.get_module_if_available() {
|
||||||
None => {
|
None => {
|
||||||
// Nothing to do.
|
// Nothing to do.
|
||||||
|
@ -3179,7 +3178,7 @@ impl Resolver {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
for module_.anonymous_children.each_value |&child_module| {
|
foreach (_, &child_module) in module_.anonymous_children.iter() {
|
||||||
self.record_exports_for_module_subtree(child_module);
|
self.record_exports_for_module_subtree(child_module);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -3229,8 +3228,7 @@ impl Resolver {
|
||||||
pub fn add_exports_for_module(@mut self,
|
pub fn add_exports_for_module(@mut self,
|
||||||
exports2: &mut ~[Export2],
|
exports2: &mut ~[Export2],
|
||||||
module_: @mut Module) {
|
module_: @mut Module) {
|
||||||
for module_.import_resolutions.iter().advance |(ident,
|
foreach (ident, importresolution) in module_.import_resolutions.iter() {
|
||||||
importresolution)| {
|
|
||||||
if importresolution.privacy != Public {
|
if importresolution.privacy != Public {
|
||||||
debug!("(computing exports) not reexporting private `%s`",
|
debug!("(computing exports) not reexporting private `%s`",
|
||||||
self.session.str_of(*ident));
|
self.session.str_of(*ident));
|
||||||
|
@ -4195,7 +4193,7 @@ impl Resolver {
|
||||||
bindings_list: Option<@mut HashMap<ident,NodeId>>,
|
bindings_list: Option<@mut HashMap<ident,NodeId>>,
|
||||||
visitor: ResolveVisitor) {
|
visitor: ResolveVisitor) {
|
||||||
let pat_id = pattern.id;
|
let pat_id = pattern.id;
|
||||||
for walk_pat(pattern) |pattern| {
|
do walk_pat(pattern) |pattern| {
|
||||||
match pattern.node {
|
match pattern.node {
|
||||||
pat_ident(binding_mode, ref path, _)
|
pat_ident(binding_mode, ref path, _)
|
||||||
if !path.global && path.idents.len() == 1 => {
|
if !path.global && path.idents.len() == 1 => {
|
||||||
|
@ -4425,7 +4423,8 @@ impl Resolver {
|
||||||
// Nothing to do.
|
// Nothing to do.
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
true
|
||||||
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn resolve_bare_identifier_pattern(@mut self, name: ident)
|
pub fn resolve_bare_identifier_pattern(@mut self, name: ident)
|
||||||
|
@ -4838,7 +4837,7 @@ impl Resolver {
|
||||||
let mut j = this.value_ribs.len();
|
let mut j = this.value_ribs.len();
|
||||||
while j != 0 {
|
while j != 0 {
|
||||||
j -= 1;
|
j -= 1;
|
||||||
for this.value_ribs[j].bindings.each_key |&k| {
|
foreach (&k, _) in this.value_ribs[j].bindings.iter() {
|
||||||
maybes.push(this.session.str_of(k));
|
maybes.push(this.session.str_of(k));
|
||||||
values.push(uint::max_value);
|
values.push(uint::max_value);
|
||||||
}
|
}
|
||||||
|
@ -5166,7 +5165,7 @@ impl Resolver {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Look for trait children.
|
// Look for trait children.
|
||||||
for search_module.children.each_value |&child_name_bindings| {
|
foreach (_, &child_name_bindings) in search_module.children.iter() {
|
||||||
match child_name_bindings.def_for_namespace(TypeNS) {
|
match child_name_bindings.def_for_namespace(TypeNS) {
|
||||||
Some(def) => {
|
Some(def) => {
|
||||||
match def {
|
match def {
|
||||||
|
@ -5189,9 +5188,7 @@ impl Resolver {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Look for imports.
|
// Look for imports.
|
||||||
for search_module.import_resolutions.each_value
|
foreach (_, &import_resolution) in search_module.import_resolutions.iter() {
|
||||||
|&import_resolution| {
|
|
||||||
|
|
||||||
match import_resolution.target_for_namespace(TypeNS) {
|
match import_resolution.target_for_namespace(TypeNS) {
|
||||||
None => {
|
None => {
|
||||||
// Continue.
|
// Continue.
|
||||||
|
@ -5370,7 +5367,7 @@ impl Resolver {
|
||||||
debug!("Dump of module `%s`:", self.module_to_str(module_));
|
debug!("Dump of module `%s`:", self.module_to_str(module_));
|
||||||
|
|
||||||
debug!("Children:");
|
debug!("Children:");
|
||||||
for module_.children.each_key |&name| {
|
foreach (&name, _) in module_.children.iter() {
|
||||||
debug!("* %s", self.session.str_of(name));
|
debug!("* %s", self.session.str_of(name));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -1148,7 +1148,7 @@ fn store_non_ref_bindings(bcx: @mut Block,
|
||||||
*/
|
*/
|
||||||
|
|
||||||
let mut bcx = bcx;
|
let mut bcx = bcx;
|
||||||
for bindings_map.each_value |&binding_info| {
|
foreach (_, &binding_info) in bindings_map.iter() {
|
||||||
match binding_info.trmode {
|
match binding_info.trmode {
|
||||||
TrByValue(lldest) => {
|
TrByValue(lldest) => {
|
||||||
let llval = Load(bcx, binding_info.llmatch); // get a T*
|
let llval = Load(bcx, binding_info.llmatch); // get a T*
|
||||||
|
@ -1182,7 +1182,7 @@ fn insert_lllocals(bcx: @mut Block,
|
||||||
BindArgument => bcx.fcx.llargs
|
BindArgument => bcx.fcx.llargs
|
||||||
};
|
};
|
||||||
|
|
||||||
for bindings_map.each_value |&binding_info| {
|
foreach (_, &binding_info) in bindings_map.iter() {
|
||||||
let llval = match binding_info.trmode {
|
let llval = match binding_info.trmode {
|
||||||
// By value bindings: use the stack slot that we
|
// By value bindings: use the stack slot that we
|
||||||
// copied/moved the value into
|
// copied/moved the value into
|
||||||
|
@ -1250,7 +1250,7 @@ pub fn compile_guard(bcx: @mut Block,
|
||||||
|
|
||||||
fn drop_bindings(bcx: @mut Block, data: &ArmData) -> @mut Block {
|
fn drop_bindings(bcx: @mut Block, data: &ArmData) -> @mut Block {
|
||||||
let mut bcx = bcx;
|
let mut bcx = bcx;
|
||||||
for data.bindings_map.each_value |&binding_info| {
|
foreach (_, &binding_info) in data.bindings_map.iter() {
|
||||||
match binding_info.trmode {
|
match binding_info.trmode {
|
||||||
TrByValue(llval) => {
|
TrByValue(llval) => {
|
||||||
bcx = glue::drop_ty(bcx, llval, binding_info.ty);
|
bcx = glue::drop_ty(bcx, llval, binding_info.ty);
|
||||||
|
|
|
@ -2801,7 +2801,7 @@ pub fn create_module_map(ccx: &mut CrateContext) -> ValueRef {
|
||||||
// like the multiple borrows. At least, it doesn't
|
// like the multiple borrows. At least, it doesn't
|
||||||
// like them on the current snapshot. (2013-06-14)
|
// like them on the current snapshot. (2013-06-14)
|
||||||
let mut keys = ~[];
|
let mut keys = ~[];
|
||||||
for ccx.module_data.each_key |k| {
|
foreach (k, _) in ccx.module_data.iter() {
|
||||||
keys.push(k.to_managed());
|
keys.push(k.to_managed());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -736,7 +736,7 @@ pub fn emit_tydescs(ccx: &mut CrateContext) {
|
||||||
ccx.finished_tydescs = true;
|
ccx.finished_tydescs = true;
|
||||||
let glue_fn_ty = Type::generic_glue_fn(ccx).ptr_to();
|
let glue_fn_ty = Type::generic_glue_fn(ccx).ptr_to();
|
||||||
let tyds = &mut ccx.tydescs;
|
let tyds = &mut ccx.tydescs;
|
||||||
for tyds.each_value |&val| {
|
foreach (_, &val) in tyds.iter() {
|
||||||
let ti = val;
|
let ti = val;
|
||||||
|
|
||||||
// Each of the glue functions needs to be cast to a generic type
|
// Each of the glue functions needs to be cast to a generic type
|
||||||
|
|
|
@ -2277,7 +2277,7 @@ pub fn type_contents(cx: ctxt, ty: t) -> TypeContents {
|
||||||
// This is like with typarams below, but less "pessimistic" and also
|
// This is like with typarams below, but less "pessimistic" and also
|
||||||
// dependent on the trait store.
|
// dependent on the trait store.
|
||||||
let mut bt = TC_NONE;
|
let mut bt = TC_NONE;
|
||||||
for (AllBuiltinBounds() - bounds).each |bound| {
|
do (AllBuiltinBounds() - bounds).each |bound| {
|
||||||
bt = bt + match bound {
|
bt = bt + match bound {
|
||||||
BoundStatic if bounds.contains_elem(BoundSend)
|
BoundStatic if bounds.contains_elem(BoundSend)
|
||||||
=> TC_NONE, // Send bound implies static bound.
|
=> TC_NONE, // Send bound implies static bound.
|
||||||
|
@ -2286,7 +2286,8 @@ pub fn type_contents(cx: ctxt, ty: t) -> TypeContents {
|
||||||
BoundFreeze => TC_MUTABLE,
|
BoundFreeze => TC_MUTABLE,
|
||||||
BoundSized => TC_NONE, // don't care if interior is sized
|
BoundSized => TC_NONE, // don't care if interior is sized
|
||||||
};
|
};
|
||||||
}
|
true
|
||||||
|
};
|
||||||
st + mt + bt
|
st + mt + bt
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -2297,7 +2298,7 @@ pub fn type_contents(cx: ctxt, ty: t) -> TypeContents {
|
||||||
let _i = indenter();
|
let _i = indenter();
|
||||||
|
|
||||||
let mut tc = TC_ALL;
|
let mut tc = TC_ALL;
|
||||||
for type_param_def.bounds.builtin_bounds.each |bound| {
|
do type_param_def.bounds.builtin_bounds.each |bound| {
|
||||||
debug!("tc = %s, bound = %?", tc.to_str(), bound);
|
debug!("tc = %s, bound = %?", tc.to_str(), bound);
|
||||||
tc = tc - match bound {
|
tc = tc - match bound {
|
||||||
BoundStatic => TypeContents::nonstatic(cx),
|
BoundStatic => TypeContents::nonstatic(cx),
|
||||||
|
@ -2306,7 +2307,8 @@ pub fn type_contents(cx: ctxt, ty: t) -> TypeContents {
|
||||||
// The dynamic-size bit can be removed at pointer-level, etc.
|
// The dynamic-size bit can be removed at pointer-level, etc.
|
||||||
BoundSized => TypeContents::dynamically_sized(cx),
|
BoundSized => TypeContents::dynamically_sized(cx),
|
||||||
};
|
};
|
||||||
}
|
true
|
||||||
|
};
|
||||||
|
|
||||||
debug!("result = %s", tc.to_str());
|
debug!("result = %s", tc.to_str());
|
||||||
return tc;
|
return tc;
|
||||||
|
@ -4424,10 +4426,11 @@ pub fn count_traits_and_supertraits(tcx: ctxt,
|
||||||
type_param_defs: &[TypeParameterDef]) -> uint {
|
type_param_defs: &[TypeParameterDef]) -> uint {
|
||||||
let mut total = 0;
|
let mut total = 0;
|
||||||
foreach type_param_def in type_param_defs.iter() {
|
foreach type_param_def in type_param_defs.iter() {
|
||||||
for each_bound_trait_and_supertraits(
|
do each_bound_trait_and_supertraits(
|
||||||
tcx, type_param_def.bounds.trait_bounds) |_| {
|
tcx, type_param_def.bounds.trait_bounds) |_| {
|
||||||
total += 1;
|
total += 1;
|
||||||
}
|
true
|
||||||
|
};
|
||||||
}
|
}
|
||||||
return total;
|
return total;
|
||||||
}
|
}
|
||||||
|
|
|
@ -420,44 +420,42 @@ impl<'self> LookupContext<'self> {
|
||||||
let tcx = self.tcx();
|
let tcx = self.tcx();
|
||||||
let mut next_bound_idx = 0; // count only trait bounds
|
let mut next_bound_idx = 0; // count only trait bounds
|
||||||
|
|
||||||
for ty::each_bound_trait_and_supertraits(tcx, bounds)
|
do ty::each_bound_trait_and_supertraits(tcx, bounds) |bound_trait_ref| {
|
||||||
|bound_trait_ref|
|
|
||||||
{
|
|
||||||
let this_bound_idx = next_bound_idx;
|
let this_bound_idx = next_bound_idx;
|
||||||
next_bound_idx += 1;
|
next_bound_idx += 1;
|
||||||
|
|
||||||
let trait_methods = ty::trait_methods(tcx, bound_trait_ref.def_id);
|
let trait_methods = ty::trait_methods(tcx, bound_trait_ref.def_id);
|
||||||
let pos = {
|
match trait_methods.iter().position(|m| {
|
||||||
match trait_methods.iter().position(|m| {
|
m.explicit_self != ast::sty_static &&
|
||||||
m.explicit_self != ast::sty_static &&
|
m.ident == self.m_name })
|
||||||
m.ident == self.m_name })
|
{
|
||||||
{
|
Some(pos) => {
|
||||||
Some(pos) => pos,
|
let method = trait_methods[pos];
|
||||||
None => {
|
|
||||||
debug!("trait doesn't contain method: %?",
|
let cand = Candidate {
|
||||||
bound_trait_ref.def_id);
|
rcvr_ty: self_ty,
|
||||||
loop; // check next trait or bound
|
rcvr_substs: bound_trait_ref.substs.clone(),
|
||||||
}
|
method_ty: method,
|
||||||
|
origin: method_param(
|
||||||
|
method_param {
|
||||||
|
trait_id: bound_trait_ref.def_id,
|
||||||
|
method_num: pos,
|
||||||
|
param_num: param,
|
||||||
|
bound_num: this_bound_idx,
|
||||||
|
})
|
||||||
|
};
|
||||||
|
|
||||||
|
debug!("pushing inherent candidate for param: %?", cand);
|
||||||
|
self.inherent_candidates.push(cand);
|
||||||
}
|
}
|
||||||
};
|
None => {
|
||||||
let method = trait_methods[pos];
|
debug!("trait doesn't contain method: %?",
|
||||||
|
bound_trait_ref.def_id);
|
||||||
let cand = Candidate {
|
// check next trait or bound
|
||||||
rcvr_ty: self_ty,
|
}
|
||||||
rcvr_substs: bound_trait_ref.substs.clone(),
|
}
|
||||||
method_ty: method,
|
true
|
||||||
origin: method_param(
|
};
|
||||||
method_param {
|
|
||||||
trait_id: bound_trait_ref.def_id,
|
|
||||||
method_num: pos,
|
|
||||||
param_num: param,
|
|
||||||
bound_num: this_bound_idx,
|
|
||||||
})
|
|
||||||
};
|
|
||||||
|
|
||||||
debug!("pushing inherent candidate for param: %?", cand);
|
|
||||||
self.inherent_candidates.push(cand);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -131,7 +131,7 @@ fn lookup_vtables_for_param(vcx: &VtableContext,
|
||||||
// ty is the value supplied for the type parameter A...
|
// ty is the value supplied for the type parameter A...
|
||||||
let mut param_result = ~[];
|
let mut param_result = ~[];
|
||||||
|
|
||||||
for ty::each_bound_trait_and_supertraits(
|
do ty::each_bound_trait_and_supertraits(
|
||||||
tcx, type_param_bounds.trait_bounds) |trait_ref|
|
tcx, type_param_bounds.trait_bounds) |trait_ref|
|
||||||
{
|
{
|
||||||
// ...and here trait_ref is each bound that was declared on A,
|
// ...and here trait_ref is each bound that was declared on A,
|
||||||
|
@ -158,7 +158,8 @@ fn lookup_vtables_for_param(vcx: &VtableContext,
|
||||||
vcx.infcx.ty_to_str(ty)));
|
vcx.infcx.ty_to_str(ty)));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
true
|
||||||
|
};
|
||||||
|
|
||||||
debug!("lookup_vtables_for_param result(\
|
debug!("lookup_vtables_for_param result(\
|
||||||
location_info=%?, \
|
location_info=%?, \
|
||||||
|
@ -286,7 +287,8 @@ fn lookup_vtable_from_bounds(vcx: &VtableContext,
|
||||||
let tcx = vcx.tcx();
|
let tcx = vcx.tcx();
|
||||||
|
|
||||||
let mut n_bound = 0;
|
let mut n_bound = 0;
|
||||||
for ty::each_bound_trait_and_supertraits(tcx, bounds) |bound_trait_ref| {
|
let mut ret = None;
|
||||||
|
do ty::each_bound_trait_and_supertraits(tcx, bounds) |bound_trait_ref| {
|
||||||
debug!("checking bounds trait %s",
|
debug!("checking bounds trait %s",
|
||||||
bound_trait_ref.repr(vcx.tcx()));
|
bound_trait_ref.repr(vcx.tcx()));
|
||||||
|
|
||||||
|
@ -298,13 +300,14 @@ fn lookup_vtable_from_bounds(vcx: &VtableContext,
|
||||||
let vtable = vtable_param(param, n_bound);
|
let vtable = vtable_param(param, n_bound);
|
||||||
debug!("found param vtable: %?",
|
debug!("found param vtable: %?",
|
||||||
vtable);
|
vtable);
|
||||||
return Some(vtable);
|
ret = Some(vtable);
|
||||||
|
false
|
||||||
|
} else {
|
||||||
|
n_bound += 1;
|
||||||
|
true
|
||||||
}
|
}
|
||||||
|
};
|
||||||
n_bound += 1;
|
ret
|
||||||
}
|
|
||||||
|
|
||||||
return None;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn search_for_vtable(vcx: &VtableContext,
|
fn search_for_vtable(vcx: &VtableContext,
|
||||||
|
@ -552,7 +555,7 @@ pub fn early_resolve_expr(ex: @ast::expr,
|
||||||
let cx = fcx.ccx;
|
let cx = fcx.ccx;
|
||||||
match ex.node {
|
match ex.node {
|
||||||
ast::expr_path(*) => {
|
ast::expr_path(*) => {
|
||||||
for fcx.opt_node_ty_substs(ex.id) |substs| {
|
do fcx.opt_node_ty_substs(ex.id) |substs| {
|
||||||
debug!("vtable resolution on parameter bounds for expr %s",
|
debug!("vtable resolution on parameter bounds for expr %s",
|
||||||
ex.repr(fcx.tcx()));
|
ex.repr(fcx.tcx()));
|
||||||
let def = cx.tcx.def_map.get_copy(&ex.id);
|
let def = cx.tcx.def_map.get_copy(&ex.id);
|
||||||
|
@ -571,7 +574,8 @@ pub fn early_resolve_expr(ex: @ast::expr,
|
||||||
insert_vtables(fcx, ex.id, vtbls);
|
insert_vtables(fcx, ex.id, vtbls);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
true
|
||||||
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
ast::expr_paren(e) => {
|
ast::expr_paren(e) => {
|
||||||
|
|
|
@ -178,17 +178,19 @@ fn resolve_type_vars_for_node(wbcx: @mut WbCtxt, sp: span, id: ast::NodeId)
|
||||||
debug!("resolve_type_vars_for_node(id=%d, n_ty=%s, t=%s)",
|
debug!("resolve_type_vars_for_node(id=%d, n_ty=%s, t=%s)",
|
||||||
id, ppaux::ty_to_str(tcx, n_ty), ppaux::ty_to_str(tcx, t));
|
id, ppaux::ty_to_str(tcx, n_ty), ppaux::ty_to_str(tcx, t));
|
||||||
write_ty_to_tcx(tcx, id, t);
|
write_ty_to_tcx(tcx, id, t);
|
||||||
for fcx.opt_node_ty_substs(id) |substs| {
|
let mut ret = Some(t);
|
||||||
|
do fcx.opt_node_ty_substs(id) |substs| {
|
||||||
let mut new_tps = ~[];
|
let mut new_tps = ~[];
|
||||||
foreach subst in substs.tps.iter() {
|
foreach subst in substs.tps.iter() {
|
||||||
match resolve_type_vars_in_type(fcx, sp, *subst) {
|
match resolve_type_vars_in_type(fcx, sp, *subst) {
|
||||||
Some(t) => new_tps.push(t),
|
Some(t) => new_tps.push(t),
|
||||||
None => { wbcx.success = false; return None; }
|
None => { wbcx.success = false; ret = None; break }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
write_substs_to_tcx(tcx, id, new_tps);
|
write_substs_to_tcx(tcx, id, new_tps);
|
||||||
}
|
ret.is_some()
|
||||||
return Some(t);
|
};
|
||||||
|
ret
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -371,9 +371,10 @@ impl CoherenceChecker {
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn check_implementation_coherence(&self) {
|
pub fn check_implementation_coherence(&self) {
|
||||||
for self.crate_context.tcx.trait_impls.each_key |&trait_id| {
|
do self.crate_context.tcx.trait_impls.each_key |&trait_id| {
|
||||||
self.check_implementation_coherence_of(trait_id);
|
self.check_implementation_coherence_of(trait_id);
|
||||||
}
|
true
|
||||||
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn check_implementation_coherence_of(&self, trait_def_id: def_id) {
|
pub fn check_implementation_coherence_of(&self, trait_def_id: def_id) {
|
||||||
|
@ -738,14 +739,15 @@ impl CoherenceChecker {
|
||||||
|
|
||||||
let crate_store = self.crate_context.tcx.sess.cstore;
|
let crate_store = self.crate_context.tcx.sess.cstore;
|
||||||
do iter_crate_data(crate_store) |crate_number, _crate_metadata| {
|
do iter_crate_data(crate_store) |crate_number, _crate_metadata| {
|
||||||
for each_path(crate_store, crate_number) |_, def_like, _| {
|
do each_path(crate_store, crate_number) |_, def_like, _| {
|
||||||
match def_like {
|
match def_like {
|
||||||
dl_impl(def_id) => {
|
dl_impl(def_id) => {
|
||||||
self.add_external_impl(&mut impls_seen, def_id)
|
self.add_external_impl(&mut impls_seen, def_id)
|
||||||
}
|
}
|
||||||
dl_def(_) | dl_field => loop, // Skip this.
|
dl_def(_) | dl_field => (), // Skip this.
|
||||||
}
|
}
|
||||||
}
|
true
|
||||||
|
};
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -267,16 +267,23 @@ impl Combine for Glb {
|
||||||
a_isr: isr_alist,
|
a_isr: isr_alist,
|
||||||
r: ty::Region) -> ty::Region
|
r: ty::Region) -> ty::Region
|
||||||
{
|
{
|
||||||
for list::each(a_isr) |pair| {
|
let mut ret = None;
|
||||||
|
do list::each(a_isr) |pair| {
|
||||||
let (a_br, a_r) = *pair;
|
let (a_br, a_r) = *pair;
|
||||||
if a_r == r {
|
if a_r == r {
|
||||||
return ty::re_bound(a_br);
|
ret = Some(ty::re_bound(a_br));
|
||||||
|
false
|
||||||
|
} else {
|
||||||
|
true
|
||||||
}
|
}
|
||||||
}
|
};
|
||||||
|
|
||||||
this.infcx.tcx.sess.span_bug(
|
match ret {
|
||||||
this.trace.origin.span(),
|
Some(x) => x,
|
||||||
fmt!("could not find original bound region for %?", r));
|
None => this.infcx.tcx.sess.span_bug(
|
||||||
|
this.trace.origin.span(),
|
||||||
|
fmt!("could not find original bound region for %?", r))
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn fresh_bound_variable(this: &Glb) -> ty::Region {
|
fn fresh_bound_variable(this: &Glb) -> ty::Region {
|
||||||
|
|
|
@ -526,7 +526,7 @@ pub fn lattice_var_and_t<L:LatticeDir + Combine,
|
||||||
|
|
||||||
pub fn var_ids<T:Combine>(this: &T, isr: isr_alist) -> ~[RegionVid] {
|
pub fn var_ids<T:Combine>(this: &T, isr: isr_alist) -> ~[RegionVid] {
|
||||||
let mut result = ~[];
|
let mut result = ~[];
|
||||||
for list::each(isr) |pair| {
|
do list::each(isr) |pair| {
|
||||||
match pair.second() {
|
match pair.second() {
|
||||||
ty::re_infer(ty::ReVar(r)) => { result.push(r); }
|
ty::re_infer(ty::ReVar(r)) => { result.push(r); }
|
||||||
r => {
|
r => {
|
||||||
|
@ -535,8 +535,9 @@ pub fn var_ids<T:Combine>(this: &T, isr: isr_alist) -> ~[RegionVid] {
|
||||||
fmt!("Found non-region-vid: %?", r));
|
fmt!("Found non-region-vid: %?", r));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
true
|
||||||
return result;
|
};
|
||||||
|
result
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn is_var_in_set(new_vars: &[RegionVid], r: ty::Region) -> bool {
|
pub fn is_var_in_set(new_vars: &[RegionVid], r: ty::Region) -> bool {
|
||||||
|
|
|
@ -182,20 +182,27 @@ impl Combine for Lub {
|
||||||
// in both A and B. Replace the variable with the "first"
|
// in both A and B. Replace the variable with the "first"
|
||||||
// bound region from A that we find it to be associated
|
// bound region from A that we find it to be associated
|
||||||
// with.
|
// with.
|
||||||
for list::each(a_isr) |pair| {
|
let mut ret = None;
|
||||||
|
do list::each(a_isr) |pair| {
|
||||||
let (a_br, a_r) = *pair;
|
let (a_br, a_r) = *pair;
|
||||||
if tainted.iter().any(|x| x == &a_r) {
|
if tainted.iter().any(|x| x == &a_r) {
|
||||||
debug!("generalize_region(r0=%?): \
|
debug!("generalize_region(r0=%?): \
|
||||||
replacing with %?, tainted=%?",
|
replacing with %?, tainted=%?",
|
||||||
r0, a_br, tainted);
|
r0, a_br, tainted);
|
||||||
return ty::re_bound(a_br);
|
ret = Some(ty::re_bound(a_br));
|
||||||
|
false
|
||||||
|
} else {
|
||||||
|
true
|
||||||
}
|
}
|
||||||
}
|
};
|
||||||
|
|
||||||
this.infcx.tcx.sess.span_bug(
|
match ret {
|
||||||
this.trace.origin.span(),
|
Some(x) => x,
|
||||||
fmt!("Region %? is not associated with \
|
None => this.infcx.tcx.sess.span_bug(
|
||||||
any bound region from A!", r0));
|
this.trace.origin.span(),
|
||||||
|
fmt!("Region %? is not associated with \
|
||||||
|
any bound region from A!", r0))
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -1209,7 +1209,7 @@ impl RegionVarBindings {
|
||||||
debug!("process_edges(source_vid=%?, dir=%?)", source_vid, dir);
|
debug!("process_edges(source_vid=%?, dir=%?)", source_vid, dir);
|
||||||
|
|
||||||
let source_node_index = NodeIndex(source_vid.to_uint());
|
let source_node_index = NodeIndex(source_vid.to_uint());
|
||||||
for graph.each_adjacent_edge(source_node_index, dir) |_, edge| {
|
do graph.each_adjacent_edge(source_node_index, dir) |_, edge| {
|
||||||
match edge.data {
|
match edge.data {
|
||||||
ConstrainVarSubVar(from_vid, to_vid) => {
|
ConstrainVarSubVar(from_vid, to_vid) => {
|
||||||
let opp_vid =
|
let opp_vid =
|
||||||
|
@ -1229,7 +1229,8 @@ impl RegionVarBindings {
|
||||||
|
|
||||||
ConstrainRegSubReg(*) => {}
|
ConstrainRegSubReg(*) => {}
|
||||||
}
|
}
|
||||||
}
|
true
|
||||||
|
};
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -191,7 +191,9 @@ impl Combine for Sub {
|
||||||
// that the skolemized regions do not "leak".
|
// that the skolemized regions do not "leak".
|
||||||
let new_vars =
|
let new_vars =
|
||||||
self.infcx.region_vars.vars_created_since_snapshot(snapshot);
|
self.infcx.region_vars.vars_created_since_snapshot(snapshot);
|
||||||
for list::each(skol_isr) |pair| {
|
|
||||||
|
let mut ret = Ok(sig);
|
||||||
|
do list::each(skol_isr) |pair| {
|
||||||
let (skol_br, skol) = *pair;
|
let (skol_br, skol) = *pair;
|
||||||
let tainted = self.infcx.region_vars.tainted(snapshot, skol);
|
let tainted = self.infcx.region_vars.tainted(snapshot, skol);
|
||||||
foreach tainted_region in tainted.iter() {
|
foreach tainted_region in tainted.iter() {
|
||||||
|
@ -208,16 +210,19 @@ impl Combine for Sub {
|
||||||
|
|
||||||
// A is not as polymorphic as B:
|
// A is not as polymorphic as B:
|
||||||
if self.a_is_expected {
|
if self.a_is_expected {
|
||||||
return Err(ty::terr_regions_insufficiently_polymorphic(
|
ret = Err(ty::terr_regions_insufficiently_polymorphic(
|
||||||
skol_br, *tainted_region));
|
skol_br, *tainted_region));
|
||||||
|
break
|
||||||
} else {
|
} else {
|
||||||
return Err(ty::terr_regions_overly_polymorphic(
|
ret = Err(ty::terr_regions_overly_polymorphic(
|
||||||
skol_br, *tainted_region));
|
skol_br, *tainted_region));
|
||||||
|
break
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
ret.is_ok()
|
||||||
|
};
|
||||||
|
|
||||||
return Ok(sig);
|
ret
|
||||||
}
|
}
|
||||||
|
|
||||||
// Traits please (FIXME: #2794):
|
// Traits please (FIXME: #2794):
|
||||||
|
|
|
@ -299,11 +299,12 @@ impl get_and_find_region for isr_alist {
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn find(&self, br: ty::bound_region) -> Option<ty::Region> {
|
pub fn find(&self, br: ty::bound_region) -> Option<ty::Region> {
|
||||||
for list::each(*self) |isr| {
|
let mut ret = None;
|
||||||
|
do list::each(*self) |isr| {
|
||||||
let (isr_br, isr_r) = *isr;
|
let (isr_br, isr_r) = *isr;
|
||||||
if isr_br == br { return Some(isr_r); }
|
if isr_br == br { ret = Some(isr_r); false } else { true }
|
||||||
}
|
};
|
||||||
return None;
|
ret
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@@ -589,14 +589,15 @@ impl Repr for ty::RegionSubsts {
 impl Repr for ty::ParamBounds {
     fn repr(&self, tcx: ctxt) -> ~str {
         let mut res = ~[];
-        for self.builtin_bounds.each |b| {
+        do self.builtin_bounds.each |b| {
             res.push(match b {
                 ty::BoundStatic => ~"'static",
                 ty::BoundSend => ~"Send",
                 ty::BoundFreeze => ~"Freeze",
                 ty::BoundSized => ~"Sized",
             });
-        }
+            true
+        };
         foreach t in self.trait_bounds.iter() {
             res.push(t.repr(tcx));
         }
@@ -832,9 +833,10 @@ impl UserString for ty::BuiltinBounds {
     fn user_string(&self, tcx: ctxt) -> ~str {
         if self.is_empty() { ~"<no-bounds>" } else {
             let mut result = ~[];
-            for self.each |bb| {
+            do self.each |bb| {
                 result.push(bb.user_string(tcx));
-            }
+                true
+            };
             result.connect("+")
         }
     }
@@ -202,13 +202,13 @@ impl Program {
             use extra::serialize::*;
             %s // view items
         ", self.externs, self.view_items);
-        for self.structs.each_value |s| {
+        foreach (_, s) in self.structs.iter() {
             // The structs aren't really useful unless they're encodable
             code.push_str("#[deriving(Encodable, Decodable)]");
             code.push_str(*s);
             code.push_str("\n");
         }
-        for self.items.each_value |s| {
+        foreach (_, s) in self.items.iter() {
             code.push_str(*s);
             code.push_str("\n");
         }
@@ -165,7 +165,7 @@ impl PkgSrc {
         debug!("Called check_dir, I'm in %s", dir.to_str());
         let prefix = dir.components.len();
         debug!("Matching against %?", self.id.local_path.filestem());
-        for os::walk_dir(&dir) |pth| {
+        do os::walk_dir(&dir) |pth| {
             match pth.filename() {
                 Some(~"lib.rs") => PkgSrc::push_crate(&mut self.libs,
                                                       prefix,
@@ -181,7 +181,8 @@ impl PkgSrc {
                                                        pth),
                 _ => ()
             }
-        }
+            true
+        };

         if self.libs.is_empty() && self.mains.is_empty()
             && self.tests.is_empty() && self.benchs.is_empty() {
@@ -206,11 +206,12 @@ impl CtxMethods for Ctx {
                 // The package id is presumed to be the first command-line
                 // argument
                 let pkgid = PkgId::new(args[0].clone(), &os::getcwd());
-                for each_pkg_parent_workspace(&pkgid) |workspace| {
+                do each_pkg_parent_workspace(&pkgid) |workspace| {
                     debug!("found pkg %s in workspace %s, trying to build",
                            pkgid.to_str(), workspace.to_str());
                     self.build(workspace, &pkgid);
-                }
+                    true
+                };
             }
         }
         "clean" => {
@@ -264,17 +265,19 @@ impl CtxMethods for Ctx {
                     self.install(&rp[0], &pkgid);
                 }
                 else {
-                    for each_pkg_parent_workspace(&pkgid) |workspace| {
+                    do each_pkg_parent_workspace(&pkgid) |workspace| {
                         self.install(workspace, &pkgid);
-                    }
+                        true
+                    };
                 }
             }
         }
         "list" => {
             io::println("Installed packages:");
-            for installed_packages::list_installed_packages |pkg_id| {
-                io::println(pkg_id.local_path.to_str());
-            }
+            do installed_packages::list_installed_packages |pkg_id| {
+                println(pkg_id.local_path.to_str());
+                true
+            };
         }
         "prefer" => {
             if args.len() < 1 {
@@ -299,11 +302,12 @@ impl CtxMethods for Ctx {
             else {
                 let rp = rust_path();
                 assert!(!rp.is_empty());
-                for each_pkg_parent_workspace(&pkgid) |workspace| {
+                do each_pkg_parent_workspace(&pkgid) |workspace| {
                     path_util::uninstall_package_from(workspace, &pkgid);
                     note(fmt!("Uninstalled package %s (was installed in %s)",
                               pkgid.to_str(), workspace.to_str()));
-                }
+                    true
+                };
             }
         }
         "unprefer" => {
@@ -323,7 +323,7 @@ pub fn find_and_install_dependencies(ctxt: &Ctx,
     debug!("In find_and_install_dependencies...");
     let my_workspace = (*workspace).clone();
     let my_ctxt = *ctxt;
-    for c.each_view_item() |vi: &ast::view_item| {
+    do c.each_view_item() |vi: &ast::view_item| {
         debug!("A view item!");
         match vi.node {
             // ignore metadata, I guess
@@ -354,7 +354,8 @@ pub fn find_and_install_dependencies(ctxt: &Ctx,
             // Ignore `use`s
             _ => ()
         }
-    }
+        true
+    };
 }

 #[cfg(windows)]
@@ -211,12 +211,13 @@ pub fn escape_unicode(c: char, f: &fn(char)) {
         (c <= '\uffff') { f('u'); 4 }
         _ { f('U'); 8 }
     );
-    for int::range_step(4 * (pad - 1), -1, -4) |offset| {
+    do int::range_step(4 * (pad - 1), -1, -4) |offset| {
         match ((c as u32) >> offset) & 0xf {
             i @ 0 .. 9 => { f('0' + i as char); }
             i => { f('a' + (i - 10) as char); }
         }
-    }
+        true
+    };
 }

 ///
@@ -236,7 +236,9 @@ mod test {
     fn t(o1: Ordering, o2: Ordering, e: Ordering) {
         assert_eq!(lexical_ordering(o1, o2), e);
     }
-    for [Less, Equal, Greater].each |&o| {
+    let xs = [Less, Equal, Greater];
+
+    foreach &o in xs.iter() {
         t(Less, o, Less);
         t(Equal, o, o);
         t(Greater, o, Greater);
@@ -889,27 +889,34 @@ mod tests {
     fn test_ranges() {
         let mut l = ~[];

-        for range_rev(14,11) |i| {
+        do range_rev(14,11) |i| {
             l.push(i);
-        }
-        for range_step(20,26,2) |i| {
+            true
+        };
+        do range_step(20,26,2) |i| {
             l.push(i);
-        }
-        for range_step(36,30,-2) |i| {
+            true
+        };
+        do range_step(36,30,-2) |i| {
             l.push(i);
-        }
-        for range_step(max_value - 2, max_value, 2) |i| {
+            true
+        };
+        do range_step(max_value - 2, max_value, 2) |i| {
             l.push(i);
-        }
-        for range_step(max_value - 3, max_value, 2) |i| {
+            true
+        };
+        do range_step(max_value - 3, max_value, 2) |i| {
             l.push(i);
-        }
-        for range_step(min_value + 2, min_value, -2) |i| {
+            true
+        };
+        do range_step(min_value + 2, min_value, -2) |i| {
             l.push(i);
-        }
-        for range_step(min_value + 3, min_value, -2) |i| {
+            true
+        };
+        do range_step(min_value + 3, min_value, -2) |i| {
             l.push(i);
-        }
+            true
+        };
         assert_eq!(l, ~[13,12,11,
                         20,22,24,
                         36,34,32,
@@ -919,22 +926,22 @@ mod tests {
                         min_value+3,min_value+1]);

         // None of the `fail`s should execute.
-        for range_rev(0,10) |_i| {
+        do range_rev(0,10) |_i| {
             fail!(~"unreachable");
-        }
+        };
-        for range_step(10,0,1) |_i| {
+        do range_step(10,0,1) |_i| {
             fail!(~"unreachable");
-        }
+        };
-        for range_step(0,10,-1) |_i| {
+        do range_step(0,10,-1) |_i| {
             fail!(~"unreachable");
-        }
+        };
     }

     #[test]
     #[should_fail]
     #[ignore(cfg(windows))]
     fn test_range_step_zero_step() {
-        for range_step(0,10,0) |_i| {}
+        do range_step(0,10,0) |_i| { true };
     }
 }
@@ -654,27 +654,34 @@ mod tests {
     pub fn test_ranges() {
         let mut l = ~[];

-        for range_rev(14,11) |i| {
+        do range_rev(14,11) |i| {
             l.push(i);
-        }
-        for range_step(20,26,2) |i| {
+            true
+        };
+        do range_step(20,26,2) |i| {
             l.push(i);
-        }
-        for range_step(36,30,-2) |i| {
+            true
+        };
+        do range_step(36,30,-2) |i| {
             l.push(i);
-        }
-        for range_step(max_value - 2, max_value, 2) |i| {
+            true
+        };
+        do range_step(max_value - 2, max_value, 2) |i| {
             l.push(i);
-        }
-        for range_step(max_value - 3, max_value, 2) |i| {
+            true
+        };
+        do range_step(max_value - 3, max_value, 2) |i| {
             l.push(i);
-        }
-        for range_step(min_value + 2, min_value, -2) |i| {
+            true
+        };
+        do range_step(min_value + 2, min_value, -2) |i| {
             l.push(i);
-        }
-        for range_step(min_value + 3, min_value, -2) |i| {
+            true
+        };
+        do range_step(min_value + 3, min_value, -2) |i| {
             l.push(i);
-        }
+            true
+        };

         assert_eq!(l, ~[13,12,11,
                         20,22,24,
@@ -685,28 +692,28 @@ mod tests {
                         min_value+3,min_value+1]);

         // None of the `fail`s should execute.
-        for range_rev(0,0) |_i| {
+        do range_rev(0,0) |_i| {
             fail!("unreachable");
-        }
+        };
-        for range_step(10,0,1) |_i| {
+        do range_step(10,0,1) |_i| {
             fail!("unreachable");
-        }
+        };
-        for range_step(0,1,-10) |_i| {
+        do range_step(0,1,-10) |_i| {
             fail!("unreachable");
-        }
+        };
     }

     #[test]
     #[should_fail]
     #[ignore(cfg(windows))]
     fn test_range_step_zero_step_up() {
-        for range_step(0,10,0) |_i| {}
+        do range_step(0,10,0) |_i| { true };
     }
     #[test]
     #[should_fail]
     #[ignore(cfg(windows))]
     fn test_range_step_zero_step_down() {
-        for range_step(0,-10,0) |_i| {}
+        do range_step(0,-10,0) |_i| { true };
     }
 }
@@ -701,7 +701,7 @@ impl IsaacRng {
         if use_rsl {
             macro_rules! memloop (
                 ($arr:expr) => {{
-                    for u32::range_step(0, RAND_SIZE, 8) |i| {
+                    do u32::range_step(0, RAND_SIZE, 8) |i| {
                         a+=$arr[i ]; b+=$arr[i+1];
                         c+=$arr[i+2]; d+=$arr[i+3];
                         e+=$arr[i+4]; f+=$arr[i+5];
@@ -711,20 +711,22 @@ impl IsaacRng {
                         self.mem[i+2]=c; self.mem[i+3]=d;
                         self.mem[i+4]=e; self.mem[i+5]=f;
                         self.mem[i+6]=g; self.mem[i+7]=h;
-                    }
+                        true
+                    };
                 }}
             );

            memloop!(self.rsl);
            memloop!(self.mem);
         } else {
-            for u32::range_step(0, RAND_SIZE, 8) |i| {
+            do u32::range_step(0, RAND_SIZE, 8) |i| {
                 mix!();
                 self.mem[i ]=a; self.mem[i+1]=b;
                 self.mem[i+2]=c; self.mem[i+3]=d;
                 self.mem[i+4]=e; self.mem[i+5]=f;
                 self.mem[i+6]=g; self.mem[i+7]=h;
-            }
+                true
+            };
         }

         self.isaac();
@@ -764,12 +766,13 @@ impl IsaacRng {

         let r = [(0, MIDPOINT), (MIDPOINT, 0)];
         foreach &(mr_offset, m2_offset) in r.iter() {
-            for uint::range_step(0, MIDPOINT, 4) |base| {
+            do uint::range_step(0, MIDPOINT, 4) |base| {
                 rngstep!(0, 13);
                 rngstep!(1, -6);
                 rngstep!(2, 2);
                 rngstep!(3, -16);
-            }
+                true
+            };
         }

         self.a = a;
@@ -665,9 +665,10 @@ fn spawn_process_os(prog: &str, args: &[~str],
             fail!("failure in dup3(err_fd, 2): %s", os::last_os_error());
         }
         // close all other fds
-        for int::range_rev(getdtablesize() as int, 3) |fd| {
+        do int::range_rev(getdtablesize() as int, 3) |fd| {
             close(fd as c_int);
-        }
+            true
+        };

         do with_dirp(dir) |dirp| {
             if !dirp.is_null() && chdir(dirp) == -1 {
@@ -272,14 +272,13 @@ impl<T> TrieNode<T> {
     }

     fn each_reverse<'a>(&'a self, f: &fn(&uint, &'a T) -> bool) -> bool {
-        for uint::range_rev(self.children.len(), 0) |idx| {
+        do uint::range_rev(self.children.len(), 0) |idx| {
             match self.children[idx] {
-                Internal(ref x) => if !x.each_reverse(|i,t| f(i,t)) { return false },
-                External(k, ref v) => if !f(&k, v) { return false },
-                Nothing => ()
+                Internal(ref x) => x.each_reverse(|i,t| f(i,t)),
+                External(k, ref v) => f(&k, v),
+                Nothing => true
             }
         }
-        true
     }

     fn mutate_values<'a>(&'a mut self, f: &fn(&uint, &mut T) -> bool) -> bool {
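The `each_reverse` rewrite above shows the nicest payoff of the closure-returns-`bool` protocol: each `match` arm can simply evaluate to the callback's result, so the old `if !f(...) { return false }` arms collapse into plain calls. A hedged sketch of the same shape, with a hypothetical `Node` type and `each_leaf` helper that are not from libstd:

~~~
// Sketch only, under the era's syntax; `Node` and `each_leaf` are assumptions.
enum Node {
    Leaf(int),
    Empty,
}

fn each_leaf(nodes: &[Node], f: &fn(&int) -> bool) -> bool {
    let mut i = 0;
    while i < nodes.len() {
        let keep_going = match nodes[i] {
            Leaf(v) => f(&v),   // the callback decides whether to continue
            Empty => true,      // nothing to visit here, keep going
        };
        if !keep_going { return false; }
        i += 1;
    }
    true
}

fn main() {
    let ns = [Leaf(1), Empty, Leaf(2), Leaf(3)];
    let mut seen = ~[];
    // Stop as soon as a value >= 2 has been recorded.
    do each_leaf(ns) |&v| { seen.push(v); v < 2 };
    assert_eq!(seen, ~[1, 2]);
}
~~~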
@@ -417,17 +416,19 @@ mod test_map {
         let mut trie = TrieMap::new();
         let n = 300;

-        for uint::range_step(1, n, 2) |x| {
+        do uint::range_step(1, n, 2) |x| {
             assert!(trie.insert(x, x + 1));
             assert!(trie.contains_key(&x));
             check_integrity(&trie.root);
-        }
+            true
+        };

-        for uint::range_step(0, n, 2) |x| {
+        do uint::range_step(0, n, 2) |x| {
             assert!(!trie.contains_key(&x));
             assert!(trie.insert(x, x + 1));
             check_integrity(&trie.root);
-        }
+            true
+        };

         foreach x in range(0u, n) {
             assert!(trie.contains_key(&x));
@@ -435,17 +436,19 @@ mod test_map {
             check_integrity(&trie.root);
         }

-        for uint::range_step(1, n, 2) |x| {
+        do uint::range_step(1, n, 2) |x| {
             assert!(trie.remove(&x));
             assert!(!trie.contains_key(&x));
             check_integrity(&trie.root);
-        }
+            true
+        };

-        for uint::range_step(0, n, 2) |x| {
+        do uint::range_step(0, n, 2) |x| {
             assert!(trie.contains_key(&x));
             assert!(!trie.insert(x, x + 1));
             check_integrity(&trie.root);
-        }
+            true
+        };
     }

     #[test]
@@ -471,9 +474,10 @@ mod test_map {
     fn test_each_break() {
         let mut m = TrieMap::new();

-        for uint::range_rev(uint::max_value, uint::max_value - 10000) |x| {
+        do uint::range_rev(uint::max_value, uint::max_value - 10000) |x| {
             m.insert(x, x / 2);
-        }
+            true
+        };

         let mut n = uint::max_value - 10000;
         do m.each |k, v| {
@@ -511,9 +515,10 @@ mod test_map {
     fn test_each_reverse_break() {
         let mut m = TrieMap::new();

-        for uint::range_rev(uint::max_value, uint::max_value - 10000) |x| {
+        do uint::range_rev(uint::max_value, uint::max_value - 10000) |x| {
             m.insert(x, x / 2);
-        }
+            true
+        };

         let mut n = uint::max_value - 1;
         do m.each_reverse |k, v| {
@@ -94,12 +94,17 @@ pub fn lookup(name: &str) -> Option<Abi> {
 * Returns the ABI with the given name (if any).
 */

-    for each_abi |abi| {
+    let mut res = None;
+
+    do each_abi |abi| {
         if name == abi.data().name {
-            return Some(abi);
+            res = Some(abi);
+            false
+        } else {
+            true
         }
-    }
-    return None;
+    };
+    res
 }

 pub fn all_names() -> ~[&'static str] {
@@ -193,21 +198,24 @@ impl AbiSet {

     pub fn for_arch(&self, arch: Architecture) -> Option<Abi> {
         // NB---Single platform ABIs come first
-        for self.each |abi| {
+        let mut res = None;
+
+        do self.each |abi| {
             let data = abi.data();
             match data.abi_arch {
-                Archs(a) if (a & arch.bit()) != 0 => { return Some(abi); }
-                Archs(_) => { }
-                RustArch | AllArch => { return Some(abi); }
+                Archs(a) if (a & arch.bit()) != 0 => { res = Some(abi); false }
+                Archs(_) => { true }
+                RustArch | AllArch => { res = Some(abi); false }
             }
-        }
+        };

-        None
+        res
     }

     pub fn check_valid(&self) -> Option<(Abi, Abi)> {
         let mut abis = ~[];
-        for self.each |abi| { abis.push(abi); }
+        do self.each |abi| { abis.push(abi); true };

         foreach (i, abi) in abis.iter().enumerate() {
             let data = abi.data();
@@ -261,9 +269,10 @@ impl ToStr for Abi {
 impl ToStr for AbiSet {
     fn to_str(&self) -> ~str {
         let mut strs = ~[];
-        for self.each |abi| {
+        do self.each |abi| {
             strs.push(abi.data().name);
-        }
+            true
+        };
         fmt!("\"%s\"", strs.connect(" "))
     }
 }
@@ -126,10 +126,10 @@ pub fn visit_mod<E:Clone>(m: &_mod,
                           _sp: span,
                           _id: NodeId,
                           (e, v): (E, vt<E>)) {
-    for m.view_items.iter().advance |vi| {
+    foreach vi in m.view_items.iter() {
        (v.visit_view_item)(vi, (e.clone(), v));
     }
-    for m.items.iter().advance |i| {
+    foreach i in m.items.iter() {
        (v.visit_item)(*i, (e.clone(), v));
     }
 }
@@ -173,10 +173,10 @@ pub fn visit_item<E:Clone>(i: &item, (e, v): (E, vt<E>)) {
         }
         item_mod(ref m) => (v.visit_mod)(m, i.span, i.id, (e, v)),
         item_foreign_mod(ref nm) => {
-            for nm.view_items.iter().advance |vi| {
+            foreach vi in nm.view_items.iter() {
                 (v.visit_view_item)(vi, (e.clone(), v));
             }
-            for nm.items.iter().advance |ni| {
+            foreach ni in nm.items.iter() {
                 (v.visit_foreign_item)(*ni, (e.clone(), v));
             }
         }
@@ -194,11 +194,11 @@ pub fn visit_item<E:Clone>(i: &item, (e, v): (E, vt<E>)) {
         }
         item_impl(ref tps, ref traits, ref ty, ref methods) => {
             (v.visit_generics)(tps, (e.clone(), v));
-            for traits.iter().advance |p| {
+            foreach p in traits.iter() {
                 visit_trait_ref(p, (e.clone(), v));
             }
             (v.visit_ty)(ty, (e.clone(), v));
-            for methods.iter().advance |m| {
+            foreach m in methods.iter() {
                 visit_method_helper(*m, (e.clone(), v))
             }
         }
@@ -208,10 +208,10 @@ pub fn visit_item<E:Clone>(i: &item, (e, v): (E, vt<E>)) {
         }
         item_trait(ref generics, ref traits, ref methods) => {
             (v.visit_generics)(generics, (e.clone(), v));
-            for traits.iter().advance |p| {
+            foreach p in traits.iter() {
                 visit_path(&p.path, (e.clone(), v));
             }
-            for methods.iter().advance |m| {
+            foreach m in methods.iter() {
                 (v.visit_trait_method)(m, (e.clone(), v));
             }
         }
@@ -222,10 +222,10 @@ pub fn visit_item<E:Clone>(i: &item, (e, v): (E, vt<E>)) {
 pub fn visit_enum_def<E:Clone>(enum_definition: &ast::enum_def,
                                tps: &Generics,
                                (e, v): (E, vt<E>)) {
-    for enum_definition.variants.iter().advance |vr| {
+    foreach vr in enum_definition.variants.iter() {
         match vr.node.kind {
             tuple_variant_kind(ref variant_args) => {
-                for variant_args.iter().advance |va| {
+                foreach va in variant_args.iter() {
                     (v.visit_ty)(&va.ty, (e.clone(), v));
                 }
             }
@@ -235,7 +235,7 @@ pub fn visit_enum_def<E:Clone>(enum_definition: &ast::enum_def,
             }
         }
         // Visit the disr expr if it exists
-        for vr.node.disr_expr.iter().advance |ex| {
+        foreach ex in vr.node.disr_expr.iter() {
             (v.visit_expr)(*ex, (e.clone(), v))
         }
     }
@@ -250,12 +250,12 @@ pub fn visit_ty<E:Clone>(t: &Ty, (e, v): (E, vt<E>)) {
             (v.visit_ty)(mt.ty, (e, v));
         },
         ty_tup(ref ts) => {
-            for ts.iter().advance |tt| {
+            foreach tt in ts.iter() {
                 (v.visit_ty)(tt, (e.clone(), v));
             }
         },
         ty_closure(ref f) => {
-            for f.decl.inputs.iter().advance |a| {
+            foreach a in f.decl.inputs.iter() {
                 (v.visit_ty)(&a.ty, (e.clone(), v));
             }
             (v.visit_ty)(&f.decl.output, (e.clone(), v));
@@ -264,7 +264,7 @@ pub fn visit_ty<E:Clone>(t: &Ty, (e, v): (E, vt<E>)) {
             };
         },
         ty_bare_fn(ref f) => {
-            for f.decl.inputs.iter().advance |a| {
+            foreach a in f.decl.inputs.iter() {
                 (v.visit_ty)(&a.ty, (e.clone(), v));
             }
             (v.visit_ty)(&f.decl.output, (e, v));
@@ -284,27 +284,27 @@ pub fn visit_ty<E:Clone>(t: &Ty, (e, v): (E, vt<E>)) {
 }

 pub fn visit_path<E:Clone>(p: &Path, (e, v): (E, vt<E>)) {
-    for p.types.iter().advance |tp| { (v.visit_ty)(tp, (e.clone(), v)); }
+    foreach tp in p.types.iter() { (v.visit_ty)(tp, (e.clone(), v)); }
 }

 pub fn visit_pat<E:Clone>(p: &pat, (e, v): (E, vt<E>)) {
     match p.node {
         pat_enum(ref path, ref children) => {
             visit_path(path, (e.clone(), v));
-            for children.iter().advance |children| {
-                for children.iter().advance |child| {
+            foreach children in children.iter() {
+                foreach child in children.iter() {
                     (v.visit_pat)(*child, (e.clone(), v));
                 }
             }
         }
         pat_struct(ref path, ref fields, _) => {
             visit_path(path, (e.clone(), v));
-            for fields.iter().advance |f| {
+            foreach f in fields.iter() {
                 (v.visit_pat)(f.pat, (e.clone(), v));
             }
         }
         pat_tup(ref elts) => {
-            for elts.iter().advance |elt| {
+            foreach elt in elts.iter() {
                 (v.visit_pat)(*elt, (e.clone(), v))
             }
         },
@@ -313,7 +313,7 @@ pub fn visit_pat<E:Clone>(p: &pat, (e, v): (E, vt<E>)) {
         },
         pat_ident(_, ref path, ref inner) => {
             visit_path(path, (e.clone(), v));
-            for inner.iter().advance |subpat| {
+            foreach subpat in inner.iter() {
                 (v.visit_pat)(*subpat, (e.clone(), v))
             }
         }
@@ -324,13 +324,13 @@ pub fn visit_pat<E:Clone>(p: &pat, (e, v): (E, vt<E>)) {
         }
         pat_wild => (),
         pat_vec(ref before, ref slice, ref after) => {
-            for before.iter().advance |elt| {
+            foreach elt in before.iter() {
                 (v.visit_pat)(*elt, (e.clone(), v));
             }
-            for slice.iter().advance |elt| {
+            foreach elt in slice.iter() {
                 (v.visit_pat)(*elt, (e.clone(), v));
             }
-            for after.iter().advance |tail| {
+            foreach tail in after.iter() {
                 (v.visit_pat)(*tail, (e.clone(), v));
             }
         }
@@ -351,7 +351,7 @@ pub fn visit_foreign_item<E:Clone>(ni: &foreign_item, (e, v): (E, vt<E>)) {

 pub fn visit_ty_param_bounds<E:Clone>(bounds: &OptVec<TyParamBound>,
                                       (e, v): (E, vt<E>)) {
-    for bounds.iter().advance |bound| {
+    foreach bound in bounds.iter() {
         match *bound {
             TraitTyParamBound(ref ty) => visit_trait_ref(ty, (e.clone(), v)),
             RegionTyParamBound => {}
@@ -360,13 +360,13 @@ pub fn visit_ty_param_bounds<E:Clone>(bounds: &OptVec<TyParamBound>,
 }

 pub fn visit_generics<E:Clone>(generics: &Generics, (e, v): (E, vt<E>)) {
-    for generics.ty_params.iter().advance |tp| {
+    foreach tp in generics.ty_params.iter() {
         visit_ty_param_bounds(&tp.bounds, (e.clone(), v));
     }
 }

 pub fn visit_fn_decl<E:Clone>(fd: &fn_decl, (e, v): (E, vt<E>)) {
-    for fd.inputs.iter().advance |a| {
+    foreach a in fd.inputs.iter() {
         (v.visit_pat)(a.pat, (e.clone(), v));
         (v.visit_ty)(&a.ty, (e.clone(), v));
     }
@@ -399,7 +399,7 @@ pub fn visit_fn<E:Clone>(fk: &fn_kind,
 }

 pub fn visit_ty_method<E:Clone>(m: &TypeMethod, (e, v): (E, vt<E>)) {
-    for m.decl.inputs.iter().advance |a| {
+    foreach a in m.decl.inputs.iter() {
         (v.visit_ty)(&a.ty, (e.clone(), v));
     }
     (v.visit_generics)(&m.generics, (e.clone(), v));
@@ -420,7 +420,7 @@ pub fn visit_struct_def<E:Clone>(
     _id: NodeId,
     (e, v): (E, vt<E>)
 ) {
-    for sd.fields.iter().advance |f| {
+    foreach f in sd.fields.iter() {
         (v.visit_struct_field)(*f, (e.clone(), v));
     }
 }
@@ -430,10 +430,10 @@ pub fn visit_struct_field<E:Clone>(sf: &struct_field, (e, v): (E, vt<E>)) {
 }

 pub fn visit_block<E:Clone>(b: &Block, (e, v): (E, vt<E>)) {
-    for b.view_items.iter().advance |vi| {
+    foreach vi in b.view_items.iter() {
         (v.visit_view_item)(vi, (e.clone(), v));
     }
-    for b.stmts.iter().advance |s| {
+    foreach s in b.stmts.iter() {
         (v.visit_stmt)(*s, (e.clone(), v));
     }
     visit_expr_opt(b.expr, (e, v));
@@ -460,7 +460,7 @@ pub fn visit_expr_opt<E>(eo: Option<@expr>, (e, v): (E, vt<E>)) {
 }

 pub fn visit_exprs<E:Clone>(exprs: &[@expr], (e, v): (E, vt<E>)) {
-    for exprs.iter().advance |ex| { (v.visit_expr)(*ex, (e.clone(), v)); }
+    foreach ex in exprs.iter() { (v.visit_expr)(*ex, (e.clone(), v)); }
 }

 pub fn visit_mac<E>(_m: &mac, (_e, _v): (E, vt<E>)) {
@@ -477,13 +477,13 @@ pub fn visit_expr<E:Clone>(ex: @expr, (e, v): (E, vt<E>)) {
         }
         expr_struct(ref p, ref flds, base) => {
             visit_path(p, (e.clone(), v));
-            for flds.iter().advance |f| {
+            foreach f in flds.iter() {
                 (v.visit_expr)(f.expr, (e.clone(), v));
             }
             visit_expr_opt(base, (e.clone(), v));
         }
         expr_tup(ref elts) => {
-            for elts.iter().advance |el| { (v.visit_expr)(*el, (e.clone(), v)) }
+            foreach el in elts.iter() { (v.visit_expr)(*el, (e.clone(), v)) }
         }
         expr_call(callee, ref args, _) => {
             visit_exprs(*args, (e.clone(), v));
@@ -491,7 +491,7 @@ pub fn visit_expr<E:Clone>(ex: @expr, (e, v): (E, vt<E>)) {
         }
         expr_method_call(_, callee, _, ref tys, ref args, _) => {
             visit_exprs(*args, (e.clone(), v));
-            for tys.iter().advance |tp| {
+            foreach tp in tys.iter() {
                 (v.visit_ty)(tp, (e.clone(), v));
             }
             (v.visit_expr)(callee, (e.clone(), v));
@@ -524,7 +524,7 @@ pub fn visit_expr<E:Clone>(ex: @expr, (e, v): (E, vt<E>)) {
         expr_loop(ref b, _) => (v.visit_block)(b, (e.clone(), v)),
         expr_match(x, ref arms) => {
             (v.visit_expr)(x, (e.clone(), v));
-            for arms.iter().advance |a| { (v.visit_arm)(a, (e.clone(), v)); }
+            foreach a in arms.iter() { (v.visit_arm)(a, (e.clone(), v)); }
         }
         expr_fn_block(ref decl, ref body) => {
             (v.visit_fn)(
@@ -547,7 +547,7 @@ pub fn visit_expr<E:Clone>(ex: @expr, (e, v): (E, vt<E>)) {
         }
         expr_field(x, _, ref tys) => {
             (v.visit_expr)(x, (e.clone(), v));
-            for tys.iter().advance |tp| {
+            foreach tp in tys.iter() {
                 (v.visit_ty)(tp, (e.clone(), v));
             }
         }
@@ -567,10 +567,10 @@ pub fn visit_expr<E:Clone>(ex: @expr, (e, v): (E, vt<E>)) {
         expr_mac(ref mac) => visit_mac(mac, (e.clone(), v)),
         expr_paren(x) => (v.visit_expr)(x, (e.clone(), v)),
         expr_inline_asm(ref a) => {
-            for a.inputs.iter().advance |&(_, input)| {
+            foreach &(_, input) in a.inputs.iter() {
                 (v.visit_expr)(input, (e.clone(), v));
             }
-            for a.outputs.iter().advance |&(_, out)| {
+            foreach &(_, out) in a.outputs.iter() {
                 (v.visit_expr)(out, (e.clone(), v));
             }
         }
@@ -579,7 +579,7 @@ pub fn visit_expr<E:Clone>(ex: @expr, (e, v): (E, vt<E>)) {
 }

 pub fn visit_arm<E:Clone>(a: &arm, (e, v): (E, vt<E>)) {
-    for a.pats.iter().advance |p| { (v.visit_pat)(*p, (e.clone(), v)); }
+    foreach p in a.pats.iter() { (v.visit_pat)(*p, (e.clone(), v)); }
     visit_expr_opt(a.guard, (e.clone(), v));
     (v.visit_block)(&a.body, (e.clone(), v));
 }
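The visit.rs hunks above are a purely mechanical rewrite from the old external-iterator bridge `for xs.iter().advance |x| { ... }` to the `foreach x in xs.iter() { ... }` form that this commit lets `for` parse as well. A minimal sketch of that before/after, with illustrative values only:

~~~
// Sketch of the rewrite applied throughout visit.rs; the vector and the
// printed strings are made up for illustration.
fn main() {
    let names = ~[~"ty", ~"pat", ~"expr"];

    // Old form, as removed by this commit:
    //     for names.iter().advance |n| { println(*n); }

    // New form:
    foreach n in names.iter() {
        println(*n);
    }
}
~~~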
@@ -11,7 +11,7 @@
 use abi;
 use abi::AbiSet;
 use ast::{Sigil, BorrowedSigil, ManagedSigil, OwnedSigil};
-use ast::{CallSugar, NoSugar, DoSugar, ForSugar};
+use ast::{CallSugar, NoSugar, DoSugar};
 use ast::{TyBareFn, TyClosure};
 use ast::{RegionTyParamBound, TraitTyParamBound};
 use ast::{provided, public, purity};
@@ -24,7 +24,7 @@ use ast::{expr, expr_, expr_addr_of, expr_match, expr_again};
 use ast::{expr_assign, expr_assign_op, expr_binary, expr_block};
 use ast::{expr_break, expr_call, expr_cast, expr_do_body};
 use ast::{expr_field, expr_fn_block, expr_if, expr_index};
-use ast::{expr_lit, expr_log, expr_loop, expr_loop_body, expr_mac};
+use ast::{expr_lit, expr_log, expr_loop, expr_mac};
 use ast::{expr_method_call, expr_paren, expr_path, expr_repeat};
 use ast::{expr_ret, expr_self, expr_struct, expr_tup, expr_unary};
 use ast::{expr_vec, expr_vstore, expr_vstore_mut_box};
@@ -1626,8 +1626,7 @@ impl Parser {
         } else if self.eat_keyword(keywords::ForEach) {
             return self.parse_for_expr();
         } else if self.eat_keyword(keywords::For) {
-            return self.parse_sugary_call_expr(lo, ~"for", ForSugar,
-                                               expr_loop_body);
+            return self.parse_for_expr();
         } else if self.eat_keyword(keywords::Do) {
             return self.parse_sugary_call_expr(lo, ~"do", DoSugar,
                                                expr_do_body);
@@ -2326,9 +2325,9 @@ impl Parser {
         }
     }

-    // parse a 'foreach' .. 'in' expression ('foreach' token already eaten)
+    // parse a 'for' .. 'in' expression ('for' token already eaten)
     pub fn parse_for_expr(&self) -> @expr {
-        // Parse: `foreach <src_pat> in <src_expr> <src_loop_block>`
+        // Parse: `for <src_pat> in <src_expr> <src_loop_block>`

         let lo = self.last_span.lo;
         let pat = self.parse_pat();
@@ -53,21 +53,24 @@ fn descending<M: MutableMap<uint, uint>>(map: &mut M, n_keys: uint) {
     io::println(" Descending integers:");

     do timed("insert") {
-        for uint::range_rev(n_keys, 0) |i| {
+        do uint::range_rev(n_keys, 0) |i| {
             map.insert(i, i + 1);
-        }
+            true
+        };
     }

     do timed("search") {
-        for uint::range_rev(n_keys, 0) |i| {
+        do uint::range_rev(n_keys, 0) |i| {
             assert_eq!(map.find(&i).unwrap(), &(i + 1));
-        }
+            true
+        };
     }

     do timed("remove") {
-        for uint::range_rev(n_keys, 0) |i| {
+        do uint::range_rev(n_keys, 0) |i| {
             assert!(map.remove(&i));
-        }
+            true
+        };
     }
 }
@@ -1,15 +0,0 @@
-// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-fn main() {
-    fn quux(_: &fn(&int) -> bool) -> () { }
-    for quux |_| { } //~ ERROR expected `for` closure to return
-                     // `bool`, but found `()`
-}
@@ -1,16 +0,0 @@
-// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-fn main() {
-    fn quux(_: &fn(&int) -> int) -> bool { true }
-    for quux |_| { } //~ ERROR A `for` loop iterator should expect a
-                     // closure that returns `bool`. This iterator
-                     // expects a closure that returns `int`.
-}
@@ -17,7 +17,6 @@
 fn borrow(_v: &int) {}
 fn borrow_mut(_v: &mut int) {}
 fn cond() -> bool { fail!() }
-fn for_func(_f: &fn() -> bool) -> bool { fail!() }
 fn produce<T>() -> T { fail!(); }

 fn inc(v: &mut ~int) {
@@ -70,17 +69,6 @@ fn while_aliased_mut() {
     }
 }

-fn for_loop_aliased_mut() {
-    // In this instance, the borrow is carried through the loop.
-
-    let mut v = ~3;
-    let mut w = ~4;
-    let mut _x = &w;
-    for for_func {
-        borrow_mut(v); //~ ERROR cannot borrow
-        _x = &v;
-    }
-}
-
 fn loop_aliased_mut_break() {
     // In this instance, the borrow is carried through the loop.
@@ -110,21 +98,6 @@ fn while_aliased_mut_break() {
         borrow_mut(v); //~ ERROR cannot borrow
     }

-fn for_aliased_mut_break() {
-    // In this instance, the borrow is carried through the loop.
-
-    let mut v = ~3;
-    let mut w = ~4;
-    let mut _x = &w;
-    for for_func {
-        // here we cannot be sure that `for_func` respects the break below
-        borrow_mut(v); //~ ERROR cannot borrow
-        _x = &v;
-        break;
-    }
-    borrow_mut(v); //~ ERROR cannot borrow
-}
-
 fn while_aliased_mut_cond(cond: bool, cond2: bool) {
     let mut v = ~3;
     let mut w = ~4;
@@ -1,16 +0,0 @@
-// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use std::task;
-
-fn main() {
-    for task::spawn { return true; } //~ ERROR A `for` loop iterator should expect a closure that
-    //~^ ERROR expected `for` closure to return `bool`
-}
@@ -1,20 +0,0 @@
-// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-
-#[forbid(deprecated_for_loop)];
-
-fn f(_: &fn() -> bool) -> bool {
-    true
-}
-
-fn main() {
-    for f {} //~ ERROR `for` is deprecated
-}
@@ -31,16 +31,6 @@ fn no_env_no_for<'a>(_: &'a uint, blk: &fn(p: &'a fn())) {
     blk(|| ())
 }

-fn no_env_but_for<'a>(_: &'a uint, blk: &fn(p: &'a fn() -> bool) -> bool) {
-    // Test that a `for` loop is considered to hvae
-    // implicit free variables.
-    //
-    // FIXME(#4846): The `&'a uint` parameter is needed to ensure that `'a`
-    // is a free and not bound region name.
-
-    for blk { } //~ ERROR cannot infer an appropriate lifetime
-}
-
 fn repeating_loop() {
     // Test that the closure cannot be created within `loop` loop and
     // called without, even though the state that it closes over is
@@ -30,14 +30,14 @@ impl<A> iterable<A> for ~[A] {

 fn length<A, T: iterable<A>>(x: T) -> uint {
     let mut len = 0;
-    for x.iterate() |_y| { len += 1 }
+    do x.iterate() |_y| { len += 1; true };
     return len;
 }

 pub fn main() {
     let x = ~[0,1,2,3];
     // Call a method
-    for x.iterate() |y| { assert!(x[*y] == *y); }
+    do x.iterate() |y| { assert!(x[*y] == *y); true };
     // Call a parameterized function
     assert_eq!(length(x.clone()), x.len());
     // Call a parameterized function, with type arguments that require
@@ -47,7 +47,7 @@ pub fn main() {
     // Now try it with a type that *needs* to be borrowed
     let z = [0,1,2,3];
     // Call a method
-    for z.iterate() |y| { assert!(z[*y] == *y); }
+    do z.iterate() |y| { assert!(z[*y] == *y); true };
     // Call a parameterized function
     assert_eq!(length::<int, &[int]>(z), z.len());
 }
@@ -31,9 +31,10 @@ pub fn main() {
     add_int(ints, 22);
     add_int(ints, 44);

-    for iter_ints(ints) |i| {
+    do iter_ints(ints) |i| {
         error!("int = %d", *i);
-    }
+        true
+    };

     error!("ints=%?", ints);
 }
@@ -9,7 +9,7 @@
 // except according to those terms.

 // no-reformat
-// Testing various forms of `do` and `for` with empty arg lists
+// Testing various forms of `do` with empty arg lists

 fn f(f: &fn() -> bool) -> bool {
     true
@@ -20,8 +20,4 @@ pub fn main() {
     do f() { true };
     do f || { true };
     do f { true };
-    for f() || { }
-    for f() { }
-    for f || { }
-    for f { }
 }
@@ -8,13 +8,12 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.

-// Testing that we can drop the || in for/do exprs
+// Testing that we can drop the || in do exprs

 fn f(f: @fn() -> bool) -> bool { true }

 fn d(f: @fn()) { }

 pub fn main() {
-    for f { }
     do d { }
 }
@@ -40,75 +40,87 @@ pub fn main() {
     // int and uint have same result for
     // Sum{100 > i >= 2} == (Sum{1 <= i <= 99} - 1) == n*(n+1)/2 - 1 for n=99
     let mut sum = 0u;
-    for uint_range_rev(100, 2) |i| {
+    do uint_range_rev(100, 2) |i| {
         sum += i;
-    }
+        true
+    };
     assert_eq!(sum, 4949);

     let mut sum = 0i;
-    for int_range_rev(100, 2) |i| {
+    do int_range_rev(100, 2) |i| {
         sum += i;
-    }
+        true
+    };
     assert_eq!(sum, 4949);


     // elements are visited in correct order
     let primes = [2,3,5,7,11];
     let mut prod = 1i;
-    for uint_range_rev(5, 0) |i| {
+    do uint_range_rev(5, 0) |i| {
         printfln!("uint 4 downto 0: %u", i);
         prod *= int::pow(primes[i], i);
-    }
+        true
+    };
     assert_eq!(prod, 11*11*11*11*7*7*7*5*5*3*1);
     let mut prod = 1i;
-    for int_range_rev(5, 0) |i| {
+    do int_range_rev(5, 0) |i| {
         printfln!("int 4 downto 0: %d", i);
         prod *= int::pow(primes[i], i as uint);
-    }
+        true
+    };
     assert_eq!(prod, 11*11*11*11*7*7*7*5*5*3*1);


     // range and range_rev are symmetric.
     let mut sum_up = 0u;
-    for uint_range(10, 30) |i| {
+    do uint_range(10, 30) |i| {
         sum_up += i;
-    }
+        true
+    };
     let mut sum_down = 0u;
-    for uint_range_rev(30, 10) |i| {
+    do uint_range_rev(30, 10) |i| {
         sum_down += i;
-    }
+        true
+    };
     assert_eq!(sum_up, sum_down);

     let mut sum_up = 0;
-    for int_range(-20, 10) |i| {
+    do int_range(-20, 10) |i| {
         sum_up += i;
-    }
+        true
+    };
     let mut sum_down = 0;
-    for int_range_rev(10, -20) |i| {
+    do int_range_rev(10, -20) |i| {
         sum_down += i;
-    }
+        true
+    };
     assert_eq!(sum_up, sum_down);


     // empty ranges
-    for int_range_rev(10, 10) |_| {
+    do int_range_rev(10, 10) |_| {
         fail!("range should be empty when start == stop");
-    }
+        true
+    };

-    for uint_range_rev(0, 1) |_| {
+    do uint_range_rev(0, 1) |_| {
         fail!("range should be empty when start-1 underflows");
-    }
+        true
+    };

     // range iterations do not wrap/underflow
     let mut uflo_loop_visited = ~[];
-    for int_range_step(int::min_value+15, int::min_value, -4) |x| {
+    do int_range_step(int::min_value+15, int::min_value, -4) |x| {
         uflo_loop_visited.push(x - int::min_value);
-    }
+        true
+    };
     assert_eq!(uflo_loop_visited, ~[15, 11, 7, 3]);

     let mut uflo_loop_visited = ~[];
-    for uint_range_step(uint::min_value+15, uint::min_value, -4) |x| {
+    do uint_range_step(uint::min_value+15, uint::min_value, -4) |x| {
         uflo_loop_visited.push(x - uint::min_value);
-    }
+        true
+    };
     assert_eq!(uflo_loop_visited, ~[15, 11, 7, 3]);
 }
@@ -32,88 +32,101 @@ pub fn main() {
     // int and uint have same result for
     // Sum{2 <= i < 100} == (Sum{1 <= i <= 99} - 1) == n*(n+1)/2 - 1 for n=99
     let mut sum = 0u;
-    for uint_range(2, 100) |i| {
+    do uint_range(2, 100) |i| {
         sum += i;
-    }
+        true
+    };
     assert_eq!(sum, 4949);

     let mut sum = 0i;
-    for int_range(2, 100) |i| {
+    do int_range(2, 100) |i| {
         sum += i;
-    }
+        true
+    };
     assert_eq!(sum, 4949);


     // elements are visited in correct order
     let primes = [2,3,5,7];
     let mut prod = 1i;
-    for uint_range(0, 4) |i| {
+    do uint_range(0, 4) |i| {
         prod *= int::pow(primes[i], i);
-    }
+        true
+    };
     assert_eq!(prod, 1*3*5*5*7*7*7);
     let mut prod = 1i;
-    for int_range(0, 4) |i| {
+    do int_range(0, 4) |i| {
         prod *= int::pow(primes[i], i as uint);
-    }
+        true
+    };
     assert_eq!(prod, 1*3*5*5*7*7*7);


     // empty ranges
-    for int_range(10, 10) |_| {
+    do int_range(10, 10) |_| {
         fail!("range should be empty when start == stop");
-    }
+        true
+    };

-    for uint_range(10, 10) |_| {
+    do uint_range(10, 10) |_| {
         fail!("range should be empty when start == stop");
-    }
+        true
+    };


     // range iterations do not wrap/overflow
     let mut oflo_loop_visited = ~[];
-    for uint_range_step(uint::max_value-15, uint::max_value, 4) |x| {
+    do uint_range_step(uint::max_value-15, uint::max_value, 4) |x| {
         oflo_loop_visited.push(uint::max_value - x);
-    }
+        true
+    };
     assert_eq!(oflo_loop_visited, ~[15, 11, 7, 3]);

     let mut oflo_loop_visited = ~[];
-    for int_range_step(int::max_value-15, int::max_value, 4) |x| {
+    do int_range_step(int::max_value-15, int::max_value, 4) |x| {
         oflo_loop_visited.push(int::max_value - x);
-    }
+        true
+    };
     assert_eq!(oflo_loop_visited, ~[15, 11, 7, 3]);


     // range_step never passes nor visits the stop element
-    for int_range_step(0, 21, 3) |x| {
+    do int_range_step(0, 21, 3) |x| {
         assert!(x < 21);
-    }
+        true
+    };

     // range_step_inclusive will never pass stop element, and may skip it.
     let mut saw21 = false;
-    for uint::range_step_inclusive(0, 21, 4) |x| {
+    do uint::range_step_inclusive(0, 21, 4) |x| {
         assert!(x <= 21);
         if x == 21 { saw21 = true; }
-    }
+        true
+    };
     assert!(!saw21);
     let mut saw21 = false;
-    for int::range_step_inclusive(0, 21, 4) |x| {
+    do int::range_step_inclusive(0, 21, 4) |x| {
         assert!(x <= 21);
         if x == 21 { saw21 = true; }
-    }
+        true
+    };
     assert!(!saw21);

     // range_step_inclusive will never pass stop element, but may visit it.
     let mut saw21 = false;
-    for uint::range_step_inclusive(0, 21, 3) |x| {
+    do uint::range_step_inclusive(0, 21, 3) |x| {
         assert!(x <= 21);
         printfln!("saw: %u", x);
         if x == 21 { saw21 = true; }
-    }
+        true
+    };
     assert!(saw21);
     let mut saw21 = false;
-    for int::range_step_inclusive(0, 21, 3) |x| {
+    do int::range_step_inclusive(0, 21, 3) |x| {
         assert!(x <= 21);
         if x == 21 { saw21 = true; }
-    }
+        true
+    };
     assert!(saw21);

 }
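The `range_step_inclusive` assertions above encode two related properties: iteration never steps past the stop value, and the stop value itself is visited only when the step lands on it exactly (step 3 reaches 21, step 4 does not). A small sketch of the behaviour being tested, again in current Rust with invented names rather than the library's actual 2013 API:

~~~
// Illustrative only: inclusive stepping that never passes `stop` and
// visits `stop` only when the step hits it exactly.
fn range_step_inclusive(start: u64, stop: u64, step: u64,
                        it: &mut dyn FnMut(u64) -> bool) -> bool {
    let mut i = start;
    while i <= stop {
        if !it(i) {
            return false;
        }
        match i.checked_add(step) {
            Some(next) => i = next,
            None => return true, // the next step would overflow, so stop here
        }
    }
    true
}

fn main() {
    let mut saw21 = false;
    // Step 4 visits 0, 4, 8, 12, 16, 20 and then stops: 21 is skipped.
    range_step_inclusive(0, 21, 4, &mut |x| { if x == 21 { saw21 = true; } true });
    assert!(!saw21);
    // Step 3 lands on 21 exactly, so 21 is visited.
    range_step_inclusive(0, 21, 3, &mut |x| { if x == 21 { saw21 = true; } true });
    assert!(saw21);
}
~~~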
@@ -1,75 +0,0 @@
-// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// xfail-fast
-
-use std::cmp::Eq;
-use std::vec;
-
-fn iter<T>(v: ~[T], it: &fn(&T) -> bool) -> bool {
-    let mut i = 0u;
-    let mut l = v.len();
-    while i < l {
-        if !it(&v[i]) { return false; }
-        i += 1u;
-    }
-    return true;
-}
-
-fn find_pos<T:Eq + Clone>(n: T, h: ~[T]) -> Option<uint> {
-    let mut i = 0u;
-    for iter(h.clone()) |e| {
-        if *e == n { return Some(i); }
-        i += 1u;
-    }
-    None
-}
-
-fn bail_deep(x: ~[~[bool]]) {
-    let mut seen = false;
-    for iter(x.clone()) |x| {
-        for iter(x.clone()) |x| {
-            assert!(!seen);
-            if *x { seen = true; return; }
-        }
-    }
-    assert!(!seen);
-}
-
-fn ret_deep() -> ~str {
-    for iter(~[1, 2]) |e| {
-        for iter(~[3, 4]) |x| {
-            if *e + *x > 4 { return ~"hi"; }
-        }
-    }
-    return ~"bye";
-}
-
-pub fn main() {
-    let mut last = 0;
-    let v = ~[1, 2, 3, 4, 5, 6, 7];
-    for v.iter().all |e| {
-        last = *e;
-        if *e == 5 { break; }
-        if *e % 2 == 1 { loop; }
-        assert_eq!(*e % 2, 0);
-    };
-    assert_eq!(last, 5);
-
-    assert_eq!(find_pos(1, ~[0, 1, 2, 3]), Some(1u));
-    assert_eq!(find_pos(1, ~[0, 4, 2, 3]), None);
-    assert_eq!(find_pos(~"hi", ~[~"foo", ~"bar", ~"baz", ~"hi"]), Some(3u));
-
-    bail_deep(~[~[false, false], ~[true, true], ~[false, true]]);
-    bail_deep(~[~[true]]);
-    bail_deep(~[~[false, false, false]]);
-
-    assert_eq!(ret_deep(), ~"hi");
-}
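The file deleted above exercised `break`, `loop`, and early `return` inside the old protocol-based `for`, behaviour that does not carry over to a plain `do` block, so it is removed rather than converted. For comparison, here is a rough modern-iterator equivalent of its `find_pos` check, my own sketch rather than code from the patch, showing that early exit needs no bool protocol with external iterators:

~~~
// Illustrative only: early exit from an external-iterator loop is an
// ordinary `return` (or `break`), no callback protocol required.
fn find_pos<T: PartialEq>(needle: &T, haystack: &[T]) -> Option<usize> {
    for (i, e) in haystack.iter().enumerate() {
        if e == needle {
            return Some(i); // early exit
        }
    }
    None
}

fn main() {
    assert_eq!(find_pos(&1, &[0, 1, 2, 3]), Some(1));
    assert_eq!(find_pos(&1, &[0, 4, 2, 3]), None);
    assert_eq!(find_pos(&"hi", &["foo", "bar", "baz", "hi"]), Some(3));
}
~~~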