Make vectors uglier ([]/~). Sorry. Should be temporary. Closes #2725.

This commit is contained in:
Michael Sullivan 2012-06-25 20:00:46 -07:00
parent c087aaf56b
commit 329eca6044
418 changed files with 4123 additions and 4034 deletions

View file

@ -21,15 +21,15 @@ type package = {
method: str,
description: str,
ref: option<str>,
tags: [str],
versions: [(str, str)]
tags: [str]/~,
versions: [(str, str)]/~
};
type local_package = {
name: str,
metaname: str,
version: str,
files: [str]
files: [str]/~
};
type source = @{
@ -38,7 +38,7 @@ type source = @{
mut method: str,
mut key: option<str>,
mut keyfp: option<str>,
mut packages: [mut package]
mut packages: [mut package]/~
};
type cargo = {
@ -62,21 +62,21 @@ type crate = {
desc: option<str>,
sigs: option<str>,
crate_type: option<str>,
deps: [str]
deps: [str]/~
};
type options = {
test: bool,
mode: mode,
free: [str],
free: [str]/~,
help: bool,
};
enum mode { system_mode, user_mode, local_mode }
fn opts() -> [getopts::opt] {
fn opts() -> [getopts::opt]/~ {
[optflag("g"), optflag("G"), optflag("test"),
optflag("h"), optflag("help")]
optflag("h"), optflag("help")]/~
}
fn info(msg: str) {
@ -216,7 +216,7 @@ fn assume_source_method(url: str) -> str {
"curl"
}
fn load_link(mis: [@ast::meta_item]) -> (option<str>,
fn load_link(mis: [@ast::meta_item]/~) -> (option<str>,
option<str>,
option<str>) {
let mut name = none;
@ -240,7 +240,7 @@ fn load_link(mis: [@ast::meta_item]) -> (option<str>,
fn load_crate(filename: str) -> option<crate> {
let sess = parse::new_parse_sess(none);
let c = parse::parse_crate_from_crate_file(filename, [], sess);
let c = parse::parse_crate_from_crate_file(filename, []/~, sess);
let mut name = none;
let mut vers = none;
@ -275,7 +275,7 @@ fn load_crate(filename: str) -> option<crate> {
}
type env = @{
mut deps: [str]
mut deps: [str]/~
};
fn goto_view_item(e: env, i: @ast::view_item) {
@ -283,7 +283,7 @@ fn load_crate(filename: str) -> option<crate> {
ast::view_item_use(ident, metas, id) {
let name_items = attr::find_meta_items_by_name(metas, "name");
let m = if name_items.is_empty() {
metas + [attr::mk_name_value_item_str(@"name", *ident)]
metas + [attr::mk_name_value_item_str(@"name", *ident)]/~
} else {
metas
};
@ -326,7 +326,7 @@ fn load_crate(filename: str) -> option<crate> {
}
let e = @{
mut deps: []
mut deps: []/~
};
let v = visit::mk_simple_visitor(@{
visit_view_item: {|a|goto_view_item(e, a)},
@ -424,7 +424,7 @@ fn parse_source(name: str, j: json::json) -> source {
mut method: method,
mut key: key,
mut keyfp: keyfp,
mut packages: [mut] };
mut packages: [mut]/~ };
}
_ { fail "needed dict value in source"; }
};
@ -498,7 +498,7 @@ fn load_one_source_package(src: source, p: map::hashmap<str, json::json>) {
_ { none }
};
let mut tags = [];
let mut tags = []/~;
alt p.find("tags") {
some(json::list(js)) {
for (*js).each {|j|
@ -528,7 +528,7 @@ fn load_one_source_package(src: source, p: map::hashmap<str, json::json>) {
description: description,
ref: ref,
tags: tags,
versions: []
versions: []/~
};
alt src.packages.position({ |pkg| pkg.uuid == uuid }) {
@ -595,7 +595,7 @@ fn load_source_packages(c: cargo, src: source) {
};
}
fn build_cargo_options(argv: [str]) -> options {
fn build_cargo_options(argv: [str]/~) -> options {
let match = alt getopts::getopts(argv, opts()) {
result::ok(m) { m }
result::err(f) {
@ -699,19 +699,19 @@ fn for_each_package(c: cargo, b: fn(source, package)) {
fn run_programs(buildpath: str) {
let newv = os::list_dir_path(buildpath);
for newv.each {|ct|
run::run_program(ct, []);
run::run_program(ct, []/~);
}
}
// Runs rustc in <path + subdir> with the given flags
// and returns <path + subdir>
fn run_in_buildpath(what: str, path: str, subdir: str, cf: str,
extra_flags: [str]) -> option<str> {
extra_flags: [str]/~) -> option<str> {
let buildpath = path::connect(path, subdir);
need_dir(buildpath);
#debug("%s: %s -> %s", what, cf, buildpath);
let p = run::program_output(rustc_sysroot(),
["--out-dir", buildpath, cf] + extra_flags);
["--out-dir", buildpath, cf]/~ + extra_flags);
if p.status != 0 {
error(#fmt["rustc failed: %d\n%s\n%s", p.status, p.err, p.out]);
ret none;
@ -721,7 +721,7 @@ fn run_in_buildpath(what: str, path: str, subdir: str, cf: str,
fn test_one_crate(_c: cargo, path: str, cf: str) {
let buildpath = alt run_in_buildpath("testing", path, "/test", cf,
[ "--test"]) {
[ "--test"]/~) {
none { ret; }
some(bp) { bp }
};
@ -730,7 +730,7 @@ fn test_one_crate(_c: cargo, path: str, cf: str) {
fn install_one_crate(c: cargo, path: str, cf: str) {
let buildpath = alt run_in_buildpath("installing", path,
"/build", cf, []) {
"/build", cf, []/~) {
none { ret; }
some(bp) { bp }
};
@ -758,7 +758,7 @@ fn install_one_crate(c: cargo, path: str, cf: str) {
fn rustc_sysroot() -> str {
alt os::self_exe_path() {
some(path) {
let path = [path, "..", "bin", "rustc"];
let path = [path, "..", "bin", "rustc"]/~;
check vec::is_not_empty(path);
let rustc = path::normalize(path::connect_many(path));
#debug(" rustc: %s", rustc);
@ -772,7 +772,7 @@ fn install_source(c: cargo, path: str) {
#debug("source: %s", path);
os::change_dir(path);
let mut cratefiles = [];
let mut cratefiles = []/~;
for os::walk_dir(".") {|p|
if str::ends_with(p, ".rc") {
vec::push(cratefiles, p);
@ -811,11 +811,11 @@ fn install_source(c: cargo, path: str) {
}
fn install_git(c: cargo, wd: str, url: str, ref: option<str>) {
run::program_output("git", ["clone", url, wd]);
run::program_output("git", ["clone", url, wd]/~);
if option::is_some(ref) {
let r = option::get(ref);
os::change_dir(wd);
run::run_program("git", ["checkout", r]);
run::run_program("git", ["checkout", r]/~);
}
install_source(c, wd);
@ -824,18 +824,18 @@ fn install_git(c: cargo, wd: str, url: str, ref: option<str>) {
fn install_curl(c: cargo, wd: str, url: str) {
let tarpath = path::connect(wd, "pkg.tar");
let p = run::program_output("curl", ["-f", "-s", "-o",
tarpath, url]);
tarpath, url]/~);
if p.status != 0 {
fail #fmt["fetch of %s failed: %s", url, p.err];
}
run::run_program("tar", ["-x", "--strip-components=1",
"-C", wd, "-f", tarpath]);
"-C", wd, "-f", tarpath]/~);
install_source(c, wd);
}
fn install_file(c: cargo, wd: str, path: str) {
run::program_output("tar", ["-x", "--strip-components=1",
"-C", wd, "-f", path]);
"-C", wd, "-f", path]/~);
install_source(c, wd);
}
@ -868,7 +868,7 @@ fn cargo_suggestion(c: cargo, fallback: fn())
}
fn install_uuid(c: cargo, wd: str, uuid: str) {
let mut ps = [];
let mut ps = []/~;
for_each_package(c, { |s, p|
if p.uuid == uuid {
vec::grow(ps, 1u, (s.name, copy p));
@ -892,7 +892,7 @@ fn install_uuid(c: cargo, wd: str, uuid: str) {
}
fn install_named(c: cargo, wd: str, name: str) {
let mut ps = [];
let mut ps = []/~;
for_each_package(c, { |s, p|
if p.name == name {
vec::grow(ps, 1u, (s.name, copy p));
@ -1082,7 +1082,7 @@ fn cmd_install(c: cargo) unsafe {
if vec::len(c.opts.free) == 2u {
let cwd = os::getcwd();
let status = run::run_program("cp", ["-R", cwd, wd]);
let status = run::run_program("cp", ["-R", cwd, wd]/~);
if status != 0 {
fail #fmt("could not copy directory: %s", cwd);
@ -1135,7 +1135,7 @@ fn sync_one_file(c: cargo, dir: str, src: source) -> bool {
alt copy src.key {
some(u) {
let p = run::program_output("curl", ["-f", "-s", "-o", keyfile,
u]);
u]/~);
if p.status != 0 {
error(#fmt["fetch for source %s (key %s) failed", name, u]);
ret false;
@ -1209,7 +1209,7 @@ fn sync_one_git(c: cargo, dir: str, src: source) -> bool {
}
else {
let p = run::program_output("git", ["reset", "--hard",
"HEAD@{1}"]);
"HEAD@{1}"]/~);
if p.status != 0 {
msg(name, insecure);
@ -1218,7 +1218,7 @@ fn sync_one_git(c: cargo, dir: str, src: source) -> bool {
}
if !os::path_exists(path::connect(dir, ".git")) {
let p = run::program_output("git", ["clone", url, dir]);
let p = run::program_output("git", ["clone", url, dir]/~);
if p.status != 0 {
error(#fmt["fetch for source %s (url %s) failed", name, url]);
@ -1231,7 +1231,7 @@ fn sync_one_git(c: cargo, dir: str, src: source) -> bool {
ret false;
}
let p = run::program_output("git", ["pull"]);
let p = run::program_output("git", ["pull"]/~);
if p.status != 0 {
error(#fmt["fetch for source %s (url %s) failed", name, url]);
@ -1244,7 +1244,7 @@ fn sync_one_git(c: cargo, dir: str, src: source) -> bool {
alt copy src.key {
some(u) {
let p = run::program_output("curl", ["-f", "-s", "-o", keyfile,
u]);
u]/~);
if p.status != 0 {
error(#fmt["fetch for source %s (key %s) failed", name, u]);
rollback(name, dir, false);
@ -1303,7 +1303,7 @@ fn sync_one_curl(c: cargo, dir: str, src: source) -> bool {
url += "/packages.json";
}
let p = run::program_output("curl", ["-f", "-s", "-o", pkgfile, url]);
let p = run::program_output("curl", ["-f", "-s", "-o", pkgfile, url]/~);
if p.status != 0 {
error(#fmt["fetch for source %s (url %s) failed", name, url]);
@ -1311,7 +1311,8 @@ fn sync_one_curl(c: cargo, dir: str, src: source) -> bool {
}
if smart {
url = src.url + "/source.json";
let p = run::program_output("curl", ["-f", "-s", "-o", srcfile, url]);
let p =
run::program_output("curl", ["-f", "-s", "-o", srcfile, url]/~);
if p.status == 0 {
has_src_file = true;
@ -1321,7 +1322,7 @@ fn sync_one_curl(c: cargo, dir: str, src: source) -> bool {
alt copy src.key {
some(u) {
let p = run::program_output("curl", ["-f", "-s", "-o", keyfile,
u]);
u]/~);
if p.status != 0 {
error(#fmt["fetch for source %s (key %s) failed", name, u]);
ret false;
@ -1340,7 +1341,7 @@ fn sync_one_curl(c: cargo, dir: str, src: source) -> bool {
}
let mut p = run::program_output("curl", ["-f", "-s", "-o",
sigfile, url]);
sigfile, url]/~);
if p.status != 0 {
error(#fmt["fetch for source %s (sig %s) failed", name, url]);
ret false;
@ -1358,7 +1359,7 @@ fn sync_one_curl(c: cargo, dir: str, src: source) -> bool {
url = src.url + "/source.json.sig";
p = run::program_output("curl", ["-f", "-s", "-o", srcsigfile,
url]);
url]/~);
if p.status != 0 {
error(#fmt["fetch for source %s (sig %s) failed",
name, url]);
@ -1422,13 +1423,15 @@ fn cmd_init(c: cargo) {
let sigfile = path::connect(c.root, "sources.json.sig");
let destsrcfile = path::connect(c.root, "sources.json");
let p = run::program_output("curl", ["-f", "-s", "-o", srcfile, srcurl]);
let p =
run::program_output("curl", ["-f", "-s", "-o", srcfile, srcurl]/~);
if p.status != 0 {
error(#fmt["fetch of sources.json failed: %s", p.out]);
ret;
}
let p = run::program_output("curl", ["-f", "-s", "-o", sigfile, sigurl]);
let p =
run::program_output("curl", ["-f", "-s", "-o", sigfile, sigurl]/~);
if p.status != 0 {
error(#fmt["fetch of sources.json.sig failed: %s", p.out]);
ret;
@ -1530,7 +1533,7 @@ fn cmd_search(c: cargo) {
fn install_to_dir(srcfile: str, destdir: str) {
let newfile = path::connect(destdir, path::basename(srcfile));
let status = run::run_program("cp", ["-r", srcfile, newfile]);
let status = run::run_program("cp", ["-r", srcfile, newfile]/~);
if status == 0 {
info(#fmt["installed: '%s'", newfile]);
} else {
@ -1647,7 +1650,7 @@ fn cmd_sources(c: cargo) {
mut method: assume_source_method(url),
mut key: none,
mut keyfp: none,
mut packages: [mut]
mut packages: [mut]/~
});
info(#fmt("added source: %s", name));
}
@ -1865,7 +1868,7 @@ Commands:
set-method Change the method for a source.");
}
fn main(argv: [str]) {
fn main(argv: [str]/~) {
let o = build_cargo_options(argv);
if vec::len(o.free) < 2u {

View file

@ -1,4 +1,4 @@
fn gpg(args: [str]) -> { status: int, out: str, err: str } {
fn gpg(args: [str]/~) -> { status: int, out: str, err: str } {
ret run::program_output("gpg", args);
}
@ -59,7 +59,7 @@ fn signing_key_fp() -> str {
}
fn supported() -> bool {
let r = gpg(["--version"]);
let r = gpg(["--version"]/~);
r.status == 0
}
@ -67,7 +67,7 @@ fn init(root: str) {
let p = path::connect(root, "gpg");
if !os::path_is_dir(p) {
os::make_dir(p, 0x1c0i32);
let p = run::start_program("gpg", ["--homedir", p, "--import"]);
let p = run::start_program("gpg", ["--homedir", p, "--import"]/~);
p.input().write_str(signing_key());
let s = p.finish();
if s != 0 {
@ -78,7 +78,8 @@ fn init(root: str) {
fn add(root: str, key: str) {
let path = path::connect(root, "gpg");
let p = run::program_output("gpg", ["--homedir", path, "--import", key]);
let p =
run::program_output("gpg", ["--homedir", path, "--import", key]/~);
if p.status != 0 {
fail "pgp add failed: " + p.out;
}
@ -87,7 +88,7 @@ fn add(root: str, key: str) {
fn verify(root: str, data: str, sig: str, keyfp: str) -> bool {
let path = path::connect(root, "gpg");
let p = gpg(["--homedir", path, "--with-fingerprint", "--verify", sig,
data]);
data]/~);
let res = "Primary key fingerprint: " + keyfp;
for str::split_char(p.err, '\n').each {|line|
if line == res { ret true; }

View file

@ -21,13 +21,13 @@ import common::mode_pretty;
import common::mode;
import util::logv;
fn main(args: [str]) {
fn main(args: [str]/~) {
let config = parse_config(args);
log_config(config);
run_tests(config);
}
fn parse_config(args: [str]) -> config {
fn parse_config(args: [str]/~) -> config {
let opts =
[getopts::reqopt("compile-lib-path"), getopts::reqopt("run-lib-path"),
getopts::reqopt("rustc-path"), getopts::reqopt("src-base"),
@ -36,7 +36,7 @@ fn parse_config(args: [str]) -> config {
getopts::reqopt("mode"), getopts::optflag("ignored"),
getopts::optopt("runtool"), getopts::optopt("rustcflags"),
getopts::optflag("verbose"),
getopts::optopt("logfile")];
getopts::optopt("logfile")]/~;
check (vec::is_not_empty(args));
let args_ = vec::tail(args);
@ -132,9 +132,9 @@ fn test_opts(config: config) -> test::test_opts {
}
}
fn make_tests(config: config) -> [test::test_desc] {
fn make_tests(config: config) -> [test::test_desc]/~ {
#debug("making tests from %s", config.src_base);
let mut tests = [];
let mut tests = []/~;
for os::list_dir_path(config.src_base).each {|file|
let file = file;
#debug("inspecting file %s", file);
@ -148,8 +148,8 @@ fn make_tests(config: config) -> [test::test_desc] {
fn is_test(config: config, testfile: str) -> bool {
// Pretty-printer does not work with .rc files yet
let valid_extensions =
alt config.mode { mode_pretty { [".rs"] } _ { [".rc", ".rs"] } };
let invalid_prefixes = [".", "#", "~"];
alt config.mode { mode_pretty { [".rs"]/~ } _ { [".rc", ".rs"]/~ } };
let invalid_prefixes = [".", "#", "~"]/~;
let name = path::basename(testfile);
let mut valid = false;

View file

@ -8,8 +8,8 @@ export expected_error;
type expected_error = { line: uint, kind: str, msg: str };
// Load any test directives embedded in the file
fn load_errors(testfile: str) -> [expected_error] {
let mut error_patterns = [];
fn load_errors(testfile: str) -> [expected_error]/~ {
let mut error_patterns = []/~;
let rdr = result::get(io::file_reader(testfile));
let mut line_num = 1u;
while !rdr.eof() {
@ -20,11 +20,11 @@ fn load_errors(testfile: str) -> [expected_error] {
ret error_patterns;
}
fn parse_expected(line_num: uint, line: str) -> [expected_error] unsafe {
fn parse_expected(line_num: uint, line: str) -> [expected_error]/~ unsafe {
let error_tag = "//!";
let mut idx;
alt str::find_str(line, error_tag) {
option::none { ret []; }
option::none { ret []/~; }
option::some(nn) { idx = (nn as uint) + str::len(error_tag); }
}
@ -49,5 +49,5 @@ fn parse_expected(line_num: uint, line: str) -> [expected_error] unsafe {
#debug("line=%u kind=%s msg=%s", line_num - adjust_line, kind, msg);
ret [{line: line_num - adjust_line, kind: kind, msg: msg}];
ret [{line: line_num - adjust_line, kind: kind, msg: msg}]/~;
}

View file

@ -10,23 +10,23 @@ export is_test_ignored;
type test_props = {
// Lines that should be expected, in order, on standard out
error_patterns: [str],
error_patterns: [str]/~,
// Extra flags to pass to the compiler
compile_flags: option<str>,
// If present, the name of a file that this test should match when
// pretty-printed
pp_exact: option<str>,
// Modules from aux directory that should be compiled
aux_builds: [str],
aux_builds: [str]/~,
// Environment settings to use during execution
exec_env: [(str,str)]
exec_env: [(str,str)]/~
};
// Load any test directives embedded in the file
fn load_props(testfile: str) -> test_props {
let mut error_patterns = [];
let mut aux_builds = [];
let mut exec_env = [];
let mut error_patterns = []/~;
let mut aux_builds = []/~;
let mut exec_env = []/~;
let mut compile_flags = option::none;
let mut pp_exact = option::none;
for iter_header(testfile) {|ln|

View file

@ -5,7 +5,7 @@ import libc::{c_int, pid_t};
export run;
#[cfg(target_os = "win32")]
fn target_env(lib_path: str, prog: str) -> [(str,str)] {
fn target_env(lib_path: str, prog: str) -> [(str,str)]/~ {
let mut env = os::env();
@ -27,16 +27,16 @@ fn target_env(lib_path: str, prog: str) -> [(str,str)] {
#[cfg(target_os = "linux")]
#[cfg(target_os = "macos")]
#[cfg(target_os = "freebsd")]
fn target_env(_lib_path: str, _prog: str) -> [(str,str)] {
[]
fn target_env(_lib_path: str, _prog: str) -> [(str,str)]/~ {
[]/~
}
// FIXME (#2659): This code is duplicated in core::run::program_output
fn run(lib_path: str,
prog: str,
args: [str],
env: [(str, str)],
args: [str]/~,
env: [(str, str)]/~,
input: option<str>) -> {status: int, out: str, err: str} {
let pipe_in = os::pipe();

View file

@ -92,7 +92,7 @@ fn run_pretty_test(config: config, props: test_props, testfile: str) {
let rounds =
alt props.pp_exact { option::some(_) { 1 } option::none { 2 } };
let mut srcs = [result::get(io::read_whole_file_str(testfile))];
let mut srcs = [result::get(io::read_whole_file_str(testfile))]/~;
let mut round = 0;
while round < rounds {
@ -139,12 +139,12 @@ fn run_pretty_test(config: config, props: test_props, testfile: str) {
fn print_source(config: config, testfile: str, src: str) -> procres {
compose_and_run(config, testfile, make_pp_args(config, testfile),
[], config.compile_lib_path, option::some(src))
[]/~, config.compile_lib_path, option::some(src))
}
fn make_pp_args(config: config, _testfile: str) -> procargs {
let prog = config.rustc_path;
let args = ["-", "--pretty", "normal"];
let args = ["-", "--pretty", "normal"]/~;
ret {prog: prog, args: args};
}
@ -179,7 +179,7 @@ actual:\n\
fn make_typecheck_args(config: config, testfile: str) -> procargs {
let prog = config.rustc_path;
let mut args = ["-", "--no-trans", "--lib", "-L", config.build_base,
"-L", aux_output_dir_name(config, testfile)];
"-L", aux_output_dir_name(config, testfile)]/~;
args += split_maybe_args(config.rustcflags);
ret {prog: prog, args: args};
}
@ -227,7 +227,7 @@ fn check_error_patterns(props: test_props,
}
}
fn check_expected_errors(expected_errors: [errors::expected_error],
fn check_expected_errors(expected_errors: [errors::expected_error]/~,
testfile: str,
procres: procres) {
@ -286,13 +286,13 @@ fn check_expected_errors(expected_errors: [errors::expected_error],
}
}
type procargs = {prog: str, args: [str]};
type procargs = {prog: str, args: [str]/~};
type procres = {status: int, stdout: str, stderr: str, cmdline: str};
fn compile_test(config: config, props: test_props,
testfile: str) -> procres {
let link_args = ["-L", aux_output_dir_name(config, testfile)];
let link_args = ["-L", aux_output_dir_name(config, testfile)]/~;
compose_and_run_compiler(
config, props, testfile,
make_compile_args(config, props, link_args,
@ -319,14 +319,14 @@ fn compose_and_run_compiler(
ensure_dir(aux_output_dir_name(config, testfile));
}
let extra_link_args = ["-L", aux_output_dir_name(config, testfile)];
let extra_link_args = ["-L", aux_output_dir_name(config, testfile)]/~;
vec::iter(props.aux_builds) {|rel_ab|
let abs_ab = path::connect(config.aux_base, rel_ab);
let aux_args =
make_compile_args(config, props, ["--lib"] + extra_link_args,
make_compile_args(config, props, ["--lib"]/~ + extra_link_args,
{|a,b|make_lib_name(a, b, testfile)}, abs_ab);
let auxres = compose_and_run(config, abs_ab, aux_args, [],
let auxres = compose_and_run(config, abs_ab, aux_args, []/~,
config.compile_lib_path, option::none);
if auxres.status != 0 {
fatal_procres(
@ -335,7 +335,7 @@ fn compose_and_run_compiler(
}
}
compose_and_run(config, testfile, args, [],
compose_and_run(config, testfile, args, []/~,
config.compile_lib_path, input)
}
@ -348,19 +348,19 @@ fn ensure_dir(path: path) {
fn compose_and_run(config: config, testfile: str,
procargs: procargs,
procenv: [(str, str)],
procenv: [(str, str)]/~,
lib_path: str,
input: option<str>) -> procres {
ret program_output(config, testfile, lib_path,
procargs.prog, procargs.args, procenv, input);
}
fn make_compile_args(config: config, props: test_props, extras: [str],
fn make_compile_args(config: config, props: test_props, extras: [str]/~,
xform: fn(config, str) -> str, testfile: str) ->
procargs {
let prog = config.rustc_path;
let mut args = [testfile, "-o", xform(config, testfile),
"-L", config.build_base] + extras;
"-L", config.build_base]/~ + extras;
args += split_maybe_args(config.rustcflags);
args += split_maybe_args(props.compile_flags);
ret {prog: prog, args: args};
@ -390,12 +390,12 @@ fn make_run_args(config: config, _props: test_props, testfile: str) ->
split_maybe_args(runtool)
};
let args = toolargs + [make_exe_name(config, testfile)];
let args = toolargs + [make_exe_name(config, testfile)]/~;
ret {prog: args[0], args: vec::slice(args, 1u, vec::len(args))};
}
fn split_maybe_args(argstr: option<str>) -> [str] {
fn rm_whitespace(v: [str]) -> [str] {
fn split_maybe_args(argstr: option<str>) -> [str]/~ {
fn rm_whitespace(v: [str]/~) -> [str]/~ {
fn flt(&&s: str) -> option<str> {
if !str::is_whitespace(s) { option::some(s) } else { option::none }
}
@ -404,12 +404,12 @@ fn split_maybe_args(argstr: option<str>) -> [str] {
alt argstr {
option::some(s) { rm_whitespace(str::split_char(s, ' ')) }
option::none { [] }
option::none { []/~ }
}
}
fn program_output(config: config, testfile: str, lib_path: str, prog: str,
args: [str], env: [(str, str)],
args: [str]/~, env: [(str, str)]/~,
input: option<str>) -> procres {
let cmdline =
{
@ -429,12 +429,12 @@ fn program_output(config: config, testfile: str, lib_path: str, prog: str,
#[cfg(target_os = "linux")]
#[cfg(target_os = "macos")]
#[cfg(target_os = "freebsd")]
fn make_cmdline(_libpath: str, prog: str, args: [str]) -> str {
fn make_cmdline(_libpath: str, prog: str, args: [str]/~) -> str {
#fmt["%s %s", prog, str::connect(args, " ")]
}
#[cfg(target_os = "win32")]
fn make_cmdline(libpath: str, prog: str, args: [str]) -> str {
fn make_cmdline(libpath: str, prog: str, args: [str]/~) -> str {
#fmt["%s %s %s", lib_path_cmd_prefix(libpath), prog,
str::connect(args, " ")]
}
@ -454,7 +454,7 @@ fn dump_output(config: config, testfile: str, out: str, err: str) {
fn dump_output_file(config: config, testfile: str, out: str, extension: str) {
let outfile = make_out_name(config, testfile, extension);
let writer = result::get(
io::file_writer(outfile, [io::create, io::truncate]));
io::file_writer(outfile, [io::create, io::truncate]/~));
writer.write_str(out);
}

View file

@ -1,7 +1,7 @@
use std;
import vec;
fn vec_equal<T>(v: [T], u: [T],
fn vec_equal<T>(v: [T]/~, u: [T]/~,
element_equality_test: fn@(&&T, &&T) -> bool) ->
bool {
let Lv = vec::len(v);
@ -20,11 +20,11 @@ pure fn builtin_equal_int(&&a: int, &&b: int) -> bool { ret a == b; }
fn main() {
assert (builtin_equal(5, 5));
assert (!builtin_equal(5, 4));
assert (!vec_equal([5, 5], [5], bind builtin_equal(_, _)));
assert (!vec_equal([5, 5], [5], builtin_equal_int));
assert (!vec_equal([5, 5], [5, 4], builtin_equal_int));
assert (!vec_equal([5, 5], [4, 5], builtin_equal_int));
assert (vec_equal([5, 5], [5, 5], builtin_equal_int));
assert (!vec_equal([5, 5]/~, [5]/~, bind builtin_equal(_, _)));
assert (!vec_equal([5, 5]/~, [5]/~, builtin_equal_int));
assert (!vec_equal([5, 5]/~, [5, 4]/~, builtin_equal_int));
assert (!vec_equal([5, 5]/~, [4, 5]/~, builtin_equal_int));
assert (vec_equal([5, 5]/~, [5, 5]/~, builtin_equal_int));
#error("Pass");
}

View file

@ -8,7 +8,7 @@ fn under(r : rand::rng, n : uint) -> uint {
}
// random choice from a vec
fn choice<T: copy>(r : rand::rng, v : [const T]) -> T {
fn choice<T: copy>(r : rand::rng, v : [const T]/~) -> T {
assert vec::len(v) != 0u; v[under(r, vec::len(v))]
}
@ -32,8 +32,8 @@ type pointy = {
mut f : fn@()->(),
mut g : fn~()->(),
mut m : [maybe_pointy],
mut n : [mut maybe_pointy],
mut m : [maybe_pointy]/~,
mut n : [mut maybe_pointy]/~,
mut o : {x : int, y : maybe_pointy}
};
// To add: objects; ifaces; anything type-parameterized?
@ -47,8 +47,8 @@ fn empty_pointy() -> @pointy {
mut f : fn@()->(){},
mut g : fn~()->(){},
mut m : [],
mut n : [mut],
mut m : []/~,
mut n : [mut]/~,
mut o : {x : 0, y : none}
}
}
@ -58,7 +58,7 @@ fn nop<T>(_x: T) { }
fn test_cycles(r : rand::rng, k: uint, n: uint)
{
let v : [mut @pointy] = [mut];
let v : [mut @pointy]/~ = [mut]/~;
// Create a graph with no edges
range(0u, vlen) {|_i|

View file

@ -10,7 +10,7 @@ type context = { mode: test_mode }; // + rng
fn write_file(filename: str, content: str) {
result::get(
io::file_writer(filename, [io::create, io::truncate]))
io::file_writer(filename, [io::create, io::truncate]/~))
.write_str(content);
}
@ -18,10 +18,10 @@ fn contains(haystack: str, needle: str) -> bool {
str::contains(haystack, needle)
}
fn find_rust_files(&files: [str], path: str) {
fn find_rust_files(&files: [str]/~, path: str) {
if str::ends_with(path, ".rs") && !contains(path, "utf8") {
// ignoring "utf8" tests because something is broken
files += [path];
files += [path]/~;
} else if os::path_is_dir(path)
&& !contains(path, "compile-fail")
&& !contains(path, "build") {
@ -32,7 +32,7 @@ fn find_rust_files(&files: [str], path: str) {
}
fn common_exprs() -> [ast::expr] {
fn common_exprs() -> [ast::expr]/~ {
fn dse(e: ast::expr_) -> ast::expr {
{ id: 0, node: e, span: ast_util::dummy_sp() }
}
@ -54,7 +54,7 @@ fn common_exprs() -> [ast::expr] {
@dse(ast::expr_lit(@dsl(ast::lit_bool(true)))))),
dse(ast::expr_unary(ast::uniq(ast::m_imm),
@dse(ast::expr_lit(@dsl(ast::lit_bool(true))))))
]
]/~
}
pure fn safe_to_steal_expr(e: @ast::expr, tm: test_mode) -> bool {
@ -116,16 +116,16 @@ fn safe_to_steal_ty(t: @ast::ty, tm: test_mode) -> bool {
// Not type-parameterized: https://github.com/mozilla/rust/issues/898 (FIXED)
fn stash_expr_if(c: fn@(@ast::expr, test_mode)->bool,
es: @mut [ast::expr],
es: @mut [ast::expr]/~,
e: @ast::expr,
tm: test_mode) {
if c(e, tm) {
*es += [*e];
*es += [*e]/~;
} else {/* now my indices are wrong :( */ }
}
fn stash_ty_if(c: fn@(@ast::ty, test_mode)->bool,
es: @mut [ast::ty],
es: @mut [ast::ty]/~,
e: @ast::ty,
tm: test_mode) {
if c(e, tm) {
@ -133,11 +133,11 @@ fn stash_ty_if(c: fn@(@ast::ty, test_mode)->bool,
} else {/* now my indices are wrong :( */ }
}
type stolen_stuff = {exprs: [ast::expr], tys: [ast::ty]};
type stolen_stuff = {exprs: [ast::expr]/~, tys: [ast::ty]/~};
fn steal(crate: ast::crate, tm: test_mode) -> stolen_stuff {
let exprs = @mut [];
let tys = @mut [];
let exprs = @mut []/~;
let tys = @mut []/~;
let v = visit::mk_simple_visitor(@{
visit_expr: {|a|stash_expr_if(safe_to_steal_expr, exprs, a, tm)},
visit_ty: {|a|stash_ty_if(safe_to_steal_ty, tys, a, tm)}
@ -248,7 +248,7 @@ fn check_variants_T<T: copy>(
codemap: codemap::codemap,
filename: str,
thing_label: str,
things: [T],
things: [T]/~,
stringifier: fn@(@T) -> str,
replacer: fn@(ast::crate, uint, T, test_mode) -> ast::crate,
cx: context
@ -333,19 +333,19 @@ fn check_whole_compiler(code: str, suggested_filename_prefix: str,
fn removeIfExists(filename: str) {
// So sketchy!
assert !contains(filename, " ");
run::program_output("bash", ["-c", "rm " + filename]);
run::program_output("bash", ["-c", "rm " + filename]/~);
}
fn removeDirIfExists(filename: str) {
// So sketchy!
assert !contains(filename, " ");
run::program_output("bash", ["-c", "rm -r " + filename]);
run::program_output("bash", ["-c", "rm -r " + filename]/~);
}
fn check_running(exe_filename: str) -> happiness {
let p = run::program_output(
"/Users/jruderman/scripts/timed_run_rust_program.py",
[exe_filename]);
[exe_filename]/~);
let comb = p.out + "\n" + p.err;
if str::len(comb) > 1u {
log(error, "comb comb comb: " + comb);
@ -385,7 +385,7 @@ fn check_compiling(filename: str) -> happiness {
let p = run::program_output(
"/Users/jruderman/code/rust/build/x86_64-apple-darwin/\
stage1/bin/rustc",
[filename]);
[filename]/~);
//#error("Status: %d", p.status);
if p.status == 0 {
@ -419,7 +419,7 @@ fn parse_and_print(code: @str) -> str {
let sess = parse::new_parse_sess(option::none);
write_file(filename, *code);
let crate = parse::parse_crate_from_source_str(
filename, code, [], sess);
filename, code, []/~, sess);
io::with_str_reader(*code) { |rdr|
as_str({|a|pprust::print_crate(sess.cm,
sess.span_diagnostic,
@ -451,7 +451,7 @@ fn content_is_dangerous_to_run(code: str) -> bool {
"import", // espeically fs, run
"native",
"unsafe",
"log"]; // python --> rust pipe deadlock?
"log"]/~; // python --> rust pipe deadlock?
for dangerous_patterns.each {|p| if contains(code, p) { ret true; } }
ret false;
@ -459,7 +459,7 @@ fn content_is_dangerous_to_run(code: str) -> bool {
fn content_is_dangerous_to_compile(code: str) -> bool {
let dangerous_patterns =
["xfail-test"];
["xfail-test"]/~;
for dangerous_patterns.each {|p| if contains(code, p) { ret true; } }
ret false;
@ -475,7 +475,7 @@ fn content_might_not_converge(code: str) -> bool {
" be ", // don't want to replace its child with a non-call:
// "Non-call expression in tail call"
"\n\n\n\n\n" // https://github.com/mozilla/rust/issues/850
];
]/~;
for confusing_patterns.each {|p| if contains(code, p) { ret true; } }
ret false;
@ -488,7 +488,7 @@ fn file_might_not_converge(filename: str) -> bool {
"block-arg-in-ternary.rs", // wrapping
"move-3-unique.rs", // 0 becomes (0), but both seem reasonable. wtf?
"move-3.rs" // 0 becomes (0), but both seem reasonable. wtf?
];
]/~;
for confusing_files.each {|f| if contains(filename, f) { ret true; } }
@ -518,12 +518,12 @@ fn check_roundtrip_convergence(code: @str, maxIters: uint) {
write_file("round-trip-b.rs", *newv);
run::run_program("diff",
["-w", "-u", "round-trip-a.rs",
"round-trip-b.rs"]);
"round-trip-b.rs"]/~);
fail "Mismatch";
}
}
fn check_convergence(files: [str]) {
fn check_convergence(files: [str]/~) {
#error("pp convergence tests: %u files", vec::len(files));
for files.each {|file|
if !file_might_not_converge(file) {
@ -538,7 +538,7 @@ fn check_convergence(files: [str]) {
}
}
fn check_variants(files: [str], cx: context) {
fn check_variants(files: [str]/~, cx: context) {
for files.each {|file|
if cx.mode == tm_converge && file_might_not_converge(file) {
#error("Skipping convergence test based on\
@ -562,7 +562,7 @@ fn check_variants(files: [str], cx: context) {
let crate =
parse::parse_crate_from_source_str(
file,
s, [], sess);
s, []/~, sess);
io::with_str_reader(*s) { |rdr|
#error("%s",
as_str({|a|pprust::print_crate(sess.cm,
@ -576,12 +576,12 @@ fn check_variants(files: [str], cx: context) {
}
}
fn main(args: [str]) {
fn main(args: [str]/~) {
if vec::len(args) != 2u {
#error("usage: %s <testdir>", args[0]);
ret;
}
let mut files = [];
let mut files = []/~;
let root = args[1];
find_rust_files(files, root);

View file

@ -8,8 +8,8 @@ Idea: provide functions for 'exhaustive' and 'random' modification of vecs.
It would be nice if this could be data-driven, so the two functions
could share information:
type vec_modifier = rec(fn (<T> v, uint i) -> [T] fun, uint lo, uint di);
const [vec_modifier] vec_modifiers = ~[rec(fun=vec_omit, 0u, 1u), ...];
type vec_modifier = rec(fn (<T> v, uint i) -> [T]/~ fun, uint lo, uint di);
const [vec_modifier]/~ vec_modifiers = ~[rec(fun=vec_omit, 0u, 1u), ...]/~;
But that gives me "error: internal compiler error unimplemented consts
that's not a plain literal".
https://github.com/graydon/rust/issues/570
@ -24,23 +24,23 @@ import vec::slice;
import vec::len;
import int;
fn vec_omit<T: copy>(v: [T], i: uint) -> [T] {
fn vec_omit<T: copy>(v: [T]/~, i: uint) -> [T]/~ {
slice(v, 0u, i) + slice(v, i + 1u, len(v))
}
fn vec_dup<T: copy>(v: [T], i: uint) -> [T] {
fn vec_dup<T: copy>(v: [T]/~, i: uint) -> [T]/~ {
slice(v, 0u, i) + [v[i]] + slice(v, i, len(v))
}
fn vec_swadj<T: copy>(v: [T], i: uint) -> [T] {
fn vec_swadj<T: copy>(v: [T]/~, i: uint) -> [T]/~ {
slice(v, 0u, i) + [v[i + 1u], v[i]] + slice(v, i + 2u, len(v))
}
fn vec_prefix<T: copy>(v: [T], i: uint) -> [T] { slice(v, 0u, i) }
fn vec_suffix<T: copy>(v: [T], i: uint) -> [T] { slice(v, i, len(v)) }
fn vec_prefix<T: copy>(v: [T]/~, i: uint) -> [T]/~ { slice(v, 0u, i) }
fn vec_suffix<T: copy>(v: [T]/~, i: uint) -> [T]/~ { slice(v, i, len(v)) }
fn vec_poke<T: copy>(v: [T], i: uint, x: T) -> [T] {
slice(v, 0u, i) + [x] + slice(v, i + 1u, len(v))
fn vec_poke<T: copy>(v: [T]/~, i: uint, x: T) -> [T]/~ {
slice(v, 0u, i) + [x]/~ + slice(v, i + 1u, len(v))
}
fn vec_insert<T: copy>(v: [T], i: uint, x: T) -> [T] {
slice(v, 0u, i) + [x] + slice(v, i, len(v))
fn vec_insert<T: copy>(v: [T]/~, i: uint, x: T) -> [T]/~ {
slice(v, 0u, i) + [x]/~ + slice(v, i, len(v))
}
// Iterates over 0...length, skipping the specified number on each side.
@ -51,23 +51,23 @@ fn ix(skip_low: uint, skip_high: uint, length: uint, it: block(uint)) {
// Returns a bunch of modified versions of v, some of which introduce
// new elements (borrowed from xs).
fn vec_edits<T: copy>(v: [T], xs: [T]) -> [[T]] {
let edits: [[T]] = [];
fn vec_edits<T: copy>(v: [T]/~, xs: [T]/~) -> [[T]/~]/~ {
let edits: [[T]/~]/~ = []/~;
let Lv: uint = len(v);
if Lv != 1u {
// When Lv == 1u, this is redundant with omit.
vec::push(edits, []);
vec::push(edits, []/~);
}
if Lv >= 3u {
// When Lv == 2u, this is redundant with swap.
vec::push(edits, vec::reversed(v));
}
ix(0u, 1u, Lv) {|i| edits += [vec_omit(v, i)]; }
ix(0u, 1u, Lv) {|i| edits += [vec_dup(v, i)]; }
ix(0u, 2u, Lv) {|i| edits += [vec_swadj(v, i)]; }
ix(1u, 2u, Lv) {|i| edits += [vec_prefix(v, i)]; }
ix(2u, 1u, Lv) {|i| edits += [vec_suffix(v, i)]; }
ix(0u, 1u, Lv) {|i| edits += [vec_omit(v, i)]/~; }
ix(0u, 1u, Lv) {|i| edits += [vec_dup(v, i)]/~; }
ix(0u, 2u, Lv) {|i| edits += [vec_swadj(v, i)]/~; }
ix(1u, 2u, Lv) {|i| edits += [vec_prefix(v, i)]/~; }
ix(2u, 1u, Lv) {|i| edits += [vec_suffix(v, i)]/~; }
ix(0u, 1u, len(xs)) {|j|
ix(0u, 1u, Lv) {|i|
@ -83,7 +83,7 @@ fn vec_edits<T: copy>(v: [T], xs: [T]) -> [[T]] {
// Would be nice if this were built in:
// https://github.com/graydon/rust/issues/424
fn vec_to_str(v: [int]) -> str {
fn vec_to_str(v: [int]/~) -> str {
let i = 0u;
let s = "[";
while i < len(v) {
@ -94,19 +94,19 @@ fn vec_to_str(v: [int]) -> str {
ret s + "]";
}
fn show_edits(a: [int], xs: [int]) {
fn show_edits(a: [int]/~, xs: [int]/~) {
log(error, "=== Edits of " + vec_to_str(a) + " ===");
let b = vec_edits(a, xs);
ix(0u, 1u, len(b)) {|i| log(error, vec_to_str(b[i])); }
}
fn demo_edits() {
let xs = [7, 8];
show_edits([], xs);
show_edits([1], xs);
show_edits([1, 2], xs);
show_edits([1, 2, 3], xs);
show_edits([1, 2, 3, 4], xs);
let xs = [7, 8]/~;
show_edits([]/~, xs);
show_edits([1]/~, xs);
show_edits([1, 2]/~, xs);
show_edits([1, 2, 3]/~, xs);
show_edits([1, 2, 3, 4]/~, xs);
}
fn main() { demo_edits(); }

View file

@ -8,7 +8,7 @@ fn under(r : rand::rng, n : uint) -> uint {
}
// random choice from a vec
fn choice<T: copy>(r : rand::rng, v : [T]) -> T {
fn choice<T: copy>(r : rand::rng, v : [T]/~) -> T {
assert vec::len(v) != 0u; v[under(r, vec::len(v))]
}
@ -16,7 +16,7 @@ fn choice<T: copy>(r : rand::rng, v : [T]) -> T {
fn unlikely(r : rand::rng, n : uint) -> bool { under(r, n) == 0u }
// shuffle a vec in place
fn shuffle<T>(r : rand::rng, &v : [mut T]) {
fn shuffle<T>(r : rand::rng, &v : [mut T]/~) {
let i = vec::len(v);
while i >= 2u {
// Loop invariant: elements with index >= i have been locked in place.
@ -26,20 +26,20 @@ fn shuffle<T>(r : rand::rng, &v : [mut T]) {
}
// create a shuffled copy of a vec
fn shuffled<T: copy>(r : rand::rng, v : [T]) -> [T] {
fn shuffled<T: copy>(r : rand::rng, v : [T]/~) -> [T]/~ {
let w = vec::to_mut(v);
shuffle(r, w);
vec::from_mut(w) // Shouldn't this happen automatically?
}
// sample from a population without replacement
//fn sample<T>(r : rand::rng, pop : [T], k : uint) -> [T] { fail }
//fn sample<T>(r : rand::rng, pop : [T]/~, k : uint) -> [T]/~ { fail }
// Two ways to make a weighted choice.
// * weighted_choice is O(number of choices) time
// * weighted_vec is O(total weight) space
type weighted<T> = { weight: uint, item: T };
fn weighted_choice<T: copy>(r : rand::rng, v : [weighted<T>]) -> T {
fn weighted_choice<T: copy>(r : rand::rng, v : [weighted<T>]/~) -> T {
assert vec::len(v) != 0u;
let total = 0u;
for {weight: weight, item: _} in v {
@ -57,8 +57,8 @@ fn weighted_choice<T: copy>(r : rand::rng, v : [weighted<T>]) -> T {
core::unreachable();
}
fn weighted_vec<T: copy>(v : [weighted<T>]) -> [T] {
let r = [];
fn weighted_vec<T: copy>(v : [weighted<T>]/~) -> [T]/~ {
let r = []/~;
for {weight: weight, item: item} in v {
let i = 0u;
while i < weight {
@ -74,10 +74,10 @@ fn main()
let r = rand::mk_rng();
log(error, under(r, 5u));
log(error, choice(r, [10, 20, 30]));
log(error, choice(r, [10, 20, 30]/~));
log(error, if unlikely(r, 5u) { "unlikely" } else { "likely" });
let a = [mut 1, 2, 3];
let a = [mut 1, 2, 3]/~;
shuffle(r, a);
log(error, a);
@ -86,7 +86,7 @@ fn main()
{weight:1u, item:"low"},
{weight:8u, item:"middle"},
{weight:1u, item:"high"}
];
]/~;
let w = weighted_vec(v);
while i < 1000u {

View file

@ -170,7 +170,7 @@ mod tests {
#[test]
fn manually_share_arc() {
let v = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
let v = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]/~;
let arc_v = arc::arc(v);
let p = port();
@ -182,7 +182,7 @@ mod tests {
let arc_v = p.recv();
let v = *arc::get::<[int]>(&arc_v);
let v = *arc::get::<[int]/~>(&arc_v);
assert v[3] == 4;
};
@ -196,7 +196,7 @@ mod tests {
#[test]
fn auto_share_arc() {
let v = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
let v = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]/~;
let (_res, arc_c) = shared_arc(v);
let p = port();
@ -216,7 +216,7 @@ mod tests {
#[test]
#[ignore] // this can probably infinite loop too.
fn exclusive_arc() {
let mut futures = [];
let mut futures = []/~;
let num_tasks = 10u;
let count = 1000u;
@ -231,7 +231,7 @@ mod tests {
**count += 1u;
}
}
})];
})]/~;
};
for futures.each {|f| f.get() };

View file

@ -65,7 +65,7 @@ pure fn is_uppercase(c: char) -> bool {
#[doc = "
Indicates whether a character is whitespace, defined in
terms of the Unicode General Categories 'Zs', 'Zl', 'Zp'
additional 'Cc'-category control codes in the range [0x09, 0x0d]
additional 'Cc'-category control codes in the range [0x09, 0x0d]/~
"]
pure fn is_whitespace(c: char) -> bool {
ret ('\x09' <= c && c <= '\x0d')
@ -128,8 +128,8 @@ Return the hexadecimal unicode escape of a char.
The rules are as follows:
- chars in [0,0xff] get 2-digit escapes: `\\xNN`
- chars in [0x100,0xffff] get 4-digit escapes: `\\uNNNN`
- chars in [0,0xff]/~ get 2-digit escapes: `\\xNN`
- chars in [0x100,0xffff]/~ get 4-digit escapes: `\\uNNNN`
- chars above 0x10000 get 8-digit escapes: `\\UNNNNNNNN`
"]
fn escape_unicode(c: char) -> str {
@ -154,7 +154,7 @@ languages. The exact rules are:
- Tab, CR and LF are escaped as '\t', '\r' and '\n' respectively.
- Single-quote, double-quote and backslash chars are backslash-escaped.
- Any other chars in the range [0x20,0x7e] are not escaped.
- Any other chars in the range [0x20,0x7e]/~ are not escaped.
- Any other chars are given hex unicode escapes; see `escape_unicode`.
"]
fn escape_default(c: char) -> str {

View file

@ -217,7 +217,7 @@ fn peek_(p: *rust_port) -> bool {
#[doc = "Receive on one of two ports"]
fn select2<A: send, B: send>(p_a: port<A>, p_b: port<B>)
-> either<A, B> {
let ports = [(**p_a).po, (**p_b).po];
let ports = [(**p_a).po, (**p_b).po]/~;
let n_ports = 2 as libc::size_t;
let yield = 0u, yieldp = ptr::addr_of(yield);

View file

@ -32,7 +32,7 @@ may permit read-only access during iteration or other use.
# WARNING
For maximum performance, this type is implemented using some rather
unsafe code. In particular, this innocent looking `[mut A]` pointer
unsafe code. In particular, this innocent looking `[mut A]/~` pointer
*may be null!* Therefore, it is important you not reach into the
data structure manually but instead use the provided extensions.
@ -48,27 +48,26 @@ type could only produce 47 million pushes/second.
"]
type dvec<A> = {
mut data: [mut A]
mut data: [mut A]/~
};
#[doc = "Creates a new, empty dvec"]
fn dvec<A>() -> dvec<A> {
{mut data: [mut]}
{mut data: [mut]/~}
}
#[doc = "Creates a new dvec with a single element"]
fn from_elt<A>(+e: A) -> dvec<A> {
{mut data: [mut e]}
{mut data: [mut e]/~}
}
#[doc = "Creates a new dvec with the contents of a vector"]
fn from_vec<A>(+v: [mut A]) -> dvec<A> {
fn from_vec<A>(+v: [mut A]/~) -> dvec<A> {
{mut data: v}
}
#[doc = "Consumes the vector and returns its contents"]
fn unwrap<A>(-d: dvec<A>) -> [mut A] {
fn unwrap<A>(-d: dvec<A>) -> [mut A]/~ {
let {data: v} <- d;
ret v;
}
@ -84,7 +83,7 @@ impl private_methods<A> for dvec<A> {
}
#[inline(always)]
fn borrow<B>(f: fn(-[mut A]) -> B) -> B {
fn borrow<B>(f: fn(-[mut A]/~) -> B) -> B {
unsafe {
let mut data = unsafe::reinterpret_cast(null::<()>());
data <-> self.data;
@ -95,7 +94,7 @@ impl private_methods<A> for dvec<A> {
}
#[inline(always)]
fn return(-data: [mut A]) {
fn return(-data: [mut A]/~) {
unsafe {
self.data <- data;
}
@ -114,7 +113,7 @@ impl extensions<A> for dvec<A> {
"]
#[inline(always)]
fn swap(f: fn(-[mut A]) -> [mut A]) {
fn swap(f: fn(-[mut A]/~) -> [mut A]/~) {
self.borrow { |v| self.return(f(v)) }
}
@ -128,7 +127,7 @@ impl extensions<A> for dvec<A> {
}
#[doc = "Overwrite the current contents"]
fn set(+w: [mut A]) {
fn set(+w: [mut A]/~) {
self.check_not_borrowed();
self.data <- w;
}
@ -151,7 +150,7 @@ impl extensions<A> for dvec<A> {
let data_ptr: *() = unsafe::reinterpret_cast(data);
if data_ptr.is_null() { fail "Recursive use of dvec"; }
log(error, "a");
self.data <- [mut t] + data;
self.data <- [mut t]/~ + data;
log(error, "b");
}
}
@ -219,7 +218,7 @@ impl extensions<A:copy> for dvec<A> {
}
};
for ts.each { |t| v += [t] };
for ts.each { |t| v += [t]/~ };
v
}
}
@ -229,7 +228,7 @@ impl extensions<A:copy> for dvec<A> {
See `unwrap()` if you do not wish to copy the contents.
"]
fn get() -> [A] {
fn get() -> [A]/~ {
self.borrow { |v|
let w = vec::from_mut(copy v);
self.return(v);
@ -271,4 +270,4 @@ impl extensions<A:copy> for dvec<A> {
fn last() -> A {
self.get_elt(self.len() - 1u)
}
}
}

View file

@ -21,28 +21,28 @@ fn either<T, U, V>(f_left: fn(T) -> V,
alt value { left(l) { f_left(l) } right(r) { f_right(r) } }
}
fn lefts<T: copy, U>(eithers: [either<T, U>]) -> [T] {
fn lefts<T: copy, U>(eithers: [either<T, U>]/~) -> [T]/~ {
#[doc = "Extracts from a vector of either all the left values"];
let mut result: [T] = [];
let mut result: [T]/~ = []/~;
for vec::each(eithers) {|elt|
alt elt { left(l) { result += [l]; } _ {/* fallthrough */ } }
alt elt { left(l) { result += [l]/~; } _ {/* fallthrough */ } }
}
ret result;
}
fn rights<T, U: copy>(eithers: [either<T, U>]) -> [U] {
fn rights<T, U: copy>(eithers: [either<T, U>]/~) -> [U]/~ {
#[doc = "Extracts from a vector of either all the right values"];
let mut result: [U] = [];
let mut result: [U]/~ = []/~;
for vec::each(eithers) {|elt|
alt elt { right(r) { result += [r]; } _ {/* fallthrough */ } }
alt elt { right(r) { result += [r]/~; } _ {/* fallthrough */ } }
}
ret result;
}
fn partition<T: copy, U: copy>(eithers: [either<T, U>])
-> {lefts: [T], rights: [U]} {
fn partition<T: copy, U: copy>(eithers: [either<T, U>]/~)
-> {lefts: [T]/~, rights: [U]/~} {
#[doc = "
Extracts from a vector of either all the left values and right values
@ -50,10 +50,10 @@ fn partition<T: copy, U: copy>(eithers: [either<T, U>])
right values.
"];
let mut lefts: [T] = [];
let mut rights: [U] = [];
let mut lefts: [T]/~ = []/~;
let mut rights: [U]/~ = []/~;
for vec::each(eithers) {|elt|
alt elt { left(l) { lefts += [l]; } right(r) { rights += [r]; } }
alt elt { left(l) { lefts += [l]/~; } right(r) { rights += [r]/~; } }
}
ret {lefts: lefts, rights: rights};
}
@ -112,49 +112,49 @@ fn test_either_right() {
#[test]
fn test_lefts() {
let input = [left(10), right(11), left(12), right(13), left(14)];
let input = [left(10), right(11), left(12), right(13), left(14)]/~;
let result = lefts(input);
assert (result == [10, 12, 14]);
assert (result == [10, 12, 14]/~);
}
#[test]
fn test_lefts_none() {
let input: [either<int, int>] = [right(10), right(10)];
let input: [either<int, int>]/~ = [right(10), right(10)]/~;
let result = lefts(input);
assert (vec::len(result) == 0u);
}
#[test]
fn test_lefts_empty() {
let input: [either<int, int>] = [];
let input: [either<int, int>]/~ = []/~;
let result = lefts(input);
assert (vec::len(result) == 0u);
}
#[test]
fn test_rights() {
let input = [left(10), right(11), left(12), right(13), left(14)];
let input = [left(10), right(11), left(12), right(13), left(14)]/~;
let result = rights(input);
assert (result == [11, 13]);
assert (result == [11, 13]/~);
}
#[test]
fn test_rights_none() {
let input: [either<int, int>] = [left(10), left(10)];
let input: [either<int, int>]/~ = [left(10), left(10)]/~;
let result = rights(input);
assert (vec::len(result) == 0u);
}
#[test]
fn test_rights_empty() {
let input: [either<int, int>] = [];
let input: [either<int, int>]/~ = []/~;
let result = rights(input);
assert (vec::len(result) == 0u);
}
#[test]
fn test_partition() {
let input = [left(10), right(11), left(12), right(13), left(14)];
let input = [left(10), right(11), left(12), right(13), left(14)]/~;
let result = partition(input);
assert (result.lefts[0] == 10);
assert (result.lefts[1] == 12);
@ -165,7 +165,7 @@ fn test_partition() {
#[test]
fn test_partition_no_lefts() {
let input: [either<int, int>] = [right(10), right(11)];
let input: [either<int, int>]/~ = [right(10), right(11)]/~;
let result = partition(input);
assert (vec::len(result.lefts) == 0u);
assert (vec::len(result.rights) == 2u);
@ -173,7 +173,7 @@ fn test_partition_no_lefts() {
#[test]
fn test_partition_no_rights() {
let input: [either<int, int>] = [left(10), left(11)];
let input: [either<int, int>]/~ = [left(10), left(11)]/~;
let result = partition(input);
assert (vec::len(result.lefts) == 2u);
assert (vec::len(result.rights) == 0u);
@ -181,7 +181,7 @@ fn test_partition_no_rights() {
#[test]
fn test_partition_empty() {
let input: [either<int, int>] = [];
let input: [either<int, int>]/~ = []/~;
let result = partition(input);
assert (vec::len(result.lefts) == 0u);
assert (vec::len(result.rights) == 0u);

View file

@ -9,12 +9,12 @@ The 'fmt' extension is modeled on the posix printf system.
A posix conversion ostensibly looks like this
> %[parameter][flags][width][.precision][length]type
> %[parameter]/~[flags]/~[width]/~[.precision]/~[length]/~type
Given the different numeric type bestiary we have, we omit the 'length'
parameter and support slightly different conversions for 'type'
> %[parameter][flags][width][.precision]type
> %[parameter]/~[flags]/~[width]/~[.precision]/~type
we also only support translating-to-rust a tiny subset of the possible
combinations at the moment.
@ -71,7 +71,7 @@ mod ct {
// A formatted conversion from an expression to a string
type conv =
{param: option<int>,
flags: [flag],
flags: [flag]/~,
width: count,
precision: count,
ty: ty};
@ -81,14 +81,14 @@ mod ct {
enum piece { piece_string(str), piece_conv(conv), }
type error_fn = fn@(str) -> ! ;
fn parse_fmt_string(s: str, error: error_fn) -> [piece] {
let mut pieces: [piece] = [];
fn parse_fmt_string(s: str, error: error_fn) -> [piece]/~ {
let mut pieces: [piece]/~ = []/~;
let lim = str::len(s);
let mut buf = "";
fn flush_buf(buf: str, &pieces: [piece]) -> str {
fn flush_buf(buf: str, &pieces: [piece]/~) -> str {
if str::len(buf) > 0u {
let piece = piece_string(buf);
pieces += [piece];
pieces += [piece]/~;
}
ret "";
}
@ -108,7 +108,7 @@ mod ct {
} else {
buf = flush_buf(buf, pieces);
let rs = parse_conversion(s, i, lim, error);
pieces += [rs.piece];
pieces += [rs.piece]/~;
i = rs.next;
}
} else { buf += curr; i += size; }
@ -162,16 +162,16 @@ mod ct {
};
}
fn parse_flags(s: str, i: uint, lim: uint) ->
{flags: [flag], next: uint} {
let noflags: [flag] = [];
{flags: [flag]/~, next: uint} {
let noflags: [flag]/~ = []/~;
if i >= lim { ret {flags: noflags, next: i}; }
fn more_(f: flag, s: str, i: uint, lim: uint) ->
{flags: [flag], next: uint} {
{flags: [flag]/~, next: uint} {
let next = parse_flags(s, i + 1u, lim);
let rest = next.flags;
let j = next.next;
let curr: [flag] = [f];
let curr: [flag]/~ = [f]/~;
ret {flags: curr + rest, next: j};
}
let more = {|x|more_(x, s, i, lim)};
@ -262,7 +262,7 @@ mod ct {
// Functions used by the fmt extension at runtime. For now there are a lot of
// decisions made a runtime. If it proves worthwhile then some of these
// conditions can be evaluated at compile-time. For now though it's cleaner to
// implement it this way, I think.
// implement it 0this way, I think.
mod rt {
enum flag {
flag_left_justify,
@ -276,7 +276,7 @@ mod rt {
// FIXME (#1993): May not want to use a vector here for flags; instead
// just use a bool per flag.
type conv = {flags: [flag], width: count, precision: count, ty: ty};
type conv = {flags: [flag]/~, width: count, precision: count, ty: ty};
fn conv_int(cv: conv, i: int) -> str {
let radix = 10u;
@ -430,12 +430,13 @@ mod rt {
}
ret padstr + s;
}
fn have_flag(flags: [flag], f: flag) -> bool {
fn have_flag(flags: [flag]/~, f: flag) -> bool {
for vec::each(flags) {|candidate| if candidate == f { ret true; } }
ret false;
}
}
// Local Variables:
// mode: rust;
// fill-column: 78;

View file

@ -116,10 +116,10 @@ fn to_str_common(num: float, digits: uint, exact: bool) -> str {
let mut frac = num - (trunc as float);
// stack of digits
let mut fractionalParts = [];
let mut fractionalParts = []/~;
// FIXME: (#2608)
// This used to return right away without rounding, as "[-]num",
// This used to return right away without rounding, as "[-]/~num",
// but given epsilon like in f64.rs, I don't see how the comparison
// to epsilon did much when only used there.
// if (frac < epsilon && !exact) || digits == 0u { ret accum; }
@ -236,7 +236,7 @@ Leading and trailing whitespace are ignored.
# Return value
`none` if the string did not represent a valid number. Otherwise, `some(n)`
where `n` is the floating-point number represented by `[num]`.
where `n` is the floating-point number represented by `[num]/~`.
"]
fn from_str(num: str) -> option<float> {
if num == "inf" {
@ -261,7 +261,7 @@ fn from_str(num: str) -> option<float> {
_ { ret none; }
}
//Determine if first char is '-'/'+'. Set [pos] and [neg] accordingly.
//Determine if first char is '-'/'+'. Set [pos]/~ and [neg]/~ accordingly.
let mut neg = false; //Sign of the result
alt str::char_at(num, 0u) {
'-' {
@ -345,7 +345,7 @@ fn from_str(num: str) -> option<float> {
pos = char_range.next;
}
let multiplier = pow_with_uint(10u, exponent);
//Note: not [int::pow], otherwise, we'll quickly
//Note: not [int::pow]/~, otherwise, we'll quickly
//end up with a nice overflow
if neg_exponent {
total = total / multiplier;

View file

@ -66,7 +66,7 @@ Parse a buffer of bytes
* buf - A byte buffer
* radix - The base of the number
"]
fn parse_buf(buf: [u8], radix: uint) -> option<T> {
fn parse_buf(buf: [u8]/~, radix: uint) -> option<T> {
if vec::len(buf) == 0u { ret none; }
let mut i = vec::len(buf) - 1u;
let mut start = 0u;

View file

@ -11,7 +11,7 @@ pure fn hash(&&x: int) -> uint { ret x as uint; }
#[doc = "Returns `base` raised to the power of `exponent`"]
fn pow(base: int, exponent: uint) -> int {
if exponent == 0u { ret 1; } //Not mathemtically true if [base == 0]
if exponent == 0u { ret 1; } //Not mathemtically true if [base == 0]/~
if base == 0 { ret 0; }
let mut my_pow = exponent;
let mut acc = 1;

View file

@ -30,7 +30,7 @@ enum seek_style { seek_set, seek_end, seek_cur, }
// The raw underlying reader iface. All readers must implement this.
iface reader {
// FIXME (#2004): Seekable really should be orthogonal.
fn read_bytes(uint) -> [u8];
fn read_bytes(uint) -> [u8]/~;
fn read_byte() -> int;
fn unread_byte(int);
fn eof() -> bool;
@ -41,9 +41,9 @@ iface reader {
// Generic utility functions defined on readers
impl reader_util for reader {
fn read_chars(n: uint) -> [char] {
fn read_chars(n: uint) -> [char]/~ {
// returns the (consumed offset, n_req), appends characters to &chars
fn chars_from_buf(buf: [u8], &chars: [char]) -> (uint, uint) {
fn chars_from_buf(buf: [u8]/~, &chars: [char]/~) -> (uint, uint) {
let mut i = 0u;
while i < vec::len(buf) {
let b0 = buf[i];
@ -52,7 +52,7 @@ impl reader_util for reader {
i += 1u;
assert (w > 0u);
if w == 1u {
chars += [ b0 as char ];
chars += [ b0 as char ]/~;
cont;
}
// can't satisfy this char with the existing data
@ -71,12 +71,12 @@ impl reader_util for reader {
// See str::char_at
val += ((b0 << ((w + 1u) as u8)) as uint)
<< (w - 1u) * 6u - w - 1u;
chars += [ val as char ];
chars += [ val as char ]/~;
}
ret (i, 0u);
}
let mut buf: [u8] = [];
let mut chars: [char] = [];
let mut buf: [u8]/~ = []/~;
let mut chars: [char]/~ = []/~;
// might need more bytes, but reading n will never over-read
let mut nbread = n;
while nbread > 0u {
@ -110,20 +110,20 @@ impl reader_util for reader {
}
fn read_line() -> str {
let mut buf: [u8] = [];
let mut buf: [u8]/~ = []/~;
loop {
let ch = self.read_byte();
if ch == -1 || ch == 10 { break; }
buf += [ch as u8];
buf += [ch as u8]/~;
}
str::from_bytes(buf)
}
fn read_c_str() -> str {
let mut buf: [u8] = [];
let mut buf: [u8]/~ = []/~;
loop {
let ch = self.read_byte();
if ch < 1 { break; } else { buf += [ch as u8]; }
if ch < 1 { break; } else { buf += [ch as u8]/~; }
}
str::from_bytes(buf)
}
@ -156,8 +156,8 @@ impl reader_util for reader {
val
}
fn read_whole_stream() -> [u8] {
let mut buf: [u8] = [];
fn read_whole_stream() -> [u8]/~ {
let mut buf: [u8]/~ = []/~;
while !self.eof() { buf += self.read_bytes(2048u); }
buf
}
@ -192,8 +192,8 @@ fn convert_whence(whence: seek_style) -> i32 {
}
impl of reader for *libc::FILE {
fn read_bytes(len: uint) -> [u8] {
let mut buf : [mut u8] = [mut];
fn read_bytes(len: uint) -> [u8]/~ {
let mut buf : [mut u8]/~ = [mut]/~;
vec::reserve(buf, len);
vec::as_mut_buf(buf) {|b|
let read = libc::fread(b as *mut c_void, 1u as size_t,
@ -216,7 +216,7 @@ impl of reader for *libc::FILE {
// duration of its lifetime.
// FIXME there really should be a better way to do this // #2004
impl <T: reader, C> of reader for {base: T, cleanup: C} {
fn read_bytes(len: uint) -> [u8] { self.base.read_bytes(len) }
fn read_bytes(len: uint) -> [u8]/~ { self.base.read_bytes(len) }
fn read_byte() -> int { self.base.read_byte() }
fn unread_byte(byte: int) { self.base.unread_byte(byte); }
fn eof() -> bool { self.base.eof() }
@ -260,10 +260,10 @@ fn file_reader(path: str) -> result<reader, str> {
// Byte buffer readers
// TODO: const u8, but this fails with rustboot.
type byte_buf = {buf: [u8], mut pos: uint, len: uint};
type byte_buf = {buf: [u8]/~, mut pos: uint, len: uint};
impl of reader for byte_buf {
fn read_bytes(len: uint) -> [u8] {
fn read_bytes(len: uint) -> [u8]/~ {
let rest = self.len - self.pos;
let mut to_read = len;
if rest < to_read { to_read = rest; }
@ -286,19 +286,19 @@ impl of reader for byte_buf {
fn tell() -> uint { self.pos }
}
fn bytes_reader(bytes: [u8]) -> reader {
fn bytes_reader(bytes: [u8]/~) -> reader {
bytes_reader_between(bytes, 0u, vec::len(bytes))
}
fn bytes_reader_between(bytes: [u8], start: uint, end: uint) -> reader {
fn bytes_reader_between(bytes: [u8]/~, start: uint, end: uint) -> reader {
{buf: bytes, mut pos: start, len: end} as reader
}
fn with_bytes_reader<t>(bytes: [u8], f: fn(reader) -> t) -> t {
fn with_bytes_reader<t>(bytes: [u8]/~, f: fn(reader) -> t) -> t {
f(bytes_reader(bytes))
}
fn with_bytes_reader_between<t>(bytes: [u8], start: uint, end: uint,
fn with_bytes_reader_between<t>(bytes: [u8]/~, start: uint, end: uint,
f: fn(reader) -> t) -> t {
f(bytes_reader_between(bytes, start, end))
}
@ -402,7 +402,7 @@ fn fd_writer(fd: fd_t, cleanup: bool) -> writer {
}
fn mk_file_writer(path: str, flags: [fileflag])
fn mk_file_writer(path: str, flags: [fileflag]/~)
-> result<writer, str> {
#[cfg(windows)]
@ -451,9 +451,9 @@ fn u64_to_le_bytes<T>(n: u64, size: uint, f: fn([u8]/&) -> T) -> T {
(n >> 56) as u8]/&) }
_ {
let mut bytes: [u8] = [], i = size, n = n;
let mut bytes: [u8]/~ = []/~, i = size, n = n;
while i > 0u {
bytes += [(n & 255_u64) as u8];
bytes += [(n & 255_u64) as u8]/~;
n >>= 8_u64;
i -= 1u;
}
@ -481,11 +481,11 @@ fn u64_to_be_bytes<T>(n: u64, size: uint, f: fn([u8]/&) -> T) -> T {
(n >> 8) as u8,
n as u8]/&) }
_ {
let mut bytes: [u8] = [];
let mut bytes: [u8]/~ = []/~;
let mut i = size;
while i > 0u {
let shift = ((i - 1u) * 8u) as u64;
bytes += [(n >> shift) as u8];
bytes += [(n >> shift) as u8]/~;
i -= 1u;
}
f(bytes)
@ -493,7 +493,7 @@ fn u64_to_be_bytes<T>(n: u64, size: uint, f: fn([u8]/&) -> T) -> T {
}
}
fn u64_from_be_bytes(data: [u8], start: uint, size: uint) -> u64 {
fn u64_from_be_bytes(data: [u8]/~, start: uint, size: uint) -> u64 {
let mut sz = size;
assert (sz <= 8u);
let mut val = 0_u64;
@ -577,7 +577,7 @@ impl writer_util for writer {
fn write_u8(n: u8) { self.write([n]/&) }
}
fn file_writer(path: str, flags: [fileflag]) -> result<writer, str> {
fn file_writer(path: str, flags: [fileflag]/~) -> result<writer, str> {
result::chain(mk_file_writer(path, flags), { |w| result::ok(w)})
}
@ -638,7 +638,7 @@ fn mem_buffer() -> mem_buffer {
@{buf: dvec(), mut pos: 0u}
}
fn mem_buffer_writer(b: mem_buffer) -> writer { b as writer }
fn mem_buffer_buf(b: mem_buffer) -> [u8] { b.buf.get() }
fn mem_buffer_buf(b: mem_buffer) -> [u8]/~ { b.buf.get() }
fn mem_buffer_str(b: mem_buffer) -> str {
str::from_bytes(b.buf.get())
}
@ -650,7 +650,7 @@ fn with_str_writer(f: fn(writer)) -> str {
io::mem_buffer_str(buf)
}
fn with_buf_writer(f: fn(writer)) -> [u8] {
fn with_buf_writer(f: fn(writer)) -> [u8]/~ {
let buf = mem_buffer();
let wr = mem_buffer_writer(buf);
f(wr);
@ -679,7 +679,7 @@ fn read_whole_file_str(file: str) -> result<str, str> {
// FIXME (#2004): implement this in a low-level way. Going through the
// abstractions is pointless.
fn read_whole_file(file: str) -> result<[u8], str> {
fn read_whole_file(file: str) -> result<[u8]/~, str> {
result::chain(file_reader(file), { |rdr|
result::ok(rdr.read_whole_stream())
})
@ -772,7 +772,7 @@ mod tests {
{
let out: io::writer =
result::get(
io::file_writer(tmpfile, [io::create, io::truncate]));
io::file_writer(tmpfile, [io::create, io::truncate]/~));
out.write_str(frood);
}
let inp: io::reader = result::get(io::file_reader(tmpfile));
@ -784,22 +784,22 @@ mod tests {
#[test]
fn test_readchars_empty() {
let inp : io::reader = io::str_reader("");
let res : [char] = inp.read_chars(128u);
let res : [char]/~ = inp.read_chars(128u);
assert(vec::len(res) == 0u);
}
#[test]
fn test_readchars_wide() {
let wide_test = "生锈的汤匙切肉汤hello生锈的汤匙切肉汤";
let ivals : [int] = [
let ivals : [int]/~ = [
29983, 38152, 30340, 27748,
21273, 20999, 32905, 27748,
104, 101, 108, 108, 111,
29983, 38152, 30340, 27748,
21273, 20999, 32905, 27748];
fn check_read_ln(len : uint, s: str, ivals: [int]) {
21273, 20999, 32905, 27748]/~;
fn check_read_ln(len : uint, s: str, ivals: [int]/~) {
let inp : io::reader = io::str_reader(s);
let res : [char] = inp.read_chars(len);
let res : [char]/~ = inp.read_chars(len);
if (len <= vec::len(ivals)) {
assert(vec::len(res) == len);
}
@ -841,7 +841,7 @@ mod tests {
#[test]
fn file_writer_bad_name() {
alt io::file_writer("?/?", []) {
alt io::file_writer("?/?", []/~) {
result::err(e) {
assert str::starts_with(e, "error opening ?/?");
}
@ -862,16 +862,16 @@ mod tests {
#[test]
fn mem_buffer_overwrite() {
let mbuf = mem_buffer();
mbuf.write([0u8, 1u8, 2u8, 3u8]);
assert mem_buffer_buf(mbuf) == [0u8, 1u8, 2u8, 3u8];
mbuf.write([0u8, 1u8, 2u8, 3u8]/~);
assert mem_buffer_buf(mbuf) == [0u8, 1u8, 2u8, 3u8]/~;
mbuf.seek(-2, seek_cur);
mbuf.write([4u8, 5u8, 6u8, 7u8]);
assert mem_buffer_buf(mbuf) == [0u8, 1u8, 4u8, 5u8, 6u8, 7u8];
mbuf.write([4u8, 5u8, 6u8, 7u8]/~);
assert mem_buffer_buf(mbuf) == [0u8, 1u8, 4u8, 5u8, 6u8, 7u8]/~;
mbuf.seek(-2, seek_end);
mbuf.write([8u8]);
mbuf.write([8u8]/~);
mbuf.seek(1, seek_set);
mbuf.write([9u8]);
assert mem_buffer_buf(mbuf) == [0u8, 9u8, 4u8, 5u8, 8u8, 7u8];
mbuf.write([9u8]/~);
assert mem_buffer_buf(mbuf) == [0u8, 9u8, 4u8, 5u8, 8u8, 7u8]/~;
}
}

View file

@ -19,14 +19,14 @@ impl extensions<A> of iter::base_iter<A> for IMPL_T<A> {
}
impl extensions<A:copy> for IMPL_T<A> {
fn filter_to_vec(pred: fn(A) -> bool) -> [A] {
fn filter_to_vec(pred: fn(A) -> bool) -> [A]/~ {
iter::filter_to_vec(self, pred)
}
fn map_to_vec<B>(op: fn(A) -> B) -> [B] { iter::map_to_vec(self, op) }
fn to_vec() -> [A] { iter::to_vec(self) }
fn map_to_vec<B>(op: fn(A) -> B) -> [B]/~ { iter::map_to_vec(self, op) }
fn to_vec() -> [A]/~ { iter::to_vec(self) }
// FIXME--bug in resolve prevents this from working (#2611)
// fn flat_map_to_vec<B:copy,IB:base_iter<B>>(op: fn(A) -> IB) -> [B] {
// fn flat_map_to_vec<B:copy,IB:base_iter<B>>(op: fn(A) -> IB) -> [B]/~ {
// iter::flat_map_to_vec(self, op)
// }

View file

@ -26,8 +26,8 @@ fn any<A,IA:base_iter<A>>(self: IA, blk: fn(A) -> bool) -> bool {
}
fn filter_to_vec<A:copy,IA:base_iter<A>>(self: IA,
prd: fn(A) -> bool) -> [A] {
let mut result = [];
prd: fn(A) -> bool) -> [A]/~ {
let mut result = []/~;
self.size_hint().iter {|hint| vec::reserve(result, hint); }
for self.each {|a|
if prd(a) { vec::push(result, a); }
@ -35,8 +35,8 @@ fn filter_to_vec<A:copy,IA:base_iter<A>>(self: IA,
ret result;
}
fn map_to_vec<A:copy,B,IA:base_iter<A>>(self: IA, op: fn(A) -> B) -> [B] {
let mut result = [];
fn map_to_vec<A:copy,B,IA:base_iter<A>>(self: IA, op: fn(A) -> B) -> [B]/~ {
let mut result = []/~;
self.size_hint().iter {|hint| vec::reserve(result, hint); }
for self.each {|a|
vec::push(result, op(a));
@ -45,9 +45,9 @@ fn map_to_vec<A:copy,B,IA:base_iter<A>>(self: IA, op: fn(A) -> B) -> [B] {
}
fn flat_map_to_vec<A:copy,B:copy,IA:base_iter<A>,IB:base_iter<B>>(
self: IA, op: fn(A) -> IB) -> [B] {
self: IA, op: fn(A) -> IB) -> [B]/~ {
let mut result = [];
let mut result = []/~;
for self.each {|a|
for op(a).each {|b|
vec::push(result, b);
@ -64,8 +64,8 @@ fn foldl<A,B,IA:base_iter<A>>(self: IA, +b0: B, blk: fn(B, A) -> B) -> B {
ret b;
}
fn to_vec<A:copy,IA:base_iter<A>>(self: IA) -> [A] {
foldl::<A,[A],IA>(self, [], {|r, a| r + [a]})
fn to_vec<A:copy,IA:base_iter<A>>(self: IA) -> [A]/~ {
foldl::<A,[A]/~,IA>(self, []/~, {|r, a| r + [a]/~})
}
fn contains<A,IA:base_iter<A>>(self: IA, x: A) -> bool {
@ -135,17 +135,17 @@ fn test_enumerate() {
#[test]
fn test_map_and_to_vec() {
let a = bind vec::iter([0, 1, 2], _);
let a = bind vec::iter([0, 1, 2]/~, _);
let b = bind map(a, {|i| 2*i}, _);
let c = to_vec(b);
assert c == [0, 2, 4];
assert c == [0, 2, 4]/~;
}
#[test]
fn test_map_directly_on_vec() {
let b = bind map([0, 1, 2], {|i| 2*i}, _);
let b = bind map([0, 1, 2]/~, {|i| 2*i}, _);
let c = to_vec(b);
assert c == [0, 2, 4];
assert c == [0, 2, 4]/~;
}
#[test]
@ -155,7 +155,7 @@ fn test_filter_on_int_range() {
}
let l = to_vec(bind filter(bind int::range(0, 10, _), is_even, _));
assert l == [0, 2, 4, 6, 8];
assert l == [0, 2, 4, 6, 8]/~;
}
#[test]
@ -165,7 +165,7 @@ fn test_filter_on_uint_range() {
}
let l = to_vec(bind filter(bind uint::range(0u, 10u, _), is_even, _));
assert l == [0u, 2u, 4u, 6u, 8u];
assert l == [0u, 2u, 4u, 6u, 8u]/~;
}
#[test]
@ -180,7 +180,7 @@ fn test_filter_map() {
let l = to_vec(bind filter_map(
bind int::range(0, 5, _), negativate_the_evens, _));
assert l == [0, -2, -4];
assert l == [0, -2, -4]/~;
}
#[test]
@ -190,70 +190,70 @@ fn test_flat_map_with_option() {
else { none }
}
let a = bind vec::iter([0, 1, 2], _);
let a = bind vec::iter([0, 1, 2]/~, _);
let b = bind flat_map(a, if_even, _);
let c = to_vec(b);
assert c == [0, 2];
assert c == [0, 2]/~;
}
#[test]
fn test_flat_map_with_list() {
fn repeat(&&i: int) -> [int] {
let mut r = [];
int::range(0, i) {|_j| r += [i]; }
fn repeat(&&i: int) -> [int]/~ {
let mut r = []/~;
int::range(0, i) {|_j| r += [i]/~; }
r
}
let a = bind vec::iter([0, 1, 2, 3], _);
let a = bind vec::iter([0, 1, 2, 3]/~, _);
let b = bind flat_map(a, repeat, _);
let c = to_vec(b);
#debug["c = %?", c];
assert c == [1, 2, 2, 3, 3, 3];
assert c == [1, 2, 2, 3, 3, 3]/~;
}
#[test]
fn test_repeat() {
let mut c = [], i = 0u;
let mut c = []/~, i = 0u;
repeat(5u) {||
c += [(i * i)];
c += [(i * i)]/~;
i += 1u;
};
#debug["c = %?", c];
assert c == [0u, 1u, 4u, 9u, 16u];
assert c == [0u, 1u, 4u, 9u, 16u]/~;
}
#[test]
fn test_min() {
assert min([5, 4, 1, 2, 3]) == 1;
assert min([5, 4, 1, 2, 3]/~) == 1;
}
#[test]
#[should_fail]
#[ignore(cfg(windows))]
fn test_min_empty() {
min::<int, [int]>([]);
min::<int, [int]/~>([]/~);
}
#[test]
fn test_max() {
assert max([1, 2, 4, 2, 3]) == 4;
assert max([1, 2, 4, 2, 3]/~) == 4;
}
#[test]
#[should_fail]
#[ignore(cfg(windows))]
fn test_max_empty() {
max::<int, [int]>([]);
max::<int, [int]/~>([]/~);
}
#[test]
fn test_reversed() {
assert to_vec(bind reversed([1, 2, 3], _)) == [3, 2, 1];
assert to_vec(bind reversed([1, 2, 3]/~, _)) == [3, 2, 1]/~;
}
#[test]
fn test_count() {
assert count([1, 2, 1, 2, 1], 1) == 3u;
assert count([1, 2, 1, 2, 1]/~, 1) == 3u;
}
#[test]
@ -261,7 +261,7 @@ fn test_foldr() {
fn sub(&&a: int, &&b: int) -> int {
a - b
}
let sum = foldr([1, 2, 3, 4], 0, sub);
let sum = foldr([1, 2, 3, 4]/~, 0, sub);
assert sum == -2;
}
*/

View file

@ -40,23 +40,23 @@ export walk_dir;
export as_c_charp, fill_charp_buf;
native mod rustrt {
fn rust_env_pairs() -> [str];
fn rust_env_pairs() -> [str]/~;
fn rust_getcwd() -> str;
fn rust_path_is_dir(path: *libc::c_char) -> c_int;
fn rust_path_exists(path: *libc::c_char) -> c_int;
fn rust_list_files(path: str) -> [str];
fn rust_list_files(path: str) -> [str]/~;
fn rust_process_wait(handle: c_int) -> c_int;
fn last_os_error() -> str;
fn rust_set_exit_status(code: libc::intptr_t);
}
fn env() -> [(str,str)] {
let mut pairs = [];
fn env() -> [(str,str)]/~ {
let mut pairs = []/~;
for vec::each(rustrt::rust_env_pairs()) {|p|
let vs = str::splitn_char(p, '=', 1u);
assert vec::len(vs) == 2u;
pairs += [(vs[0], vs[1])];
vec::push(pairs, (vs[0], vs[1]));
}
ret pairs;
}
@ -116,7 +116,7 @@ mod win32 {
fn as_utf16_p<T>(s: str, f: fn(*u16) -> T) -> T {
let mut t = str::to_utf16(s);
// Null terminate before passing on.
t += [0u16];
t += [0u16]/~;
vec::as_buf(t, f)
}
}
@ -373,7 +373,7 @@ fn self_exe_path() -> option<path> {
fill_charp_buf() {|buf, sz|
let mib = [CTL_KERN as c_int,
KERN_PROC as c_int,
KERN_PROC_PATHNAME as c_int, -1 as c_int];
KERN_PROC_PATHNAME as c_int, -1 as c_int]/~;
sysctl(vec::unsafe::to_ptr(mib), vec::len(mib) as c_uint,
buf as *mut c_void, ptr::mut_addr_of(sz),
ptr::null(), 0u as size_t) == (0 as c_int)
@ -553,7 +553,7 @@ fn make_dir(p: path, mode: c_int) -> bool {
}
#[doc = "Lists the contents of a directory"]
fn list_dir(p: path) -> [str] {
fn list_dir(p: path) -> [str]/~ {
#[cfg(unix)]
fn star(p: str) -> str { p }
@ -579,7 +579,7 @@ Lists the contents of a directory
This version prepends each entry with the directory.
"]
fn list_dir_path(p: path) -> [str] {
fn list_dir_path(p: path) -> [str]/~ {
let mut p = p;
let pl = str::len(p);
if pl == 0u || (p[pl - 1u] as char != path::consts::path_sep
@ -670,7 +670,7 @@ fn copy_file(from: path, to: path) -> bool {
fclose(istream);
ret false;
}
let mut buf : [mut u8] = [mut];
let mut buf : [mut u8]/~ = [mut]/~;
let bufsize = 8192u;
vec::reserve(buf, bufsize);
let mut done = false;
@ -978,7 +978,7 @@ mod tests {
};
assert (ostream as uint != 0u);
let s = "hello";
let mut buf = vec::to_mut(str::bytes(s) + [0 as u8]);
let mut buf = vec::to_mut(str::bytes(s) + [0 as u8]/~);
vec::as_mut_buf(buf) {|b|
assert (libc::fwrite(b as *c_void, 1u as size_t,
(str::len(s) + 1u) as size_t, ostream)
@ -989,7 +989,7 @@ mod tests {
fail (#fmt("%s doesn't exist", in));
}
assert(rs);
let rslt = run::run_program("diff", [in, out]);
let rslt = run::run_program("diff", [in, out]/~);
assert (rslt == 0);
assert (remove_file(in));
assert (remove_file(out));

View file

@ -127,7 +127,7 @@ Connects a vector of path segments into a single path.
Inserts path separators as needed.
"]
fn connect_many(paths: [path]) -> path {
fn connect_many(paths: [path]/~) -> path {
ret if vec::len(paths) == 1u {
paths[0]
} else {
@ -144,7 +144,7 @@ each piece of the path. On Windows, if the path is absolute then
the first element of the returned vector will be the drive letter
followed by a colon.
"]
fn split(p: path) -> [path] {
fn split(p: path) -> [path]/~ {
str::split_nonempty(p, {|c|
c == consts::path_sep || c == consts::alt_path_sep
})
@ -234,7 +234,7 @@ fn normalize(p: path) -> path {
ret s;
fn strip_dots(s: [path]) -> [path] {
fn strip_dots(s: [path]/~) -> [path]/~ {
vec::filter_map(s, { |elem|
if elem == "." {
option::none
@ -244,12 +244,12 @@ fn normalize(p: path) -> path {
})
}
fn rollup_doubledots(s: [path]) -> [path] {
fn rollup_doubledots(s: [path]/~) -> [path]/~ {
if vec::is_empty(s) {
ret [];
ret []/~;
}
let mut t = [];
let mut t = []/~;
let mut i = vec::len(s);
let mut skip = 0;
while i != 0u {
@ -258,7 +258,7 @@ fn normalize(p: path) -> path {
skip += 1;
} else {
if skip == 0 {
t += [s[i]];
vec::push(t, s[i]);
} else {
skip -= 1;
}
@ -266,7 +266,7 @@ fn normalize(p: path) -> path {
}
let mut t = vec::reversed(t);
while skip > 0 {
t += [".."];
vec::push(t, "..");
skip -= 1;
}
ret t;
@ -322,28 +322,28 @@ mod tests {
#[test]
fn split1() {
let actual = split("a" + ps() + "b");
let expected = ["a", "b"];
let expected = ["a", "b"]/~;
assert actual == expected;
}
#[test]
fn split2() {
let actual = split("a" + aps() + "b");
let expected = ["a", "b"];
let expected = ["a", "b"]/~;
assert actual == expected;
}
#[test]
fn split3() {
let actual = split(ps() + "a" + ps() + "b");
let expected = ["a", "b"];
let expected = ["a", "b"]/~;
assert actual == expected;
}
#[test]
fn split4() {
let actual = split("a" + ps() + "b" + aps() + "c");
let expected = ["a", "b", "c"];
let expected = ["a", "b", "c"]/~;
assert actual == expected;
}

View file

@ -149,8 +149,8 @@ fn test() {
assert (p.fst == 50);
assert (p.snd == 60);
let v0 = [32000u16, 32001u16, 32002u16];
let v1 = [0u16, 0u16, 0u16];
let v0 = [32000u16, 32001u16, 32002u16]/~;
let v1 = [0u16, 0u16, 0u16]/~;
ptr::memcpy(ptr::offset(vec::unsafe::to_ptr(v1), 1u),
ptr::offset(vec::unsafe::to_ptr(v0), 1u), 1u);
@ -185,7 +185,7 @@ fn test_buf_len() {
str::as_c_str(s0) {|p0|
str::as_c_str(s1) {|p1|
str::as_c_str(s2) {|p2|
let v = [p0, p1, p2, null()];
let v = [p0, p1, p2, null()]/~;
vec::as_buf(v) {|vp|
assert unsafe { buf_len(vp) } == 3u;
}

View file

@ -7,9 +7,9 @@ enum rctx {}
#[abi = "cdecl"]
native mod rustrt {
fn rand_seed() -> [u8];
fn rand_seed() -> [u8]/~;
fn rand_new() -> *rctx;
fn rand_new_seeded(seed: [u8]) -> *rctx;
fn rand_new_seeded(seed: [u8]/~) -> *rctx;
fn rand_next(c: *rctx) -> u32;
fn rand_free(c: *rctx);
}
@ -151,19 +151,19 @@ impl extensions for rng {
}
#[doc = "Return a random byte string of the specified length"]
fn gen_bytes(len: uint) -> [u8] {
fn gen_bytes(len: uint) -> [u8]/~ {
vec::from_fn(len) {|_i|
self.gen_u8()
}
}
#[doc = "Choose an item randomly, failing if values is empty"]
fn choose<T:copy>(values: [T]) -> T {
fn choose<T:copy>(values: [T]/~) -> T {
self.choose_option(values).get()
}
#[doc = "Choose some(item) randomly, returning none if values is empty"]
fn choose_option<T:copy>(values: [T]) -> option<T> {
fn choose_option<T:copy>(values: [T]/~) -> option<T> {
if values.is_empty() {
none
} else {
@ -173,13 +173,13 @@ impl extensions for rng {
#[doc = "Choose an item respecting the relative weights, failing if \
the sum of the weights is 0"]
fn choose_weighted<T: copy>(v : [weighted<T>]) -> T {
fn choose_weighted<T: copy>(v : [weighted<T>]/~) -> T {
self.choose_weighted_option(v).get()
}
#[doc = "Choose some(item) respecting the relative weights, returning \
none if the sum of the weights is 0"]
fn choose_weighted_option<T:copy>(v: [weighted<T>]) -> option<T> {
fn choose_weighted_option<T:copy>(v: [weighted<T>]/~) -> option<T> {
let mut total = 0u;
for v.each {|item|
total += item.weight;
@ -200,25 +200,25 @@ impl extensions for rng {
#[doc = "Return a vec containing copies of the items, in order, where \
the weight of the item determines how many copies there are"]
fn weighted_vec<T:copy>(v: [weighted<T>]) -> [T] {
let mut r = [];
fn weighted_vec<T:copy>(v: [weighted<T>]/~) -> [T]/~ {
let mut r = []/~;
for v.each {|item|
for uint::range(0u, item.weight) {|_i|
r += [item.item];
r += [item.item]/~;
}
}
r
}
#[doc = "Shuffle a vec"]
fn shuffle<T:copy>(values: [T]) -> [T] {
fn shuffle<T:copy>(values: [T]/~) -> [T]/~ {
let mut m = vec::to_mut(values);
self.shuffle_mut(m);
ret vec::from_mut(m);
}
#[doc = "Shuffle a mutable vec in place"]
fn shuffle_mut<T>(&&values: [mut T]) {
fn shuffle_mut<T>(&&values: [mut T]/~) {
let mut i = values.len();
while i >= 2u {
// invariant: elements with index >= i have been locked in place.
@ -241,7 +241,7 @@ impl of rng for @rand_res {
}
#[doc = "Create a new random seed for seeded_rng"]
fn seed() -> [u8] {
fn seed() -> [u8]/~ {
rustrt::rand_seed()
}
@ -254,7 +254,7 @@ fn rng() -> rng {
generator constructed with a given seed will generate the same \
sequence of values as all other generators constructed with the \
same seed. The seed may be any length."]
fn seeded_rng(seed: [u8]) -> rng {
fn seeded_rng(seed: [u8]/~) -> rng {
@rand_res(rustrt::rand_new_seeded(seed)) as rng
}
@ -301,7 +301,7 @@ mod tests {
#[test]
fn rng_seeded_custom_seed() {
// much shorter than generated seeds which are 1024 bytes
let seed = [2u8, 32u8, 4u8, 32u8, 51u8];
let seed = [2u8, 32u8, 4u8, 32u8, 51u8]/~;
let ra = rand::seeded_rng(seed);
let rb = rand::seeded_rng(seed);
assert ra.gen_str(100u) == rb.gen_str(100u);
@ -309,7 +309,7 @@ mod tests {
#[test]
fn rng_seeded_custom_seed2() {
let seed = [2u8, 32u8, 4u8, 32u8, 51u8];
let seed = [2u8, 32u8, 4u8, 32u8, 51u8]/~;
let ra = rand::seeded_rng(seed);
// Regression test that isaac is actually using the above vector
let r = ra.next();
@ -387,55 +387,56 @@ mod tests {
#[test]
fn choose() {
let r = rand::rng();
assert r.choose([1, 1, 1]) == 1;
assert r.choose([1, 1, 1]/~) == 1;
}
#[test]
fn choose_option() {
let r = rand::rng();
assert r.choose_option([]) == none::<int>;
assert r.choose_option([1, 1, 1]) == some(1);
assert r.choose_option([]/~) == none::<int>;
assert r.choose_option([1, 1, 1]/~) == some(1);
}
#[test]
fn choose_weighted() {
let r = rand::rng();
assert r.choose_weighted([{weight: 1u, item: 42}]) == 42;
assert r.choose_weighted([{weight: 1u, item: 42}]/~) == 42;
assert r.choose_weighted([
{weight: 0u, item: 42},
{weight: 1u, item: 43}
]) == 43;
]/~) == 43;
}
#[test]
fn choose_weighted_option() {
let r = rand::rng();
assert r.choose_weighted_option([{weight: 1u, item: 42}]) == some(42);
assert r.choose_weighted_option([{weight: 1u, item: 42}]/~) ==
some(42);
assert r.choose_weighted_option([
{weight: 0u, item: 42},
{weight: 1u, item: 43}
]) == some(43);
assert r.choose_weighted_option([]) == none::<int>;
]/~) == some(43);
assert r.choose_weighted_option([]/~) == none::<int>;
}
#[test]
fn weighted_vec() {
let r = rand::rng();
let empty: [int] = [];
assert r.weighted_vec([]) == empty;
let empty: [int]/~ = []/~;
assert r.weighted_vec([]/~) == empty;
assert r.weighted_vec([
{weight: 0u, item: 3u},
{weight: 1u, item: 2u},
{weight: 2u, item: 1u}
]) == [2u, 1u, 1u];
]/~) == [2u, 1u, 1u]/~;
}
#[test]
fn shuffle() {
let r = rand::rng();
let empty: [int] = [];
assert r.shuffle([]) == empty;
assert r.shuffle([1, 1, 1]) == [1, 1, 1];
let empty: [int]/~ = []/~;
assert r.shuffle([]/~) == empty;
assert r.shuffle([1, 1, 1]/~) == [1, 1, 1]/~;
}
}

View file

@ -245,18 +245,18 @@ checking for overflow:
if x == uint::max_value { ret err(\"overflow\"); }
else { ret ok(x+1u); }
}
map([1u, 2u, 3u], inc_conditionally).chain {|incd|
assert incd == [2u, 3u, 4u];
map([1u, 2u, 3u]/~, inc_conditionally).chain {|incd|
assert incd == [2u, 3u, 4u]/~;
}
"]
fn map_vec<T,U:copy,V:copy>(
ts: [T], op: fn(T) -> result<V,U>) -> result<[V],U> {
ts: [T]/~, op: fn(T) -> result<V,U>) -> result<[V]/~,U> {
let mut vs: [V] = [];
let mut vs: [V]/~ = []/~;
vec::reserve(vs, vec::len(ts));
for vec::each(ts) {|t|
alt op(t) {
ok(v) { vs += [v]; }
ok(v) { vs += [v]/~; }
err(u) { ret err(u); }
}
}
@ -284,16 +284,17 @@ length. While we do not often use preconditions in the standard
library, a precondition is used here because result::t is generally
used in 'careful' code contexts where it is both appropriate and easy
to accommodate an error like the vectors being of different lengths."]
fn map_vec2<S,T,U:copy,V:copy>(ss: [S], ts: [T], op: fn(S,T) -> result<V,U>)
: vec::same_length(ss, ts) -> result<[V],U> {
fn map_vec2<S,T,U:copy,V:copy>(ss: [S]/~, ts: [T]/~,
op: fn(S,T) -> result<V,U>)
: vec::same_length(ss, ts) -> result<[V]/~,U> {
let n = vec::len(ts);
let mut vs = [];
let mut vs = []/~;
vec::reserve(vs, n);
let mut i = 0u;
while i < n {
alt op(ss[i],ts[i]) {
ok(v) { vs += [v]; }
ok(v) { vs += [v]/~; }
err(u) { ret err(u); }
}
i += 1u;
@ -306,7 +307,7 @@ Applies op to the pairwise elements from `ss` and `ts`, aborting on
error. This could be implemented using `map2()` but it is more efficient
on its own as no result vector is built.
"]
fn iter_vec2<S,T,U:copy>(ss: [S], ts: [T],
fn iter_vec2<S,T,U:copy>(ss: [S]/~, ts: [T]/~,
op: fn(S,T) -> result<(),U>)
: vec::same_length(ss, ts)
-> result<(),U> {

View file

@ -62,8 +62,8 @@ Run a program, providing stdin, stdout and stderr handles
The process id of the spawned process
"]
fn spawn_process(prog: str, args: [str],
env: option<[(str,str)]>,
fn spawn_process(prog: str, args: [str]/~,
env: option<[(str,str)]/~>,
dir: option<str>,
in_fd: c_int, out_fd: c_int, err_fd: c_int)
-> pid_t {
@ -77,36 +77,36 @@ fn spawn_process(prog: str, args: [str],
}
}
fn with_argv<T>(prog: str, args: [str],
fn with_argv<T>(prog: str, args: [str]/~,
cb: fn(**libc::c_char) -> T) -> T {
let mut argptrs = str::as_c_str(prog) {|b| [b] };
let mut tmps = [];
let mut argptrs = str::as_c_str(prog) {|b| [b]/~ };
let mut tmps = []/~;
for vec::each(args) {|arg|
let t = @arg;
tmps += [t];
argptrs += str::as_c_str(*t) {|b| [b] };
tmps += [t]/~;
argptrs += str::as_c_str(*t) {|b| [b]/~ };
}
argptrs += [ptr::null()];
argptrs += [ptr::null()]/~;
vec::as_buf(argptrs, cb)
}
#[cfg(unix)]
fn with_envp<T>(env: option<[(str,str)]>,
fn with_envp<T>(env: option<[(str,str)]/~>,
cb: fn(*c_void) -> T) -> T {
// On posixy systems we can pass a char** for envp, which is
// a null-terminated array of "k=v\n" strings.
alt env {
some(es) if !vec::is_empty(es) {
let mut tmps = [];
let mut ptrs = [];
let mut tmps = []/~;
let mut ptrs = []/~;
for vec::each(es) {|e|
let (k,v) = e;
let t = @(#fmt("%s=%s", k, v));
vec::push(tmps, t);
ptrs += str::as_c_str(*t) {|b| [b]};
ptrs += str::as_c_str(*t) {|b| [b]/~};
}
ptrs += [ptr::null()];
ptrs += [ptr::null()]/~;
vec::as_buf(ptrs) { |p|
unsafe { cb(::unsafe::reinterpret_cast(p)) }
}
@ -118,7 +118,7 @@ fn with_envp<T>(env: option<[(str,str)]>,
}
#[cfg(windows)]
fn with_envp<T>(env: option<[(str,str)]>,
fn with_envp<T>(env: option<[(str,str)]/~>,
cb: fn(*c_void) -> T) -> T {
// On win32 we pass an "environment block" which is not a char**, but
// rather a concatenation of null-terminated k=v\0 sequences, with a final
@ -126,15 +126,15 @@ fn with_envp<T>(env: option<[(str,str)]>,
unsafe {
alt env {
some(es) if !vec::is_empty(es) {
let mut blk : [u8] = [];
let mut blk : [u8]/~ = []/~;
for vec::each(es) {|e|
let (k,v) = e;
let t = #fmt("%s=%s", k, v);
let mut v : [u8] = ::unsafe::reinterpret_cast(t);
let mut v : [u8]/~ = ::unsafe::reinterpret_cast(t);
blk += v;
::unsafe::forget(v);
}
blk += [0_u8];
blk += [0_u8]/~;
vec::as_buf(blk) {|p| cb(::unsafe::reinterpret_cast(p)) }
}
_ {
@ -164,7 +164,7 @@ Spawns a process and waits for it to terminate
The process id
"]
fn run_program(prog: str, args: [str]) -> int {
fn run_program(prog: str, args: [str]/~) -> int {
let pid = spawn_process(prog, args, none, none,
0i32, 0i32, 0i32);
if pid == -1 as pid_t { fail; }
@ -187,7 +187,7 @@ The class will ensure that file descriptors are closed properly.
A class with a <program> field
"]
fn start_program(prog: str, args: [str]) -> program {
fn start_program(prog: str, args: [str]/~) -> program {
let pipe_input = os::pipe();
let pipe_output = os::pipe();
let pipe_err = os::pipe();
@ -271,7 +271,7 @@ contents of stdout and stderr.
A record, {status: int, out: str, err: str} containing the exit code,
the contents of stdout and the contents of stderr.
"]
fn program_output(prog: str, args: [str]) ->
fn program_output(prog: str, args: [str]/~) ->
{status: int, out: str, err: str} {
let pipe_in = os::pipe();
@ -397,9 +397,9 @@ mod tests {
// Regression test for memory leaks
#[ignore(cfg(windows))] // FIXME (#2626)
fn test_leaks() {
run::run_program("echo", []);
run::start_program("echo", []);
run::program_output("echo", []);
run::run_program("echo", []/~);
run::start_program("echo", []/~);
run::program_output("echo", []/~);
}
#[test]
@ -410,7 +410,7 @@ mod tests {
let pid =
run::spawn_process(
"cat", [], none, none,
"cat", []/~, none, none,
pipe_in.in, pipe_out.out, pipe_err.out);
os::close(pipe_in.in);
os::close(pipe_out.out);
@ -430,7 +430,7 @@ mod tests {
#[test]
fn waitpid() {
let pid = run::spawn_process("false", [],
let pid = run::spawn_process("false", []/~,
none, none,
0i32, 0i32, 0i32);
let status = run::waitpid(pid);

View file

@ -122,7 +122,7 @@ Convert a vector of bytes to a UTF-8 string
Fails if invalid UTF-8
"]
pure fn from_bytes(vv: [u8]) -> str {
pure fn from_bytes(vv: [u8]/~) -> str {
assert is_utf8(vv);
ret unsafe { unsafe::from_bytes(vv) };
}
@ -136,7 +136,7 @@ Fails if invalid UTF-8
"]
pure fn from_byte(b: u8) -> str {
assert b < 128u8;
let mut v = [b, 0u8];
let mut v = [b, 0u8]/~;
unsafe { ::unsafe::transmute(v) }
}
@ -209,7 +209,7 @@ fn push_char(&s: str, ch: char) {
}
as_bytes(s) {|bytes|
let mut mut_bytes: [u8] = ::unsafe::reinterpret_cast(bytes);
let mut mut_bytes: [u8]/~ = ::unsafe::reinterpret_cast(bytes);
vec::unsafe::set_len(mut_bytes, new_len + 1u);
::unsafe::forget(mut_bytes);
}
@ -322,10 +322,10 @@ Converts a string to a vector of bytes
The result vector is not null-terminated.
"]
pure fn bytes(s: str) -> [u8] {
pure fn bytes(s: str) -> [u8]/~ {
unsafe {
let mut s_copy = s;
let mut v: [u8] = ::unsafe::transmute(s_copy);
let mut v: [u8]/~ = ::unsafe::transmute(s_copy);
vec::unsafe::set_len(v, len(s));
ret v;
}
@ -342,12 +342,12 @@ pure fn byte_slice<T>(s: str/&, f: fn([u8]/&) -> T) -> T {
}
#[doc = "Convert a string to a vector of characters"]
pure fn chars(s: str/&) -> [char] {
let mut buf = [], i = 0u;
pure fn chars(s: str/&) -> [char]/~ {
let mut buf = []/~, i = 0u;
let len = len(s);
while i < len {
let {ch, next} = char_range_at(s, i);
buf += [ch];
buf += [ch]/~;
i = next;
}
ret buf;
@ -378,7 +378,7 @@ pure fn slice(s: str/&, begin: uint, end: uint) -> str {
#[doc = "
Splits a string into substrings at each occurrence of a given character
"]
pure fn split_char(s: str/&, sep: char) -> [str] {
pure fn split_char(s: str/&, sep: char) -> [str]/~ {
split_char_inner(s, sep, len(s), true)
}
@ -388,27 +388,27 @@ character up to 'count' times
The byte must be a valid UTF-8/ASCII byte
"]
pure fn splitn_char(s: str/&, sep: char, count: uint) -> [str] {
pure fn splitn_char(s: str/&, sep: char, count: uint) -> [str]/~ {
split_char_inner(s, sep, count, true)
}
#[doc = "
Like `split_char`, but omits empty strings from the returned vector
"]
pure fn split_char_nonempty(s: str/&, sep: char) -> [str] {
pure fn split_char_nonempty(s: str/&, sep: char) -> [str]/~ {
split_char_inner(s, sep, len(s), false)
}
pure fn split_char_inner(s: str/&, sep: char, count: uint, allow_empty: bool)
-> [str] {
-> [str]/~ {
if sep < 128u as char {
let b = sep as u8, l = len(s);
let mut result = [], done = 0u;
let mut result = []/~, done = 0u;
let mut i = 0u, start = 0u;
while i < l && done < count {
if s[i] == b {
if allow_empty || start < i {
result += [unsafe { unsafe::slice_bytes(s, start, i) }];
result += [unsafe { unsafe::slice_bytes(s, start, i) }]/~;
}
start = i + 1u;
done += 1u;
@ -416,7 +416,7 @@ pure fn split_char_inner(s: str/&, sep: char, count: uint, allow_empty: bool)
i += 1u;
}
if allow_empty || start < l {
result += [unsafe { unsafe::slice_bytes(s, start, l) }];
result += [unsafe { unsafe::slice_bytes(s, start, l) }]/~;
}
result
} else {
@ -426,7 +426,7 @@ pure fn split_char_inner(s: str/&, sep: char, count: uint, allow_empty: bool)
#[doc = "Splits a string into substrings using a character function"]
pure fn split(s: str/&, sepfn: fn(char) -> bool) -> [str] {
pure fn split(s: str/&, sepfn: fn(char) -> bool) -> [str]/~ {
split_inner(s, sepfn, len(s), true)
}
@ -434,24 +434,24 @@ pure fn split(s: str/&, sepfn: fn(char) -> bool) -> [str] {
Splits a string into substrings using a character function, cutting at
most `count` times.
"]
pure fn splitn(s: str/&, sepfn: fn(char) -> bool, count: uint) -> [str] {
pure fn splitn(s: str/&, sepfn: fn(char) -> bool, count: uint) -> [str]/~ {
split_inner(s, sepfn, count, true)
}
#[doc = "Like `split`, but omits empty strings from the returned vector"]
pure fn split_nonempty(s: str/&, sepfn: fn(char) -> bool) -> [str] {
pure fn split_nonempty(s: str/&, sepfn: fn(char) -> bool) -> [str]/~ {
split_inner(s, sepfn, len(s), false)
}
pure fn split_inner(s: str/&, sepfn: fn(cc: char) -> bool, count: uint,
allow_empty: bool) -> [str] {
allow_empty: bool) -> [str]/~ {
let l = len(s);
let mut result = [], i = 0u, start = 0u, done = 0u;
let mut result = []/~, i = 0u, start = 0u, done = 0u;
while i < l && done < count {
let {ch, next} = char_range_at(s, i);
if sepfn(ch) {
if allow_empty || start < i {
result += [unsafe { unsafe::slice_bytes(s, start, i) }];
result += [unsafe { unsafe::slice_bytes(s, start, i) }]/~;
}
start = next;
done += 1u;
@ -459,7 +459,7 @@ pure fn split_inner(s: str/&, sepfn: fn(cc: char) -> bool, count: uint,
i = next;
}
if allow_empty || start < l {
result += [unsafe { unsafe::slice_bytes(s, start, l) }];
result += [unsafe { unsafe::slice_bytes(s, start, l) }]/~;
}
result
}
@ -510,19 +510,19 @@ Splits a string into a vector of the substrings separated by a given string
assert [\"\", \"XXX\", \"YYY\", \"\"] == split_str(\".XXX.YYY.\", \".\")
~~~
"]
pure fn split_str(s: str/&a, sep: str/&b) -> [str] {
let mut result = [];
pure fn split_str(s: str/&a, sep: str/&b) -> [str]/~ {
let mut result = []/~;
iter_between_matches(s, sep) {|from, to|
unsafe { result += [unsafe::slice_bytes(s, from, to)]; }
unsafe { result += [unsafe::slice_bytes(s, from, to)]/~; }
}
result
}
pure fn split_str_nonempty(s: str/&a, sep: str/&b) -> [str] {
let mut result = [];
pure fn split_str_nonempty(s: str/&a, sep: str/&b) -> [str]/~ {
let mut result = []/~;
iter_between_matches(s, sep) {|from, to|
if to > from {
unsafe { result += [unsafe::slice_bytes(s, from, to)]; }
unsafe { result += [unsafe::slice_bytes(s, from, to)]/~; }
}
}
result
@ -531,13 +531,13 @@ pure fn split_str_nonempty(s: str/&a, sep: str/&b) -> [str] {
#[doc = "
Splits a string into a vector of the substrings separated by LF ('\\n')
"]
pure fn lines(s: str/&) -> [str] { split_char(s, '\n') }
pure fn lines(s: str/&) -> [str]/~ { split_char(s, '\n') }
#[doc = "
Splits a string into a vector of the substrings separated by LF ('\\n')
and/or CR LF ('\\r\\n')
"]
pure fn lines_any(s: str/&) -> [str] {
pure fn lines_any(s: str/&) -> [str]/~ {
vec::map(lines(s), {|s|
let l = len(s);
let mut cp = s;
@ -551,7 +551,7 @@ pure fn lines_any(s: str/&) -> [str] {
#[doc = "
Splits a string into a vector of the substrings separated by whitespace
"]
pure fn words(s: str/&) -> [str] {
pure fn words(s: str/&) -> [str]/~ {
split_nonempty(s, {|c| char::is_whitespace(c)})
}
@ -1264,8 +1264,8 @@ pure fn is_utf16(v: [const u16]/&) -> bool {
}
#[doc = "Converts to a vector of `u16` encoded as UTF-16"]
pure fn to_utf16(s: str/&) -> [u16] {
let mut u = [];
pure fn to_utf16(s: str/&) -> [u16]/~ {
let mut u = []/~;
chars_iter(s) {|cch|
// Arithmetic with u32 literals is easier on the eyes than chars.
let mut ch = cch as u32;
@ -1273,14 +1273,14 @@ pure fn to_utf16(s: str/&) -> [u16] {
if (ch & 0xFFFF_u32) == ch {
// The BMP falls through (assuming non-surrogate, as it should)
assert ch <= 0xD7FF_u32 || ch >= 0xE000_u32;
u += [ch as u16]
u += [ch as u16]/~
} else {
// Supplementary planes break into surrogates.
assert ch >= 0x1_0000_u32 && ch <= 0x10_FFFF_u32;
ch -= 0x1_0000_u32;
let w1 = 0xD800_u16 | ((ch >> 10) as u16);
let w2 = 0xDC00_u16 | ((ch as u16) & 0x3FF_u16);
u += [w1, w2]
u += [w1, w2]/~
}
}
ret u;
@ -1568,9 +1568,9 @@ interop.
let i = str::as_bytes(\"Hello World\") { |bytes| vec::len(bytes) };
~~~
"]
pure fn as_bytes<T>(s: str, f: fn([u8]) -> T) -> T {
pure fn as_bytes<T>(s: str, f: fn([u8]/~) -> T) -> T {
unsafe {
let v: *[u8] = ::unsafe::reinterpret_cast(ptr::addr_of(s));
let v: *[u8]/~ = ::unsafe::reinterpret_cast(ptr::addr_of(s));
f(*v)
}
}
@ -1723,7 +1723,7 @@ mod unsafe {
#[doc = "Create a Rust string from a *u8 buffer of the given length"]
unsafe fn from_buf_len(buf: *u8, len: uint) -> str {
let mut v: [u8] = [];
let mut v: [u8]/~ = []/~;
vec::reserve(v, len + 1u);
vec::as_buf(v) {|b| ptr::memcpy(b, buf, len); }
vec::unsafe::set_len(v, len);
@ -1750,9 +1750,9 @@ mod unsafe {
Does not verify that the vector contains valid UTF-8.
"]
unsafe fn from_bytes(v: [const u8]) -> str {
unsafe fn from_bytes(v: [const u8]/~) -> str {
unsafe {
let mut vcopy : [u8] = ::unsafe::transmute(copy v);
let mut vcopy = ::unsafe::transmute(copy v);
vec::push(vcopy, 0u8);
::unsafe::transmute(vcopy)
}
@ -1763,7 +1763,7 @@ mod unsafe {
Does not verify that the byte is valid UTF-8.
"]
unsafe fn from_byte(u: u8) -> str { unsafe::from_bytes([u]) }
unsafe fn from_byte(u: u8) -> str { unsafe::from_bytes([u]/~) }
#[doc = "
Takes a bytewise (not UTF-8) slice from a string.
@ -1780,7 +1780,7 @@ mod unsafe {
assert (begin <= end);
assert (end <= n);
let mut v = [];
let mut v = []/~;
vec::reserve(v, end - begin + 1u);
unsafe {
vec::as_buf(v) { |vbuf|
@ -1788,7 +1788,7 @@ mod unsafe {
ptr::memcpy(vbuf, src, end - begin);
}
vec::unsafe::set_len(v, end - begin);
v += [0u8];
v += [0u8]/~;
::unsafe::transmute(v)
}
}
@ -1800,7 +1800,7 @@ mod unsafe {
}
#[doc = "Appends a vector of bytes to a string. (Not UTF-8 safe)."]
unsafe fn push_bytes(&s: str, bytes: [u8]) {
unsafe fn push_bytes(&s: str, bytes: [u8]/~) {
for vec::each(bytes) {|byte| rustrt::rust_str_push(s, byte); }
}
@ -1839,7 +1839,7 @@ mod unsafe {
#[test]
fn test_from_buf_len() {
unsafe {
let a = [65u8, 65u8, 65u8, 65u8, 65u8, 65u8, 65u8, 0u8];
let a = [65u8, 65u8, 65u8, 65u8, 65u8, 65u8, 65u8, 0u8]/~;
let b = vec::unsafe::to_ptr(a);
let c = from_buf_len(b, 3u);
assert (c == "AAA");
@ -1920,18 +1920,18 @@ impl extensions/& for str/& {
fn slice(begin: uint, end: uint) -> str { slice(self, begin, end) }
#[doc = "Splits a string into substrings using a character function"]
#[inline]
fn split(sepfn: fn(char) -> bool) -> [str] { split(self, sepfn) }
fn split(sepfn: fn(char) -> bool) -> [str]/~ { split(self, sepfn) }
#[doc = "
Splits a string into substrings at each occurrence of a given character
"]
#[inline]
fn split_char(sep: char) -> [str] { split_char(self, sep) }
fn split_char(sep: char) -> [str]/~ { split_char(self, sep) }
#[doc = "
Splits a string into a vector of the substrings separated by a given
string
"]
#[inline]
fn split_str(sep: str/&a) -> [str] { split_str(self, sep) }
fn split_str(sep: str/&a) -> [str]/~ { split_str(self, sep) }
#[doc = "Returns true if one string starts with another"]
#[inline]
fn starts_with(needle: str/&a) -> bool { starts_with(self, needle) }
@ -2032,79 +2032,79 @@ mod tests {
#[test]
fn test_split_char() {
fn t(s: str, c: char, u: [str]) {
fn t(s: str, c: char, u: [str]/~) {
log(debug, "split_byte: " + s);
let v = split_char(s, c);
#debug("split_byte to: %?", v);
assert vec::all2(v, u, { |a,b| a == b });
}
t("abc.hello.there", '.', ["abc", "hello", "there"]);
t(".hello.there", '.', ["", "hello", "there"]);
t("...hello.there.", '.', ["", "", "", "hello", "there", ""]);
t("abc.hello.there", '.', ["abc", "hello", "there"]/~);
t(".hello.there", '.', ["", "hello", "there"]/~);
t("...hello.there.", '.', ["", "", "", "hello", "there", ""]/~);
assert ["", "", "", "hello", "there", ""]
assert ["", "", "", "hello", "there", ""]/~
== split_char("...hello.there.", '.');
assert [""] == split_char("", 'z');
assert ["",""] == split_char("z", 'z');
assert ["ok"] == split_char("ok", 'z');
assert [""]/~ == split_char("", 'z');
assert ["",""]/~ == split_char("z", 'z');
assert ["ok"]/~ == split_char("ok", 'z');
}
#[test]
fn test_split_char_2() {
let data = "ประเทศไทย中华Việt Nam";
assert ["ประเทศไทย中华", "iệt Nam"]
assert ["ประเทศไทย中华", "iệt Nam"]/~
== split_char(data, 'V');
assert ["ประเ", "ศไ", "ย中华Việt Nam"]
assert ["ประเ", "ศไ", "ย中华Việt Nam"]/~
== split_char(data, 'ท');
}
#[test]
fn test_splitn_char() {
fn t(s: str, c: char, n: uint, u: [str]) {
fn t(s: str, c: char, n: uint, u: [str]/~) {
log(debug, "splitn_byte: " + s);
let v = splitn_char(s, c, n);
#debug("split_byte to: %?", v);
#debug("comparing vs. %?", u);
assert vec::all2(v, u, { |a,b| a == b });
}
t("abc.hello.there", '.', 0u, ["abc.hello.there"]);
t("abc.hello.there", '.', 1u, ["abc", "hello.there"]);
t("abc.hello.there", '.', 2u, ["abc", "hello", "there"]);
t("abc.hello.there", '.', 3u, ["abc", "hello", "there"]);
t(".hello.there", '.', 0u, [".hello.there"]);
t(".hello.there", '.', 1u, ["", "hello.there"]);
t("...hello.there.", '.', 3u, ["", "", "", "hello.there."]);
t("...hello.there.", '.', 5u, ["", "", "", "hello", "there", ""]);
t("abc.hello.there", '.', 0u, ["abc.hello.there"]/~);
t("abc.hello.there", '.', 1u, ["abc", "hello.there"]/~);
t("abc.hello.there", '.', 2u, ["abc", "hello", "there"]/~);
t("abc.hello.there", '.', 3u, ["abc", "hello", "there"]/~);
t(".hello.there", '.', 0u, [".hello.there"]/~);
t(".hello.there", '.', 1u, ["", "hello.there"]/~);
t("...hello.there.", '.', 3u, ["", "", "", "hello.there."]/~);
t("...hello.there.", '.', 5u, ["", "", "", "hello", "there", ""]/~);
assert [""] == splitn_char("", 'z', 5u);
assert ["",""] == splitn_char("z", 'z', 5u);
assert ["ok"] == splitn_char("ok", 'z', 5u);
assert ["z"] == splitn_char("z", 'z', 0u);
assert ["w.x.y"] == splitn_char("w.x.y", '.', 0u);
assert ["w","x.y"] == splitn_char("w.x.y", '.', 1u);
assert [""]/~ == splitn_char("", 'z', 5u);
assert ["",""]/~ == splitn_char("z", 'z', 5u);
assert ["ok"]/~ == splitn_char("ok", 'z', 5u);
assert ["z"]/~ == splitn_char("z", 'z', 0u);
assert ["w.x.y"]/~ == splitn_char("w.x.y", '.', 0u);
assert ["w","x.y"]/~ == splitn_char("w.x.y", '.', 1u);
}
#[test]
fn test_splitn_char_2 () {
let data = "ประเทศไทย中华Việt Nam";
assert ["ประเทศไทย中", "Việt Nam"]
assert ["ประเทศไทย中", "Việt Nam"]/~
== splitn_char(data, '华', 1u);
assert ["", "", "XXX", "YYYzWWWz"]
assert ["", "", "XXX", "YYYzWWWz"]/~
== splitn_char("zzXXXzYYYzWWWz", 'z', 3u);
assert ["",""] == splitn_char("z", 'z', 5u);
assert [""] == splitn_char("", 'z', 5u);
assert ["ok"] == splitn_char("ok", 'z', 5u);
assert ["",""]/~ == splitn_char("z", 'z', 5u);
assert [""]/~ == splitn_char("", 'z', 5u);
assert ["ok"]/~ == splitn_char("ok", 'z', 5u);
}
#[test]
fn test_splitn_char_3() {
let data = "ประเทศไทย中华Việt Nam";
assert ["ประเทศไทย中华", "iệt Nam"]
assert ["ประเทศไทย中华", "iệt Nam"]/~
== splitn_char(data, 'V', 1u);
assert ["ประเ", "ศไทย中华Việt Nam"]
assert ["ประเ", "ศไทย中华Việt Nam"]/~
== splitn_char(data, 'ท', 1u);
}
@ -2125,40 +2125,40 @@ mod tests {
t("::hello::there::", "::", 3, "");
let data = "ประเทศไทย中华Việt Nam";
assert ["ประเทศไทย", "Việt Nam"]
assert ["ประเทศไทย", "Việt Nam"]/~
== split_str (data, "中华");
assert ["", "XXX", "YYY", ""]
assert ["", "XXX", "YYY", ""]/~
== split_str("zzXXXzzYYYzz", "zz");
assert ["zz", "zYYYz"]
assert ["zz", "zYYYz"]/~
== split_str("zzXXXzYYYz", "XXX");
assert ["", "XXX", "YYY", ""] == split_str(".XXX.YYY.", ".");
assert [""] == split_str("", ".");
assert ["",""] == split_str("zz", "zz");
assert ["ok"] == split_str("ok", "z");
assert ["","z"] == split_str("zzz", "zz");
assert ["","","z"] == split_str("zzzzz", "zz");
assert ["", "XXX", "YYY", ""]/~ == split_str(".XXX.YYY.", ".");
assert [""]/~ == split_str("", ".");
assert ["",""]/~ == split_str("zz", "zz");
assert ["ok"]/~ == split_str("ok", "z");
assert ["","z"]/~ == split_str("zzz", "zz");
assert ["","","z"]/~ == split_str("zzzzz", "zz");
}
#[test]
fn test_split() {
let data = "ประเทศไทย中华Việt Nam";
assert ["ประเทศไทย中", "Việt Nam"]
assert ["ประเทศไทย中", "Việt Nam"]/~
== split (data, {|cc| cc == '华'});
assert ["", "", "XXX", "YYY", ""]
assert ["", "", "XXX", "YYY", ""]/~
== split("zzXXXzYYYz", char::is_lowercase);
assert ["zz", "", "", "z", "", "", "z"]
assert ["zz", "", "", "z", "", "", "z"]/~
== split("zzXXXzYYYz", char::is_uppercase);
assert ["",""] == split("z", {|cc| cc == 'z'});
assert [""] == split("", {|cc| cc == 'z'});
assert ["ok"] == split("ok", {|cc| cc == 'z'});
assert ["",""]/~ == split("z", {|cc| cc == 'z'});
assert [""]/~ == split("", {|cc| cc == 'z'});
assert ["ok"]/~ == split("ok", {|cc| cc == 'z'});
}
#[test]
@ -2166,34 +2166,34 @@ mod tests {
let lf = "\nMary had a little lamb\nLittle lamb\n";
let crlf = "\r\nMary had a little lamb\r\nLittle lamb\r\n";
assert ["", "Mary had a little lamb", "Little lamb", ""]
assert ["", "Mary had a little lamb", "Little lamb", ""]/~
== lines(lf);
assert ["", "Mary had a little lamb", "Little lamb", ""]
assert ["", "Mary had a little lamb", "Little lamb", ""]/~
== lines_any(lf);
assert ["\r", "Mary had a little lamb\r", "Little lamb\r", ""]
assert ["\r", "Mary had a little lamb\r", "Little lamb\r", ""]/~
== lines(crlf);
assert ["", "Mary had a little lamb", "Little lamb", ""]
assert ["", "Mary had a little lamb", "Little lamb", ""]/~
== lines_any(crlf);
assert [""] == lines ("");
assert [""] == lines_any("");
assert ["",""] == lines ("\n");
assert ["",""] == lines_any("\n");
assert ["banana"] == lines ("banana");
assert ["banana"] == lines_any("banana");
assert [""]/~ == lines ("");
assert [""]/~ == lines_any("");
assert ["",""]/~ == lines ("\n");
assert ["",""]/~ == lines_any("\n");
assert ["banana"]/~ == lines ("banana");
assert ["banana"]/~ == lines_any("banana");
}
#[test]
fn test_words () {
let data = "\nMary had a little lamb\nLittle lamb\n";
assert ["Mary","had","a","little","lamb","Little","lamb"]
assert ["Mary","had","a","little","lamb","Little","lamb"]/~
== words(data);
assert ["ok"] == words("ok");
assert [] == words("");
assert ["ok"]/~ == words("ok");
assert []/~ == words("");
}
#[test]
@ -2250,22 +2250,23 @@ mod tests {
#[test]
fn test_concat() {
fn t(v: [str], s: str) { assert (eq(concat(v), s)); }
t(["you", "know", "I'm", "no", "good"], "youknowI'mnogood");
let v: [str] = [];
fn t(v: [str]/~, s: str) { assert (eq(concat(v), s)); }
t(["you", "know", "I'm", "no", "good"]/~, "youknowI'mnogood");
let v: [str]/~ = []/~;
t(v, "");
t(["hi"], "hi");
t(["hi"]/~, "hi");
}
#[test]
fn test_connect() {
fn t(v: [str], sep: str, s: str) {
fn t(v: [str]/~, sep: str, s: str) {
assert (eq(connect(v, sep), s));
}
t(["you", "know", "I'm", "no", "good"], " ", "you know I'm no good");
let v: [str] = [];
t(["you", "know", "I'm", "no", "good"]/~,
" ", "you know I'm no good");
let v: [str]/~ = []/~;
t(v, " ", "");
t(["hi"], " ", "hi");
t(["hi"]/~, " ", "hi");
}
#[test]
@ -2517,7 +2518,7 @@ mod tests {
#[test]
fn test_unsafe_from_bytes() {
let a = [65u8, 65u8, 65u8, 65u8, 65u8, 65u8, 65u8];
let a = [65u8, 65u8, 65u8, 65u8, 65u8, 65u8, 65u8]/~;
let b = unsafe { unsafe::from_bytes(a) };
assert (b == "AAAAAAA");
}
@ -2534,7 +2535,7 @@ mod tests {
0x56_u8, 0x69_u8, 0xe1_u8,
0xbb_u8, 0x87_u8, 0x74_u8,
0x20_u8, 0x4e_u8, 0x61_u8,
0x6d_u8];
0x6d_u8]/~;
assert ss == from_bytes(bb);
}
@ -2552,7 +2553,7 @@ mod tests {
0x56_u8, 0x69_u8, 0xe1_u8,
0xbb_u8, 0x87_u8, 0x74_u8,
0x20_u8, 0x4e_u8, 0x61_u8,
0x6d_u8];
0x6d_u8]/~;
let _x = from_bytes(bb);
}
@ -2560,7 +2561,7 @@ mod tests {
#[test]
fn test_from_buf() {
unsafe {
let a = [65u8, 65u8, 65u8, 65u8, 65u8, 65u8, 65u8, 0u8];
let a = [65u8, 65u8, 65u8, 65u8, 65u8, 65u8, 65u8, 0u8]/~;
let b = vec::unsafe::to_ptr(a);
let c = unsafe::from_buf(b);
assert (c == "AAAAAAA");
@ -2609,7 +2610,7 @@ mod tests {
fn vec_str_conversions() {
let s1: str = "All mimsy were the borogoves";
let v: [u8] = bytes(s1);
let v: [u8]/~ = bytes(s1);
let s2: str = from_bytes(v);
let mut i: uint = 0u;
let n1: uint = len(s1);
@ -2774,7 +2775,7 @@ mod tests {
#[test]
fn test_chars() {
let ss = "ศไทย中华Việt Nam";
assert ['ศ','ไ','ท','ย','中','华','V','i','ệ','t',' ','N','a','m']
assert ['ศ','ไ','ท','ย','中','华','V','i','ệ','t',' ','N','a','m']/~
== chars(ss);
}
@ -2785,7 +2786,7 @@ mod tests {
[0xd800_u16, 0xdf45_u16, 0xd800_u16, 0xdf3f_u16,
0xd800_u16, 0xdf3b_u16, 0xd800_u16, 0xdf46_u16,
0xd800_u16, 0xdf39_u16, 0xd800_u16, 0xdf3b_u16,
0xd800_u16, 0xdf30_u16, 0x000a_u16]),
0xd800_u16, 0xdf30_u16, 0x000a_u16]/~),
("𐐒𐑉𐐮𐑀𐐲𐑋 𐐏𐐲𐑍\n",
[0xd801_u16, 0xdc12_u16, 0xd801_u16,
@ -2793,7 +2794,7 @@ mod tests {
0xdc40_u16, 0xd801_u16, 0xdc32_u16, 0xd801_u16,
0xdc4b_u16, 0x0020_u16, 0xd801_u16, 0xdc0f_u16,
0xd801_u16, 0xdc32_u16, 0xd801_u16, 0xdc4d_u16,
0x000a_u16]),
0x000a_u16]/~),
("𐌀𐌖𐌋𐌄𐌑𐌉·𐌌𐌄𐌕𐌄𐌋𐌉𐌑\n",
[0xd800_u16, 0xdf00_u16, 0xd800_u16, 0xdf16_u16,
@ -2802,7 +2803,7 @@ mod tests {
0x00b7_u16, 0xd800_u16, 0xdf0c_u16, 0xd800_u16,
0xdf04_u16, 0xd800_u16, 0xdf15_u16, 0xd800_u16,
0xdf04_u16, 0xd800_u16, 0xdf0b_u16, 0xd800_u16,
0xdf09_u16, 0xd800_u16, 0xdf11_u16, 0x000a_u16 ]),
0xdf09_u16, 0xd800_u16, 0xdf11_u16, 0x000a_u16 ]/~),
("𐒋𐒘𐒈𐒑𐒛𐒒 𐒕𐒓 𐒈𐒚𐒍 𐒏𐒜𐒒𐒖𐒆 𐒕𐒆\n",
[0xd801_u16, 0xdc8b_u16, 0xd801_u16, 0xdc98_u16,
@ -2815,7 +2816,7 @@ mod tests {
0xdc9c_u16, 0xd801_u16, 0xdc92_u16, 0xd801_u16,
0xdc96_u16, 0xd801_u16, 0xdc86_u16, 0x0020_u16,
0xd801_u16, 0xdc95_u16, 0xd801_u16, 0xdc86_u16,
0x000a_u16 ]) ];
0x000a_u16 ]/~) ]/~;
for vec::each(pairs) {|p|
let (s, u) = p;

View file

@ -56,7 +56,7 @@ impl <A: to_str copy, B: to_str copy, C: to_str copy> of to_str for (A, B, C){
}
}
impl <A: to_str> of to_str for [A] {
impl <A: to_str> of to_str for [A]/~ {
fn to_str() -> str {
let mut acc = "[", first = true;
for vec::each(self) {|elt|
@ -98,11 +98,12 @@ mod tests {
}
fn test_vectors() {
let x: [int] = [];
assert x.to_str() == "[]";
assert [1].to_str() == "[1]";
assert [1, 2, 3].to_str() == "[1, 2, 3]";
assert [[], [1], [1, 1]].to_str() == "[[], [1], [1, 1]]";
let x: [int]/~ = []/~;
assert x.to_str() == "[]/~";
assert [1]/~.to_str() == "[1]/~";
assert [1, 2, 3]/~.to_str() == "[1, 2, 3]/~";
assert [[]/~, [1]/~, [1, 1]/~]/~.to_str() ==
"[[]/~, [1]/~, [1, 1]/~]/~";
}
fn test_pointer_types() {

View file

@ -88,7 +88,7 @@ Parse a buffer of bytes
`buf` must not be empty
"]
fn parse_buf(buf: [u8], radix: uint) -> option<T> {
fn parse_buf(buf: [u8]/~, radix: uint) -> option<T> {
if vec::len(buf) == 0u { ret none; }
let mut i = vec::len(buf) - 1u;
let mut power = 1u as T;

View file

@ -33,7 +33,7 @@ Both types must have the same size and alignment.
# Example
assert transmute(\"L\") == [76u8, 0u8];
assert transmute(\"L\") == [76u8, 0u8]/~;
"]
unsafe fn transmute<L, G>(-thing: L) -> G {
let newthing = reinterpret_cast(thing);
@ -62,7 +62,7 @@ mod tests {
#[test]
fn test_transmute2() {
unsafe {
assert transmute("L") == [76u8, 0u8];
assert transmute("L") == [76u8, 0u8]/~;
}
}
}

File diff suppressed because it is too large Load diff

View file

@ -5,11 +5,11 @@ export arena, arena_with_size;
import list;
import list::{list, cons, nil};
type chunk = {data: [u8], mut fill: uint};
type chunk = {data: [u8]/~, mut fill: uint};
type arena = {mut chunks: @list<@chunk>};
fn chunk(size: uint) -> @chunk {
let mut v = [];
let mut v = []/~;
vec::reserve(v, size);
@{ data: v, mut fill: 0u }
}

View file

@ -22,7 +22,7 @@ export eq_vec;
// for the case where nbits <= 32.
#[doc = "The bitvector type"]
type bitv = @{storage: [mut uint], nbits: uint};
type bitv = @{storage: [mut uint]/~, nbits: uint};
const uint_bits: uint = 32u + (1u << 32u >> 27u);
@ -183,7 +183,7 @@ Converts the bitvector to a vector of uint with the same length.
Each uint in the resulting vector has either value 0u or 1u.
"]
fn to_vec(v: bitv) -> [uint] {
fn to_vec(v: bitv) -> [uint]/~ {
let sub = {|x|init_to_vec(v, x)};
ret vec::from_fn::<uint>(v.nbits, sub);
}
@ -225,7 +225,7 @@ Compare a bitvector to a vector of uint
The uint vector is expected to only contain the values 0u and 1u. Both the
bitvector and vector must have the same length
"]
fn eq_vec(v0: bitv, v1: [uint]) -> bool {
fn eq_vec(v0: bitv, v1: [uint]/~) -> bool {
assert (v0.nbits == vec::len::<uint>(v1));
let len = v0.nbits;
let mut i = 0u;
@ -262,9 +262,9 @@ mod tests {
fn test_1_element() {
let mut act;
act = bitv(1u, false);
assert (eq_vec(act, [0u]));
assert (eq_vec(act, [0u]/~));
act = bitv(1u, true);
assert (eq_vec(act, [1u]));
assert (eq_vec(act, [1u]/~));
}
#[test]
@ -273,11 +273,11 @@ mod tests {
// all 0
act = bitv(10u, false);
assert (eq_vec(act, [0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u]));
assert (eq_vec(act, [0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u]/~));
// all 1
act = bitv(10u, true);
assert (eq_vec(act, [1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u]));
assert (eq_vec(act, [1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u]/~));
// mixed
act = bitv(10u, false);
@ -286,7 +286,7 @@ mod tests {
set(act, 2u, true);
set(act, 3u, true);
set(act, 4u, true);
assert (eq_vec(act, [1u, 1u, 1u, 1u, 1u, 0u, 0u, 0u, 0u, 0u]));
assert (eq_vec(act, [1u, 1u, 1u, 1u, 1u, 0u, 0u, 0u, 0u, 0u]/~));
// mixed
act = bitv(10u, false);
@ -295,7 +295,7 @@ mod tests {
set(act, 7u, true);
set(act, 8u, true);
set(act, 9u, true);
assert (eq_vec(act, [0u, 0u, 0u, 0u, 0u, 1u, 1u, 1u, 1u, 1u]));
assert (eq_vec(act, [0u, 0u, 0u, 0u, 0u, 1u, 1u, 1u, 1u, 1u]/~));
// mixed
act = bitv(10u, false);
@ -303,7 +303,7 @@ mod tests {
set(act, 3u, true);
set(act, 6u, true);
set(act, 9u, true);
assert (eq_vec(act, [1u, 0u, 0u, 1u, 0u, 0u, 1u, 0u, 0u, 1u]));
assert (eq_vec(act, [1u, 0u, 0u, 1u, 0u, 0u, 1u, 0u, 0u, 1u]/~));
}
#[test]
@ -315,14 +315,14 @@ mod tests {
assert (eq_vec(act,
[0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u,
0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u,
0u, 0u, 0u, 0u, 0u]));
0u, 0u, 0u, 0u, 0u]/~));
// all 1
act = bitv(31u, true);
assert (eq_vec(act,
[1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u,
1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u,
1u, 1u, 1u, 1u, 1u]));
1u, 1u, 1u, 1u, 1u]/~));
// mixed
act = bitv(31u, false);
@ -337,7 +337,7 @@ mod tests {
assert (eq_vec(act,
[1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 0u, 0u, 0u, 0u, 0u,
0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u,
0u, 0u, 0u, 0u, 0u]));
0u, 0u, 0u, 0u, 0u]/~));
// mixed
act = bitv(31u, false);
@ -352,7 +352,7 @@ mod tests {
assert (eq_vec(act,
[0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u,
0u, 0u, 0u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 0u, 0u,
0u, 0u, 0u, 0u, 0u]));
0u, 0u, 0u, 0u, 0u]/~));
// mixed
act = bitv(31u, false);
@ -366,7 +366,7 @@ mod tests {
assert (eq_vec(act,
[0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u,
0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 1u, 1u,
1u, 1u, 1u, 1u, 1u]));
1u, 1u, 1u, 1u, 1u]/~));
// mixed
act = bitv(31u, false);
@ -376,7 +376,7 @@ mod tests {
assert (eq_vec(act,
[0u, 0u, 0u, 1u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u,
0u, 0u, 0u, 0u, 1u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u,
0u, 0u, 0u, 0u, 1u]));
0u, 0u, 0u, 0u, 1u]/~));
}
#[test]
@ -388,14 +388,14 @@ mod tests {
assert (eq_vec(act,
[0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u,
0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u,
0u, 0u, 0u, 0u, 0u, 0u]));
0u, 0u, 0u, 0u, 0u, 0u]/~));
// all 1
act = bitv(32u, true);
assert (eq_vec(act,
[1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u,
1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u,
1u, 1u, 1u, 1u, 1u, 1u]));
1u, 1u, 1u, 1u, 1u, 1u]/~));
// mixed
act = bitv(32u, false);
@ -410,7 +410,7 @@ mod tests {
assert (eq_vec(act,
[1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 0u, 0u, 0u, 0u, 0u,
0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u,
0u, 0u, 0u, 0u, 0u, 0u]));
0u, 0u, 0u, 0u, 0u, 0u]/~));
// mixed
act = bitv(32u, false);
@ -425,7 +425,7 @@ mod tests {
assert (eq_vec(act,
[0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u,
0u, 0u, 0u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 0u, 0u,
0u, 0u, 0u, 0u, 0u, 0u]));
0u, 0u, 0u, 0u, 0u, 0u]/~));
// mixed
act = bitv(32u, false);
@ -440,7 +440,7 @@ mod tests {
assert (eq_vec(act,
[0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u,
0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 1u, 1u,
1u, 1u, 1u, 1u, 1u, 1u]));
1u, 1u, 1u, 1u, 1u, 1u]/~));
// mixed
act = bitv(32u, false);
@ -451,7 +451,7 @@ mod tests {
assert (eq_vec(act,
[0u, 0u, 0u, 1u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u,
0u, 0u, 0u, 0u, 1u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u,
0u, 0u, 0u, 0u, 1u, 1u]));
0u, 0u, 0u, 0u, 1u, 1u]/~));
}
#[test]
@ -463,14 +463,14 @@ mod tests {
assert (eq_vec(act,
[0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u,
0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u,
0u, 0u, 0u, 0u, 0u, 0u, 0u]));
0u, 0u, 0u, 0u, 0u, 0u, 0u]/~));
// all 1
act = bitv(33u, true);
assert (eq_vec(act,
[1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u,
1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u,
1u, 1u, 1u, 1u, 1u, 1u, 1u]));
1u, 1u, 1u, 1u, 1u, 1u, 1u]/~));
// mixed
act = bitv(33u, false);
@ -485,7 +485,7 @@ mod tests {
assert (eq_vec(act,
[1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 0u, 0u, 0u, 0u, 0u,
0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u,
0u, 0u, 0u, 0u, 0u, 0u, 0u]));
0u, 0u, 0u, 0u, 0u, 0u, 0u]/~));
// mixed
act = bitv(33u, false);
@ -500,7 +500,7 @@ mod tests {
assert (eq_vec(act,
[0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u,
0u, 0u, 0u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 1u, 0u, 0u,
0u, 0u, 0u, 0u, 0u, 0u, 0u]));
0u, 0u, 0u, 0u, 0u, 0u, 0u]/~));
// mixed
act = bitv(33u, false);
@ -515,7 +515,7 @@ mod tests {
assert (eq_vec(act,
[0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u,
0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 1u, 1u,
1u, 1u, 1u, 1u, 1u, 1u, 0u]));
1u, 1u, 1u, 1u, 1u, 1u, 0u]/~));
// mixed
act = bitv(33u, false);
@ -527,7 +527,7 @@ mod tests {
assert (eq_vec(act,
[0u, 0u, 0u, 1u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u,
0u, 0u, 0u, 0u, 1u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u,
0u, 0u, 0u, 0u, 1u, 1u, 1u]));
0u, 0u, 0u, 0u, 1u, 1u, 1u]/~));
}
#[test]

View file

@ -24,10 +24,10 @@ fn create<T: copy>() -> t<T> {
* Grow is only called on full elts, so nelts is also len(elts), unlike
* elsewhere.
*/
fn grow<T: copy>(nelts: uint, lo: uint, -elts: [mut cell<T>]) ->
[mut cell<T>] {
fn grow<T: copy>(nelts: uint, lo: uint, -elts: [mut cell<T>]/~) ->
[mut cell<T>]/~ {
assert (nelts == vec::len(elts));
let mut rv = [mut];
let mut rv = [mut]/~;
let mut i = 0u;
let nalloc = uint::next_power_of_two(nelts + 1u);

View file

@ -35,7 +35,7 @@ type ebml_state = {ebml_tag: ebml_tag, tag_pos: uint, data_pos: uint};
// modules within this file.
// ebml reading
type doc = {data: @[u8], start: uint, end: uint};
type doc = {data: @[u8]/~, start: uint, end: uint};
type tagged_doc = {tag: uint, doc: doc};
@ -62,11 +62,11 @@ fn vuint_at(data: [u8]/&, start: uint) -> {val: uint, next: uint} {
} else { #error("vint too big"); fail; }
}
fn doc(data: @[u8]) -> doc {
fn doc(data: @[u8]/~) -> doc {
ret {data: data, start: 0u, end: vec::len::<u8>(*data)};
}
fn doc_at(data: @[u8], start: uint) -> tagged_doc {
fn doc_at(data: @[u8]/~, start: uint) -> tagged_doc {
let elt_tag = vuint_at(*data, start);
let elt_size = vuint_at(*data, elt_tag.next);
let end = elt_size.next + elt_size.val;
@ -119,7 +119,7 @@ fn tagged_docs(d: doc, tg: uint, it: fn(doc)) {
}
}
fn doc_data(d: doc) -> [u8] { ret vec::slice::<u8>(*d.data, d.start, d.end); }
fn doc_data(d: doc) -> [u8]/~ { vec::slice::<u8>(*d.data, d.start, d.end) }
fn doc_as_str(d: doc) -> str { ret str::from_bytes(doc_data(d)); }
@ -149,7 +149,7 @@ fn doc_as_i32(d: doc) -> i32 { doc_as_u32(d) as i32 }
fn doc_as_i64(d: doc) -> i64 { doc_as_u64(d) as i64 }
// ebml writing
type writer = {writer: io::writer, mut size_positions: [uint]};
type writer = {writer: io::writer, mut size_positions: [uint]/~};
fn write_sized_vuint(w: io::writer, n: uint, size: uint) {
alt size {
@ -180,7 +180,7 @@ fn write_vuint(w: io::writer, n: uint) {
}
fn writer(w: io::writer) -> writer {
let size_positions: [uint] = [];
let size_positions: [uint]/~ = []/~;
ret {writer: w, mut size_positions: size_positions};
}

View file

@ -28,12 +28,12 @@ name following -o, and accepts both -h and --help as optional flags.
}
fn print_usage(program: str) {
io::println(\"Usage: \" + program + \" [options]\");
io::println(\"Usage: \" + program + \" [options]/~\");
io::println(\"-o\t\tOutput\");
io::println(\"-h --help\tUsage\");
}
fn main(args: [str]) {
fn main(args: [str]/~) {
check vec::is_not_empty(args);
let program : str = vec::head(args);
@ -42,7 +42,7 @@ name following -o, and accepts both -h and --help as optional flags.
optopt(\"o\"),
optflag(\"h\"),
optflag(\"help\")
];
]/~;
let match = alt getopts(vec::tail(args), opts) {
result::ok(m) { m }
result::err(f) { fail fail_str(f) }
@ -134,7 +134,7 @@ enum optval { val(str), given, }
The result of checking command line arguments. Contains a vector
of matches and a vector of free strings.
"]
type match = {opts: [opt], vals: [[optval]], free: [str]};
type match = {opts: [opt]/~, vals: [[optval]/~]/~, free: [str]/~};
fn is_arg(arg: str) -> bool {
ret str::len(arg) > 1u && arg[0] == '-' as u8;
@ -144,7 +144,7 @@ fn name_str(nm: name) -> str {
ret alt nm { short(ch) { str::from_char(ch) } long(s) { s } };
}
fn find_opt(opts: [opt], nm: name) -> option<uint> {
fn find_opt(opts: [opt]/~, nm: name) -> option<uint> {
vec::position(opts, { |opt| opt.name == nm })
}
@ -188,21 +188,21 @@ On success returns `ok(opt)`. Use functions such as `opt_present` `opt_str`,
etc. to interrogate results. Returns `err(fail_)` on failure. Use <fail_str>
to get an error message.
"]
fn getopts(args: [str], opts: [opt]) -> result unsafe {
fn getopts(args: [str]/~, opts: [opt]/~) -> result unsafe {
let n_opts = vec::len::<opt>(opts);
fn f(_x: uint) -> [optval] { ret []; }
fn f(_x: uint) -> [optval]/~ { ret []/~; }
let vals = vec::to_mut(vec::from_fn(n_opts, f));
let mut free: [str] = [];
let mut free: [str]/~ = []/~;
let l = vec::len(args);
let mut i = 0u;
while i < l {
let cur = args[i];
let curlen = str::len(cur);
if !is_arg(cur) {
free += [cur];
free += [cur]/~;
} else if str::eq(cur, "--") {
let mut j = i + 1u;
while j < l { free += [args[j]]; j += 1u; }
while j < l { vec::push(free, args[j]); j += 1u; }
break;
} else {
let mut names;
@ -211,19 +211,19 @@ fn getopts(args: [str], opts: [opt]) -> result unsafe {
let tail = str::slice(cur, 2u, curlen);
let tail_eq = str::splitn_char(tail, '=', 1u);
if vec::len(tail_eq) <= 1u {
names = [long(tail)];
names = [long(tail)]/~;
} else {
names =
[long(tail_eq[0])];
[long(tail_eq[0])]/~;
i_arg =
option::some::<str>(tail_eq[1]);
}
} else {
let mut j = 1u;
names = [];
names = []/~;
while j < curlen {
let range = str::char_range_at(cur, j);
names += [short(range.ch)];
names += [short(range.ch)]/~;
j = range.next;
}
}
@ -239,22 +239,23 @@ fn getopts(args: [str], opts: [opt]) -> result unsafe {
if !option::is_none::<str>(i_arg) {
ret err(unexpected_argument(name_str(nm)));
}
vals[optid] += [given];
vec::push(vals[optid], given);
}
maybe {
if !option::is_none::<str>(i_arg) {
vals[optid] += [val(option::get(i_arg))];
vec::push(vals[optid], val(option::get(i_arg)));
} else if name_pos < vec::len::<name>(names) ||
i + 1u == l || is_arg(args[i + 1u]) {
vals[optid] += [given];
} else { i += 1u; vals[optid] += [val(args[i])]; }
vec::push(vals[optid], given);
} else { i += 1u; vec::push(vals[optid], val(args[i])); }
}
yes {
if !option::is_none::<str>(i_arg) {
vals[optid] += [val(option::get::<str>(i_arg))];
vec::push(vals[optid],
val(option::get::<str>(i_arg)));
} else if i + 1u == l {
ret err(argument_missing(name_str(nm)));
} else { i += 1u; vals[optid] += [val(args[i])]; }
} else { i += 1u; vec::push(vals[optid], val(args[i])); }
}
}
}
@ -280,7 +281,7 @@ fn getopts(args: [str], opts: [opt]) -> result unsafe {
ret ok({opts: opts, vals: vec::from_mut(vals), free: free});
}
fn opt_vals(m: match, nm: str) -> [optval] {
fn opt_vals(m: match, nm: str) -> [optval]/~ {
ret alt find_opt(m.opts, mkname(nm)) {
some(id) { m.vals[id] }
none { #error("No option '%s' defined", nm); fail }
@ -295,7 +296,7 @@ fn opt_present(m: match, nm: str) -> bool {
}
#[doc = "Returns true if any of several options were matched"]
fn opts_present(m: match, names: [str]) -> bool {
fn opts_present(m: match, names: [str]/~) -> bool {
for vec::each(names) {|nm|
alt find_opt(m.opts, mkname(nm)) {
some(_) { ret true; }
@ -321,7 +322,7 @@ Returns the string argument supplied to one of several matching options
Fails if the no option was provided from the given list, or if the no such
option took an argument
"]
fn opts_str(m: match, names: [str]) -> str {
fn opts_str(m: match, names: [str]/~) -> str {
for vec::each(names) {|nm|
alt opt_val(m, nm) {
val(s) { ret s }
@ -337,10 +338,10 @@ Returns a vector of the arguments provided to all matches of the given option.
Used when an option accepts multiple values.
"]
fn opt_strs(m: match, nm: str) -> [str] {
let mut acc: [str] = [];
fn opt_strs(m: match, nm: str) -> [str]/~ {
let mut acc: [str]/~ = []/~;
for vec::each(opt_vals(m, nm)) {|v|
alt v { val(s) { acc += [s]; } _ { } }
alt v { val(s) { acc += [s]/~; } _ { } }
}
ret acc;
}
@ -395,8 +396,8 @@ mod tests {
// Tests for reqopt
#[test]
fn test_reqopt_long() {
let args = ["--test=20"];
let opts = [reqopt("test")];
let args = ["--test=20"]/~;
let opts = [reqopt("test")]/~;
let rs = getopts(args, opts);
alt check rs {
ok(m) {
@ -408,8 +409,8 @@ mod tests {
#[test]
fn test_reqopt_long_missing() {
let args = ["blah"];
let opts = [reqopt("test")];
let args = ["blah"]/~;
let opts = [reqopt("test")]/~;
let rs = getopts(args, opts);
alt rs {
err(f) { check_fail_type(f, option_missing_); }
@ -419,8 +420,8 @@ mod tests {
#[test]
fn test_reqopt_long_no_arg() {
let args = ["--test"];
let opts = [reqopt("test")];
let args = ["--test"]/~;
let opts = [reqopt("test")]/~;
let rs = getopts(args, opts);
alt rs {
err(f) { check_fail_type(f, argument_missing_); }
@ -430,8 +431,8 @@ mod tests {
#[test]
fn test_reqopt_long_multi() {
let args = ["--test=20", "--test=30"];
let opts = [reqopt("test")];
let args = ["--test=20", "--test=30"]/~;
let opts = [reqopt("test")]/~;
let rs = getopts(args, opts);
alt rs {
err(f) { check_fail_type(f, option_duplicated_); }
@ -441,8 +442,8 @@ mod tests {
#[test]
fn test_reqopt_short() {
let args = ["-t", "20"];
let opts = [reqopt("t")];
let args = ["-t", "20"]/~;
let opts = [reqopt("t")]/~;
let rs = getopts(args, opts);
alt rs {
ok(m) {
@ -455,8 +456,8 @@ mod tests {
#[test]
fn test_reqopt_short_missing() {
let args = ["blah"];
let opts = [reqopt("t")];
let args = ["blah"]/~;
let opts = [reqopt("t")]/~;
let rs = getopts(args, opts);
alt rs {
err(f) { check_fail_type(f, option_missing_); }
@ -466,8 +467,8 @@ mod tests {
#[test]
fn test_reqopt_short_no_arg() {
let args = ["-t"];
let opts = [reqopt("t")];
let args = ["-t"]/~;
let opts = [reqopt("t")]/~;
let rs = getopts(args, opts);
alt rs {
err(f) { check_fail_type(f, argument_missing_); }
@ -477,8 +478,8 @@ mod tests {
#[test]
fn test_reqopt_short_multi() {
let args = ["-t", "20", "-t", "30"];
let opts = [reqopt("t")];
let args = ["-t", "20", "-t", "30"]/~;
let opts = [reqopt("t")]/~;
let rs = getopts(args, opts);
alt rs {
err(f) { check_fail_type(f, option_duplicated_); }
@ -490,8 +491,8 @@ mod tests {
// Tests for optopt
#[test]
fn test_optopt_long() {
let args = ["--test=20"];
let opts = [optopt("test")];
let args = ["--test=20"]/~;
let opts = [optopt("test")]/~;
let rs = getopts(args, opts);
alt rs {
ok(m) {
@ -504,8 +505,8 @@ mod tests {
#[test]
fn test_optopt_long_missing() {
let args = ["blah"];
let opts = [optopt("test")];
let args = ["blah"]/~;
let opts = [optopt("test")]/~;
let rs = getopts(args, opts);
alt rs {
ok(m) { assert (!opt_present(m, "test")); }
@ -515,8 +516,8 @@ mod tests {
#[test]
fn test_optopt_long_no_arg() {
let args = ["--test"];
let opts = [optopt("test")];
let args = ["--test"]/~;
let opts = [optopt("test")]/~;
let rs = getopts(args, opts);
alt rs {
err(f) { check_fail_type(f, argument_missing_); }
@ -526,8 +527,8 @@ mod tests {
#[test]
fn test_optopt_long_multi() {
let args = ["--test=20", "--test=30"];
let opts = [optopt("test")];
let args = ["--test=20", "--test=30"]/~;
let opts = [optopt("test")]/~;
let rs = getopts(args, opts);
alt rs {
err(f) { check_fail_type(f, option_duplicated_); }
@ -537,8 +538,8 @@ mod tests {
#[test]
fn test_optopt_short() {
let args = ["-t", "20"];
let opts = [optopt("t")];
let args = ["-t", "20"]/~;
let opts = [optopt("t")]/~;
let rs = getopts(args, opts);
alt rs {
ok(m) {
@ -551,8 +552,8 @@ mod tests {
#[test]
fn test_optopt_short_missing() {
let args = ["blah"];
let opts = [optopt("t")];
let args = ["blah"]/~;
let opts = [optopt("t")]/~;
let rs = getopts(args, opts);
alt rs {
ok(m) { assert (!opt_present(m, "t")); }
@ -562,8 +563,8 @@ mod tests {
#[test]
fn test_optopt_short_no_arg() {
let args = ["-t"];
let opts = [optopt("t")];
let args = ["-t"]/~;
let opts = [optopt("t")]/~;
let rs = getopts(args, opts);
alt rs {
err(f) { check_fail_type(f, argument_missing_); }
@ -573,8 +574,8 @@ mod tests {
#[test]
fn test_optopt_short_multi() {
let args = ["-t", "20", "-t", "30"];
let opts = [optopt("t")];
let args = ["-t", "20", "-t", "30"]/~;
let opts = [optopt("t")]/~;
let rs = getopts(args, opts);
alt rs {
err(f) { check_fail_type(f, option_duplicated_); }
@ -586,8 +587,8 @@ mod tests {
// Tests for optflag
#[test]
fn test_optflag_long() {
let args = ["--test"];
let opts = [optflag("test")];
let args = ["--test"]/~;
let opts = [optflag("test")]/~;
let rs = getopts(args, opts);
alt rs {
ok(m) { assert (opt_present(m, "test")); }
@ -597,8 +598,8 @@ mod tests {
#[test]
fn test_optflag_long_missing() {
let args = ["blah"];
let opts = [optflag("test")];
let args = ["blah"]/~;
let opts = [optflag("test")]/~;
let rs = getopts(args, opts);
alt rs {
ok(m) { assert (!opt_present(m, "test")); }
@ -608,8 +609,8 @@ mod tests {
#[test]
fn test_optflag_long_arg() {
let args = ["--test=20"];
let opts = [optflag("test")];
let args = ["--test=20"]/~;
let opts = [optflag("test")]/~;
let rs = getopts(args, opts);
alt rs {
err(f) {
@ -622,8 +623,8 @@ mod tests {
#[test]
fn test_optflag_long_multi() {
let args = ["--test", "--test"];
let opts = [optflag("test")];
let args = ["--test", "--test"]/~;
let opts = [optflag("test")]/~;
let rs = getopts(args, opts);
alt rs {
err(f) { check_fail_type(f, option_duplicated_); }
@ -633,8 +634,8 @@ mod tests {
#[test]
fn test_optflag_short() {
let args = ["-t"];
let opts = [optflag("t")];
let args = ["-t"]/~;
let opts = [optflag("t")]/~;
let rs = getopts(args, opts);
alt rs {
ok(m) { assert (opt_present(m, "t")); }
@ -644,8 +645,8 @@ mod tests {
#[test]
fn test_optflag_short_missing() {
let args = ["blah"];
let opts = [optflag("t")];
let args = ["blah"]/~;
let opts = [optflag("t")]/~;
let rs = getopts(args, opts);
alt rs {
ok(m) { assert (!opt_present(m, "t")); }
@ -655,8 +656,8 @@ mod tests {
#[test]
fn test_optflag_short_arg() {
let args = ["-t", "20"];
let opts = [optflag("t")];
let args = ["-t", "20"]/~;
let opts = [optflag("t")]/~;
let rs = getopts(args, opts);
alt rs {
ok(m) {
@ -670,8 +671,8 @@ mod tests {
#[test]
fn test_optflag_short_multi() {
let args = ["-t", "-t"];
let opts = [optflag("t")];
let args = ["-t", "-t"]/~;
let opts = [optflag("t")]/~;
let rs = getopts(args, opts);
alt rs {
err(f) { check_fail_type(f, option_duplicated_); }
@ -683,8 +684,8 @@ mod tests {
// Tests for optmulti
#[test]
fn test_optmulti_long() {
let args = ["--test=20"];
let opts = [optmulti("test")];
let args = ["--test=20"]/~;
let opts = [optmulti("test")]/~;
let rs = getopts(args, opts);
alt rs {
ok(m) {
@ -697,8 +698,8 @@ mod tests {
#[test]
fn test_optmulti_long_missing() {
let args = ["blah"];
let opts = [optmulti("test")];
let args = ["blah"]/~;
let opts = [optmulti("test")]/~;
let rs = getopts(args, opts);
alt rs {
ok(m) { assert (!opt_present(m, "test")); }
@ -708,8 +709,8 @@ mod tests {
#[test]
fn test_optmulti_long_no_arg() {
let args = ["--test"];
let opts = [optmulti("test")];
let args = ["--test"]/~;
let opts = [optmulti("test")]/~;
let rs = getopts(args, opts);
alt rs {
err(f) { check_fail_type(f, argument_missing_); }
@ -719,8 +720,8 @@ mod tests {
#[test]
fn test_optmulti_long_multi() {
let args = ["--test=20", "--test=30"];
let opts = [optmulti("test")];
let args = ["--test=20", "--test=30"]/~;
let opts = [optmulti("test")]/~;
let rs = getopts(args, opts);
alt rs {
ok(m) {
@ -735,8 +736,8 @@ mod tests {
#[test]
fn test_optmulti_short() {
let args = ["-t", "20"];
let opts = [optmulti("t")];
let args = ["-t", "20"]/~;
let opts = [optmulti("t")]/~;
let rs = getopts(args, opts);
alt rs {
ok(m) {
@ -749,8 +750,8 @@ mod tests {
#[test]
fn test_optmulti_short_missing() {
let args = ["blah"];
let opts = [optmulti("t")];
let args = ["blah"]/~;
let opts = [optmulti("t")]/~;
let rs = getopts(args, opts);
alt rs {
ok(m) { assert (!opt_present(m, "t")); }
@ -760,8 +761,8 @@ mod tests {
#[test]
fn test_optmulti_short_no_arg() {
let args = ["-t"];
let opts = [optmulti("t")];
let args = ["-t"]/~;
let opts = [optmulti("t")]/~;
let rs = getopts(args, opts);
alt rs {
err(f) { check_fail_type(f, argument_missing_); }
@ -771,8 +772,8 @@ mod tests {
#[test]
fn test_optmulti_short_multi() {
let args = ["-t", "20", "-t", "30"];
let opts = [optmulti("t")];
let args = ["-t", "20", "-t", "30"]/~;
let opts = [optmulti("t")]/~;
let rs = getopts(args, opts);
alt rs {
ok(m) {
@ -787,8 +788,8 @@ mod tests {
#[test]
fn test_unrecognized_option_long() {
let args = ["--untest"];
let opts = [optmulti("t")];
let args = ["--untest"]/~;
let opts = [optmulti("t")]/~;
let rs = getopts(args, opts);
alt rs {
err(f) { check_fail_type(f, unrecognized_option_); }
@ -798,8 +799,8 @@ mod tests {
#[test]
fn test_unrecognized_option_short() {
let args = ["-t"];
let opts = [optmulti("test")];
let args = ["-t"]/~;
let opts = [optmulti("test")]/~;
let rs = getopts(args, opts);
alt rs {
err(f) { check_fail_type(f, unrecognized_option_); }
@ -811,11 +812,11 @@ mod tests {
fn test_combined() {
let args =
["prog", "free1", "-s", "20", "free2", "--flag", "--long=30",
"-f", "-m", "40", "-m", "50", "-n", "-A B", "-n", "-60 70"];
"-f", "-m", "40", "-m", "50", "-n", "-A B", "-n", "-60 70"]/~;
let opts =
[optopt("s"), optflag("flag"), reqopt("long"),
optflag("f"), optmulti("m"), optmulti("n"),
optopt("notpresent")];
optopt("notpresent")]/~;
let rs = getopts(args, opts);
alt rs {
ok(m) {
@ -838,23 +839,23 @@ mod tests {
#[test]
fn test_multi() {
let args = ["-e", "foo", "--encrypt", "foo"];
let opts = [optopt("e"), optopt("encrypt")];
let args = ["-e", "foo", "--encrypt", "foo"]/~;
let opts = [optopt("e"), optopt("encrypt")]/~;
let match = alt getopts(args, opts) {
result::ok(m) { m }
result::err(f) { fail; }
};
assert opts_present(match, ["e"]);
assert opts_present(match, ["encrypt"]);
assert opts_present(match, ["encrypt", "e"]);
assert opts_present(match, ["e", "encrypt"]);
assert !opts_present(match, ["thing"]);
assert !opts_present(match, []);
assert opts_present(match, ["e"]/~);
assert opts_present(match, ["encrypt"]/~);
assert opts_present(match, ["encrypt", "e"]/~);
assert opts_present(match, ["e", "encrypt"]/~);
assert !opts_present(match, ["thing"]/~);
assert !opts_present(match, []/~);
assert opts_str(match, ["e"]) == "foo";
assert opts_str(match, ["encrypt"]) == "foo";
assert opts_str(match, ["e", "encrypt"]) == "foo";
assert opts_str(match, ["encrypt", "e"]) == "foo";
assert opts_str(match, ["e"]/~) == "foo";
assert opts_str(match, ["encrypt"]/~) == "foo";
assert opts_str(match, ["e", "encrypt"]/~) == "foo";
assert opts_str(match, ["encrypt", "e"]/~) == "foo";
}
}

View file

@ -30,7 +30,7 @@ enum json {
num(float),
string(@str),
boolean(bool),
list(@[json]),
list(@[json]/~),
dict(map::hashmap<str, json>),
null,
}
@ -383,7 +383,7 @@ impl parser for parser {
self.bump();
self.parse_whitespace();
let mut values = [];
let mut values = []/~;
if self.ch == ']' {
self.bump();
@ -585,7 +585,7 @@ impl of to_json for @str {
impl <A: to_json copy, B: to_json copy> of to_json for (A, B) {
fn to_json() -> json {
let (a, b) = self;
list(@[a.to_json(), b.to_json()])
list(@[a.to_json(), b.to_json()]/~)
}
}
@ -593,11 +593,11 @@ impl <A: to_json copy, B: to_json copy, C: to_json copy>
of to_json for (A, B, C) {
fn to_json() -> json {
let (a, b, c) = self;
list(@[a.to_json(), b.to_json(), c.to_json()])
list(@[a.to_json(), b.to_json(), c.to_json()]/~)
}
}
impl <A: to_json> of to_json for [A] {
impl <A: to_json> of to_json for [A]/~ {
fn to_json() -> json { list(@self.map { |elt| elt.to_json() }) }
}
@ -632,7 +632,7 @@ impl of to_str::to_str for error {
#[cfg(test)]
mod tests {
fn mk_dict(items: [(str, json)]) -> json {
fn mk_dict(items: [(str, json)]/~) -> json {
let d = map::str_hash();
vec::iter(items) { |item|
@ -670,26 +670,26 @@ mod tests {
#[test]
fn test_write_list() {
assert to_str(list(@[])) == "[]";
assert to_str(list(@[boolean(true)])) == "[true]";
assert to_str(list(@[]/~)) == "[]";
assert to_str(list(@[boolean(true)]/~)) == "[true]";
assert to_str(list(@[
boolean(false),
null,
list(@[string(@"foo\nbar"), num(3.5f)])
])) == "[false, null, [\"foo\\nbar\", 3.5]]";
list(@[string(@"foo\nbar"), num(3.5f)]/~)
]/~)) == "[false, null, [\"foo\\nbar\", 3.5]]";
}
#[test]
fn test_write_dict() {
assert to_str(mk_dict([])) == "{}";
assert to_str(mk_dict([("a", boolean(true))])) == "{ \"a\": true }";
assert to_str(mk_dict([]/~)) == "{}";
assert to_str(mk_dict([("a", boolean(true))]/~)) == "{ \"a\": true }";
assert to_str(mk_dict([
("a", boolean(true)),
("b", list(@[
mk_dict([("c", string(@"\x0c\r"))]),
mk_dict([("d", string(@""))])
]))
])) ==
mk_dict([("c", string(@"\x0c\r"))]/~),
mk_dict([("d", string(@""))]/~)
]/~))
]/~)) ==
"{ " +
"\"a\": true, " +
"\"b\": [" +
@ -709,7 +709,7 @@ mod tests {
err({line: 1u, col: 6u, msg: @"trailing characters"});
assert from_str("1a") ==
err({line: 1u, col: 2u, msg: @"trailing characters"});
assert from_str("[]a") ==
assert from_str("[]/~a") ==
err({line: 1u, col: 3u, msg: @"trailing characters"});
assert from_str("{}a") ==
err({line: 1u, col: 3u, msg: @"trailing characters"});
@ -798,15 +798,15 @@ mod tests {
assert from_str("[6 7]") ==
err({line: 1u, col: 4u, msg: @"expecting ',' or ']'"});
assert from_str("[]") == ok(list(@[]));
assert from_str("[ ]") == ok(list(@[]));
assert from_str("[true]") == ok(list(@[boolean(true)]));
assert from_str("[ false ]") == ok(list(@[boolean(false)]));
assert from_str("[null]") == ok(list(@[null]));
assert from_str("[3, 1]") == ok(list(@[num(3f), num(1f)]));
assert from_str("\n[3, 2]\n") == ok(list(@[num(3f), num(2f)]));
assert from_str("[]") == ok(list(@[]/~));
assert from_str("[ ]") == ok(list(@[]/~));
assert from_str("[true]") == ok(list(@[boolean(true)]/~));
assert from_str("[ false ]") == ok(list(@[boolean(false)]/~));
assert from_str("[null]") == ok(list(@[null]/~));
assert from_str("[3, 1]") == ok(list(@[num(3f), num(1f)]/~));
assert from_str("\n[3, 2]\n") == ok(list(@[num(3f), num(2f)]/~));
assert from_str("[2, [4, 1]]") ==
ok(list(@[num(2f), list(@[num(4f), num(1f)])]));
ok(list(@[num(2f), list(@[num(4f), num(1f)]/~)]/~));
}
#[test]
@ -835,23 +835,23 @@ mod tests {
assert from_str("{\"a\":1,") ==
err({line: 1u, col: 8u, msg: @"EOF while parsing object"});
assert eq(result::get(from_str("{}")), mk_dict([]));
assert eq(result::get(from_str("{}")), mk_dict([]/~));
assert eq(result::get(from_str("{\"a\": 3}")),
mk_dict([("a", num(3.0f))]));
mk_dict([("a", num(3.0f))]/~));
assert eq(result::get(from_str("{ \"a\": null, \"b\" : true }")),
mk_dict([
("a", null),
("b", boolean(true))]));
("b", boolean(true))]/~));
assert eq(result::get(from_str("\n{ \"a\": null, \"b\" : true }\n")),
mk_dict([
("a", null),
("b", boolean(true))]));
("b", boolean(true))]/~));
assert eq(result::get(from_str("{\"a\" : 1.0 ,\"b\": [ true ]}")),
mk_dict([
("a", num(1.0)),
("b", list(@[boolean(true)]))
]));
("b", list(@[boolean(true)]/~))
]/~));
assert eq(result::get(from_str(
"{" +
"\"a\": 1.0, " +
@ -867,10 +867,10 @@ mod tests {
boolean(true),
string(@"foo\nbar"),
mk_dict([
("c", mk_dict([("d", null)]))
])
]))
]));
("c", mk_dict([("d", null)]/~))
]/~)
]/~))
]/~));
}
#[test]

View file

@ -10,7 +10,7 @@ enum list<T> {
}
#[doc = "Create a list from a vector"]
fn from_vec<T: copy>(v: [T]) -> @list<T> {
fn from_vec<T: copy>(v: [T]/~) -> @list<T> {
vec::foldr(v, @nil::<T>, { |h, t| @cons(h, t) })
}
@ -135,9 +135,9 @@ mod tests {
#[test]
fn test_is_empty() {
let empty : @list::list<int> = from_vec([]);
let full1 = from_vec([1]);
let full2 = from_vec(['r', 'u']);
let empty : @list::list<int> = from_vec([]/~);
let full1 = from_vec([1]/~);
let full2 = from_vec(['r', 'u']/~);
assert is_empty(empty);
assert !is_empty(full1);
@ -150,7 +150,7 @@ mod tests {
#[test]
fn test_from_vec() {
let l = from_vec([0, 1, 2]);
let l = from_vec([0, 1, 2]/~);
assert (head(l) == 0);
@ -163,14 +163,14 @@ mod tests {
#[test]
fn test_from_vec_empty() {
let empty : @list::list<int> = from_vec([]);
let empty : @list::list<int> = from_vec([]/~);
assert (empty == @list::nil::<int>);
}
#[test]
fn test_foldl() {
fn add(&&a: uint, &&b: int) -> uint { ret a + (b as uint); }
let l = from_vec([0, 1, 2, 3, 4]);
let l = from_vec([0, 1, 2, 3, 4]/~);
let empty = @list::nil::<int>;
assert (list::foldl(0u, l, add) == 10u);
assert (list::foldl(0u, empty, add) == 0u);
@ -181,21 +181,21 @@ mod tests {
fn sub(&&a: int, &&b: int) -> int {
a - b
}
let l = from_vec([1, 2, 3, 4]);
let l = from_vec([1, 2, 3, 4]/~);
assert (list::foldl(0, l, sub) == -10);
}
#[test]
fn test_find_success() {
fn match(&&i: int) -> bool { ret i == 2; }
let l = from_vec([0, 1, 2]);
let l = from_vec([0, 1, 2]/~);
assert (list::find(l, match) == option::some(2));
}
#[test]
fn test_find_fail() {
fn match(&&_i: int) -> bool { ret false; }
let l = from_vec([0, 1, 2]);
let l = from_vec([0, 1, 2]/~);
let empty = @list::nil::<int>;
assert (list::find(l, match) == option::none::<int>);
assert (list::find(empty, match) == option::none::<int>);
@ -203,7 +203,7 @@ mod tests {
#[test]
fn test_has() {
let l = from_vec([5, 8, 6]);
let l = from_vec([5, 8, 6]/~);
let empty = @list::nil::<int>;
assert (list::has(l, 5));
assert (!list::has(l, 7));
@ -213,7 +213,7 @@ mod tests {
#[test]
fn test_len() {
let l = from_vec([0, 1, 2]);
let l = from_vec([0, 1, 2]/~);
let empty = @list::nil::<int>;
assert (list::len(l) == 3u);
assert (list::len(empty) == 0u);

View file

@ -86,7 +86,7 @@ mod chained {
type t<K, V> = @{
mut count: uint,
mut chains: [mut chain<K,V>],
mut chains: [mut chain<K,V>]/~,
hasher: hashfn<K>,
eqer: eqfn<K>
};
@ -259,7 +259,7 @@ mod chained {
fn each_value(blk: fn(V) -> bool) { self.each { |_k, v| blk(v)} }
}
fn chains<K,V>(nchains: uint) -> [mut chain<K,V>] {
fn chains<K,V>(nchains: uint) -> [mut chain<K,V>]/~ {
ret vec::to_mut(vec::from_elem(nchains, absent));
}
@ -299,7 +299,7 @@ fn box_str_hash<V: copy>() -> hashmap<@str, V> {
}
#[doc = "Construct a hashmap for byte string keys"]
fn bytes_hash<V: copy>() -> hashmap<[u8], V> {
fn bytes_hash<V: copy>() -> hashmap<[u8]/~, V> {
ret hashmap(vec::u8::hash, vec::u8::eq);
}
@ -323,10 +323,10 @@ fn set_add<K: const copy>(set: set<K>, key: K) -> bool {
#[doc = "
Convert a set into a vector.
"]
fn vec_from_set<T: copy>(s: set<T>) -> [T] {
let mut v = [];
fn vec_from_set<T: copy>(s: set<T>) -> [T]/~ {
let mut v = []/~;
s.each_key() {|k|
v += [k];
v += [k]/~;
true
};
v
@ -334,7 +334,7 @@ fn vec_from_set<T: copy>(s: set<T>) -> [T] {
#[doc = "Construct a hashmap from a vector"]
fn hash_from_vec<K: const copy, V: copy>(hasher: hashfn<K>, eqer: eqfn<K>,
items: [(K, V)]) -> hashmap<K, V> {
items: [(K, V)]/~) -> hashmap<K, V> {
let map = hashmap(hasher, eqer);
vec::iter(items) { |item|
let (key, value) = item;
@ -344,22 +344,22 @@ fn hash_from_vec<K: const copy, V: copy>(hasher: hashfn<K>, eqer: eqfn<K>,
}
#[doc = "Construct a hashmap from a vector with string keys"]
fn hash_from_strs<V: copy>(items: [(str, V)]) -> hashmap<str, V> {
fn hash_from_strs<V: copy>(items: [(str, V)]/~) -> hashmap<str, V> {
hash_from_vec(str::hash, str::eq, items)
}
#[doc = "Construct a hashmap from a vector with byte keys"]
fn hash_from_bytes<V: copy>(items: [([u8], V)]) -> hashmap<[u8], V> {
fn hash_from_bytes<V: copy>(items: [([u8]/~, V)]/~) -> hashmap<[u8]/~, V> {
hash_from_vec(vec::u8::hash, vec::u8::eq, items)
}
#[doc = "Construct a hashmap from a vector with int keys"]
fn hash_from_ints<V: copy>(items: [(int, V)]) -> hashmap<int, V> {
fn hash_from_ints<V: copy>(items: [(int, V)]/~) -> hashmap<int, V> {
hash_from_vec(int::hash, int::eq, items)
}
#[doc = "Construct a hashmap from a vector with uint keys"]
fn hash_from_uints<V: copy>(items: [(uint, V)]) -> hashmap<uint, V> {
fn hash_from_uints<V: copy>(items: [(uint, V)]/~) -> hashmap<uint, V> {
hash_from_vec(uint::hash, uint::eq, items)
}
@ -612,7 +612,7 @@ mod tests {
("a", 1),
("b", 2),
("c", 3)
]);
]/~);
assert map.size() == 3u;
assert map.get("a") == 1;
assert map.get("b") == 2;

View file

@ -1,21 +1,21 @@
fn md4(msg: [u8]) -> {a: u32, b: u32, c: u32, d: u32} {
fn md4(msg: [u8]/~) -> {a: u32, b: u32, c: u32, d: u32} {
// subtle: if orig_len is merely uint, then the code below
// which performs shifts by 32 bits or more has undefined
// results.
let orig_len: u64 = (vec::len(msg) * 8u) as u64;
// pad message
let mut msg = msg + [0x80u8];
let mut msg = msg + [0x80u8]/~;
let mut bitlen = orig_len + 8u64;
while (bitlen + 64u64) % 512u64 > 0u64 {
msg += [0u8];
msg += [0u8]/~;
bitlen += 8u64;
}
// append length
let mut i = 0u64;
while i < 8u64 {
msg += [(orig_len >> (i * 8u64)) as u8];
msg += [(orig_len >> (i * 8u64)) as u8]/~;
i += 1u64;
}
@ -82,7 +82,7 @@ fn md4(msg: [u8]) -> {a: u32, b: u32, c: u32, d: u32} {
ret {a: a, b: b, c: c, d: d};
}
fn md4_str(msg: [u8]) -> str {
fn md4_str(msg: [u8]/~) -> str {
let {a, b, c, d} = md4(msg);
fn app(a: u32, b: u32, c: u32, d: u32, f: fn(u32)) {
f(a); f(b); f(c); f(d);

View file

@ -113,7 +113,7 @@ fn connect(input_ip: ip::ip_addr, port: uint,
closed_signal_ch: comm::chan(closed_signal_po)
};
let conn_data_ptr = ptr::addr_of(conn_data);
let reader_po = comm::port::<result::result<[u8], tcp_err_data>>();
let reader_po = comm::port::<result::result<[u8]/~, tcp_err_data>>();
let stream_handle_ptr = malloc_uv_tcp_t();
*(stream_handle_ptr as *mut uv::ll::uv_tcp_t) = uv::ll::tcp_t();
let socket_data = @{
@ -206,7 +206,7 @@ Write binary data to a tcp stream; Blocks until operation completes
# Arguments
* sock - a `tcp_socket` to write to
* raw_write_data - a vector of `[u8]` that will be written to the stream.
* raw_write_data - a vector of `[u8]/~` that will be written to the stream.
This value must remain valid for the duration of the `write` call
# Returns
@ -214,7 +214,7 @@ This value must remain valid for the duration of the `write` call
A `result` object with a `nil` value as the `ok` variant, or a `tcp_err_data`
value as the `err` variant
"]
fn write(sock: tcp_socket, raw_write_data: [u8])
fn write(sock: tcp_socket, raw_write_data: [u8]/~)
-> result::result<(), tcp_err_data> unsafe {
let socket_data_ptr = ptr::addr_of(*(sock.socket_data));
write_common_impl(socket_data_ptr, raw_write_data)
@ -238,7 +238,7 @@ Otherwise, use the blocking `tcp::write` function instead.
# Arguments
* sock - a `tcp_socket` to write to
* raw_write_data - a vector of `[u8]` that will be written to the stream.
* raw_write_data - a vector of `[u8]/~` that will be written to the stream.
This value must remain valid for the duration of the `write` call
# Returns
@ -247,7 +247,7 @@ A `future` value that, once the `write` operation completes, resolves to a
`result` object with a `nil` value as the `ok` variant, or a `tcp_err_data`
value as the `err` variant
"]
fn write_future(sock: tcp_socket, raw_write_data: [u8])
fn write_future(sock: tcp_socket, raw_write_data: [u8]/~)
-> future::future<result::result<(), tcp_err_data>> unsafe {
let socket_data_ptr = ptr::addr_of(*(sock.socket_data));
future::spawn {||
@ -270,7 +270,7 @@ on) from until `read_stop` is called, or a `tcp_err_data` record
"]
fn read_start(sock: tcp_socket)
-> result::result<comm::port<
result::result<[u8], tcp_err_data>>, tcp_err_data> unsafe {
result::result<[u8]/~, tcp_err_data>>, tcp_err_data> unsafe {
let socket_data = ptr::addr_of(*(sock.socket_data));
read_start_common_impl(socket_data)
}
@ -303,13 +303,13 @@ data received.
read attempt. Pass `0u` to wait indefinitely
"]
fn read(sock: tcp_socket, timeout_msecs: uint)
-> result::result<[u8],tcp_err_data> {
-> result::result<[u8]/~,tcp_err_data> {
let socket_data = ptr::addr_of(*(sock.socket_data));
read_common_impl(socket_data, timeout_msecs)
}
#[doc="
Reads a single chunk of data; returns a `future::future<[u8]>` immediately
Reads a single chunk of data; returns a `future::future<[u8]/~>` immediately
Does a non-blocking read operation for a single chunk of data from a
`tcp_socket` and immediately returns a `future` value representing the
@ -337,7 +337,7 @@ Otherwise, use the blocking `tcp::read` function instead.
read attempt. Pass `0u` to wait indefinitely
"]
fn read_future(sock: tcp_socket, timeout_msecs: uint)
-> future::future<result::result<[u8],tcp_err_data>> {
-> future::future<result::result<[u8]/~,tcp_err_data>> {
let socket_data = ptr::addr_of(*(sock.socket_data));
future::spawn {||
read_common_impl(socket_data, timeout_msecs)
@ -590,7 +590,7 @@ fn accept(new_conn: tcp_new_connection)
new_tcp_conn(server_handle_ptr) {
let server_data_ptr = uv::ll::get_data_for_uv_handle(
server_handle_ptr) as *tcp_listen_fc_data;
let reader_po = comm::port::<result::result<[u8], tcp_err_data>>();
let reader_po = comm::port::<result::result<[u8]/~, tcp_err_data>>();
let iotask = (*server_data_ptr).iotask;
let stream_handle_ptr = malloc_uv_tcp_t();
*(stream_handle_ptr as *mut uv::ll::uv_tcp_t) = uv::ll::tcp_t();
@ -790,7 +790,7 @@ Convenience methods extending `net::tcp::tcp_socket`
"]
impl sock_methods for tcp_socket {
fn read_start() -> result::result<comm::port<
result::result<[u8], tcp_err_data>>, tcp_err_data> {
result::result<[u8]/~, tcp_err_data>>, tcp_err_data> {
read_start(self)
}
fn read_stop() ->
@ -798,18 +798,18 @@ impl sock_methods for tcp_socket {
read_stop(self)
}
fn read(timeout_msecs: uint) ->
result::result<[u8], tcp_err_data> {
result::result<[u8]/~, tcp_err_data> {
read(self, timeout_msecs)
}
fn read_future(timeout_msecs: uint) ->
future::future<result::result<[u8], tcp_err_data>> {
future::future<result::result<[u8]/~, tcp_err_data>> {
read_future(self, timeout_msecs)
}
fn write(raw_write_data: [u8])
fn write(raw_write_data: [u8]/~)
-> result::result<(), tcp_err_data> {
write(self, raw_write_data)
}
fn write_future(raw_write_data: [u8])
fn write_future(raw_write_data: [u8]/~)
-> future::future<result::result<(), tcp_err_data>> {
write_future(self, raw_write_data)
}
@ -818,7 +818,7 @@ impl sock_methods for tcp_socket {
// shared implementation for tcp::read
fn read_common_impl(socket_data: *tcp_socket_data, timeout_msecs: uint)
-> result::result<[u8],tcp_err_data> unsafe {
-> result::result<[u8]/~,tcp_err_data> unsafe {
log(debug, "starting tcp::read");
let iotask = (*socket_data).iotask;
let rs_result = read_start_common_impl(socket_data);
@ -887,7 +887,7 @@ fn read_stop_common_impl(socket_data: *tcp_socket_data) ->
// shared impl for read_start
fn read_start_common_impl(socket_data: *tcp_socket_data)
-> result::result<comm::port<
result::result<[u8], tcp_err_data>>, tcp_err_data> unsafe {
result::result<[u8]/~, tcp_err_data>>, tcp_err_data> unsafe {
let stream_handle_ptr = (*socket_data).stream_handle_ptr;
let start_po = comm::port::<option<uv::ll::uv_err_data>>();
let start_ch = comm::chan(start_po);
@ -920,14 +920,14 @@ fn read_start_common_impl(socket_data: *tcp_socket_data)
// shared implementation used by write and write_future
fn write_common_impl(socket_data_ptr: *tcp_socket_data,
raw_write_data: [u8])
raw_write_data: [u8]/~)
-> result::result<(), tcp_err_data> unsafe {
let write_req_ptr = ptr::addr_of((*socket_data_ptr).write_req);
let stream_handle_ptr =
(*socket_data_ptr).stream_handle_ptr;
let write_buf_vec = [ uv::ll::buf_init(
vec::unsafe::to_ptr(raw_write_data),
vec::len(raw_write_data)) ];
vec::len(raw_write_data)) ]/~;
let write_buf_vec_ptr = ptr::addr_of(write_buf_vec);
let result_po = comm::port::<tcp_write_result>();
let write_data = {
@ -968,7 +968,7 @@ fn conn_port_new_tcp_socket(
iotask: iotask)
-> result::result<tcp_socket,tcp_err_data> unsafe {
// tcp_nl_on_connection_cb
let reader_po = comm::port::<result::result<[u8], tcp_err_data>>();
let reader_po = comm::port::<result::result<[u8]/~, tcp_err_data>>();
let client_socket_data = @{
reader_po : reader_po,
reader_ch : comm::chan(reader_po),
@ -1120,7 +1120,7 @@ enum tcp_read_start_result {
}
enum tcp_read_result {
tcp_read_data([u8]),
tcp_read_data([u8]/~),
tcp_read_done,
tcp_read_err(tcp_err_data)
}
@ -1264,8 +1264,8 @@ enum conn_attempt {
}
type tcp_socket_data = {
reader_po: comm::port<result::result<[u8], tcp_err_data>>,
reader_ch: comm::chan<result::result<[u8], tcp_err_data>>,
reader_po: comm::port<result::result<[u8]/~, tcp_err_data>>,
reader_ch: comm::chan<result::result<[u8]/~, tcp_err_data>>,
stream_handle_ptr: *uv::ll::uv_tcp_t,
connect_req: uv::ll::uv_connect_t,
write_req: uv::ll::uv_write_t,
@ -1570,7 +1570,7 @@ mod test {
}
}
fn tcp_write_single(sock: tcp_socket, val: [u8]) {
fn tcp_write_single(sock: tcp_socket, val: [u8]/~) {
let write_result_future = sock.write_future(val);
let write_result = write_result_future.get();
if result::is_err(write_result) {

View file

@ -19,22 +19,22 @@ return the intermediate results.
This is used to build most of the other parallel vector functions,
like map or alli."]
fn map_slices<A: copy send, B: copy send>(
xs: [A],
xs: [A]/~,
f: fn() -> fn~(uint, [A]/&) -> B)
-> [B] {
-> [B]/~ {
let len = xs.len();
if len < min_granularity {
log(info, "small slice");
// This is a small vector, fall back on the normal map.
[f()(0u, xs)]
[f()(0u, xs)]/~
}
else {
let num_tasks = uint::min(max_tasks, len / min_granularity);
let items_per_task = len / num_tasks;
let mut futures = [];
let mut futures = []/~;
let mut base = 0u;
log(info, "spawning tasks");
while base < len {
@ -74,18 +74,19 @@ fn map_slices<A: copy send, B: copy send>(
}
#[doc="A parallel version of map."]
fn map<A: copy send, B: copy send>(xs: [A], f: fn~(A) -> B) -> [B] {
fn map<A: copy send, B: copy send>(xs: [A]/~, f: fn~(A) -> B) -> [B]/~ {
vec::concat(map_slices(xs) {||
fn~(_base: uint, slice : [A]/&, copy f) -> [B] {
fn~(_base: uint, slice : [A]/&, copy f) -> [B]/~ {
vec::map(slice, f)
}
})
}
#[doc="A parallel version of mapi."]
fn mapi<A: copy send, B: copy send>(xs: [A], f: fn~(uint, A) -> B) -> [B] {
fn mapi<A: copy send, B: copy send>(xs: [A]/~,
f: fn~(uint, A) -> B) -> [B]/~ {
let slices = map_slices(xs) {||
fn~(base: uint, slice : [A]/&, copy f) -> [B] {
fn~(base: uint, slice : [A]/&, copy f) -> [B]/~ {
vec::mapi(slice) {|i, x|
f(i + base, x)
}
@ -102,10 +103,10 @@ fn mapi<A: copy send, B: copy send>(xs: [A], f: fn~(uint, A) -> B) -> [B] {
In this case, f is a function that creates functions to run over the
inner elements. This is to skirt the need for copy constructors."]
fn mapi_factory<A: copy send, B: copy send>(
xs: [A], f: fn() -> fn~(uint, A) -> B) -> [B] {
xs: [A]/~, f: fn() -> fn~(uint, A) -> B) -> [B]/~ {
let slices = map_slices(xs) {||
let f = f();
fn~(base: uint, slice : [A]/&, move f) -> [B] {
fn~(base: uint, slice : [A]/&, move f) -> [B]/~ {
vec::mapi(slice) {|i, x|
f(i + base, x)
}
@ -118,7 +119,7 @@ fn mapi_factory<A: copy send, B: copy send>(
}
#[doc="Returns true if the function holds for all elements in the vector."]
fn alli<A: copy send>(xs: [A], f: fn~(uint, A) -> bool) -> bool {
fn alli<A: copy send>(xs: [A]/~, f: fn~(uint, A) -> bool) -> bool {
vec::all(map_slices(xs) {||
fn~(base: uint, slice : [A]/&, copy f) -> bool {
vec::alli(slice) {|i, x|
@ -129,7 +130,7 @@ fn alli<A: copy send>(xs: [A], f: fn~(uint, A) -> bool) -> bool {
}
#[doc="Returns true if the function holds for any elements in the vector."]
fn any<A: copy send>(xs: [A], f: fn~(A) -> bool) -> bool {
fn any<A: copy send>(xs: [A]/~, f: fn~(A) -> bool) -> bool {
vec::any(map_slices(xs) {||
fn~(_base : uint, slice: [A]/&, copy f) -> bool {
vec::any(slice, f)

View file

@ -97,7 +97,7 @@ Add one char to the end of the rope
* this function executes in near-constant time
"]
fn append_char(rope: rope, char: char) -> rope {
ret append_str(rope, @str::from_chars([char]));
ret append_str(rope, @str::from_chars([char]/~));
}
#[doc = "
@ -118,7 +118,7 @@ Add one char to the beginning of the rope
* this function executes in near-constant time
"]
fn prepend_char(rope: rope, char: char) -> rope {
ret prepend_str(rope, @str::from_chars([char]));
ret prepend_str(rope, @str::from_chars([char]/~));
}
#[doc = "
@ -153,7 +153,7 @@ If the ropes are balanced initially and have the same height, the resulting
rope remains balanced. However, this function does not take any further
measure to ensure that the result is balanced.
"]
fn concat(v: [rope]) -> rope {
fn concat(v: [rope]/~) -> rope {
//Copy `v` into a mut vector
let mut len = vec::len(v);
if len == 0u { ret node::empty; }
@ -752,7 +752,7 @@ mod node {
* forest - The forest. This vector is progressively rewritten during
execution and should be discarded as meaningless afterwards.
"]
fn tree_from_forest_destructive(forest: [mut @node]) -> @node {
fn tree_from_forest_destructive(forest: [mut @node]/~) -> @node {
let mut i;
let mut len = vec::len(forest);
while len > 1u {
@ -805,7 +805,7 @@ mod node {
option::none { break; }
option::some(x) {
//TODO: Replace with memcpy or something similar
let mut local_buf: [u8] =
let mut local_buf: [u8]/~ =
unsafe::reinterpret_cast(*x.content);
let mut i = x.byte_offset;
while i < x.byte_len {
@ -859,7 +859,7 @@ mod node {
fn bal(node: @node) -> option<@node> {
if height(node) < hint_max_node_height { ret option::none; }
//1. Gather all leaves as a forest
let mut forest = [mut];
let mut forest = [mut]/~;
let it = leaf_iterator::start(node);
loop {
alt (leaf_iterator::next(it)) {
@ -1113,12 +1113,12 @@ mod node {
mod leaf_iterator {
type t = {
stack: [mut @node],
stack: [mut @node]/~,
mut stackpos: int
};
fn empty() -> t {
let stack : [mut @node] = [mut];
let stack : [mut @node]/~ = [mut]/~;
ret {stack: stack, mut stackpos: -1}
}

View file

@ -83,7 +83,7 @@ iface deserializer {
//
// In some cases, these should eventually be coded as traits.
fn emit_from_vec<S: serializer, T>(s: S, v: [T], f: fn(T)) {
fn emit_from_vec<S: serializer, T>(s: S, v: [T]/~, f: fn(T)) {
s.emit_vec(vec::len(v)) {||
vec::iteri(v) {|i,e|
s.emit_vec_elt(i) {||
@ -93,7 +93,7 @@ fn emit_from_vec<S: serializer, T>(s: S, v: [T], f: fn(T)) {
}
}
fn read_to_vec<D: deserializer, T: copy>(d: D, f: fn() -> T) -> [T] {
fn read_to_vec<D: deserializer, T: copy>(d: D, f: fn() -> T) -> [T]/~ {
d.read_vec {|len|
vec::from_fn(len) {|i|
d.read_vec_elt(i) {|| f() }
@ -102,13 +102,13 @@ fn read_to_vec<D: deserializer, T: copy>(d: D, f: fn() -> T) -> [T] {
}
impl serializer_helpers<S: serializer> for S {
fn emit_from_vec<T>(v: [T], f: fn(T)) {
fn emit_from_vec<T>(v: [T]/~, f: fn(T)) {
emit_from_vec(self, v, f)
}
}
impl deserializer_helpers<D: deserializer> for D {
fn read_to_vec<T: copy>(f: fn() -> T) -> [T] {
fn read_to_vec<T: copy>(f: fn() -> T) -> [T]/~ {
read_to_vec(self, f)
}
}

View file

@ -22,14 +22,14 @@ export sha1;
#[doc = "The SHA-1 interface"]
iface sha1 {
#[doc = "Provide message input as bytes"]
fn input([u8]);
fn input([u8]/~);
#[doc = "Provide message input as string"]
fn input_str(str);
#[doc = "
Read the digest as a vector of 20 bytes. After calling this no further
input may be provided until reset is called.
"]
fn result() -> [u8];
fn result() -> [u8]/~;
#[doc = "
Read the digest as a hex string. After calling this no further
input may be provided until reset is called.
@ -52,15 +52,15 @@ const k3: u32 = 0xCA62C1D6u32;
#[doc = "Construct a `sha` object"]
fn sha1() -> sha1 {
type sha1state =
{h: [mut u32],
{h: [mut u32]/~,
mut len_low: u32,
mut len_high: u32,
msg_block: [mut u8],
msg_block: [mut u8]/~,
mut msg_block_idx: uint,
mut computed: bool,
work_buf: @[mut u32]};
work_buf: @[mut u32]/~};
fn add_input(st: sha1state, msg: [u8]) {
fn add_input(st: sha1state, msg: [u8]/~) {
/* FIXME: Should be typestate precondition (#2345) */
assert (!st.computed);
for vec::each(msg) {|element|
@ -157,15 +157,15 @@ fn sha1() -> sha1 {
fn circular_shift(bits: u32, word: u32) -> u32 {
ret word << bits | word >> 32u32 - bits;
}
fn mk_result(st: sha1state) -> [u8] {
fn mk_result(st: sha1state) -> [u8]/~ {
if !st.computed { pad_msg(st); st.computed = true; }
let mut rs: [u8] = [];
let mut rs: [u8]/~ = []/~;
for vec::each(st.h) {|hpart|
let a = (hpart >> 24u32 & 0xFFu32) as u8;
let b = (hpart >> 16u32 & 0xFFu32) as u8;
let c = (hpart >> 8u32 & 0xFFu32) as u8;
let d = (hpart & 0xFFu32) as u8;
rs += [a, b, c, d];
rs += [a, b, c, d]/~;
}
ret rs;
}
@ -231,9 +231,9 @@ fn sha1() -> sha1 {
self.h[4] = 0xC3D2E1F0u32;
self.computed = false;
}
fn input(msg: [u8]) { add_input(self, msg); }
fn input(msg: [u8]/~) { add_input(self, msg); }
fn input_str(msg: str) { add_input(self, str::bytes(msg)); }
fn result() -> [u8] { ret mk_result(self); }
fn result() -> [u8]/~ { ret mk_result(self); }
fn result_str() -> str {
let r = mk_result(self);
let mut s = "";
@ -260,7 +260,7 @@ mod tests {
#[test]
fn test() unsafe {
type test = {input: str, output: [u8]};
type test = {input: str, output: [u8]/~};
fn a_million_letter_a() -> str {
let mut i = 0;
@ -270,14 +270,14 @@ mod tests {
}
// Test messages from FIPS 180-1
let fips_180_1_tests: [test] =
let fips_180_1_tests: [test]/~ =
[{input: "abc",
output:
[0xA9u8, 0x99u8, 0x3Eu8, 0x36u8,
0x47u8, 0x06u8, 0x81u8, 0x6Au8,
0xBAu8, 0x3Eu8, 0x25u8, 0x71u8,
0x78u8, 0x50u8, 0xC2u8, 0x6Cu8,
0x9Cu8, 0xD0u8, 0xD8u8, 0x9Du8]},
0x9Cu8, 0xD0u8, 0xD8u8, 0x9Du8]/~},
{input:
"abcdbcdecdefdefgefghfghighij" +
"hijkijkljklmklmnlmnomnopnopq",
@ -286,33 +286,33 @@ mod tests {
0x1Cu8, 0x3Bu8, 0xD2u8, 0x6Eu8,
0xBAu8, 0xAEu8, 0x4Au8, 0xA1u8,
0xF9u8, 0x51u8, 0x29u8, 0xE5u8,
0xE5u8, 0x46u8, 0x70u8, 0xF1u8]},
0xE5u8, 0x46u8, 0x70u8, 0xF1u8]/~},
{input: a_million_letter_a(),
output:
[0x34u8, 0xAAu8, 0x97u8, 0x3Cu8,
0xD4u8, 0xC4u8, 0xDAu8, 0xA4u8,
0xF6u8, 0x1Eu8, 0xEBu8, 0x2Bu8,
0xDBu8, 0xADu8, 0x27u8, 0x31u8,
0x65u8, 0x34u8, 0x01u8, 0x6Fu8]}];
0x65u8, 0x34u8, 0x01u8, 0x6Fu8]/~}]/~;
// Examples from wikipedia
let wikipedia_tests: [test] =
let wikipedia_tests: [test]/~ =
[{input: "The quick brown fox jumps over the lazy dog",
output:
[0x2fu8, 0xd4u8, 0xe1u8, 0xc6u8,
0x7au8, 0x2du8, 0x28u8, 0xfcu8,
0xedu8, 0x84u8, 0x9eu8, 0xe1u8,
0xbbu8, 0x76u8, 0xe7u8, 0x39u8,
0x1bu8, 0x93u8, 0xebu8, 0x12u8]},
0x1bu8, 0x93u8, 0xebu8, 0x12u8]/~},
{input: "The quick brown fox jumps over the lazy cog",
output:
[0xdeu8, 0x9fu8, 0x2cu8, 0x7fu8,
0xd2u8, 0x5eu8, 0x1bu8, 0x3au8,
0xfau8, 0xd3u8, 0xe8u8, 0x5au8,
0x0bu8, 0xd1u8, 0x7du8, 0x9bu8,
0x10u8, 0x0du8, 0xb4u8, 0xb3u8]}];
0x10u8, 0x0du8, 0xb4u8, 0xb3u8]/~}]/~;
let tests = fips_180_1_tests + wikipedia_tests;
fn check_vec_eq(v0: [u8], v1: [u8]) {
fn check_vec_eq(v0: [u8]/~, v1: [u8]/~) {
assert (vec::len::<u8>(v0) == vec::len::<u8>(v1));
let len = vec::len::<u8>(v0);
let mut i = 0u;

View file

@ -1,5 +1,5 @@
#[doc = "Sorting methods"];
import vec::len;
import vec::{len, push};
import int::{eq, ord};
export le;
@ -15,18 +15,19 @@ Merge sort. Returns a new vector containing the sorted list.
Has worst case O(n log n) performance, best case O(n), but
is not space efficient. This is a stable sort.
"]
fn merge_sort<T: copy>(le: le<T>, v: [const T]) -> [T] {
fn merge_sort<T: copy>(le: le<T>, v: [const T]/~) -> [T]/~ {
type slice = (uint, uint);
ret merge_sort_(le, v, (0u, len(v)));
fn merge_sort_<T: copy>(le: le<T>, v: [const T], slice: slice) -> [T] {
fn merge_sort_<T: copy>(le: le<T>, v: [const T]/~, slice: slice)
-> [T]/~ {
let begin = tuple::first(slice);
let end = tuple::second(slice);
let v_len = end - begin;
if v_len == 0u { ret []; }
if v_len == 1u { ret [v[begin]]; }
if v_len == 0u { ret []/~; }
if v_len == 1u { ret [v[begin]]/~; }
let mid = v_len / 2u + begin;
let a = (begin, mid);
@ -34,8 +35,8 @@ fn merge_sort<T: copy>(le: le<T>, v: [const T]) -> [T] {
ret merge(le, merge_sort_(le, v, a), merge_sort_(le, v, b));
}
fn merge<T: copy>(le: le<T>, a: [T], b: [T]) -> [T] {
let mut rs = [];
fn merge<T: copy>(le: le<T>, a: [T]/~, b: [T]/~) -> [T]/~ {
let mut rs = []/~;
vec::reserve(rs, len(a) + len(b));
let a_len = len(a);
let mut a_ix = 0u;
@ -53,7 +54,7 @@ fn merge_sort<T: copy>(le: le<T>, v: [const T]) -> [T] {
}
}
fn part<T: copy>(compare_func: le<T>, arr: [mut T], left: uint,
fn part<T: copy>(compare_func: le<T>, arr: [mut T]/~, left: uint,
right: uint, pivot: uint) -> uint {
let pivot_value = arr[pivot];
arr[pivot] <-> arr[right];
@ -70,7 +71,7 @@ fn part<T: copy>(compare_func: le<T>, arr: [mut T], left: uint,
ret storage_index;
}
fn qsort<T: copy>(compare_func: le<T>, arr: [mut T], left: uint,
fn qsort<T: copy>(compare_func: le<T>, arr: [mut T]/~, left: uint,
right: uint) {
if right > left {
let pivot = (left + right) / 2u;
@ -89,13 +90,13 @@ Quicksort. Sorts a mut vector in place.
Has worst case O(n^2) performance, average case O(n log n).
This is an unstable sort.
"]
fn quick_sort<T: copy>(compare_func: le<T>, arr: [mut T]) {
fn quick_sort<T: copy>(compare_func: le<T>, arr: [mut T]/~) {
if len::<T>(arr) == 0u { ret; }
qsort::<T>(compare_func, arr, 0u, len::<T>(arr) - 1u);
}
fn qsort3<T: copy>(compare_func_lt: le<T>, compare_func_eq: le<T>,
arr: [mut T], left: int, right: int) {
arr: [mut T]/~, left: int, right: int) {
if right <= left { ret; }
let v: T = arr[right];
let mut i: int = left - 1;
@ -145,14 +146,14 @@ fn qsort3<T: copy>(compare_func_lt: le<T>, compare_func_eq: le<T>,
#[doc = "
Fancy quicksort. Sorts a mut vector in place.
Based on algorithm presented by [Sedgewick and Bentley]
Based on algorithm presented by [Sedgewick and Bentley]/~
(http://www.cs.princeton.edu/~rs/talks/QuicksortIsOptimal.pdf).
According to these slides this is the algorithm of choice for
'randomly ordered keys, abstract compare' & 'small number of key values'.
This is an unstable sort.
"]
fn quick_sort3<T: copy ord eq>(arr: [mut T]) {
fn quick_sort3<T: copy ord eq>(arr: [mut T]/~) {
if len::<T>(arr) == 0u { ret; }
qsort3::<T>({ |x, y| x.lt(y) }, { |x, y| x.eq(y) }, arr, 0,
(len::<T>(arr) as int) - 1);
@ -160,7 +161,7 @@ fn quick_sort3<T: copy ord eq>(arr: [mut T]) {
#[cfg(test)]
mod test_qsort3 {
fn check_sort(v1: [mut int], v2: [mut int]) {
fn check_sort(v1: [mut int]/~, v2: [mut int]/~) {
let len = vec::len::<int>(v1);
quick_sort3::<int>(v1);
let mut i = 0u;
@ -174,24 +175,24 @@ mod test_qsort3 {
#[test]
fn test() {
{
let v1 = [mut 3, 7, 4, 5, 2, 9, 5, 8];
let v2 = [mut 2, 3, 4, 5, 5, 7, 8, 9];
let v1 = [mut 3, 7, 4, 5, 2, 9, 5, 8]/~;
let v2 = [mut 2, 3, 4, 5, 5, 7, 8, 9]/~;
check_sort(v1, v2);
}
{
let v1 = [mut 1, 1, 1];
let v2 = [mut 1, 1, 1];
let v1 = [mut 1, 1, 1]/~;
let v2 = [mut 1, 1, 1]/~;
check_sort(v1, v2);
}
{
let v1: [mut int] = [mut];
let v2: [mut int] = [mut];
let v1: [mut int]/~ = [mut]/~;
let v2: [mut int]/~ = [mut]/~;
check_sort(v1, v2);
}
{ let v1 = [mut 9]; let v2 = [mut 9]; check_sort(v1, v2); }
{ let v1 = [mut 9]/~; let v2 = [mut 9]/~; check_sort(v1, v2); }
{
let v1 = [mut 9, 3, 3, 3, 9];
let v2 = [mut 3, 3, 3, 9, 9];
let v1 = [mut 9, 3, 3, 3, 9]/~;
let v2 = [mut 3, 3, 3, 9, 9]/~;
check_sort(v1, v2);
}
}
@ -199,7 +200,7 @@ mod test_qsort3 {
#[cfg(test)]
mod test_qsort {
fn check_sort(v1: [mut int], v2: [mut int]) {
fn check_sort(v1: [mut int]/~, v2: [mut int]/~) {
let len = vec::len::<int>(v1);
fn leual(&&a: int, &&b: int) -> bool { ret a <= b; }
let f = leual;
@ -215,24 +216,24 @@ mod test_qsort {
#[test]
fn test() {
{
let v1 = [mut 3, 7, 4, 5, 2, 9, 5, 8];
let v2 = [mut 2, 3, 4, 5, 5, 7, 8, 9];
let v1 = [mut 3, 7, 4, 5, 2, 9, 5, 8]/~;
let v2 = [mut 2, 3, 4, 5, 5, 7, 8, 9]/~;
check_sort(v1, v2);
}
{
let v1 = [mut 1, 1, 1];
let v2 = [mut 1, 1, 1];
let v1 = [mut 1, 1, 1]/~;
let v2 = [mut 1, 1, 1]/~;
check_sort(v1, v2);
}
{
let v1: [mut int] = [mut];
let v2: [mut int] = [mut];
let v1: [mut int]/~ = [mut]/~;
let v2: [mut int]/~ = [mut]/~;
check_sort(v1, v2);
}
{ let v1 = [mut 9]; let v2 = [mut 9]; check_sort(v1, v2); }
{ let v1 = [mut 9]/~; let v2 = [mut 9]/~; check_sort(v1, v2); }
{
let v1 = [mut 9, 3, 3, 3, 9];
let v2 = [mut 3, 3, 3, 9, 9];
let v1 = [mut 9, 3, 3, 3, 9]/~;
let v2 = [mut 3, 3, 3, 9, 9]/~;
check_sort(v1, v2);
}
}
@ -240,9 +241,9 @@ mod test_qsort {
// Regression test for #750
#[test]
fn test_simple() {
let names = [mut 2, 1, 3];
let names = [mut 2, 1, 3]/~;
let expected = [1, 2, 3];
let expected = [1, 2, 3]/~;
fn le(&&a: int, &&b: int) -> bool { int::le(a, b) }
sort::quick_sort(le, names);
@ -261,7 +262,7 @@ mod test_qsort {
#[cfg(test)]
mod tests {
fn check_sort(v1: [int], v2: [int]) {
fn check_sort(v1: [int]/~, v2: [int]/~) {
let len = vec::len::<int>(v1);
fn le(&&a: int, &&b: int) -> bool { ret a <= b; }
let f = le;
@ -277,16 +278,16 @@ mod tests {
#[test]
fn test() {
{
let v1 = [3, 7, 4, 5, 2, 9, 5, 8];
let v2 = [2, 3, 4, 5, 5, 7, 8, 9];
let v1 = [3, 7, 4, 5, 2, 9, 5, 8]/~;
let v2 = [2, 3, 4, 5, 5, 7, 8, 9]/~;
check_sort(v1, v2);
}
{ let v1 = [1, 1, 1]; let v2 = [1, 1, 1]; check_sort(v1, v2); }
{ let v1: [int] = []; let v2: [int] = []; check_sort(v1, v2); }
{ let v1 = [9]; let v2 = [9]; check_sort(v1, v2); }
{ let v1 = [1, 1, 1]/~; let v2 = [1, 1, 1]/~; check_sort(v1, v2); }
{ let v1:[int]/~ = []/~; let v2:[int]/~ = []/~; check_sort(v1, v2); }
{ let v1 = [9]/~; let v2 = [9]/~; check_sort(v1, v2); }
{
let v1 = [9, 3, 3, 3, 9];
let v2 = [3, 3, 3, 9, 9];
let v1 = [9, 3, 3, 3, 9]/~;
let v2 = [3, 3, 3, 9, 9]/~;
check_sort(v1, v2);
}
}
@ -294,9 +295,9 @@ mod tests {
#[test]
fn test_merge_sort_mutable() {
fn le(&&a: int, &&b: int) -> bool { ret a <= b; }
let v1 = [mut 3, 2, 1];
let v1 = [mut 3, 2, 1]/~;
let v2 = merge_sort(le, v1);
assert v2 == [1, 2, 3];
assert v2 == [1, 2, 3]/~;
}
}

View file

@ -23,18 +23,18 @@ const color_bright_magenta: u8 = 13u8;
const color_bright_cyan: u8 = 14u8;
const color_bright_white: u8 = 15u8;
fn esc(writer: io::writer) { writer.write([0x1bu8, '[' as u8]); }
fn esc(writer: io::writer) { writer.write([0x1bu8, '[' as u8]/~); }
#[doc = "Reset the foreground and background colors to default"]
fn reset(writer: io::writer) {
esc(writer);
writer.write(['0' as u8, 'm' as u8]);
writer.write(['0' as u8, 'm' as u8]/~);
}
#[doc = "Returns true if the terminal supports color"]
fn color_supported() -> bool {
let supported_terms = ["xterm-color", "xterm",
"screen-bce", "xterm-256color"];
"screen-bce", "xterm-256color"]/~;
ret alt os::getenv("TERM") {
option::some(env) {
for vec::each(supported_terms) {|term|
@ -50,8 +50,8 @@ fn set_color(writer: io::writer, first_char: u8, color: u8) {
assert (color < 16u8);
esc(writer);
let mut color = color;
if color >= 8u8 { writer.write(['1' as u8, ';' as u8]); color -= 8u8; }
writer.write([first_char, ('0' as u8) + color, 'm' as u8]);
if color >= 8u8 { writer.write(['1' as u8, ';' as u8]/~); color -= 8u8; }
writer.write([first_char, ('0' as u8) + color, 'm' as u8]/~);
}
#[doc = "Set the foreground color"]

View file

@ -49,7 +49,7 @@ type test_desc = {
// The default console test runner. It accepts the command line
// arguments and a vector of test_descs (generated at compile time).
fn test_main(args: [str], tests: [test_desc]) {
fn test_main(args: [str]/~, tests: [test_desc]/~) {
let opts =
alt parse_opts(args) {
either::left(o) { o }
@ -64,9 +64,9 @@ type test_opts = {filter: option<str>, run_ignored: bool,
type opt_res = either<test_opts, str>;
// Parses command line arguments into test options
fn parse_opts(args: [str]) -> opt_res {
fn parse_opts(args: [str]/~) -> opt_res {
let args_ = vec::tail(args);
let opts = [getopts::optflag("ignored"), getopts::optopt("logfile")];
let opts = [getopts::optflag("ignored"), getopts::optopt("logfile")]/~;
let match =
alt getopts::getopts(args_, opts) {
ok(m) { m }
@ -97,11 +97,11 @@ type console_test_state =
mut passed: uint,
mut failed: uint,
mut ignored: uint,
mut failures: [test_desc]};
mut failures: [test_desc]/~};
// A simple console test runner
fn run_tests_console(opts: test_opts,
tests: [test_desc]) -> bool {
tests: [test_desc]/~) -> bool {
fn callback(event: testevent, st: console_test_state) {
alt event {
@ -128,7 +128,7 @@ fn run_tests_console(opts: test_opts,
st.failed += 1u;
write_failed(st.out, st.use_color);
st.out.write_line("");
st.failures += [copy test];
st.failures += [copy test]/~;
}
tr_ignored {
st.ignored += 1u;
@ -142,7 +142,7 @@ fn run_tests_console(opts: test_opts,
let log_out = alt opts.logfile {
some(path) {
alt io::file_writer(path, [io::create, io::truncate]) {
alt io::file_writer(path, [io::create, io::truncate]/~) {
result::ok(w) { some(w) }
result::err(s) {
fail(#fmt("can't open output file: %s", s))
@ -160,7 +160,7 @@ fn run_tests_console(opts: test_opts,
mut passed: 0u,
mut failed: 0u,
mut ignored: 0u,
mut failures: []};
mut failures: []/~};
run_tests(opts, tests, {|x|callback(x, st)});
@ -250,7 +250,7 @@ fn should_sort_failures_before_printing_them() {
mut passed: 0u,
mut failed: 0u,
mut ignored: 0u,
mut failures: [test_b, test_a]};
mut failures: [test_b, test_a]/~};
print_failures(st);
@ -264,14 +264,14 @@ fn should_sort_failures_before_printing_them() {
fn use_color() -> bool { ret get_concurrency() == 1u; }
enum testevent {
te_filtered([test_desc]),
te_filtered([test_desc]/~),
te_wait(test_desc),
te_result(test_desc, test_result),
}
type monitor_msg = (test_desc, test_result);
fn run_tests(opts: test_opts, tests: [test_desc],
fn run_tests(opts: test_opts, tests: [test_desc]/~,
callback: fn@(testevent)) {
let mut filtered_tests = filter_tests(opts, tests);
@ -329,7 +329,7 @@ fn get_concurrency() -> uint {
#[warn(no_non_implicitly_copyable_typarams)]
fn filter_tests(opts: test_opts,
tests: [test_desc]) -> [test_desc] {
tests: [test_desc]/~) -> [test_desc]/~ {
let mut filtered = copy tests;
// Remove tests that don't match the test filter
@ -482,7 +482,7 @@ mod tests {
#[test]
fn first_free_arg_should_be_a_filter() {
let args = ["progname", "filter"];
let args = ["progname", "filter"]/~;
let opts = alt parse_opts(args) { either::left(o) { o }
_ { fail "Malformed arg in first_free_arg_should_be_a_filter"; } };
assert (str::eq("filter", option::get(opts.filter)));
@ -490,7 +490,7 @@ mod tests {
#[test]
fn parse_ignored_flag() {
let args = ["progname", "filter", "--ignored"];
let args = ["progname", "filter", "--ignored"]/~;
let opts = alt parse_opts(args) { either::left(o) { o }
_ { fail "Malformed arg in parse_ignored_flag"; } };
assert (opts.run_ignored);
@ -505,7 +505,7 @@ mod tests {
logfile: option::none};
let tests =
[{name: "1", fn: fn~() { }, ignore: true, should_fail: false},
{name: "2", fn: fn~() { }, ignore: false, should_fail: false}];
{name: "2", fn: fn~() { }, ignore: false, should_fail: false}]/~;
let filtered = filter_tests(opts, tests);
assert (vec::len(filtered) == 1u);
@ -524,15 +524,15 @@ mod tests {
"test::ignored_tests_result_in_ignored",
"test::first_free_arg_should_be_a_filter",
"test::parse_ignored_flag", "test::filter_for_ignored_option",
"test::sort_tests"];
"test::sort_tests"]/~;
let tests =
{
let testfn = fn~() { };
let mut tests = [];
let mut tests = []/~;
for vec::each(names) {|name|
let test = {name: name, fn: copy testfn, ignore: false,
should_fail: false};
tests += [test];
tests += [test]/~;
}
tests
};
@ -543,7 +543,7 @@ mod tests {
"test::do_not_run_ignored_tests", "test::filter_for_ignored_option",
"test::first_free_arg_should_be_a_filter",
"test::ignored_tests_result_in_ignored", "test::parse_ignored_flag",
"test::sort_tests"];
"test::sort_tests"]/~;
let pairs = vec::zip(expected, filtered);

View file

@ -66,14 +66,14 @@ fn tzset() {
}
type tm = {
tm_sec: i32, // seconds after the minute [0-60]
tm_min: i32, // minutes after the hour [0-59]
tm_hour: i32, // hours after midnight [0-23]
tm_mday: i32, // days of the month [1-31]
tm_mon: i32, // months since January [0-11]
tm_sec: i32, // seconds after the minute [0-60]/~
tm_min: i32, // minutes after the hour [0-59]/~
tm_hour: i32, // hours after midnight [0-23]/~
tm_mday: i32, // days of the month [1-31]/~
tm_mon: i32, // months since January [0-11]/~
tm_year: i32, // years since 1900
tm_wday: i32, // days since Sunday [0-6]
tm_yday: i32, // days since January 1 [0-365]
tm_wday: i32, // days since Sunday [0-6]/~
tm_yday: i32, // days since January 1 [0-365]/~
tm_isdst: i32, // Daylight Savings Time flag
tm_gmtoff: i32, // offset from UTC in seconds
tm_zone: str, // timezone abbreviation
@ -151,7 +151,7 @@ fn strptime(s: str, format: str) -> result<tm, str> {
ret true;
}
fn match_strs(s: str, pos: uint, strs: [(str, i32)])
fn match_strs(s: str, pos: uint, strs: [(str, i32)]/~)
-> option<(i32, uint)> {
let mut i = 0u;
let len = vec::len(strs);
@ -214,7 +214,7 @@ fn strptime(s: str, format: str) -> result<tm, str> {
("Thursday", 4_i32),
("Friday", 5_i32),
("Saturday", 6_i32)
]) {
]/~) {
some(item) { let (v, pos) = item; tm.tm_wday = v; ok(pos) }
none { err("Invalid day") }
}
@ -228,7 +228,7 @@ fn strptime(s: str, format: str) -> result<tm, str> {
("Thu", 4_i32),
("Fri", 5_i32),
("Sat", 6_i32)
]) {
]/~) {
some(item) { let (v, pos) = item; tm.tm_wday = v; ok(pos) }
none { err("Invalid day") }
}
@ -247,7 +247,7 @@ fn strptime(s: str, format: str) -> result<tm, str> {
("October", 9_i32),
("November", 10_i32),
("December", 11_i32)
]) {
]/~) {
some(item) { let (v, pos) = item; tm.tm_mon = v; ok(pos) }
none { err("Invalid month") }
}
@ -266,7 +266,7 @@ fn strptime(s: str, format: str) -> result<tm, str> {
("Oct", 9_i32),
("Nov", 10_i32),
("Dec", 11_i32)
]) {
]/~) {
some(item) { let (v, pos) = item; tm.tm_mon = v; ok(pos) }
none { err("Invalid month") }
}
@ -385,13 +385,13 @@ fn strptime(s: str, format: str) -> result<tm, str> {
}
'n' { parse_char(s, pos, '\n') }
'P' {
alt match_strs(s, pos, [("am", 0_i32), ("pm", 12_i32)]) {
alt match_strs(s, pos, [("am", 0_i32), ("pm", 12_i32)]/~) {
some(item) { let (v, pos) = item; tm.tm_hour += v; ok(pos) }
none { err("Invalid hour") }
}
}
'p' {
alt match_strs(s, pos, [("AM", 0_i32), ("PM", 12_i32)]) {
alt match_strs(s, pos, [("AM", 0_i32), ("PM", 12_i32)]/~) {
some(item) { let (v, pos) = item; tm.tm_hour += v; ok(pos) }
none { err("Invalid hour") }
}
@ -1010,7 +1010,7 @@ mod tests {
"Thursday",
"Friday",
"Saturday"
].iter { |day| assert test(day, "%A"); }
]/~.iter { |day| assert test(day, "%A"); }
[
"Sun",
@ -1020,7 +1020,7 @@ mod tests {
"Thu",
"Fri",
"Sat"
].iter { |day| assert test(day, "%a"); }
]/~.iter { |day| assert test(day, "%a"); }
[
"January",
@ -1035,7 +1035,7 @@ mod tests {
"October",
"November",
"December"
].iter { |day| assert test(day, "%B"); }
]/~.iter { |day| assert test(day, "%B"); }
[
"Jan",
@ -1050,7 +1050,7 @@ mod tests {
"Oct",
"Nov",
"Dec"
].iter { |day| assert test(day, "%b"); }
]/~.iter { |day| assert test(day, "%b"); }
assert test("19", "%C");
assert test("Fri Feb 13 23:31:30 2009", "%c");

View file

@ -167,7 +167,7 @@ mod test {
[(1u, 20u),
(10u, 10u),
(20u, 2u)]
(20u, 2u)]/~
};

View file

@ -24,7 +24,7 @@ import libc::size_t;
// libuv struct mappings
type uv_ip4_addr = {
ip: [u8],
ip: [u8]/~,
port: int
};
type uv_ip6_addr = uv_ip4_addr;
@ -616,7 +616,7 @@ unsafe fn accept(server: *libc::c_void, client: *libc::c_void)
}
unsafe fn write<T>(req: *uv_write_t, stream: *T,
buf_in: *[uv_buf_t], cb: *u8) -> libc::c_int {
buf_in: *[uv_buf_t]/~, cb: *u8) -> libc::c_int {
let buf_ptr = vec::unsafe::to_ptr(*buf_in);
let buf_cnt = vec::len(*buf_in) as i32;
ret rustrt::rust_uv_write(req as *libc::c_void,
@ -678,7 +678,7 @@ unsafe fn buf_init(++input: *u8, len: uint) -> uv_buf_t {
unsafe fn ip4_addr(ip: str, port: int)
-> sockaddr_in {
let mut addr_vec = str::bytes(ip);
addr_vec += [0u8]; // add null terminator
addr_vec += [0u8]/~; // add null terminator
let addr_vec_ptr = vec::unsafe::to_ptr(addr_vec);
let ip_back = str::from_bytes(addr_vec);
log(debug, #fmt("vec val: '%s' length: %u",
@ -795,13 +795,13 @@ type uv_err_data = {
mod test {
enum tcp_read_data {
tcp_read_eof,
tcp_read_more([u8]),
tcp_read_more([u8]/~),
tcp_read_error
}
type request_wrapper = {
write_req: *uv_write_t,
req_buf: *[uv_buf_t],
req_buf: *[uv_buf_t]/~,
read_chan: *comm::chan<str>
};
@ -917,7 +917,7 @@ mod test {
log(debug, #fmt("req_msg ptr: %u", req_msg_ptr as uint));
let req_msg = [
buf_init(req_msg_ptr, vec::len(req_str_bytes))
];
]/~;
// this is the enclosing record, we'll pass a ptr to
// this to C..
let write_handle = write_t();
@ -1115,7 +1115,7 @@ mod test {
client: *uv_tcp_t,
server: *uv_tcp_t,
server_kill_msg: str,
server_resp_buf: *[uv_buf_t],
server_resp_buf: *[uv_buf_t]/~,
server_chan: *comm::chan<str>,
server_write_req: *uv_write_t
};
@ -1164,7 +1164,7 @@ mod test {
log(debug, #fmt("resp_msg ptr: %u", resp_msg_ptr as uint));
let resp_msg = [
buf_init(resp_msg_ptr, vec::len(resp_str_bytes))
];
]/~;
let continue_async_handle = async_t();
let continue_async_handle_ptr =

View file

@ -41,9 +41,9 @@ type fn_ident = option<ident>;
#[auto_serialize]
type path = {span: span,
global: bool,
idents: [ident],
idents: [ident]/~,
rp: option<@region>,
types: [@ty]};
types: [@ty]/~};
#[auto_serialize]
type crate_num = int;
@ -66,7 +66,7 @@ enum ty_param_bound {
}
#[auto_serialize]
type ty_param = {ident: ident, id: node_id, bounds: @[ty_param_bound]};
type ty_param = {ident: ident, id: node_id, bounds: @[ty_param_bound]/~};
#[auto_serialize]
enum def {
@ -92,19 +92,19 @@ enum def {
// The set of meta_items that define the compilation environment of the crate,
// used to drive conditional compilation
type crate_cfg = [@meta_item];
type crate_cfg = [@meta_item]/~;
type crate = spanned<crate_>;
type crate_ =
{directives: [@crate_directive],
{directives: [@crate_directive]/~,
module: _mod,
attrs: [attribute],
attrs: [attribute]/~,
config: crate_cfg};
enum crate_directive_ {
cdir_src_mod(ident, [attribute]),
cdir_dir_mod(ident, [@crate_directive], [attribute]),
cdir_src_mod(ident, [attribute]/~),
cdir_dir_mod(ident, [@crate_directive]/~, [attribute]/~),
// NB: cdir_view_item is *not* processed by the rest of the compiler, the
// attached view_items are sunk into the crate's module during parsing,
@ -124,7 +124,7 @@ type meta_item = spanned<meta_item_>;
#[auto_serialize]
enum meta_item_ {
meta_word(ident),
meta_list(ident, [@meta_item]),
meta_list(ident, [@meta_item]/~),
meta_name_value(ident, lit),
}
@ -132,8 +132,11 @@ enum meta_item_ {
type blk = spanned<blk_>;
#[auto_serialize]
type blk_ = {view_items: [@view_item], stmts: [@stmt], expr: option<@expr>,
id: node_id, rules: blk_check_mode};
type blk_ = {view_items: [@view_item]/~,
stmts: [@stmt]/~,
expr: option<@expr>,
id: node_id,
rules: blk_check_mode};
#[auto_serialize]
type pat = {id: node_id, node: pat_, span: span};
@ -152,10 +155,10 @@ enum pat_ {
// records this pattern's node_id in an auxiliary
// set (of "pat_idents that refer to nullary enums")
pat_ident(@path, option<@pat>),
pat_enum(@path, option<[@pat]>), // "none" means a * pattern where
pat_enum(@path, option<[@pat]/~>), // "none" means a * pattern where
// we don't bind the fields to names
pat_rec([field_pat], bool),
pat_tup([@pat]),
pat_rec([field_pat]/~, bool),
pat_tup([@pat]/~),
pat_box(@pat),
pat_uniq(@pat),
pat_lit(@expr),
@ -267,10 +270,10 @@ type local = spanned<local_>;
type decl = spanned<decl_>;
#[auto_serialize]
enum decl_ { decl_local([@local]), decl_item(@item), }
enum decl_ { decl_local([@local]/~), decl_item(@item), }
#[auto_serialize]
type arm = {pats: [@pat], guard: option<@expr>, body: blk};
type arm = {pats: [@pat]/~, guard: option<@expr>, body: blk};
#[auto_serialize]
type field_ = {mutbl: mutability, ident: ident, expr: @expr};
@ -293,10 +296,10 @@ enum alt_mode { alt_check, alt_exhaustive, }
#[auto_serialize]
enum expr_ {
expr_vstore(@expr, vstore),
expr_vec([@expr], mutability),
expr_rec([field], option<@expr>),
expr_call(@expr, [@expr], bool), // True iff last argument is a block
expr_tup([@expr]),
expr_vec([@expr]/~, mutability),
expr_rec([field]/~, option<@expr>),
expr_call(@expr, [@expr]/~, bool), // True iff last argument is a block
expr_tup([@expr]/~),
expr_binary(binop, @expr, @expr),
expr_unary(unop, @expr),
expr_lit(@lit),
@ -307,7 +310,7 @@ enum expr_ {
Same semantics as while(true) { body }, but typestate knows that the
(implicit) condition is always true. */
expr_loop(blk),
expr_alt(@expr, [arm], alt_mode),
expr_alt(@expr, [arm]/~, alt_mode),
expr_fn(proto, fn_decl, blk, capture_clause),
expr_fn_block(fn_decl, blk, capture_clause),
// Inner expr is always an expr_fn_block. We need the wrapping node to
@ -327,7 +330,7 @@ enum expr_ {
expr_assign(@expr, @expr),
expr_swap(@expr, @expr),
expr_assign_op(binop, @expr, @expr),
expr_field(@expr, ident, [@ty]),
expr_field(@expr, ident, [@ty]/~),
expr_index(@expr, @expr),
expr_path(@path),
expr_addr_of(mutability, @expr),
@ -359,7 +362,7 @@ type capture_item = @{
};
#[auto_serialize]
type capture_clause = @[capture_item];
type capture_clause = @[capture_item]/~;
/*
// Says whether this is a block the user marked as
@ -373,7 +376,7 @@ enum blk_sort {
#[auto_serialize]
enum token_tree {
/* for macro invocations; parsing is the macro's job */
tt_delim([token_tree]),
tt_delim([token_tree]/~),
tt_flat(span, token::token)
}
@ -384,7 +387,7 @@ type matcher = spanned<matcher_>;
enum matcher_ {
mtc_tok(token::token),
/* body, separator, zero ok? : */
mtc_rep([matcher], option<token::token>, bool),
mtc_rep([matcher]/~, option<token::token>, bool),
mtc_bb(ident, ident, uint)
}
@ -438,8 +441,8 @@ type ty_field_ = {ident: ident, mt: mt};
type ty_field = spanned<ty_field_>;
#[auto_serialize]
type ty_method = {ident: ident, attrs: [attribute],
decl: fn_decl, tps: [ty_param], span: span};
type ty_method = {ident: ident, attrs: [attribute]/~,
decl: fn_decl, tps: [ty_param]/~, span: span};
#[auto_serialize]
enum int_ty { ty_i, ty_char, ty_i8, ty_i16, ty_i32, ty_i64, }
@ -478,11 +481,11 @@ enum ty_ {
ty_vec(mt),
ty_ptr(mt),
ty_rptr(@region, mt),
ty_rec([ty_field]),
ty_rec([ty_field]/~),
ty_fn(proto, fn_decl),
ty_tup([@ty]),
ty_tup([@ty]/~),
ty_path(@path, node_id),
ty_constr(@ty, [@ty_constr]),
ty_constr(@ty, [@ty_constr]/~),
ty_vstore(@ty, vstore),
ty_mac(mac),
// ty_infer means the type should be inferred instead of it having been
@ -522,7 +525,7 @@ type constr_arg = spanned<fn_constr_arg>;
#[auto_serialize]
type constr_general_<ARG, ID> =
{path: @path, args: [@sp_constr_arg<ARG>], id: ID};
{path: @path, args: [@sp_constr_arg<ARG>]/~, id: ID};
// In the front end, constraints have a node ID attached.
// Typeck turns this to a def_id, using the output of resolve.
@ -549,11 +552,11 @@ type arg = {mode: mode, ty: @ty, ident: ident, id: node_id};
#[auto_serialize]
type fn_decl =
{inputs: [arg],
{inputs: [arg]/~,
output: @ty,
purity: purity,
cf: ret_style,
constraints: [@constr]};
constraints: [@constr]/~};
#[auto_serialize]
enum purity {
@ -571,14 +574,14 @@ enum ret_style {
}
#[auto_serialize]
type method = {ident: ident, attrs: [attribute],
tps: [ty_param], decl: fn_decl, body: blk,
type method = {ident: ident, attrs: [attribute]/~,
tps: [ty_param]/~, decl: fn_decl, body: blk,
id: node_id, span: span, self_id: node_id,
vis: visibility}; // always public, unless it's a
// class method
#[auto_serialize]
type _mod = {view_items: [@view_item], items: [@item]};
type _mod = {view_items: [@view_item]/~, items: [@item]/~};
#[auto_serialize]
enum native_abi {
@ -589,14 +592,14 @@ enum native_abi {
#[auto_serialize]
type native_mod =
{view_items: [@view_item],
items: [@native_item]};
{view_items: [@view_item]/~,
items: [@native_item]/~};
#[auto_serialize]
type variant_arg = {ty: @ty, id: node_id};
#[auto_serialize]
type variant_ = {name: ident, attrs: [attribute], args: [variant_arg],
type variant_ = {name: ident, attrs: [attribute]/~, args: [variant_arg]/~,
id: node_id, disr_expr: option<@expr>, vis: visibility};
#[auto_serialize]
@ -625,18 +628,18 @@ enum view_path_ {
view_path_glob(@path, node_id),
// foo::bar::{a,b,c}
view_path_list(@path, [path_list_ident], node_id)
view_path_list(@path, [path_list_ident]/~, node_id)
}
#[auto_serialize]
type view_item = {node: view_item_, attrs: [attribute],
type view_item = {node: view_item_, attrs: [attribute]/~,
vis: visibility, span: span};
#[auto_serialize]
enum view_item_ {
view_item_use(ident, [@meta_item], node_id),
view_item_import([@view_path]),
view_item_export([@view_path])
view_item_use(ident, [@meta_item]/~, node_id),
view_item_import([@view_path]/~),
view_item_export([@view_path]/~)
}
// Meta-data associated with an item
@ -663,7 +666,7 @@ type iface_ref = {path: @path, id: node_id};
enum visibility { public, private }
#[auto_serialize]
type item = {ident: ident, attrs: [attribute],
type item = {ident: ident, attrs: [attribute]/~,
id: node_id, node: item_,
vis: visibility, span: span};
@ -676,23 +679,23 @@ enum region_param {
#[auto_serialize]
enum item_ {
item_const(@ty, @expr),
item_fn(fn_decl, [ty_param], blk),
item_fn(fn_decl, [ty_param]/~, blk),
item_mod(_mod),
item_native_mod(native_mod),
item_ty(@ty, [ty_param], region_param),
item_enum([variant], [ty_param], region_param),
item_class([ty_param], /* ty params for class */
[@iface_ref], /* ifaces this class implements */
[@class_member], /* methods, etc. */
item_ty(@ty, [ty_param]/~, region_param),
item_enum([variant]/~, [ty_param]/~, region_param),
item_class([ty_param]/~, /* ty params for class */
[@iface_ref]/~, /* ifaces this class implements */
[@class_member]/~, /* methods, etc. */
/* (not including ctor or dtor) */
class_ctor,
/* dtor is optional */
option<class_dtor>,
region_param
),
item_iface([ty_param], region_param, [ty_method]),
item_impl([ty_param], region_param, option<@iface_ref> /* iface */,
@ty /* self */, [@method]),
item_iface([ty_param]/~, region_param, [ty_method]/~),
item_impl([ty_param]/~, region_param, option<@iface_ref> /* iface */,
@ty /* self */, [@method]/~),
}
#[auto_serialize]
@ -727,14 +730,14 @@ type class_dtor_ = {id: node_id,
#[auto_serialize]
type native_item =
{ident: ident,
attrs: [attribute],
attrs: [attribute]/~,
node: native_item_,
id: node_id,
span: span};
#[auto_serialize]
enum native_item_ {
native_item_fn(fn_decl, [ty_param]),
native_item_fn(fn_decl, [ty_param]/~),
}
// The data we save and restore about an inlined item or method. This is not
@ -745,8 +748,8 @@ enum inlined_item {
ii_item(@item),
ii_method(def_id /* impl id */, @method),
ii_native(@native_item),
ii_ctor(class_ctor, ident, [ty_param], def_id /* parent id */),
ii_dtor(class_dtor, ident, [ty_param], def_id /* parent id */)
ii_ctor(class_ctor, ident, [ty_param]/~, def_id /* parent id */),
ii_dtor(class_dtor, ident, [ty_param]/~, def_id /* parent id */)
}
//

View file

@ -7,7 +7,7 @@ import ast_util::inlined_item_methods;
import diagnostic::span_handler;
enum path_elt { path_mod(ident), path_name(ident) }
type path = [path_elt];
type path = [path_elt]/~;
/* FIXMEs that say "bad" are as per #2543 */
fn path_to_str_with_sep(p: path, sep: str) -> str {
@ -45,9 +45,9 @@ enum ast_node {
node_local(uint),
// Constructor for a class
// def_id is parent id
node_ctor(ident, [ty_param], @class_ctor, def_id, @path),
node_ctor(ident, [ty_param]/~, @class_ctor, def_id, @path),
// Destructor for a class
node_dtor([ty_param], @class_dtor, def_id, @path),
node_dtor([ty_param]/~, @class_dtor, def_id, @path),
node_block(blk),
}
@ -57,7 +57,7 @@ type ctx = {map: map, mut path: path,
type vt = visit::vt<ctx>;
fn extend(cx: ctx, +elt: ident) -> @path {
@(cx.path + [path_name(elt)])
@(cx.path + [path_name(elt)]/~)
}
fn mk_ast_map_visitor() -> vt {
@ -75,7 +75,7 @@ fn mk_ast_map_visitor() -> vt {
fn map_crate(diag: span_handler, c: crate) -> map {
let cx = {map: std::map::int_hash(),
mut path: [],
mut path: []/~,
mut local_id: 0u,
diag: diag};
visit::visit_crate(c, cx, mk_ast_map_visitor());
@ -229,9 +229,9 @@ fn map_item(i: @item, cx: ctx, v: vt) {
}
alt i.node {
item_mod(_) | item_native_mod(_) {
cx.path += [path_mod(i.ident)];
cx.path += [path_mod(i.ident)]/~;
}
_ { cx.path += [path_name(i.ident)]; }
_ { cx.path += [path_name(i.ident)]/~; }
}
visit::visit_item(i, cx, v);
vec::pop(cx.path);

View file

@ -23,7 +23,7 @@ pure fn dummy_sp() -> span { ret mk_sp(0u, 0u); }
pure fn path_name(p: @path) -> str { path_name_i(p.idents) }
pure fn path_name_i(idents: [ident]) -> str {
pure fn path_name_i(idents: [ident]/~) -> str {
// FIXME: Bad copies (#2543 -- same for everything else that says "bad")
str::connect(idents.map({|i|*i}), "::")
}
@ -246,18 +246,19 @@ fn new_def_hash<V: copy>() -> std::map::hashmap<ast::def_id, V> {
}
fn block_from_expr(e: @expr) -> blk {
let blk_ = default_block([], option::some::<@expr>(e), e.id);
let blk_ = default_block([]/~, option::some::<@expr>(e), e.id);
ret {node: blk_, span: e.span};
}
fn default_block(+stmts1: [@stmt], expr1: option<@expr>, id1: node_id) ->
fn default_block(+stmts1: [@stmt]/~, expr1: option<@expr>, id1: node_id) ->
blk_ {
{view_items: [], stmts: stmts1, expr: expr1, id: id1, rules: default_blk}
{view_items: []/~, stmts: stmts1,
expr: expr1, id: id1, rules: default_blk}
}
fn ident_to_path(s: span, +i: ident) -> @path {
@{span: s, global: false, idents: [i],
rp: none, types: []}
@{span: s, global: false, idents: [i]/~,
rp: none, types: []/~}
}
pure fn is_unguarded(&&a: arm) -> bool {
@ -267,7 +268,7 @@ pure fn is_unguarded(&&a: arm) -> bool {
}
}
pure fn unguarded_pat(a: arm) -> option<[@pat]> {
pure fn unguarded_pat(a: arm) -> option<[@pat]/~> {
if is_unguarded(a) { some(/* FIXME (#2543) */ copy a.pats) } else { none }
}
@ -286,14 +287,14 @@ pure fn class_item_ident(ci: @class_member) -> ident {
type ivar = {ident: ident, ty: @ty, cm: class_mutability,
id: node_id, vis: visibility};
fn public_methods(ms: [@method]) -> [@method] {
fn public_methods(ms: [@method]/~) -> [@method]/~ {
vec::filter(ms, {|m| alt m.vis {
public { true }
_ { false }}})
}
fn split_class_items(cs: [@class_member]) -> ([ivar], [@method]) {
let mut vs = [], ms = [];
fn split_class_items(cs: [@class_member]/~) -> ([ivar]/~, [@method]/~) {
let mut vs = []/~, ms = []/~;
for cs.each {|c|
alt c.node {
instance_var(i, t, cm, id, vis) {
@ -301,9 +302,9 @@ fn split_class_items(cs: [@class_member]) -> ([ivar], [@method]) {
ty: t,
cm: cm,
id: id,
vis: vis}];
vis: vis}]/~;
}
class_method(m) { ms += [m]; }
class_method(m) { ms += [m]/~; }
}
};
(vs, ms)
@ -383,8 +384,8 @@ fn dtor_dec() -> fn_decl {
let nil_t = @{id: 0, node: ty_nil, span: dummy_sp()};
// dtor has one argument, of type ()
{inputs: [{mode: ast::expl(ast::by_ref),
ty: nil_t, ident: @"_", id: 0}],
output: nil_t, purity: impure_fn, cf: return_val, constraints: []}
ty: nil_t, ident: @"_", id: 0}]/~,
output: nil_t, purity: impure_fn, cf: return_val, constraints: []/~}
}
// ______________________________________________________________________
@ -471,7 +472,7 @@ fn id_visitor(vfn: fn@(node_id)) -> visit::vt<()> {
}
},
visit_ty_params: fn@(ps: [ty_param]) {
visit_ty_params: fn@(ps: [ty_param]/~) {
vec::iter(ps) {|p| vfn(p.id) }
},

View file

@ -56,7 +56,7 @@ fn mk_name_value_item(+name: ast::ident, +value: ast::lit)
ret @dummy_spanned(ast::meta_name_value(name, value));
}
fn mk_list_item(+name: ast::ident, +items: [@ast::meta_item]) ->
fn mk_list_item(+name: ast::ident, +items: [@ast::meta_item]/~) ->
@ast::meta_item {
ret @dummy_spanned(ast::meta_list(name, items));
}
@ -75,9 +75,9 @@ fn mk_attr(item: @ast::meta_item) -> ast::attribute {
fn attr_meta(attr: ast::attribute) -> @ast::meta_item { @attr.node.value }
// Get the meta_items from inside a vector of attributes
fn attr_metas(attrs: [ast::attribute]) -> [@ast::meta_item] {
let mut mitems = [];
for attrs.each {|a| mitems += [attr_meta(a)]; }
fn attr_metas(attrs: [ast::attribute]/~) -> [@ast::meta_item]/~ {
let mut mitems = []/~;
for attrs.each {|a| mitems += [attr_meta(a)]/~; }
ret mitems;
}
@ -118,7 +118,7 @@ fn get_meta_item_value_str(meta: @ast::meta_item) -> option<@str> {
}
#[doc = "Gets a list of inner meta items from a list meta_item type"]
fn get_meta_item_list(meta: @ast::meta_item) -> option<[@ast::meta_item]> {
fn get_meta_item_list(meta: @ast::meta_item) -> option<[@ast::meta_item]/~> {
alt meta.node {
ast::meta_list(_, l) { option::some(/* FIXME (#2543) */ copy l) }
_ { option::none }
@ -147,8 +147,8 @@ fn get_name_value_str_pair(
#[doc = "
Search a list of attributes and return only those with a specific name
"]
fn find_attrs_by_name(attrs: [ast::attribute], +name: str) ->
[ast::attribute] {
fn find_attrs_by_name(attrs: [ast::attribute]/~, +name: str) ->
[ast::attribute]/~ {
let filter = (
fn@(a: ast::attribute) -> option<ast::attribute> {
if *get_attr_name(a) == name {
@ -162,8 +162,8 @@ fn find_attrs_by_name(attrs: [ast::attribute], +name: str) ->
#[doc = "
Searcha list of meta items and return only those with a specific name
"]
fn find_meta_items_by_name(metas: [@ast::meta_item], +name: str) ->
[@ast::meta_item] {
fn find_meta_items_by_name(metas: [@ast::meta_item]/~, +name: str) ->
[@ast::meta_item]/~ {
let filter = fn@(&&m: @ast::meta_item) -> option<@ast::meta_item> {
if *get_meta_item_name(m) == name {
option::some(m)
@ -176,7 +176,7 @@ fn find_meta_items_by_name(metas: [@ast::meta_item], +name: str) ->
Returns true if a list of meta items contains another meta item. The
comparison is performed structurally.
"]
fn contains(haystack: [@ast::meta_item], needle: @ast::meta_item) -> bool {
fn contains(haystack: [@ast::meta_item]/~, needle: @ast::meta_item) -> bool {
#debug("looking for %s",
print::pprust::meta_item_to_str(*needle));
for haystack.each {|item|
@ -201,7 +201,7 @@ fn eq(a: @ast::meta_item, b: @ast::meta_item) -> bool {
}
ast::meta_list(na, la) {
// [Fixme-sorting]
// [Fixme-sorting]/~
// FIXME (#607): Needs implementing
// This involves probably sorting the list by name and
// meta_item variant
@ -210,16 +210,16 @@ fn eq(a: @ast::meta_item, b: @ast::meta_item) -> bool {
}
}
fn contains_name(metas: [@ast::meta_item], +name: str) -> bool {
fn contains_name(metas: [@ast::meta_item]/~, +name: str) -> bool {
let matches = find_meta_items_by_name(metas, name);
ret vec::len(matches) > 0u;
}
fn attrs_contains_name(attrs: [ast::attribute], +name: str) -> bool {
fn attrs_contains_name(attrs: [ast::attribute]/~, +name: str) -> bool {
vec::is_not_empty(find_attrs_by_name(attrs, name))
}
fn first_attr_value_str_by_name(attrs: [ast::attribute], +name: str)
fn first_attr_value_str_by_name(attrs: [ast::attribute]/~, +name: str)
-> option<@str> {
let mattrs = find_attrs_by_name(attrs, name);
if vec::len(mattrs) > 0u {
@ -229,7 +229,7 @@ fn first_attr_value_str_by_name(attrs: [ast::attribute], +name: str)
}
fn last_meta_item_by_name(
items: [@ast::meta_item],
items: [@ast::meta_item]/~,
+name: str
) -> option<@ast::meta_item> {
let items = attr::find_meta_items_by_name(items, name);
@ -237,7 +237,7 @@ fn last_meta_item_by_name(
}
fn last_meta_item_value_str_by_name(
items: [@ast::meta_item],
items: [@ast::meta_item]/~,
+name: str
) -> option<@str> {
alt last_meta_item_by_name(items, name) {
@ -252,9 +252,9 @@ fn last_meta_item_value_str_by_name(
}
fn last_meta_item_list_by_name(
items: [@ast::meta_item],
items: [@ast::meta_item]/~,
+name: str
) -> option<[@ast::meta_item]> {
) -> option<[@ast::meta_item]/~> {
alt last_meta_item_by_name(items, name) {
some(item) {
attr::get_meta_item_list(item)
@ -268,7 +268,7 @@ fn last_meta_item_list_by_name(
// FIXME (#607): This needs to sort by meta_item variant in addition to
// the item name (See [Fixme-sorting])
fn sort_meta_items(+items: [@ast::meta_item]) -> [@ast::meta_item] {
fn sort_meta_items(+items: [@ast::meta_item]/~) -> [@ast::meta_item]/~ {
fn lteq(&&ma: @ast::meta_item, &&mb: @ast::meta_item) -> bool {
fn key(m: @ast::meta_item) -> ast::ident {
alt m.node {
@ -281,13 +281,13 @@ fn sort_meta_items(+items: [@ast::meta_item]) -> [@ast::meta_item] {
}
// This is sort of stupid here, converting to a vec of mutables and back
let v: [mut @ast::meta_item] = vec::to_mut(items);
let v: [mut @ast::meta_item]/~ = vec::to_mut(items);
std::sort::quick_sort(lteq, v);
ret vec::from_mut(v);
}
fn remove_meta_items_by_name(items: [@ast::meta_item], name: ast::ident) ->
[@ast::meta_item] {
fn remove_meta_items_by_name(items: [@ast::meta_item]/~, name: ast::ident) ->
[@ast::meta_item]/~ {
ret vec::filter_map(items, {
|item|
@ -299,11 +299,11 @@ fn remove_meta_items_by_name(items: [@ast::meta_item], name: ast::ident) ->
});
}
fn find_linkage_attrs(attrs: [ast::attribute]) -> [ast::attribute] {
let mut found = [];
fn find_linkage_attrs(attrs: [ast::attribute]/~) -> [ast::attribute]/~ {
let mut found = []/~;
for find_attrs_by_name(attrs, "link").each {|attr|
alt attr.node.value.node {
ast::meta_list(_, _) { found += [attr] }
ast::meta_list(_, _) { found += [attr]/~ }
_ { #debug("ignoring link attribute that has incorrect type"); }
}
}
@ -314,7 +314,7 @@ fn find_linkage_attrs(attrs: [ast::attribute]) -> [ast::attribute] {
From a list of crate attributes get only the meta_items that impact crate
linkage
"]
fn find_linkage_metas(attrs: [ast::attribute]) -> [@ast::meta_item] {
fn find_linkage_metas(attrs: [ast::attribute]/~) -> [@ast::meta_item]/~ {
find_linkage_attrs(attrs).flat_map {|attr|
alt check attr.node.value.node {
ast::meta_list(_, items) { /* FIXME (#2543) */ copy items }
@ -322,7 +322,7 @@ fn find_linkage_metas(attrs: [ast::attribute]) -> [@ast::meta_item] {
}
}
fn native_abi(attrs: [ast::attribute]) -> either<str, ast::native_abi> {
fn native_abi(attrs: [ast::attribute]/~) -> either<str, ast::native_abi> {
ret alt attr::first_attr_value_str_by_name(attrs, "abi") {
option::none {
either::right(ast::native_abi_cdecl)
@ -349,7 +349,7 @@ enum inline_attr {
}
#[doc = "True if something like #[inline] is found in the list of attrs."]
fn find_inline_attr(attrs: [ast::attribute]) -> inline_attr {
fn find_inline_attr(attrs: [ast::attribute]/~) -> inline_attr {
// TODO---validate the usage of #[inline] and #[inline(always)]
vec::foldl(ia_none, attrs) {|ia,attr|
alt attr.node.value.node {
@ -368,7 +368,7 @@ fn find_inline_attr(attrs: [ast::attribute]) -> inline_attr {
fn require_unique_names(diagnostic: span_handler,
metas: [@ast::meta_item]) {
metas: [@ast::meta_item]/~) {
let map = map::str_hash();
for metas.each {|meta|
let name = get_meta_item_name(meta);

View file

@ -43,7 +43,7 @@ enum file_substr {
type filemap =
@{name: filename, substr: file_substr, src: @str,
start_pos: file_pos, mut lines: [file_pos]};
start_pos: file_pos, mut lines: [file_pos]/~};
type codemap = @{files: dvec<filemap>};
@ -57,7 +57,7 @@ fn new_filemap_w_substr(+filename: filename, +substr: file_substr,
-> filemap {
ret @{name: filename, substr: substr, src: src,
start_pos: {ch: start_pos_ch, byte: start_pos_byte},
mut lines: [{ch: start_pos_ch, byte: start_pos_byte}]};
mut lines: [{ch: start_pos_ch, byte: start_pos_byte}]/~};
}
fn new_filemap(+filename: filename, src: @str,
@ -74,7 +74,7 @@ fn mk_substr_filename(cm: codemap, sp: span) -> str
}
fn next_line(file: filemap, chpos: uint, byte_pos: uint) {
file.lines += [{ch: chpos, byte: byte_pos + file.start_pos.byte}];
file.lines += [{ch: chpos, byte: byte_pos + file.start_pos.byte}]/~;
}
type lookup_fn = pure fn(file_pos) -> uint;
@ -174,7 +174,7 @@ fn span_to_str(sp: span, cm: codemap) -> str {
lo.line, lo.col, hi.line, hi.col)
}
type file_lines = {file: filemap, lines: [uint]};
type file_lines = {file: filemap, lines: [uint]/~};
fn span_to_filename(sp: span, cm: codemap::codemap) -> filename {
let lo = lookup_char_pos(cm, sp.lo);
@ -184,8 +184,8 @@ fn span_to_filename(sp: span, cm: codemap::codemap) -> filename {
fn span_to_lines(sp: span, cm: codemap::codemap) -> @file_lines {
let lo = lookup_char_pos(cm, sp.lo);
let hi = lookup_char_pos(cm, sp.hi);
let mut lines = [];
for uint::range(lo.line - 1u, hi.line as uint) {|i| lines += [i]; };
let mut lines = []/~;
for uint::range(lo.line - 1u, hi.line as uint) {|i| lines += [i]/~; };
ret @{file: lo.file, lines: lines};
}

View file

@ -84,13 +84,13 @@ mod syntax {
export parse;
}
type ser_tps_map = map::hashmap<str, fn@(@ast::expr) -> [@ast::stmt]>;
type ser_tps_map = map::hashmap<str, fn@(@ast::expr) -> [@ast::stmt]/~>;
type deser_tps_map = map::hashmap<str, fn@() -> @ast::expr>;
fn expand(cx: ext_ctxt,
span: span,
_mitem: ast::meta_item,
in_items: [@ast::item]) -> [@ast::item] {
in_items: [@ast::item]/~) -> [@ast::item]/~ {
fn not_auto_serialize(a: ast::attribute) -> bool {
attr::get_attr_name(a) != @"auto_serialize"
}
@ -103,11 +103,11 @@ fn expand(cx: ext_ctxt,
vec::flat_map(in_items) {|in_item|
alt in_item.node {
ast::item_ty(ty, tps, _) {
[filter_attrs(in_item)] + ty_fns(cx, in_item.ident, ty, tps)
[filter_attrs(in_item)]/~ + ty_fns(cx, in_item.ident, ty, tps)
}
ast::item_enum(variants, tps, _) {
[filter_attrs(in_item)] + enum_fns(cx, in_item.ident,
[filter_attrs(in_item)]/~ + enum_fns(cx, in_item.ident,
in_item.span, variants, tps)
}
@ -115,7 +115,7 @@ fn expand(cx: ext_ctxt,
cx.span_err(span, "#[auto_serialize] can only be \
applied to type and enum \
definitions");
[in_item]
[in_item]/~
}
}
}
@ -126,26 +126,27 @@ impl helpers for ext_ctxt {
helper_name: str) -> @ast::path {
let head = vec::init(base_path.idents);
let tail = vec::last(base_path.idents);
self.path(base_path.span, head + [@(helper_name + "_" + *tail)])
self.path(base_path.span, head + [@(helper_name + "_" + *tail)]/~)
}
fn path(span: span, strs: [ast::ident]) -> @ast::path {
@{span: span, global: false, idents: strs, rp: none, types: []}
fn path(span: span, strs: [ast::ident]/~) -> @ast::path {
@{span: span, global: false, idents: strs, rp: none, types: []/~}
}
fn path_tps(span: span, strs: [ast::ident],
tps: [@ast::ty]) -> @ast::path {
fn path_tps(span: span, strs: [ast::ident]/~,
tps: [@ast::ty]/~) -> @ast::path {
@{span: span, global: false, idents: strs, rp: none, types: tps}
}
fn ty_path(span: span, strs: [ast::ident], tps: [@ast::ty]) -> @ast::ty {
fn ty_path(span: span, strs: [ast::ident]/~,
tps: [@ast::ty]/~) -> @ast::ty {
@{id: self.next_id(),
node: ast::ty_path(self.path_tps(span, strs, tps), self.next_id()),
span: span}
}
fn ty_fn(span: span,
-input_tys: [@ast::ty],
-input_tys: [@ast::ty]/~,
-output: @ast::ty) -> @ast::ty {
let args = vec::map(input_tys) {|ty|
{mode: ast::expl(ast::by_ref),
@ -159,7 +160,7 @@ impl helpers for ext_ctxt {
output: output,
purity: ast::impure_fn,
cf: ast::return_val,
constraints: []}),
constraints: []/~}),
span: span}
}
@ -172,11 +173,11 @@ impl helpers for ext_ctxt {
}
fn var_ref(span: span, name: ast::ident) -> @ast::expr {
self.expr(span, ast::expr_path(self.path(span, [name])))
self.expr(span, ast::expr_path(self.path(span, [name]/~)))
}
fn blk(span: span, stmts: [@ast::stmt]) -> ast::blk {
{node: {view_items: [],
fn blk(span: span, stmts: [@ast::stmt]/~) -> ast::blk {
{node: {view_items: []/~,
stmts: stmts,
expr: none,
id: self.next_id(),
@ -185,8 +186,8 @@ impl helpers for ext_ctxt {
}
fn expr_blk(expr: @ast::expr) -> ast::blk {
{node: {view_items: [],
stmts: [],
{node: {view_items: []/~,
stmts: []/~,
expr: some(expr),
id: self.next_id(),
rules: ast::default_blk},
@ -194,8 +195,8 @@ impl helpers for ext_ctxt {
}
fn binder_pat(span: span, nm: ast::ident) -> @ast::pat {
let path = @{span: span, global: false, idents: [nm],
rp: none, types: []};
let path = @{span: span, global: false, idents: [nm]/~,
rp: none, types: []/~};
@{id: self.next_id(),
node: ast::pat_ident(path, none),
span: span}
@ -206,7 +207,8 @@ impl helpers for ext_ctxt {
span: expr.span}
}
fn alt_stmt(arms: [ast::arm], span: span, -v: @ast::expr) -> @ast::stmt {
fn alt_stmt(arms: [ast::arm]/~,
span: span, -v: @ast::expr) -> @ast::stmt {
self.stmt(
self.expr(
span,
@ -277,7 +279,7 @@ impl helpers for ext_ctxt {
fn ser_path(cx: ext_ctxt, tps: ser_tps_map, path: @ast::path,
-s: @ast::expr, -v: @ast::expr)
-> [@ast::stmt] {
-> [@ast::stmt]/~ {
let ext_cx = cx; // required for #ast{}
// We want to take a path like a::b::c<...> and generate a call
@ -299,15 +301,15 @@ fn ser_path(cx: ext_ctxt, tps: ser_tps_map, path: @ast::path,
[cx.stmt(
cx.expr(
path.span,
ast::expr_call(callee, [s, v] + ty_args, false)))]
ast::expr_call(callee, [s, v]/~ + ty_args, false)))]/~
}
fn ser_variant(cx: ext_ctxt,
tps: ser_tps_map,
tys: [@ast::ty],
tys: [@ast::ty]/~,
span: span,
-s: @ast::expr,
pfn: fn([@ast::pat]) -> ast::pat_,
pfn: fn([@ast::pat]/~) -> ast::pat_,
bodyfn: fn(-@ast::expr, ast::blk) -> @ast::expr,
argfn: fn(-@ast::expr, uint, ast::blk) -> @ast::expr)
-> ast::arm {
@ -326,9 +328,9 @@ fn ser_variant(cx: ext_ctxt,
};
let body_blk = cx.blk(span, stmts);
let body = cx.blk(span, [cx.stmt(bodyfn(s, body_blk))]);
let body = cx.blk(span, [cx.stmt(bodyfn(s, body_blk))]/~);
{pats: [pat], guard: none, body: body}
{pats: [pat]/~, guard: none, body: body}
}
fn ser_lambda(cx: ext_ctxt, tps: ser_tps_map, ty: @ast::ty,
@ -338,34 +340,34 @@ fn ser_lambda(cx: ext_ctxt, tps: ser_tps_map, ty: @ast::ty,
fn ser_ty(cx: ext_ctxt, tps: ser_tps_map,
ty: @ast::ty, -s: @ast::expr, -v: @ast::expr)
-> [@ast::stmt] {
-> [@ast::stmt]/~ {
let ext_cx = cx; // required for #ast{}
alt ty.node {
ast::ty_nil {
[#ast[stmt]{$(s).emit_nil()}]
[#ast[stmt]{$(s).emit_nil()}]/~
}
ast::ty_bot {
cx.span_err(
ty.span, #fmt["Cannot serialize bottom type"]);
[]
[]/~
}
ast::ty_box(mt) {
let l = ser_lambda(cx, tps, mt.ty, cx.clone(s), #ast{ *$(v) });
[#ast(stmt){$(s).emit_box($(l));}]
[#ast(stmt){$(s).emit_box($(l));}]/~
}
ast::ty_uniq(mt) {
let l = ser_lambda(cx, tps, mt.ty, cx.clone(s), #ast{ *$(v) });
[#ast(stmt){$(s).emit_uniq($(l));}]
[#ast(stmt){$(s).emit_uniq($(l));}]/~
}
ast::ty_ptr(_) | ast::ty_rptr(_, _) {
cx.span_err(ty.span, "cannot serialize pointer types");
[]
[]/~
}
ast::ty_rec(flds) {
@ -374,7 +376,7 @@ fn ser_ty(cx: ext_ctxt, tps: ser_tps_map,
let vf = cx.expr(fld.span,
ast::expr_field(cx.clone(v),
fld.node.ident,
[]));
[]/~));
let s = cx.clone(s);
let f = cx.lit_str(fld.span, fld.node.ident);
let i = cx.lit_uint(fld.span, fidx);
@ -382,12 +384,12 @@ fn ser_ty(cx: ext_ctxt, tps: ser_tps_map,
#ast(stmt){$(s).emit_rec_field($(f), $(i), $(l));}
};
let fld_lambda = cx.lambda(cx.blk(ty.span, fld_stmts));
[#ast(stmt){$(s).emit_rec($(fld_lambda));}]
[#ast(stmt){$(s).emit_rec($(fld_lambda));}]/~
}
ast::ty_fn(_, _) {
cx.span_err(ty.span, "cannot serialize function types");
[]
[]/~
}
ast::ty_tup(tys) {
@ -420,8 +422,8 @@ fn ser_ty(cx: ext_ctxt, tps: ser_tps_map,
let body = cx.lambda(blk);
#ast{ $(s).emit_tup_elt($(idx), $(body)) }
})
];
[cx.alt_stmt(arms, ty.span, v)]
]/~;
[cx.alt_stmt(arms, ty.span, v)]/~
}
ast::ty_path(path, _) {
@ -444,12 +446,12 @@ fn ser_ty(cx: ext_ctxt, tps: ser_tps_map,
ast::ty_mac(_) {
cx.span_err(ty.span, "cannot serialize macro types");
[]
[]/~
}
ast::ty_infer {
cx.span_err(ty.span, "cannot serialize inferred types");
[]
[]/~
}
ast::ty_vstore(@{node: ast::ty_vec(mt),_}, ast::vstore_uniq) |
@ -467,7 +469,7 @@ fn ser_ty(cx: ext_ctxt, tps: ser_tps_map,
[#ast(stmt){
std::serialization::emit_from_vec($(s), $(v), {|__e| $(ser_e) })
}]
}]/~
}
ast::ty_vstore(_, _) {
@ -477,20 +479,21 @@ fn ser_ty(cx: ext_ctxt, tps: ser_tps_map,
}
}
fn mk_ser_fn(cx: ext_ctxt, span: span, name: ast::ident, tps: [ast::ty_param],
fn mk_ser_fn(cx: ext_ctxt, span: span, name: ast::ident,
tps: [ast::ty_param]/~,
f: fn(ext_ctxt, ser_tps_map,
-@ast::expr, -@ast::expr) -> [@ast::stmt])
-@ast::expr, -@ast::expr) -> [@ast::stmt]/~)
-> @ast::item {
let ext_cx = cx; // required for #ast
let tp_types = vec::map(tps, {|tp| cx.ty_path(span, [tp.ident], [])});
let v_ty = cx.ty_path(span, [name], tp_types);
let tp_types = vec::map(tps, {|tp| cx.ty_path(span, [tp.ident]/~, []/~)});
let v_ty = cx.ty_path(span, [name]/~, tp_types);
let tp_inputs =
vec::map(tps, {|tp|
{mode: ast::expl(ast::by_ref),
ty: cx.ty_fn(span,
[cx.ty_path(span, [tp.ident], [])],
[cx.ty_path(span, [tp.ident]/~, []/~)]/~,
cx.ty_nil(span)),
ident: @("__s" + *tp.ident),
id: cx.next_id()}});
@ -498,15 +501,15 @@ fn mk_ser_fn(cx: ext_ctxt, span: span, name: ast::ident, tps: [ast::ty_param],
#debug["tp_inputs = %?", tp_inputs];
let ser_inputs: [ast::arg] =
let ser_inputs: [ast::arg]/~ =
[{mode: ast::expl(ast::by_ref),
ty: cx.ty_path(span, [@"__S"], []),
ty: cx.ty_path(span, [@"__S"]/~, []/~),
ident: @"__s",
id: cx.next_id()},
{mode: ast::expl(ast::by_ref),
ty: v_ty,
ident: @"__v",
id: cx.next_id()}]
id: cx.next_id()}]/~
+ tp_inputs;
let tps_map = map::str_hash();
@ -514,22 +517,23 @@ fn mk_ser_fn(cx: ext_ctxt, span: span, name: ast::ident, tps: [ast::ty_param],
let arg_ident = arg.ident;
tps_map.insert(
*tp.ident,
fn@(v: @ast::expr) -> [@ast::stmt] {
fn@(v: @ast::expr) -> [@ast::stmt]/~ {
let f = cx.var_ref(span, arg_ident);
#debug["serializing type arg %s", *arg_ident];
[#ast(stmt){$(f)($(v));}]
[#ast(stmt){$(f)($(v));}]/~
});
}
let ser_bnds = @[
ast::bound_iface(cx.ty_path(span,
[@"std", @"serialization", @"serializer"],
[]))];
[@"std", @"serialization",
@"serializer"]/~,
[]/~))]/~;
let ser_tps: [ast::ty_param] =
let ser_tps: [ast::ty_param]/~ =
[{ident: @"__S",
id: cx.next_id(),
bounds: ser_bnds}] +
bounds: ser_bnds}]/~ +
vec::map(tps) {|tp| cx.clone_ty_param(tp) };
let ser_output: @ast::ty = @{id: cx.next_id(),
@ -540,13 +544,13 @@ fn mk_ser_fn(cx: ext_ctxt, span: span, name: ast::ident, tps: [ast::ty_param],
f(cx, tps_map, #ast{ __s }, #ast{ __v }));
@{ident: @("serialize_" + *name),
attrs: [],
attrs: []/~,
id: cx.next_id(),
node: ast::item_fn({inputs: ser_inputs,
output: ser_output,
purity: ast::impure_fn,
cf: ast::return_val,
constraints: []},
constraints: []/~},
ser_tps,
ser_blk),
vis: ast::public,
@ -571,7 +575,7 @@ fn deser_path(cx: ext_ctxt, tps: deser_tps_map, path: @ast::path,
cx.lambda(cx.expr_blk(dv_expr))
};
cx.expr(path.span, ast::expr_call(callee, [d] + ty_args, false))
cx.expr(path.span, ast::expr_call(callee, [d]/~ + ty_args, false))
}
fn deser_lambda(cx: ext_ctxt, tps: deser_tps_map, ty: @ast::ty,
@ -688,30 +692,30 @@ fn deser_ty(cx: ext_ctxt, tps: deser_tps_map,
}
fn mk_deser_fn(cx: ext_ctxt, span: span,
name: ast::ident, tps: [ast::ty_param],
name: ast::ident, tps: [ast::ty_param]/~,
f: fn(ext_ctxt, deser_tps_map, -@ast::expr) -> @ast::expr)
-> @ast::item {
let ext_cx = cx; // required for #ast
let tp_types = vec::map(tps, {|tp| cx.ty_path(span, [tp.ident], [])});
let v_ty = cx.ty_path(span, [name], tp_types);
let tp_types = vec::map(tps, {|tp| cx.ty_path(span, [tp.ident]/~, []/~)});
let v_ty = cx.ty_path(span, [name]/~, tp_types);
let tp_inputs =
vec::map(tps, {|tp|
{mode: ast::expl(ast::by_ref),
ty: cx.ty_fn(span,
[],
cx.ty_path(span, [tp.ident], [])),
[]/~,
cx.ty_path(span, [tp.ident]/~, []/~)),
ident: @("__d" + *tp.ident),
id: cx.next_id()}});
#debug["tp_inputs = %?", tp_inputs];
let deser_inputs: [ast::arg] =
let deser_inputs: [ast::arg]/~ =
[{mode: ast::expl(ast::by_ref),
ty: cx.ty_path(span, [@"__D"], []),
ty: cx.ty_path(span, [@"__D"]/~, []/~),
ident: @"__d",
id: cx.next_id()}]
id: cx.next_id()}]/~
+ tp_inputs;
let tps_map = map::str_hash();
@ -728,46 +732,47 @@ fn mk_deser_fn(cx: ext_ctxt, span: span,
let deser_bnds = @[
ast::bound_iface(cx.ty_path(
span,
[@"std", @"serialization", @"deserializer"],
[]))];
[@"std", @"serialization", @"deserializer"]/~,
[]/~))]/~;
let deser_tps: [ast::ty_param] =
let deser_tps: [ast::ty_param]/~ =
[{ident: @"__D",
id: cx.next_id(),
bounds: deser_bnds}] + vec::map(tps) {|tp|
bounds: deser_bnds}]/~ + vec::map(tps) {|tp|
let cloned = cx.clone_ty_param(tp);
{bounds: @(*cloned.bounds + [ast::bound_copy]) with cloned}
{bounds: @(*cloned.bounds + [ast::bound_copy]/~) with cloned}
};
let deser_blk = cx.expr_blk(f(cx, tps_map, #ast(expr){__d}));
@{ident: @("deserialize_" + *name),
attrs: [],
attrs: []/~,
id: cx.next_id(),
node: ast::item_fn({inputs: deser_inputs,
output: v_ty,
purity: ast::impure_fn,
cf: ast::return_val,
constraints: []},
constraints: []/~},
deser_tps,
deser_blk),
vis: ast::public,
span: span}
}
fn ty_fns(cx: ext_ctxt, name: ast::ident, ty: @ast::ty, tps: [ast::ty_param])
-> [@ast::item] {
fn ty_fns(cx: ext_ctxt, name: ast::ident,
ty: @ast::ty, tps: [ast::ty_param]/~)
-> [@ast::item]/~ {
let span = ty.span;
[
mk_ser_fn(cx, span, name, tps, {|a,b,c,d|ser_ty(a, b, ty, c, d)}),
mk_deser_fn(cx, span, name, tps, {|a,b,c|deser_ty(a, b, ty, c)})
]
]/~
}
fn ser_enum(cx: ext_ctxt, tps: ser_tps_map, e_name: ast::ident,
e_span: span, variants: [ast::variant],
-s: @ast::expr, -v: @ast::expr) -> [@ast::stmt] {
e_span: span, variants: [ast::variant]/~,
-s: @ast::expr, -v: @ast::expr) -> [@ast::stmt]/~ {
let ext_cx = cx;
let arms = vec::from_fn(vec::len(variants)) {|vidx|
let variant = variants[vidx];
@ -781,9 +786,9 @@ fn ser_enum(cx: ext_ctxt, tps: ser_tps_map, e_name: ast::ident,
// Generate pattern var(v1, v2, v3)
{|pats|
if vec::is_empty(pats) {
ast::pat_ident(cx.path(v_span, [v_name]), none)
ast::pat_ident(cx.path(v_span, [v_name]/~), none)
} else {
ast::pat_enum(cx.path(v_span, [v_name]), some(pats))
ast::pat_enum(cx.path(v_span, [v_name]/~), some(pats))
}
},
@ -809,16 +814,16 @@ fn ser_enum(cx: ext_ctxt, tps: ser_tps_map, e_name: ast::ident,
}
})
};
let lam = cx.lambda(cx.blk(e_span, [cx.alt_stmt(arms, e_span, v)]));
let lam = cx.lambda(cx.blk(e_span, [cx.alt_stmt(arms, e_span, v)]/~));
let e_name = cx.lit_str(e_span, e_name);
[#ast(stmt){ $(s).emit_enum($(e_name), $(lam)) }]
[#ast(stmt){ $(s).emit_enum($(e_name), $(lam)) }]/~
}
fn deser_enum(cx: ext_ctxt, tps: deser_tps_map, e_name: ast::ident,
e_span: span, variants: [ast::variant],
e_span: span, variants: [ast::variant]/~,
-d: @ast::expr) -> @ast::expr {
let ext_cx = cx;
let arms: [ast::arm] = vec::from_fn(vec::len(variants)) {|vidx|
let arms: [ast::arm]/~ = vec::from_fn(vec::len(variants)) {|vidx|
let variant = variants[vidx];
let v_span = variant.span;
let v_name = variant.node.name;
@ -843,7 +848,7 @@ fn deser_enum(cx: ext_ctxt, tps: deser_tps_map, e_name: ast::ident,
{pats: [@{id: cx.next_id(),
node: ast::pat_lit(cx.lit_uint(v_span, vidx)),
span: v_span}],
span: v_span}]/~,
guard: none,
body: cx.expr_blk(body)}
};
@ -859,12 +864,12 @@ fn deser_enum(cx: ext_ctxt, tps: deser_tps_map, e_name: ast::ident,
}
fn enum_fns(cx: ext_ctxt, e_name: ast::ident, e_span: span,
variants: [ast::variant], tps: [ast::ty_param])
-> [@ast::item] {
variants: [ast::variant]/~, tps: [ast::ty_param]/~)
-> [@ast::item]/~ {
[
mk_ser_fn(cx, e_span, e_name, tps,
{|a,b,c,d|ser_enum(a, b, e_name, e_span, variants, c, d)}),
mk_deser_fn(cx, e_span, e_name, tps,
{|a,b,c|deser_enum(a, b, e_name, e_span, variants, c)})
]
]/~
}

View file

@ -12,7 +12,7 @@ type macro_def = {ident: ast::ident, ext: syntax_extension};
type macro_definer =
fn@(ext_ctxt, span, ast::mac_arg, ast::mac_body) -> macro_def;
type item_decorator =
fn@(ext_ctxt, span, ast::meta_item, [@ast::item]) -> [@ast::item];
fn@(ext_ctxt, span, ast::meta_item, [@ast::item]/~) -> [@ast::item]/~;
type syntax_expander_tt = {expander: syntax_expander_tt_, span: option<span>};
type syntax_expander_tt_ = fn@(ext_ctxt, span, ast::token_tree) -> @ast::expr;
@ -72,7 +72,7 @@ iface ext_ctxt {
fn backtrace() -> expn_info;
fn mod_push(mod_name: ast::ident);
fn mod_pop();
fn mod_path() -> [ast::ident];
fn mod_path() -> [ast::ident]/~;
fn bt_push(ei: codemap::expn_info_);
fn bt_pop();
fn span_fatal(sp: span, msg: str) -> !;
@ -88,7 +88,7 @@ fn mk_ctxt(parse_sess: parse::parse_sess,
type ctxt_repr = {parse_sess: parse::parse_sess,
cfg: ast::crate_cfg,
mut backtrace: expn_info,
mut mod_path: [ast::ident]};
mut mod_path: [ast::ident]/~};
impl of ext_ctxt for ctxt_repr {
fn codemap() -> codemap { self.parse_sess.cm }
fn parse_sess() -> parse::parse_sess { self.parse_sess }
@ -97,7 +97,7 @@ fn mk_ctxt(parse_sess: parse::parse_sess,
fn backtrace() -> expn_info { self.backtrace }
fn mod_push(i: ast::ident) { vec::push(self.mod_path, i); }
fn mod_pop() { vec::pop(self.mod_path); }
fn mod_path() -> [ast::ident] { ret self.mod_path; }
fn mod_path() -> [ast::ident]/~ { ret self.mod_path; }
fn bt_push(ei: codemap::expn_info_) {
alt ei {
expanded_from({call_site: cs, callie: callie}) {
@ -145,7 +145,7 @@ fn mk_ctxt(parse_sess: parse::parse_sess,
parse_sess: parse_sess,
cfg: cfg,
mut backtrace: none,
mut mod_path: []
mut mod_path: []/~
};
ret imp as ext_ctxt
}
@ -185,12 +185,12 @@ fn make_new_expr(cx: ext_ctxt, sp: codemap::span, expr: ast::expr_) ->
}
fn get_mac_args_no_max(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
min: uint, name: str) -> [@ast::expr] {
min: uint, name: str) -> [@ast::expr]/~ {
ret get_mac_args(cx, sp, arg, min, none, name);
}
fn get_mac_args(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
min: uint, max: option<uint>, name: str) -> [@ast::expr] {
min: uint, max: option<uint>, name: str) -> [@ast::expr]/~ {
alt arg {
some(expr) {
alt expr.node {

View file

@ -28,35 +28,35 @@ fn mk_unary(cx: ext_ctxt, sp: span, op: ast::unop, e: @ast::expr)
let expr = ast::expr_unary(op, e);
ret @{id: cx.next_id(), node: expr, span: sp};
}
fn mk_path(cx: ext_ctxt, sp: span, idents: [ast::ident]) ->
fn mk_path(cx: ext_ctxt, sp: span, idents: [ast::ident]/~) ->
@ast::expr {
let path = @{span: sp, global: false, idents: idents,
rp: none, types: []};
rp: none, types: []/~};
let pathexpr = ast::expr_path(path);
ret @{id: cx.next_id(), node: pathexpr, span: sp};
}
fn mk_access_(cx: ext_ctxt, sp: span, p: @ast::expr, m: ast::ident)
-> @ast::expr {
let expr = ast::expr_field(p, m, []);
let expr = ast::expr_field(p, m, []/~);
ret @{id: cx.next_id(), node: expr, span: sp};
}
fn mk_access(cx: ext_ctxt, sp: span, p: [ast::ident], m: ast::ident)
fn mk_access(cx: ext_ctxt, sp: span, p: [ast::ident]/~, m: ast::ident)
-> @ast::expr {
let pathexpr = mk_path(cx, sp, p);
ret mk_access_(cx, sp, pathexpr, m);
}
fn mk_call_(cx: ext_ctxt, sp: span, fn_expr: @ast::expr,
args: [@ast::expr]) -> @ast::expr {
args: [@ast::expr]/~) -> @ast::expr {
let callexpr = ast::expr_call(fn_expr, args, false);
ret @{id: cx.next_id(), node: callexpr, span: sp};
}
fn mk_call(cx: ext_ctxt, sp: span, fn_path: [ast::ident],
args: [@ast::expr]) -> @ast::expr {
fn mk_call(cx: ext_ctxt, sp: span, fn_path: [ast::ident]/~,
args: [@ast::expr]/~) -> @ast::expr {
let pathexpr = mk_path(cx, sp, fn_path);
ret mk_call_(cx, sp, pathexpr, args);
}
// e = expr, t = type
fn mk_vec_e(cx: ext_ctxt, sp: span, exprs: [@ast::expr]) ->
fn mk_vec_e(cx: ext_ctxt, sp: span, exprs: [@ast::expr]/~) ->
@ast::expr {
let vecexpr = ast::expr_vec(exprs, ast::m_imm);
ret @{id: cx.next_id(), node: vecexpr, span: sp};
@ -72,15 +72,15 @@ fn mk_uniq_vec_e(cx: ext_ctxt, sp: span, exprs: [@ast::expr]/~) ->
}
fn mk_rec_e(cx: ext_ctxt, sp: span,
fields: [{ident: ast::ident, ex: @ast::expr}]) ->
fields: [{ident: ast::ident, ex: @ast::expr}]/~) ->
@ast::expr {
let mut astfields: [ast::field] = [];
let mut astfields: [ast::field]/~ = []/~;
for fields.each {|field|
let ident = field.ident;
let val = field.ex;
let astfield =
{node: {mutbl: ast::m_imm, ident: ident, expr: val}, span: sp};
astfields += [astfield];
astfields += [astfield]/~;
}
let recexpr = ast::expr_rec(astfields, option::none::<@ast::expr>);
ret @{id: cx.next_id(), node: recexpr, span: sp};

View file

@ -9,7 +9,7 @@ fn expand_syntax_ext(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg,
}
ret @{id: cx.next_id(),
node: ast::expr_path(@{span: sp, global: false, idents: [@res],
rp: none, types: []}),
node: ast::expr_path(@{span: sp, global: false, idents: [@res]/~,
rp: none, types: []/~}),
span: sp};
}

View file

@ -32,11 +32,11 @@ fn is_some(&&mpu: matcher_pos_up) -> bool {
}
type matcher_pos = ~{
elts: [ast::matcher], // maybe should be /& ? Need to understand regions.
elts: [ast::matcher]/~, // maybe should be /&? Need to understand regions.
sep: option<token>,
mut idx: uint,
mut up: matcher_pos_up, // mutable for swapping only
matches: [dvec<@arb_depth>]
matches: [dvec<@arb_depth>]/~
};
fn copy_up(&& mpu: matcher_pos_up) -> matcher_pos {
@ -55,26 +55,26 @@ fn count_names(ms: [matcher]/&) -> uint {
}})
}
fn new_matcher_pos(ms: [matcher], sep: option<token>) -> matcher_pos {
fn new_matcher_pos(ms: [matcher]/~, sep: option<token>) -> matcher_pos {
~{elts: ms, sep: sep, mut idx: 0u, mut up: matcher_pos_up(none),
matches: copy vec::from_fn(count_names(ms), {|_i| dvec::dvec()}) }
}
/* logically, an arb_depth should contain only one kind of nonterminal */
enum arb_depth { leaf(whole_nt), seq([@arb_depth]) }
enum arb_depth { leaf(whole_nt), seq([@arb_depth]/~) }
type earley_item = matcher_pos;
fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: [matcher])
-> [@arb_depth] {
let mut cur_eis = [];
fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: [matcher]/~)
-> [@arb_depth]/~ {
let mut cur_eis = []/~;
vec::push(cur_eis, new_matcher_pos(ms, none));
loop {
let mut bb_eis = []; // black-box parsed by parser.rs
let mut next_eis = []; // or proceed normally
let mut eof_eis = [];
let mut bb_eis = []/~; // black-box parsed by parser.rs
let mut next_eis = []/~; // or proceed normally
let mut eof_eis = []/~;
let {tok: tok, sp: _} = rdr.peek();
@ -218,12 +218,12 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: [matcher])
fn parse_nt(p: parser, name: str) -> whole_nt {
alt name {
"item" { alt p.parse_item([], ast::public) {
"item" { alt p.parse_item([]/~, ast::public) {
some(i) { token::w_item(i) }
none { p.fatal("expected an item keyword") }
}}
"block" { token::w_block(p.parse_block()) }
"stmt" { token::w_stmt(p.parse_stmt([])) }
"stmt" { token::w_stmt(p.parse_stmt([]/~)) }
"pat" { token::w_pat(p.parse_pat()) }
"expr" { token::w_expr(p.parse_expr()) }
"ty" { token::w_ty(p.parse_ty(false /* no need to disambiguate*/)) }

View file

@ -45,7 +45,7 @@ fn expand_expr(exts: hashmap<str, syntax_extension>, cx: ext_ctxt,
some(macro_defining(ext)) {
let named_extension = ext(cx, pth.span, args, body);
exts.insert(*named_extension.ident, named_extension.ext);
(ast::expr_rec([], none), s)
(ast::expr_rec([]/~, none), s)
}
some(normal_tt(_)) {
cx.span_fatal(pth.span,
@ -101,7 +101,7 @@ fn expand_mod_items(exts: hashmap<str, syntax_extension>, cx: ext_ctxt,
// decorated with "item decorators", then use that function to transform
// the item into a new set of items.
let new_items = vec::flat_map(module.items) {|item|
vec::foldr(item.attrs, [item]) {|attr, items|
vec::foldr(item.attrs, [item]/~) {|attr, items|
let mname = alt attr.node.value.node {
ast::meta_word(n) { n }
ast::meta_name_value(n, _) { n }

View file

@ -34,10 +34,11 @@ fn expand_syntax_ext(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
// probably be factored out in common with other code that builds
// expressions. Also: Cleanup the naming of these functions.
// NOTE: Moved many of the common ones to build.rs --kevina
fn pieces_to_expr(cx: ext_ctxt, sp: span, pieces: [piece], args: [@ast::expr])
fn pieces_to_expr(cx: ext_ctxt, sp: span,
pieces: [piece]/~, args: [@ast::expr]/~)
-> @ast::expr {
fn make_path_vec(_cx: ext_ctxt, ident: ast::ident) -> [ast::ident] {
ret [@"extfmt", @"rt", ident];
fn make_path_vec(_cx: ext_ctxt, ident: ast::ident) -> [ast::ident]/~ {
ret [@"extfmt", @"rt", ident]/~;
}
fn make_rt_path_expr(cx: ext_ctxt, sp: span,
ident: ast::ident) -> @ast::expr {
@ -48,8 +49,8 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, pieces: [piece], args: [@ast::expr])
// which tells the RT::conv* functions how to perform the conversion
fn make_rt_conv_expr(cx: ext_ctxt, sp: span, cnv: conv) -> @ast::expr {
fn make_flags(cx: ext_ctxt, sp: span, flags: [flag]) -> @ast::expr {
let mut flagexprs: [@ast::expr] = [];
fn make_flags(cx: ext_ctxt, sp: span, flags: [flag]/~) -> @ast::expr {
let mut flagexprs: [@ast::expr]/~ = []/~;
for flags.each {|f|
let mut fstr;
alt f {
@ -59,7 +60,7 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, pieces: [piece], args: [@ast::expr])
flag_sign_always { fstr = "flag_sign_always"; }
flag_alternate { fstr = "flag_alternate"; }
}
flagexprs += [make_rt_path_expr(cx, sp, @fstr)];
flagexprs += [make_rt_path_expr(cx, sp, @fstr)]/~;
}
ret mk_uniq_vec_e(cx, sp, flagexprs);
}
@ -71,7 +72,7 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, pieces: [piece], args: [@ast::expr])
count_is(c) {
let count_lit = mk_int(cx, sp, c);
let count_is_path = make_path_vec(cx, @"count_is");
let count_is_args = [count_lit];
let count_is_args = [count_lit]/~;
ret mk_call(cx, sp, count_is_path, count_is_args);
}
_ { cx.span_unimpl(sp, "unimplemented #fmt conversion"); }
@ -99,7 +100,7 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, pieces: [piece], args: [@ast::expr])
[{ident: @"flags", ex: flags_expr},
{ident: @"width", ex: width_expr},
{ident: @"precision", ex: precision_expr},
{ident: @"ty", ex: ty_expr}]);
{ident: @"ty", ex: ty_expr}]/~);
}
let rt_conv_flags = make_flags(cx, sp, cnv.flags);
let rt_conv_width = make_count(cx, sp, cnv.width);
@ -113,7 +114,7 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, pieces: [piece], args: [@ast::expr])
let fname = "conv_" + conv_type;
let path = make_path_vec(cx, @fname);
let cnv_expr = make_rt_conv_expr(cx, sp, cnv);
let args = [cnv_expr, arg];
let args = [cnv_expr, arg]/~;
ret mk_call(cx, arg.span, path, args);
}
fn make_new_conv(cx: ext_ctxt, sp: span, cnv: conv, arg: @ast::expr) ->

View file

@ -11,5 +11,6 @@ fn expand_syntax_ext(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg,
);
//trivial expression
ret @{id: cx.next_id(), node: ast::expr_rec([], option::none), span: sp};
ret @{id: cx.next_id(), node: ast::expr_rec([]/~, option::none),
span: sp};
}

View file

@ -35,7 +35,7 @@ impl of qq_helper for @ast::crate {
fn visit(cx: aq_ctxt, v: vt<aq_ctxt>) {visit_crate(*self, cx, v);}
fn extract_mac() -> option<ast::mac_> {fail}
fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
mk_path(cx, sp, [@"syntax", @"ext", @"qquote", @"parse_crate"])
mk_path(cx, sp, [@"syntax", @"ext", @"qquote", @"parse_crate"]/~)
}
fn get_fold_fn() -> str {"fold_crate"}
}
@ -49,7 +49,7 @@ impl of qq_helper for @ast::expr {
}
}
fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
mk_path(cx, sp, [@"syntax", @"ext", @"qquote", @"parse_expr"])
mk_path(cx, sp, [@"syntax", @"ext", @"qquote", @"parse_expr"]/~)
}
fn get_fold_fn() -> str {"fold_expr"}
}
@ -63,7 +63,7 @@ impl of qq_helper for @ast::ty {
}
}
fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
mk_path(cx, sp, [@"syntax", @"ext", @"qquote", @"parse_ty"])
mk_path(cx, sp, [@"syntax", @"ext", @"qquote", @"parse_ty"]/~)
}
fn get_fold_fn() -> str {"fold_ty"}
}
@ -72,7 +72,7 @@ impl of qq_helper for @ast::item {
fn visit(cx: aq_ctxt, v: vt<aq_ctxt>) {visit_item(self, cx, v);}
fn extract_mac() -> option<ast::mac_> {fail}
fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
mk_path(cx, sp, [@"syntax", @"ext", @"qquote", @"parse_item"])
mk_path(cx, sp, [@"syntax", @"ext", @"qquote", @"parse_item"]/~)
}
fn get_fold_fn() -> str {"fold_item"}
}
@ -81,7 +81,7 @@ impl of qq_helper for @ast::stmt {
fn visit(cx: aq_ctxt, v: vt<aq_ctxt>) {visit_stmt(self, cx, v);}
fn extract_mac() -> option<ast::mac_> {fail}
fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
mk_path(cx, sp, [@"syntax", @"ext", @"qquote", @"parse_stmt"])
mk_path(cx, sp, [@"syntax", @"ext", @"qquote", @"parse_stmt"]/~)
}
fn get_fold_fn() -> str {"fold_stmt"}
}
@ -90,7 +90,7 @@ impl of qq_helper for @ast::pat {
fn visit(cx: aq_ctxt, v: vt<aq_ctxt>) {visit_pat(self, cx, v);}
fn extract_mac() -> option<ast::mac_> {fail}
fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
mk_path(cx, sp, [@"syntax", @"ext", @"qquote", @"parse_pat"])
mk_path(cx, sp, [@"syntax", @"ext", @"qquote", @"parse_pat"]/~)
}
fn get_fold_fn() -> str {"fold_pat"}
}
@ -133,12 +133,12 @@ fn expand_ast(ecx: ext_ctxt, _sp: span,
{
let mut what = "expr";
option::iter(arg) {|arg|
let args: [@ast::expr] =
let args: [@ast::expr]/~ =
alt arg.node {
ast::expr_vec(elts, _) { elts }
_ {
ecx.span_fatal
(_sp, "#ast requires arguments of the form `[...]`.")
(_sp, "#ast requires arguments of the form `[...]/~`.")
}
};
if vec::len::<@ast::expr>(args) != 1u {
@ -163,14 +163,14 @@ fn expand_ast(ecx: ext_ctxt, _sp: span,
};
}
fn parse_crate(p: parser) -> @ast::crate { p.parse_crate_mod([]) }
fn parse_crate(p: parser) -> @ast::crate { p.parse_crate_mod([]/~) }
fn parse_ty(p: parser) -> @ast::ty { p.parse_ty(false) }
fn parse_stmt(p: parser) -> @ast::stmt { p.parse_stmt([]) }
fn parse_stmt(p: parser) -> @ast::stmt { p.parse_stmt([]/~) }
fn parse_expr(p: parser) -> @ast::expr { p.parse_expr() }
fn parse_pat(p: parser) -> @ast::pat { p.parse_pat() }
fn parse_item(p: parser) -> @ast::item {
alt p.parse_item([], ast::public) {
alt p.parse_item([]/~, ast::public) {
some(item) { item }
none { fail "parse_item: parsing an item failed"; }
}
@ -230,47 +230,48 @@ fn finish<T: qq_helper>
let cx = ecx;
let cfg_call = {||
mk_call_(cx, sp, mk_access(cx, sp, [@"ext_cx"], @"cfg"), [])
mk_call_(cx, sp, mk_access(cx, sp, [@"ext_cx"]/~, @"cfg"), []/~)
};
let parse_sess_call = {||
mk_call_(cx, sp, mk_access(cx, sp, [@"ext_cx"], @"parse_sess"), [])
mk_call_(cx, sp,
mk_access(cx, sp, [@"ext_cx"]/~, @"parse_sess"), []/~)
};
let pcall = mk_call(cx,sp,
[@"syntax", @"parse", @"parser",
@"parse_from_source_str"],
@"parse_from_source_str"]/~,
[node.mk_parse_fn(cx,sp),
mk_str(cx,sp, fname),
mk_call(cx,sp,
[@"syntax",@"ext",
@"qquote", @"mk_file_substr"],
@"qquote", @"mk_file_substr"]/~,
[mk_str(cx,sp, loc.file.name),
mk_uint(cx,sp, loc.line),
mk_uint(cx,sp, loc.col)]),
mk_uint(cx,sp, loc.col)]/~),
mk_unary(cx,sp, ast::box(ast::m_imm),
mk_str(cx,sp, str2)),
cfg_call(),
parse_sess_call()]
parse_sess_call()]/~
);
let mut rcall = pcall;
if (g_len > 0u) {
rcall = mk_call(cx,sp,
[@"syntax", @"ext", @"qquote", @"replace"],
[@"syntax", @"ext", @"qquote", @"replace"]/~,
[pcall,
mk_uniq_vec_e(cx,sp, qcx.gather.map_to_vec {|g|
mk_call(cx,sp,
[@"syntax", @"ext",
@"qquote", @g.constr],
[g.e])}),
@"qquote", @g.constr]/~,
[g.e]/~)}),
mk_path(cx,sp,
[@"syntax", @"ext", @"qquote",
@node.get_fold_fn()])]);
@node.get_fold_fn()]/~)]/~);
}
ret rcall;
}
fn replace<T>(node: T, repls: [fragment], ff: fn (ast_fold, T) -> T)
fn replace<T>(node: T, repls: [fragment]/~, ff: fn (ast_fold, T) -> T)
-> T
{
let aft = default_ast_fold();
@ -290,7 +291,7 @@ fn fold_item(f: ast_fold, &&n: @ast::item) -> @ast::item {f.fold_item(n)}
fn fold_stmt(f: ast_fold, &&n: @ast::stmt) -> @ast::stmt {f.fold_stmt(n)}
fn fold_pat(f: ast_fold, &&n: @ast::pat) -> @ast::pat {f.fold_pat(n)}
fn replace_expr(repls: [fragment],
fn replace_expr(repls: [fragment]/~,
e: ast::expr_, s: span, fld: ast_fold,
orig: fn@(ast::expr_, span, ast_fold)->(ast::expr_, span))
-> (ast::expr_, span)
@ -304,7 +305,7 @@ fn replace_expr(repls: [fragment],
}
}
fn replace_ty(repls: [fragment],
fn replace_ty(repls: [fragment]/~,
e: ast::ty_, s: span, fld: ast_fold,
orig: fn@(ast::ty_, span, ast_fold)->(ast::ty_, span))
-> (ast::ty_, span)

View file

@ -22,7 +22,7 @@ fn path_to_ident(pth: @path) -> option<ident> {
type clause = {params: binders, body: @expr};
/* logically, an arb_depth should contain only one kind of matchable */
enum arb_depth<T> { leaf(T), seq(@[arb_depth<T>], span), }
enum arb_depth<T> { leaf(T), seq(@[arb_depth<T>]/~, span), }
enum matchable {
@ -70,8 +70,8 @@ fn match_error(cx: ext_ctxt, m: matchable, expected: str) -> ! {
type match_result = option<arb_depth<matchable>>;
type selector = fn@(matchable) -> match_result;
fn elts_to_ell(cx: ext_ctxt, elts: [@expr]) ->
{pre: [@expr], rep: option<@expr>, post: [@expr]} {
fn elts_to_ell(cx: ext_ctxt, elts: [@expr]/~) ->
{pre: [@expr]/~, rep: option<@expr>, post: [@expr]/~} {
let mut idx: uint = 0u;
let mut res = none;
for elts.each {|elt|
@ -96,15 +96,15 @@ fn elts_to_ell(cx: ext_ctxt, elts: [@expr]) ->
}
ret alt res {
some(val) { val }
none { {pre: elts, rep: none, post: []} }
none { {pre: elts, rep: none, post: []/~} }
}
}
fn option_flatten_map<T: copy, U: copy>(f: fn@(T) -> option<U>, v: [T]) ->
option<[U]> {
let mut res = [];
fn option_flatten_map<T: copy, U: copy>(f: fn@(T) -> option<U>, v: [T]/~) ->
option<[U]/~> {
let mut res = []/~;
for v.each {|elem|
alt f(elem) { none { ret none; } some(fv) { res += [fv]; } }
alt f(elem) { none { ret none; } some(fv) { res += [fv]/~; } }
}
ret some(res);
}
@ -182,7 +182,7 @@ fn use_selectors_to_bind(b: binders, e: @expr) -> option<bindings> {
/* use the bindings on the body to generate the expanded code */
fn transcribe(cx: ext_ctxt, b: bindings, body: @expr) -> @expr {
let idx_path: @mut [uint] = @mut [];
let idx_path: @mut [uint]/~ = @mut []/~;
fn new_id(_old: node_id, cx: ext_ctxt) -> node_id { ret cx.next_id(); }
fn new_span(cx: ext_ctxt, sp: span) -> span {
/* this discards information in the case of macro-defining macros */
@ -214,7 +214,7 @@ fn transcribe(cx: ext_ctxt, b: bindings, body: @expr) -> @expr {
/* helper: descend into a matcher */
fn follow(m: arb_depth<matchable>, idx_path: @mut [uint]) ->
fn follow(m: arb_depth<matchable>, idx_path: @mut [uint]/~) ->
arb_depth<matchable> {
let mut res: arb_depth<matchable> = m;
for vec::each(*idx_path) {|idx|
@ -227,7 +227,7 @@ fn follow(m: arb_depth<matchable>, idx_path: @mut [uint]) ->
}
fn follow_for_trans(cx: ext_ctxt, mmaybe: option<arb_depth<matchable>>,
idx_path: @mut [uint]) -> option<matchable> {
idx_path: @mut [uint]/~) -> option<matchable> {
alt mmaybe {
none { ret none }
some(m) {
@ -264,8 +264,9 @@ fn free_vars(b: bindings, e: @expr, it: fn(ident)) {
/* handle sequences (anywhere in the AST) of exprs, either real or ...ed */
fn transcribe_exprs(cx: ext_ctxt, b: bindings, idx_path: @mut [uint],
recur: fn@(&&@expr) -> @expr, exprs: [@expr]) -> [@expr] {
fn transcribe_exprs(cx: ext_ctxt, b: bindings, idx_path: @mut [uint]/~,
recur: fn@(&&@expr) -> @expr,
exprs: [@expr]/~) -> [@expr]/~ {
alt elts_to_ell(cx, exprs) {
{pre: pre, rep: repeat_me_maybe, post: post} {
let mut res = vec::map(pre, recur);
@ -308,8 +309,8 @@ fn transcribe_exprs(cx: ext_ctxt, b: bindings, idx_path: @mut [uint],
/* Whew, we now know how how many times to repeat */
let mut idx: uint = 0u;
while idx < rc {
*idx_path += [idx];
res += [recur(repeat_me)]; // whew!
*idx_path += [idx]/~;
res += [recur(repeat_me)]/~; // whew!
vec::pop(*idx_path);
idx += 1u;
}
@ -326,7 +327,7 @@ fn transcribe_exprs(cx: ext_ctxt, b: bindings, idx_path: @mut [uint],
// substitute, in a position that's required to be an ident
fn transcribe_ident(cx: ext_ctxt, b: bindings, idx_path: @mut [uint],
fn transcribe_ident(cx: ext_ctxt, b: bindings, idx_path: @mut [uint]/~,
&&i: ident, _fld: ast_fold) -> ident {
ret alt follow_for_trans(cx, b.find(i), idx_path) {
some(match_ident(a_id)) { a_id.node }
@ -336,14 +337,14 @@ fn transcribe_ident(cx: ext_ctxt, b: bindings, idx_path: @mut [uint],
}
fn transcribe_path(cx: ext_ctxt, b: bindings, idx_path: @mut [uint],
fn transcribe_path(cx: ext_ctxt, b: bindings, idx_path: @mut [uint]/~,
p: path, _fld: ast_fold) -> path {
// Don't substitute into qualified names.
if vec::len(p.types) > 0u || vec::len(p.idents) != 1u { ret p; }
alt follow_for_trans(cx, b.find(p.idents[0]), idx_path) {
some(match_ident(id)) {
{span: id.span, global: false, idents: [id.node],
rp: none, types: []}
{span: id.span, global: false, idents: [id.node]/~,
rp: none, types: []/~}
}
some(match_path(a_pth)) { *a_pth }
some(m) { match_error(cx, m, "a path") }
@ -352,7 +353,7 @@ fn transcribe_path(cx: ext_ctxt, b: bindings, idx_path: @mut [uint],
}
fn transcribe_expr(cx: ext_ctxt, b: bindings, idx_path: @mut [uint],
fn transcribe_expr(cx: ext_ctxt, b: bindings, idx_path: @mut [uint]/~,
e: ast::expr_, s: span, fld: ast_fold,
orig: fn@(ast::expr_, span, ast_fold)->(ast::expr_, span))
-> (ast::expr_, span)
@ -367,9 +368,9 @@ fn transcribe_expr(cx: ext_ctxt, b: bindings, idx_path: @mut [uint],
some(match_ident(id)) {
(expr_path(@{span: id.span,
global: false,
idents: [id.node],
idents: [id.node]/~,
rp: none,
types: []}), id.span)
types: []/~}), id.span)
}
some(match_path(a_pth)) { (expr_path(a_pth), s) }
some(match_expr(a_exp)) { (a_exp.node, a_exp.span) }
@ -381,7 +382,7 @@ fn transcribe_expr(cx: ext_ctxt, b: bindings, idx_path: @mut [uint],
}
}
fn transcribe_type(cx: ext_ctxt, b: bindings, idx_path: @mut [uint],
fn transcribe_type(cx: ext_ctxt, b: bindings, idx_path: @mut [uint]/~,
t: ast::ty_, s: span, fld: ast_fold,
orig: fn@(ast::ty_, span, ast_fold) -> (ast::ty_, span))
-> (ast::ty_, span)
@ -407,7 +408,7 @@ fn transcribe_type(cx: ext_ctxt, b: bindings, idx_path: @mut [uint],
/* for parsing reasons, syntax variables bound to blocks must be used like
`{v}` */
fn transcribe_block(cx: ext_ctxt, b: bindings, idx_path: @mut [uint],
fn transcribe_block(cx: ext_ctxt, b: bindings, idx_path: @mut [uint]/~,
blk: blk_, s: span, fld: ast_fold,
orig: fn@(blk_, span, ast_fold) -> (blk_, span))
-> (blk_, span)
@ -458,7 +459,7 @@ fn p_t_s_rec(cx: ext_ctxt, m: matchable, s: selector, b: binders) {
}
}
{pre: pre, rep: none, post: post} {
if post != [] {
if post != []/~ {
cx.bug("elts_to_ell provided an invalid result");
}
p_t_s_r_length(cx, vec::len(pre), false, s, b);
@ -606,10 +607,10 @@ fn p_t_s_r_ellipses(cx: ext_ctxt, repeat_me: @expr, offset: uint, s: selector,
match_expr(e) {
alt e.node {
expr_vec(arg_elts, _) {
let mut elts = [];
let mut elts = []/~;
let mut idx = offset;
while idx < vec::len(arg_elts) {
elts += [leaf(match_expr(arg_elts[idx]))];
vec::push(elts, leaf(match_expr(arg_elts[idx])));
idx += 1u;
}
@ -651,7 +652,7 @@ fn p_t_s_r_length(cx: ext_ctxt, len: uint, at_least: bool, s: selector,
compose_sels(s, {|x|len_select(cx, x, at_least, len)}));
}
fn p_t_s_r_actual_vector(cx: ext_ctxt, elts: [@expr], _repeat_after: bool,
fn p_t_s_r_actual_vector(cx: ext_ctxt, elts: [@expr]/~, _repeat_after: bool,
s: selector, b: binders) {
let mut idx: uint = 0u;
while idx < vec::len(elts) {
@ -679,14 +680,14 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
let args = get_mac_args_no_max(cx, sp, arg, 0u, "macro");
let mut macro_name: option<@str> = none;
let mut clauses: [@clause] = [];
let mut clauses: [@clause]/~ = []/~;
for args.each {|arg|
alt arg.node {
expr_vec(elts, mutbl) {
if vec::len(elts) != 2u {
cx.span_fatal((*arg).span,
"extension clause must consist of [" +
"macro invocation, expansion body]");
"macro invocation, expansion body]/~");
}
@ -719,7 +720,7 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
};
clauses +=
[@{params: pattern_to_selectors(cx, arg),
body: elts[1u]}];
body: elts[1u]}]/~;
// FIXME (#2251): check duplicates (or just simplify
// the macro arg situation)
@ -739,7 +740,7 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
}
_ {
cx.span_fatal((*arg).span,
"extension must be [clause, " + " ...]");
"extension must be [clause, " + " ...]/~");
}
}
}
@ -759,7 +760,8 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
ext: normal({expander: ext, span: some(option::get(arg).span)})};
fn generic_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
_body: ast::mac_body, clauses: [@clause]) -> @expr {
_body: ast::mac_body,
clauses: [@clause]/~) -> @expr {
let arg = alt arg {
some(arg) { arg }
none { cx.span_fatal(sp, "macro must have arguments")}

View file

@ -41,7 +41,7 @@ iface ast_fold {
fn fold_ident(&&ident) -> ident;
fn fold_path(&&@path) -> @path;
fn fold_local(&&@local) -> @local;
fn map_exprs(fn@(&&@expr) -> @expr, [@expr]) -> [@expr];
fn map_exprs(fn@(&&@expr) -> @expr, [@expr]/~) -> [@expr]/~;
fn new_id(node_id) -> node_id;
fn new_span(span) -> span;
}
@ -75,7 +75,7 @@ type ast_fold_precursor = @{
fold_ident: fn@(&&ident, ast_fold) -> ident,
fold_path: fn@(path, ast_fold) -> path,
fold_local: fn@(local_, span, ast_fold) -> (local_, span),
map_exprs: fn@(fn@(&&@expr) -> @expr, [@expr]) -> [@expr],
map_exprs: fn@(fn@(&&@expr) -> @expr, [@expr]/~) -> [@expr]/~,
new_id: fn@(node_id) -> node_id,
new_span: fn@(span) -> span};
@ -151,7 +151,7 @@ fn fold_ty_param(tp: ty_param, fld: ast_fold) -> ty_param {
bounds: @vec::map(*tp.bounds, {|x|fold_ty_param_bound(x, fld)})}
}
fn fold_ty_params(tps: [ty_param], fld: ast_fold) -> [ty_param] {
fn fold_ty_params(tps: [ty_param]/~, fld: ast_fold) -> [ty_param]/~ {
vec::map(tps, {|x|fold_ty_param(x, fld)})
}
@ -335,10 +335,11 @@ fn noop_fold_pat(p: pat_, fld: ast_fold) -> pat_ {
{|pats| vec::map(pats, fld.fold_pat)})
}
pat_rec(fields, etc) {
let mut fs = [];
let mut fs = []/~;
for fields.each {|f|
fs += [{ident: /* FIXME (#2543) */ copy f.ident,
pat: fld.fold_pat(f.pat)}];
vec::push(fs,
{ident: /* FIXME (#2543) */ copy f.ident,
pat: fld.fold_pat(f.pat)});
}
pat_rec(fs, etc)
}
@ -570,7 +571,7 @@ fn noop_fold_local(l: local_, fld: ast_fold) -> local_ {
/* temporarily eta-expand because of a compiler bug with using `fn<T>` as a
value */
fn noop_map_exprs(f: fn@(&&@expr) -> @expr, es: [@expr]) -> [@expr] {
fn noop_map_exprs(f: fn@(&&@expr) -> @expr, es: [@expr]/~) -> [@expr]/~ {
ret vec::map(es, f);
}
@ -717,7 +718,7 @@ impl of ast_fold for ast_fold_precursor {
let (n, s) = self.fold_local(x.node, x.span, self as ast_fold);
ret @{node: n, span: self.new_span(s)};
}
fn map_exprs(f: fn@(&&@expr) -> @expr, e: [@expr]) -> [@expr] {
fn map_exprs(f: fn@(&&@expr) -> @expr, e: [@expr]/~) -> [@expr]/~ {
self.map_exprs(f, e)
}
fn new_id(node_id: ast::node_id) -> node_id {

View file

@ -119,7 +119,8 @@ fn parse_expr_from_source_str(name: str, source: @str, cfg: ast::crate_cfg,
}
fn parse_item_from_source_str(name: str, source: @str, cfg: ast::crate_cfg,
+attrs: [ast::attribute], vis: ast::visibility,
+attrs: [ast::attribute]/~,
vis: ast::visibility,
sess: parse_sess) -> option<@ast::item> {
let (p, rdr) = new_parser_etc_from_source_str(sess, cfg, name,
codemap::fss_none, source);
@ -197,7 +198,7 @@ fn new_parser_from_file(sess: parse_sess, cfg: ast::crate_cfg, +path: str,
}
fn new_parser_from_tt(sess: parse_sess, cfg: ast::crate_cfg,
tt: [ast::token_tree]) -> parser {
tt: [ast::token_tree]/~) -> parser {
let trdr = lexer::new_tt_reader(sess.span_diagnostic, sess.interner, tt);
ret parser(sess, cfg, trdr as reader, parser::SOURCE_FILE)
}

View file

@ -7,11 +7,11 @@ export parser_attr;
// A type to distingush between the parsing of item attributes or syntax
// extensions, which both begin with token.POUND
type attr_or_ext = option<either<[ast::attribute], @ast::expr>>;
type attr_or_ext = option<either<[ast::attribute]/~, @ast::expr>>;
impl parser_attr for parser {
fn parse_outer_attrs_or_ext(first_item_attrs: [ast::attribute])
fn parse_outer_attrs_or_ext(first_item_attrs: [ast::attribute]/~)
-> attr_or_ext
{
let expect_item_next = vec::is_not_empty(first_item_attrs);
@ -21,7 +21,8 @@ impl parser_attr for parser {
self.bump();
let first_attr =
self.parse_attribute_naked(ast::attr_outer, lo);
ret some(left([first_attr] + self.parse_outer_attributes()));
ret some(left([first_attr]/~ +
self.parse_outer_attributes()));
} else if !(self.look_ahead(1u) == token::LT
|| self.look_ahead(1u) == token::LBRACKET
|| self.look_ahead(1u) == token::POUND
@ -33,11 +34,11 @@ impl parser_attr for parser {
}
// Parse attributes that appear before an item
fn parse_outer_attributes() -> [ast::attribute] {
let mut attrs: [ast::attribute] = [];
fn parse_outer_attributes() -> [ast::attribute]/~ {
let mut attrs: [ast::attribute]/~ = []/~;
while self.token == token::POUND
&& self.look_ahead(1u) == token::LBRACKET {
attrs += [self.parse_attribute(ast::attr_outer)];
attrs += [self.parse_attribute(ast::attr_outer)]/~;
}
ret attrs;
}
@ -64,9 +65,9 @@ impl parser_attr for parser {
// is an inner attribute of the containing item or an outer attribute of
// the first contained item until we see the semi).
fn parse_inner_attrs_and_next() ->
{inner: [ast::attribute], next: [ast::attribute]} {
let mut inner_attrs: [ast::attribute] = [];
let mut next_outer_attrs: [ast::attribute] = [];
{inner: [ast::attribute]/~, next: [ast::attribute]/~} {
let mut inner_attrs: [ast::attribute]/~ = []/~;
let mut next_outer_attrs: [ast::attribute]/~ = []/~;
while self.token == token::POUND {
if self.look_ahead(1u) != token::LBRACKET {
// This is an extension
@ -75,13 +76,13 @@ impl parser_attr for parser {
let attr = self.parse_attribute(ast::attr_inner);
if self.token == token::SEMI {
self.bump();
inner_attrs += [attr];
inner_attrs += [attr]/~;
} else {
// It's not really an inner attribute
let outer_attr =
spanned(attr.span.lo, attr.span.hi,
{style: ast::attr_outer, value: attr.node.value});
next_outer_attrs += [outer_attr];
next_outer_attrs += [outer_attr]/~;
break;
}
}
@ -110,15 +111,15 @@ impl parser_attr for parser {
}
}
fn parse_meta_seq() -> [@ast::meta_item] {
fn parse_meta_seq() -> [@ast::meta_item]/~ {
ret self.parse_seq(token::LPAREN, token::RPAREN,
seq_sep_trailing_disallowed(token::COMMA),
{|p| p.parse_meta_item()}).node;
}
fn parse_optional_meta() -> [@ast::meta_item] {
fn parse_optional_meta() -> [@ast::meta_item]/~ {
alt self.token { token::LPAREN { ret self.parse_meta_seq(); }
_ { ret []; } }
_ { ret []/~; } }
}
}

View file

@ -16,7 +16,7 @@ enum cmnt_style {
blank_line, // Just a manual blank line "\n\n", for layout
}
type cmnt = {style: cmnt_style, lines: [str], pos: uint};
type cmnt = {style: cmnt_style, lines: [str]/~, pos: uint};
fn read_to_eol(rdr: string_reader) -> str {
let mut val = "";
@ -41,14 +41,14 @@ fn consume_non_eol_whitespace(rdr: string_reader) {
}
}
fn push_blank_line_comment(rdr: string_reader, &comments: [cmnt]) {
fn push_blank_line_comment(rdr: string_reader, &comments: [cmnt]/~) {
#debug(">>> blank-line comment");
let v: [str] = [];
comments += [{style: blank_line, lines: v, pos: rdr.chpos}];
let v: [str]/~ = []/~;
comments += [{style: blank_line, lines: v, pos: rdr.chpos}]/~;
}
fn consume_whitespace_counting_blank_lines(rdr: string_reader,
&comments: [cmnt]) {
&comments: [cmnt]/~) {
while is_whitespace(rdr.curr) && !is_eof(rdr) {
if rdr.col == 0u && rdr.curr == '\n' {
push_blank_line_comment(rdr, comments);
@ -62,18 +62,18 @@ fn read_shebang_comment(rdr: string_reader, code_to_the_left: bool) -> cmnt {
let p = rdr.chpos;
#debug("<<< shebang comment");
ret {style: if code_to_the_left { trailing } else { isolated },
lines: [read_one_line_comment(rdr)],
lines: [read_one_line_comment(rdr)]/~,
pos: p};
}
fn read_line_comments(rdr: string_reader, code_to_the_left: bool) -> cmnt {
#debug(">>> line comments");
let p = rdr.chpos;
let mut lines: [str] = [];
let mut lines: [str]/~ = []/~;
while rdr.curr == '/' && nextch(rdr) == '/' {
let line = read_one_line_comment(rdr);
log(debug, line);
lines += [line];
lines += [line]/~;
consume_non_eol_whitespace(rdr);
}
#debug("<<< line comments");
@ -88,7 +88,7 @@ fn all_whitespace(s: str, begin: uint, end: uint) -> bool {
ret true;
}
fn trim_whitespace_prefix_and_push_line(&lines: [str],
fn trim_whitespace_prefix_and_push_line(&lines: [str]/~,
s: str, col: uint) unsafe {
let mut s1;
let len = str::len(s);
@ -98,13 +98,13 @@ fn trim_whitespace_prefix_and_push_line(&lines: [str],
} else { s1 = ""; }
} else { s1 = s; }
log(debug, "pushing line: " + s1);
lines += [s1];
lines += [s1]/~;
}
fn read_block_comment(rdr: string_reader, code_to_the_left: bool) -> cmnt {
#debug(">>> block comment");
let p = rdr.chpos;
let mut lines: [str] = [];
let mut lines: [str]/~ = []/~;
let mut col: uint = rdr.col;
bump(rdr);
bump(rdr);
@ -153,14 +153,14 @@ fn peeking_at_comment(rdr: string_reader) -> bool {
}
fn consume_comment(rdr: string_reader, code_to_the_left: bool,
&comments: [cmnt]) {
&comments: [cmnt]/~) {
#debug(">>> consume comment");
if rdr.curr == '/' && nextch(rdr) == '/' {
comments += [read_line_comments(rdr, code_to_the_left)];
comments += [read_line_comments(rdr, code_to_the_left)]/~;
} else if rdr.curr == '/' && nextch(rdr) == '*' {
comments += [read_block_comment(rdr, code_to_the_left)];
comments += [read_block_comment(rdr, code_to_the_left)]/~;
} else if rdr.curr == '#' && nextch(rdr) == '!' {
comments += [read_shebang_comment(rdr, code_to_the_left)];
comments += [read_shebang_comment(rdr, code_to_the_left)]/~;
} else { fail; }
#debug("<<< consume comment");
}
@ -170,7 +170,7 @@ type lit = {lit: str, pos: uint};
fn gather_comments_and_literals(span_diagnostic: diagnostic::span_handler,
path: str,
srdr: io::reader) ->
{cmnts: [cmnt], lits: [lit]} {
{cmnts: [cmnt]/~, lits: [lit]/~} {
let src = @str::from_bytes(srdr.read_whole_stream());
let itr = @interner::mk::<@str>(
{|x|str::hash(*x)},
@ -179,8 +179,8 @@ fn gather_comments_and_literals(span_diagnostic: diagnostic::span_handler,
let rdr = lexer::new_low_level_string_reader
(span_diagnostic, codemap::new_filemap(path, src, 0u, 0u), itr);
let mut comments: [cmnt] = [];
let mut literals: [lit] = [];
let mut comments: [cmnt]/~ = []/~;
let mut literals: [lit]/~ = []/~;
let mut first_read: bool = true;
while !is_eof(rdr) {
loop {

View file

@ -149,9 +149,9 @@ impl parser_common for parser {
}
fn parse_seq_to_before_gt<T: copy>(sep: option<token::token>,
f: fn(parser) -> T) -> [T] {
f: fn(parser) -> T) -> [T]/~ {
let mut first = true;
let mut v = [];
let mut v = []/~;
while self.token != token::GT
&& self.token != token::BINOP(token::SHR) {
alt sep {
@ -166,7 +166,7 @@ impl parser_common for parser {
}
fn parse_seq_to_gt<T: copy>(sep: option<token::token>,
f: fn(parser) -> T) -> [T] {
f: fn(parser) -> T) -> [T]/~ {
let v = self.parse_seq_to_before_gt(sep, f);
self.expect_gt();
@ -174,7 +174,7 @@ impl parser_common for parser {
}
fn parse_seq_lt_gt<T: copy>(sep: option<token::token>,
f: fn(parser) -> T) -> spanned<[T]> {
f: fn(parser) -> T) -> spanned<[T]/~> {
let lo = self.span.lo;
self.expect(token::LT);
let result = self.parse_seq_to_before_gt::<T>(sep, f);
@ -184,7 +184,7 @@ impl parser_common for parser {
}
fn parse_seq_to_end<T: copy>(ket: token::token, sep: seq_sep,
f: fn(parser) -> T) -> [T] {
f: fn(parser) -> T) -> [T]/~ {
let val = self.parse_seq_to_before_end(ket, sep, f);
self.bump();
ret val;
@ -192,9 +192,9 @@ impl parser_common for parser {
fn parse_seq_to_before_end<T: copy>(ket: token::token, sep: seq_sep,
f: fn(parser) -> T) -> [T] {
f: fn(parser) -> T) -> [T]/~ {
let mut first: bool = true;
let mut v: [T] = [];
let mut v: [T]/~ = []/~;
while self.token != ket {
alt sep.sep {
some(t) { if first { first = false; }
@ -207,8 +207,10 @@ impl parser_common for parser {
ret v;
}
fn parse_unspanned_seq<T: copy>(bra: token::token, ket: token::token,
sep: seq_sep, f: fn(parser) -> T) -> [T] {
fn parse_unspanned_seq<T: copy>(bra: token::token,
ket: token::token,
sep: seq_sep,
f: fn(parser) -> T) -> [T]/~ {
self.expect(bra);
let result = self.parse_seq_to_before_end::<T>(ket, sep, f);
self.bump();
@ -218,7 +220,7 @@ impl parser_common for parser {
// NB: Do not use this function unless you actually plan to place the
// spanned list in the AST.
fn parse_seq<T: copy>(bra: token::token, ket: token::token, sep: seq_sep,
f: fn(parser) -> T) -> spanned<[T]> {
f: fn(parser) -> T) -> spanned<[T]/~> {
let lo = self.span.lo;
self.expect(bra);
let result = self.parse_seq_to_before_end::<T>(ket, sep, f);

View file

@ -7,24 +7,26 @@ type ctx =
@{sess: parse::parse_sess,
cfg: ast::crate_cfg};
fn eval_crate_directives(cx: ctx, cdirs: [@ast::crate_directive], prefix: str,
&view_items: [@ast::view_item],
&items: [@ast::item]) {
fn eval_crate_directives(cx: ctx,
cdirs: [@ast::crate_directive]/~,
prefix: str,
&view_items: [@ast::view_item]/~,
&items: [@ast::item]/~) {
for cdirs.each {|sub_cdir|
eval_crate_directive(cx, sub_cdir, prefix, view_items, items);
}
}
fn eval_crate_directives_to_mod(cx: ctx, cdirs: [@ast::crate_directive],
fn eval_crate_directives_to_mod(cx: ctx, cdirs: [@ast::crate_directive]/~,
prefix: str, suffix: option<str>)
-> (ast::_mod, [ast::attribute]) {
-> (ast::_mod, [ast::attribute]/~) {
#debug("eval crate prefix: %s", prefix);
#debug("eval crate suffix: %s",
option::get_default(suffix, "none"));
let (cview_items, citems, cattrs)
= parse_companion_mod(cx, prefix, suffix);
let mut view_items: [@ast::view_item] = [];
let mut items: [@ast::item] = [];
let mut view_items: [@ast::view_item]/~ = []/~;
let mut items: [@ast::item]/~ = []/~;
eval_crate_directives(cx, cdirs, prefix, view_items, items);
ret ({view_items: view_items + cview_items,
items: items + citems},
@ -42,7 +44,7 @@ We build the path to the companion mod by combining the prefix and the
optional suffix then adding the .rs extension.
*/
fn parse_companion_mod(cx: ctx, prefix: str, suffix: option<str>)
-> ([@ast::view_item], [@ast::item], [ast::attribute]) {
-> ([@ast::view_item]/~, [@ast::item]/~, [ast::attribute]/~) {
fn companion_file(+prefix: str, suffix: option<str>) -> str {
ret alt suffix {
@ -72,11 +74,11 @@ fn parse_companion_mod(cx: ctx, prefix: str, suffix: option<str>)
cx.sess.byte_pos = cx.sess.byte_pos + r0.pos;
ret (m0.view_items, m0.items, inner_attrs.inner);
} else {
ret ([], [], []);
ret ([]/~, []/~, []/~);
}
}
fn cdir_path_opt(id: ast::ident, attrs: [ast::attribute]) -> @str {
fn cdir_path_opt(id: ast::ident, attrs: [ast::attribute]/~) -> @str {
alt ::attr::first_attr_value_str_by_name(attrs, "path") {
some(d) {
ret d;
@ -86,8 +88,8 @@ fn cdir_path_opt(id: ast::ident, attrs: [ast::attribute]) -> @str {
}
fn eval_crate_directive(cx: ctx, cdir: @ast::crate_directive, prefix: str,
&view_items: [@ast::view_item],
&items: [@ast::item]) {
&view_items: [@ast::view_item]/~,
&items: [@ast::item]/~) {
alt cdir.node {
ast::cdir_src_mod(id, attrs) {
let file_path = cdir_path_opt(@(*id + ".rs"), attrs);
@ -108,7 +110,7 @@ fn eval_crate_directive(cx: ctx, cdir: @ast::crate_directive, prefix: str,
// Thread defids, chpos and byte_pos through the parsers
cx.sess.chpos = r0.chpos;
cx.sess.byte_pos = cx.sess.byte_pos + r0.pos;
items += [i];
items += [i]/~;
}
ast::cdir_dir_mod(id, cdirs, attrs) {
let path = cdir_path_opt(id, attrs);
@ -126,9 +128,9 @@ fn eval_crate_directive(cx: ctx, cdir: @ast::crate_directive, prefix: str,
vis: ast::public,
span: cdir.span};
cx.sess.next_id += 1;
items += [i];
items += [i]/~;
}
ast::cdir_view_item(vi) { view_items += [vi]; }
ast::cdir_view_item(vi) { view_items += [vi]/~; }
ast::cdir_syntax(pth) { }
}
}

View file

@ -26,7 +26,7 @@ enum tt_frame_up { /* to break a circularity */
/* TODO: figure out how to have a uniquely linked stack, and change to `~` */
#[doc = "an unzipping of `token_tree`s"]
type tt_frame = @{
readme: [ast::token_tree],
readme: [ast::token_tree]/~,
mut idx: uint,
up: tt_frame_up
};
@ -41,7 +41,7 @@ type tt_reader = @{
};
fn new_tt_reader(span_diagnostic: diagnostic::span_handler,
itr: @interner::interner<@str>, src: [ast::token_tree])
itr: @interner::interner<@str>, src: [ast::token_tree]/~)
-> tt_reader {
let r = @{span_diagnostic: span_diagnostic, interner: itr,
mut cur: @{readme: src, mut idx: 0u,

View file

@ -13,6 +13,7 @@ import common::{seq_sep_trailing_disallowed, seq_sep_trailing_allowed,
seq_sep_none, token_to_str};
import common::*;//{parser_common};
import dvec::{dvec, extensions};
import vec::{push};
export file_type;
export parser;
@ -51,10 +52,10 @@ enum pexpr {
*/
enum class_contents { ctor_decl(fn_decl, blk, codemap::span),
dtor_decl(blk, codemap::span),
members([@class_member]) }
members([@class_member]/~) }
type arg_or_capture_item = either<arg, capture_item>;
type item_info = (ident, item_, option<[attribute]>);
type item_info = (ident, item_, option<[attribute]/~>);
class parser {
let sess: parse_sess;
@ -176,14 +177,14 @@ class parser {
// functions can't have constrained types. Not sure whether
// that would be desirable anyway. See bug for the story on
// constrained types.
let constrs: [@constr] = [];
let constrs: [@constr]/~ = []/~;
let (ret_style, ret_ty) = self.parse_ret_ty();
ret {inputs: inputs, output: ret_ty,
purity: purity, cf: ret_style,
constraints: constrs};
}
fn parse_ty_methods() -> [ty_method] {
fn parse_ty_methods() -> [ty_method]/~ {
self.parse_unspanned_seq(token::LBRACE, token::RBRACE,
seq_sep_none()) { |p|
let attrs = p.parse_outer_attributes();
@ -215,7 +216,7 @@ class parser {
// if i is the jth ident in args, return j
// otherwise, fail
fn ident_index(args: [arg], i: ident) -> uint {
fn ident_index(args: [arg]/~, i: ident) -> uint {
let mut j = 0u;
for args.each {|a| if a.ident == i { ret j; } j += 1u; }
self.fatal("unbound variable `" + *i + "` in constraint arg");
@ -235,7 +236,7 @@ class parser {
ret @{node: carg, span: sp};
}
fn parse_constr_arg(args: [arg]) -> @constr_arg {
fn parse_constr_arg(args: [arg]/~) -> @constr_arg {
let sp = self.span;
let mut carg = carg_base;
if self.token == token::BINOP(token::STAR) {
@ -247,7 +248,7 @@ class parser {
ret @{node: carg, span: sp};
}
fn parse_ty_constr(fn_args: [arg]) -> @constr {
fn parse_ty_constr(fn_args: [arg]/~) -> @constr {
let lo = self.span.lo;
let path = self.parse_path_without_tps();
let args = self.parse_unspanned_seq(
@ -261,7 +262,7 @@ class parser {
fn parse_constr_in_type() -> @ty_constr {
let lo = self.span.lo;
let path = self.parse_path_without_tps();
let args: [@ty_constr_arg] = self.parse_unspanned_seq(
let args: [@ty_constr_arg]/~ = self.parse_unspanned_seq(
token::LPAREN, token::RPAREN,
seq_sep_trailing_disallowed(token::COMMA),
{|p| p.parse_type_constr_arg()});
@ -272,17 +273,17 @@ class parser {
fn parse_constrs<T: copy>(pser: fn(parser) -> @constr_general<T>) ->
[@constr_general<T>] {
let mut constrs: [@constr_general<T>] = [];
[@constr_general<T>]/~ {
let mut constrs: [@constr_general<T>]/~ = []/~;
loop {
let constr = pser(self);
constrs += [constr];
constrs += [constr]/~;
if self.token == token::COMMA { self.bump(); }
else { ret constrs; }
};
}
fn parse_type_constraints() -> [@ty_constr] {
fn parse_type_constraints() -> [@ty_constr]/~ {
ret self.parse_constrs({|p| p.parse_constr_in_type()});
}
@ -359,10 +360,10 @@ class parser {
self.bump();
ty_nil
} else {
let mut ts = [self.parse_ty(false)];
let mut ts = [self.parse_ty(false)]/~;
while self.token == token::COMMA {
self.bump();
ts += [self.parse_ty(false)];
ts += [self.parse_ty(false)]/~;
}
let t = if vec::len(ts) == 1u { ts[0].node }
else { ty_tup(ts) };
@ -583,22 +584,22 @@ class parser {
let lo = self.span.lo;
let global = self.eat(token::MOD_SEP);
let mut ids = [];
let mut ids = []/~;
loop {
let is_not_last =
self.look_ahead(2u) != token::LT
&& self.look_ahead(1u) == token::MOD_SEP;
if is_not_last {
ids += [parse_ident(self)];
ids += [parse_ident(self)]/~;
self.expect(token::MOD_SEP);
} else {
ids += [parse_last_ident(self)];
ids += [parse_last_ident(self)]/~;
break;
}
}
@{span: mk_sp(lo, self.last_span.hi), global: global,
idents: ids, rp: none, types: []}
idents: ids, rp: none, types: []/~}
}
fn parse_value_path() -> @path {
@ -639,7 +640,7 @@ class parser {
self.parse_seq_lt_gt(some(token::COMMA),
{|p| p.parse_ty(false)})
} else {
{node: [], span: path.span}
{node: []/~, span: path.span}
}
};
@ -715,9 +716,9 @@ class parser {
let lit = @spanned(lo, hi, lit_nil);
ret self.mk_pexpr(lo, hi, expr_lit(lit));
}
let mut es = [self.parse_expr()];
let mut es = [self.parse_expr()]/~;
while self.token == token::COMMA {
self.bump(); es += [self.parse_expr()];
self.bump(); es += [self.parse_expr()]/~;
}
hi = self.span.hi;
self.expect(token::RPAREN);
@ -733,7 +734,7 @@ class parser {
if self.is_keyword("mut") ||
is_plain_ident(self.token)
&& self.look_ahead(1u) == token::COLON {
let mut fields = [self.parse_field(token::COLON)];
let mut fields = [self.parse_field(token::COLON)]/~;
let mut base = none;
while self.token != token::RBRACE {
// optional comma before "with"
@ -750,7 +751,7 @@ class parser {
// record ends by an optional trailing comma
break;
}
fields += [self.parse_field(token::COLON)];
fields += [self.parse_field(token::COLON)]/~;
}
hi = self.span.hi;
self.expect(token::RBRACE);
@ -997,7 +998,7 @@ class parser {
self.expect(token::LT);
self.parse_seq_to_gt(some(token::COMMA),
{|p| p.parse_ty(false)})
} else { [] };
} else { []/~ };
e = self.mk_pexpr(lo, hi, expr_field(self.to_expr(e),
self.get_str(i),
tys));
@ -1027,13 +1028,13 @@ class parser {
let blk = self.parse_fn_block_expr();
alt e.node {
expr_call(f, args, false) {
e = pexpr(@{node: expr_call(f, args + [blk], true)
e = pexpr(@{node: expr_call(f, args + [blk]/~, true)
with *self.to_expr(e)});
}
_ {
e = self.mk_pexpr(lo, self.last_span.hi,
expr_call(self.to_expr(e),
[blk], true));
[blk]/~, true));
}
}
}
@ -1085,10 +1086,10 @@ class parser {
ret alt self.token {
token::LPAREN | token::LBRACE | token::LBRACKET {
let ket = flip(self.token);
tt_delim([parse_tt_flat(self, true)] +
tt_delim([parse_tt_flat(self, true)]/~ +
self.parse_seq_to_before_end(ket, seq_sep_none(),
{|p| p.parse_token_tree()})
+ [parse_tt_flat(self, true)])
+ [parse_tt_flat(self, true)]/~)
}
_ { parse_tt_flat(self, false) }
};
@ -1354,7 +1355,7 @@ class parser {
let b_arg = vec::last(args);
let last = self.mk_expr(b_arg.span.lo, b_arg.span.hi,
ctor(b_arg));
@{node: expr_call(f, vec::init(args) + [last], true)
@{node: expr_call(f, vec::init(args) + [last]/~, true)
with *call}
}
_ {
@ -1385,14 +1386,14 @@ class parser {
else { alt_exhaustive };
let discriminant = self.parse_expr();
self.expect(token::LBRACE);
let mut arms: [arm] = [];
let mut arms: [arm]/~ = []/~;
while self.token != token::RBRACE {
let pats = self.parse_pats();
let mut guard = none;
if self.eat_keyword("if") { guard = some(self.parse_expr()); }
if self.token == token::FAT_ARROW { self.bump(); }
let blk = self.parse_block();
arms += [{pats: pats, guard: guard, body: blk}];
arms += [{pats: pats, guard: guard, body: blk}]/~;
}
let mut hi = self.span.hi;
self.bump();
@ -1434,10 +1435,10 @@ class parser {
}
}
fn parse_pats() -> [@pat] {
let mut pats = [];
fn parse_pats() -> [@pat]/~ {
let mut pats = []/~;
loop {
pats += [self.parse_pat()];
pats += [self.parse_pat()]/~;
if self.token == token::BINOP(token::OR) { self.bump(); }
else { ret pats; }
};
@ -1463,7 +1464,7 @@ class parser {
}
token::LBRACE {
self.bump();
let mut fields = [];
let mut fields = []/~;
let mut etc = false;
let mut first = true;
while self.token != token::RBRACE {
@ -1498,7 +1499,7 @@ class parser {
node: pat_ident(fieldpath, none),
span: mk_sp(lo, hi)};
}
fields += [{ident: fieldname, pat: subpat}];
fields += [{ident: fieldname, pat: subpat}]/~;
}
hi = self.span.hi;
self.bump();
@ -1513,10 +1514,10 @@ class parser {
let expr = self.mk_expr(lo, hi, expr_lit(lit));
pat = pat_lit(expr);
} else {
let mut fields = [self.parse_pat()];
let mut fields = [self.parse_pat()]/~;
while self.token == token::COMMA {
self.bump();
fields += [self.parse_pat()];
fields += [self.parse_pat()]/~;
}
if vec::len(fields) == 1u { self.expect(token::COMMA); }
hi = self.span.hi;
@ -1548,7 +1549,7 @@ class parser {
} else {
let enum_path = self.parse_path_with_tps(true);
hi = enum_path.span.hi;
let mut args: [@pat] = [];
let mut args: [@pat]/~ = []/~;
let mut star_pat = false;
alt self.token {
token::LPAREN {
@ -1604,9 +1605,9 @@ class parser {
fn parse_let() -> @decl {
let is_mutbl = self.eat_keyword("mut");
let lo = self.span.lo;
let mut locals = [self.parse_local(is_mutbl, true)];
let mut locals = [self.parse_local(is_mutbl, true)]/~;
while self.eat(token::COMMA) {
locals += [self.parse_local(is_mutbl, true)];
locals += [self.parse_local(is_mutbl, true)]/~;
}
ret @spanned(lo, self.last_span.hi, decl_local(locals));
}
@ -1628,8 +1629,8 @@ class parser {
span: mk_sp(lo, self.last_span.hi)};
}
fn parse_stmt(+first_item_attrs: [attribute]) -> @stmt {
fn check_expected_item(p: parser, current_attrs: [attribute]) {
fn parse_stmt(+first_item_attrs: [attribute]/~) -> @stmt {
fn check_expected_item(p: parser, current_attrs: [attribute]/~) {
// If we have attributes then we should have an item
if vec::is_not_empty(current_attrs) {
p.fatal("expected item");
@ -1645,7 +1646,7 @@ class parser {
} else {
let mut item_attrs;
alt self.parse_outer_attrs_or_ext(first_item_attrs) {
none { item_attrs = []; }
none { item_attrs = []/~; }
some(left(attrs)) { item_attrs = attrs; }
some(right(ext)) {
ret @spanned(lo, ext.span.hi, stmt_expr(ext, self.get_id()));
@ -1685,14 +1686,15 @@ class parser {
ret blk;
}
fn parse_inner_attrs_and_block(parse_attrs: bool) -> ([attribute], blk) {
fn parse_inner_attrs_and_block(parse_attrs: bool)
-> ([attribute]/~, blk) {
fn maybe_parse_inner_attrs_and_next(p: parser, parse_attrs: bool) ->
{inner: [attribute], next: [attribute]} {
{inner: [attribute]/~, next: [attribute]/~} {
if parse_attrs {
p.parse_inner_attrs_and_next()
} else {
{inner: [], next: []}
{inner: []/~, next: []/~}
}
}
@ -1727,12 +1729,12 @@ class parser {
// necessary, and this should take a qualifier.
// some blocks start with "#{"...
fn parse_block_tail(lo: uint, s: blk_check_mode) -> blk {
self.parse_block_tail_(lo, s, [])
self.parse_block_tail_(lo, s, []/~)
}
fn parse_block_tail_(lo: uint, s: blk_check_mode,
+first_item_attrs: [attribute]) -> blk {
let mut stmts = [];
+first_item_attrs: [attribute]/~) -> blk {
let mut stmts = []/~;
let mut expr = none;
let {attrs_remaining, view_items} =
self.parse_view(first_item_attrs, true);
@ -1749,13 +1751,14 @@ class parser {
}
_ {
let stmt = self.parse_stmt(initial_attrs);
initial_attrs = [];
initial_attrs = []/~;
alt stmt.node {
stmt_expr(e, stmt_id) { // Expression without semicolon:
alt self.token {
token::SEMI {
self.bump();
stmts += [@{node: stmt_semi(e, stmt_id) with *stmt}];
push(stmts,
@{node: stmt_semi(e, stmt_id) with *stmt});
}
token::RBRACE {
expr = some(e);
@ -1766,13 +1769,13 @@ class parser {
but found '"
+ token_to_str(self.reader, t) + "'");
}
stmts += [stmt];
stmts += [stmt]/~;
}
}
}
_ { // All other kinds of statements:
stmts += [stmt];
stmts += [stmt]/~;
if classify::stmt_ends_with_semi(*stmt) {
self.expect(token::SEMI);
@ -1790,30 +1793,32 @@ class parser {
}
fn parse_ty_param() -> ty_param {
let mut bounds = [];
let mut bounds = []/~;
let ident = self.parse_ident();
if self.eat(token::COLON) {
while self.token != token::COMMA && self.token != token::GT {
if self.eat_keyword("send") { bounds += [bound_send]; }
else if self.eat_keyword("copy") { bounds += [bound_copy]; }
else if self.eat_keyword("const") { bounds += [bound_const]; }
else { bounds += [bound_iface(self.parse_ty(false))]; }
if self.eat_keyword("send") { push(bounds, bound_send); }
else if self.eat_keyword("copy") { push(bounds, bound_copy) }
else if self.eat_keyword("const") {
push(bounds, bound_const)
}
else { push(bounds, bound_iface(self.parse_ty(false))); }
}
}
ret {ident: ident, id: self.get_id(), bounds: @bounds};
}
fn parse_ty_params() -> [ty_param] {
fn parse_ty_params() -> [ty_param]/~ {
if self.eat(token::LT) {
self.parse_seq_to_gt(some(token::COMMA), {|p| p.parse_ty_param()})
} else { [] }
} else { []/~ }
}
fn parse_fn_decl(purity: purity,
parse_arg_fn: fn(parser) -> arg_or_capture_item)
-> (fn_decl, capture_clause) {
let args_or_capture_items: [arg_or_capture_item] =
let args_or_capture_items: [arg_or_capture_item]/~ =
self.parse_unspanned_seq(
token::LPAREN, token::RPAREN,
seq_sep_trailing_disallowed(token::COMMA), parse_arg_fn);
@ -1824,7 +1829,7 @@ class parser {
// Use the args list to translate each bound variable
// mentioned in a constraint to an arg index.
// Seems weird to do this in the parser, but I'm not sure how else to.
let mut constrs = [];
let mut constrs = []/~;
if self.token == token::COLON {
self.bump();
constrs = self.parse_constrs({|p| p.parse_ty_constr(inputs) });
@ -1840,7 +1845,7 @@ class parser {
fn parse_fn_block_decl() -> (fn_decl, capture_clause) {
let inputs_captures = {
if self.eat(token::OROR) {
[]
[]/~
} else {
self.parse_unspanned_seq(
token::BINOP(token::OR), token::BINOP(token::OR),
@ -1857,11 +1862,11 @@ class parser {
output: output,
purity: impure_fn,
cf: return_val,
constraints: []},
constraints: []/~},
@either::rights(inputs_captures));
}
fn parse_fn_header() -> {ident: ident, tps: [ty_param]} {
fn parse_fn_header() -> {ident: ident, tps: [ty_param]/~} {
let id = self.parse_value_ident();
let ty_params = self.parse_ty_params();
ret {ident: id, tps: ty_params};
@ -1869,7 +1874,7 @@ class parser {
fn mk_item(lo: uint, hi: uint, +ident: ident,
+node: item_, vis: visibility,
+attrs: [attribute]) -> @item {
+attrs: [attribute]/~) -> @item {
ret @{ident: ident,
attrs: attrs,
id: self.get_id(),
@ -1922,9 +1927,9 @@ class parser {
}
// Parses three variants (with the region/type params always optional):
// impl /&<T: copy> of to_str for [T] { ... }
// impl name/&<T> of to_str for [T] { ... }
// impl name/&<T> for [T] { ... }
// impl /&<T: copy> of to_str for [T]/~ { ... }
// impl name/&<T> of to_str for [T]/~ { ... }
// impl name/&<T> for [T]/~ { ... }
fn parse_item_impl() -> item_info {
fn wrap_path(p: parser, pt: @path) -> @ty {
@{id: p.get_id(), node: ty_path(pt, p.get_id()), span: pt.span}
@ -1936,7 +1941,7 @@ class parser {
(none, self.parse_region_param(), self.parse_ty_params())
}
else if self.is_keyword("of") {
(none, rp_none, [])
(none, rp_none, []/~)
} else {
let id = self.parse_ident();
let rp = self.parse_region_param();
@ -1956,10 +1961,10 @@ class parser {
};
self.expect_keyword("for");
let ty = self.parse_ty(false);
let mut meths = [];
let mut meths = []/~;
self.expect(token::LBRACE);
while !self.eat(token::RBRACE) {
meths += [self.parse_method(public)];
meths += [self.parse_method(public)]/~;
}
(ident, item_impl(tps, rp, ifce, ty, meths), none)
}
@ -1969,7 +1974,7 @@ class parser {
// the return type of the ctor function.
fn ident_to_path_tys(i: ident,
rp: region_param,
typarams: [ty_param]) -> @path {
typarams: [ty_param]/~) -> @path {
let s = self.last_span;
// Hack. But then, this whole function is in service of a hack.
@ -1978,7 +1983,7 @@ class parser {
rp_self { some(self.region_from_name(some(@"self"))) }
};
@{span: s, global: false, idents: [i],
@{span: s, global: false, idents: [i]/~,
rp: a_r,
types: vec::map(typarams, {|tp|
@{id: self.get_id(),
@ -1992,7 +1997,7 @@ class parser {
id: self.get_id()}
}
fn parse_iface_ref_list() -> [@iface_ref] {
fn parse_iface_ref_list() -> [@iface_ref]/~ {
self.parse_seq_to_before_end(
token::LBRACE, seq_sep_trailing_disallowed(token::COMMA),
{|p| p.parse_iface_ref()})
@ -2003,11 +2008,11 @@ class parser {
let rp = self.parse_region_param();
let ty_params = self.parse_ty_params();
let class_path = self.ident_to_path_tys(class_name, rp, ty_params);
let ifaces : [@iface_ref] = if self.eat(token::COLON)
let ifaces : [@iface_ref]/~ = if self.eat(token::COLON)
{ self.parse_iface_ref_list() }
else { [] };
else { []/~ };
self.expect(token::LBRACE);
let mut ms: [@class_member] = [];
let mut ms: [@class_member]/~ = []/~;
let ctor_id = self.get_id();
let mut the_ctor : option<(fn_decl, blk, codemap::span)> = none;
let mut the_dtor : option<(blk, codemap::span)> = none;
@ -2092,16 +2097,16 @@ class parser {
}
else if self.eat_keyword("priv") {
self.expect(token::LBRACE);
let mut results = [];
let mut results = []/~;
while self.token != token::RBRACE {
results += [self.parse_single_class_item(private)];
results += [self.parse_single_class_item(private)]/~;
}
self.bump();
ret members(results);
}
else {
// Probably need to parse attrs
ret members([self.parse_single_class_item(public)]);
ret members([self.parse_single_class_item(public)]/~);
}
}
@ -2112,11 +2117,11 @@ class parser {
}
fn parse_mod_items(term: token::token,
+first_item_attrs: [attribute]) -> _mod {
+first_item_attrs: [attribute]/~) -> _mod {
// Shouldn't be any view items since we've already parsed an item attr
let {attrs_remaining, view_items} =
self.parse_view(first_item_attrs, false);
let mut items: [@item] = [];
let mut items: [@item]/~ = []/~;
let mut first = true;
while self.token != term {
let mut attrs = self.parse_outer_attributes();
@ -2124,7 +2129,7 @@ class parser {
#debug["parse_mod_items: parse_item(attrs=%?)", attrs];
let vis = self.parse_visibility(private);
alt self.parse_item(attrs, vis) {
some(i) { items += [i]; }
some(i) { items += [i]/~; }
_ {
self.fatal("expected item but found '" +
token_to_str(self.reader, self.token) + "'");
@ -2160,7 +2165,7 @@ class parser {
(id, item_mod(m), some(inner_attrs.inner))
}
fn parse_item_native_fn(+attrs: [attribute],
fn parse_item_native_fn(+attrs: [attribute]/~,
purity: purity) -> @native_item {
let lo = self.last_span.lo;
let t = self.parse_fn_header();
@ -2186,22 +2191,22 @@ class parser {
else { self.unexpected(); }
}
fn parse_native_item(+attrs: [attribute]) ->
fn parse_native_item(+attrs: [attribute]/~) ->
@native_item {
self.parse_item_native_fn(attrs, self.parse_fn_purity())
}
fn parse_native_mod_items(+first_item_attrs: [attribute]) ->
fn parse_native_mod_items(+first_item_attrs: [attribute]/~) ->
native_mod {
// Shouldn't be any view items since we've already parsed an item attr
let {attrs_remaining, view_items} =
self.parse_view(first_item_attrs, false);
let mut items: [@native_item] = [];
let mut items: [@native_item]/~ = []/~;
let mut initial_attrs = attrs_remaining;
while self.token != token::RBRACE {
let attrs = initial_attrs + self.parse_outer_attributes();
initial_attrs = [];
items += [self.parse_native_item(attrs)];
initial_attrs = []/~;
items += [self.parse_native_item(attrs)]/~;
}
ret {view_items: view_items,
items: items};
@ -2246,7 +2251,7 @@ class parser {
let id = self.parse_ident();
let rp = self.parse_region_param();
let ty_params = self.parse_ty_params();
let mut variants: [variant] = [];
let mut variants: [variant]/~ = []/~;
// Newtype syntax
if self.token == token::EQ {
self.check_restricted_keywords_(*id);
@ -2256,12 +2261,12 @@ class parser {
let variant =
spanned(ty.span.lo, ty.span.hi,
{name: id,
attrs: [],
args: [{ty: ty, id: self.get_id()}],
attrs: []/~,
args: [{ty: ty, id: self.get_id()}]/~,
id: self.get_id(),
disr_expr: none,
vis: public});
ret (id, item_enum([variant], ty_params, rp), none);
ret (id, item_enum([variant]/~, ty_params, rp), none);
}
self.expect(token::LBRACE);
@ -2272,7 +2277,7 @@ class parser {
let vlo = self.span.lo;
let vis = self.parse_visibility(default_vis);
let ident = self.parse_value_ident();
let mut args = [], disr_expr = none;
let mut args = []/~, disr_expr = none;
if self.token == token::LPAREN {
all_nullary = false;
let arg_tys = self.parse_unspanned_seq(
@ -2280,7 +2285,7 @@ class parser {
seq_sep_trailing_disallowed(token::COMMA),
{|p| p.parse_ty(false)});
for arg_tys.each {|ty|
args += [{ty: ty, id: self.get_id()}];
args += [{ty: ty, id: self.get_id()}]/~;
}
} else if self.eat(token::EQ) {
have_disr = true;
@ -2290,7 +2295,7 @@ class parser {
let vr = {name: ident, attrs: variant_attrs,
args: args, id: self.get_id(),
disr_expr: disr_expr, vis: vis};
variants += [spanned(vlo, self.last_span.hi, vr)];
variants += [spanned(vlo, self.last_span.hi, vr)]/~;
if !self.eat(token::COMMA) { break; }
}
@ -2333,7 +2338,7 @@ class parser {
}
}
fn parse_item(+attrs: [attribute], vis: visibility)
fn parse_item(+attrs: [attribute]/~, vis: visibility)
-> option<@item> {
let lo = self.span.lo;
let (ident, item_, extra_attrs) = if self.eat_keyword("const") {
@ -2384,20 +2389,20 @@ class parser {
fn parse_view_path() -> @view_path {
let lo = self.span.lo;
let first_ident = self.parse_ident();
let mut path = [first_ident];
let mut path = [first_ident]/~;
#debug("parsed view_path: %s", *first_ident);
alt self.token {
token::EQ {
// x = foo::bar
self.bump();
path = [self.parse_ident()];
path = [self.parse_ident()]/~;
while self.token == token::MOD_SEP {
self.bump();
let id = self.parse_ident();
path += [id];
path += [id]/~;
}
let path = @{span: mk_sp(lo, self.span.hi), global: false,
idents: path, rp: none, types: []};
idents: path, rp: none, types: []/~};
ret @spanned(lo, self.span.hi,
view_path_simple(first_ident, path, self.get_id()));
}
@ -2411,7 +2416,7 @@ class parser {
token::IDENT(i, _) {
self.bump();
path += [self.get_str(i)];
path += [self.get_str(i)]/~;
}
// foo::bar::{a,b,c}
@ -2422,7 +2427,7 @@ class parser {
{|p| p.parse_path_list_ident()});
let path = @{span: mk_sp(lo, self.span.hi),
global: false, idents: path,
rp: none, types: []};
rp: none, types: []/~};
ret @spanned(lo, self.span.hi,
view_path_list(path, idents, self.get_id()));
}
@ -2432,7 +2437,7 @@ class parser {
self.bump();
let path = @{span: mk_sp(lo, self.span.hi),
global: false, idents: path,
rp: none, types: []};
rp: none, types: []/~};
ret @spanned(lo, self.span.hi,
view_path_glob(path, self.get_id()));
}
@ -2445,16 +2450,16 @@ class parser {
}
let last = path[vec::len(path) - 1u];
let path = @{span: mk_sp(lo, self.span.hi), global: false,
idents: path, rp: none, types: []};
idents: path, rp: none, types: []/~};
ret @spanned(lo, self.span.hi,
view_path_simple(last, path, self.get_id()));
}
fn parse_view_paths() -> [@view_path] {
let mut vp = [self.parse_view_path()];
fn parse_view_paths() -> [@view_path]/~ {
let mut vp = [self.parse_view_path()]/~;
while self.token == token::COMMA {
self.bump();
vp += [self.parse_view_path()];
vp += [self.parse_view_path()]/~;
}
ret vp;
}
@ -2468,7 +2473,7 @@ class parser {
|| self.token_is_keyword("export", tok)
}
fn parse_view_item(+attrs: [attribute]) -> @view_item {
fn parse_view_item(+attrs: [attribute]/~) -> @view_item {
let lo = self.span.lo, vis = self.parse_visibility(private);
let node = if self.eat_keyword("use") {
self.parse_use()
@ -2482,14 +2487,14 @@ class parser {
vis: vis, span: mk_sp(lo, self.last_span.hi)}
}
fn parse_view(+first_item_attrs: [attribute],
only_imports: bool) -> {attrs_remaining: [attribute],
view_items: [@view_item]} {
fn parse_view(+first_item_attrs: [attribute]/~,
only_imports: bool) -> {attrs_remaining: [attribute]/~,
view_items: [@view_item]/~} {
let mut attrs = first_item_attrs + self.parse_outer_attributes();
let mut items = [];
let mut items = []/~;
while if only_imports { self.is_keyword("import") }
else { self.is_view_item() } {
items += [self.parse_view_item(attrs)];
items += [self.parse_view_item(attrs)]/~;
attrs = self.parse_outer_attributes();
}
{attrs_remaining: attrs, view_items: items}
@ -2502,7 +2507,7 @@ class parser {
let first_item_outer_attrs = crate_attrs.next;
let m = self.parse_mod_items(token::EOF, first_item_outer_attrs);
ret @spanned(lo, self.span.lo,
{directives: [],
{directives: []/~,
module: m,
attrs: crate_attrs.inner,
config: self.cfg});
@ -2523,7 +2528,7 @@ class parser {
//
// Each directive imperatively extends its environment with 0 or more
// items.
fn parse_crate_directive(first_outer_attr: [attribute]) ->
fn parse_crate_directive(first_outer_attr: [attribute]/~) ->
crate_directive {
// Collect the next attributes
@ -2564,8 +2569,8 @@ class parser {
}
fn parse_crate_directives(term: token::token,
first_outer_attr: [attribute]) ->
[@crate_directive] {
first_outer_attr: [attribute]/~) ->
[@crate_directive]/~ {
// This is pretty ugly. If we have an outer attribute then we can't
// accept seeing the terminator next, so if we do see it then fail the
@ -2574,12 +2579,12 @@ class parser {
self.expect_keyword("mod");
}
let mut cdirs: [@crate_directive] = [];
let mut cdirs: [@crate_directive]/~ = []/~;
let mut first_outer_attr = first_outer_attr;
while self.token != term {
let cdir = @self.parse_crate_directive(first_outer_attr);
cdirs += [cdir];
first_outer_attr = [];
cdirs += [cdir]/~;
first_outer_attr = []/~;
}
ret cdirs;
}

View file

@ -260,7 +260,7 @@ fn contextual_keyword_table() -> hashmap<str, ()> {
"with",
/* temp */
"sep", "many", "at_least_one", "parse"
];
]/~;
for keys.each {|word|
words.insert(word, ());
}
@ -298,7 +298,7 @@ fn restricted_keyword_table() -> hashmap<str, ()> {
"true", "trait", "type",
"unchecked", "unsafe",
"while"
];
]/~;
for keys.each {|word|
words.insert(word, ());
}

View file

@ -71,7 +71,7 @@ fn tok_str(++t: token) -> str {
}
}
fn buf_str(toks: [mut token], szs: [mut int], left: uint, right: uint,
fn buf_str(toks: [mut token]/~, szs: [mut int]/~, left: uint, right: uint,
lim: uint) -> str {
let n = vec::len(toks);
assert (n == vec::len(szs));
@ -100,9 +100,9 @@ fn mk_printer(out: io::writer, linewidth: uint) -> printer {
// fall behind.
let n: uint = 3u * linewidth;
#debug("mk_printer %u", linewidth);
let token: [mut token] = vec::to_mut(vec::from_elem(n, EOF));
let size: [mut int] = vec::to_mut(vec::from_elem(n, 0));
let scan_stack: [mut uint] = vec::to_mut(vec::from_elem(n, 0u));
let token: [mut token]/~ = vec::to_mut(vec::from_elem(n, EOF));
let size: [mut int]/~ = vec::to_mut(vec::from_elem(n, 0));
let scan_stack: [mut uint]/~ = vec::to_mut(vec::from_elem(n, 0u));
@{out: out,
buf_len: n,
mut margin: linewidth as int,
@ -206,8 +206,8 @@ type printer = @{
mut space: int, // number of spaces left on line
mut left: uint, // index of left side of input stream
mut right: uint, // index of right side of input stream
token: [mut token], // ring-buffr stream goes through
size: [mut int], // ring-buffer of calculated sizes
token: [mut token]/~, // ring-buffr stream goes through
size: [mut int]/~, // ring-buffer of calculated sizes
mut left_total: int, // running size of stream "...left"
mut right_total: int, // running size of stream "...right"
// pseudo-stack, really a ring too. Holds the
@ -216,7 +216,7 @@ type printer = @{
// BEGIN (if there is any) on top of it. Stuff is flushed off the
// bottom as it becomes irrelevant due to the primary ring-buffer
// advancing.
mut scan_stack: [mut uint],
mut scan_stack: [mut uint]/~,
mut scan_stack_empty: bool, // top==bottom disambiguator
mut top: uint, // index of top of scan_stack
mut bottom: uint, // index of bottom of scan_stack
@ -231,7 +231,7 @@ impl printer for printer {
// be very careful with this!
fn replace_last_token(t: token) { self.token[self.right] = t; }
fn pretty_print(t: token) {
#debug("pp [%u,%u]", self.left, self.right);
#debug("pp [%u,%u]", self.left, self.right);
alt t {
EOF {
if !self.scan_stack_empty {
@ -248,17 +248,17 @@ impl printer for printer {
self.left = 0u;
self.right = 0u;
} else { self.advance_right(); }
#debug("pp BEGIN/buffer [%u,%u]", self.left, self.right);
#debug("pp BEGIN/buffer [%u,%u]", self.left, self.right);
self.token[self.right] = t;
self.size[self.right] = -self.right_total;
self.scan_push(self.right);
}
END {
if self.scan_stack_empty {
#debug("pp END/print [%u,%u]", self.left, self.right);
#debug("pp END/print [%u,%u]", self.left, self.right);
self.print(t, 0);
} else {
#debug("pp END/buffer [%u,%u]", self.left, self.right);
#debug("pp END/buffer [%u,%u]", self.left, self.right);
self.advance_right();
self.token[self.right] = t;
self.size[self.right] = -1;
@ -272,7 +272,7 @@ impl printer for printer {
self.left = 0u;
self.right = 0u;
} else { self.advance_right(); }
#debug("pp BREAK/buffer [%u,%u]", self.left, self.right);
#debug("pp BREAK/buffer [%u,%u]", self.left, self.right);
self.check_stack(0);
self.scan_push(self.right);
self.token[self.right] = t;
@ -281,10 +281,10 @@ impl printer for printer {
}
STRING(s, len) {
if self.scan_stack_empty {
#debug("pp STRING/print [%u,%u]", self.left, self.right);
#debug("pp STRING/print [%u,%u]", self.left, self.right);
self.print(t, len);
} else {
#debug("pp STRING/buffer [%u,%u]", self.left, self.right);
#debug("pp STRING/buffer [%u,%u]", self.left, self.right);
self.advance_right();
self.token[self.right] = t;
self.size[self.right] = len;
@ -295,7 +295,7 @@ impl printer for printer {
}
}
fn check_stream() {
#debug("check_stream [%u, %u] with left_total=%d, right_total=%d",
#debug("check_stream [%u, %u] with left_total=%d, right_total=%d",
self.left, self.right, self.left_total, self.right_total);
if self.right_total - self.left_total > self.space {
#debug("scan window is %d, longer than space on line (%d)",
@ -347,7 +347,7 @@ impl printer for printer {
assert (self.right != self.left);
}
fn advance_left(++x: token, L: int) {
#debug("advnce_left [%u,%u], sizeof(%u)=%d", self.left, self.right,
#debug("advnce_left [%u,%u], sizeof(%u)=%d", self.left, self.right,
self.left, L);
if L >= 0 {
self.print(x, L);

View file

@ -26,8 +26,8 @@ fn no_ann() -> pp_ann {
type ps =
@{s: pp::printer,
cm: option<codemap>,
comments: option<[comments::cmnt]>,
literals: option<[comments::lit]>,
comments: option<[comments::cmnt]/~>,
literals: option<[comments::lit]/~>,
mut cur_cmnt: uint,
mut cur_lit: uint,
boxes: dvec<pp::breaks>,
@ -46,8 +46,8 @@ fn end(s: ps) {
fn rust_printer(writer: io::writer) -> ps {
ret @{s: pp::mk_printer(writer, default_columns),
cm: none::<codemap>,
comments: none::<[comments::cmnt]>,
literals: none::<[comments::lit]>,
comments: none::<[comments::cmnt]/~>,
literals: none::<[comments::lit]/~>,
mut cur_cmnt: 0u,
mut cur_lit: 0u,
boxes: dvec(),
@ -97,7 +97,7 @@ fn item_to_str(i: @ast::item) -> str { ret to_str(i, print_item); }
fn attr_to_str(i: ast::attribute) -> str { ret to_str(i, print_attribute); }
fn typarams_to_str(tps: [ast::ty_param]) -> str {
fn typarams_to_str(tps: [ast::ty_param]/~) -> str {
ret to_str(tps, print_type_params)
}
@ -106,7 +106,7 @@ fn path_to_str(&&p: @ast::path) -> str {
}
fn fun_to_str(decl: ast::fn_decl, name: ast::ident,
params: [ast::ty_param]) -> str {
params: [ast::ty_param]/~) -> str {
let buffer = io::mem_buffer();
let s = rust_printer(io::mem_buffer_writer(buffer));
print_fn(s, decl, name, params);
@ -119,15 +119,15 @@ fn fun_to_str(decl: ast::fn_decl, name: ast::ident,
#[test]
fn test_fun_to_str() {
let decl: ast::fn_decl = {
inputs: [],
inputs: []/~,
output: @{id: 0,
node: ast::ty_nil,
span: ast_util::dummy_sp()},
purity: ast::impure_fn,
cf: ast::return_val,
constraints: []
constraints: []/~
};
assert fun_to_str(decl, "a", []) == "fn a()";
assert fun_to_str(decl, "a", []/~) == "fn a()";
}
fn block_to_str(blk: ast::blk) -> str {
@ -158,8 +158,8 @@ fn variant_to_str(var: ast::variant) -> str {
fn test_variant_to_str() {
let var = ast_util::respan(ast_util::dummy_sp(), {
name: "principle_skinner",
attrs: [],
args: [],
attrs: []/~,
args: []/~,
id: 0,
disr_expr: none
});
@ -254,7 +254,7 @@ fn synth_comment(s: ps, text: str) {
word(s.s, "*/");
}
fn commasep<IN>(s: ps, b: breaks, elts: [IN], op: fn(ps, IN)) {
fn commasep<IN>(s: ps, b: breaks, elts: [IN]/~, op: fn(ps, IN)) {
box(s, 0u, b);
let mut first = true;
for elts.each {|elt|
@ -265,7 +265,7 @@ fn commasep<IN>(s: ps, b: breaks, elts: [IN], op: fn(ps, IN)) {
}
fn commasep_cmnt<IN>(s: ps, b: breaks, elts: [IN], op: fn(ps, IN),
fn commasep_cmnt<IN>(s: ps, b: breaks, elts: [IN]/~, op: fn(ps, IN),
get_span: fn(IN) -> codemap::span) {
box(s, 0u, b);
let len = vec::len::<IN>(elts);
@ -284,12 +284,12 @@ fn commasep_cmnt<IN>(s: ps, b: breaks, elts: [IN], op: fn(ps, IN),
end(s);
}
fn commasep_exprs(s: ps, b: breaks, exprs: [@ast::expr]) {
fn commasep_exprs(s: ps, b: breaks, exprs: [@ast::expr]/~) {
fn expr_span(&&expr: @ast::expr) -> codemap::span { ret expr.span; }
commasep_cmnt(s, b, exprs, print_expr, expr_span);
}
fn print_mod(s: ps, _mod: ast::_mod, attrs: [ast::attribute]) {
fn print_mod(s: ps, _mod: ast::_mod, attrs: [ast::attribute]/~) {
print_inner_attributes(s, attrs);
for _mod.view_items.each {|vitem|
print_view_item(s, vitem);
@ -297,7 +297,7 @@ fn print_mod(s: ps, _mod: ast::_mod, attrs: [ast::attribute]) {
for _mod.items.each {|item| print_item(s, item); }
}
fn print_native_mod(s: ps, nmod: ast::native_mod, attrs: [ast::attribute]) {
fn print_native_mod(s: ps, nmod: ast::native_mod, attrs: [ast::attribute]/~) {
print_inner_attributes(s, attrs);
for nmod.view_items.each {|vitem|
print_view_item(s, vitem);
@ -504,7 +504,7 @@ fn print_item(s: ps, &&item: @ast::item) {
hardbreak_if_not_bol(s);
maybe_print_comment(s, ctor.span.lo);
head(s, "new");
print_fn_args_and_ret(s, ctor.node.dec, []);
print_fn_args_and_ret(s, ctor.node.dec, []/~);
space(s.s);
print_block(s, ctor.node.body);
option::iter(m_dtor) {|dtor|
@ -626,7 +626,7 @@ fn print_method(s: ps, meth: @ast::method) {
print_block_with_attrs(s, meth.body, meth.attrs);
}
fn print_outer_attributes(s: ps, attrs: [ast::attribute]) {
fn print_outer_attributes(s: ps, attrs: [ast::attribute]/~) {
let mut count = 0;
for attrs.each {|attr|
alt attr.node.style {
@ -637,7 +637,7 @@ fn print_outer_attributes(s: ps, attrs: [ast::attribute]) {
if count > 0 { hardbreak_if_not_bol(s); }
}
fn print_inner_attributes(s: ps, attrs: [ast::attribute]) {
fn print_inner_attributes(s: ps, attrs: [ast::attribute]/~) {
let mut count = 0;
for attrs.each {|attr|
alt attr.node.style {
@ -685,7 +685,7 @@ fn print_block(s: ps, blk: ast::blk) {
print_possibly_embedded_block(s, blk, block_normal, indent_unit);
}
fn print_block_with_attrs(s: ps, blk: ast::blk, attrs: [ast::attribute]) {
fn print_block_with_attrs(s: ps, blk: ast::blk, attrs: [ast::attribute]/~) {
print_possibly_embedded_block_(s, blk, block_normal, indent_unit, attrs);
}
@ -694,11 +694,11 @@ enum embed_type { block_macro, block_block_fn, block_normal, }
fn print_possibly_embedded_block(s: ps, blk: ast::blk, embedded: embed_type,
indented: uint) {
print_possibly_embedded_block_(
s, blk, embedded, indented, []);
s, blk, embedded, indented, []/~);
}
fn print_possibly_embedded_block_(s: ps, blk: ast::blk, embedded: embed_type,
indented: uint, attrs: [ast::attribute]) {
indented: uint, attrs: [ast::attribute]/~) {
alt blk.node.rules {
ast::unchecked_blk { word(s.s, "unchecked"); }
ast::unsafe_blk { word(s.s, "unsafe"); }
@ -811,10 +811,10 @@ fn print_mac(s: ps, m: ast::mac) {
fn print_vstore(s: ps, t: ast::vstore) {
alt t {
ast::vstore_fixed(some(i)) { word_space(s, #fmt("/%u", i)); }
ast::vstore_fixed(none) { word_space(s, "/_"); }
ast::vstore_uniq { word_space(s, "/~"); }
ast::vstore_box { word_space(s, "/@"); }
ast::vstore_fixed(some(i)) { word(s.s, #fmt("/%u", i)); }
ast::vstore_fixed(none) { word(s.s, "/_"); }
ast::vstore_uniq { word(s.s, "/~"); }
ast::vstore_box { word(s.s, "/@"); }
ast::vstore_slice(r) { word(s.s, "/"); print_region(s, r); }
}
}
@ -1259,18 +1259,18 @@ fn print_pat(s: ps, &&pat: @ast::pat) {
}
fn print_fn(s: ps, decl: ast::fn_decl, name: ast::ident,
typarams: [ast::ty_param]) {
typarams: [ast::ty_param]/~) {
alt decl.purity {
ast::impure_fn { head(s, "fn") }
_ { head(s, purity_to_str(decl.purity) + " fn") }
}
word(s.s, *name);
print_type_params(s, typarams);
print_fn_args_and_ret(s, decl, []);
print_fn_args_and_ret(s, decl, []/~);
}
fn print_fn_args(s: ps, decl: ast::fn_decl,
cap_items: [ast::capture_item]) {
cap_items: [ast::capture_item]/~) {
commasep(s, inconsistent, decl.inputs, print_arg);
if cap_items.is_not_empty() {
let mut first = decl.inputs.is_empty();
@ -1284,7 +1284,7 @@ fn print_fn_args(s: ps, decl: ast::fn_decl,
}
fn print_fn_args_and_ret(s: ps, decl: ast::fn_decl,
cap_items: [ast::capture_item]) {
cap_items: [ast::capture_item]/~) {
popen(s);
print_fn_args(s, decl, cap_items);
pclose(s);
@ -1301,7 +1301,7 @@ fn print_fn_args_and_ret(s: ps, decl: ast::fn_decl,
}
fn print_fn_block_args(s: ps, decl: ast::fn_decl,
cap_items: [ast::capture_item]) {
cap_items: [ast::capture_item]/~) {
word(s.s, "|");
print_fn_args(s, decl, cap_items);
word(s.s, "|");
@ -1329,7 +1329,7 @@ fn print_arg_mode(s: ps, m: ast::mode) {
if ms != "" { word(s.s, ms); }
}
fn print_bounds(s: ps, bounds: @[ast::ty_param_bound]) {
fn print_bounds(s: ps, bounds: @[ast::ty_param_bound]/~) {
if vec::len(*bounds) > 0u {
word(s.s, ":");
for vec::each(*bounds) {|bound|
@ -1351,7 +1351,7 @@ fn print_region_param(s: ps, rp: ast::region_param) {
}
}
fn print_type_params(s: ps, &&params: [ast::ty_param]) {
fn print_type_params(s: ps, &&params: [ast::ty_param]/~) {
if vec::len(params) > 0u {
word(s.s, "<");
fn printParam(s: ps, param: ast::ty_param) {
@ -1408,7 +1408,7 @@ fn print_view_path(s: ps, &&vp: @ast::view_path) {
}
}
fn print_view_paths(s: ps, vps: [@ast::view_path]) {
fn print_view_paths(s: ps, vps: [@ast::view_path]/~) {
commasep(s, inconsistent, vps, print_view_path);
}
@ -1480,7 +1480,7 @@ fn print_arg(s: ps, input: ast::arg) {
fn print_ty_fn(s: ps, opt_proto: option<ast::proto>,
decl: ast::fn_decl, id: option<ast::ident>,
tps: option<[ast::ty_param]>) {
tps: option<[ast::ty_param]/~>) {
ibox(s, indent_unit);
word(s.s, opt_proto_to_str(opt_proto));
alt id { some(id) { word(s.s, " "); word(s.s, *id); } _ { } }
@ -1682,7 +1682,8 @@ fn next_comment(s: ps) -> option<comments::cmnt> {
}
}
fn constr_args_to_str<T>(f: fn@(T) -> str, args: [@ast::sp_constr_arg<T>]) ->
fn constr_args_to_str<T>(f: fn@(T) -> str,
args: [@ast::sp_constr_arg<T>]/~) ->
str {
let mut comma = false;
let mut s = "(";
@ -1727,7 +1728,7 @@ fn ty_constr_to_str(&&c: @ast::ty_constr) -> str {
c.node.args);
}
fn constrs_str<T>(constrs: [T], elt: fn(T) -> str) -> str {
fn constrs_str<T>(constrs: [T]/~, elt: fn(T) -> str) -> str {
let mut s = "", colon = true;
for constrs.each {|c|
if colon { s += " : "; colon = false; } else { s += ", "; }

View file

@ -13,13 +13,13 @@ import codemap::span;
enum vt<E> { mk_vt(visitor<E>), }
enum fn_kind {
fk_item_fn(ident, [ty_param]), //< an item declared with fn()
fk_method(ident, [ty_param], @method),
fk_item_fn(ident, [ty_param]/~), //< an item declared with fn()
fk_method(ident, [ty_param]/~, @method),
fk_anon(proto, capture_clause), //< an anonymous function like fn@(...)
fk_fn_block(capture_clause), //< a block {||...}
fk_ctor(ident, [ty_param], node_id /* self id */,
fk_ctor(ident, [ty_param]/~, node_id /* self id */,
def_id /* parent class id */), // class constructor
fk_dtor([ty_param], node_id /* self id */,
fk_dtor([ty_param]/~, node_id /* self id */,
def_id /* parent class id */) // class destructor
}
@ -33,13 +33,13 @@ fn name_of_fn(fk: fn_kind) -> ident {
}
}
fn tps_of_fn(fk: fn_kind) -> [ty_param] {
fn tps_of_fn(fk: fn_kind) -> [ty_param]/~ {
alt fk {
fk_item_fn(_, tps) | fk_method(_, tps, _)
| fk_ctor(_, tps, _, _) | fk_dtor(tps, _, _) {
/* FIXME (#2543) */ copy tps
}
fk_anon(*) | fk_fn_block(*) { [] }
fk_anon(*) | fk_fn_block(*) { []/~ }
}
}
@ -58,7 +58,7 @@ type visitor<E> =
visit_decl: fn@(@decl, E, vt<E>),
visit_expr: fn@(@expr, E, vt<E>),
visit_ty: fn@(@ty, E, vt<E>),
visit_ty_params: fn@([ty_param], E, vt<E>),
visit_ty_params: fn@([ty_param]/~, E, vt<E>),
visit_constr: fn@(@path, span, node_id, E, vt<E>),
visit_fn: fn@(fn_kind, fn_decl, blk, span, node_id, E, vt<E>),
visit_class_item: fn@(@class_member, E, vt<E>)};
@ -256,7 +256,7 @@ fn visit_native_item<E>(ni: @native_item, e: E, v: vt<E>) {
}
}
fn visit_ty_params<E>(tps: [ty_param], e: E, v: vt<E>) {
fn visit_ty_params<E>(tps: [ty_param]/~, e: E, v: vt<E>) {
for tps.each {|tp|
for vec::each(*tp.bounds) {|bound|
alt bound {
@ -286,7 +286,7 @@ fn visit_method_helper<E>(m: @method, e: E, v: vt<E>) {
}
// Similar logic to the comment on visit_method_helper - Tim
fn visit_class_ctor_helper<E>(ctor: class_ctor, nm: ident, tps: [ty_param],
fn visit_class_ctor_helper<E>(ctor: class_ctor, nm: ident, tps: [ty_param]/~,
parent_id: def_id, e: E, v: vt<E>) {
v.visit_fn(fk_ctor(/* FIXME (#2543) */ copy nm,
/* FIXME (#2543) */ copy tps,
@ -295,7 +295,7 @@ fn visit_class_ctor_helper<E>(ctor: class_ctor, nm: ident, tps: [ty_param],
}
fn visit_class_dtor_helper<E>(dtor: class_dtor, tps: [ty_param],
fn visit_class_dtor_helper<E>(dtor: class_dtor, tps: [ty_param]/~,
parent_id: def_id, e: E, v: vt<E>) {
v.visit_fn(fk_dtor(/* FIXME (#2543) */ copy tps, dtor.node.self_id,
parent_id), ast_util::dtor_dec(),
@ -337,7 +337,7 @@ fn visit_expr_opt<E>(eo: option<@expr>, e: E, v: vt<E>) {
alt eo { none { } some(ex) { v.visit_expr(ex, e, v); } }
}
fn visit_exprs<E>(exprs: [@expr], e: E, v: vt<E>) {
fn visit_exprs<E>(exprs: [@expr]/~, e: E, v: vt<E>) {
for exprs.each {|ex| v.visit_expr(ex, e, v); }
}
@ -454,7 +454,7 @@ type simple_visitor =
visit_decl: fn@(@decl),
visit_expr: fn@(@expr),
visit_ty: fn@(@ty),
visit_ty_params: fn@([ty_param]),
visit_ty_params: fn@([ty_param]/~),
visit_constr: fn@(@path, span, node_id),
visit_fn: fn@(fn_kind, fn_decl, blk, span, node_id),
visit_class_item: fn@(@class_member)};
@ -474,7 +474,7 @@ fn default_simple_visitor() -> simple_visitor {
visit_decl: fn@(_d: @decl) { },
visit_expr: fn@(_e: @expr) { },
visit_ty: simple_ignore_ty,
visit_ty_params: fn@(_ps: [ty_param]) {},
visit_ty_params: fn@(_ps: [ty_param]/~) {},
visit_constr: fn@(_p: @path, _sp: span, _id: node_id) { },
visit_fn: fn@(_fk: fn_kind, _d: fn_decl, _b: blk, _sp: span,
_id: node_id) { },
@ -533,7 +533,9 @@ fn mk_simple_visitor(v: simple_visitor) -> vt<()> {
f(ty);
visit_ty(ty, e, v);
}
fn v_ty_params(f: fn@([ty_param]), ps: [ty_param], &&e: (), v: vt<()>) {
fn v_ty_params(f: fn@([ty_param]/~),
ps: [ty_param]/~,
&&e: (), v: vt<()>) {
f(ps);
visit_ty_params(ps, e, v);
}

View file

@ -292,27 +292,27 @@ fn build_link_meta(sess: session, c: ast::crate, output: str,
type provided_metas =
{name: option<@str>,
vers: option<@str>,
cmh_items: [@ast::meta_item]};
cmh_items: [@ast::meta_item]/~};
fn provided_link_metas(sess: session, c: ast::crate) ->
provided_metas {
let mut name: option<@str> = none;
let mut vers: option<@str> = none;
let mut cmh_items: [@ast::meta_item] = [];
let mut cmh_items: [@ast::meta_item]/~ = []/~;
let linkage_metas = attr::find_linkage_metas(c.node.attrs);
attr::require_unique_names(sess.diagnostic(), linkage_metas);
for linkage_metas.each {|meta|
if *attr::get_meta_item_name(meta) == "name" {
alt attr::get_meta_item_value_str(meta) {
some(v) { name = some(v); }
none { cmh_items += [meta]; }
none { cmh_items += [meta]/~; }
}
} else if *attr::get_meta_item_name(meta) == "vers" {
alt attr::get_meta_item_value_str(meta) {
some(v) { vers = some(v); }
none { cmh_items += [meta]; }
none { cmh_items += [meta]/~; }
}
} else { cmh_items += [meta]; }
} else { cmh_items += [meta]/~; }
}
ret {name: name, vers: vers, cmh_items: cmh_items};
}
@ -320,7 +320,7 @@ fn build_link_meta(sess: session, c: ast::crate, output: str,
// This calculates CMH as defined above
fn crate_meta_extras_hash(sha: sha1, _crate: ast::crate,
metas: provided_metas,
dep_hashes: [@str]) -> str {
dep_hashes: [@str]/~) -> str {
fn len_and_str(s: str) -> str {
ret #fmt["%u_%s", str::len(s), s];
}
@ -490,7 +490,7 @@ fn mangle(ss: path) -> str {
}
fn exported_name(path: path, hash: @str, vers: @str) -> str {
ret mangle(path + [path_name(hash)] + [path_name(vers)]);
ret mangle(path + [path_name(hash)]/~ + [path_name(vers)]/~);
}
fn mangle_exported_name(ccx: @crate_ctxt, path: path, t: ty::t) -> str {
@ -503,12 +503,12 @@ fn mangle_internal_name_by_type_only(ccx: @crate_ctxt,
str {
let s = @util::ppaux::ty_to_short_str(ccx.tcx, t);
let hash = get_symbol_hash(ccx, t);
ret mangle([path_name(name), path_name(s), path_name(@hash)]);
ret mangle([path_name(name), path_name(s), path_name(@hash)]/~);
}
fn mangle_internal_name_by_path_and_seq(ccx: @crate_ctxt, path: path,
flav: @str) -> str {
ret mangle(path + [path_name(@ccx.names(*flav))]);
ret mangle(path + [path_name(@ccx.names(*flav))]/~);
}
fn mangle_internal_name_by_path(_ccx: @crate_ctxt, path: path) -> str {
@ -577,8 +577,8 @@ fn link_binary(sess: session,
// The invocations of cc share some flags across platforms
let mut cc_args =
[stage] + sess.targ_cfg.target_strs.cc_args +
["-o", output, obj_filename];
[stage]/~ + sess.targ_cfg.target_strs.cc_args +
["-o", output, obj_filename]/~;
let mut lib_cmd;
let os = sess.targ_cfg.os;
@ -591,18 +591,18 @@ fn link_binary(sess: session,
let cstore = sess.cstore;
for cstore::get_used_crate_files(cstore).each {|cratepath|
if str::ends_with(cratepath, ".rlib") {
cc_args += [cratepath];
cc_args += [cratepath]/~;
cont;
}
let cratepath = cratepath;
let dir = path::dirname(cratepath);
if dir != "" { cc_args += ["-L" + dir]; }
if dir != "" { cc_args += ["-L" + dir]/~; }
let libarg = unlib(sess.targ_cfg, path::basename(cratepath));
cc_args += ["-l" + libarg];
cc_args += ["-l" + libarg]/~;
}
let ula = cstore::get_used_link_args(cstore);
for ula.each {|arg| cc_args += [arg]; }
for ula.each {|arg| cc_args += [arg]/~; }
// # Native library linking
@ -613,37 +613,37 @@ fn link_binary(sess: session,
// forces to make sure that library can be found at runtime.
let addl_paths = sess.opts.addl_lib_search_paths;
for addl_paths.each {|path| cc_args += ["-L" + path]; }
for addl_paths.each {|path| cc_args += ["-L" + path]/~; }
// The names of the native libraries
let used_libs = cstore::get_used_libraries(cstore);
for used_libs.each {|l| cc_args += ["-l" + l]; }
for used_libs.each {|l| cc_args += ["-l" + l]/~; }
if sess.building_library {
cc_args += [lib_cmd];
cc_args += [lib_cmd]/~;
// On mac we need to tell the linker to let this library
// be rpathed
if sess.targ_cfg.os == session::os_macos {
cc_args += ["-Wl,-install_name,@rpath/"
+ path::basename(output)];
+ path::basename(output)]/~;
}
}
if !sess.debugging_opt(session::no_rt) {
// Always want the runtime linked in
cc_args += ["-lrustrt"];
cc_args += ["-lrustrt"]/~;
}
// On linux librt and libdl are an indirect dependencies via rustrt,
// and binutils 2.22+ won't add them automatically
if sess.targ_cfg.os == session::os_linux {
cc_args += ["-lrt", "-ldl"];
cc_args += ["-lrt", "-ldl"]/~;
// LLVM implements the `frem` instruction as a call to `fmod`,
// which lives in libm. Similar to above, on some linuxes we
// have to be explicit about linking to it. See #2510
cc_args += ["-lm"];
cc_args += ["-lm"]/~;
}
if sess.targ_cfg.os == session::os_freebsd {
@ -653,7 +653,7 @@ fn link_binary(sess: session,
"-L/usr/local/lib/gcc44", "-lstdc++",
"-Wl,-z,origin",
"-Wl,-rpath,/usr/local/lib/gcc46",
"-Wl,-rpath,/usr/local/lib/gcc44"];
"-Wl,-rpath,/usr/local/lib/gcc44"]/~;
}
// OS X 10.6 introduced 'compact unwind info', which is produced by the
@ -661,11 +661,11 @@ fn link_binary(sess: session,
// understand how to unwind our __morestack frame, so we have to turn it
// off. This has impacted some other projects like GHC.
if sess.targ_cfg.os == session::os_macos {
cc_args += ["-Wl,-no_compact_unwind"];
cc_args += ["-Wl,-no_compact_unwind"]/~;
}
// Stack growth requires statically linking a __morestack function
cc_args += ["-lmorestack"];
cc_args += ["-lmorestack"]/~;
// FIXME (#2397): At some point we want to rpath our guesses as to where
// native libraries might live, based on the addl_lib_search_paths
@ -685,7 +685,7 @@ fn link_binary(sess: session,
// Clean up on Darwin
if sess.targ_cfg.os == session::os_macos {
run::run_program("dsymutil", [output]);
run::run_program("dsymutil", [output]/~);
}
// Remove the temporary object file if we aren't saving temps

View file

@ -13,12 +13,12 @@ pure fn not_win32(os: session::os) -> bool {
}
}
fn get_rpath_flags(sess: session::session, out_filename: str) -> [str] {
fn get_rpath_flags(sess: session::session, out_filename: str) -> [str]/~ {
let os = sess.targ_cfg.os;
// No rpath on windows
if os == session::os_win32 {
ret [];
ret []/~;
}
#debug("preparing the RPATH!");
@ -29,7 +29,7 @@ fn get_rpath_flags(sess: session::session, out_filename: str) -> [str] {
let libs = cstore::get_used_crate_files(sess.cstore);
// We don't currently rpath native libraries, but we know
// where rustrt is and we know every rust program needs it
let libs = libs + [get_sysroot_absolute_rt_lib(sess)];
let libs = libs + [get_sysroot_absolute_rt_lib(sess)]/~;
let target_triple = sess.opts.target_triple;
let rpaths = get_rpaths(os, cwd, sysroot, output, libs, target_triple);
@ -37,20 +37,20 @@ fn get_rpath_flags(sess: session::session, out_filename: str) -> [str] {
}
fn get_sysroot_absolute_rt_lib(sess: session::session) -> path::path {
let path = [sess.filesearch.sysroot()]
let path = [sess.filesearch.sysroot()]/~
+ filesearch::relative_target_lib_path(
sess.opts.target_triple)
+ [os::dll_filename("rustrt")];
+ [os::dll_filename("rustrt")]/~;
path::connect_many(path)
}
fn rpaths_to_flags(rpaths: [str]) -> [str] {
fn rpaths_to_flags(rpaths: [str]/~) -> [str]/~ {
vec::map(rpaths, { |rpath| #fmt("-Wl,-rpath,%s",rpath)})
}
fn get_rpaths(os: session::os, cwd: path::path, sysroot: path::path,
output: path::path, libs: [path::path],
target_triple: str) -> [str] {
output: path::path, libs: [path::path]/~,
target_triple: str) -> [str]/~ {
#debug("cwd: %s", cwd);
#debug("sysroot: %s", sysroot);
#debug("output: %s", output);
@ -70,9 +70,9 @@ fn get_rpaths(os: session::os, cwd: path::path, sysroot: path::path,
let abs_rpaths = get_absolute_rpaths(cwd, libs);
// And a final backup rpath to the global library location.
let fallback_rpaths = [get_install_prefix_rpath(cwd, target_triple)];
let fallback_rpaths = [get_install_prefix_rpath(cwd, target_triple)]/~;
fn log_rpaths(desc: str, rpaths: [str]) {
fn log_rpaths(desc: str, rpaths: [str]/~) {
#debug("%s rpaths:", desc);
for rpaths.each {|rpath|
#debug(" %s", rpath);
@ -93,7 +93,7 @@ fn get_rpaths(os: session::os, cwd: path::path, sysroot: path::path,
fn get_rpaths_relative_to_output(os: session::os,
cwd: path::path,
output: path::path,
libs: [path::path]) -> [str] {
libs: [path::path]/~) -> [str]/~ {
vec::map(libs, {|a|
check not_win32(os);
get_rpath_relative_to_output(os, cwd, output, a)
@ -139,8 +139,8 @@ fn get_relative_to(abs1: path::path, abs2: path::path) -> path::path {
start_idx += 1u;
}
let mut path = [];
for uint::range(start_idx, len1 - 1u) {|_i| path += [".."]; };
let mut path = []/~;
for uint::range(start_idx, len1 - 1u) {|_i| vec::push(path, ".."); };
path += vec::slice(split2, start_idx, len2 - 1u);
@ -151,7 +151,7 @@ fn get_relative_to(abs1: path::path, abs2: path::path) -> path::path {
}
}
fn get_absolute_rpaths(cwd: path::path, libs: [path::path]) -> [str] {
fn get_absolute_rpaths(cwd: path::path, libs: [path::path]/~) -> [str]/~ {
vec::map(libs, {|a|get_absolute_rpath(cwd, a)})
}
@ -174,17 +174,17 @@ fn get_install_prefix_rpath(cwd: path::path, target_triple: str) -> str {
fail "rustc compiled without CFG_PREFIX environment variable";
}
let path = [install_prefix]
let path = [install_prefix]/~
+ filesearch::relative_target_lib_path(target_triple);
get_absolute(cwd, path::connect_many(path))
}
fn minimize_rpaths(rpaths: [str]) -> [str] {
fn minimize_rpaths(rpaths: [str]/~) -> [str]/~ {
let set = map::str_hash::<()>();
let mut minimized = [];
let mut minimized = []/~;
for rpaths.each {|rpath|
if !set.contains_key(rpath) {
minimized += [rpath];
minimized += [rpath]/~;
set.insert(rpath, ());
}
}
@ -195,8 +195,8 @@ fn minimize_rpaths(rpaths: [str]) -> [str] {
mod test {
#[test]
fn test_rpaths_to_flags() {
let flags = rpaths_to_flags(["path1", "path2"]);
assert flags == ["-Wl,-rpath,path1", "-Wl,-rpath,path2"];
let flags = rpaths_to_flags(["path1", "path2"]/~);
assert flags == ["-Wl,-rpath,path1", "-Wl,-rpath,path2"]/~;
}
#[test]
@ -230,15 +230,15 @@ mod test {
#[test]
fn test_minimize1() {
let res = minimize_rpaths(["rpath1", "rpath2", "rpath1"]);
assert res == ["rpath1", "rpath2"];
let res = minimize_rpaths(["rpath1", "rpath2", "rpath1"]/~);
assert res == ["rpath1", "rpath2"]/~;
}
#[test]
fn test_minimize2() {
let res = minimize_rpaths(["1a", "2", "2", "1a", "4a",
"1a", "2", "3", "4a", "3"]);
assert res == ["1a", "2", "4a", "3"];
"1a", "2", "3", "4a", "3"]/~);
assert res == ["1a", "2", "4a", "3"]/~;
}
#[test]

View file

@ -3,5 +3,5 @@ type t = {
meta_sect_name: str,
data_layout: str,
target_triple: str,
cc_args: [str]
cc_args: [str]/~
};

View file

@ -33,10 +33,10 @@ fn declare_upcalls(targ_cfg: @session::config,
tydesc_type: TypeRef,
llmod: ModuleRef) -> @upcalls {
fn decl(llmod: ModuleRef, prefix: str, name: str,
tys: [TypeRef], rv: TypeRef) ->
tys: [TypeRef]/~, rv: TypeRef) ->
ValueRef {
let mut arg_tys: [TypeRef] = [];
for tys.each {|t| arg_tys += [t]; }
let mut arg_tys: [TypeRef]/~ = []/~;
for tys.each {|t| arg_tys += [t]/~; }
let fn_ty = T_fn(arg_tys, rv);
ret base::decl_cdecl_fn(llmod, prefix + name, fn_ty);
}
@ -51,61 +51,61 @@ fn declare_upcalls(targ_cfg: @session::config,
ret @{_fail: dv("fail", [T_ptr(T_i8()),
T_ptr(T_i8()),
size_t]),
size_t]/~),
trace: dv("trace", [T_ptr(T_i8()),
T_ptr(T_i8()),
int_t]),
int_t]/~),
malloc:
nothrow(d("malloc",
[T_ptr(tydesc_type), int_t],
[T_ptr(tydesc_type), int_t]/~,
T_ptr(T_i8()))),
free:
nothrow(dv("free", [T_ptr(T_i8())])),
nothrow(dv("free", [T_ptr(T_i8())]/~)),
exchange_malloc:
nothrow(d("exchange_malloc",
[T_ptr(tydesc_type), int_t],
[T_ptr(tydesc_type), int_t]/~,
T_ptr(T_i8()))),
exchange_free:
nothrow(dv("exchange_free", [T_ptr(T_i8())])),
nothrow(dv("exchange_free", [T_ptr(T_i8())]/~)),
validate_box:
nothrow(dv("validate_box", [T_ptr(T_i8())])),
nothrow(dv("validate_box", [T_ptr(T_i8())]/~)),
mark:
d("mark", [T_ptr(T_i8())], int_t),
d("mark", [T_ptr(T_i8())]/~, int_t),
vec_grow:
nothrow(dv("vec_grow", [T_ptr(T_ptr(T_i8())), int_t])),
nothrow(dv("vec_grow", [T_ptr(T_ptr(T_i8())), int_t]/~)),
str_new_uniq:
nothrow(d("str_new_uniq", [T_ptr(T_i8()), int_t],
nothrow(d("str_new_uniq", [T_ptr(T_i8()), int_t]/~,
T_ptr(T_i8()))),
str_new_shared:
nothrow(d("str_new_shared", [T_ptr(T_i8()), int_t],
nothrow(d("str_new_shared", [T_ptr(T_i8()), int_t]/~,
T_ptr(T_i8()))),
str_concat:
nothrow(d("str_concat", [T_ptr(T_i8()),
T_ptr(T_i8())],
T_ptr(T_i8())]/~,
T_ptr(T_i8()))),
cmp_type:
dv("cmp_type",
[T_ptr(T_i1()), T_ptr(tydesc_type),
T_ptr(T_ptr(tydesc_type)), T_ptr(T_i8()),
T_ptr(T_i8()),
T_i8()]),
T_i8()]/~),
log_type:
dv("log_type", [T_ptr(tydesc_type),
T_ptr(T_i8()), T_i32()]),
T_ptr(T_i8()), T_i32()]/~),
alloc_c_stack:
d("alloc_c_stack", [size_t], T_ptr(T_i8())),
d("alloc_c_stack", [size_t]/~, T_ptr(T_i8())),
call_shim_on_c_stack:
d("call_shim_on_c_stack",
// arguments: void *args, void *fn_ptr
[T_ptr(T_i8()), T_ptr(T_i8())],
[T_ptr(T_i8()), T_ptr(T_i8())]/~,
int_t),
call_shim_on_rust_stack:
d("call_shim_on_rust_stack",
[T_ptr(T_i8()), T_ptr(T_i8())], int_t),
[T_ptr(T_i8()), T_ptr(T_i8())]/~, int_t),
rust_personality:
nothrow(d("rust_personality", [], T_i32())),
nothrow(d("rust_personality", []/~, T_i32())),
reset_stack_limit:
nothrow(dv("reset_stack_limit", []))
nothrow(dv("reset_stack_limit", []/~))
};
}
//

View file

@ -35,7 +35,7 @@ fn get_target_strs(target_os: session::os) -> target_strs::t {
session::os_freebsd { "i686-unknown-freebsd" }
},
cc_args: ["-m32"]
cc_args: ["-m32"]/~
};
}

View file

@ -42,7 +42,7 @@ fn get_target_strs(target_os: session::os) -> target_strs::t {
session::os_freebsd { "x86_64-unknown-freebsd" }
},
cc_args: ["-m64"]
cc_args: ["-m64"]/~
};
}

View file

@ -56,7 +56,7 @@ fn default_configuration(sess: session, argv0: str, input: input) ->
mk(@"target_libc", libc),
// Build bindings.
mk(@"build_compiler", argv0),
mk(@"build_input", source_name(input))];
mk(@"build_input", source_name(input))]/~;
}
fn build_configuration(sess: session, argv0: str, input: input) ->
@ -70,19 +70,19 @@ fn build_configuration(sess: session, argv0: str, input: input) ->
{
if sess.opts.test && !attr::contains_name(user_cfg, "test")
{
[attr::mk_word_item(@"test")]
} else { [] }
[attr::mk_word_item(@"test")]/~
} else { []/~ }
};
ret user_cfg + gen_cfg + default_cfg;
}
// Convert strings provided as --cfg [cfgspec] into a crate_cfg
fn parse_cfgspecs(cfgspecs: [str]) -> ast::crate_cfg {
fn parse_cfgspecs(cfgspecs: [str]/~) -> ast::crate_cfg {
// FIXME (#2399): It would be nice to use the parser to parse all
// varieties of meta_item here. At the moment we just support the
// meta_word variant.
let mut words = [];
for cfgspecs.each {|s| words += [attr::mk_word_item(@s)]; }
let mut words = []/~;
for cfgspecs.each {|s| vec::push(words, attr::mk_word_item(@s)); }
ret words;
}
@ -563,7 +563,7 @@ fn parse_pretty(sess: session, &&name: str) -> pp_mode {
"`identified`");
}
fn opts() -> [getopts::opt] {
fn opts() -> [getopts::opt]/~ {
ret [optflag("h"), optflag("help"), optflag("v"), optflag("version"),
optflag("emit-llvm"), optflagopt("pretty"),
optflag("ls"), optflag("parse-only"), optflag("no-trans"),
@ -577,7 +577,7 @@ fn opts() -> [getopts::opt] {
optmulti("Z"),
optmulti("cfg"), optflag("test"),
optflag("lib"), optflag("bin"), optflag("static"), optflag("gc")];
optflag("lib"), optflag("bin"), optflag("static"), optflag("gc")]/~;
}
type output_filenames = @{out_filename: str, obj_filename:str};
@ -692,7 +692,7 @@ mod test {
#[test]
fn test_switch_implies_cfg_test() {
let match =
alt getopts::getopts(["--test"], opts()) {
alt getopts::getopts(["--test"]/~, opts()) {
ok(m) { m }
err(f) { fail "test_switch_implies_cfg_test: " +
getopts::fail_str(f); }
@ -708,7 +708,7 @@ mod test {
#[test]
fn test_switch_implies_cfg_test_unless_cfg_test() {
let match =
alt getopts::getopts(["--test", "--cfg=test"], opts()) {
alt getopts::getopts(["--test", "--cfg=test"]/~, opts()) {
ok(m) { m }
err(f) { fail "test_switch_implies_cfg_test_unless_cfg_test: " +
getopts::fail_str(f); }

View file

@ -109,7 +109,7 @@ fn describe_debug_flags() {
}
}
fn run_compiler(args: [str], demitter: diagnostic::emitter) {
fn run_compiler(args: [str]/~, demitter: diagnostic::emitter) {
// Don't display log spew by default. Can override with RUST_LOG.
logging::console_off();
@ -250,7 +250,7 @@ fn monitor(+f: fn~(diagnostic::emitter)) {
to get further details and report the results \
to github.com/mozilla/rust/issues"
].each {|note|
]/~.each {|note|
diagnostic::emit(none, note, diagnostic::note)
}
}
@ -260,7 +260,7 @@ fn monitor(+f: fn~(diagnostic::emitter)) {
}
}
fn main(args: [str]) {
fn main(args: [str]/~) {
monitor {|demitter|
run_compiler(args, demitter);
}

View file

@ -36,7 +36,7 @@ const trace: uint = 128u;
// It should be removed
const no_rt: uint = 256u;
fn debugging_opts_map() -> [(str, str, uint)] {
fn debugging_opts_map() -> [(str, str, uint)]/~ {
[("ppregions", "prettyprint regions with \
internal repr details", ppregions),
("time-passes", "measure time of each rustc pass", time_passes),
@ -48,7 +48,7 @@ fn debugging_opts_map() -> [(str, str, uint)] {
("no-verify", "skip LLVM verification", no_verify),
("trace", "emit trace logs", trace),
("no-rt", "do not link to the runtime", no_rt)
]
]/~
}
type options =
@ -59,10 +59,10 @@ type options =
optimize: uint,
debuginfo: bool,
extra_debuginfo: bool,
lint_opts: [(lint::lint, lint::level)],
lint_opts: [(lint::lint, lint::level)]/~,
save_temps: bool,
output_type: back::link::output_type,
addl_lib_search_paths: [str],
addl_lib_search_paths: [str]/~,
maybe_sysroot: option<str>,
target_triple: str,
cfg: ast::crate_cfg,
@ -72,7 +72,7 @@ type options =
debugging_opts: uint,
};
type crate_metadata = {name: str, data: [u8]};
type crate_metadata = {name: str, data: [u8]/~};
type session = @{targ_cfg: @config,
opts: @options,
@ -172,13 +172,13 @@ fn basic_options() -> @options {
optimize: 0u,
debuginfo: false,
extra_debuginfo: false,
lint_opts: [],
lint_opts: []/~,
save_temps: false,
output_type: link::output_type_exe,
addl_lib_search_paths: [],
addl_lib_search_paths: []/~,
maybe_sysroot: none,
target_triple: driver::host_triple(),
cfg: [],
cfg: []/~,
test: false,
parse_only: false,
no_trans: false,
@ -238,14 +238,14 @@ mod test {
}
fn make_crate(with_bin: bool, with_lib: bool) -> @ast::crate {
let mut attrs = [];
if with_bin { attrs += [make_crate_type_attr("bin")]; }
if with_lib { attrs += [make_crate_type_attr("lib")]; }
let mut attrs = []/~;
if with_bin { attrs += [make_crate_type_attr("bin")]/~; }
if with_lib { attrs += [make_crate_type_attr("lib")]/~; }
@ast_util::respan(ast_util::dummy_sp(), {
directives: [],
module: {view_items: [], items: []},
directives: []/~,
module: {view_items: []/~, items: []/~},
attrs: attrs,
config: []
config: []/~
})
}

View file

@ -4,7 +4,7 @@ export strip_unconfigured_items;
export metas_in_cfg;
export strip_items;
type in_cfg_pred = fn@([ast::attribute]) -> bool;
type in_cfg_pred = fn@([ast::attribute]/~) -> bool;
type ctxt = @{
in_cfg: in_cfg_pred
@ -100,11 +100,11 @@ fn native_item_in_cfg(cx: ctxt, item: @ast::native_item) -> bool {
// Determine if an item should be translated in the current crate
// configuration based on the item's attributes
fn in_cfg(cfg: ast::crate_cfg, attrs: [ast::attribute]) -> bool {
fn in_cfg(cfg: ast::crate_cfg, attrs: [ast::attribute]/~) -> bool {
metas_in_cfg(cfg, attr::attr_metas(attrs))
}
fn metas_in_cfg(cfg: ast::crate_cfg, metas: [@ast::meta_item]) -> bool {
fn metas_in_cfg(cfg: ast::crate_cfg, metas: [@ast::meta_item]/~) -> bool {
// The "cfg" attributes on the item
let cfg_metas = attr::find_meta_items_by_name(metas, "cfg");

View file

@ -30,18 +30,18 @@ fn inject_libcore_ref(sess: session,
let n1 = sess.next_node_id();
let n2 = sess.next_node_id();
let vi1 = @{node: ast::view_item_use(@"core", [], n1),
attrs: [],
let vi1 = @{node: ast::view_item_use(@"core", []/~, n1),
attrs: []/~,
vis: ast::public,
span: dummy_sp()};
let vp = spanned(ast::view_path_glob(ident_to_path(dummy_sp(), @"core"),
n2));
let vi2 = @{node: ast::view_item_import([vp]),
attrs: [],
let vi2 = @{node: ast::view_item_import([vp]/~),
attrs: []/~,
vis: ast::public,
span: dummy_sp()};
let vis = [vi1, vi2] + crate.node.module.view_items;
let vis = [vi1, vi2]/~ + crate.node.module.view_items;
ret @{node: {module: { view_items: vis with crate.node.module }
with crate.node} with *crate }

View file

@ -12,7 +12,7 @@ fn inject_intrinsic(sess: session,
let item = parse::parse_item_from_source_str("<intrinsic>",
intrinsic_module,
sess.opts.cfg,
[], ast::public,
[]/~, ast::public,
sess.parse_sess);
let item =
alt item {
@ -22,7 +22,7 @@ fn inject_intrinsic(sess: session,
}
};
let items = [item] + crate.node.module.items;
let items = [item]/~ + crate.node.module.items;
ret @{node: {module: { items: items with crate.node.module }
with crate.node} with *crate }

View file

@ -15,12 +15,13 @@ export modify_for_testing;
type node_id_gen = fn@() -> ast::node_id;
type test = {span: span, path: [ast::ident], ignore: bool, should_fail: bool};
type test = {span: span, path: [ast::ident]/~,
ignore: bool, should_fail: bool};
type test_ctxt =
@{sess: session::session,
crate: @ast::crate,
mut path: [ast::ident],
mut path: [ast::ident]/~,
testfns: dvec<test>};
// Traverse the crate, collecting all the test functions, eliding any
@ -40,7 +41,7 @@ fn generate_test_harness(sess: session::session,
let cx: test_ctxt =
@{sess: sess,
crate: crate,
mut path: [],
mut path: []/~,
testfns: dvec()};
let precursor =
@ -97,7 +98,7 @@ fn fold_crate(cx: test_ctxt, c: ast::crate_, fld: fold::ast_fold) ->
fn fold_item(cx: test_ctxt, &&i: @ast::item, fld: fold::ast_fold) ->
@ast::item {
cx.path += [i.ident];
cx.path += [i.ident]/~;
#debug("current path: %s", ast_util::path_name_i(cx.path));
if is_test_fn(i) {
@ -160,7 +161,7 @@ fn should_fail(i: @ast::item) -> bool {
fn add_test_module(cx: test_ctxt, m: ast::_mod) -> ast::_mod {
let testmod = mk_test_module(cx);
ret {items: m.items + [testmod] with m};
ret {items: m.items + [testmod]/~ with m};
}
/*
@ -169,11 +170,11 @@ We're going to be building a module that looks more or less like:
mod __test {
fn main(args: [str]) -> int {
fn main(args: [str]/~) -> int {
std::test::test_main(args, tests())
}
fn tests() -> [std::test::test_desc] {
fn tests() -> [std::test::test_desc]/~ {
... the list of tests in the crate ...
}
}
@ -187,14 +188,14 @@ fn mk_test_module(cx: test_ctxt) -> @ast::item {
// The synthesized main function which will call the console test runner
// with our list of tests
let mainfn = mk_main(cx);
let testmod: ast::_mod = {view_items: [], items: [mainfn, testsfn]};
let testmod: ast::_mod = {view_items: []/~, items: [mainfn, testsfn]/~};
let item_ = ast::item_mod(testmod);
// This attribute tells resolve to let us call unexported functions
let resolve_unexported_attr =
attr::mk_attr(attr::mk_word_item(@"!resolve_unexported"));
let item: ast::item =
{ident: @"__test",
attrs: [resolve_unexported_attr],
attrs: [resolve_unexported_attr]/~,
id: cx.sess.next_node_id(),
node: item_,
vis: ast::public,
@ -209,31 +210,31 @@ fn nospan<T: copy>(t: T) -> ast::spanned<T> {
ret {node: t, span: dummy_sp()};
}
fn path_node(ids: [ast::ident]) -> @ast::path {
@{span: dummy_sp(), global: false, idents: ids, rp: none, types: []}
fn path_node(ids: [ast::ident]/~) -> @ast::path {
@{span: dummy_sp(), global: false, idents: ids, rp: none, types: []/~}
}
fn mk_tests(cx: test_ctxt) -> @ast::item {
let ret_ty = mk_test_desc_vec_ty(cx);
let decl: ast::fn_decl =
{inputs: [],
{inputs: []/~,
output: ret_ty,
purity: ast::impure_fn,
cf: ast::return_val,
constraints: []};
constraints: []/~};
// The vector of test_descs for this crate
let test_descs = mk_test_desc_vec(cx);
let body_: ast::blk_ =
default_block([], option::some(test_descs), cx.sess.next_node_id());
default_block([]/~, option::some(test_descs), cx.sess.next_node_id());
let body = nospan(body_);
let item_ = ast::item_fn(decl, [], body);
let item_ = ast::item_fn(decl, []/~, body);
let item: ast::item =
{ident: @"tests",
attrs: [],
attrs: []/~,
id: cx.sess.next_node_id(),
node: item_,
vis: ast::public,
@ -241,7 +242,7 @@ fn mk_tests(cx: test_ctxt) -> @ast::item {
ret @item;
}
fn mk_path(cx: test_ctxt, path: [ast::ident]) -> [ast::ident] {
fn mk_path(cx: test_ctxt, path: [ast::ident]/~) -> [ast::ident]/~ {
// For tests that are inside of std we don't want to prefix
// the paths with std::
let is_std = {
@ -251,12 +252,12 @@ fn mk_path(cx: test_ctxt, path: [ast::ident]) -> [ast::ident] {
_ { false }
}
};
(if is_std { [] } else { [@"std"] }) + path
(if is_std { []/~ } else { [@"std"]/~ }) + path
}
// The ast::ty of [std::test::test_desc]
// The ast::ty of [std::test::test_desc]/~
fn mk_test_desc_vec_ty(cx: test_ctxt) -> @ast::ty {
let test_desc_ty_path = path_node(mk_path(cx, [@"test", @"test_desc"]));
let test_desc_ty_path = path_node(mk_path(cx, [@"test", @"test_desc"]/~));
let test_desc_ty: ast::ty =
{id: cx.sess.next_node_id(),
@ -275,9 +276,9 @@ fn mk_test_desc_vec_ty(cx: test_ctxt) -> @ast::ty {
fn mk_test_desc_vec(cx: test_ctxt) -> @ast::expr {
#debug("building test vector from %u tests", cx.testfns.len());
let mut descs = [];
let mut descs = []/~;
for cx.testfns.each {|test|
descs += [mk_test_desc_rec(cx, test)];
descs += [mk_test_desc_rec(cx, test)]/~;
}
let inner_expr = @{id: cx.sess.next_node_id(),
@ -337,7 +338,7 @@ fn mk_test_desc_rec(cx: test_ctxt, test: test) -> @ast::expr {
nospan({mutbl: ast::m_imm, ident: @"should_fail", expr: @fail_expr});
let desc_rec_: ast::expr_ =
ast::expr_rec([name_field, fn_field, ignore_field, fail_field],
ast::expr_rec([name_field, fn_field, ignore_field, fail_field]/~,
option::none);
let desc_rec: ast::expr =
{id: cx.sess.next_node_id(), node: desc_rec_, span: span};
@ -352,7 +353,7 @@ fn mk_test_wrapper(cx: test_ctxt,
span: span) -> @ast::expr {
let call_expr: ast::expr = {
id: cx.sess.next_node_id(),
node: ast::expr_call(@fn_path_expr, [], false),
node: ast::expr_call(@fn_path_expr, []/~, false),
span: span
};
@ -360,16 +361,16 @@ fn mk_test_wrapper(cx: test_ctxt,
ast::stmt_semi(@call_expr, cx.sess.next_node_id()));
let wrapper_decl: ast::fn_decl = {
inputs: [],
inputs: []/~,
output: @{id: cx.sess.next_node_id(), node: ast::ty_nil, span: span},
purity: ast::impure_fn,
cf: ast::return_val,
constraints: []
constraints: []/~
};
let wrapper_body: ast::blk = nospan({
view_items: [],
stmts: [@call_stmt],
view_items: []/~,
stmts: [@call_stmt]/~,
expr: option::none,
id: cx.sess.next_node_id(),
rules: ast::default_blk
@ -378,7 +379,7 @@ fn mk_test_wrapper(cx: test_ctxt,
let wrapper_expr: ast::expr = {
id: cx.sess.next_node_id(),
node: ast::expr_fn(ast::proto_bare, wrapper_decl,
wrapper_body, @[]),
wrapper_body, @[]/~),
span: span
};
@ -386,7 +387,7 @@ fn mk_test_wrapper(cx: test_ctxt,
}
fn mk_main(cx: test_ctxt) -> @ast::item {
let str_pt = path_node([@"str"]);
let str_pt = path_node([@"str"]/~);
let str_ty = @{id: cx.sess.next_node_id(),
node: ast::ty_path(str_pt, cx.sess.next_node_id()),
span: dummy_sp()};
@ -410,23 +411,23 @@ fn mk_main(cx: test_ctxt) -> @ast::item {
span: dummy_sp()};
let decl: ast::fn_decl =
{inputs: [args_arg],
{inputs: [args_arg]/~,
output: @ret_ty,
purity: ast::impure_fn,
cf: ast::return_val,
constraints: []};
constraints: []/~};
let test_main_call_expr = mk_test_main_call(cx);
let body_: ast::blk_ =
default_block([], option::some(test_main_call_expr),
default_block([]/~, option::some(test_main_call_expr),
cx.sess.next_node_id());
let body = {node: body_, span: dummy_sp()};
let item_ = ast::item_fn(decl, [], body);
let item_ = ast::item_fn(decl, []/~, body);
let item: ast::item =
{ident: @"main",
attrs: [],
attrs: []/~,
id: cx.sess.next_node_id(),
node: item_,
vis: ast::public,
@ -437,7 +438,7 @@ fn mk_main(cx: test_ctxt) -> @ast::item {
fn mk_test_main_call(cx: test_ctxt) -> @ast::expr {
// Get the args passed to main so we can pass the to test_main
let args_path = path_node([@"args"]);
let args_path = path_node([@"args"]/~);
let args_path_expr_: ast::expr_ = ast::expr_path(args_path);
@ -445,20 +446,20 @@ fn mk_test_main_call(cx: test_ctxt) -> @ast::expr {
{id: cx.sess.next_node_id(), node: args_path_expr_, span: dummy_sp()};
// Call __test::test to generate the vector of test_descs
let test_path = path_node([@"tests"]);
let test_path = path_node([@"tests"]/~);
let test_path_expr_: ast::expr_ = ast::expr_path(test_path);
let test_path_expr: ast::expr =
{id: cx.sess.next_node_id(), node: test_path_expr_, span: dummy_sp()};
let test_call_expr_ = ast::expr_call(@test_path_expr, [], false);
let test_call_expr_ = ast::expr_call(@test_path_expr, []/~, false);
let test_call_expr: ast::expr =
{id: cx.sess.next_node_id(), node: test_call_expr_, span: dummy_sp()};
// Call std::test::test_main
let test_main_path = path_node(mk_path(cx, [@"test", @"test_main"]));
let test_main_path = path_node(mk_path(cx, [@"test", @"test_main"]/~));
let test_main_path_expr_: ast::expr_ = ast::expr_path(test_main_path);
@ -468,7 +469,7 @@ fn mk_test_main_call(cx: test_ctxt) -> @ast::expr {
let test_main_call_expr_: ast::expr_ =
ast::expr_call(@test_main_path_expr,
[@args_path_expr, @test_call_expr], false);
[@args_path_expr, @test_call_expr]/~, false);
let test_main_call_expr: ast::expr =
{id: cx.sess.next_node_id(), node: test_main_call_expr_,

View file

@ -981,21 +981,22 @@ fn mk_type_names() -> type_names {
}
fn type_to_str(names: type_names, ty: TypeRef) -> str {
ret type_to_str_inner(names, [], ty);
ret type_to_str_inner(names, []/~, ty);
}
fn type_to_str_inner(names: type_names, outer0: [TypeRef], ty: TypeRef) ->
fn type_to_str_inner(names: type_names, outer0: [TypeRef]/~, ty: TypeRef) ->
str {
alt type_has_name(names, ty) {
option::some(n) { ret n; }
_ {}
}
let outer = outer0 + [ty];
let outer = outer0 + [ty]/~;
let kind = llvm::LLVMGetTypeKind(ty);
fn tys_str(names: type_names, outer: [TypeRef], tys: [TypeRef]) -> str {
fn tys_str(names: type_names, outer: [TypeRef]/~,
tys: [TypeRef]/~) -> str {
let mut s: str = "";
let mut first: bool = true;
for tys.each {|t|
@ -1021,7 +1022,7 @@ fn type_to_str_inner(names: type_names, outer0: [TypeRef], ty: TypeRef) ->
let mut s = "fn(";
let out_ty: TypeRef = llvm::LLVMGetReturnType(ty);
let n_args = llvm::LLVMCountParamTypes(ty) as uint;
let args: [TypeRef] = vec::from_elem::<TypeRef>(n_args, 0 as TypeRef);
let args = vec::from_elem(n_args, 0 as TypeRef);
unsafe {
llvm::LLVMGetParamTypes(ty, vec::unsafe::to_ptr(args));
}
@ -1033,7 +1034,7 @@ fn type_to_str_inner(names: type_names, outer0: [TypeRef], ty: TypeRef) ->
Struct {
let mut s: str = "{";
let n_elts = llvm::LLVMCountStructElementTypes(ty) as uint;
let elts: [TypeRef] = vec::from_elem::<TypeRef>(n_elts, 0 as TypeRef);
let elts = vec::from_elem(n_elts, 0 as TypeRef);
unsafe {
llvm::LLVMGetStructElementTypes(ty, vec::unsafe::to_ptr(elts));
}
@ -1082,7 +1083,7 @@ fn float_width(llt: TypeRef) -> uint {
};
}
fn fn_ty_param_tys(fn_ty: TypeRef) -> [TypeRef] unsafe {
fn fn_ty_param_tys(fn_ty: TypeRef) -> [TypeRef]/~ unsafe {
let args = vec::from_elem(llvm::LLVMCountParamTypes(fn_ty) as uint,
0 as TypeRef);
llvm::LLVMGetParamTypes(fn_ty, vec::unsafe::to_ptr(args));

View file

@ -68,7 +68,7 @@ fn encode_inlined_item(ecx: @e::encode_ctxt,
ii: ast::inlined_item) {
#debug["> Encoding inlined item: %s::%s (%u)",
ast_map::path_to_str(path), ii.ident(),
ebml_w.writer.tell()];
ebml_w.writer.tell()]/~;
let id_range = compute_id_range_for_inlined_item(ii);
ebml_w.wr_tag(c::tag_ast as uint) {||
@ -79,7 +79,7 @@ fn encode_inlined_item(ecx: @e::encode_ctxt,
#debug["< Encoded inlined fn: %s::%s (%u)",
ast_map::path_to_str(path), ii.ident(),
ebml_w.writer.tell()];
ebml_w.writer.tell()]/~;
}
fn decode_inlined_item(cdata: cstore::crate_metadata,
@ -103,7 +103,7 @@ fn decode_inlined_item(cdata: cstore::crate_metadata,
#debug["Fn named: %s", ii.ident()];
decode_side_tables(xcx, ast_doc);
#debug["< Decoded inlined fn: %s::%s",
ast_map::path_to_str(path), ii.ident()];
ast_map::path_to_str(path), ii.ident()]/~;
alt ii {
ast::ii_item(i) {
#debug(">>> DECODED ITEM >>>\n%s\n<<< DECODED ITEM <<<",
@ -522,7 +522,7 @@ impl helpers for ebml::writer {
e::write_type(ecx, self, ty)
}
fn emit_tys(ecx: @e::encode_ctxt, tys: [ty::t]) {
fn emit_tys(ecx: @e::encode_ctxt, tys: [ty::t]/~) {
self.emit_from_vec(tys) {|ty|
e::write_type(ecx, self, ty)
}
@ -707,7 +707,7 @@ fn encode_side_tables_for_id(ecx: @e::encode_ctxt,
impl decoder for ebml::doc {
fn as_int() -> int { ebml::doc_as_u64(self) as int }
fn [](tag: c::astencode_tag) -> ebml::doc {
fn []/~(tag: c::astencode_tag) -> ebml::doc {
ebml::get_doc(self, tag as uint)
}
fn opt_child(tag: c::astencode_tag) -> option<ebml::doc> {
@ -727,11 +727,11 @@ impl decoder for ebml::ebml_deserializer {
xcx.tr_def_id(_))
}
fn read_tys(xcx: extended_decode_ctxt) -> [ty::t] {
fn read_tys(xcx: extended_decode_ctxt) -> [ty::t]/~ {
self.read_to_vec {|| self.read_ty(xcx) }
}
fn read_bounds(xcx: extended_decode_ctxt) -> @[ty::param_bound] {
fn read_bounds(xcx: extended_decode_ctxt) -> @[ty::param_bound]/~ {
tydecode::parse_bounds_data(
self.parent.data, self.pos, xcx.dcx.cdata.cnum, xcx.dcx.tcx,
xcx.tr_def_id(_))
@ -765,7 +765,7 @@ fn decode_side_tables(xcx: extended_decode_ctxt,
#debug[">> Side table document with tag 0x%x \
found for id %d (orig %d)",
tag, id, id0];
tag, id, id0]/~;
if tag == (c::tag_table_mutbl as uint) {
dcx.maps.mutbl_map.insert(id, ());
@ -859,7 +859,7 @@ type fake_session = ();
#[cfg(test)]
impl of fake_ext_ctxt for fake_session {
fn cfg() -> ast::crate_cfg { [] }
fn cfg() -> ast::crate_cfg { []/~ }
fn parse_sess() -> parse::parse_sess { new_parse_sess() }
}
@ -922,13 +922,13 @@ fn test_simplification() {
let item_in = ast::ii_item(#ast(item) {
fn new_int_alist<B: copy>() -> alist<int, B> {
fn eq_int(&&a: int, &&b: int) -> bool { a == b }
ret {eq_fn: eq_int, mut data: []};
ret {eq_fn: eq_int, mut data: []/~};
}
});
let item_out = simplify_ast(item_in);
let item_exp = ast::ii_item(#ast(item) {
fn new_int_alist<B: copy>() -> alist<int, B> {
ret {eq_fn: eq_int, mut data: []};
ret {eq_fn: eq_int, mut data: []/~};
}
});
alt (item_out, item_exp) {

View file

@ -43,7 +43,7 @@ type cache_entry = {
cnum: int,
span: span,
hash: @str,
metas: @[@ast::meta_item]
metas: @[@ast::meta_item]/~
};
fn dump_crates(crate_cache: dvec<cache_entry>) {
@ -54,7 +54,7 @@ fn dump_crates(crate_cache: dvec<cache_entry>) {
#debug("hash: %?", entry.hash);
let attrs = [
attr::mk_attr(attr::mk_list_item(@"link", *entry.metas))
];
]/~;
for attr::find_linkage_attrs(attrs).each {|attr|
#debug("meta: %s", pprust::attr_to_str(attr));
}
@ -62,7 +62,7 @@ fn dump_crates(crate_cache: dvec<cache_entry>) {
}
fn warn_if_multiple_versions(diag: span_handler,
crate_cache: [cache_entry]) {
crate_cache: [cache_entry]/~) {
import either::*;
if crate_cache.len() != 0u {
@ -86,7 +86,7 @@ fn warn_if_multiple_versions(diag: span_handler,
diag.span_note(match.span, "used here");
let attrs = [
attr::mk_attr(attr::mk_list_item(@"link", *match.metas))
];
]/~;
loader::note_linkage_attrs(diag, attrs);
}
}
@ -161,21 +161,21 @@ fn visit_item(e: env, i: @ast::item) {
}
fn metas_with(ident: ast::ident, key: ast::ident,
metas: [@ast::meta_item]) -> [@ast::meta_item] {
metas: [@ast::meta_item]/~) -> [@ast::meta_item]/~ {
let name_items = attr::find_meta_items_by_name(metas, *key);
if name_items.is_empty() {
metas + [attr::mk_name_value_item_str(key, *ident)]
metas + [attr::mk_name_value_item_str(key, *ident)]/~
} else {
metas
}
}
fn metas_with_ident(ident: ast::ident,
metas: [@ast::meta_item]) -> [@ast::meta_item] {
metas: [@ast::meta_item]/~) -> [@ast::meta_item]/~ {
metas_with(ident, @"name", metas)
}
fn existing_match(e: env, metas: [@ast::meta_item], hash: str) ->
fn existing_match(e: env, metas: [@ast::meta_item]/~, hash: str) ->
option<int> {
for e.crate_cache.each {|c|
@ -187,7 +187,7 @@ fn existing_match(e: env, metas: [@ast::meta_item], hash: str) ->
ret none;
}
fn resolve_crate(e: env, ident: ast::ident, metas: [@ast::meta_item],
fn resolve_crate(e: env, ident: ast::ident, metas: [@ast::meta_item]/~,
hash: str, span: span) -> ast::crate_num {
let metas = metas_with_ident(ident, metas);
@ -241,7 +241,7 @@ fn resolve_crate(e: env, ident: ast::ident, metas: [@ast::meta_item],
}
// Go through the crate metadata and load any crates that it references
fn resolve_crate_deps(e: env, cdata: @[u8]) -> cstore::cnum_map {
fn resolve_crate_deps(e: env, cdata: @[u8]/~) -> cstore::cnum_map {
#debug("resolving deps of external crate");
// The map from crate numbers in the crate we're resolving to local crate
// numbers
@ -249,7 +249,7 @@ fn resolve_crate_deps(e: env, cdata: @[u8]) -> cstore::cnum_map {
for decoder::get_crate_deps(cdata).each {|dep|
let extrn_cnum = dep.cnum;
let cname = dep.name;
let cmetas = metas_with(dep.vers, @"vers", []);
let cmetas = metas_with(dep.vers, @"vers", []/~);
#debug("resolving dep crate %s ver: %s hash: %s",
*dep.name, *dep.vers, *dep.hash);
alt existing_match(e, metas_with_ident(cname, cmetas), *dep.hash) {

View file

@ -39,12 +39,12 @@ fn get_type_param_count(cstore: cstore::cstore, def: ast::def_id) -> uint {
}
fn lookup_defs(cstore: cstore::cstore, cnum: ast::crate_num,
path: [ast::ident]) -> [ast::def] {
let mut result = [];
path: [ast::ident]/~) -> [ast::def]/~ {
let mut result = []/~;
#debug("lookup_defs: path = %? cnum = %?", path, cnum);
for resolve_path(cstore, cnum, path).each {|elt|
let (c, data, def) = elt;
result += [decoder::lookup_def(c, data, def)];
result += [decoder::lookup_def(c, data, def)]/~;
}
ret result;
}
@ -60,21 +60,21 @@ fn lookup_method_purity(cstore: cstore::cstore, did: ast::def_id)
/* Returns a vector of possible def IDs for a given path,
in a given crate */
fn resolve_path(cstore: cstore::cstore, cnum: ast::crate_num,
path: [ast::ident]) ->
[(ast::crate_num, @[u8], ast::def_id)] {
path: [ast::ident]/~) ->
[(ast::crate_num, @[u8]/~, ast::def_id)]/~ {
let cm = cstore::get_crate_data(cstore, cnum);
#debug("resolve_path %s in crates[%d]:%s",
ast_util::path_name_i(path), cnum, cm.name);
let mut result = [];
let mut result = []/~;
for decoder::resolve_path(path, cm.data).each {|def|
if def.crate == ast::local_crate {
result += [(cnum, cm.data, def)];
result += [(cnum, cm.data, def)]/~;
} else {
if cm.cnum_map.contains_key(def.crate) {
// This reexport is itself a reexport from another crate
let next_cnum = cm.cnum_map.get(def.crate);
let next_cm_data = cstore::get_crate_data(cstore, next_cnum);
result += [(next_cnum, next_cm_data.data, def)];
result += [(next_cnum, next_cm_data.data, def)]/~;
}
}
}
@ -88,7 +88,7 @@ fn get_item_path(tcx: ty::ctxt, def: ast::def_id) -> ast_map::path {
// FIXME #1920: This path is not always correct if the crate is not linked
// into the root namespace.
[ast_map::path_mod(@cdata.name)] + path
[ast_map::path_mod(@cdata.name)]/~ + path
}
enum found_ast {
@ -109,7 +109,8 @@ fn maybe_get_item_ast(tcx: ty::ctxt, def: ast::def_id,
decode_inlined_item)
}
fn get_enum_variants(tcx: ty::ctxt, def: ast::def_id) -> [ty::variant_info] {
fn get_enum_variants(tcx: ty::ctxt, def: ast::def_id)
-> [ty::variant_info]/~ {
let cstore = tcx.cstore;
let cdata = cstore::get_crate_data(cstore, def.crate);
ret decoder::get_enum_variants(cdata, def.node, tcx)
@ -117,20 +118,20 @@ fn get_enum_variants(tcx: ty::ctxt, def: ast::def_id) -> [ty::variant_info] {
fn get_impls_for_mod(cstore: cstore::cstore, def: ast::def_id,
name: option<ast::ident>)
-> @[@decoder::_impl] {
-> @[@decoder::_impl]/~ {
let cdata = cstore::get_crate_data(cstore, def.crate);
decoder::get_impls_for_mod(cdata, def.node, name) {|cnum|
cstore::get_crate_data(cstore, cnum)
}
}
fn get_iface_methods(tcx: ty::ctxt, def: ast::def_id) -> @[ty::method] {
fn get_iface_methods(tcx: ty::ctxt, def: ast::def_id) -> @[ty::method]/~ {
let cstore = tcx.cstore;
let cdata = cstore::get_crate_data(cstore, def.crate);
decoder::get_iface_methods(cdata, def.node, tcx)
}
fn get_class_fields(tcx: ty::ctxt, def: ast::def_id) -> [ty::field_ty] {
fn get_class_fields(tcx: ty::ctxt, def: ast::def_id) -> [ty::field_ty]/~ {
let cstore = tcx.cstore;
let cdata = cstore::get_crate_data(cstore, def.crate);
decoder::get_class_fields(cdata, def.node)
@ -159,7 +160,7 @@ fn get_field_type(tcx: ty::ctxt, class_id: ast::def_id,
class_id, def)});
#debug("got field data %?", the_field);
let ty = decoder::item_type(def, the_field, tcx, cdata);
ret {bounds: @[], rp: ast::rp_none, ty: ty};
ret {bounds: @[]/~, rp: ast::rp_none, ty: ty};
}
// Given a def_id for an impl or class, return the iface it implements,

View file

@ -40,7 +40,7 @@ type cnum_map = map::hashmap<ast::crate_num, ast::crate_num>;
type mod_path_map = map::hashmap<ast::def_id, @str>;
type crate_metadata = @{name: str,
data: @[u8],
data: @[u8]/~,
cnum_map: cnum_map,
cnum: ast::crate_num};
@ -55,9 +55,9 @@ type cstore_private =
@{metas: map::hashmap<ast::crate_num, crate_metadata>,
use_crate_map: use_crate_map,
mod_path_map: mod_path_map,
mut used_crate_files: [str],
mut used_libraries: [str],
mut used_link_args: [str]};
mut used_crate_files: [str]/~,
mut used_libraries: [str]/~,
mut used_link_args: [str]/~};
// Map from node_id's of local use statements to crate numbers
type use_crate_map = map::hashmap<ast::node_id, ast::crate_num>;
@ -74,9 +74,9 @@ fn mk_cstore() -> cstore {
ret private(@{metas: meta_cache,
use_crate_map: crate_map,
mod_path_map: mod_path_map,
mut used_crate_files: [],
mut used_libraries: [],
mut used_link_args: []});
mut used_crate_files: []/~,
mut used_libraries: []/~,
mut used_link_args: []/~});
}
fn get_crate_data(cstore: cstore, cnum: ast::crate_num) -> crate_metadata {
@ -113,11 +113,11 @@ fn iter_crate_data(cstore: cstore, i: fn(ast::crate_num, crate_metadata)) {
fn add_used_crate_file(cstore: cstore, lib: str) {
if !vec::contains(p(cstore).used_crate_files, lib) {
p(cstore).used_crate_files += [lib];
p(cstore).used_crate_files += [lib]/~;
}
}
fn get_used_crate_files(cstore: cstore) -> [str] {
fn get_used_crate_files(cstore: cstore) -> [str]/~ {
ret p(cstore).used_crate_files;
}
@ -125,11 +125,11 @@ fn add_used_library(cstore: cstore, lib: str) -> bool {
assert lib != "";
if vec::contains(p(cstore).used_libraries, lib) { ret false; }
p(cstore).used_libraries += [lib];
p(cstore).used_libraries += [lib]/~;
ret true;
}
fn get_used_libraries(cstore: cstore) -> [str] {
fn get_used_libraries(cstore: cstore) -> [str]/~ {
ret p(cstore).used_libraries;
}
@ -137,7 +137,7 @@ fn add_used_link_args(cstore: cstore, args: str) {
p(cstore).used_link_args += str::split_char(args, ' ');
}
fn get_used_link_args(cstore: cstore) -> [str] {
fn get_used_link_args(cstore: cstore) -> [str]/~ {
ret p(cstore).used_link_args;
}
@ -153,15 +153,15 @@ fn find_use_stmt_cnum(cstore: cstore,
// returns hashes of crates directly used by this crate. Hashes are
// sorted by crate name.
fn get_dep_hashes(cstore: cstore) -> [@str] {
fn get_dep_hashes(cstore: cstore) -> [@str]/~ {
type crate_hash = {name: @str, hash: @str};
let mut result = [];
let mut result = []/~;
for p(cstore).use_crate_map.each_value {|cnum|
let cdata = cstore::get_crate_data(cstore, cnum);
let hash = decoder::get_crate_hash(cdata.data);
#debug("Add hash[%s]: %s", cdata.name, *hash);
result += [{name: @cdata.name, hash: hash}];
result += [{name: @cdata.name, hash: hash}]/~;
};
fn lteq(a: crate_hash, b: crate_hash) -> bool {
ret *a.name <= *b.name;
@ -175,9 +175,9 @@ fn get_dep_hashes(cstore: cstore) -> [@str] {
ret vec::map(sorted, mapper);
}
fn get_path(cstore: cstore, d: ast::def_id) -> [ast::ident] {
fn get_path(cstore: cstore, d: ast::def_id) -> [ast::ident]/~ {
// let f = bind str::split_str(_, "::");
option::map_default(p(cstore).mod_path_map.find(d), [],
option::map_default(p(cstore).mod_path_map.find(d), []/~,
{|ds| str::split_str(*ds, "::").map({|x|@x})})
}
// Local Variables:

Some files were not shown because too many files have changed in this diff Show more