auto merge of #6434 : alexcrichton/rust/less-implicit-vecs, r=bstrie
This closes #5204 and #6421. It also removes the `vecs_implicitly_copyable` lint (although, re-reading #6421, that removal may not be desired). If we do want to keep the lint, this patch at least removes its use from the compiler itself.
Commit: 767e3ae86c
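For context: with this lint set to allow, `~[T]` vectors and `~str` strings could be silently copied when used by value, which is why the diff below either spells copies out explicitly (e.g. `option::Some(copy matches.free[0])`) or switches functions over to borrowed `&` parameters. A rough modern-Rust analogue of the same idea, purely illustrative and not part of this patch: owned buffers move by default, and duplicating one has to be written as an explicit `clone`.

```rust
fn main() {
    let args: Vec<String> = vec!["compiletest".to_string(), "run-pass".to_string()];

    // Duplicating an element has to be written out explicitly, much like the
    // `copy` expressions added in the diff below.
    let filter: Option<String> = if args.len() > 1 {
        Some(args[1].clone())
    } else {
        None
    };

    // `args` is still usable afterwards because only an explicit clone was taken.
    println!("{} args, filter = {:?}", args.len(), filter);
}
```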
@@ -10,9 +10,7 @@
 
 #[crate_type = "bin"];
 
-#[allow(vecs_implicitly_copyable)];
 #[allow(non_camel_case_types)];
-#[allow(deprecated_pattern)];
 
 extern mod std(vers = "0.7-pre");
 
@@ -43,8 +41,8 @@ pub mod errors;
 pub fn main() {
     let args = os::args();
     let config = parse_config(args);
-    log_config(config);
-    run_tests(config);
+    log_config(&config);
+    run_tests(&config);
 }
 
 pub fn parse_config(args: ~[~str]) -> config {
@@ -89,22 +87,23 @@ pub fn parse_config(args: ~[~str]) -> config {
         run_ignored: getopts::opt_present(matches, ~"ignored"),
         filter:
             if vec::len(matches.free) > 0u {
-                option::Some(matches.free[0])
+                option::Some(copy matches.free[0])
             } else { option::None },
         logfile: getopts::opt_maybe_str(matches, ~"logfile").map(|s| Path(*s)),
         runtool: getopts::opt_maybe_str(matches, ~"runtool"),
         rustcflags: getopts::opt_maybe_str(matches, ~"rustcflags"),
         jit: getopts::opt_present(matches, ~"jit"),
         newrt: getopts::opt_present(matches, ~"newrt"),
-        target: opt_str(getopts::opt_maybe_str(matches, ~"target")),
-        adb_path: opt_str(getopts::opt_maybe_str(matches, ~"adb-path")),
-        adb_test_dir: opt_str(getopts::opt_maybe_str(matches, ~"adb-test-dir")),
+        target: opt_str2(getopts::opt_maybe_str(matches, ~"target")).to_str(),
+        adb_path: opt_str2(getopts::opt_maybe_str(matches, ~"adb-path")).to_str(),
+        adb_test_dir:
+            opt_str2(getopts::opt_maybe_str(matches, ~"adb-test-dir")).to_str(),
         adb_device_status:
-            if (opt_str(getopts::opt_maybe_str(matches, ~"target")) ==
+            if (opt_str2(getopts::opt_maybe_str(matches, ~"target")) ==
                 ~"arm-linux-androideabi") {
-                if (opt_str(getopts::opt_maybe_str(matches, ~"adb-test-dir")) !=
+                if (opt_str2(getopts::opt_maybe_str(matches, ~"adb-test-dir")) !=
                     ~"(none)" &&
-                    opt_str(getopts::opt_maybe_str(matches, ~"adb-test-dir")) !=
+                    opt_str2(getopts::opt_maybe_str(matches, ~"adb-test-dir")) !=
                     ~"") { true }
                 else { false }
             } else { false },
@@ -112,7 +111,7 @@ pub fn parse_config(args: ~[~str]) -> config {
     }
 }
 
-pub fn log_config(config: config) {
+pub fn log_config(config: &config) {
     let c = config;
     logv(c, fmt!("configuration:"));
     logv(c, fmt!("compile_lib_path: %s", config.compile_lib_path));
@@ -123,9 +122,9 @@ pub fn log_config(config: config) {
     logv(c, fmt!("stage_id: %s", config.stage_id));
     logv(c, fmt!("mode: %s", mode_str(config.mode)));
     logv(c, fmt!("run_ignored: %b", config.run_ignored));
-    logv(c, fmt!("filter: %s", opt_str(config.filter)));
-    logv(c, fmt!("runtool: %s", opt_str(config.runtool)));
-    logv(c, fmt!("rustcflags: %s", opt_str(config.rustcflags)));
+    logv(c, fmt!("filter: %s", opt_str(&config.filter)));
+    logv(c, fmt!("runtool: %s", opt_str(&config.runtool)));
+    logv(c, fmt!("rustcflags: %s", opt_str(&config.rustcflags)));
     logv(c, fmt!("jit: %b", config.jit));
     logv(c, fmt!("newrt: %b", config.newrt));
     logv(c, fmt!("target: %s", config.target));
@@ -136,8 +135,18 @@ pub fn log_config(config: config) {
     logv(c, fmt!("\n"));
 }
 
-pub fn opt_str(maybestr: Option<~str>) -> ~str {
-    match maybestr { option::Some(s) => s, option::None => ~"(none)" }
+pub fn opt_str<'a>(maybestr: &'a Option<~str>) -> &'a str {
+    match *maybestr {
+        option::None => "(none)",
+        option::Some(ref s) => {
+            let s: &'a str = *s;
+            s
+        }
+    }
+}
+
+pub fn opt_str2(maybestr: Option<~str>) -> ~str {
+    match maybestr { None => ~"(none)", Some(s) => { s } }
 }
 
 pub fn str_opt(maybestr: ~str) -> Option<~str> {
@@ -165,16 +174,16 @@ pub fn mode_str(mode: mode) -> ~str {
     }
 }
 
-pub fn run_tests(config: config) {
+pub fn run_tests(config: &config) {
     let opts = test_opts(config);
     let tests = make_tests(config);
     let res = test::run_tests_console(&opts, tests);
     if !res { fail!("Some tests failed"); }
 }
 
-pub fn test_opts(config: config) -> test::TestOpts {
+pub fn test_opts(config: &config) -> test::TestOpts {
     test::TestOpts {
-        filter: config.filter,
+        filter: copy config.filter,
         run_ignored: config.run_ignored,
         logfile: copy config.logfile,
         run_tests: true,
@@ -184,7 +193,7 @@ pub fn test_opts(config: config) -> test::TestOpts {
     }
 }
 
-pub fn make_tests(config: config) -> ~[test::TestDescAndFn] {
+pub fn make_tests(config: &config) -> ~[test::TestDescAndFn] {
     debug!("making tests from %s",
            config.src_base.to_str());
     let mut tests = ~[];
@@ -198,7 +207,7 @@ pub fn make_tests(config: config) -> ~[test::TestDescAndFn] {
     tests
 }
 
-pub fn is_test(config: config, testfile: &Path) -> bool {
+pub fn is_test(config: &config, testfile: &Path) -> bool {
     // Pretty-printer does not work with .rc files yet
     let valid_extensions =
         match config.mode {
@@ -221,7 +230,7 @@ pub fn is_test(config: config, testfile: &Path) -> bool {
     return valid;
 }
 
-pub fn make_test(config: config, testfile: &Path) -> test::TestDescAndFn {
+pub fn make_test(config: &config, testfile: &Path) -> test::TestDescAndFn {
     test::TestDescAndFn {
         desc: test::TestDesc {
             name: make_test_name(config, testfile),
@@ -232,13 +241,15 @@ pub fn make_test(config: config, testfile: &Path) -> test::TestDescAndFn {
     }
 }
 
-pub fn make_test_name(config: config, testfile: &Path) -> test::TestName {
+pub fn make_test_name(config: &config, testfile: &Path) -> test::TestName {
     test::DynTestName(fmt!("[%s] %s",
                            mode_str(config.mode),
                            testfile.to_str()))
 }
 
-pub fn make_test_closure(config: config, testfile: &Path) -> test::TestFn {
-    let testfile = testfile.to_str();
-    test::DynTestFn(|| runtest::run(config, testfile))
+pub fn make_test_closure(config: &config, testfile: &Path) -> test::TestFn {
+    use core::cell::Cell;
+    let config = Cell(copy *config);
+    let testfile = Cell(testfile.to_str());
+    test::DynTestFn(|| { runtest::run(config.take(), testfile.take()) })
 }
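A note on the `Cell` dance in `make_test_closure` above: the closure cannot simply move its captured owned values out when it is invoked, so the new code wraps them in `Cell`s and extracts them with `take()` at call time. A hypothetical modern-Rust sketch of the same pattern (names invented here, not the patch's code) does the equivalent with `Option::take`:

```rust
// Sketch of the Cell(...).take() trick: wrap owned values so a reusable
// closure can move them out exactly once when it runs.
fn make_test_closure(config: String, testfile: String) -> impl FnMut() {
    let mut slot = Some((config, testfile));
    move || {
        // take() leaves None behind, so the owned data is moved out only once.
        if let Some((config, testfile)) = slot.take() {
            println!("running {} with config {}", testfile, config);
        }
    }
}

fn main() {
    let mut test = make_test_closure("stage2".to_string(), "foo.rs".to_string());
    test();
}
```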
@@ -52,12 +52,14 @@ pub fn load_props(testfile: &Path) -> TestProps {
             pp_exact = parse_pp_exact(ln, testfile);
         }
 
-        for parse_aux_build(ln).each |ab| {
-            aux_builds.push(*ab);
+        match parse_aux_build(ln) {
+            Some(ab) => { aux_builds.push(ab); }
+            None => {}
         }
 
-        for parse_exec_env(ln).each |ee| {
-            exec_env.push(*ee);
+        match parse_exec_env(ln) {
+            Some(ee) => { exec_env.push(ee); }
+            None => {}
         }
 
         match parse_debugger_cmd(ln) {
@@ -81,7 +83,7 @@ pub fn load_props(testfile: &Path) -> TestProps {
     };
 }
 
-pub fn is_test_ignored(config: config, testfile: &Path) -> bool {
+pub fn is_test_ignored(config: &config, testfile: &Path) -> bool {
     for iter_header(testfile) |ln| {
         if parse_name_directive(ln, ~"xfail-test") { return true; }
         if parse_name_directive(ln, xfail_target()) { return true; }
@@ -111,44 +113,47 @@ fn iter_header(testfile: &Path, it: &fn(~str) -> bool) -> bool {
     return true;
 }
 
-fn parse_error_pattern(line: ~str) -> Option<~str> {
+fn parse_error_pattern(line: &str) -> Option<~str> {
     parse_name_value_directive(line, ~"error-pattern")
 }
 
-fn parse_aux_build(line: ~str) -> Option<~str> {
+fn parse_aux_build(line: &str) -> Option<~str> {
     parse_name_value_directive(line, ~"aux-build")
 }
 
-fn parse_compile_flags(line: ~str) -> Option<~str> {
+fn parse_compile_flags(line: &str) -> Option<~str> {
     parse_name_value_directive(line, ~"compile-flags")
 }
 
-fn parse_debugger_cmd(line: ~str) -> Option<~str> {
+fn parse_debugger_cmd(line: &str) -> Option<~str> {
     parse_name_value_directive(line, ~"debugger")
 }
 
-fn parse_check_line(line: ~str) -> Option<~str> {
+fn parse_check_line(line: &str) -> Option<~str> {
     parse_name_value_directive(line, ~"check")
 }
 
-fn parse_exec_env(line: ~str) -> Option<(~str, ~str)> {
+fn parse_exec_env(line: &str) -> Option<(~str, ~str)> {
     do parse_name_value_directive(line, ~"exec-env").map |nv| {
         // nv is either FOO or FOO=BAR
         let mut strs = ~[];
         for str::each_splitn_char(*nv, '=', 1u) |s| { strs.push(s.to_owned()); }
         match strs.len() {
-          1u => (strs[0], ~""),
-          2u => (strs[0], strs[1]),
+          1u => (strs.pop(), ~""),
+          2u => {
+              let end = strs.pop();
+              (strs.pop(), end)
+          }
           n => fail!("Expected 1 or 2 strings, not %u", n)
         }
     }
 }
 
-fn parse_pp_exact(line: ~str, testfile: &Path) -> Option<Path> {
+fn parse_pp_exact(line: &str, testfile: &Path) -> Option<Path> {
     match parse_name_value_directive(line, ~"pp-exact") {
       Some(s) => Some(Path(s)),
       None => {
-        if parse_name_directive(line, ~"pp-exact") {
+        if parse_name_directive(line, "pp-exact") {
             Some(testfile.file_path())
         } else {
            None
@@ -157,11 +162,11 @@ fn parse_pp_exact(line: ~str, testfile: &Path) -> Option<Path> {
      }
    }
 }
 
-fn parse_name_directive(line: ~str, directive: ~str) -> bool {
+fn parse_name_directive(line: &str, directive: &str) -> bool {
     str::contains(line, directive)
 }
 
-fn parse_name_value_directive(line: ~str,
+fn parse_name_value_directive(line: &str,
                               directive: ~str) -> Option<~str> {
     let keycolon = directive + ~":";
     match str::find_str(line, keycolon) {
@@ -14,7 +14,7 @@ use core::run::spawn_process;
 use core::run;
 
 #[cfg(target_os = "win32")]
-fn target_env(lib_path: ~str, prog: ~str) -> ~[(~str,~str)] {
+fn target_env(lib_path: &str, prog: &str) -> ~[(~str,~str)] {
 
     let mut env = os::env();
 
@@ -27,7 +27,7 @@ fn target_env(lib_path: ~str, prog: ~str) -> ~[(~str,~str)] {
         if k == ~"PATH" { (~"PATH", v + ~";" + lib_path + ~";" + aux_path) }
         else { (k,v) }
     };
-    if str::ends_with(prog, ~"rustc.exe") {
+    if str::ends_with(prog, "rustc.exe") {
         env.push((~"RUST_THREADS", ~"1"));
     }
     return env;
@@ -36,16 +36,16 @@ fn target_env(lib_path: ~str, prog: ~str) -> ~[(~str,~str)] {
 #[cfg(target_os = "linux")]
 #[cfg(target_os = "macos")]
 #[cfg(target_os = "freebsd")]
-fn target_env(_lib_path: ~str, _prog: ~str) -> ~[(~str,~str)] {
+fn target_env(_lib_path: &str, _prog: &str) -> ~[(~str,~str)] {
     ~[]
 }
 
-struct Result {status: int, out: ~str, err: ~str}
+pub struct Result {status: int, out: ~str, err: ~str}
 
 // FIXME (#2659): This code is duplicated in core::run::program_output
-pub fn run(lib_path: ~str,
-           prog: ~str,
-           args: ~[~str],
+pub fn run(lib_path: &str,
+           prog: &str,
+           args: &[~str],
            env: ~[(~str, ~str)],
            input: Option<~str>) -> Result {
     let pipe_in = os::pipe();
@ -30,40 +30,40 @@ pub fn run(config: config, testfile: ~str) {
|
|||||||
let props = load_props(&testfile);
|
let props = load_props(&testfile);
|
||||||
debug!("loaded props");
|
debug!("loaded props");
|
||||||
match config.mode {
|
match config.mode {
|
||||||
mode_compile_fail => run_cfail_test(config, props, &testfile),
|
mode_compile_fail => run_cfail_test(&config, &props, &testfile),
|
||||||
mode_run_fail => run_rfail_test(config, props, &testfile),
|
mode_run_fail => run_rfail_test(&config, &props, &testfile),
|
||||||
mode_run_pass => run_rpass_test(config, props, &testfile),
|
mode_run_pass => run_rpass_test(&config, &props, &testfile),
|
||||||
mode_pretty => run_pretty_test(config, props, &testfile),
|
mode_pretty => run_pretty_test(&config, &props, &testfile),
|
||||||
mode_debug_info => run_debuginfo_test(config, props, &testfile)
|
mode_debug_info => run_debuginfo_test(&config, &props, &testfile)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn run_cfail_test(config: config, props: TestProps, testfile: &Path) {
|
fn run_cfail_test(config: &config, props: &TestProps, testfile: &Path) {
|
||||||
let ProcRes = compile_test(config, props, testfile);
|
let ProcRes = compile_test(config, props, testfile);
|
||||||
|
|
||||||
if ProcRes.status == 0 {
|
if ProcRes.status == 0 {
|
||||||
fatal_ProcRes(~"compile-fail test compiled successfully!", ProcRes);
|
fatal_ProcRes(~"compile-fail test compiled successfully!", &ProcRes);
|
||||||
}
|
}
|
||||||
|
|
||||||
check_correct_failure_status(ProcRes);
|
check_correct_failure_status(&ProcRes);
|
||||||
|
|
||||||
let expected_errors = errors::load_errors(testfile);
|
let expected_errors = errors::load_errors(testfile);
|
||||||
if !expected_errors.is_empty() {
|
if !expected_errors.is_empty() {
|
||||||
if !props.error_patterns.is_empty() {
|
if !props.error_patterns.is_empty() {
|
||||||
fatal(~"both error pattern and expected errors specified");
|
fatal(~"both error pattern and expected errors specified");
|
||||||
}
|
}
|
||||||
check_expected_errors(expected_errors, testfile, ProcRes);
|
check_expected_errors(expected_errors, testfile, &ProcRes);
|
||||||
} else {
|
} else {
|
||||||
check_error_patterns(props, testfile, ProcRes);
|
check_error_patterns(props, testfile, &ProcRes);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn run_rfail_test(config: config, props: TestProps, testfile: &Path) {
|
fn run_rfail_test(config: &config, props: &TestProps, testfile: &Path) {
|
||||||
let ProcRes = if !config.jit {
|
let ProcRes = if !config.jit {
|
||||||
let ProcRes = compile_test(config, props, testfile);
|
let ProcRes = compile_test(config, props, testfile);
|
||||||
|
|
||||||
if ProcRes.status != 0 {
|
if ProcRes.status != 0 {
|
||||||
fatal_ProcRes(~"compilation failed!", ProcRes);
|
fatal_ProcRes(~"compilation failed!", &ProcRes);
|
||||||
}
|
}
|
||||||
|
|
||||||
exec_compiled_test(config, props, testfile)
|
exec_compiled_test(config, props, testfile)
|
||||||
@ -74,26 +74,26 @@ fn run_rfail_test(config: config, props: TestProps, testfile: &Path) {
|
|||||||
// The value our Makefile configures valgrind to return on failure
|
// The value our Makefile configures valgrind to return on failure
|
||||||
static valgrind_err: int = 100;
|
static valgrind_err: int = 100;
|
||||||
if ProcRes.status == valgrind_err {
|
if ProcRes.status == valgrind_err {
|
||||||
fatal_ProcRes(~"run-fail test isn't valgrind-clean!", ProcRes);
|
fatal_ProcRes(~"run-fail test isn't valgrind-clean!", &ProcRes);
|
||||||
}
|
}
|
||||||
|
|
||||||
match config.target {
|
match config.target {
|
||||||
|
|
||||||
~"arm-linux-androideabi" => {
|
~"arm-linux-androideabi" => {
|
||||||
if (config.adb_device_status) {
|
if (config.adb_device_status) {
|
||||||
check_correct_failure_status(ProcRes);
|
check_correct_failure_status(&ProcRes);
|
||||||
check_error_patterns(props, testfile, ProcRes);
|
check_error_patterns(props, testfile, &ProcRes);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
_=> {
|
_=> {
|
||||||
check_correct_failure_status(ProcRes);
|
check_correct_failure_status(&ProcRes);
|
||||||
check_error_patterns(props, testfile, ProcRes);
|
check_error_patterns(props, testfile, &ProcRes);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn check_correct_failure_status(ProcRes: ProcRes) {
|
fn check_correct_failure_status(ProcRes: &ProcRes) {
|
||||||
// The value the rust runtime returns on failure
|
// The value the rust runtime returns on failure
|
||||||
static rust_err: int = 101;
|
static rust_err: int = 101;
|
||||||
if ProcRes.status != rust_err {
|
if ProcRes.status != rust_err {
|
||||||
@ -104,27 +104,27 @@ fn check_correct_failure_status(ProcRes: ProcRes) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn run_rpass_test(config: config, props: TestProps, testfile: &Path) {
|
fn run_rpass_test(config: &config, props: &TestProps, testfile: &Path) {
|
||||||
if !config.jit {
|
if !config.jit {
|
||||||
let mut ProcRes = compile_test(config, props, testfile);
|
let mut ProcRes = compile_test(config, props, testfile);
|
||||||
|
|
||||||
if ProcRes.status != 0 {
|
if ProcRes.status != 0 {
|
||||||
fatal_ProcRes(~"compilation failed!", ProcRes);
|
fatal_ProcRes(~"compilation failed!", &ProcRes);
|
||||||
}
|
}
|
||||||
|
|
||||||
ProcRes = exec_compiled_test(config, props, testfile);
|
ProcRes = exec_compiled_test(config, props, testfile);
|
||||||
|
|
||||||
if ProcRes.status != 0 {
|
if ProcRes.status != 0 {
|
||||||
fatal_ProcRes(~"test run failed!", ProcRes);
|
fatal_ProcRes(~"test run failed!", &ProcRes);
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
let ProcRes = jit_test(config, props, testfile);
|
let ProcRes = jit_test(config, props, testfile);
|
||||||
|
|
||||||
if ProcRes.status != 0 { fatal_ProcRes(~"jit failed!", ProcRes); }
|
if ProcRes.status != 0 { fatal_ProcRes(~"jit failed!", &ProcRes); }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn run_pretty_test(config: config, props: TestProps, testfile: &Path) {
|
fn run_pretty_test(config: &config, props: &TestProps, testfile: &Path) {
|
||||||
if props.pp_exact.is_some() {
|
if props.pp_exact.is_some() {
|
||||||
logv(config, ~"testing for exact pretty-printing");
|
logv(config, ~"testing for exact pretty-printing");
|
||||||
} else { logv(config, ~"testing for converging pretty-printing"); }
|
} else { logv(config, ~"testing for converging pretty-printing"); }
|
||||||
@ -137,32 +137,33 @@ fn run_pretty_test(config: config, props: TestProps, testfile: &Path) {
|
|||||||
let mut round = 0;
|
let mut round = 0;
|
||||||
while round < rounds {
|
while round < rounds {
|
||||||
logv(config, fmt!("pretty-printing round %d", round));
|
logv(config, fmt!("pretty-printing round %d", round));
|
||||||
let ProcRes = print_source(config, testfile, srcs[round]);
|
let ProcRes = print_source(config, testfile, copy srcs[round]);
|
||||||
|
|
||||||
if ProcRes.status != 0 {
|
if ProcRes.status != 0 {
|
||||||
fatal_ProcRes(fmt!("pretty-printing failed in round %d", round),
|
fatal_ProcRes(fmt!("pretty-printing failed in round %d", round),
|
||||||
ProcRes);
|
&ProcRes);
|
||||||
}
|
}
|
||||||
|
|
||||||
srcs.push(ProcRes.stdout);
|
let ProcRes{ stdout, _ } = ProcRes;
|
||||||
|
srcs.push(stdout);
|
||||||
round += 1;
|
round += 1;
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut expected =
|
let mut expected =
|
||||||
match props.pp_exact {
|
match props.pp_exact {
|
||||||
Some(file) => {
|
Some(ref file) => {
|
||||||
let filepath = testfile.dir_path().push_rel(&file);
|
let filepath = testfile.dir_path().push_rel(file);
|
||||||
io::read_whole_file_str(&filepath).get()
|
io::read_whole_file_str(&filepath).get()
|
||||||
}
|
}
|
||||||
None => { srcs[vec::len(srcs) - 2u] }
|
None => { copy srcs[srcs.len() - 2u] }
|
||||||
};
|
};
|
||||||
let mut actual = srcs[vec::len(srcs) - 1u];
|
let mut actual = copy srcs[srcs.len() - 1u];
|
||||||
|
|
||||||
if props.pp_exact.is_some() {
|
if props.pp_exact.is_some() {
|
||||||
// Now we have to care about line endings
|
// Now we have to care about line endings
|
||||||
let cr = ~"\r";
|
let cr = ~"\r";
|
||||||
actual = str::replace(actual, cr, ~"");
|
actual = str::replace(actual, cr, "");
|
||||||
expected = str::replace(expected, cr, ~"");
|
expected = str::replace(expected, cr, "");
|
||||||
}
|
}
|
||||||
|
|
||||||
compare_source(expected, actual);
|
compare_source(expected, actual);
|
||||||
@ -171,23 +172,22 @@ fn run_pretty_test(config: config, props: TestProps, testfile: &Path) {
|
|||||||
let ProcRes = typecheck_source(config, props, testfile, actual);
|
let ProcRes = typecheck_source(config, props, testfile, actual);
|
||||||
|
|
||||||
if ProcRes.status != 0 {
|
if ProcRes.status != 0 {
|
||||||
fatal_ProcRes(~"pretty-printed source does not typecheck", ProcRes);
|
fatal_ProcRes(~"pretty-printed source does not typecheck", &ProcRes);
|
||||||
}
|
}
|
||||||
|
|
||||||
return;
|
return;
|
||||||
|
|
||||||
fn print_source(config: config, testfile: &Path, src: ~str) -> ProcRes {
|
fn print_source(config: &config, testfile: &Path, src: ~str) -> ProcRes {
|
||||||
compose_and_run(config, testfile, make_pp_args(config, testfile),
|
compose_and_run(config, testfile, make_pp_args(config, testfile),
|
||||||
~[], config.compile_lib_path, Some(src))
|
~[], config.compile_lib_path, Some(src))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn make_pp_args(config: config, _testfile: &Path) -> ProcArgs {
|
fn make_pp_args(config: &config, _testfile: &Path) -> ProcArgs {
|
||||||
let prog = config.rustc_path;
|
|
||||||
let args = ~[~"-", ~"--pretty", ~"normal"];
|
let args = ~[~"-", ~"--pretty", ~"normal"];
|
||||||
return ProcArgs {prog: prog.to_str(), args: args};
|
return ProcArgs {prog: config.rustc_path.to_str(), args: args};
|
||||||
}
|
}
|
||||||
|
|
||||||
fn compare_source(expected: ~str, actual: ~str) {
|
fn compare_source(expected: &str, actual: &str) {
|
||||||
if expected != actual {
|
if expected != actual {
|
||||||
error(~"pretty-printed source does not match expected source");
|
error(~"pretty-printed source does not match expected source");
|
||||||
let msg =
|
let msg =
|
||||||
@ -207,46 +207,45 @@ actual:\n\
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn typecheck_source(config: config, props: TestProps,
|
fn typecheck_source(config: &config, props: &TestProps,
|
||||||
testfile: &Path, src: ~str) -> ProcRes {
|
testfile: &Path, src: ~str) -> ProcRes {
|
||||||
compose_and_run_compiler(
|
let args = make_typecheck_args(config, props, testfile);
|
||||||
config, props, testfile,
|
compose_and_run_compiler(config, props, testfile, args, Some(src))
|
||||||
make_typecheck_args(config, props, testfile),
|
|
||||||
Some(src))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn make_typecheck_args(config: config, props: TestProps, testfile: &Path) -> ProcArgs {
|
fn make_typecheck_args(config: &config, props: &TestProps, testfile: &Path) -> ProcArgs {
|
||||||
let prog = config.rustc_path;
|
|
||||||
let mut args = ~[~"-",
|
let mut args = ~[~"-",
|
||||||
~"--no-trans", ~"--lib",
|
~"--no-trans", ~"--lib",
|
||||||
~"-L", config.build_base.to_str(),
|
~"-L", config.build_base.to_str(),
|
||||||
~"-L",
|
~"-L",
|
||||||
aux_output_dir_name(config, testfile).to_str()];
|
aux_output_dir_name(config, testfile).to_str()];
|
||||||
args += split_maybe_args(config.rustcflags);
|
args += split_maybe_args(&config.rustcflags);
|
||||||
args += split_maybe_args(props.compile_flags);
|
args += split_maybe_args(&props.compile_flags);
|
||||||
return ProcArgs {prog: prog.to_str(), args: args};
|
return ProcArgs {prog: config.rustc_path.to_str(), args: args};
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn run_debuginfo_test(config: config, props: TestProps, testfile: &Path) {
|
fn run_debuginfo_test(config: &config, props: &TestProps, testfile: &Path) {
|
||||||
// do not optimize debuginfo tests
|
// do not optimize debuginfo tests
|
||||||
let config = match config.rustcflags {
|
let mut config = match config.rustcflags {
|
||||||
Some(flags) => config {
|
Some(ref flags) => config {
|
||||||
rustcflags: Some(str::replace(flags, ~"-O", ~"")),
|
rustcflags: Some(str::replace(*flags, ~"-O", ~"")),
|
||||||
.. config
|
.. copy *config
|
||||||
},
|
},
|
||||||
None => config
|
None => copy *config
|
||||||
};
|
};
|
||||||
|
let config = &mut config;
|
||||||
|
let cmds = str::connect(props.debugger_cmds, "\n");
|
||||||
|
let check_lines = copy props.check_lines;
|
||||||
|
|
||||||
// compile test file (it shoud have 'compile-flags:-g' in the header)
|
// compile test file (it shoud have 'compile-flags:-g' in the header)
|
||||||
let mut ProcRes = compile_test(config, props, testfile);
|
let mut ProcRes = compile_test(config, props, testfile);
|
||||||
if ProcRes.status != 0 {
|
if ProcRes.status != 0 {
|
||||||
fatal_ProcRes(~"compilation failed!", ProcRes);
|
fatal_ProcRes(~"compilation failed!", &ProcRes);
|
||||||
}
|
}
|
||||||
|
|
||||||
// write debugger script
|
// write debugger script
|
||||||
let script_str = str::append(str::connect(props.debugger_cmds, "\n"),
|
let script_str = str::append(cmds, "\nquit\n");
|
||||||
~"\nquit\n");
|
|
||||||
debug!("script_str = %s", script_str);
|
debug!("script_str = %s", script_str);
|
||||||
dump_output_file(config, testfile, script_str, ~"debugger.script");
|
dump_output_file(config, testfile, script_str, ~"debugger.script");
|
||||||
|
|
||||||
@ -265,13 +264,13 @@ fn run_debuginfo_test(config: config, props: TestProps, testfile: &Path) {
|
|||||||
fatal(~"gdb failed to execute");
|
fatal(~"gdb failed to execute");
|
||||||
}
|
}
|
||||||
|
|
||||||
let num_check_lines = vec::len(props.check_lines);
|
let num_check_lines = vec::len(check_lines);
|
||||||
if num_check_lines > 0 {
|
if num_check_lines > 0 {
|
||||||
// check if each line in props.check_lines appears in the
|
// check if each line in props.check_lines appears in the
|
||||||
// output (in order)
|
// output (in order)
|
||||||
let mut i = 0u;
|
let mut i = 0u;
|
||||||
for str::each_line(ProcRes.stdout) |line| {
|
for str::each_line(ProcRes.stdout) |line| {
|
||||||
if props.check_lines[i].trim() == line.trim() {
|
if check_lines[i].trim() == line.trim() {
|
||||||
i += 1u;
|
i += 1u;
|
||||||
}
|
}
|
||||||
if i == num_check_lines {
|
if i == num_check_lines {
|
||||||
@ -281,14 +280,14 @@ fn run_debuginfo_test(config: config, props: TestProps, testfile: &Path) {
|
|||||||
}
|
}
|
||||||
if i != num_check_lines {
|
if i != num_check_lines {
|
||||||
fatal_ProcRes(fmt!("line not found in debugger output: %s"
|
fatal_ProcRes(fmt!("line not found in debugger output: %s"
|
||||||
props.check_lines[i]), ProcRes);
|
check_lines[i]), &ProcRes);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn check_error_patterns(props: TestProps,
|
fn check_error_patterns(props: &TestProps,
|
||||||
testfile: &Path,
|
testfile: &Path,
|
||||||
ProcRes: ProcRes) {
|
ProcRes: &ProcRes) {
|
||||||
if vec::is_empty(props.error_patterns) {
|
if vec::is_empty(props.error_patterns) {
|
||||||
fatal(~"no error pattern specified in " + testfile.to_str());
|
fatal(~"no error pattern specified in " + testfile.to_str());
|
||||||
}
|
}
|
||||||
@ -298,18 +297,18 @@ fn check_error_patterns(props: TestProps,
|
|||||||
}
|
}
|
||||||
|
|
||||||
let mut next_err_idx = 0u;
|
let mut next_err_idx = 0u;
|
||||||
let mut next_err_pat = props.error_patterns[next_err_idx];
|
let mut next_err_pat = &props.error_patterns[next_err_idx];
|
||||||
let mut done = false;
|
let mut done = false;
|
||||||
for str::each_line(ProcRes.stderr) |line| {
|
for str::each_line(ProcRes.stderr) |line| {
|
||||||
if str::contains(line, next_err_pat) {
|
if str::contains(line, *next_err_pat) {
|
||||||
debug!("found error pattern %s", next_err_pat);
|
debug!("found error pattern %s", *next_err_pat);
|
||||||
next_err_idx += 1u;
|
next_err_idx += 1u;
|
||||||
if next_err_idx == vec::len(props.error_patterns) {
|
if next_err_idx == vec::len(props.error_patterns) {
|
||||||
debug!("found all error patterns");
|
debug!("found all error patterns");
|
||||||
done = true;
|
done = true;
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
next_err_pat = props.error_patterns[next_err_idx];
|
next_err_pat = &props.error_patterns[next_err_idx];
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if done { return; }
|
if done { return; }
|
||||||
@ -330,7 +329,7 @@ fn check_error_patterns(props: TestProps,
|
|||||||
|
|
||||||
fn check_expected_errors(expected_errors: ~[errors::ExpectedError],
|
fn check_expected_errors(expected_errors: ~[errors::ExpectedError],
|
||||||
testfile: &Path,
|
testfile: &Path,
|
||||||
ProcRes: ProcRes) {
|
ProcRes: &ProcRes) {
|
||||||
|
|
||||||
// true if we found the error in question
|
// true if we found the error in question
|
||||||
let mut found_flags = vec::from_elem(
|
let mut found_flags = vec::from_elem(
|
||||||
@ -380,14 +379,14 @@ fn check_expected_errors(expected_errors: ~[errors::ExpectedError],
|
|||||||
|
|
||||||
for uint::range(0u, vec::len(found_flags)) |i| {
|
for uint::range(0u, vec::len(found_flags)) |i| {
|
||||||
if !found_flags[i] {
|
if !found_flags[i] {
|
||||||
let ee = expected_errors[i];
|
let ee = &expected_errors[i];
|
||||||
fatal_ProcRes(fmt!("expected %s on line %u not found: %s",
|
fatal_ProcRes(fmt!("expected %s on line %u not found: %s",
|
||||||
ee.kind, ee.line, ee.msg), ProcRes);
|
ee.kind, ee.line, ee.msg), ProcRes);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn is_compiler_error_or_warning(line: ~str) -> bool {
|
fn is_compiler_error_or_warning(line: &str) -> bool {
|
||||||
let mut i = 0u;
|
let mut i = 0u;
|
||||||
return
|
return
|
||||||
scan_until_char(line, ':', &mut i) &&
|
scan_until_char(line, ':', &mut i) &&
|
||||||
@ -401,11 +400,11 @@ fn is_compiler_error_or_warning(line: ~str) -> bool {
|
|||||||
scan_char(line, ':', &mut i) &&
|
scan_char(line, ':', &mut i) &&
|
||||||
scan_integer(line, &mut i) &&
|
scan_integer(line, &mut i) &&
|
||||||
scan_char(line, ' ', &mut i) &&
|
scan_char(line, ' ', &mut i) &&
|
||||||
(scan_string(line, ~"error", &mut i) ||
|
(scan_string(line, "error", &mut i) ||
|
||||||
scan_string(line, ~"warning", &mut i));
|
scan_string(line, "warning", &mut i));
|
||||||
}
|
}
|
||||||
|
|
||||||
fn scan_until_char(haystack: ~str, needle: char, idx: &mut uint) -> bool {
|
fn scan_until_char(haystack: &str, needle: char, idx: &mut uint) -> bool {
|
||||||
if *idx >= haystack.len() {
|
if *idx >= haystack.len() {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
@ -417,7 +416,7 @@ fn scan_until_char(haystack: ~str, needle: char, idx: &mut uint) -> bool {
|
|||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
fn scan_char(haystack: ~str, needle: char, idx: &mut uint) -> bool {
|
fn scan_char(haystack: &str, needle: char, idx: &mut uint) -> bool {
|
||||||
if *idx >= haystack.len() {
|
if *idx >= haystack.len() {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
@ -429,7 +428,7 @@ fn scan_char(haystack: ~str, needle: char, idx: &mut uint) -> bool {
|
|||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
fn scan_integer(haystack: ~str, idx: &mut uint) -> bool {
|
fn scan_integer(haystack: &str, idx: &mut uint) -> bool {
|
||||||
let mut i = *idx;
|
let mut i = *idx;
|
||||||
while i < haystack.len() {
|
while i < haystack.len() {
|
||||||
let range = str::char_range_at(haystack, i);
|
let range = str::char_range_at(haystack, i);
|
||||||
@ -445,7 +444,7 @@ fn scan_integer(haystack: ~str, idx: &mut uint) -> bool {
|
|||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
fn scan_string(haystack: ~str, needle: ~str, idx: &mut uint) -> bool {
|
fn scan_string(haystack: &str, needle: &str, idx: &mut uint) -> bool {
|
||||||
let mut haystack_i = *idx;
|
let mut haystack_i = *idx;
|
||||||
let mut needle_i = 0u;
|
let mut needle_i = 0u;
|
||||||
while needle_i < needle.len() {
|
while needle_i < needle.len() {
|
||||||
@ -466,34 +465,29 @@ struct ProcArgs {prog: ~str, args: ~[~str]}
|
|||||||
|
|
||||||
struct ProcRes {status: int, stdout: ~str, stderr: ~str, cmdline: ~str}
|
struct ProcRes {status: int, stdout: ~str, stderr: ~str, cmdline: ~str}
|
||||||
|
|
||||||
fn compile_test(config: config, props: TestProps,
|
fn compile_test(config: &config, props: &TestProps,
|
||||||
testfile: &Path) -> ProcRes {
|
testfile: &Path) -> ProcRes {
|
||||||
compile_test_(config, props, testfile, [])
|
compile_test_(config, props, testfile, [])
|
||||||
}
|
}
|
||||||
|
|
||||||
fn jit_test(config: config, props: TestProps, testfile: &Path) -> ProcRes {
|
fn jit_test(config: &config, props: &TestProps, testfile: &Path) -> ProcRes {
|
||||||
compile_test_(config, props, testfile, [~"--jit"])
|
compile_test_(config, props, testfile, [~"--jit"])
|
||||||
}
|
}
|
||||||
|
|
||||||
fn compile_test_(config: config, props: TestProps,
|
fn compile_test_(config: &config, props: &TestProps,
|
||||||
testfile: &Path, extra_args: &[~str]) -> ProcRes {
|
testfile: &Path, extra_args: &[~str]) -> ProcRes {
|
||||||
let link_args = ~[~"-L", aux_output_dir_name(config, testfile).to_str()];
|
let link_args = ~[~"-L", aux_output_dir_name(config, testfile).to_str()];
|
||||||
compose_and_run_compiler(
|
let args = make_compile_args(config, props, link_args + extra_args,
|
||||||
config, props, testfile,
|
make_exe_name, testfile);
|
||||||
make_compile_args(config, props, link_args + extra_args,
|
compose_and_run_compiler(config, props, testfile, args, None)
|
||||||
make_exe_name, testfile),
|
|
||||||
None)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn exec_compiled_test(config: config, props: TestProps,
|
fn exec_compiled_test(config: &config, props: &TestProps,
|
||||||
testfile: &Path) -> ProcRes {
|
testfile: &Path) -> ProcRes {
|
||||||
|
|
||||||
// If testing the new runtime then set the RUST_NEWRT env var
|
// If testing the new runtime then set the RUST_NEWRT env var
|
||||||
let env = if config.newrt {
|
let env = copy props.exec_env;
|
||||||
props.exec_env + ~[(~"RUST_NEWRT", ~"1")]
|
let env = if config.newrt { env + &[(~"RUST_NEWRT", ~"1")] } else { env };
|
||||||
} else {
|
|
||||||
props.exec_env
|
|
||||||
};
|
|
||||||
|
|
||||||
match config.target {
|
match config.target {
|
||||||
|
|
||||||
@ -515,8 +509,8 @@ fn exec_compiled_test(config: config, props: TestProps,
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn compose_and_run_compiler(
|
fn compose_and_run_compiler(
|
||||||
config: config,
|
config: &config,
|
||||||
props: TestProps,
|
props: &TestProps,
|
||||||
testfile: &Path,
|
testfile: &Path,
|
||||||
args: ProcArgs,
|
args: ProcArgs,
|
||||||
input: Option<~str>) -> ProcRes {
|
input: Option<~str>) -> ProcRes {
|
||||||
@ -539,7 +533,7 @@ fn compose_and_run_compiler(
|
|||||||
fatal_ProcRes(
|
fatal_ProcRes(
|
||||||
fmt!("auxiliary build of %s failed to compile: ",
|
fmt!("auxiliary build of %s failed to compile: ",
|
||||||
abs_ab.to_str()),
|
abs_ab.to_str()),
|
||||||
auxres);
|
&auxres);
|
||||||
}
|
}
|
||||||
|
|
||||||
match config.target {
|
match config.target {
|
||||||
@ -565,74 +559,66 @@ fn ensure_dir(path: &Path) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn compose_and_run(config: config, testfile: &Path,
|
fn compose_and_run(config: &config, testfile: &Path,
|
||||||
ProcArgs: ProcArgs,
|
ProcArgs{ args, prog }: ProcArgs,
|
||||||
procenv: ~[(~str, ~str)],
|
procenv: ~[(~str, ~str)],
|
||||||
lib_path: ~str,
|
lib_path: &str,
|
||||||
input: Option<~str>) -> ProcRes {
|
input: Option<~str>) -> ProcRes {
|
||||||
return program_output(config, testfile, lib_path,
|
return program_output(config, testfile, lib_path,
|
||||||
ProcArgs.prog, ProcArgs.args, procenv, input);
|
prog, args, procenv, input);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn make_compile_args(config: config, props: TestProps, extras: ~[~str],
|
fn make_compile_args(config: &config, props: &TestProps, extras: ~[~str],
|
||||||
xform: &fn(config, (&Path)) -> Path,
|
xform: &fn(&config, (&Path)) -> Path,
|
||||||
testfile: &Path) -> ProcArgs {
|
testfile: &Path) -> ProcArgs {
|
||||||
let prog = config.rustc_path;
|
|
||||||
let mut args = ~[testfile.to_str(),
|
let mut args = ~[testfile.to_str(),
|
||||||
~"-o", xform(config, testfile).to_str(),
|
~"-o", xform(config, testfile).to_str(),
|
||||||
~"-L", config.build_base.to_str()]
|
~"-L", config.build_base.to_str()]
|
||||||
+ extras;
|
+ extras;
|
||||||
args += split_maybe_args(config.rustcflags);
|
args += split_maybe_args(&config.rustcflags);
|
||||||
args += split_maybe_args(props.compile_flags);
|
args += split_maybe_args(&props.compile_flags);
|
||||||
return ProcArgs {prog: prog.to_str(), args: args};
|
return ProcArgs {prog: config.rustc_path.to_str(), args: args};
|
||||||
}
|
}
|
||||||
|
|
||||||
fn make_lib_name(config: config, auxfile: &Path, testfile: &Path) -> Path {
|
fn make_lib_name(config: &config, auxfile: &Path, testfile: &Path) -> Path {
|
||||||
// what we return here is not particularly important, as it
|
// what we return here is not particularly important, as it
|
||||||
// happens; rustc ignores everything except for the directory.
|
// happens; rustc ignores everything except for the directory.
|
||||||
let auxname = output_testname(auxfile);
|
let auxname = output_testname(auxfile);
|
||||||
aux_output_dir_name(config, testfile).push_rel(&auxname)
|
aux_output_dir_name(config, testfile).push_rel(&auxname)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn make_exe_name(config: config, testfile: &Path) -> Path {
|
fn make_exe_name(config: &config, testfile: &Path) -> Path {
|
||||||
Path(output_base_name(config, testfile).to_str() +
|
Path(output_base_name(config, testfile).to_str() +
|
||||||
str::to_owned(os::EXE_SUFFIX))
|
str::to_owned(os::EXE_SUFFIX))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn make_run_args(config: config, _props: TestProps, testfile: &Path) ->
|
fn make_run_args(config: &config, _props: &TestProps, testfile: &Path) ->
|
||||||
ProcArgs {
|
ProcArgs {
|
||||||
let toolargs = {
|
// If we've got another tool to run under (valgrind),
|
||||||
// If we've got another tool to run under (valgrind),
|
// then split apart its command
|
||||||
// then split apart its command
|
let toolargs = split_maybe_args(&config.runtool);
|
||||||
let runtool =
|
|
||||||
match config.runtool {
|
|
||||||
Some(s) => Some(s),
|
|
||||||
None => None
|
|
||||||
};
|
|
||||||
split_maybe_args(runtool)
|
|
||||||
};
|
|
||||||
|
|
||||||
let args = toolargs + ~[make_exe_name(config, testfile).to_str()];
|
let mut args = toolargs + ~[make_exe_name(config, testfile).to_str()];
|
||||||
return ProcArgs {prog: args[0],
|
let prog = args.shift();
|
||||||
args: vec::slice(args, 1, args.len()).to_vec()};
|
return ProcArgs {prog: prog, args: args};
|
||||||
}
|
}
|
||||||
|
|
||||||
fn split_maybe_args(argstr: Option<~str>) -> ~[~str] {
|
fn split_maybe_args(argstr: &Option<~str>) -> ~[~str] {
|
||||||
fn rm_whitespace(v: ~[~str]) -> ~[~str] {
|
fn rm_whitespace(v: ~[~str]) -> ~[~str] {
|
||||||
v.filtered(|s| !str::is_whitespace(*s))
|
v.filtered(|s| !str::is_whitespace(*s))
|
||||||
}
|
}
|
||||||
|
|
||||||
match argstr {
|
match *argstr {
|
||||||
Some(s) => {
|
Some(ref s) => {
|
||||||
let mut ss = ~[];
|
let mut ss = ~[];
|
||||||
for str::each_split_char(s, ' ') |s| { ss.push(s.to_owned()) }
|
for str::each_split_char(*s, ' ') |s| { ss.push(s.to_owned()) }
|
||||||
rm_whitespace(ss)
|
rm_whitespace(ss)
|
||||||
}
|
}
|
||||||
None => ~[]
|
None => ~[]
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn program_output(config: config, testfile: &Path, lib_path: ~str, prog: ~str,
|
fn program_output(config: &config, testfile: &Path, lib_path: &str, prog: ~str,
|
||||||
args: ~[~str], env: ~[(~str, ~str)],
|
args: ~[~str], env: ~[(~str, ~str)],
|
||||||
input: Option<~str>) -> ProcRes {
|
input: Option<~str>) -> ProcRes {
|
||||||
let cmdline =
|
let cmdline =
|
||||||
@ -641,11 +627,12 @@ fn program_output(config: config, testfile: &Path, lib_path: ~str, prog: ~str,
|
|||||||
logv(config, fmt!("executing %s", cmdline));
|
logv(config, fmt!("executing %s", cmdline));
|
||||||
cmdline
|
cmdline
|
||||||
};
|
};
|
||||||
let res = procsrv::run(lib_path, prog, args, env, input);
|
let procsrv::Result{ out, err, status } =
|
||||||
dump_output(config, testfile, res.out, res.err);
|
procsrv::run(lib_path, prog, args, env, input);
|
||||||
return ProcRes {status: res.status,
|
dump_output(config, testfile, out, err);
|
||||||
stdout: res.out,
|
return ProcRes {status: status,
|
||||||
stderr: res.err,
|
stdout: out,
|
||||||
|
stderr: err,
|
||||||
cmdline: cmdline};
|
cmdline: cmdline};
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -653,41 +640,41 @@ fn program_output(config: config, testfile: &Path, lib_path: ~str, prog: ~str,
|
|||||||
#[cfg(target_os = "linux")]
|
#[cfg(target_os = "linux")]
|
||||||
#[cfg(target_os = "macos")]
|
#[cfg(target_os = "macos")]
|
||||||
#[cfg(target_os = "freebsd")]
|
#[cfg(target_os = "freebsd")]
|
||||||
fn make_cmdline(_libpath: ~str, prog: ~str, args: ~[~str]) -> ~str {
|
fn make_cmdline(_libpath: &str, prog: &str, args: &[~str]) -> ~str {
|
||||||
fmt!("%s %s", prog, str::connect(args, ~" "))
|
fmt!("%s %s", prog, str::connect(args, ~" "))
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(target_os = "win32")]
|
#[cfg(target_os = "win32")]
|
||||||
fn make_cmdline(libpath: ~str, prog: ~str, args: ~[~str]) -> ~str {
|
fn make_cmdline(libpath: &str, prog: &str, args: &[~str]) -> ~str {
|
||||||
fmt!("%s %s %s", lib_path_cmd_prefix(libpath), prog,
|
fmt!("%s %s %s", lib_path_cmd_prefix(libpath), prog,
|
||||||
str::connect(args, ~" "))
|
str::connect(args, ~" "))
|
||||||
}
|
}
|
||||||
|
|
||||||
// Build the LD_LIBRARY_PATH variable as it would be seen on the command line
|
// Build the LD_LIBRARY_PATH variable as it would be seen on the command line
|
||||||
// for diagnostic purposes
|
// for diagnostic purposes
|
||||||
fn lib_path_cmd_prefix(path: ~str) -> ~str {
|
fn lib_path_cmd_prefix(path: &str) -> ~str {
|
||||||
fmt!("%s=\"%s\"", util::lib_path_env_var(), util::make_new_path(path))
|
fmt!("%s=\"%s\"", util::lib_path_env_var(), util::make_new_path(path))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn dump_output(config: config, testfile: &Path, out: ~str, err: ~str) {
|
fn dump_output(config: &config, testfile: &Path, out: &str, err: &str) {
|
||||||
dump_output_file(config, testfile, out, ~"out");
|
dump_output_file(config, testfile, out, "out");
|
||||||
dump_output_file(config, testfile, err, ~"err");
|
dump_output_file(config, testfile, err, "err");
|
||||||
maybe_dump_to_stdout(config, out, err);
|
maybe_dump_to_stdout(config, out, err);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn dump_output_file(config: config, testfile: &Path,
|
fn dump_output_file(config: &config, testfile: &Path,
|
||||||
out: ~str, extension: ~str) {
|
out: &str, extension: &str) {
|
||||||
let outfile = make_out_name(config, testfile, extension);
|
let outfile = make_out_name(config, testfile, extension);
|
||||||
let writer =
|
let writer =
|
||||||
io::file_writer(&outfile, ~[io::Create, io::Truncate]).get();
|
io::file_writer(&outfile, ~[io::Create, io::Truncate]).get();
|
||||||
writer.write_str(out);
|
writer.write_str(out);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn make_out_name(config: config, testfile: &Path, extension: ~str) -> Path {
|
fn make_out_name(config: &config, testfile: &Path, extension: &str) -> Path {
|
||||||
output_base_name(config, testfile).with_filetype(extension)
|
output_base_name(config, testfile).with_filetype(extension)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn aux_output_dir_name(config: config, testfile: &Path) -> Path {
|
fn aux_output_dir_name(config: &config, testfile: &Path) -> Path {
|
||||||
output_base_name(config, testfile).with_filetype("libaux")
|
output_base_name(config, testfile).with_filetype("libaux")
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -695,13 +682,13 @@ fn output_testname(testfile: &Path) -> Path {
|
|||||||
Path(testfile.filestem().get())
|
Path(testfile.filestem().get())
|
||||||
}
|
}
|
||||||
|
|
||||||
fn output_base_name(config: config, testfile: &Path) -> Path {
|
fn output_base_name(config: &config, testfile: &Path) -> Path {
|
||||||
config.build_base
|
config.build_base
|
||||||
.push_rel(&output_testname(testfile))
|
.push_rel(&output_testname(testfile))
|
||||||
.with_filetype(config.stage_id)
|
.with_filetype(config.stage_id)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn maybe_dump_to_stdout(config: config, out: ~str, err: ~str) {
|
fn maybe_dump_to_stdout(config: &config, out: &str, err: &str) {
|
||||||
if config.verbose {
|
if config.verbose {
|
||||||
let sep1 = fmt!("------%s------------------------------", ~"stdout");
|
let sep1 = fmt!("------%s------------------------------", ~"stdout");
|
||||||
let sep2 = fmt!("------%s------------------------------", ~"stderr");
|
let sep2 = fmt!("------%s------------------------------", ~"stderr");
|
||||||
@ -718,7 +705,7 @@ fn error(err: ~str) { io::stdout().write_line(fmt!("\nerror: %s", err)); }
|
|||||||
|
|
||||||
fn fatal(err: ~str) -> ! { error(err); fail!(); }
|
fn fatal(err: ~str) -> ! { error(err); fail!(); }
|
||||||
|
|
||||||
fn fatal_ProcRes(err: ~str, ProcRes: ProcRes) -> ! {
|
fn fatal_ProcRes(err: ~str, ProcRes: &ProcRes) -> ! {
|
||||||
let msg =
|
let msg =
|
||||||
fmt!("\n\
|
fmt!("\n\
|
||||||
error: %s\n\
|
error: %s\n\
|
||||||
@ -737,21 +724,20 @@ stderr:\n\
|
|||||||
fail!();
|
fail!();
|
||||||
}
|
}
|
||||||
|
|
||||||
fn _arm_exec_compiled_test(config: config, props: TestProps,
|
fn _arm_exec_compiled_test(config: &config, props: &TestProps,
|
||||||
testfile: &Path) -> ProcRes {
|
testfile: &Path) -> ProcRes {
|
||||||
|
|
||||||
let args = make_run_args(config, props, testfile);
|
let args = make_run_args(config, props, testfile);
|
||||||
let cmdline = make_cmdline(~"", args.prog, args.args);
|
let cmdline = make_cmdline("", args.prog, args.args);
|
||||||
|
|
||||||
// get bare program string
|
// get bare program string
|
||||||
let mut tvec = ~[];
|
let mut tvec = ~[];
|
||||||
let tstr = args.prog;
|
for str::each_split_char(args.prog, '/') |ts| { tvec.push(ts.to_owned()) }
|
||||||
for str::each_split_char(tstr, '/') |ts| { tvec.push(ts.to_owned()) }
|
|
||||||
let prog_short = tvec.pop();
|
let prog_short = tvec.pop();
|
||||||
|
|
||||||
// copy to target
|
// copy to target
|
||||||
let copy_result = procsrv::run(~"", config.adb_path,
|
let copy_result = procsrv::run("", config.adb_path,
|
||||||
~[~"push", args.prog, config.adb_test_dir],
|
[~"push", copy args.prog, copy config.adb_test_dir],
|
||||||
~[(~"",~"")], Some(~""));
|
~[(~"",~"")], Some(~""));
|
||||||
|
|
||||||
if config.verbose {
|
if config.verbose {
|
||||||
@ -767,7 +753,6 @@ fn _arm_exec_compiled_test(config: config, props: TestProps,
|
|||||||
// to stdout and stderr separately but to stdout only
|
// to stdout and stderr separately but to stdout only
|
||||||
let mut newargs_out = ~[];
|
let mut newargs_out = ~[];
|
||||||
let mut newargs_err = ~[];
|
let mut newargs_err = ~[];
|
||||||
let subargs = args.args;
|
|
||||||
newargs_out.push(~"shell");
|
newargs_out.push(~"shell");
|
||||||
newargs_err.push(~"shell");
|
newargs_err.push(~"shell");
|
||||||
|
|
||||||
@ -780,7 +765,7 @@ fn _arm_exec_compiled_test(config: config, props: TestProps,
|
|||||||
newcmd_err.push_str(fmt!("LD_LIBRARY_PATH=%s %s/%s",
|
newcmd_err.push_str(fmt!("LD_LIBRARY_PATH=%s %s/%s",
|
||||||
config.adb_test_dir, config.adb_test_dir, prog_short));
|
config.adb_test_dir, config.adb_test_dir, prog_short));
|
||||||
|
|
||||||
for vec::each(subargs) |tv| {
|
for args.args.each |tv| {
|
||||||
newcmd_out.push_str(" ");
|
newcmd_out.push_str(" ");
|
||||||
newcmd_err.push_str(" ");
|
newcmd_err.push_str(" ");
|
||||||
newcmd_out.push_str(tv.to_owned());
|
newcmd_out.push_str(tv.to_owned());
|
||||||
@ -793,26 +778,28 @@ fn _arm_exec_compiled_test(config: config, props: TestProps,
|
|||||||
newargs_out.push(newcmd_out);
|
newargs_out.push(newcmd_out);
|
||||||
newargs_err.push(newcmd_err);
|
newargs_err.push(newcmd_err);
|
||||||
|
|
||||||
let exe_result_out = procsrv::run(~"", config.adb_path,
|
let procsrv::Result{ out: out_out, err: _out_err, status: out_status } =
|
||||||
newargs_out, ~[(~"",~"")], Some(~""));
|
procsrv::run(~"", config.adb_path, newargs_out, ~[(~"",~"")],
|
||||||
let exe_result_err = procsrv::run(~"", config.adb_path,
|
Some(~""));
|
||||||
newargs_err, ~[(~"",~"")], Some(~""));
|
let procsrv::Result{ out: err_out, err: _err_err, status: _err_status } =
|
||||||
|
procsrv::run(~"", config.adb_path, newargs_err, ~[(~"",~"")],
|
||||||
|
Some(~""));
|
||||||
|
|
||||||
dump_output(config, testfile, exe_result_out.out, exe_result_err.out);
|
dump_output(config, testfile, out_out, err_out);
|
||||||
|
|
||||||
match exe_result_err.out {
|
match err_out {
|
||||||
~"" => ProcRes {status: exe_result_out.status, stdout: exe_result_out.out,
|
~"" => ProcRes {status: out_status, stdout: out_out,
|
||||||
stderr: exe_result_err.out, cmdline: cmdline },
|
stderr: err_out, cmdline: cmdline },
|
||||||
_ => ProcRes {status: 101, stdout: exe_result_out.out,
|
_ => ProcRes {status: 101, stdout: out_out,
|
||||||
stderr: exe_result_err.out, cmdline: cmdline }
|
stderr: err_out, cmdline: cmdline }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn _dummy_exec_compiled_test(config: config, props: TestProps,
|
fn _dummy_exec_compiled_test(config: &config, props: &TestProps,
|
||||||
testfile: &Path) -> ProcRes {
|
testfile: &Path) -> ProcRes {
|
||||||
|
|
||||||
let args = make_run_args(config, props, testfile);
|
let args = make_run_args(config, props, testfile);
|
||||||
let cmdline = make_cmdline(~"", args.prog, args.args);
|
let cmdline = make_cmdline("", args.prog, args.args);
|
||||||
|
|
||||||
match config.mode {
|
match config.mode {
|
||||||
mode_run_fail => ProcRes {status: 101, stdout: ~"",
|
mode_run_fail => ProcRes {status: 101, stdout: ~"",
|
||||||
@ -822,7 +809,7 @@ fn _dummy_exec_compiled_test(config: config, props: TestProps,
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn _arm_push_aux_shared_library(config: config, testfile: &Path) {
|
fn _arm_push_aux_shared_library(config: &config, testfile: &Path) {
|
||||||
let tstr = aux_output_dir_name(config, testfile).to_str();
|
let tstr = aux_output_dir_name(config, testfile).to_str();
|
||||||
|
|
||||||
for os::list_dir_path(&Path(tstr)).each |file| {
|
for os::list_dir_path(&Path(tstr)).each |file| {
|
||||||
@ -830,7 +817,7 @@ fn _arm_push_aux_shared_library(config: config, testfile: &Path) {
|
|||||||
if (file.filetype() == Some(~".so")) {
|
if (file.filetype() == Some(~".so")) {
|
||||||
|
|
||||||
let copy_result = procsrv::run(~"", config.adb_path,
|
let copy_result = procsrv::run(~"", config.adb_path,
|
||||||
~[~"push", file.to_str(), config.adb_test_dir],
|
~[~"push", file.to_str(), copy config.adb_test_dir],
|
||||||
~[(~"",~"")], Some(~""));
|
~[(~"",~"")], Some(~""));
|
||||||
|
|
||||||
if config.verbose {
|
if config.verbose {
|
@@ -12,7 +12,7 @@ use common::config;
 
 use core::os::getenv;
 
-pub fn make_new_path(path: ~str) -> ~str {
+pub fn make_new_path(path: &str) -> ~str {
 
     // Windows just uses PATH as the library search path, so we have to
     // maintain the current value while adding our own
@@ -20,7 +20,7 @@ pub fn make_new_path(path: ~str) -> ~str {
       Some(curr) => {
         fmt!("%s%s%s", path, path_div(), curr)
       }
-      None => path
+      None => path.to_str()
     }
 }
 
@@ -42,7 +42,7 @@ pub fn path_div() -> ~str { ~":" }
 #[cfg(target_os = "win32")]
 pub fn path_div() -> ~str { ~";" }
 
-pub fn logv(config: config, s: ~str) {
+pub fn logv(config: &config, s: ~str) {
     debug!("%s", s);
     if config.verbose { io::println(s); }
 }
@@ -60,7 +60,6 @@ while cur < len(lines):
         if not re.search(r"\bextern mod std\b", block):
             block = "extern mod std;\n" + block
         block = """#[ forbid(ctypes) ];
-#[ forbid(deprecated_mode) ];
 #[ forbid(deprecated_pattern) ];
 #[ forbid(implicit_copies) ];
 #[ forbid(non_implicitly_copyable_typarams) ];
@@ -68,12 +67,9 @@ while cur < len(lines):
 #[ forbid(type_limits) ];
 #[ forbid(unrecognized_lint) ];
 #[ forbid(unused_imports) ];
-#[ forbid(vecs_implicitly_copyable) ];
 #[ forbid(while_true) ];
 
-#[ warn(deprecated_self) ];
-#[ warn(non_camel_case_types) ];
-#[ warn(structural_records) ];\n
+#[ warn(non_camel_case_types) ];\n
 """ + block
         if xfail:
             block = "// xfail-test\n" + block
@@ -134,7 +134,7 @@ pub fn stash_expr_if(c: @fn(@ast::expr, test_mode)->bool,
                      e: @ast::expr,
                      tm: test_mode) {
     if c(e, tm) {
-        *es += ~[e];
+        *es = *es + ~[e];
     } else {
         /* now my indices are wrong :( */
     }
|
@@ -51,7 +51,6 @@ pub enum lint {
 implicit_copies,
 unrecognized_lint,
 non_implicitly_copyable_typarams,
-vecs_implicitly_copyable,
 deprecated_pattern,
 non_camel_case_types,
 type_limits,
@@ -132,14 +131,6 @@ static lint_table: &'static [(&'static str, LintSpec)] = &[
 default: warn
 }),

-("vecs_implicitly_copyable",
-LintSpec {
-lint: vecs_implicitly_copyable,
-desc: "make vecs and strs not implicitly copyable \
-(only checked at top level)",
-default: warn
-}),
-
 ("implicit_copies",
 LintSpec {
 lint: implicit_copies,
@@ -14,8 +14,6 @@ use metadata::csearch;
 use metadata;
 use middle::const_eval;
 use middle::freevars;
-use middle::lint::{get_lint_level, allow};
-use middle::lint;
 use middle::resolve::{Impl, MethodInfo};
 use middle::resolve;
 use middle::ty;
@@ -241,7 +239,6 @@ struct ctxt_ {
 diag: @syntax::diagnostic::span_handler,
 interner: @mut HashMap<intern_key, ~t_box_>,
 next_id: @mut uint,
-vecs_implicitly_copyable: bool,
 legacy_modes: bool,
 cstore: @mut metadata::cstore::CStore,
 sess: session::Session,
@@ -992,14 +989,10 @@ pub fn mk_ctxt(s: session::Session,
 }
 }

-let vecs_implicitly_copyable =
-get_lint_level(s.lint_settings.default_settings,
-lint::vecs_implicitly_copyable) == allow;
 @ctxt_ {
 diag: s.diagnostic(),
 interner: @mut HashMap::new(),
 next_id: @mut primitives::LAST_PRIMITIVE_ID,
-vecs_implicitly_copyable: vecs_implicitly_copyable,
 legacy_modes: legacy_modes,
 cstore: s.cstore,
 sess: s,
@@ -1946,8 +1939,7 @@ pub impl TypeContents {
 }

 fn nonimplicitly_copyable(cx: ctxt) -> TypeContents {
-let base = TypeContents::noncopyable(cx) + TC_OWNED_POINTER;
+TypeContents::noncopyable(cx) + TC_OWNED_POINTER + TC_OWNED_VEC
-if cx.vecs_implicitly_copyable {base} else {base + TC_OWNED_VEC}
 }

 fn needs_drop(&self, cx: ctxt) -> bool {
@@ -102,7 +102,6 @@ impl<E:CLike> BitAnd<EnumSet<E>, EnumSet<E>> for EnumSet<E> {

 #[cfg(test)]
 mod test {
-use core;
 use core::iter;
 use util::enum_set::*;

@@ -103,7 +103,7 @@ fn parse_item_attrs<T:Owned>(
 id: doc::AstId,
 parse_attrs: ~fn(a: ~[ast::attribute]) -> T) -> T {
 do astsrv::exec(srv) |ctxt| {
-let attrs = match *ctxt.ast_map.get(&id) {
+let attrs = match ctxt.ast_map.get_copy(&id) {
 ast_map::node_item(item, _) => copy item.attrs,
 ast_map::node_foreign_item(item, _, _, _) => copy item.attrs,
 _ => fail!("parse_item_attrs: not an item")
@@ -127,7 +127,7 @@ fn fold_enum(
 let desc = {
 let variant = copy variant;
 do astsrv::exec(srv.clone()) |ctxt| {
-match *ctxt.ast_map.get(&doc_id) {
+match ctxt.ast_map.get_copy(&doc_id) {
 ast_map::node_item(@ast::item {
 node: ast::item_enum(ref enum_definition, _), _
 }, _) => {
@@ -177,7 +177,7 @@ fn merge_method_attrs(

 // Create an assoc list from method name to attributes
 let attrs: ~[(~str, Option<~str>)] = do astsrv::exec(srv) |ctxt| {
-match *ctxt.ast_map.get(&item_id) {
+match ctxt.ast_map.get_copy(&item_id) {
 ast_map::node_item(@ast::item {
 node: ast::item_trait(_, _, ref methods), _
 }, _) => {
@@ -53,7 +53,7 @@ fn is_hidden(srv: astsrv::Srv, doc: doc::ItemDoc) -> bool {

 let id = doc.id;
 do astsrv::exec(srv) |ctxt| {
-let attrs = match *ctxt.ast_map.get(&id) {
+let attrs = match ctxt.ast_map.get_copy(&id) {
 ast_map::node_item(item, _) => copy item.attrs,
 _ => ~[]
 };
@@ -55,7 +55,7 @@ fn fold_impl(
 let doc = fold::default_seq_fold_impl(fold, doc);

 do astsrv::exec(fold.ctxt.clone()) |ctxt| {
-match *ctxt.ast_map.get(&doc.item.id) {
+match ctxt.ast_map.get_copy(&doc.item.id) {
 ast_map::node_item(item, _) => {
 match item.node {
 ast::item_impl(_, None, _, ref methods) => {
@@ -134,7 +134,7 @@ fn is_visible(srv: astsrv::Srv, doc: doc::ItemDoc) -> bool {
 let id = doc.id;

 do astsrv::exec(srv) |ctxt| {
-match *ctxt.ast_map.get(&id) {
+match ctxt.ast_map.get_copy(&id) {
 ast_map::node_item(item, _) => {
 match &item.node {
 &ast::item_impl(*) => {
@@ -63,7 +63,7 @@ fn fold_fn(

 fn get_fn_sig(srv: astsrv::Srv, fn_id: doc::AstId) -> Option<~str> {
 do astsrv::exec(srv) |ctxt| {
-match *ctxt.ast_map.get(&fn_id) {
+match ctxt.ast_map.get_copy(&fn_id) {
 ast_map::node_item(@ast::item {
 ident: ident,
 node: ast::item_fn(ref decl, purity, _, ref tys, _), _
@@ -90,7 +90,7 @@ fn fold_const(
 sig: Some({
 let doc = copy doc;
 do astsrv::exec(srv) |ctxt| {
-match *ctxt.ast_map.get(&doc.id()) {
+match ctxt.ast_map.get_copy(&doc.id()) {
 ast_map::node_item(@ast::item {
 node: ast::item_const(ty, _), _
 }, _) => {
@@ -115,7 +115,7 @@ fn fold_enum(
 let sig = {
 let variant = copy *variant;
 do astsrv::exec(srv.clone()) |ctxt| {
-match *ctxt.ast_map.get(&doc_id) {
+match ctxt.ast_map.get_copy(&doc_id) {
 ast_map::node_item(@ast::item {
 node: ast::item_enum(ref enum_definition, _), _
 }, _) => {
@@ -125,7 +125,7 @@ fn fold_enum(
 }.get();

 pprust::variant_to_str(
-ast_variant, extract::interner())
+&ast_variant, extract::interner())
 }
 _ => fail!("enum variant not bound to an enum item")
 }
@@ -170,7 +170,7 @@ fn get_method_sig(
 method_name: ~str
 ) -> Option<~str> {
 do astsrv::exec(srv) |ctxt| {
-match *ctxt.ast_map.get(&item_id) {
+match ctxt.ast_map.get_copy(&item_id) {
 ast_map::node_item(@ast::item {
 node: ast::item_trait(_, _, ref methods), _
 }, _) => {
@@ -241,7 +241,7 @@ fn fold_impl(
 let (bounds, trait_types, self_ty) = {
 let doc = copy doc;
 do astsrv::exec(srv) |ctxt| {
-match *ctxt.ast_map.get(&doc.id()) {
+match ctxt.ast_map.get_copy(&doc.id()) {
 ast_map::node_item(@ast::item {
 node: ast::item_impl(ref generics, opt_trait_type, self_ty, _), _
 }, _) => {
@@ -280,7 +280,7 @@ fn fold_type(
 sig: {
 let doc = copy doc;
 do astsrv::exec(srv) |ctxt| {
-match *ctxt.ast_map.get(&doc.id()) {
+match ctxt.ast_map.get_copy(&doc.id()) {
 ast_map::node_item(@ast::item {
 ident: ident,
 node: ast::item_ty(ty, ref params), _
@@ -312,7 +312,7 @@ fn fold_struct(
 sig: {
 let doc = copy doc;
 do astsrv::exec(srv) |ctxt| {
-match *ctxt.ast_map.get(&doc.id()) {
+match ctxt.ast_map.get_copy(&doc.id()) {
 ast_map::node_item(item, _) => {
 let item = strip_struct_extra_stuff(item);
 Some(pprust::item_to_str(item,
@@ -18,14 +18,12 @@
 #[license = "MIT/ASL2"];
 #[crate_type = "lib"];

-#[allow(vecs_implicitly_copyable,
-non_implicitly_copyable_typarams)];
-
 extern mod std(vers = "0.7-pre");
 extern mod rustc(vers = "0.7-pre");
 extern mod syntax(vers = "0.7-pre");

 use core::*;
+use core::cell::Cell;
 use rustc::driver::{driver, session};
 use syntax::{ast, diagnostic};
 use syntax::ast_util::*;
@@ -71,8 +69,8 @@ fn with_pp(intr: @token::ident_interner,
 * because it has to parse the statements and view_items on each
 * input.
 */
-fn record(repl: Repl, blk: @ast::blk, intr: @token::ident_interner) -> Repl {
+fn record(mut repl: Repl, blk: &ast::blk, intr: @token::ident_interner) -> Repl {
-let view_items = if blk.node.view_items.len() > 0 {
+if blk.node.view_items.len() > 0 {
 let new_view_items = do with_pp(intr) |pp, writer| {
 for blk.node.view_items.each |view_item| {
 pprust::print_view_item(pp, *view_item);
@@ -82,9 +80,9 @@ fn record(repl: Repl, blk: @ast::blk, intr: @token::ident_interner) -> Repl {

 debug!("new view items %s", new_view_items);

-repl.view_items + "\n" + new_view_items
+repl.view_items = repl.view_items + "\n" + new_view_items
-} else { repl.view_items };
+}
-let stmts = if blk.node.stmts.len() > 0 {
+if blk.node.stmts.len() > 0 {
 let new_stmts = do with_pp(intr) |pp, writer| {
 for blk.node.stmts.each |stmt| {
 match stmt.node {
@@ -105,24 +103,21 @@ fn record(repl: Repl, blk: @ast::blk, intr: @token::ident_interner) -> Repl {

 debug!("new stmts %s", new_stmts);

-repl.stmts + "\n" + new_stmts
+repl.stmts = repl.stmts + "\n" + new_stmts
-} else { repl.stmts };

-Repl{
-view_items: view_items,
-stmts: stmts,
-.. repl
 }

+return repl;
 }

 /// Run an input string in a Repl, returning the new Repl.
 fn run(repl: Repl, input: ~str) -> Repl {
+let binary = @copy repl.binary;
 let options = @session::options {
 crate_type: session::unknown_crate,
-binary: @repl.binary,
+binary: binary,
 addl_lib_search_paths: repl.lib_search_paths.map(|p| Path(*p)),
 jit: true,
-.. *session::basic_options()
+.. copy *session::basic_options()
 };

 debug!("building driver input");
@@ -138,7 +133,7 @@ fn run(repl: Repl, input: ~str) -> Repl {

 debug!("building driver configuration");
 let cfg = driver::build_configuration(sess,
-@repl.binary,
+binary,
 &wrapped);

 let outputs = driver::build_output_filenames(&wrapped, &None, &None, sess);
@@ -151,7 +146,7 @@ fn run(repl: Repl, input: ~str) -> Repl {

 for crate.node.module.items.each |item| {
 match item.node {
-ast::item_fn(_, _, _, _, blk) => {
+ast::item_fn(_, _, _, _, ref blk) => {
 if item.ident == sess.ident_of("main") {
 opt = blk.node.expr;
 }
@@ -160,10 +155,11 @@ fn run(repl: Repl, input: ~str) -> Repl {
 }
 }

-let blk = match opt.get().node {
+let e = opt.unwrap();
-ast::expr_call(_, exprs, _) => {
+let blk = match e.node {
+ast::expr_call(_, ref exprs, _) => {
 match exprs[0].node {
-ast::expr_block(blk) => @blk,
+ast::expr_block(ref blk) => blk,
 _ => fail!()
 }
 }
@@ -182,15 +178,16 @@ fn run(repl: Repl, input: ~str) -> Repl {
 fn compile_crate(src_filename: ~str, binary: ~str) -> Option<bool> {
 match do task::try {
 let src_path = Path(src_filename);
+let binary = @copy binary;
 let options = @session::options {
-binary: @binary,
+binary: binary,
 addl_lib_search_paths: ~[os::getcwd()],
-.. *session::basic_options()
+.. copy *session::basic_options()
 };
-let input = driver::file_input(src_path);
+let input = driver::file_input(copy src_path);
 let sess = driver::build_session(options, diagnostic::emit);
 *sess.building_library = true;
-let cfg = driver::build_configuration(sess, @binary, &input);
+let cfg = driver::build_configuration(sess, binary, &input);
 let outputs = driver::build_output_filenames(
 &input, &None, &None, sess);
 // If the library already exists and is newer than the source
@@ -233,7 +230,7 @@ fn compile_crate(src_filename: ~str, binary: ~str) -> Option<bool> {

 /// Tries to get a line from rl after outputting a prompt. Returns
 /// None if no input was read (e.g. EOF was reached).
-fn get_line(use_rl: bool, prompt: ~str) -> Option<~str> {
+fn get_line(use_rl: bool, prompt: &str) -> Option<~str> {
 if use_rl {
 let result = unsafe { rl::read(prompt) };

@@ -280,11 +277,11 @@ fn run_cmd(repl: &mut Repl, _in: @io::Reader, _out: @io::Writer,
 for args.each |arg| {
 let (crate, filename) =
 if arg.ends_with(".rs") || arg.ends_with(".rc") {
-(arg.substr(0, arg.len() - 3).to_owned(), *arg)
+(arg.substr(0, arg.len() - 3).to_owned(), copy *arg)
 } else {
-(*arg, arg + ~".rs")
+(copy *arg, arg + ".rs")
 };
-match compile_crate(filename, repl.binary) {
+match compile_crate(filename, copy repl.binary) {
 Some(_) => loaded_crates.push(crate),
 None => { }
 }
@@ -311,7 +308,7 @@ fn run_cmd(repl: &mut Repl, _in: @io::Reader, _out: @io::Writer,
 let mut multiline_cmd = ~"";
 let mut end_multiline = false;
 while (!end_multiline) {
-match get_line(use_rl, ~"rusti| ") {
+match get_line(use_rl, "rusti| ") {
 None => fail!("unterminated multiline command :{ .. :}"),
 Some(line) => {
 if str::trim(line) == ~":}" {
@@ -334,14 +331,14 @@ fn run_cmd(repl: &mut Repl, _in: @io::Reader, _out: @io::Writer,
 fn run_line(repl: &mut Repl, in: @io::Reader, out: @io::Writer, line: ~str,
 use_rl: bool)
 -> Option<Repl> {
-if line.starts_with(~":") {
+if line.starts_with(":") {
 let full = line.substr(1, line.len() - 1);
 let mut split = ~[];
 for str::each_word(full) |word| { split.push(word.to_owned()) }
 let len = split.len();

 if len > 0 {
-let cmd = split[0];
+let cmd = copy split[0];

 if !cmd.is_empty() {
 let args = if len > 1 {
@@ -361,9 +358,10 @@ fn run_line(repl: &mut Repl, in: @io::Reader, out: @io::Writer, line: ~str,
 }
 }

-let r = *repl;
+let line = Cell(line);
+let r = Cell(copy *repl);
 let result = do task::try {
-run(r, line)
+run(r.take(), line.take())
 };

 if result.is_ok() {
@@ -378,7 +376,7 @@ pub fn main() {
 let out = io::stdout();
 let mut repl = Repl {
 prompt: ~"rusti> ",
-binary: args[0],
+binary: copy args[0],
 running: true,
 view_items: ~"",
 lib_search_paths: ~[],
@@ -13,13 +13,15 @@
 #[allow(implicit_copies)];
 #[allow(managed_heap_memory)];
 #[allow(non_camel_case_types)];
-#[allow(non_implicitly_copyable_typarams)];
 #[allow(owned_heap_memory)];
 #[allow(path_statement)];
 #[allow(unrecognized_lint)];
 #[allow(unused_imports)];
-#[allow(vecs_implicitly_copyable)];
 #[allow(while_true)];
+#[allow(dead_assignment)];
+#[allow(unused_variable)];
+#[allow(unused_unsafe)];
+#[allow(unused_mut)];

 extern mod std;

@@ -55,21 +55,21 @@ pub fn normalize(p: ~Path) -> ~Path {

 /// True if there's a directory in <workspace> with
 /// pkgid's short name
-pub fn workspace_contains_package_id(pkgid: PkgId, workspace: &Path) -> bool {
+pub fn workspace_contains_package_id(pkgid: &PkgId, workspace: &Path) -> bool {
 let pkgpath = workspace.push("src").push(pkgid.path.to_str());
 os::path_is_dir(&pkgpath)
 }

 /// Return the directory for <pkgid>'s source files in <workspace>.
 /// Doesn't check that it exists.
-pub fn pkgid_src_in_workspace(pkgid: PkgId, workspace: &Path) -> Path {
+pub fn pkgid_src_in_workspace(pkgid: &PkgId, workspace: &Path) -> Path {
 let result = workspace.push("src");
 result.push(pkgid.path.to_str())
 }

 /// Figure out what the executable name for <pkgid> in <workspace>'s build
 /// directory is, and if the file exists, return it.
-pub fn built_executable_in_workspace(pkgid: PkgId, workspace: &Path) -> Option<Path> {
+pub fn built_executable_in_workspace(pkgid: &PkgId, workspace: &Path) -> Option<Path> {
 let mut result = workspace.push("build");
 result = result.push_rel(&pkgid.path);
 // should use a target-specific subdirectory
@@ -87,7 +87,7 @@ pub fn built_executable_in_workspace(pkgid: PkgId, workspace: &Path) -> Option<P

 /// Figure out what the library name for <pkgid> in <workspace>'s build
 /// directory is, and if the file exists, return it.
-pub fn built_library_in_workspace(pkgid: PkgId, workspace: &Path) -> Option<Path> {
+pub fn built_library_in_workspace(pkgid: &PkgId, workspace: &Path) -> Option<Path> {
 let mut result = workspace.push("build");
 result = result.push_rel(&pkgid.path);
 // should use a target-specific subdirectory
@@ -159,7 +159,7 @@ pub fn built_library_in_workspace(pkgid: PkgId, workspace: &Path) -> Option<Path
 /// Returns the executable that would be installed for <pkgid>
 /// in <workspace>
 /// As a side effect, creates the bin-dir if it doesn't exist
-pub fn target_executable_in_workspace(pkgid: PkgId, workspace: &Path) -> Path {
+pub fn target_executable_in_workspace(pkgid: &PkgId, workspace: &Path) -> Path {
 target_file_in_workspace(pkgid, workspace, Main)
 }

@@ -167,23 +167,23 @@ pub fn target_executable_in_workspace(pkgid: PkgId, workspace: &Path) -> Path {
 /// Returns the executable that would be installed for <pkgid>
 /// in <workspace>
 /// As a side effect, creates the bin-dir if it doesn't exist
-pub fn target_library_in_workspace(pkgid: PkgId, workspace: &Path) -> Path {
+pub fn target_library_in_workspace(pkgid: &PkgId, workspace: &Path) -> Path {
 target_file_in_workspace(pkgid, workspace, Lib)
 }

 /// Returns the test executable that would be installed for <pkgid>
 /// in <workspace>
-pub fn target_test_in_workspace(pkgid: PkgId, workspace: &Path) -> Path {
+pub fn target_test_in_workspace(pkgid: &PkgId, workspace: &Path) -> Path {
 target_file_in_workspace(pkgid, workspace, Test)
 }

 /// Returns the bench executable that would be installed for <pkgid>
 /// in <workspace>
-pub fn target_bench_in_workspace(pkgid: PkgId, workspace: &Path) -> Path {
+pub fn target_bench_in_workspace(pkgid: &PkgId, workspace: &Path) -> Path {
 target_file_in_workspace(pkgid, workspace, Bench)
 }

-fn target_file_in_workspace(pkgid: PkgId, workspace: &Path,
+fn target_file_in_workspace(pkgid: &PkgId, workspace: &Path,
 what: OutputType) -> Path {
 use conditions::bad_path::cond;

@@ -193,7 +193,8 @@ fn target_file_in_workspace(pkgid: PkgId, workspace: &Path,
 let result = workspace.push(subdir);
 if create_dir {
 if !os::path_exists(&result) && !mkdir_recursive(&result, u_rwx) {
-cond.raise((result, fmt!("I couldn't create the %s dir", subdir)));
+cond.raise((copy result,
+fmt!("I couldn't create the %s dir", subdir)));
 }
 }
 mk_output_path(what, pkgid.path.to_str(), result)
@@ -202,13 +203,13 @@ fn target_file_in_workspace(pkgid: PkgId, workspace: &Path,

 /// Return the directory for <pkgid>'s build artifacts in <workspace>.
 /// Creates it if it doesn't exist.
-pub fn build_pkg_id_in_workspace(pkgid: PkgId, workspace: &Path) -> Path {
+pub fn build_pkg_id_in_workspace(pkgid: &PkgId, workspace: &Path) -> Path {
 use conditions::bad_path::cond;

 let mut result = workspace.push("build");
 // n.b. Should actually use a target-specific
 // subdirectory of build/
-result = result.push(normalize(~pkgid.path).to_str());
+result = result.push(normalize(~copy pkgid.path).to_str());
 if os::path_exists(&result) || os::mkdir_recursive(&result, u_rwx) {
 result
 }
@@ -17,8 +17,6 @@

 #[license = "MIT/ASL2"];
 #[crate_type = "lib"];
-#[allow(vecs_implicitly_copyable,
-non_implicitly_copyable_typarams)];

 extern mod std(vers = "0.7-pre");
 extern mod rustc(vers = "0.7-pre");
@@ -43,6 +41,7 @@ use context::Ctx;
 mod conditions;
 mod context;
 mod path_util;
+#[cfg(test)]
 mod tests;
 mod util;
 mod workspace;
@@ -52,9 +51,9 @@ pub mod usage;
 /// A PkgScript represents user-supplied custom logic for
 /// special build hooks. This only exists for packages with
 /// an explicit package script.
-struct PkgScript {
+struct PkgScript<'self> {
 /// Uniquely identifies this package
-id: PkgId,
+id: &'self PkgId,
 // Used to have this field: deps: ~[(~str, Option<~str>)]
 // but I think it shouldn't be stored here
 /// The contents of the package script: either a file path,
@@ -71,24 +70,24 @@ struct PkgScript {
 build_dir: Path
 }

-impl PkgScript {
+impl<'self> PkgScript<'self> {
 /// Given the path name for a package script
 /// and a package ID, parse the package script into
 /// a PkgScript that we can then execute
-fn parse(script: Path, workspace: &Path, id: PkgId) -> PkgScript {
+fn parse<'a>(script: Path, workspace: &Path, id: &'a PkgId) -> PkgScript<'a> {
 // Get the executable name that was invoked
-let binary = os::args()[0];
+let binary = @copy os::args()[0];
 // Build the rustc session data structures to pass
 // to the compiler
 let options = @session::options {
-binary: @binary,
+binary: binary,
 crate_type: session::bin_crate,
-.. *session::basic_options()
+.. copy *session::basic_options()
 };
 let input = driver::file_input(script);
 let sess = driver::build_session(options, diagnostic::emit);
-let cfg = driver::build_configuration(sess, @binary, &input);
+let cfg = driver::build_configuration(sess, binary, &input);
-let (crate, _) = driver::compile_upto(sess, cfg, &input,
+let (crate, _) = driver::compile_upto(sess, copy cfg, &input,
 driver::cu_parse, None);
 let work_dir = build_pkg_id_in_workspace(id, workspace);

@@ -123,10 +122,10 @@ impl PkgScript {
 let root = r.pop().pop().pop().pop(); // :-\
 debug!("Root is %s, calling compile_rest", root.to_str());
 let exe = self.build_dir.push(~"pkg" + util::exe_suffix());
-util::compile_crate_from_input(self.input, self.id,
+util::compile_crate_from_input(&self.input, self.id,
-Some(self.build_dir),
+Some(copy self.build_dir),
 sess, Some(crate),
-exe, os::args()[0],
+&exe, @copy os::args()[0],
 driver::cu_everything);
 debug!("Running program: %s %s %s", exe.to_str(), root.to_str(), what);
 let status = run::run_program(exe.to_str(), ~[root.to_str(), what]);
@@ -188,9 +187,9 @@ impl Ctx {
 }
 // The package id is presumed to be the first command-line
 // argument
-let pkgid = PkgId::new(args[0]);
+let pkgid = PkgId::new(copy args[0]);
-for pkg_parent_workspaces(pkgid) |workspace| {
+for pkg_parent_workspaces(&pkgid) |workspace| {
-self.build(workspace, pkgid);
+self.build(workspace, &pkgid);
 }
 }
 ~"clean" => {
@@ -199,16 +198,16 @@ impl Ctx {
 }
 // The package id is presumed to be the first command-line
 // argument
-let pkgid = PkgId::new(args[0]);
+let pkgid = PkgId::new(copy args[0]);
 let cwd = os::getcwd();
-self.clean(&cwd, pkgid); // tjc: should use workspace, not cwd
+self.clean(&cwd, &pkgid); // tjc: should use workspace, not cwd
 }
 ~"do" => {
 if args.len() < 2 {
 return usage::do_cmd();
 }

-self.do_cmd(args[0], args[1]);
+self.do_cmd(copy args[0], copy args[1]);
 }
 ~"info" => {
 self.info();
@@ -221,8 +220,8 @@ impl Ctx {
 // The package id is presumed to be the first command-line
 // argument
 let pkgid = PkgId::new(args[0]);
-for pkg_parent_workspaces(pkgid) |workspace| {
+for pkg_parent_workspaces(&pkgid) |workspace| {
-self.install(workspace, pkgid);
+self.install(workspace, &pkgid);
 }
 }
 ~"prefer" => {
@@ -230,7 +229,7 @@ impl Ctx {
 return usage::uninstall();
 }

-let (name, vers) = sep_name_vers(args[0]);
+let (name, vers) = sep_name_vers(copy args[0]);

 self.prefer(name.get(), vers);
 }
@@ -242,7 +241,7 @@ impl Ctx {
 return usage::uninstall();
 }

-let (name, vers) = sep_name_vers(args[0]);
+let (name, vers) = sep_name_vers(copy args[0]);

 self.uninstall(name.get(), vers);
 }
@@ -251,7 +250,7 @@ impl Ctx {
 return usage::uninstall();
 }

-let (name, vers) = sep_name_vers(args[0]);
+let (name, vers) = sep_name_vers(copy args[0]);

 self.unprefer(name.get(), vers);
 }
@@ -259,18 +258,18 @@ impl Ctx {
 }
 }

-fn do_cmd(&self, _cmd: ~str, _pkgname: ~str) {
+fn do_cmd(&self, _cmd: &str, _pkgname: &str) {
 // stub
 fail!("`do` not yet implemented");
 }

-fn build(&self, workspace: &Path, pkgid: PkgId) {
+fn build(&self, workspace: &Path, pkgid: &PkgId) {
 let src_dir = pkgid_src_in_workspace(pkgid, workspace);
 let build_dir = build_pkg_id_in_workspace(pkgid, workspace);
 debug!("Destination dir = %s", build_dir.to_str());

 // Create the package source
-let mut src = PkgSrc::new(&workspace.push("src"), &build_dir, &pkgid);
+let mut src = PkgSrc::new(&workspace.push("src"), &build_dir, pkgid);
 debug!("Package src = %?", src);

 // Is there custom build logic? If so, use it
@@ -311,7 +310,7 @@ impl Ctx {

 }

-fn clean(&self, workspace: &Path, id: PkgId) {
+fn clean(&self, workspace: &Path, id: &PkgId) {
 // Could also support a custom build hook in the pkg
 // script for cleaning files rustpkg doesn't know about.
 // Do something reasonable for now
@@ -332,7 +331,7 @@ impl Ctx {
 fail!("info not yet implemented");
 }

-fn install(&self, workspace: &Path, id: PkgId) {
+fn install(&self, workspace: &Path, id: &PkgId) {
 use conditions::copy_failed::cond;

 // Should use RUST_PATH in the future.
@@ -348,13 +347,13 @@ impl Ctx {
 for maybe_executable.each |exec| {
 debug!("Copying: %s -> %s", exec.to_str(), target_exec.to_str());
 if !os::copy_file(exec, &target_exec) {
-cond.raise((*exec, target_exec));
+cond.raise((copy *exec, copy target_exec));
 }
 }
 for maybe_library.each |lib| {
 debug!("Copying: %s -> %s", lib.to_str(), target_lib.to_str());
 if !os::copy_file(lib, &target_lib) {
-cond.raise((*lib, target_lib));
+cond.raise((copy *lib, copy target_lib));
 }
 }
 }
@@ -387,7 +386,7 @@ impl Ctx {
 }
 }

-fn fetch_git(&self, dir: &Path, url: ~str, target: Option<~str>) {
+fn fetch_git(&self, dir: &Path, url: ~str, mut target: Option<~str>) {
 util::note(fmt!("fetching from %s using git", url));

 // Git can't clone into a non-empty directory
@@ -405,7 +404,7 @@ impl Ctx {
 do util::temp_change_dir(dir) {
 success = run::program_output(~"git",
 ~[~"checkout",
-target.get()]).status != 0
+target.swap_unwrap()]).status != 0
 }

 if !success {
@@ -525,7 +524,7 @@ pub struct Listener {
 }

 pub fn run(listeners: ~[Listener]) {
-let rcmd = os::args()[2];
+let rcmd = copy os::args()[2];
 let mut found = false;

 for listeners.each |listener| {
@@ -652,12 +651,12 @@ impl PkgSrc {
 // tjc: Rather than erroring out, need to try downloading the
 // contents of the path to a local directory (#5679)
 if !os::path_exists(&dir) {
-cond.raise((self.id, ~"missing package dir"));
+cond.raise((copy self.id, ~"missing package dir"));
 }

 if !os::path_is_dir(&dir) {
-cond.raise((self.id, ~"supplied path for package dir is a \
+cond.raise((copy self.id, ~"supplied path for package dir is a \
 non-directory"));
 }

 dir
@@ -681,7 +680,7 @@ impl PkgSrc {
 /// Requires that dashes in p have already been normalized to
 /// underscores
 fn stem_matches(&self, p: &Path) -> bool {
-let self_id = normalize(~self.id.path).filestem();
+let self_id = normalize(~copy self.id.path).filestem();
 if self_id == p.filestem() {
 return true;
 }
@@ -737,7 +736,7 @@ impl PkgSrc {
 util::note(~"Couldn't infer any crates to build.\n\
 Try naming a crate `main.rs`, `lib.rs`, \
 `test.rs`, or `bench.rs`.");
-cond.raise(self.id);
+cond.raise(copy self.id);
 }

 debug!("found %u libs, %u mains, %u tests, %u benchs",
@@ -752,7 +751,7 @@ impl PkgSrc {
 dst_dir: &Path,
 src_dir: &Path,
 crates: &[Crate],
-cfgs: ~[~str],
+cfgs: &[~str],
 test: bool, crate_type: crate_type) {

 for crates.each |&crate| {
@@ -760,7 +759,7 @@ impl PkgSrc {
 util::note(fmt!("build_crates: compiling %s", path.to_str()));
 util::note(fmt!("build_crates: destination dir is %s", dst_dir.to_str()));

-let result = util::compile_crate(maybe_sysroot, self.id, path,
+let result = util::compile_crate(maybe_sysroot, &self.id, path,
 dst_dir,
 crate.flags,
 crate.cfgs + cfgs,
@@ -77,7 +77,6 @@ fn is_rwx(p: &Path) -> bool {
 }
 }

-#[cfg(test)]
 fn test_sysroot() -> Path {
 // Totally gross hack but it's just for test cases.
 // Infer the sysroot from the exe name and tack "stage2"
@@ -107,19 +106,19 @@ fn test_install_valid() {
 let temp_pkg_id = fake_pkg();
 let temp_workspace = mk_temp_workspace(&temp_pkg_id.path);
 // should have test, bench, lib, and main
-ctxt.install(&temp_workspace, temp_pkg_id);
+ctxt.install(&temp_workspace, &temp_pkg_id);
 // Check that all files exist
-let exec = target_executable_in_workspace(temp_pkg_id, &temp_workspace);
+let exec = target_executable_in_workspace(&temp_pkg_id, &temp_workspace);
 debug!("exec = %s", exec.to_str());
 assert!(os::path_exists(&exec));
 assert!(is_rwx(&exec));
-let lib = target_library_in_workspace(temp_pkg_id, &temp_workspace);
+let lib = target_library_in_workspace(&temp_pkg_id, &temp_workspace);
 debug!("lib = %s", lib.to_str());
 assert!(os::path_exists(&lib));
 assert!(is_rwx(&lib));
 // And that the test and bench executables aren't installed
-assert!(!os::path_exists(&target_test_in_workspace(temp_pkg_id, &temp_workspace)));
+assert!(!os::path_exists(&target_test_in_workspace(&temp_pkg_id, &temp_workspace)));
-let bench = target_bench_in_workspace(temp_pkg_id, &temp_workspace);
+let bench = target_bench_in_workspace(&temp_pkg_id, &temp_workspace);
 debug!("bench = %s", bench.to_str());
 assert!(!os::path_exists(&bench));
 }
@@ -140,7 +139,7 @@ fn test_install_invalid() {
 do cond.trap(|_| {
 error_occurred = true;
 }).in {
-ctxt.install(&temp_workspace, pkgid);
+ctxt.install(&temp_workspace, &pkgid);
 }
 }
 assert!(error_occurred && error1_occurred);
@@ -155,19 +154,19 @@ fn test_install_url() {
 let temp_pkg_id = remote_pkg();
 let temp_workspace = mk_temp_workspace(&temp_pkg_id.path);
 // should have test, bench, lib, and main
-ctxt.install(&temp_workspace, temp_pkg_id);
+ctxt.install(&temp_workspace, &temp_pkg_id);
 // Check that all files exist
-let exec = target_executable_in_workspace(temp_pkg_id, &temp_workspace);
+let exec = target_executable_in_workspace(&temp_pkg_id, &temp_workspace);
 debug!("exec = %s", exec.to_str());
 assert!(os::path_exists(&exec));
 assert!(is_rwx(&exec));
-let lib = target_library_in_workspace(temp_pkg_id, &temp_workspace);
+let lib = target_library_in_workspace(&temp_pkg_id, &temp_workspace);
 debug!("lib = %s", lib.to_str());
 assert!(os::path_exists(&lib));
 assert!(is_rwx(&lib));
 // And that the test and bench executables aren't installed
-assert!(!os::path_exists(&target_test_in_workspace(temp_pkg_id, &temp_workspace)));
+assert!(!os::path_exists(&target_test_in_workspace(&temp_pkg_id, &temp_workspace)));
-let bench = target_bench_in_workspace(temp_pkg_id, &temp_workspace);
+let bench = target_bench_in_workspace(&temp_pkg_id, &temp_workspace);
 debug!("bench = %s", bench.to_str());
 assert!(!os::path_exists(&bench));
 }
@ -23,11 +23,16 @@ use syntax::codemap::{dummy_sp, spanned, dummy_spanned};
|
|||||||
use syntax::ext::base::{mk_ctxt, ext_ctxt};
|
use syntax::ext::base::{mk_ctxt, ext_ctxt};
|
||||||
use syntax::ext::build;
|
use syntax::ext::build;
|
||||||
use syntax::{ast, attr, codemap, diagnostic, fold};
|
use syntax::{ast, attr, codemap, diagnostic, fold};
|
||||||
use syntax::ast::{meta_name_value, meta_list, attribute, crate_};
|
use syntax::ast::{meta_name_value, meta_list, attribute};
|
||||||
use syntax::attr::{mk_attr};
|
use syntax::attr::{mk_attr};
|
||||||
use rustc::back::link::output_type_exe;
|
use rustc::back::link::output_type_exe;
|
||||||
use rustc::driver::session::{lib_crate, unknown_crate, crate_type};
|
use rustc::driver::session::{lib_crate, unknown_crate, crate_type};
|
||||||
|
|
||||||
|
static Commands: &'static [&'static str] =
|
||||||
|
&["build", "clean", "do", "info", "install", "prefer", "test", "uninstall",
|
||||||
|
"unprefer"];
|
||||||
|
|
||||||
|
|
||||||
pub type ExitCode = int; // For now
|
pub type ExitCode = int; // For now
|
||||||
|
|
||||||
/// A version is either an exact revision,
|
/// A version is either an exact revision,
|
||||||
@ -41,28 +46,28 @@ impl Ord for Version {
|
|||||||
fn lt(&self, other: &Version) -> bool {
|
fn lt(&self, other: &Version) -> bool {
|
||||||
match (self, other) {
|
match (self, other) {
|
||||||
(&ExactRevision(f1), &ExactRevision(f2)) => f1 < f2,
|
(&ExactRevision(f1), &ExactRevision(f2)) => f1 < f2,
|
||||||
(&SemVersion(v1), &SemVersion(v2)) => v1 < v2,
|
(&SemVersion(ref v1), &SemVersion(ref v2)) => v1 < v2,
|
||||||
_ => false // incomparable, really
|
_ => false // incomparable, really
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
fn le(&self, other: &Version) -> bool {
|
fn le(&self, other: &Version) -> bool {
|
||||||
match (self, other) {
|
match (self, other) {
|
||||||
(&ExactRevision(f1), &ExactRevision(f2)) => f1 <= f2,
|
(&ExactRevision(f1), &ExactRevision(f2)) => f1 <= f2,
|
||||||
(&SemVersion(v1), &SemVersion(v2)) => v1 <= v2,
|
(&SemVersion(ref v1), &SemVersion(ref v2)) => v1 <= v2,
|
||||||
_ => false // incomparable, really
|
_ => false // incomparable, really
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
fn ge(&self, other: &Version) -> bool {
|
fn ge(&self, other: &Version) -> bool {
|
||||||
match (self, other) {
|
match (self, other) {
|
||||||
(&ExactRevision(f1), &ExactRevision(f2)) => f1 > f2,
|
(&ExactRevision(f1), &ExactRevision(f2)) => f1 > f2,
|
||||||
(&SemVersion(v1), &SemVersion(v2)) => v1 > v2,
|
(&SemVersion(ref v1), &SemVersion(ref v2)) => v1 > v2,
|
||||||
_ => false // incomparable, really
|
_ => false // incomparable, really
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
fn gt(&self, other: &Version) -> bool {
|
fn gt(&self, other: &Version) -> bool {
|
||||||
match (self, other) {
|
match (self, other) {
|
||||||
(&ExactRevision(f1), &ExactRevision(f2)) => f1 >= f2,
|
(&ExactRevision(f1), &ExactRevision(f2)) => f1 >= f2,
|
||||||
(&SemVersion(v1), &SemVersion(v2)) => v1 >= v2,
|
(&SemVersion(ref v1), &SemVersion(ref v2)) => v1 >= v2,
|
||||||
_ => false // incomparable, really
|
_ => false // incomparable, really
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -72,8 +77,8 @@ impl Ord for Version {
|
|||||||
impl ToStr for Version {
|
impl ToStr for Version {
|
||||||
fn to_str(&self) -> ~str {
|
fn to_str(&self) -> ~str {
|
||||||
match *self {
|
match *self {
|
||||||
ExactRevision(n) => n.to_str(),
|
ExactRevision(ref n) => n.to_str(),
|
||||||
SemVersion(v) => v.to_str()
|
SemVersion(ref v) => v.to_str()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -147,11 +152,8 @@ pub fn root() -> Path {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn is_cmd(cmd: ~str) -> bool {
|
pub fn is_cmd(cmd: &str) -> bool {
|
||||||
let cmds = &[~"build", ~"clean", ~"do", ~"info", ~"install", ~"prefer",
|
Commands.any(|&c| c == cmd)
|
||||||
~"test", ~"uninstall", ~"unprefer"];
|
|
||||||
|
|
||||||
vec::contains(cmds, &cmd)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn parse_name(id: ~str) -> result::Result<~str, ~str> {
|
pub fn parse_name(id: ~str) -> result::Result<~str, ~str> {
|
||||||
@ -220,7 +222,7 @@ fn fold_item(ctx: @mut ReadyCtx,
|
|||||||
|
|
||||||
for attrs.each |attr| {
|
for attrs.each |attr| {
|
||||||
match attr.node.value.node {
|
match attr.node.value.node {
|
||||||
ast::meta_list(_, mis) => {
|
ast::meta_list(_, ref mis) => {
|
||||||
for mis.each |mi| {
|
for mis.each |mi| {
|
||||||
match mi.node {
|
match mi.node {
|
||||||
ast::meta_word(cmd) => cmds.push(copy *cmd),
|
ast::meta_word(cmd) => cmds.push(copy *cmd),
|
||||||
@ -266,15 +268,14 @@ fn add_pkg_module(ctx: @mut ReadyCtx, m: ast::_mod) -> ast::_mod {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn mk_listener_vec(ctx: @mut ReadyCtx) -> @ast::expr {
|
fn mk_listener_vec(ctx: @mut ReadyCtx) -> @ast::expr {
|
||||||
let fns = ctx.fns;
|
let descs = do ctx.fns.map |listener| {
|
||||||
let descs = do fns.map |listener| {
|
mk_listener_rec(ctx, listener)
|
||||||
mk_listener_rec(ctx, *listener)
|
|
||||||
};
|
};
|
||||||
let ext_cx = ctx.ext_cx;
|
let ext_cx = ctx.ext_cx;
|
||||||
build::mk_slice_vec_e(ext_cx, dummy_sp(), descs)
|
build::mk_slice_vec_e(ext_cx, dummy_sp(), descs)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn mk_listener_rec(ctx: @mut ReadyCtx, listener: ListenerFn) -> @ast::expr {
|
fn mk_listener_rec(ctx: @mut ReadyCtx, listener: &ListenerFn) -> @ast::expr {
|
||||||
let span = listener.span;
|
let span = listener.span;
|
||||||
let cmds = do listener.cmds.map |&cmd| {
|
let cmds = do listener.cmds.map |&cmd| {
|
||||||
let ext_cx = ctx.ext_cx;
|
let ext_cx = ctx.ext_cx;
|
||||||
@ -437,11 +438,11 @@ pub fn add_pkg(pkg: &Pkg) -> bool {
|
|||||||
|
|
||||||
// FIXME (#4432): Use workcache to only compile when needed
|
// FIXME (#4432): Use workcache to only compile when needed
|
||||||
pub fn compile_input(sysroot: Option<@Path>,
|
pub fn compile_input(sysroot: Option<@Path>,
|
||||||
pkg_id: PkgId,
|
pkg_id: &PkgId,
|
||||||
in_file: &Path,
|
in_file: &Path,
|
||||||
out_dir: &Path,
|
out_dir: &Path,
|
||||||
flags: ~[~str],
|
flags: &[~str],
|
||||||
cfgs: ~[~str],
|
cfgs: &[~str],
|
||||||
opt: bool,
|
opt: bool,
|
||||||
test: bool,
|
test: bool,
|
||||||
crate_type: session::crate_type) -> bool {
|
crate_type: session::crate_type) -> bool {
|
||||||
@@ -456,7 +457,7 @@ pub fn compile_input(sysroot: Option<@Path>,
     // tjc: by default, use the package ID name as the link name
     // not sure if we should support anything else
 
-    let binary = os::args()[0];
+    let binary = @copy os::args()[0];
     let building_library = match crate_type {
         lib_crate | unknown_crate => true,
         _ => false
@@ -485,32 +486,27 @@ pub fn compile_input(sysroot: Option<@Path>,
                                  + flags
                                  + cfgs.flat_map(|&c| { ~[~"--cfg", c] }),
                               driver::optgroups()).get();
-    let options = @session::options {
+    let mut options = session::options {
         crate_type: crate_type,
         optimize: if opt { session::Aggressive } else { session::No },
         test: test,
         maybe_sysroot: sysroot,
         addl_lib_search_paths: ~[copy *out_dir],
-        .. *driver::build_session_options(@binary, &matches, diagnostic::emit)
-    };
-    let mut crate_cfg = options.cfg;
-
-    for cfgs.each |&cfg| {
-        crate_cfg.push(attr::mk_word_item(@cfg));
-    }
-
-    let options = @session::options {
-        cfg: vec::append(options.cfg, crate_cfg),
         // output_type should be conditional
         output_type: output_type_exe, // Use this to get a library? That's weird
-        .. *options
+        .. copy *driver::build_session_options(binary, &matches, diagnostic::emit)
     };
-    let sess = driver::build_session(options, diagnostic::emit);
+
+    for cfgs.each |&cfg| {
+        options.cfg.push(attr::mk_word_item(@cfg));
+    }
+
+    let sess = driver::build_session(@options, diagnostic::emit);
 
     debug!("calling compile_crate_from_input, out_dir = %s,
            building_library = %?", out_dir.to_str(), sess.building_library);
-    let _ = compile_crate_from_input(input, pkg_id, Some(*out_dir), sess, None,
-                                     out_file, binary,
+    let _ = compile_crate_from_input(&input, pkg_id, Some(copy *out_dir), sess,
+                                     None, &out_file, binary,
                                      driver::cu_everything);
     true
 }
@@ -520,18 +516,19 @@ pub fn compile_input(sysroot: Option<@Path>,
 // If crate_opt is present, then finish compilation. If it's None, then
 // call compile_upto and return the crate
 // also, too many arguments
-pub fn compile_crate_from_input(input: driver::input,
-                                pkg_id: PkgId,
+pub fn compile_crate_from_input(input: &driver::input,
+                                pkg_id: &PkgId,
                                 build_dir_opt: Option<Path>,
                                 sess: session::Session,
                                 crate_opt: Option<@ast::crate>,
-                                out_file: Path,
-                                binary: ~str,
+                                out_file: &Path,
+                                binary: @~str,
                                 what: driver::compile_upto) -> @ast::crate {
     debug!("Calling build_output_filenames with %? and %s", build_dir_opt, out_file.to_str());
-    let outputs = driver::build_output_filenames(&input, &build_dir_opt, &Some(out_file), sess);
+    let outputs = driver::build_output_filenames(input, &build_dir_opt,
+                                                 &Some(copy *out_file), sess);
     debug!("Outputs are %? and output type = %?", outputs, sess.opts.output_type);
-    let cfg = driver::build_configuration(sess, @binary, &input);
+    let cfg = driver::build_configuration(sess, binary, input);
     match crate_opt {
         Some(c) => {
             debug!("Calling compile_rest, outputs = %?", outputs);
@@ -541,7 +538,7 @@ pub fn compile_crate_from_input(input: driver::input,
         }
         None => {
             debug!("Calling compile_upto, outputs = %?", outputs);
-            let (crate, _) = driver::compile_upto(sess, cfg, &input,
+            let (crate, _) = driver::compile_upto(sess, copy cfg, input,
                                                   driver::cu_parse, Some(outputs));
 
             debug!("About to inject link_meta info...");
@@ -552,7 +549,8 @@ pub fn compile_crate_from_input(input: driver::input,
             debug!("How many attrs? %?", attr::find_linkage_metas(crate.node.attrs).len());
 
             if attr::find_linkage_metas(crate.node.attrs).is_empty() {
-                crate_to_use = add_attrs(*crate, ~[mk_attr(@dummy_spanned(meta_list(@~"link",
+                crate_to_use = add_attrs(copy *crate,
+                                         ~[mk_attr(@dummy_spanned(meta_list(@~"link",
                     // change PkgId to have a <shortname> field?
                     ~[@dummy_spanned(meta_name_value(@~"name",
                                      mk_string_lit(@pkg_id.path.filestem().get()))),
@@ -578,20 +576,16 @@ pub fn exe_suffix() -> ~str { ~"" }
 
 /// Returns a copy of crate `c` with attributes `attrs` added to its
 /// attributes
-fn add_attrs(c: ast::crate, new_attrs: ~[attribute]) -> @ast::crate {
-    @spanned {
-        node: crate_ {
-            attrs: c.node.attrs + new_attrs, ..c.node
-        },
-        span: c.span
-    }
+fn add_attrs(mut c: ast::crate, new_attrs: ~[attribute]) -> @ast::crate {
+    c.node.attrs += new_attrs;
+    @c
 }
 
 // Called by build_crates
 // FIXME (#4432): Use workcache to only compile when needed
-pub fn compile_crate(sysroot: Option<@Path>, pkg_id: PkgId,
+pub fn compile_crate(sysroot: Option<@Path>, pkg_id: &PkgId,
                      crate: &Path, dir: &Path,
-                     flags: ~[~str], cfgs: ~[~str], opt: bool,
+                     flags: &[~str], cfgs: &[~str], opt: bool,
                      test: bool, crate_type: crate_type) -> bool {
     debug!("compile_crate: crate=%s, dir=%s", crate.to_str(), dir.to_str());
     debug!("compile_crate: short_name = %s, flags =...", pkg_id.to_str());
@@ -14,7 +14,7 @@ use path_util::{rust_path, workspace_contains_package_id};
 use util::PkgId;
 use core::path::Path;
 
-pub fn pkg_parent_workspaces(pkgid: PkgId, action: &fn(&Path) -> bool) -> bool {
+pub fn pkg_parent_workspaces(pkgid: &PkgId, action: &fn(&Path) -> bool) -> bool {
     // Using the RUST_PATH, find workspaces that contain
     // this package ID
     let workspaces = rust_path().filtered(|ws|
@@ -28,7 +28,7 @@ pub mod rustrt {
 }
 
 /// Add a line to history
-pub unsafe fn add_history(line: ~str) -> bool {
+pub unsafe fn add_history(line: &str) -> bool {
     do str::as_c_str(line) |buf| {
         rustrt::linenoiseHistoryAdd(buf) == 1 as c_int
     }
@@ -40,21 +40,21 @@ pub unsafe fn set_history_max_len(len: int) -> bool {
 }
 
 /// Save line history to a file
-pub unsafe fn save_history(file: ~str) -> bool {
+pub unsafe fn save_history(file: &str) -> bool {
     do str::as_c_str(file) |buf| {
         rustrt::linenoiseHistorySave(buf) == 1 as c_int
     }
 }
 
 /// Load line history from a file
-pub unsafe fn load_history(file: ~str) -> bool {
+pub unsafe fn load_history(file: &str) -> bool {
     do str::as_c_str(file) |buf| {
         rustrt::linenoiseHistoryLoad(buf) == 1 as c_int
     }
 }
 
 /// Print out a prompt and then wait for input and return it
-pub unsafe fn read(prompt: ~str) -> Option<~str> {
+pub unsafe fn read(prompt: &str) -> Option<~str> {
     do str::as_c_str(prompt) |buf| {
         let line = rustrt::linenoise(buf);
 
@@ -60,12 +60,12 @@ pub fn path_to_str_with_sep(p: &[path_elt], sep: ~str, itr: @ident_interner)
     str::connect(strs, sep)
 }
 
-pub fn path_ident_to_str(p: path, i: ident, itr: @ident_interner) -> ~str {
-    if vec::is_empty(p) {
+pub fn path_ident_to_str(p: &path, i: ident, itr: @ident_interner) -> ~str {
+    if vec::is_empty(*p) {
         //FIXME /* FIXME (#2543) */ copy *i
         copy *itr.get(i)
     } else {
-        fmt!("%s::%s", path_to_str(p, itr), *itr.get(i))
+        fmt!("%s::%s", path_to_str(*p, itr), *itr.get(i))
     }
 }
 
@@ -338,7 +338,7 @@ pub fn node_id_to_str(map: map, id: node_id, itr: @ident_interner) -> ~str {
         fmt!("unknown node (id=%d)", id)
       }
       Some(&node_item(item, path)) => {
-        let path_str = path_ident_to_str(*path, item.ident, itr);
+        let path_str = path_ident_to_str(path, item.ident, itr);
         let item_str = match item.node {
           item_const(*) => ~"const",
           item_fn(*) => ~"fn",
@@ -355,7 +355,7 @@ pub fn node_id_to_str(map: map, id: node_id, itr: @ident_interner) -> ~str {
       }
      Some(&node_foreign_item(item, abi, _, path)) => {
        fmt!("foreign item %s with abi %? (id=%?)",
-            path_ident_to_str(*path, item.ident, itr), abi, id)
+            path_ident_to_str(path, item.ident, itr), abi, id)
      }
      Some(&node_method(m, _, path)) => {
        fmt!("method %s in %s (id=%?)",
@@ -679,19 +679,19 @@ mod test {
     #[test] fn xorpush_test () {
         let mut s = ~[];
         xorPush(&mut s,14);
-        assert_eq!(s,~[14]);
+        assert_eq!(copy s,~[14]);
         xorPush(&mut s,14);
-        assert_eq!(s,~[]);
+        assert_eq!(copy s,~[]);
         xorPush(&mut s,14);
-        assert_eq!(s,~[14]);
+        assert_eq!(copy s,~[14]);
         xorPush(&mut s,15);
-        assert_eq!(s,~[14,15]);
+        assert_eq!(copy s,~[14,15]);
         xorPush (&mut s,16);
-        assert_eq! (s,~[14,15,16]);
+        assert_eq!(copy s,~[14,15,16]);
         xorPush (&mut s,16);
-        assert_eq! (s,~[14,15]);
+        assert_eq!(copy s,~[14,15]);
         xorPush (&mut s,15);
-        assert_eq! (s,~[14]);
+        assert_eq!(copy s,~[14]);
     }
 
     // convert a list of uints to an @~[ident]
@@ -746,7 +746,7 @@ mod test {
         let mut t = mk_sctable();
 
         let test_sc = ~[M(3),R(id(101,0),14),M(9)];
-        assert_eq!(unfold_test_sc(test_sc,empty_ctxt,&mut t),3);
+        assert_eq!(unfold_test_sc(copy test_sc,empty_ctxt,&mut t),3);
         assert_eq!(t[1],Mark(9,0));
         assert_eq!(t[2],Rename(id(101,0),14,1));
         assert_eq!(t[3],Mark(3,2));
@@ -184,7 +184,7 @@ fn diagnosticcolor(lvl: level) -> u8 {
     }
 }
 
-fn print_diagnostic(topic: ~str, lvl: level, msg: &str) {
+fn print_diagnostic(topic: &str, lvl: level, msg: &str) {
     let use_color = term::color_supported() &&
         io::stderr().get_type() == io::Screen;
     if !topic.is_empty() {
@@ -119,13 +119,13 @@ pub fn expand_asm(cx: @ext_ctxt, sp: span, tts: &[ast::token_tree])
                 cons = str::connect(clobs, ",");
             }
             Options => {
-                let option = *p.parse_str();
+                let option = p.parse_str();
 
-                if option == ~"volatile" {
+                if "volatile" == *option {
                     volatile = true;
-                } else if option == ~"alignstack" {
+                } else if "alignstack" == *option {
                     alignstack = true;
-                } else if option == ~"intel" {
+                } else if "intel" == *option {
                     dialect = ast::asm_intel;
                 }
 
@@ -836,7 +836,7 @@ fn mk_struct_deser_impl(
     cx: @ext_ctxt,
     span: span,
     ident: ast::ident,
-    fields: ~[@ast::struct_field],
+    fields: &[@ast::struct_field],
     generics: &ast::Generics
 ) -> @ast::item {
     let fields = do mk_struct_fields(fields).mapi |idx, field| {
@@ -1120,7 +1120,7 @@ fn mk_enum_deser_body(
     ext_cx: @ext_ctxt,
     span: span,
     name: ast::ident,
-    variants: ~[ast::variant]
+    variants: &[ast::variant]
 ) -> @ast::expr {
     let expr_arm_names = build::mk_base_vec_e(
         ext_cx,
@@ -509,7 +509,7 @@ pub fn mk_unreachable(cx: @ext_ctxt, span: span) -> @ast::expr {
         ],
         ~[
             mk_base_str(cx, span, ~"internal error: entered unreachable code"),
-            mk_base_str(cx, span, loc.file.name),
+            mk_base_str(cx, span, copy loc.file.name),
             mk_uint(cx, span, loc.line),
         ]
     )
@@ -60,11 +60,11 @@ fn cs_clone(cx: @ext_ctxt, span: span,
         build::mk_method_call(cx, span, field, clone_ident, ~[]);
 
     match *substr.fields {
-        Struct(af) => {
+        Struct(ref af) => {
            ctor_ident = ~[ substr.type_ident ];
            all_fields = af;
        }
-        EnumMatching(_, variant, af) => {
+        EnumMatching(_, variant, ref af) => {
            ctor_ident = ~[ variant.node.name ];
            all_fields = af;
        },
@@ -72,7 +72,7 @@ fn cs_clone(cx: @ext_ctxt, span: span,
         StaticEnum(*) | StaticStruct(*) => cx.span_bug(span, "Static method in `deriving(Clone)`")
     }
 
-    match all_fields {
+    match *all_fields {
         [(None, _, _), .. _] => {
             // enum-like
             let subcalls = all_fields.map(|&(_, self_f, _)| subcall(self_f));
@@ -65,8 +65,6 @@ fn cs_ord(less: bool, equal: bool,
     let false_blk_expr = build::mk_block(cx, span,
                                          ~[], ~[],
                                          Some(build::mk_bool(cx, span, false)));
-    let true_blk = build::mk_simple_block(cx, span,
-                                          build::mk_bool(cx, span, true));
     let base = build::mk_bool(cx, span, equal);
 
     cs_fold(
@@ -108,6 +106,8 @@ fn cs_ord(less: bool, equal: bool,
 
             let cmp = build::mk_method_call(cx, span,
                                             self_f, binop, other_fs.to_owned());
+            let true_blk = build::mk_simple_block(cx, span,
+                                                  build::mk_bool(cx, span, true));
             let if_ = expr_if(cmp, true_blk, Some(elseif));
 
             build::mk_expr(cx, span, if_)
@@ -55,15 +55,16 @@ pub fn ordering_const(cx: @ext_ctxt, span: span, cnst: Ordering) -> @expr {
 
 pub fn cs_cmp(cx: @ext_ctxt, span: span,
               substr: &Substructure) -> @expr {
-    let lexical_ord = ~[cx.ident_of("core"),
-                        cx.ident_of("cmp"),
-                        cx.ident_of("lexical_ordering")];
 
     cs_same_method_fold(
         // foldr (possibly) nests the matches in lexical_ordering better
         false,
         |cx, span, old, new| {
-            build::mk_call_global(cx, span, lexical_ord, ~[old, new])
+            build::mk_call_global(cx, span,
+                                  ~[cx.ident_of("core"),
+                                    cx.ident_of("cmp"),
+                                    cx.ident_of("lexical_ordering")],
+                                  ~[old, new])
         },
         ordering_const(cx, span, Equal),
         |cx, span, list, _| {
@@ -259,14 +259,14 @@ pub enum SubstructureFields<'self> {
     fields: `(field ident, self, [others])`, where the field ident is
     only non-`None` in the case of a struct variant.
     */
-    EnumMatching(uint, ast::variant, ~[(Option<ident>, @expr, ~[@expr])]),
+    EnumMatching(uint, &'self ast::variant, ~[(Option<ident>, @expr, ~[@expr])]),
 
     /**
     non-matching variants of the enum, [(variant index, ast::variant,
     [field ident, fields])] (i.e. all fields for self are in the
     first tuple, for other1 are in the second tuple, etc.)
     */
-    EnumNonMatching(~[(uint, ast::variant, ~[(Option<ident>, @expr)])]),
+    EnumNonMatching(&'self [(uint, ast::variant, ~[(Option<ident>, @expr)])]),
 
     /// A static method where Self is a struct
     StaticStruct(&'self ast::struct_def, Either<uint, ~[ident]>),
@@ -290,7 +290,7 @@ representing each variant: (variant index, ast::variant instance,
 */
 pub type EnumNonMatchFunc<'self> =
     &'self fn(@ext_ctxt, span,
-              ~[(uint, ast::variant,
+              &[(uint, ast::variant,
                  ~[(Option<ident>, @expr)])],
               &[@expr]) -> @expr;
 
@@ -416,8 +416,9 @@ impl<'self> MethodDef<'self> {
         let mut nonstatic = false;
 
         match self.self_ty {
-            Some(self_ptr) => {
-                let (self_expr, self_ty) = ty::get_explicit_self(cx, span, self_ptr);
+            Some(ref self_ptr) => {
+                let (self_expr, self_ty) = ty::get_explicit_self(cx, span,
+                                                                 self_ptr);
 
                 ast_self_ty = self_ty;
                 self_args.push(self_expr);
@@ -616,9 +617,10 @@ impl<'self> MethodDef<'self> {
                        self_args: &[@expr],
                        nonself_args: &[@expr])
         -> @expr {
+        let mut matches = ~[];
         self.build_enum_match(cx, span, enum_def, type_ident,
                               self_args, nonself_args,
-                              None, ~[], 0)
+                              None, &mut matches, 0)
     }
 
 
@@ -650,58 +652,57 @@ impl<'self> MethodDef<'self> {
                         self_args: &[@expr],
                         nonself_args: &[@expr],
                         matching: Option<uint>,
-                        matches_so_far: ~[(uint, ast::variant,
+                        matches_so_far: &mut ~[(uint, ast::variant,
                                            ~[(Option<ident>, @expr)])],
                         match_count: uint) -> @expr {
         if match_count == self_args.len() {
             // we've matched against all arguments, so make the final
             // expression at the bottom of the match tree
-            match matches_so_far {
-                [] => cx.span_bug(span, ~"no self match on an enum in generic `deriving`"),
-                _ => {
+            if matches_so_far.len() == 0 {
+                cx.span_bug(span, ~"no self match on an enum in generic \
+                                    `deriving`");
+            }
             // we currently have a vec of vecs, where each
             // subvec is the fields of one of the arguments,
             // but if the variants all match, we want this as
             // vec of tuples, where each tuple represents a
             // field.
 
             let substructure;
 
             // most arms don't have matching variants, so do a
             // quick check to see if they match (even though
             // this means iterating twice) instead of being
             // optimistic and doing a pile of allocations etc.
             match matching {
                 Some(variant_index) => {
                     // `ref` inside let matches is buggy. Causes havoc wih rusc.
                     // let (variant_index, ref self_vec) = matches_so_far[0];
                     let (variant, self_vec) = match matches_so_far[0] {
-                        (_, v, ref s) => (v, s)
+                        (_, ref v, ref s) => (v, s)
                     };
 
                     let mut enum_matching_fields = vec::from_elem(self_vec.len(), ~[]);
 
                     for matches_so_far.tail().each |&(_, _, other_fields)| {
                         for other_fields.eachi |i, &(_, other_field)| {
                             enum_matching_fields[i].push(other_field);
                         }
                     }
                     let field_tuples =
                         do vec::map_zip(*self_vec,
                                         enum_matching_fields) |&(id, self_f), &other| {
                             (id, self_f, other)
                     };
                     substructure = EnumMatching(variant_index, variant, field_tuples);
                 }
                 None => {
-                    substructure = EnumNonMatching(matches_so_far);
+                    substructure = EnumNonMatching(*matches_so_far);
                 }
             }
             self.call_substructure_method(cx, span, type_ident,
                                           self_args, nonself_args,
                                           &substructure)
-                }
-            }
 
         } else { // there are still matches to create
             let current_match_str = if match_count == 0 {
@ -712,9 +713,6 @@ impl<'self> MethodDef<'self> {
|
|||||||
|
|
||||||
let mut arms = ~[];
|
let mut arms = ~[];
|
||||||
|
|
||||||
// this is used as a stack
|
|
||||||
let mut matches_so_far = matches_so_far;
|
|
||||||
|
|
||||||
// the code for nonmatching variants only matters when
|
// the code for nonmatching variants only matters when
|
||||||
// we've seen at least one other variant already
|
// we've seen at least one other variant already
|
||||||
if self.const_nonmatching && match_count > 0 {
|
if self.const_nonmatching && match_count > 0 {
|
||||||
@ -732,7 +730,7 @@ impl<'self> MethodDef<'self> {
|
|||||||
current_match_str,
|
current_match_str,
|
||||||
ast::m_imm);
|
ast::m_imm);
|
||||||
|
|
||||||
matches_so_far.push((index, *variant, idents));
|
matches_so_far.push((index, /*bad*/ copy *variant, idents));
|
||||||
let arm_expr = self.build_enum_match(cx, span,
|
let arm_expr = self.build_enum_match(cx, span,
|
||||||
enum_def,
|
enum_def,
|
||||||
type_ident,
|
type_ident,
|
||||||
@ -744,9 +742,10 @@ impl<'self> MethodDef<'self> {
|
|||||||
arms.push(build::mk_arm(cx, span, ~[ pattern ], arm_expr));
|
arms.push(build::mk_arm(cx, span, ~[ pattern ], arm_expr));
|
||||||
|
|
||||||
if enum_def.variants.len() > 1 {
|
if enum_def.variants.len() > 1 {
|
||||||
|
let e = &EnumNonMatching(&[]);
|
||||||
let wild_expr = self.call_substructure_method(cx, span, type_ident,
|
let wild_expr = self.call_substructure_method(cx, span, type_ident,
|
||||||
self_args, nonself_args,
|
self_args, nonself_args,
|
||||||
&EnumNonMatching(~[]));
|
e);
|
||||||
let wild_arm = build::mk_arm(cx, span,
|
let wild_arm = build::mk_arm(cx, span,
|
||||||
~[ build::mk_pat_wild(cx, span) ],
|
~[ build::mk_pat_wild(cx, span) ],
|
||||||
wild_expr);
|
wild_expr);
|
||||||
@ -760,7 +759,7 @@ impl<'self> MethodDef<'self> {
|
|||||||
current_match_str,
|
current_match_str,
|
||||||
ast::m_imm);
|
ast::m_imm);
|
||||||
|
|
||||||
matches_so_far.push((index, *variant, idents));
|
matches_so_far.push((index, /*bad*/ copy *variant, idents));
|
||||||
let new_matching =
|
let new_matching =
|
||||||
match matching {
|
match matching {
|
||||||
_ if match_count == 0 => Some(index),
|
_ if match_count == 0 => Some(index),
|
||||||
@ -850,7 +849,7 @@ pub fn cs_fold(use_foldl: bool,
|
|||||||
cx: @ext_ctxt, span: span,
|
cx: @ext_ctxt, span: span,
|
||||||
substructure: &Substructure) -> @expr {
|
substructure: &Substructure) -> @expr {
|
||||||
match *substructure.fields {
|
match *substructure.fields {
|
||||||
EnumMatching(_, _, all_fields) | Struct(all_fields) => {
|
EnumMatching(_, _, ref all_fields) | Struct(ref all_fields) => {
|
||||||
if use_foldl {
|
if use_foldl {
|
||||||
do all_fields.foldl(base) |&old, &(_, self_f, other_fs)| {
|
do all_fields.foldl(base) |&old, &(_, self_f, other_fs)| {
|
||||||
f(cx, span, old, self_f, other_fs)
|
f(cx, span, old, self_f, other_fs)
|
||||||
@ -861,8 +860,9 @@ pub fn cs_fold(use_foldl: bool,
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
EnumNonMatching(all_enums) => enum_nonmatch_f(cx, span,
|
EnumNonMatching(ref all_enums) => enum_nonmatch_f(cx, span,
|
||||||
all_enums, substructure.nonself_args),
|
*all_enums,
|
||||||
|
substructure.nonself_args),
|
||||||
StaticEnum(*) | StaticStruct(*) => {
|
StaticEnum(*) | StaticStruct(*) => {
|
||||||
cx.span_bug(span, "Static function in `deriving`")
|
cx.span_bug(span, "Static function in `deriving`")
|
||||||
}
|
}
|
||||||
@ -885,7 +885,7 @@ pub fn cs_same_method(f: &fn(@ext_ctxt, span, ~[@expr]) -> @expr,
|
|||||||
cx: @ext_ctxt, span: span,
|
cx: @ext_ctxt, span: span,
|
||||||
substructure: &Substructure) -> @expr {
|
substructure: &Substructure) -> @expr {
|
||||||
match *substructure.fields {
|
match *substructure.fields {
|
||||||
EnumMatching(_, _, all_fields) | Struct(all_fields) => {
|
EnumMatching(_, _, ref all_fields) | Struct(ref all_fields) => {
|
||||||
// call self_n.method(other_1_n, other_2_n, ...)
|
// call self_n.method(other_1_n, other_2_n, ...)
|
||||||
let called = do all_fields.map |&(_, self_field, other_fields)| {
|
let called = do all_fields.map |&(_, self_field, other_fields)| {
|
||||||
build::mk_method_call(cx, span,
|
build::mk_method_call(cx, span,
|
||||||
@ -896,8 +896,9 @@ pub fn cs_same_method(f: &fn(@ext_ctxt, span, ~[@expr]) -> @expr,
|
|||||||
|
|
||||||
f(cx, span, called)
|
f(cx, span, called)
|
||||||
},
|
},
|
||||||
EnumNonMatching(all_enums) => enum_nonmatch_f(cx, span,
|
EnumNonMatching(ref all_enums) => enum_nonmatch_f(cx, span,
|
||||||
all_enums, substructure.nonself_args),
|
*all_enums,
|
||||||
|
substructure.nonself_args),
|
||||||
StaticEnum(*) | StaticStruct(*) => {
|
StaticEnum(*) | StaticStruct(*) => {
|
||||||
cx.span_bug(span, "Static function in `deriving`")
|
cx.span_bug(span, "Static function in `deriving`")
|
||||||
}
|
}
|
||||||
|
@ -59,7 +59,7 @@ pub fn expand_meta_deriving(cx: @ext_ctxt,
|
|||||||
use ast::{meta_list, meta_name_value, meta_word};
|
use ast::{meta_list, meta_name_value, meta_word};
|
||||||
|
|
||||||
match mitem.node {
|
match mitem.node {
|
||||||
meta_name_value(_, l) => {
|
meta_name_value(_, ref l) => {
|
||||||
cx.span_err(l.span, ~"unexpected value in `deriving`");
|
cx.span_err(l.span, ~"unexpected value in `deriving`");
|
||||||
in_items
|
in_items
|
||||||
}
|
}
|
||||||
@ -67,7 +67,7 @@ pub fn expand_meta_deriving(cx: @ext_ctxt,
|
|||||||
cx.span_warn(mitem.span, ~"empty trait list in `deriving`");
|
cx.span_warn(mitem.span, ~"empty trait list in `deriving`");
|
||||||
in_items
|
in_items
|
||||||
}
|
}
|
||||||
meta_list(_, titems) => {
|
meta_list(_, ref titems) => {
|
||||||
do titems.foldr(in_items) |&titem, in_items| {
|
do titems.foldr(in_items) |&titem, in_items| {
|
||||||
match titem.node {
|
match titem.node {
|
||||||
meta_name_value(tname, _) |
|
meta_name_value(tname, _) |
|
||||||
@ -92,9 +92,9 @@ pub fn expand_meta_deriving(cx: @ext_ctxt,
|
|||||||
|
|
||||||
~"ToStr" => expand!(to_str::expand_deriving_to_str),
|
~"ToStr" => expand!(to_str::expand_deriving_to_str),
|
||||||
|
|
||||||
tname => {
|
ref tname => {
|
||||||
cx.span_err(titem.span, fmt!("unknown \
|
cx.span_err(titem.span, fmt!("unknown \
|
||||||
`deriving` trait: `%s`", tname));
|
`deriving` trait: `%s`", *tname));
|
||||||
in_items
|
in_items
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -63,7 +63,7 @@ pub impl Path {
|
|||||||
fn to_path(&self, cx: @ext_ctxt, span: span,
|
fn to_path(&self, cx: @ext_ctxt, span: span,
|
||||||
self_ty: ident, self_generics: &Generics) -> @ast::Path {
|
self_ty: ident, self_generics: &Generics) -> @ast::Path {
|
||||||
let idents = self.path.map(|s| cx.ident_of(*s) );
|
let idents = self.path.map(|s| cx.ident_of(*s) );
|
||||||
let lt = mk_lifetime(cx, span, self.lifetime);
|
let lt = mk_lifetime(cx, span, &self.lifetime);
|
||||||
let tys = self.params.map(|t| t.to_ty(cx, span, self_ty, self_generics));
|
let tys = self.params.map(|t| t.to_ty(cx, span, self_ty, self_generics));
|
||||||
|
|
||||||
if self.global {
|
if self.global {
|
||||||
@ -106,9 +106,9 @@ pub fn nil_ty() -> Ty {
|
|||||||
Tuple(~[])
|
Tuple(~[])
|
||||||
}
|
}
|
||||||
|
|
||||||
fn mk_lifetime(cx: @ext_ctxt, span: span, lt: Option<~str>) -> Option<@ast::Lifetime> {
|
fn mk_lifetime(cx: @ext_ctxt, span: span, lt: &Option<~str>) -> Option<@ast::Lifetime> {
|
||||||
match lt {
|
match *lt {
|
||||||
Some(s) => Some(@build::mk_lifetime(cx, span, cx.ident_of(s))),
|
Some(ref s) => Some(@build::mk_lifetime(cx, span, cx.ident_of(*s))),
|
||||||
None => None
|
None => None
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -123,10 +123,10 @@ pub impl Ty {
|
|||||||
Owned => {
|
Owned => {
|
||||||
build::mk_ty_uniq(cx, span, raw_ty)
|
build::mk_ty_uniq(cx, span, raw_ty)
|
||||||
}
|
}
|
||||||
Managed(copy mutbl) => {
|
Managed(mutbl) => {
|
||||||
build::mk_ty_box(cx, span, raw_ty, mutbl)
|
build::mk_ty_box(cx, span, raw_ty, mutbl)
|
||||||
}
|
}
|
||||||
Borrowed(copy lt, copy mutbl) => {
|
Borrowed(ref lt, mutbl) => {
|
||||||
let lt = mk_lifetime(cx, span, lt);
|
let lt = mk_lifetime(cx, span, lt);
|
||||||
build::mk_ty_rptr(cx, span, raw_ty, lt, mutbl)
|
build::mk_ty_rptr(cx, span, raw_ty, lt, mutbl)
|
||||||
}
|
}
|
||||||
@ -216,20 +216,20 @@ pub impl LifetimeBounds {
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
pub fn get_explicit_self(cx: @ext_ctxt, span: span, self_ptr: Option<PtrTy>)
|
pub fn get_explicit_self(cx: @ext_ctxt, span: span, self_ptr: &Option<PtrTy>)
|
||||||
-> (@expr, ast::self_ty) {
|
-> (@expr, ast::self_ty) {
|
||||||
let self_path = build::make_self(cx, span);
|
let self_path = build::make_self(cx, span);
|
||||||
match self_ptr {
|
match *self_ptr {
|
||||||
None => {
|
None => {
|
||||||
(self_path, respan(span, ast::sty_value))
|
(self_path, respan(span, ast::sty_value))
|
||||||
}
|
}
|
||||||
Some(ptr) => {
|
Some(ref ptr) => {
|
||||||
let self_ty = respan(
|
let self_ty = respan(
|
||||||
span,
|
span,
|
||||||
match ptr {
|
match *ptr {
|
||||||
Owned => ast::sty_uniq(ast::m_imm),
|
Owned => ast::sty_uniq(ast::m_imm),
|
||||||
Managed(mutbl) => ast::sty_box(mutbl),
|
Managed(mutbl) => ast::sty_box(mutbl),
|
||||||
Borrowed(lt, mutbl) => {
|
Borrowed(ref lt, mutbl) => {
|
||||||
let lt = lt.map(|s| @build::mk_lifetime(cx, span,
|
let lt = lt.map(|s| @build::mk_lifetime(cx, span,
|
||||||
cx.ident_of(*s)));
|
cx.ident_of(*s)));
|
||||||
ast::sty_region(lt, mutbl)
|
ast::sty_region(lt, mutbl)
|
||||||
|
@ -662,12 +662,11 @@ mod test {
|
|||||||
#[test] fn fail_exists_test () {
|
#[test] fn fail_exists_test () {
|
||||||
let src = ~"fn main() { fail!(\"something appropriately gloomy\");}";
|
let src = ~"fn main() { fail!(\"something appropriately gloomy\");}";
|
||||||
let sess = parse::new_parse_sess(None);
|
let sess = parse::new_parse_sess(None);
|
||||||
let cfg = ~[];
|
|
||||||
let crate_ast = parse::parse_crate_from_source_str(
|
let crate_ast = parse::parse_crate_from_source_str(
|
||||||
~"<test>",
|
~"<test>",
|
||||||
@src,
|
@src,
|
||||||
cfg,sess);
|
~[],sess);
|
||||||
expand_crate(sess,cfg,crate_ast);
|
expand_crate(sess,~[],crate_ast);
|
||||||
}
|
}
|
||||||
|
|
||||||
// these following tests are quite fragile, in that they don't test what
|
// these following tests are quite fragile, in that they don't test what
|
||||||
@ -679,13 +678,12 @@ mod test {
|
|||||||
let src = ~"fn bogus() {macro_rules! z (() => (3+4))}\
|
let src = ~"fn bogus() {macro_rules! z (() => (3+4))}\
|
||||||
fn inty() -> int { z!() }";
|
fn inty() -> int { z!() }";
|
||||||
let sess = parse::new_parse_sess(None);
|
let sess = parse::new_parse_sess(None);
|
||||||
let cfg = ~[];
|
|
||||||
let crate_ast = parse::parse_crate_from_source_str(
|
let crate_ast = parse::parse_crate_from_source_str(
|
||||||
~"<test>",
|
~"<test>",
|
||||||
@src,
|
@src,
|
||||||
cfg,sess);
|
~[],sess);
|
||||||
// should fail:
|
// should fail:
|
||||||
expand_crate(sess,cfg,crate_ast);
|
expand_crate(sess,~[],crate_ast);
|
||||||
}
|
}
|
||||||
|
|
||||||
// make sure that macros can leave scope for modules
|
// make sure that macros can leave scope for modules
|
||||||
@ -694,13 +692,12 @@ mod test {
|
|||||||
let src = ~"mod foo {macro_rules! z (() => (3+4))}\
|
let src = ~"mod foo {macro_rules! z (() => (3+4))}\
|
||||||
fn inty() -> int { z!() }";
|
fn inty() -> int { z!() }";
|
||||||
let sess = parse::new_parse_sess(None);
|
let sess = parse::new_parse_sess(None);
|
||||||
let cfg = ~[];
|
|
||||||
let crate_ast = parse::parse_crate_from_source_str(
|
let crate_ast = parse::parse_crate_from_source_str(
|
||||||
~"<test>",
|
~"<test>",
|
||||||
@src,
|
@src,
|
||||||
cfg,sess);
|
~[],sess);
|
||||||
// should fail:
|
// should fail:
|
||||||
expand_crate(sess,cfg,crate_ast);
|
expand_crate(sess,~[],crate_ast);
|
||||||
}
|
}
|
||||||
|
|
||||||
// macro_escape modules shouldn't cause macros to leave scope
|
// macro_escape modules shouldn't cause macros to leave scope
|
||||||
@ -708,13 +705,12 @@ mod test {
|
|||||||
let src = ~"#[macro_escape] mod foo {macro_rules! z (() => (3+4))}\
|
let src = ~"#[macro_escape] mod foo {macro_rules! z (() => (3+4))}\
|
||||||
fn inty() -> int { z!() }";
|
fn inty() -> int { z!() }";
|
||||||
let sess = parse::new_parse_sess(None);
|
let sess = parse::new_parse_sess(None);
|
||||||
let cfg = ~[];
|
|
||||||
let crate_ast = parse::parse_crate_from_source_str(
|
let crate_ast = parse::parse_crate_from_source_str(
|
||||||
~"<test>",
|
~"<test>",
|
||||||
@src,
|
@src,
|
||||||
cfg,sess);
|
~[], sess);
|
||||||
// should fail:
|
// should fail:
|
||||||
expand_crate(sess,cfg,crate_ast);
|
expand_crate(sess,~[],crate_ast);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test] fn core_macros_must_parse () {
|
#[test] fn core_macros_must_parse () {
|
||||||
|
@ -62,7 +62,7 @@ fn pieces_to_expr(cx: @ext_ctxt, sp: span,
|
|||||||
// which tells the RT::conv* functions how to perform the conversion
|
// which tells the RT::conv* functions how to perform the conversion
|
||||||
|
|
||||||
fn make_rt_conv_expr(cx: @ext_ctxt, sp: span, cnv: &Conv) -> @ast::expr {
|
fn make_rt_conv_expr(cx: @ext_ctxt, sp: span, cnv: &Conv) -> @ast::expr {
|
||||||
fn make_flags(cx: @ext_ctxt, sp: span, flags: ~[Flag]) -> @ast::expr {
|
fn make_flags(cx: @ext_ctxt, sp: span, flags: &[Flag]) -> @ast::expr {
|
||||||
let mut tmp_expr = make_rt_path_expr(cx, sp, "flag_none");
|
let mut tmp_expr = make_rt_path_expr(cx, sp, "flag_none");
|
||||||
for flags.each |f| {
|
for flags.each |f| {
|
||||||
let fstr = match *f {
|
let fstr = match *f {
|
||||||
|
@ -154,14 +154,14 @@ pub struct protocol_ {
|
|||||||
|
|
||||||
pub impl protocol_ {
|
pub impl protocol_ {
|
||||||
/// Get a state.
|
/// Get a state.
|
||||||
fn get_state(&self, name: ~str) -> state {
|
fn get_state(&self, name: &str) -> state {
|
||||||
self.states.find(|i| i.name == name).get()
|
self.states.find(|i| name == i.name).get()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_state_by_id(&self, id: uint) -> state { self.states[id] }
|
fn get_state_by_id(&self, id: uint) -> state { self.states[id] }
|
||||||
|
|
||||||
fn has_state(&self, name: ~str) -> bool {
|
fn has_state(&self, name: &str) -> bool {
|
||||||
self.states.find(|i| i.name == name).is_some()
|
self.states.find(|i| name == i.name).is_some()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn filename(&self) -> ~str {
|
fn filename(&self) -> ~str {
|
||||||
|
@ -452,9 +452,9 @@ fn mk_binop(cx: @ext_ctxt, sp: span, bop: token::binop) -> @ast::expr {
|
|||||||
ids_ext(cx, ~[name.to_owned()]))
|
ids_ext(cx, ~[name.to_owned()]))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn mk_token(cx: @ext_ctxt, sp: span, tok: token::Token) -> @ast::expr {
|
fn mk_token(cx: @ext_ctxt, sp: span, tok: &token::Token) -> @ast::expr {
|
||||||
|
|
||||||
match tok {
|
match *tok {
|
||||||
BINOP(binop) => {
|
BINOP(binop) => {
|
||||||
return build::mk_call(cx, sp,
|
return build::mk_call(cx, sp,
|
||||||
ids_ext(cx, ~[~"BINOP"]),
|
ids_ext(cx, ~[~"BINOP"]),
|
||||||
@ -561,7 +561,7 @@ fn mk_token(cx: @ext_ctxt, sp: span, tok: token::Token) -> @ast::expr {
|
|||||||
_ => ()
|
_ => ()
|
||||||
}
|
}
|
||||||
|
|
||||||
let name = match tok {
|
let name = match *tok {
|
||||||
EQ => "EQ",
|
EQ => "EQ",
|
||||||
LT => "LT",
|
LT => "LT",
|
||||||
LE => "LE",
|
LE => "LE",
|
||||||
@ -612,7 +612,7 @@ fn mk_tt(cx: @ext_ctxt, sp: span, tt: &ast::token_tree)
|
|||||||
let e_tok =
|
let e_tok =
|
||||||
build::mk_call(cx, sp,
|
build::mk_call(cx, sp,
|
||||||
ids_ext(cx, ~[~"tt_tok"]),
|
ids_ext(cx, ~[~"tt_tok"]),
|
||||||
~[e_sp, mk_token(cx, sp, *tok)]);
|
~[e_sp, mk_token(cx, sp, tok)]);
|
||||||
let e_push =
|
let e_push =
|
||||||
build::mk_method_call(cx, sp,
|
build::mk_method_call(cx, sp,
|
||||||
build::mk_path(cx, sp, ids_ext(cx, ~[~"tt"])),
|
build::mk_path(cx, sp, ids_ext(cx, ~[~"tt"])),
|
||||||
|
@ -130,7 +130,6 @@ pub fn count_names(ms: &[matcher]) -> uint {
|
|||||||
}})
|
}})
|
||||||
}
|
}
|
||||||
|
|
||||||
#[allow(non_implicitly_copyable_typarams)]
|
|
||||||
pub fn initial_matcher_pos(ms: ~[matcher], sep: Option<Token>, lo: BytePos)
|
pub fn initial_matcher_pos(ms: ~[matcher], sep: Option<Token>, lo: BytePos)
|
||||||
-> ~MatcherPos {
|
-> ~MatcherPos {
|
||||||
let mut match_idx_hi = 0u;
|
let mut match_idx_hi = 0u;
|
||||||
@ -184,15 +183,15 @@ pub enum named_match {
|
|||||||
|
|
||||||
pub type earley_item = ~MatcherPos;
|
pub type earley_item = ~MatcherPos;
|
||||||
|
|
||||||
pub fn nameize(p_s: @mut ParseSess, ms: ~[matcher], res: ~[@named_match])
|
pub fn nameize(p_s: @mut ParseSess, ms: &[matcher], res: &[@named_match])
|
||||||
-> HashMap<ident,@named_match> {
|
-> HashMap<ident,@named_match> {
|
||||||
fn n_rec(p_s: @mut ParseSess, m: matcher, res: ~[@named_match],
|
fn n_rec(p_s: @mut ParseSess, m: &matcher, res: &[@named_match],
|
||||||
ret_val: &mut HashMap<ident, @named_match>) {
|
ret_val: &mut HashMap<ident, @named_match>) {
|
||||||
match m {
|
match *m {
|
||||||
codemap::spanned {node: match_tok(_), _} => (),
|
codemap::spanned {node: match_tok(_), _} => (),
|
||||||
codemap::spanned {node: match_seq(ref more_ms, _, _, _, _), _} => {
|
codemap::spanned {node: match_seq(ref more_ms, _, _, _, _), _} => {
|
||||||
for (*more_ms).each() |next_m| {
|
for more_ms.each |next_m| {
|
||||||
n_rec(p_s, *next_m, res, ret_val)
|
n_rec(p_s, next_m, res, ret_val)
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
codemap::spanned {
|
codemap::spanned {
|
||||||
@ -207,7 +206,7 @@ pub fn nameize(p_s: @mut ParseSess, ms: ~[matcher], res: ~[@named_match])
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
let mut ret_val = HashMap::new();
|
let mut ret_val = HashMap::new();
|
||||||
for ms.each() |m| { n_rec(p_s, *m, res, &mut ret_val) }
|
for ms.each |m| { n_rec(p_s, m, res, &mut ret_val) }
|
||||||
return ret_val;
|
return ret_val;
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -234,10 +233,10 @@ pub fn parse(
|
|||||||
sess: @mut ParseSess,
|
sess: @mut ParseSess,
|
||||||
cfg: ast::crate_cfg,
|
cfg: ast::crate_cfg,
|
||||||
rdr: @reader,
|
rdr: @reader,
|
||||||
ms: ~[matcher]
|
ms: &[matcher]
|
||||||
) -> parse_result {
|
) -> parse_result {
|
||||||
let mut cur_eis = ~[];
|
let mut cur_eis = ~[];
|
||||||
cur_eis.push(initial_matcher_pos(copy ms, None, rdr.peek().sp.lo));
|
cur_eis.push(initial_matcher_pos(ms.to_owned(), None, rdr.peek().sp.lo));
|
||||||
|
|
||||||
loop {
|
loop {
|
||||||
let mut bb_eis = ~[]; // black-box parsed by parser.rs
|
let mut bb_eis = ~[]; // black-box parsed by parser.rs
|
||||||
@ -277,7 +276,7 @@ pub fn parse(
|
|||||||
|
|
||||||
// Only touch the binders we have actually bound
|
// Only touch the binders we have actually bound
|
||||||
for uint::range(ei.match_lo, ei.match_hi) |idx| {
|
for uint::range(ei.match_lo, ei.match_hi) |idx| {
|
||||||
let sub = ei.matches[idx];
|
let sub = copy ei.matches[idx];
|
||||||
new_pos.matches[idx]
|
new_pos.matches[idx]
|
||||||
.push(@matched_seq(sub,
|
.push(@matched_seq(sub,
|
||||||
mk_sp(ei.sp_lo,
|
mk_sp(ei.sp_lo,
|
||||||
@ -410,31 +409,31 @@ pub fn parse(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn parse_nt(p: &Parser, name: ~str) -> nonterminal {
|
pub fn parse_nt(p: &Parser, name: &str) -> nonterminal {
|
||||||
match name {
|
match name {
|
||||||
~"item" => match p.parse_item(~[]) {
|
"item" => match p.parse_item(~[]) {
|
||||||
Some(i) => token::nt_item(i),
|
Some(i) => token::nt_item(i),
|
||||||
None => p.fatal(~"expected an item keyword")
|
None => p.fatal(~"expected an item keyword")
|
||||||
},
|
},
|
||||||
~"block" => token::nt_block(p.parse_block()),
|
"block" => token::nt_block(p.parse_block()),
|
||||||
~"stmt" => token::nt_stmt(p.parse_stmt(~[])),
|
"stmt" => token::nt_stmt(p.parse_stmt(~[])),
|
||||||
~"pat" => token::nt_pat(p.parse_pat(true)),
|
"pat" => token::nt_pat(p.parse_pat(true)),
|
||||||
~"expr" => token::nt_expr(p.parse_expr()),
|
"expr" => token::nt_expr(p.parse_expr()),
|
||||||
~"ty" => token::nt_ty(p.parse_ty(false /* no need to disambiguate*/)),
|
"ty" => token::nt_ty(p.parse_ty(false /* no need to disambiguate*/)),
|
||||||
// this could be handled like a token, since it is one
|
// this could be handled like a token, since it is one
|
||||||
~"ident" => match *p.token {
|
"ident" => match *p.token {
|
||||||
token::IDENT(sn,b) => { p.bump(); token::nt_ident(sn,b) }
|
token::IDENT(sn,b) => { p.bump(); token::nt_ident(sn,b) }
|
||||||
_ => p.fatal(~"expected ident, found "
|
_ => p.fatal(~"expected ident, found "
|
||||||
+ token::to_str(p.reader.interner(), © *p.token))
|
+ token::to_str(p.reader.interner(), © *p.token))
|
||||||
},
|
},
|
||||||
~"path" => token::nt_path(p.parse_path_with_tps(false)),
|
"path" => token::nt_path(p.parse_path_with_tps(false)),
|
||||||
~"tt" => {
|
"tt" => {
|
||||||
*p.quote_depth += 1u; //but in theory, non-quoted tts might be useful
|
*p.quote_depth += 1u; //but in theory, non-quoted tts might be useful
|
||||||
let res = token::nt_tt(@p.parse_token_tree());
|
let res = token::nt_tt(@p.parse_token_tree());
|
||||||
*p.quote_depth -= 1u;
|
*p.quote_depth -= 1u;
|
||||||
res
|
res
|
||||||
}
|
}
|
||||||
~"matchers" => token::nt_matchers(p.parse_matchers()),
|
"matchers" => token::nt_matchers(p.parse_matchers()),
|
||||||
_ => p.fatal(~"Unsupported builtin nonterminal parser: " + name)
|
_ => p.fatal(~"Unsupported builtin nonterminal parser: " + name)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -63,19 +63,19 @@ pub fn add_new_extension(cx: @ext_ctxt,
|
|||||||
|
|
||||||
// Extract the arguments:
|
// Extract the arguments:
|
||||||
let lhses = match *argument_map.get(&lhs_nm) {
|
let lhses = match *argument_map.get(&lhs_nm) {
|
||||||
@matched_seq(ref s, _) => /* FIXME (#2543) */ copy *s,
|
@matched_seq(ref s, _) => /* FIXME (#2543) */ @copy *s,
|
||||||
_ => cx.span_bug(sp, ~"wrong-structured lhs")
|
_ => cx.span_bug(sp, ~"wrong-structured lhs")
|
||||||
};
|
};
|
||||||
|
|
||||||
let rhses = match *argument_map.get(&rhs_nm) {
|
let rhses = match *argument_map.get(&rhs_nm) {
|
||||||
@matched_seq(ref s, _) => /* FIXME (#2543) */ copy *s,
|
@matched_seq(ref s, _) => /* FIXME (#2543) */ @copy *s,
|
||||||
_ => cx.span_bug(sp, ~"wrong-structured rhs")
|
_ => cx.span_bug(sp, ~"wrong-structured rhs")
|
||||||
};
|
};
|
||||||
|
|
||||||
// Given `lhses` and `rhses`, this is the new macro we create
|
// Given `lhses` and `rhses`, this is the new macro we create
|
||||||
fn generic_extension(cx: @ext_ctxt, sp: span, name: ident,
|
fn generic_extension(cx: @ext_ctxt, sp: span, name: ident,
|
||||||
arg: &[ast::token_tree],
|
arg: &[ast::token_tree],
|
||||||
lhses: ~[@named_match], rhses: ~[@named_match])
|
lhses: &[@named_match], rhses: &[@named_match])
|
||||||
-> MacResult {
|
-> MacResult {
|
||||||
|
|
||||||
if cx.trace_macros() {
|
if cx.trace_macros() {
|
||||||
@ -93,7 +93,7 @@ pub fn add_new_extension(cx: @ext_ctxt,
|
|||||||
let s_d = cx.parse_sess().span_diagnostic;
|
let s_d = cx.parse_sess().span_diagnostic;
|
||||||
let itr = cx.parse_sess().interner;
|
let itr = cx.parse_sess().interner;
|
||||||
|
|
||||||
for lhses.eachi() |i, lhs| { // try each arm's matchers
|
for lhses.eachi |i, lhs| { // try each arm's matchers
|
||||||
match *lhs {
|
match *lhs {
|
||||||
@matched_nonterminal(nt_matchers(ref mtcs)) => {
|
@matched_nonterminal(nt_matchers(ref mtcs)) => {
|
||||||
// `none` is because we're not interpolating
|
// `none` is because we're not interpolating
|
||||||
@ -103,7 +103,7 @@ pub fn add_new_extension(cx: @ext_ctxt,
|
|||||||
None,
|
None,
|
||||||
vec::to_owned(arg)
|
vec::to_owned(arg)
|
||||||
) as @reader;
|
) as @reader;
|
||||||
match parse(cx.parse_sess(), cx.cfg(), arg_rdr, (*mtcs)) {
|
match parse(cx.parse_sess(), cx.cfg(), arg_rdr, *mtcs) {
|
||||||
success(named_matches) => {
|
success(named_matches) => {
|
||||||
let rhs = match rhses[i] {
|
let rhs = match rhses[i] {
|
||||||
// okay, what's your transcriber?
|
// okay, what's your transcriber?
|
||||||
@ -146,7 +146,7 @@ pub fn add_new_extension(cx: @ext_ctxt,
|
|||||||
}
|
}
|
||||||
|
|
||||||
let exp: @fn(@ext_ctxt, span, &[ast::token_tree]) -> MacResult =
|
let exp: @fn(@ext_ctxt, span, &[ast::token_tree]) -> MacResult =
|
||||||
|cx, sp, arg| generic_extension(cx, sp, name, arg, lhses, rhses);
|
|cx, sp, arg| generic_extension(cx, sp, name, arg, *lhses, *rhses);
|
||||||
|
|
||||||
return MRDef(MacroDef{
|
return MRDef(MacroDef{
|
||||||
name: copy *cx.parse_sess().interner.get(name),
|
name: copy *cx.parse_sess().interner.get(name),
|
||||||
|
@ -91,11 +91,11 @@ pub fn dup_tt_reader(r: @mut TtReader) -> @mut TtReader {
|
|||||||
sp_diag: r.sp_diag,
|
sp_diag: r.sp_diag,
|
||||||
interner: r.interner,
|
interner: r.interner,
|
||||||
stack: dup_tt_frame(r.stack),
|
stack: dup_tt_frame(r.stack),
|
||||||
interpolations: r.interpolations,
|
|
||||||
repeat_idx: copy r.repeat_idx,
|
repeat_idx: copy r.repeat_idx,
|
||||||
repeat_len: copy r.repeat_len,
|
repeat_len: copy r.repeat_len,
|
||||||
cur_tok: copy r.cur_tok,
|
cur_tok: copy r.cur_tok,
|
||||||
cur_span: r.cur_span
|
cur_span: r.cur_span,
|
||||||
|
interpolations: copy r.interpolations,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -127,7 +127,7 @@ enum lis {
|
|||||||
lis_unconstrained, lis_constraint(uint, ident), lis_contradiction(~str)
|
lis_unconstrained, lis_constraint(uint, ident), lis_contradiction(~str)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn lockstep_iter_size(t: token_tree, r: &mut TtReader) -> lis {
|
fn lockstep_iter_size(t: &token_tree, r: &mut TtReader) -> lis {
|
||||||
fn lis_merge(lhs: lis, rhs: lis, r: &mut TtReader) -> lis {
|
fn lis_merge(lhs: lis, rhs: lis, r: &mut TtReader) -> lis {
|
||||||
match lhs {
|
match lhs {
|
||||||
lis_unconstrained => copy rhs,
|
lis_unconstrained => copy rhs,
|
||||||
@ -146,10 +146,10 @@ fn lockstep_iter_size(t: token_tree, r: &mut TtReader) -> lis {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
match t {
|
match *t {
|
||||||
tt_delim(ref tts) | tt_seq(_, ref tts, _, _) => {
|
tt_delim(ref tts) | tt_seq(_, ref tts, _, _) => {
|
||||||
vec::foldl(lis_unconstrained, (*tts), |lis, tt| {
|
vec::foldl(lis_unconstrained, *tts, |lis, tt| {
|
||||||
let lis2 = lockstep_iter_size(*tt, r);
|
let lis2 = lockstep_iter_size(tt, r);
|
||||||
lis_merge(lis, lis2, r)
|
lis_merge(lis, lis2, r)
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
@ -230,7 +230,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
|
|||||||
}
|
}
|
||||||
tt_seq(sp, copy tts, copy sep, zerok) => {
|
tt_seq(sp, copy tts, copy sep, zerok) => {
|
||||||
let t = tt_seq(sp, copy tts, copy sep, zerok);
|
let t = tt_seq(sp, copy tts, copy sep, zerok);
|
||||||
match lockstep_iter_size(t, r) {
|
match lockstep_iter_size(&t, r) {
|
||||||
lis_unconstrained => {
|
lis_unconstrained => {
|
||||||
r.sp_diag.span_fatal(
|
r.sp_diag.span_fatal(
|
||||||
sp, /* blame macro writer */
|
sp, /* blame macro writer */
@ -44,7 +44,7 @@ pub trait ast_fold {
pub struct AstFoldFns {
//unlike the others, item_ is non-trivial
fold_crate: @fn(&crate_, span, @ast_fold) -> (crate_, span),
-fold_view_item: @fn(view_item_, @ast_fold) -> view_item_,
+fold_view_item: @fn(&view_item_, @ast_fold) -> view_item_,
fold_foreign_item: @fn(@foreign_item, @ast_fold) -> @foreign_item,
fold_item: @fn(@item, @ast_fold) -> Option<@item>,
fold_struct_field: @fn(@struct_field, @ast_fold) -> @struct_field,
@ -112,7 +112,7 @@ fn fold_arg_(a: arg, fld: @ast_fold) -> arg {
}
}
//used in noop_fold_expr, and possibly elsewhere in the future
-fn fold_mac_(m: mac, fld: @ast_fold) -> mac {
+fn fold_mac_(m: &mac, fld: @ast_fold) -> mac {
spanned {
node: match m.node { mac_invoc_tt(*) => copy m.node },
span: fld.new_span(m.span),
@ -174,8 +174,8 @@ pub fn noop_fold_crate(c: &crate_, fld: @ast_fold) -> crate_ {
}
}

-fn noop_fold_view_item(vi: view_item_, _fld: @ast_fold) -> view_item_ {
+fn noop_fold_view_item(vi: &view_item_, _fld: @ast_fold) -> view_item_ {
-return /* FIXME (#2543) */ copy vi;
+return /* FIXME (#2543) */ copy *vi;
}


@ -351,7 +351,7 @@ fn noop_fold_stmt(s: &stmt_, fld: @ast_fold) -> stmt_ {
stmt_decl(d, nid) => stmt_decl(fld.fold_decl(d), fld.new_id(nid)),
stmt_expr(e, nid) => stmt_expr(fld.fold_expr(e), fld.new_id(nid)),
stmt_semi(e, nid) => stmt_semi(fld.fold_expr(e), fld.new_id(nid)),
-stmt_mac(ref mac, semi) => stmt_mac(fold_mac((*mac)), semi)
+stmt_mac(ref mac, semi) => stmt_mac(fold_mac(mac), semi)
}
}

@ -540,14 +540,14 @@ pub fn noop_fold_expr(e: &expr_, fld: @ast_fold) -> expr_ {
fld.fold_expr(e)
)
}
-expr_inline_asm(a) => {
+expr_inline_asm(ref a) => {
expr_inline_asm(inline_asm {
inputs: a.inputs.map(|&(c, in)| (c, fld.fold_expr(in))),
outputs: a.outputs.map(|&(c, out)| (c, fld.fold_expr(out))),
-.. a
+.. copy *a
})
}
-expr_mac(ref mac) => expr_mac(fold_mac((*mac))),
+expr_mac(ref mac) => expr_mac(fold_mac(mac)),
expr_struct(path, ref fields, maybe_expr) => {
expr_struct(
fld.fold_path(path),
@ -590,12 +590,12 @@ pub fn noop_fold_ty(t: &ty_, fld: @ast_fold) -> ty_ {
region: f.region,
onceness: f.onceness,
decl: fold_fn_decl(&f.decl, fld),
-lifetimes: f.lifetimes,
+lifetimes: copy f.lifetimes,
})
}
ty_bare_fn(ref f) => {
ty_bare_fn(@TyBareFn {
-lifetimes: f.lifetimes,
+lifetimes: copy f.lifetimes,
purity: f.purity,
abis: f.abis,
decl: fold_fn_decl(&f.decl, fld)
@ -609,7 +609,7 @@ pub fn noop_fold_ty(t: &ty_, fld: @ast_fold) -> ty_ {
fld.fold_expr(e)
)
}
-ty_mac(ref mac) => ty_mac(fold_mac(*mac))
+ty_mac(ref mac) => ty_mac(fold_mac(mac))
}
}

@ -740,7 +740,7 @@ impl ast_fold for AstFoldFns {
fn fold_view_item(@self, x: @view_item) ->
@view_item {
@ast::view_item {
-node: (self.fold_view_item)(x.node, self as @ast_fold),
+node: (self.fold_view_item)(&x.node, self as @ast_fold),
attrs: vec::map(x.attrs, |a|
fold_attribute_(*a, self as @ast_fold)),
vis: x.vis,
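The fold_* hunks above all make the same move: callbacks that used to take an AST node by value (and so quietly copied it at every call) now borrow it, and the few places that genuinely need an owned node say `copy` out loud. The 0.7-pre syntax (`@fn`, `~str`, `copy`) no longer compiles, so the sketch below restates the idea in current Rust; `ViewItem` and the function names are illustrative stand-ins, not the real libsyntax items.

#[derive(Clone, Debug)]
struct ViewItem {
    name: String,
}

// Old shape: the node is taken by value, so every call site has to give
// up (or implicitly copy) an owned ViewItem.
fn fold_view_item_owned(vi: ViewItem) -> ViewItem {
    vi
}

// New shape: borrow the node and clone only where an owned result is
// actually built, which is the explicit `copy` in the patch above.
fn fold_view_item(vi: &ViewItem) -> ViewItem {
    ViewItem { name: vi.name.clone() }
}

fn main() {
    let item = ViewItem { name: "std".to_string() };
    let folded = fold_view_item(&item);        // `item` is only borrowed here
    let consumed = fold_view_item_owned(item); // `item` is moved here
    println!("{:?} / {:?}", folded, consumed);
}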
@ -192,7 +192,7 @@ fn read_line_comments(rdr: @mut StringReader, code_to_the_left: bool,

// FIXME #3961: This is not the right way to convert string byte
// offsets to characters.
-fn all_whitespace(s: ~str, begin: uint, end: uint) -> bool {
+fn all_whitespace(s: &str, begin: uint, end: uint) -> bool {
let mut i: uint = begin;
while i != end {
if !is_whitespace(s[i] as char) { return false; } i += 1u;
@ -351,15 +351,14 @@ mod test {
use core::option::None;
use core::int;
use core::num::NumCast;
-use core::path::Path;
-use codemap::{dummy_sp, CodeMap, span, BytePos, spanned};
+use codemap::{CodeMap, span, BytePos, spanned};
use opt_vec;
use ast;
use abi;
use ast_util::mk_ident;
use parse::parser::Parser;
-use parse::token::{ident_interner, mk_ident_interner, mk_fresh_ident_interner};
+use parse::token::{ident_interner, mk_fresh_ident_interner};
-use diagnostic::{span_handler, mk_span_handler, mk_handler, Emitter};
+use diagnostic::{mk_span_handler, mk_handler};

// add known names to interner for testing
fn mk_testing_interner() -> @ident_interner {
@ -408,7 +407,7 @@ mod test {

// map a string to tts, return the tt without its parsesess
fn string_to_tts_only(source_str : @~str) -> ~[ast::token_tree] {
-let (tts,ps) = string_to_tts_t(source_str);
+let (tts,_ps) = string_to_tts_t(source_str);
tts
}

@ -483,7 +482,7 @@ mod test {
}*/

#[test] fn string_to_tts_1 () {
-let (tts,ps) = string_to_tts_t(@~"fn a (b : int) { b; }");
+let (tts,_ps) = string_to_tts_t(@~"fn a (b : int) { b; }");
assert_eq!(to_json_str(@tts),
~"[\
[\"tt_tok\",null,[\"IDENT\",\"fn\",false]],\
@ -548,7 +547,7 @@ mod test {
}

fn parser_done(p: Parser){
-assert_eq!(*p.token,token::EOF);
+assert_eq!(copy *p.token,token::EOF);
}

#[test] fn parse_ident_pat () {
@ -253,9 +253,9 @@ pub impl Parser {
}
}

-fn token_is_obsolete_ident(&self, ident: &str, token: Token) -> bool {
+fn token_is_obsolete_ident(&self, ident: &str, token: &Token) -> bool {
-match token {
+match *token {
-token::IDENT(copy sid, _) => {
+token::IDENT(sid, _) => {
str::eq_slice(*self.id_to_str(sid), ident)
}
_ => false
@ -263,7 +263,7 @@ pub impl Parser {
}

fn is_obsolete_ident(&self, ident: &str) -> bool {
-self.token_is_obsolete_ident(ident, *self.token)
+self.token_is_obsolete_ident(ident, self.token)
}

fn eat_obsolete_ident(&self, ident: &str) -> bool {
@ -289,7 +289,7 @@ pub impl Parser {
fn try_parse_obsolete_with(&self) -> bool {
if *self.token == token::COMMA
&& self.token_is_obsolete_ident("with",
-self.look_ahead(1u)) {
+&self.look_ahead(1u)) {
self.bump();
}
if self.eat_obsolete_ident("with") {
@ -301,13 +301,13 @@ pub impl Parser {
}
}

-fn try_parse_obsolete_priv_section(&self, attrs: ~[attribute]) -> bool {
+fn try_parse_obsolete_priv_section(&self, attrs: &[attribute]) -> bool {
if self.is_keyword(&~"priv") && self.look_ahead(1) == token::LBRACE {
self.obsolete(copy *self.span, ObsoletePrivSection);
self.eat_keyword(&~"priv");
self.bump();
while *self.token != token::RBRACE {
-self.parse_single_struct_field(ast::private, attrs);
+self.parse_single_struct_field(ast::private, attrs.to_owned());
}
self.bump();
true
@ -708,7 +708,7 @@ pub impl Parser {
self.obsolete(*self.last_span, ObsoleteBareFnType);
result
} else if *self.token == token::MOD_SEP
-|| is_ident_or_path(&*self.token) {
+|| is_ident_or_path(self.token) {
// NAMED TYPE
let path = self.parse_path_with_tps(false);
ty_path(path, self.get_id())
@ -1556,9 +1556,12 @@ pub impl Parser {
|p| p.parse_token_tree()
);
let (s, z) = p.parse_sep_and_zerok();
+let seq = match seq {
+spanned { node, _ } => node,
+};
tt_seq(
-mk_sp(sp.lo ,p.span.hi),
+mk_sp(sp.lo, p.span.hi),
-seq.node,
+seq,
s,
z
)
@ -1624,9 +1627,9 @@ pub impl Parser {
token::LBRACE | token::LPAREN | token::LBRACKET => {
self.parse_matcher_subseq(
name_idx,
-*self.token,
+copy *self.token,
// tjc: not sure why we need a copy
-token::flip_delimiter(&*self.token)
+token::flip_delimiter(self.token)
)
}
_ => self.fatal(~"expected open delimiter")
@ -1986,14 +1989,15 @@ pub impl Parser {
// them as the lambda arguments
let e = self.parse_expr_res(RESTRICT_NO_BAR_OR_DOUBLEBAR_OP);
match e.node {
-expr_call(f, args, NoSugar) => {
+expr_call(f, /*bad*/ copy args, NoSugar) => {
let block = self.parse_lambda_block_expr();
let last_arg = self.mk_expr(block.span.lo, block.span.hi,
ctor(block));
let args = vec::append(args, ~[last_arg]);
self.mk_expr(lo.lo, block.span.hi, expr_call(f, args, sugar))
}
-expr_method_call(f, i, tps, args, NoSugar) => {
+expr_method_call(f, i, /*bad*/ copy tps,
+/*bad*/ copy args, NoSugar) => {
let block = self.parse_lambda_block_expr();
let last_arg = self.mk_expr(block.span.lo, block.span.hi,
ctor(block));
@ -2001,7 +2005,7 @@ pub impl Parser {
self.mk_expr(lo.lo, block.span.hi,
expr_method_call(f, i, tps, args, sugar))
}
-expr_field(f, i, tps) => {
+expr_field(f, i, /*bad*/ copy tps) => {
let block = self.parse_lambda_block_expr();
let last_arg = self.mk_expr(block.span.lo, block.span.hi,
ctor(block));
@ -2259,7 +2263,7 @@ pub impl Parser {
let lo = self.span.lo;
let mut hi = self.span.hi;
let pat;
-match *self.token {
+match /*bad*/ copy *self.token {
// parse _
token::UNDERSCORE => { self.bump(); pat = pat_wild; }
// parse @pat
@ -2373,8 +2377,8 @@ pub impl Parser {
self.expect(&token::RBRACKET);
pat = ast::pat_vec(before, slice, after);
}
-tok => {
+ref tok => {
-if !is_ident_or_path(&tok)
+if !is_ident_or_path(tok)
|| self.is_keyword(&~"true")
|| self.is_keyword(&~"false")
{
@ -2384,7 +2388,7 @@ pub impl Parser {
// preceded by unary-minus) or identifiers.
let val = self.parse_literal_maybe_minus();
if self.eat(&token::DOTDOT) {
-let end = if is_ident_or_path(&tok) {
+let end = if is_ident_or_path(tok) {
let path = self.parse_path_with_tps(true);
let hi = self.span.hi;
self.mk_expr(lo, hi, expr_path(path))
@ -2897,7 +2901,7 @@ pub impl Parser {
loop;
}

-if is_ident_or_path(&*self.token) {
+if is_ident_or_path(self.token) {
self.obsolete(*self.span,
ObsoleteTraitBoundSeparator);
}
@ -3531,6 +3535,7 @@ pub impl Parser {
fn parse_item_mod(&self, outer_attrs: ~[ast::attribute]) -> item_info {
let id_span = *self.span;
let id = self.parse_ident();
+let merge = ::attr::first_attr_value_str_by_name(outer_attrs, "merge");
let info_ = if *self.token == token::SEMI {
self.bump();
// This mod is in an external file. Let's go get it!
@ -3550,7 +3555,7 @@ pub impl Parser {
// (int-template, iter-trait). If there's a 'merge' attribute
// on the mod, then we'll go and suck in another file and merge
// its contents
-match ::attr::first_attr_value_str_by_name(outer_attrs, ~"merge") {
+match merge {
Some(path) => {
let prefix = Path(
self.sess.cm.span_to_filename(*self.span));
@ -3636,10 +3641,7 @@ pub impl Parser {
new_sub_parser_from_file(self.sess, copy self.cfg,
&full_path, id_sp);
let (inner, next) = p0.parse_inner_attrs_and_next();
-let mod_attrs = vec::append(
-/*bad*/ copy outer_attrs,
-inner
-);
+let mod_attrs = vec::append(outer_attrs, inner);
let first_item_outer_attrs = next;
let m0 = p0.parse_mod_items(token::EOF, first_item_outer_attrs);
return (ast::item_mod(m0), mod_attrs);
@ -4105,7 +4107,8 @@ pub impl Parser {
}
if self.eat_keyword(&~"mod") {
// MODULE ITEM
-let (ident, item_, extra_attrs) = self.parse_item_mod(attrs);
+let (ident, item_, extra_attrs) =
+self.parse_item_mod(/*bad*/ copy attrs);
return iovi_item(self.mk_item(lo, self.last_span.hi, ident, item_,
visibility,
maybe_append(attrs, extra_attrs)));
@ -457,9 +457,9 @@ pub impl Printer {
}
}
}
-fn print_str(&mut self, s: ~str) {
+fn print_str(&mut self, s: &str) {
while self.pending_indentation > 0 {
-(*self.out).write_str(~" ");
+(*self.out).write_str(" ");
self.pending_indentation -= 1;
}
(*self.out).write_str(s);
@ -562,16 +562,16 @@ pub fn end(p: @mut Printer) { p.pretty_print(END); }

pub fn eof(p: @mut Printer) { p.pretty_print(EOF); }

-pub fn word(p: @mut Printer, wrd: ~str) {
+pub fn word(p: @mut Printer, wrd: &str) {
-p.pretty_print(STRING(@/*bad*/ copy wrd, wrd.len() as int));
+p.pretty_print(STRING(@/*bad*/ wrd.to_owned(), wrd.len() as int));
}

-pub fn huge_word(p: @mut Printer, wrd: ~str) {
+pub fn huge_word(p: @mut Printer, wrd: &str) {
-p.pretty_print(STRING(@/*bad*/ copy wrd, size_infinity));
+p.pretty_print(STRING(@/*bad*/ wrd.to_owned(), size_infinity));
}

-pub fn zero_word(p: @mut Printer, wrd: ~str) {
+pub fn zero_word(p: @mut Printer, wrd: &str) {
-p.pretty_print(STRING(@/*bad*/ copy wrd, 0));
+p.pretty_print(STRING(@/*bad*/ wrd.to_owned(), 0));
}

pub fn spaces(p: @mut Printer, n: uint) { break_offset(p, n, 0); }
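word, huge_word, and zero_word above now borrow their argument as &str and call to_owned() only at the one point where the printer actually stores the string, instead of demanding an owned ~str from every caller. Roughly the same signature change in current Rust; this Printer is a toy stand-in for the pretty-printer's token stream, not the real type.

struct Printer {
    tokens: Vec<String>,
}

impl Printer {
    // Borrow the word: callers can pass a literal or a &String without
    // giving up ownership; the copy happens once, where it is stored.
    fn word(&mut self, wrd: &str) {
        self.tokens.push(wrd.to_owned());
    }
}

fn main() {
    let mut p = Printer { tokens: Vec::new() };
    let kw = String::from("enum");
    p.word("pub ");
    p.word(&kw); // `kw` is still usable afterwards
    println!("{} tokens, last = {:?}", p.tokens.len(), p.tokens.last());
}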
@ -156,7 +156,7 @@ pub fn lifetime_to_str(e: &ast::Lifetime, intr: @ident_interner) -> ~str {
}

pub fn tt_to_str(tt: ast::token_tree, intr: @ident_interner) -> ~str {
-to_str(tt, print_tt, intr)
+to_str(&tt, print_tt, intr)
}

pub fn tts_to_str(tts: &[ast::token_tree], intr: @ident_interner) -> ~str {
@ -213,7 +213,7 @@ pub fn attribute_to_str(attr: ast::attribute, intr: @ident_interner) -> ~str {
to_str(attr, print_attribute, intr)
}

-pub fn variant_to_str(var: ast::variant, intr: @ident_interner) -> ~str {
+pub fn variant_to_str(var: &ast::variant, intr: @ident_interner) -> ~str {
to_str(var, print_variant, intr)
}

@ -229,9 +229,9 @@ pub fn box(s: @ps, u: uint, b: pp::breaks) {

pub fn nbsp(s: @ps) { word(s.s, ~" "); }

-pub fn word_nbsp(s: @ps, w: ~str) { word(s.s, w); nbsp(s); }
+pub fn word_nbsp(s: @ps, w: &str) { word(s.s, w); nbsp(s); }

-pub fn word_space(s: @ps, w: ~str) { word(s.s, w); space(s.s); }
+pub fn word_space(s: @ps, w: &str) { word(s.s, w); space(s.s); }

pub fn popen(s: @ps) { word(s.s, ~"("); }

@ -346,7 +346,7 @@ pub fn commasep_exprs(s: @ps, b: breaks, exprs: &[@ast::expr]) {
commasep_cmnt(s, b, exprs, print_expr, expr_span);
}

-pub fn print_mod(s: @ps, _mod: &ast::_mod, attrs: ~[ast::attribute]) {
+pub fn print_mod(s: @ps, _mod: &ast::_mod, attrs: &[ast::attribute]) {
print_inner_attributes(s, attrs);
for _mod.view_items.each |vitem| {
print_view_item(s, *vitem);
@ -355,7 +355,7 @@ pub fn print_mod(s: @ps, _mod: &ast::_mod, attrs: ~[ast::attribute]) {
}

pub fn print_foreign_mod(s: @ps, nmod: &ast::foreign_mod,
-attrs: ~[ast::attribute]) {
+attrs: &[ast::attribute]) {
print_inner_attributes(s, attrs);
for nmod.view_items.each |vitem| {
print_view_item(s, *vitem);
@ -539,7 +539,7 @@ pub fn print_item(s: @ps, item: @ast::item) {
ast::item_enum(ref enum_definition, ref params) => {
print_enum_def(
s,
-*enum_definition,
+enum_definition,
params,
item.ident,
item.span,
@ -621,7 +621,7 @@ fn print_trait_ref(s: @ps, t: &ast::trait_ref) {
print_path(s, t.path, false);
}

-pub fn print_enum_def(s: @ps, enum_definition: ast::enum_def,
+pub fn print_enum_def(s: @ps, enum_definition: &ast::enum_def,
generics: &ast::Generics, ident: ast::ident,
span: codemap::span, visibility: ast::visibility) {
head(s, visibility_qualified(visibility, ~"enum"));
@ -632,7 +632,7 @@ pub fn print_enum_def(s: @ps, enum_definition: ast::enum_def,
}

pub fn print_variants(s: @ps,
-variants: ~[ast::variant],
+variants: &[ast::variant],
span: codemap::span) {
bopen(s);
for variants.each |v| {
@ -640,7 +640,7 @@ pub fn print_variants(s: @ps,
maybe_print_comment(s, v.span.lo);
print_outer_attributes(s, v.node.attrs);
ibox(s, indent_unit);
-print_variant(s, *v);
+print_variant(s, v);
word(s.s, ~",");
end(s);
maybe_print_trailing_comment(s, v.span, None);
@ -727,15 +727,15 @@ pub fn print_struct(s: @ps,
/// appropriate macro, transcribe back into the grammar we just parsed from,
/// and then pretty-print the resulting AST nodes (so, e.g., we print
/// expression arguments as expressions). It can be done! I think.
-pub fn print_tt(s: @ps, tt: ast::token_tree) {
+pub fn print_tt(s: @ps, tt: &ast::token_tree) {
-match tt {
+match *tt {
ast::tt_delim(ref tts) => print_tts(s, *tts),
ast::tt_tok(_, ref tk) => {
word(s.s, parse::token::to_str(s.intr, tk));
}
ast::tt_seq(_, ref tts, ref sep, zerok) => {
word(s.s, ~"$(");
-for (*tts).each() |tt_elt| { print_tt(s, *tt_elt); }
+for (*tts).each() |tt_elt| { print_tt(s, tt_elt); }
word(s.s, ~")");
match (*sep) {
Some(ref tk) => word(s.s, parse::token::to_str(s.intr, tk)),
@ -756,12 +756,12 @@ pub fn print_tts(s: @ps, tts: &[ast::token_tree]) {
if i != 0 {
space(s.s);
}
-print_tt(s, *tt);
+print_tt(s, tt);
}
end(s);
}

-pub fn print_variant(s: @ps, v: ast::variant) {
+pub fn print_variant(s: @ps, v: &ast::variant) {
print_visibility(s, v.node.vis);
match v.node.kind {
ast::tuple_variant_kind(ref args) => {
@ -819,7 +819,7 @@ pub fn print_method(s: @ps, meth: @ast::method) {
print_block_with_attrs(s, &meth.body, meth.attrs);
}

-pub fn print_outer_attributes(s: @ps, attrs: ~[ast::attribute]) {
+pub fn print_outer_attributes(s: @ps, attrs: &[ast::attribute]) {
let mut count = 0;
for attrs.each |attr| {
match attr.node.style {
@ -830,7 +830,7 @@ pub fn print_outer_attributes(s: @ps, attrs: ~[ast::attribute]) {
if count > 0 { hardbreak_if_not_bol(s); }
}

-pub fn print_inner_attributes(s: @ps, attrs: ~[ast::attribute]) {
+pub fn print_inner_attributes(s: @ps, attrs: &[ast::attribute]) {
let mut count = 0;
for attrs.each |attr| {
match attr.node.style {
@ -879,7 +879,7 @@ pub fn print_stmt(s: @ps, st: &ast::stmt) {
}
ast::stmt_mac(ref mac, semi) => {
space_if_not_bol(s);
-print_mac(s, (*mac));
+print_mac(s, mac);
if semi { word(s.s, ~";"); }
}
}
@ -892,18 +892,18 @@ pub fn print_block(s: @ps, blk: &ast::blk) {
}

pub fn print_block_unclosed(s: @ps, blk: &ast::blk) {
-print_possibly_embedded_block_(s, blk, block_normal, indent_unit, ~[],
+print_possibly_embedded_block_(s, blk, block_normal, indent_unit, &[],
false);
}

pub fn print_block_unclosed_indent(s: @ps, blk: &ast::blk, indented: uint) {
-print_possibly_embedded_block_(s, blk, block_normal, indented, ~[],
+print_possibly_embedded_block_(s, blk, block_normal, indented, &[],
false);
}

pub fn print_block_with_attrs(s: @ps,
blk: &ast::blk,
-attrs: ~[ast::attribute]) {
+attrs: &[ast::attribute]) {
print_possibly_embedded_block_(s, blk, block_normal, indent_unit, attrs,
true);
}
@ -915,14 +915,14 @@ pub fn print_possibly_embedded_block(s: @ps,
embedded: embed_type,
indented: uint) {
print_possibly_embedded_block_(
-s, blk, embedded, indented, ~[], true);
+s, blk, embedded, indented, &[], true);
}

pub fn print_possibly_embedded_block_(s: @ps,
blk: &ast::blk,
embedded: embed_type,
indented: uint,
-attrs: ~[ast::attribute],
+attrs: &[ast::attribute],
close_box: bool) {
match blk.node.rules {
ast::unsafe_blk => word_space(s, ~"unsafe"),
@ -994,7 +994,7 @@ pub fn print_if(s: @ps, test: @ast::expr, blk: &ast::blk,
do_else(s, elseopt);
}

-pub fn print_mac(s: @ps, m: ast::mac) {
+pub fn print_mac(s: @ps, m: &ast::mac) {
match m.node {
ast::mac_invoc_tt(pth, ref tts) => {
print_path(s, pth, false);
@ -1387,7 +1387,7 @@ pub fn print_expr(s: @ps, expr: @ast::expr) {
print_expr(s, expr);
pclose(s);
}
-ast::expr_inline_asm(a) => {
+ast::expr_inline_asm(ref a) => {
if a.volatile {
word(s.s, ~"__volatile__ asm!");
} else {
@ -1415,7 +1415,7 @@ pub fn print_expr(s: @ps, expr: @ast::expr) {
print_string(s, *a.clobbers);
pclose(s);
}
-ast::expr_mac(ref m) => print_mac(s, (*m)),
+ast::expr_mac(ref m) => print_mac(s, m),
ast::expr_paren(e) => {
popen(s);
print_expr(s, e);
@ -1559,7 +1559,7 @@ pub fn print_pat(s: @ps, pat: @ast::pat, refutable: bool) {
}
}
}
-ast::pat_struct(path, fields, etc) => {
+ast::pat_struct(path, ref fields, etc) => {
print_path(s, path, true);
word(s.s, ~"{");
fn print_field(s: @ps, f: ast::field_pat, refutable: bool) {
@ -1570,18 +1570,18 @@ pub fn print_pat(s: @ps, pat: @ast::pat, refutable: bool) {
end(s);
}
fn get_span(f: ast::field_pat) -> codemap::span { return f.pat.span; }
-commasep_cmnt(s, consistent, fields,
+commasep_cmnt(s, consistent, *fields,
|s, f| print_field(s,f,refutable),
get_span);
if etc {
-if vec::len(fields) != 0u { word_space(s, ~","); }
+if fields.len() != 0u { word_space(s, ~","); }
word(s.s, ~"_");
}
word(s.s, ~"}");
}
-ast::pat_tup(elts) => {
+ast::pat_tup(ref elts) => {
popen(s);
-commasep(s, inconsistent, elts, |s, p| print_pat(s, p, refutable));
+commasep(s, inconsistent, *elts, |s, p| print_pat(s, p, refutable));
if elts.len() == 1 {
word(s.s, ~",");
}
@ -1606,9 +1606,9 @@ pub fn print_pat(s: @ps, pat: @ast::pat, refutable: bool) {
word(s.s, ~"..");
print_expr(s, end);
}
-ast::pat_vec(before, slice, after) => {
+ast::pat_vec(ref before, slice, ref after) => {
word(s.s, ~"[");
-do commasep(s, inconsistent, before) |s, p| {
+do commasep(s, inconsistent, *before) |s, p| {
print_pat(s, p, refutable);
}
for slice.each |&p| {
@ -1617,7 +1617,7 @@ pub fn print_pat(s: @ps, pat: @ast::pat, refutable: bool) {
print_pat(s, p, refutable);
if !after.is_empty() { word_space(s, ~","); }
}
-do commasep(s, inconsistent, after) |s, p| {
+do commasep(s, inconsistent, *after) |s, p| {
print_pat(s, p, refutable);
}
word(s.s, ~"]");
@ -1832,12 +1832,12 @@ pub fn print_view_item(s: @ps, item: @ast::view_item) {
print_outer_attributes(s, item.attrs);
print_visibility(s, item.vis);
match item.node {
-ast::view_item_extern_mod(id, mta, _) => {
+ast::view_item_extern_mod(id, ref mta, _) => {
head(s, ~"extern mod");
print_ident(s, id);
if !mta.is_empty() {
popen(s);
-commasep(s, consistent, mta, print_meta_item);
+commasep(s, consistent, *mta, print_meta_item);
pclose(s);
}
}
@ -1960,7 +1960,7 @@ pub fn maybe_print_trailing_comment(s: @ps, span: codemap::span,
match next_pos { None => (), Some(p) => next = p }
if span.hi < (*cmnt).pos && (*cmnt).pos < next &&
span_line.line == comment_line.line {
-print_comment(s, (*cmnt));
+print_comment(s, cmnt);
s.cur_cmnt_and_lit.cur_cmnt += 1u;
}
}
@ -1975,7 +1975,7 @@ pub fn print_remaining_comments(s: @ps) {
loop {
match next_comment(s) {
Some(ref cmnt) => {
-print_comment(s, (*cmnt));
+print_comment(s, cmnt);
s.cur_cmnt_and_lit.cur_cmnt += 1u;
}
_ => break
@ -2055,7 +2055,7 @@ pub fn maybe_print_comment(s: @ps, pos: BytePos) {
match next_comment(s) {
Some(ref cmnt) => {
if (*cmnt).pos < pos {
-print_comment(s, (*cmnt));
+print_comment(s, cmnt);
s.cur_cmnt_and_lit.cur_cmnt += 1u;
} else { break; }
}
@ -2064,7 +2064,7 @@ pub fn maybe_print_comment(s: @ps, pos: BytePos) {
}
}

-pub fn print_comment(s: @ps, cmnt: comments::cmnt) {
+pub fn print_comment(s: @ps, cmnt: &comments::cmnt) {
match cmnt.style {
comments::mixed => {
assert!((vec::len(cmnt.lines) == 1u));
@ -2274,7 +2274,7 @@ mod test {
vis: ast::public,
});

-let varstr = variant_to_str(var,mock_interner);
+let varstr = variant_to_str(&var,mock_interner);
assert_eq!(&varstr,&~"pub principal_skinner");
}
}
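Throughout the printing hunks above, parameters such as attrs and variants drop from owned ~[T] vectors to &[T] slices, so callers lend their data instead of copying it, and the old ~[] arguments become &[]. Below is a small sketch of that signature change in today's Rust; Attribute and print_outer_attributes here are placeholders rather than the actual libsyntax definitions.

#[derive(Debug)]
struct Attribute {
    name: &'static str,
}

// Borrow a slice instead of taking an owned vector: a Vec, an array,
// or an empty `&[]` literal all work, and nothing is copied.
fn print_outer_attributes(attrs: &[Attribute]) {
    for attr in attrs {
        println!("#[{}]", attr.name);
    }
}

fn main() {
    let attrs = vec![Attribute { name: "inline" }, Attribute { name: "cold" }];
    print_outer_attributes(&attrs); // `attrs` is still owned by main
    print_outer_attributes(&[]);    // the `~[]` to `&[]` case in the patch
}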
@ -20,7 +20,6 @@
#[license = "MIT/ASL2"];
#[crate_type = "lib"];

-#[allow(vecs_implicitly_copyable)];
#[allow(non_camel_case_types)];
#[deny(deprecated_pattern)];

@ -179,7 +179,7 @@ pub fn visit_item<E: Copy>(i: @item, e: E, v: vt<E>) {
item_enum(ref enum_definition, ref tps) => {
(v.visit_generics)(tps, e, v);
visit_enum_def(
-*enum_definition,
+enum_definition,
tps,
e,
v
@ -206,11 +206,11 @@ pub fn visit_item<E: Copy>(i: @item, e: E, v: vt<E>) {
(v.visit_trait_method)(m, e, v);
}
}
-item_mac(ref m) => visit_mac((*m), e, v)
+item_mac(ref m) => visit_mac(m, e, v)
}
}

-pub fn visit_enum_def<E: Copy>(enum_definition: ast::enum_def,
+pub fn visit_enum_def<E: Copy>(enum_definition: &ast::enum_def,
tps: &Generics,
e: E,
v: vt<E>) {
@ -422,7 +422,7 @@ pub fn visit_stmt<E>(s: @stmt, e: E, v: vt<E>) {
stmt_decl(d, _) => (v.visit_decl)(d, e, v),
stmt_expr(ex, _) => (v.visit_expr)(ex, e, v),
stmt_semi(ex, _) => (v.visit_expr)(ex, e, v),
-stmt_mac(ref mac, _) => visit_mac((*mac), e, v)
+stmt_mac(ref mac, _) => visit_mac(mac, e, v)
}
}

@ -445,7 +445,7 @@ pub fn visit_exprs<E: Copy>(exprs: &[@expr], e: E, v: vt<E>) {
for exprs.each |ex| { (v.visit_expr)(*ex, e, v); }
}

-pub fn visit_mac<E>(_m: mac, _e: E, _v: vt<E>) {
+pub fn visit_mac<E>(_m: &mac, _e: E, _v: vt<E>) {
/* no user-serviceable parts inside */
}

@ -537,7 +537,7 @@ pub fn visit_expr<E: Copy>(ex: @expr, e: E, v: vt<E>) {
(v.visit_expr)(lv, e, v);
(v.visit_expr)(x, e, v);
}
-expr_mac(ref mac) => visit_mac((*mac), e, v),
+expr_mac(ref mac) => visit_mac(mac, e, v),
expr_paren(x) => (v.visit_expr)(x, e, v),
expr_inline_asm(ref a) => {
for a.inputs.each |&(_, in)| {