From 9187406da5ce8a6b4bfca6ee017e56230be0872c Mon Sep 17 00:00:00 2001 From: Zack Corr Date: Wed, 6 Jun 2012 18:55:44 +1000 Subject: [PATCH] Cargo: Added new source scheme, added sources CLI management, added source dumping --- src/cargo/cargo.rs | 945 ++++++++++++++++++++++++++++++++++++--------- 1 file changed, 752 insertions(+), 193 deletions(-) diff --git a/src/cargo/cargo.rs b/src/cargo/cargo.rs index eee76f24ec2..10a3f87b827 100644 --- a/src/cargo/cargo.rs +++ b/src/cargo/cargo.rs @@ -16,21 +16,21 @@ import vec; import getopts::{optflag, optopt, opt_present}; type package = { - name: str, - uuid: str, - url: str, - method: str, - description: str, - ref: option, - tags: [str] + mut name: str, + mut uuid: str, + mut url: str, + mut method: str, + mut description: str, + mut ref: option, + mut tags: [str] }; type source = { - name: str, - url: str, - sig: option, - key: option, - keyfp: option, + mut name: str, + mut url: str, + mut method: str, + mut key: option, + mut keyfp: option, mut packages: [package] }; @@ -47,12 +47,12 @@ type cargo = { }; type pkg = { - name: str, - vers: str, - uuid: str, - desc: option, - sigs: option, - crate_type: option + mut name: str, + mut vers: str, + mut uuid: str, + mut desc: option, + mut sigs: option, + mut crate_type: option }; type options = { @@ -108,17 +108,16 @@ fn is_uuid(id: str) -> bool { if vec::len(parts) == 5u { let mut correct = 0u; for vec::eachi(parts) { |i, part| + fn is_hex_digit(ch: char) -> bool { + ('0' <= ch && ch <= '9') || + ('a' <= ch && ch <= 'f') || + ('A' <= ch && ch <= 'F') + } if !part.all(is_hex_digit) { ret false; } - fn is_hex_digit(ch: char) -> bool { - ('0' <= ch && ch <= '9') || - ('a' <= ch && ch <= 'f') || - ('A' <= ch && ch <= 'F') - } - alt i { 0u { if str::len(part) == 8u { @@ -156,27 +155,26 @@ fn test_is_uuid() { assert !is_uuid("aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaป"); } -// FIXME: implement URI/URL parsing so we don't have to resort to weak checks +// FIXME: implement url/URL parsing so we don't have to resort to weak checks -fn is_archive_uri(uri: str) -> bool { - str::ends_with(uri, ".tar") - || str::ends_with(uri, ".tar.gz") - || str::ends_with(uri, ".tar.xz") - || str::ends_with(uri, ".tar.bz2") +fn has_archive_extension(p: str) -> bool { + str::ends_with(p, ".tar") || + str::ends_with(p, ".tar.gz") || + str::ends_with(p, ".tar.xz") || + str::ends_with(p, ".tar.bz2") } -fn is_archive_url(url: str) -> bool { - // FIXME: this requires the protocol bit - if we had proper URI parsing, +fn is_archive_path(u: str) -> bool { + has_archive_extension(u) && os::path_exists(u) +} + +fn is_archive_url(u: str) -> bool { + // FIXME: this requires the protocol bit - if we had proper url parsing, // we wouldn't need it - alt str::find_str(url, "://") { - option::some(idx) { - str::ends_with(url, ".tar") - || str::ends_with(url, ".tar.gz") - || str::ends_with(url, ".tar.xz") - || str::ends_with(url, ".tar.bz2") - } - option::none { false } + alt str::find_str(u, "://") { + option::some(i) { has_archive_extension(u) } + _ { false } } } @@ -187,6 +185,15 @@ fn is_git_url(url: str) -> bool { } } +fn assume_source_method(url: str) -> str { + if is_git_url(url) { ret "git"; } + if str::starts_with(url, "file://") || os::path_exists(url) { + ret "file"; + } + + "curl" +} + fn load_link(mis: [@ast::meta_item]) -> (option, option, option) { @@ -253,12 +260,12 @@ fn load_pkg(filename: str) -> option { alt (name, vers, uuid) { (some(name0), some(vers0), some(uuid0)) { some({ - name: name0, - vers: vers0, - uuid: uuid0, - desc: 
desc, - sigs: sigs, - crate_type: crate_type}) + mut name: name0, + mut vers: vers0, + mut uuid: uuid0, + mut desc: desc, + mut sigs: sigs, + mut crate_type: crate_type}) } _ { ret none; } } @@ -283,20 +290,36 @@ fn need_dir(s: str) { } } +fn valid_pkg_name(s: str) -> bool { + fn is_valid_digit(c: char) -> bool { + ('0' <= c && c <= '9') || + ('a' <= c && c <= 'z') || + ('A' <= c && c <= 'Z') || + c == '-' || + c == '_' + } + + s.all(is_valid_digit) +} + fn parse_source(name: str, j: json::json) -> source { + if !valid_pkg_name(name) { + fail #fmt("'%s' is an invalid source name", name); + } + alt j { json::dict(_j) { - let url = alt _j.find("url") { + let mut url = alt _j.find("url") { some(json::string(u)) { u } _ { fail "needed 'url' field in source"; } }; - let sig = alt _j.find("sig") { + let method = alt _j.find("method") { some(json::string(u)) { - some(u) + u } - _ { none } + _ { assume_source_method(url) } }; let key = alt _j.find("key") { some(json::string(u)) { @@ -310,7 +333,14 @@ fn parse_source(name: str, j: json::json) -> source { } _ { none } }; - ret { name: name, url: url, sig: sig, key: key, keyfp: keyfp, + if method == "file" { + url = os::make_absolute(url); + } + ret { mut name: name, + mut url: url, + mut method: method, + mut key: key, + mut keyfp: keyfp, mut packages: [] }; } _ { fail "needed dict value in source"; } @@ -334,7 +364,15 @@ fn try_parse_sources(filename: str, sources: map::hashmap) { fn load_one_source_package(&src: source, p: map::hashmap) { let name = alt p.find("name") { - some(json::string(_n)) { _n } + some(json::string(_n)) { + if !valid_pkg_name(_n) { + warn("malformed source json: " + src.name + ", '" + _n + "'"+ + " is an invalid name (alphanumeric, underscores and" + + " dashes only)"); + ret; + } + _n + } _ { warn("malformed source json: " + src.name + " (missing name)"); ret; @@ -342,7 +380,14 @@ fn load_one_source_package(&src: source, p: map::hashmap) { }; let uuid = alt p.find("uuid") { - some(json::string(_n)) { _n } + some(json::string(_n)) { + if !is_uuid(_n) { + warn("malformed source json: " + src.name + ", '" + _n + "'"+ + " is an invalid uuid"); + ret; + } + _n + } _ { warn("malformed source json: " + src.name + " (missing uuid)"); ret; @@ -392,18 +437,55 @@ fn load_one_source_package(&src: source, p: map::hashmap) { } }; - vec::grow(src.packages, 1u, { - name: name, - uuid: uuid, - url: url, - method: method, - description: description, - ref: ref, - tags: tags - }); + let newpkg = { + mut name: name, + mut uuid: uuid, + mut url: url, + mut method: method, + mut description: description, + mut ref: ref, + mut tags: tags + }; + + for src.packages.each { |pkg| + if pkg.uuid == uuid { + pkg.name = newpkg.name; + pkg.uuid = newpkg.uuid; + pkg.url = newpkg.url; + pkg.method = newpkg.method; + pkg.description = newpkg.description; + pkg.ref = newpkg.ref; + pkg.tags = newpkg.tags; + log(debug, " updated package: " + src.name + "/" + name); + ret; + } + } + + vec::grow(src.packages, 1u, newpkg); log(debug, " loaded package: " + src.name + "/" + name); } +fn load_source_info(&c: cargo, &src: source) { + let dir = path::connect(c.sourcedir, src.name); + let srcfile = path::connect(dir, "source.json"); + if !os::path_exists(srcfile) { ret; } + let srcstr = io::read_whole_file_str(srcfile); + alt json::from_str(result::get(srcstr)) { + ok(json::dict(_s)) { + let o = parse_source(src.name, json::dict(_s)); + + src.key = o.key; + src.keyfp = o.keyfp; + } + ok(_) { + warn("malformed source.json: " + src.name + + "(source info is not a dict)"); 
+ } + err(e) { + warn(#fmt("%s:%u:%u: %s", src.name, e.line, e.col, e.msg)); + } + }; +} fn load_source_packages(&c: cargo, &src: source) { log(debug, "loading source: " + src.name); let dir = path::connect(c.sourcedir, src.name); @@ -425,7 +507,7 @@ fn load_source_packages(&c: cargo, &src: source) { } } ok(_) { - warn("malformed source json: " + src.name + + warn("malformed packages.json: " + src.name + "(packages is not a list)"); } err(e) { @@ -519,7 +601,7 @@ fn configure(opts: options) -> cargo { c } -fn for_each_package(c: cargo, b: fn(source, package)) { +fn for_each_package(&c: cargo, b: fn(source, package)) { for c.sources.each_value {|v| // FIXME (#2280): this temporary shouldn't be // necessary, but seems to be, for borrowing. @@ -563,7 +645,7 @@ fn test_one_crate(_c: cargo, path: str, cf: str) { run_programs(buildpath); } -fn install_one_crate(c: cargo, path: str, cf: str) { +fn install_one_crate(&c: cargo, path: str, cf: str) { let buildpath = alt run_in_buildpath("installing", path, "/build", cf, []) { none { ret; } @@ -603,7 +685,7 @@ fn rustc_sysroot() -> str { } } -fn install_source(c: cargo, path: str) { +fn install_source(&c: cargo, path: str) { #debug("source: %s", path); os::change_dir(path); @@ -632,9 +714,8 @@ fn install_source(c: cargo, path: str) { } } -fn install_git(c: cargo, wd: str, url: str, ref: option) { - info("installing with git from " + url + "..."); - run::run_program("git", ["clone", url, wd]); +fn install_git(&c: cargo, wd: str, url: str, ref: option) { + run::program_output("git", ["clone", url, wd]); if option::is_some::(ref) { let r = option::get::(ref); os::change_dir(wd); @@ -644,8 +725,7 @@ fn install_git(c: cargo, wd: str, url: str, ref: option) { install_source(c, wd); } -fn install_curl(c: cargo, wd: str, url: str) { - info("installing with curl from " + url + "..."); +fn install_curl(&c: cargo, wd: str, url: str) { let tarpath = path::connect(wd, "pkg.tar"); let p = run::program_output("curl", ["-f", "-s", "-o", tarpath, url]); @@ -657,96 +737,95 @@ fn install_curl(c: cargo, wd: str, url: str) { install_source(c, wd); } -fn install_file(c: cargo, wd: str, path: str) { - info("installing with tar from " + path + "..."); - run::run_program("tar", ["-x", "--strip-components=1", +fn install_file(&c: cargo, wd: str, path: str) { + run::program_output("tar", ["-x", "--strip-components=1", "-C", wd, "-f", path]); install_source(c, wd); } -fn install_package(c: cargo, wd: str, pkg: package) { - alt pkg.method { - "git" { install_git(c, wd, pkg.url, pkg.ref); } - "http" | "ftp" | "curl" { install_curl(c, wd, pkg.url); } - "file" { install_file(c, wd, pkg.url); } - _ { fail #fmt("don't know how to install with: %s", pkg.method) } +fn install_package(&c: cargo, src: str, wd: str, pkg: package) { + let url = copy pkg.url; + let method = alt pkg.method { + "git" { "git" } + "file" { "file" } + _ { "curl" } + }; + + info(#fmt["installing %s/%s via %s...", src, pkg.name, method]); + + alt method { + "git" { install_git(c, wd, url, copy pkg.ref); } + "file" { install_file(c, wd, url); } + "curl" { install_curl(c, wd, copy url); } + _ {} } } -fn cargo_suggestion(c: cargo, syncing: bool, fallback: fn()) +fn cargo_suggestion(&c: cargo, fallback: fn()) { if c.sources.size() == 0u { error("no sources defined - you may wish to run " + - "`cargo init` then `cargo sync`"); + "`cargo init`"); ret; } - if !syncing { - let mut npkg = 0u; - for c.sources.each_value { |v| npkg += vec::len(v.packages) } - if npkg == 0u { - error("no packages synced - you may wish to run 
" + - "`cargo sync`"); - ret; - } - } fallback(); } -fn install_uuid(c: cargo, wd: str, uuid: str) { +fn install_uuid(&c: cargo, wd: str, uuid: str) { let mut ps = []; for_each_package(c, { |s, p| if p.uuid == uuid { - vec::grow(ps, 1u, (s.name, p)); + vec::grow(ps, 1u, (s.name, copy p)); } }); if vec::len(ps) == 1u { - let (_, p) = ps[0]; - install_package(c, wd, p); + let (sname, p) = copy ps[0]; + install_package(c, sname, wd, p); ret; } else if vec::len(ps) == 0u { - cargo_suggestion(c, false, { || + cargo_suggestion(c, { || error("can't find package: " + uuid); }); ret; } error("found multiple packages:"); for ps.each {|elt| - let (sname,p) = elt; + let (sname,p) = copy elt; info(" " + sname + "/" + p.uuid + " (" + p.name + ")"); } } -fn install_named(c: cargo, wd: str, name: str) { +fn install_named(&c: cargo, wd: str, name: str) { let mut ps = []; for_each_package(c, { |s, p| if p.name == name { - vec::grow(ps, 1u, (s.name, p)); + vec::grow(ps, 1u, (s.name, copy p)); } }); if vec::len(ps) == 1u { - let (_, p) = ps[0]; - install_package(c, wd, p); + let (sname, p) = copy ps[0]; + install_package(c, sname, wd, p); ret; } else if vec::len(ps) == 0u { - cargo_suggestion(c, false, { || + cargo_suggestion(c, { || error("can't find package: " + name); }); ret; } error("found multiple packages:"); for ps.each {|elt| - let (sname,p) = elt; + let (sname,p) = copy elt; info(" " + sname + "/" + p.uuid + " (" + p.name + ")"); } } -fn install_uuid_specific(c: cargo, wd: str, src: str, uuid: str) { +fn install_uuid_specific(&c: cargo, wd: str, src: str, uuid: str) { alt c.sources.find(src) { some(s) { let packages = copy s.packages; if vec::any(packages, { |p| if p.uuid == uuid { - install_package(c, wd, p); + install_package(c, src, wd, p); true } else { false } }) { ret; } @@ -756,13 +835,13 @@ fn install_uuid_specific(c: cargo, wd: str, src: str, uuid: str) { error("can't find package: " + src + "/" + uuid); } -fn install_named_specific(c: cargo, wd: str, src: str, name: str) { +fn install_named_specific(&c: cargo, wd: str, src: str, name: str) { alt c.sources.find(src) { some(s) { let packages = copy s.packages; if vec::any(packages, { |p| if p.name == name { - install_package(c, wd, p); + install_package(c, src, wd, p); true } else { false } }) { ret; } @@ -772,7 +851,7 @@ fn install_named_specific(c: cargo, wd: str, src: str, name: str) { error("can't find package: " + src + "/" + name); } -fn cmd_uninstall(c: cargo) { +fn cmd_uninstall(&c: cargo) { if vec::len(c.opts.free) < 3u { cmd_usage(); ret; @@ -837,13 +916,7 @@ fn cmd_uninstall(c: cargo) { } } -fn cmd_install(c: cargo) unsafe { - // cargo install [pkg] - if vec::len(c.opts.free) < 2u { - cmd_usage(); - ret; - } - +fn cmd_install(&c: cargo) unsafe { let wd_base = c.workdir + path::path_sep(); let wd = alt tempfile::mkdtemp(wd_base, "") { some(_wd) { _wd } @@ -862,10 +935,13 @@ fn cmd_install(c: cargo) unsafe { ret; } + sync(c); + let target = c.opts.free[2]; - if is_archive_url(target) { - install_curl(c, wd, target); + if is_archive_path(target) { + install_file(c, wd, target); + ret; } else if is_git_url(target) { let ref = if c.opts.free.len() >= 4u { some(c.opts.free[3u]) @@ -873,8 +949,8 @@ fn cmd_install(c: cargo) unsafe { none }; install_git(c, wd, target, ref) - } else if is_archive_uri(target) { - install_file(c, wd, target); + } else if !valid_pkg_name(target) && has_archive_extension(target) { + install_curl(c, wd, target); ret; } else { let mut ps = copy target; @@ -900,80 +976,323 @@ fn cmd_install(c: cargo) unsafe { } } 
-fn sync_one(c: cargo, name: str, src: source) { - let dir = path::connect(c.sourcedir, name); +fn sync(&c: cargo) { + for c.sources.each_key { |k| + let mut s = c.sources.get(k); + + sync_one(c, s); + // FIXME: mutability hack + c.sources.insert(k, s); + } +} + +fn sync_one_file(&c: cargo, dir: str, &src: source) -> bool { + let name = src.name; + let srcfile = path::connect(dir, "source.json.new"); + let destsrcfile = path::connect(dir, "source.json"); let pkgfile = path::connect(dir, "packages.json.new"); let destpkgfile = path::connect(dir, "packages.json"); - let sigfile = path::connect(dir, "packages.json.sig"); let keyfile = path::connect(dir, "key.gpg"); + let srcsigfile = path::connect(dir, "source.json.sig"); + let sigfile = path::connect(dir, "packages.json.sig"); let url = src.url; - need_dir(dir); - info(#fmt["fetching source %s...", name]); - let p = run::program_output("curl", ["-f", "-s", "-o", pkgfile, url]); - if p.status != 0 { - warn(#fmt["fetch for source %s (url %s) failed", name, url]); - } else { - info(#fmt["fetched source: %s", name]); + let mut has_src_file = false; + + if !os::copy_file(path::connect(url, "packages.json"), pkgfile) { + error(#fmt["fetch for source %s (url %s) failed", name, url]); + ret false; } - alt src.sig { - some(u) { - let p = run::program_output("curl", ["-f", "-s", "-o", sigfile, - u]); - if p.status != 0 { - warn(#fmt["fetch for source %s (sig %s) failed", name, u]); - } - } - _ { } + + if os::copy_file(path::connect(url, "source.json"), srcfile) { + has_src_file = false; } + + os::copy_file(path::connect(url, "source.json.sig"), srcsigfile); + os::copy_file(path::connect(url, "packages.json.sig"), sigfile); + alt src.key { some(u) { let p = run::program_output("curl", ["-f", "-s", "-o", keyfile, u]); if p.status != 0 { - warn(#fmt["fetch for source %s (key %s) failed", name, u]); + error(#fmt["fetch for source %s (key %s) failed", name, u]); + ret false; } pgp::add(c.root, keyfile); } _ { } } - alt (src.sig, src.key, src.keyfp) { - (some(_), some(_), some(f)) { + alt (src.key, src.keyfp) { + (some(_), some(f)) { let r = pgp::verify(c.root, pkgfile, sigfile, f); + if !r { - warn(#fmt["signature verification failed for source %s", + error(#fmt["signature verification failed for source %s", name]); - } else { - info(#fmt["signature ok for source %s", name]); + ret false; + } + + if has_src_file { + let e = pgp::verify(c.root, srcfile, srcsigfile, f); + + if !e { + error(#fmt["signature verification failed for source %s", + name]); + ret false; + } } } - _ { - info(#fmt["no signature for source %s", name]); - } + _ {} } + copy_warn(pkgfile, destpkgfile); + + if has_src_file { + copy_warn(srcfile, destsrcfile); + } + + os::remove_file(keyfile); + os::remove_file(srcfile); + os::remove_file(srcsigfile); + os::remove_file(pkgfile); + os::remove_file(sigfile); + + info(#fmt["synced source: %s", name]); + + ret true; } -fn cmd_sync(c: cargo) { - if vec::len(c.opts.free) >= 3u { - vec::iter_between(c.opts.free, 2u, vec::len(c.opts.free)) { |name| - alt c.sources.find(name) { - some(source) { - sync_one(c, name, source); - } - none { - error(#fmt("no such source: %s", name)); +fn sync_one_git(&c: cargo, dir: str, &src: source) -> bool { + let name = src.name; + let srcfile = path::connect(dir, "source.json"); + let pkgfile = path::connect(dir, "packages.json"); + let keyfile = path::connect(dir, "key.gpg"); + let srcsigfile = path::connect(dir, "source.json.sig"); + let sigfile = path::connect(dir, "packages.json.sig"); + let url = src.url; + + 
fn rollback(name: str, dir: str, insecure: bool) { + fn msg(name: str, insecure: bool) { + error(#fmt["could not rollback source: %s", name]); + + if insecure { + warn("a past security check failed on source " + + name + " and rolling back the source failed -" + + " this source may be compromised"); + } + } + + if !os::change_dir(dir) { + msg(name, insecure); + } + else { + let p = run::program_output("git", ["reset", "--hard", + "HEAD@{1}"]); + + if p.status != 0 { + msg(name, insecure); + } + } + } + + if !os::path_exists(path::connect(dir, ".git")) { + let p = run::program_output("git", ["clone", url, dir]); + + if p.status != 0 { + error(#fmt["fetch for source %s (url %s) failed", name, url]); + ret false; + } + } + else { + if !os::change_dir(dir) { + error(#fmt["fetch for source %s (url %s) failed", name, url]); + ret false; + } + + let p = run::program_output("git", ["pull"]); + + if p.status != 0 { + error(#fmt["fetch for source %s (url %s) failed", name, url]); + ret false; + } + } + + let has_src_file = os::path_exists(srcfile); + + alt src.key { + some(u) { + let p = run::program_output("curl", ["-f", "-s", "-o", keyfile, + u]); + if p.status != 0 { + error(#fmt["fetch for source %s (key %s) failed", name, u]); + rollback(name, dir, false); + ret false; + } + pgp::add(c.root, keyfile); + } + _ { } + } + alt (src.key, src.keyfp) { + (some(_), some(f)) { + let r = pgp::verify(c.root, pkgfile, sigfile, f); + + if !r { + error(#fmt["signature verification failed for source %s", + name]); + rollback(name, dir, false); + ret false; + } + + if has_src_file { + let e = pgp::verify(c.root, srcfile, srcsigfile, f); + + if !e { + error(#fmt["signature verification failed for source %s", + name]); + rollback(name, dir, false); + ret false; } } } - } else { - cargo_suggestion(c, true, { || } ); - for c.sources.each_value { |v| - sync_one(c, v.name, v); + _ {} + } + + os::remove_file(keyfile); + + info(#fmt["synced source: %s", name]); + + ret true; +} + +fn sync_one_curl(&c: cargo, dir: str, &src: source) -> bool { + let name = src.name; + let srcfile = path::connect(dir, "source.json.new"); + let destsrcfile = path::connect(dir, "source.json"); + let pkgfile = path::connect(dir, "packages.json.new"); + let destpkgfile = path::connect(dir, "packages.json"); + let keyfile = path::connect(dir, "key.gpg"); + let srcsigfile = path::connect(dir, "source.json.sig"); + let sigfile = path::connect(dir, "packages.json.sig"); + let mut url = src.url; + let smart = !str::ends_with(src.url, "packages.json"); + let mut has_src_file = false; + + if smart { + url += "/packages.json"; + } + + let p = run::program_output("curl", ["-f", "-s", "-o", pkgfile, url]); + + if p.status != 0 { + error(#fmt["fetch for source %s (url %s) failed", name, url]); + ret false; + } + if smart { + url = src.url + "/source.json"; + let p = run::program_output("curl", ["-f", "-s", "-o", srcfile, url]); + + if p.status == 0 { + has_src_file = true; } } + + alt src.key { + some(u) { + let p = run::program_output("curl", ["-f", "-s", "-o", keyfile, + u]); + if p.status != 0 { + error(#fmt["fetch for source %s (key %s) failed", name, u]); + ret false; + } + pgp::add(c.root, keyfile); + } + _ { } + } + alt (src.key, src.keyfp) { + (some(_), some(f)) { + if smart { + url = src.url + "/packages.json.sig"; + } + else { + url = src.url + ".sig"; + } + + let mut p = run::program_output("curl", ["-f", "-s", "-o", + sigfile, url]); + if p.status != 0 { + error(#fmt["fetch for source %s (sig %s) failed", name, url]); + ret false; + } + + 
let r = pgp::verify(c.root, pkgfile, sigfile, f); + + if !r { + error(#fmt["signature verification failed for source %s", + name]); + ret false; + } + + if smart && has_src_file { + url = src.url + "/source.json.sig"; + + p = run::program_output("curl", ["-f", "-s", "-o", srcsigfile, + url]); + if p.status != 0 { + error(#fmt["fetch for source %s (sig %s) failed", + name, url]); + ret false; + } + + let e = pgp::verify(c.root, srcfile, srcsigfile, f); + + if !e { + error("signature verification failed for " + + "source " + name); + ret false; + } + } + } + _ {} + } + + copy_warn(pkgfile, destpkgfile); + + if smart && has_src_file { + copy_warn(srcfile, destsrcfile); + } + + os::remove_file(keyfile); + os::remove_file(srcfile); + os::remove_file(srcsigfile); + os::remove_file(pkgfile); + os::remove_file(sigfile); + + info(#fmt["synced source: %s", name]); + + ret true; +} + +fn sync_one(&c: cargo, &src: source) { + let name = src.name; + let dir = path::connect(c.sourcedir, name); + + info(#fmt["syncing source: %s...", name]); + + need_dir(dir); + + let result = alt src.method { + "git" { sync_one_git(c, dir, src) } + "file" { sync_one_file(c, dir, src) } + _ { sync_one_curl(c, dir, src) } + }; + + if result { + load_source_info(c, src); + load_source_packages(c, src); + } } -fn cmd_init(c: cargo) { +fn cmd_init(&c: cargo) { let srcurl = "http://www.rust-lang.org/cargo/sources.json"; let sigurl = "http://www.rust-lang.org/cargo/sources.json.sig"; @@ -995,11 +1314,13 @@ fn cmd_init(c: cargo) { let r = pgp::verify(c.root, srcfile, sigfile, pgp::signing_key_fp()); if !r { - warn(#fmt["signature verification failed for '%s'", srcfile]); - } else { - info(#fmt["signature ok for '%s'", srcfile]); + error(#fmt["signature verification failed for '%s'", srcfile]); + ret; } + copy_warn(srcfile, destsrcfile); + os::remove_file(srcfile); + os::remove_file(sigfile); info(#fmt["initialized .cargo in %s", c.root]); } @@ -1038,15 +1359,21 @@ fn print_source(s: source) { }); } -fn cmd_list(c: cargo) { +fn cmd_list(&c: cargo) { + sync(c); + if vec::len(c.opts.free) >= 3u { vec::iter_between(c.opts.free, 2u, vec::len(c.opts.free)) { |name| - alt c.sources.find(name) { - some(source) { - print_source(source); - } - none { - error(#fmt("no such source: %s", name)); + if !valid_pkg_name(name) { + error(#fmt("'%s' is an invalid source name", name)); + } else { + alt c.sources.find(name) { + some(source) { + print_source(source); + } + none { + error(#fmt("no such source: %s", name)); + } } } } @@ -1057,11 +1384,14 @@ fn cmd_list(c: cargo) { } } -fn cmd_search(c: cargo) { +fn cmd_search(&c: cargo) { if vec::len(c.opts.free) < 3u { cmd_usage(); ret; } + + sync(c); + let mut n = 0; let name = c.opts.free[2]; let tags = vec::slice(c.opts.free, 3u, vec::len(c.opts.free)); @@ -1086,10 +1416,233 @@ fn install_to_dir(srcfile: str, destdir: str) { } } +fn dump_cache(c: cargo) { + need_dir(c.root); + + let out = path::connect(c.root, "cache.json"); + let root = json::dict(map::str_hash()); + + if os::path_exists(out) { + copy_warn(out, path::connect(c.root, "cache.json.old")); + } +} +fn dump_sources(c: cargo) { + need_dir(c.root); + + let out = path::connect(c.root, "sources.json"); + + if os::path_exists(out) { + copy_warn(out, path::connect(c.root, "sources.json.old")); + } + + alt io::buffered_file_writer(out) { + result::ok(writer) { + let hash = map::str_hash(); + let root = json::dict(hash); + + for c.sources.each { |k, v| + let chash = map::str_hash(); + let child = json::dict(chash); + + chash.insert("url", 
json::string(v.url)); + chash.insert("method", json::string(v.method)); + + alt v.key { + some(key) { + chash.insert("key", json::string(key)); + } + _ {} + } + alt v.keyfp { + some(keyfp) { + chash.insert("keyfp", json::string(keyfp)); + } + _ {} + } + + hash.insert(k, child); + } + + writer.write_str(json::to_str(root)); + } + result::err(e) { + error(#fmt("could not dump sources: %s", e)); + } + } +} + fn copy_warn(srcfile: str, destfile: str) { - if !os::copy_file(srcfile, destfile) { - warn(#fmt["copying %s to %s failed", srcfile, destfile]); - } + if !os::copy_file(srcfile, destfile) { + warn(#fmt["copying %s to %s failed", srcfile, destfile]); + } +} + +fn cmd_sources(&c: cargo) { + if vec::len(c.opts.free) < 3u { + for c.sources.each_value { |v| + info(#fmt("%s (%s) via %s", v.name, v.url, v.method)); + } + ret; + } + + let action = c.opts.free[2u]; + + alt action { + "clear" { + for c.sources.each_key { |k| + c.sources.remove(k); + } + + info("cleared sources"); + } + "add" { + if vec::len(c.opts.free) < 5u { + cmd_usage(); + ret; + } + + let name = c.opts.free[3u]; + let url = c.opts.free[4u]; + + if !valid_pkg_name(name) { + error(#fmt("'%s' is an invalid source name", name)); + ret; + } + + alt c.sources.find(name) { + some(source) { + error(#fmt("source already exists: %s", name)); + } + none { + c.sources.insert(name, { + mut name: name, + mut url: url, + mut method: assume_source_method(url), + mut key: none, + mut keyfp: none, + mut packages: [] + }); + info(#fmt("added source: %s", name)); + } + } + } + "remove" { + if vec::len(c.opts.free) < 4u { + cmd_usage(); + ret; + } + + let name = c.opts.free[3u]; + + if !valid_pkg_name(name) { + error(#fmt("'%s' is an invalid source name", name)); + ret; + } + + alt c.sources.find(name) { + some(source) { + c.sources.remove(name); + info(#fmt("removed source: %s", name)); + } + none { + error(#fmt("no such source: %s", name)); + } + } + } + "set-url" { + if vec::len(c.opts.free) < 5u { + cmd_usage(); + ret; + } + + let name = c.opts.free[3u]; + let url = c.opts.free[4u]; + + if !valid_pkg_name(name) { + error(#fmt("'%s' is an invalid source name", name)); + ret; + } + + alt c.sources.find(name) { + some(source) { + let old = copy source.url; + + source.url = if source.method == "file" { + os::make_absolute(url) + } else { + url + }; + + info(#fmt("changed source url: '%s' to '%s'", old, url)); + } + none { + error(#fmt("no such source: %s", name)); + } + } + } + "set-method" { + if vec::len(c.opts.free) < 5u { + cmd_usage(); + ret; + } + + let name = c.opts.free[3u]; + let method = c.opts.free[4u]; + + if !valid_pkg_name(name) { + error(#fmt("'%s' is an invalid source name", name)); + ret; + } + + alt c.sources.find(name) { + some(source) { + let old = copy source.method; + + source.method = alt method { + "git" { "git" } + "file" { "file" } + _ { "curl" } + }; + + info(#fmt("changed source method: '%s' to '%s'", old, + method)); + } + none { + error(#fmt("no such source: %s", name)); + } + } + } + "rename" { + if vec::len(c.opts.free) < 5u { + cmd_usage(); + ret; + } + + let name = c.opts.free[3u]; + let newn = c.opts.free[4u]; + + if !valid_pkg_name(name) { + error(#fmt("'%s' is an invalid source name", name)); + ret; + } + if !valid_pkg_name(newn) { + error(#fmt("'%s' is an invalid source name", newn)); + ret; + } + + alt c.sources.find(name) { + some(source) { + c.sources.remove(name); + c.sources.insert(newn, source); + info(#fmt("renamed source: %s to %s", name, newn)); + } + none { + error(#fmt("no such source: %s", name)); 
+ } + } + } + _ { cmd_usage(); } + } } fn cmd_usage() { @@ -1100,13 +1653,22 @@ fn cmd_usage() { General: init Reinitialize cargo in ~/.cargo usage Display this message - sync [sources..] Sync all sources (or specific sources) Querying: - list [sources..] List sources and their packages - or a single source + list [sources..] List the packages in sources search [tags...] Search packages +Sources: + sources List sources + sources add Add a source + sources remove Remove a source + sources rename Rename a source + sources set-url Change the source URL + sources set-method Change the method (guesses from + the URL by default) can be ;git', + 'file' or 'curl' + sources clear Remove all sources + Packages: install [options] Install a package from source code in the current directory @@ -1115,14 +1677,15 @@ Packages: install [options] Install a package via curl (HTTP, FTP, etc.) from an .tar[.gz|bz2|xz] file - install [options] [ref] Install a package via git + install [options] [ref] Install a package via read-only + git install [options] Install a package directly from an .tar[.gz|bz2|xz] file - uninstall [options] Remove a package by (meta) name - uninstall [options] Remove a package by (meta) uuid + uninstall [options] [source/] Remove a package by [meta]name + uninstall [options] [source/] Remove a package by [meta]uuid Package installation options: - --tests Run crate tests before installing + --test Run crate tests before installing Package [un]installation options: -g Work at the user level (~/.cargo/bin/ instead of @@ -1143,17 +1706,11 @@ fn main(argv: [str]) { } let mut c = configure(o); - let mut sources = c.sources; let home = c.root; + let first_time = os::path_exists(path::connect(home, "sources.json")); - if !os::path_exists(path::connect(home, "sources.json")) { + if !first_time && o.free[1] != "init" { cmd_init(c); - try_parse_sources(path::connect(home, "sources.json"), sources); - try_parse_sources(path::connect(home, "local-sources.json"), sources); - - for sources.each_value { |v| - sync_one(c, v.name, v); - } // FIXME: shouldn't need to reconfigure c = configure(o); @@ -1165,8 +1722,10 @@ fn main(argv: [str]) { "uninstall" { cmd_uninstall(c); } "list" { cmd_list(c); } "search" { cmd_search(c); } - "sync" { cmd_sync(c); } - "usage" { cmd_usage(); } + "sources" { cmd_sources(c); } _ { cmd_usage(); } } -} + + dump_cache(c); + dump_sources(c); +} \ No newline at end of file
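
Note: the sources file this patch reads from ~/.cargo/sources.json follows the schema defined by parse_source and written back by dump_sources above: each source is a dict keyed by a name accepted by valid_pkg_name, with a required "url", an optional "method" ("git", "file" or "curl"; guessed by assume_source_method when absent), and optional "key"/"keyfp" for signature checking. A minimal sketch of such a file, with hypothetical source names, URLs and fingerprint:

    {
        "central": {
            "url": "http://www.example.org/cargo/packages.json",
            "method": "curl",
            "key": "http://www.example.org/cargo/key.gpg",
            "keyfp": "1111 2222 3333 4444 5555 6666 7777 8888 9999 0000"
        },
        "local-mirror": {
            "url": "/home/user/cargo-mirror",
            "method": "file"
        }
    }

For "file" sources parse_source converts the url to an absolute path; the same record can be created from the command line with `cargo sources add <name> <url>`, followed by `cargo sources set-method <name> <method>` if the guessed method is wrong.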
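
Each synced source directory under sourcedir is then expected to contain a packages.json (parsed by load_one_source_package) and, optionally, a source.json carrying refreshed key/keyfp info (parsed by load_source_info), plus matching .json.sig files when a key and keyfp are configured. A sketch of one packages.json entry, again with made-up values; the name must pass valid_pkg_name and the uuid must pass is_uuid, both checked in this patch:

    [
        {
            "name": "example-pkg",
            "uuid": "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee",
            "url": "https://example.org/example-pkg.git",
            "method": "git",
            "description": "an example package entry",
            "ref": "v0.1",
            "tags": ["example"]
        }
    ]

When both key and keyfp are set on a source, sync_one_file/sync_one_git/sync_one_curl verify packages.json (and source.json, when present) against their .sig files with pgp::verify and abort the sync on failure; the git path additionally rolls the checkout back to HEAD@{1}.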