git.proxmox.com Git - cargo.git/commitdiff
Changed try! macros to ? operator
author Sascha Grunert <Sascha.Grunert@rohde-schwarz.com>
Fri, 11 Nov 2016 13:25:20 +0000 (14:25 +0100)
committer Sascha Grunert <Sascha.Grunert@rohde-schwarz.com>
Fri, 11 Nov 2016 13:25:20 +0000 (14:25 +0100)
Since the stabilization of the ? operator in release 1.13.0, it should be
used in place of the try! macro to keep the code idiomatic Rust.
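
The rewrite is mechanical: try!(expr) and expr? both match on the Result,
return early with the From-converted error on Err, and unwrap the Ok value
otherwise. A minimal before/after sketch (illustrative only, not part of
this commit; std::io stands in for the fallible calls touched here):

    use std::fs::File;
    use std::io::{self, Read};

    // Pre-1.13 style, as the code looked before this commit.
    fn read_old(path: &str) -> io::Result<String> {
        let mut f = try!(File::open(path));
        let mut s = String::new();
        try!(f.read_to_string(&mut s));
        Ok(s)
    }

    // Post-1.13 style, as rewritten by this commit: identical control flow,
    // errors are still propagated to the caller.
    fn read_new(path: &str) -> io::Result<String> {
        let mut f = File::open(path)?;
        let mut s = String::new();
        f.read_to_string(&mut s)?;
        Ok(s)
    }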

90 files changed:
src/bin/bench.rs
src/bin/build.rs
src/bin/cargo.rs
src/bin/clean.rs
src/bin/doc.rs
src/bin/fetch.rs
src/bin/generate_lockfile.rs
src/bin/git_checkout.rs
src/bin/init.rs
src/bin/install.rs
src/bin/locate_project.rs
src/bin/login.rs
src/bin/metadata.rs
src/bin/new.rs
src/bin/owner.rs
src/bin/package.rs
src/bin/pkgid.rs
src/bin/publish.rs
src/bin/read_manifest.rs
src/bin/run.rs
src/bin/rustc.rs
src/bin/rustdoc.rs
src/bin/search.rs
src/bin/test.rs
src/bin/uninstall.rs
src/bin/update.rs
src/bin/verify_project.rs
src/bin/yank.rs
src/cargo/core/dependency.rs
src/cargo/core/package.rs
src/cargo/core/package_id.rs
src/cargo/core/package_id_spec.rs
src/cargo/core/registry.rs
src/cargo/core/resolver/encode.rs
src/cargo/core/resolver/mod.rs
src/cargo/core/shell.rs
src/cargo/core/source.rs
src/cargo/core/workspace.rs
src/cargo/lib.rs
src/cargo/ops/cargo_clean.rs
src/cargo/ops/cargo_compile.rs
src/cargo/ops/cargo_doc.rs
src/cargo/ops/cargo_fetch.rs
src/cargo/ops/cargo_generate_lockfile.rs
src/cargo/ops/cargo_install.rs
src/cargo/ops/cargo_new.rs
src/cargo/ops/cargo_output_metadata.rs
src/cargo/ops/cargo_package.rs
src/cargo/ops/cargo_pkgid.rs
src/cargo/ops/cargo_read_manifest.rs
src/cargo/ops/cargo_run.rs
src/cargo/ops/cargo_rustc/compilation.rs
src/cargo/ops/cargo_rustc/context.rs
src/cargo/ops/cargo_rustc/custom_build.rs
src/cargo/ops/cargo_rustc/fingerprint.rs
src/cargo/ops/cargo_rustc/job.rs
src/cargo/ops/cargo_rustc/job_queue.rs
src/cargo/ops/cargo_rustc/layout.rs
src/cargo/ops/cargo_rustc/mod.rs
src/cargo/ops/cargo_test.rs
src/cargo/ops/lockfile.rs
src/cargo/ops/registry.rs
src/cargo/ops/resolve.rs
src/cargo/sources/config.rs
src/cargo/sources/directory.rs
src/cargo/sources/git/source.rs
src/cargo/sources/git/utils.rs
src/cargo/sources/path.rs
src/cargo/sources/registry/index.rs
src/cargo/sources/registry/local.rs
src/cargo/sources/registry/mod.rs
src/cargo/sources/registry/remote.rs
src/cargo/sources/replaced.rs
src/cargo/util/cfg.rs
src/cargo/util/config.rs
src/cargo/util/errors.rs
src/cargo/util/flock.rs
src/cargo/util/graph.rs
src/cargo/util/lazy_cell.rs
src/cargo/util/network.rs
src/cargo/util/paths.rs
src/cargo/util/process_builder.rs
src/cargo/util/read2.rs
src/cargo/util/rustc.rs
src/cargo/util/toml.rs
src/cargo/util/vcs.rs
src/crates-io/lib.rs
tests/cargotest/support/mod.rs
tests/resolve.rs
tests/shell.rs

diff --git a/src/bin/bench.rs b/src/bin/bench.rs
index d5bc21dd46e238c063ba0fbdeb7b36b409d722db..18c90c6248f808911501af30fc5809707341a299 100644 (file)
@@ -71,12 +71,12 @@ Compilation can be customized with the `bench` profile in the manifest.
 ";
 
 pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
-    let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd()));
-    try!(config.configure(options.flag_verbose,
+    let root = find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())?;
+    config.configure(options.flag_verbose,
                           options.flag_quiet,
                           &options.flag_color,
                           options.flag_frozen,
-                          options.flag_locked));
+                          options.flag_locked)?;
     let ops = ops::TestOptions {
         no_run: options.flag_no_run,
         no_fail_fast: false,
@@ -102,8 +102,8 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
         },
     };
 
-    let ws = try!(Workspace::new(&root, config));
-    let err = try!(ops::run_benches(&ws, &ops, &options.arg_args));
+    let ws = Workspace::new(&root, config)?;
+    let err = ops::run_benches(&ws, &ops, &options.arg_args)?;
     match err {
         None => Ok(None),
         Some(err) => {
diff --git a/src/bin/build.rs b/src/bin/build.rs
index eb6673c2f9abcef82883a841c4e8f96676138f26..439201f7420356387c8987613475387231610ba8 100644 (file)
@@ -69,13 +69,13 @@ the --release flag will use the `release` profile instead.
 pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
     debug!("executing; cmd=cargo-build; args={:?}",
            env::args().collect::<Vec<_>>());
-    try!(config.configure(options.flag_verbose,
+    config.configure(options.flag_verbose,
                           options.flag_quiet,
                           &options.flag_color,
                           options.flag_frozen,
-                          options.flag_locked));
+                          options.flag_locked)?;
 
-    let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd()));
+    let root = find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())?;
 
     let opts = CompileOptions {
         config: config,
@@ -97,7 +97,7 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
         target_rustc_args: None,
     };
 
-    let ws = try!(Workspace::new(&root, config));
-    try!(ops::compile(&ws, &opts));
+    let ws = Workspace::new(&root, config)?;
+    ops::compile(&ws, &opts)?;
     Ok(None)
 }
diff --git a/src/bin/cargo.rs b/src/bin/cargo.rs
index fdcea48c89fd0f951b5e1f93a5a182f36897e9e0..94bcfbb9e9910b53e2fe8d48d869055e2c41aa0b 100644 (file)
@@ -116,11 +116,11 @@ each_subcommand!(declare_mod);
   on this top-level information.
 */
 fn execute(flags: Flags, config: &Config) -> CliResult<Option<()>> {
-    try!(config.configure(flags.flag_verbose,
+    config.configure(flags.flag_verbose,
                           flags.flag_quiet,
                           &flags.flag_color,
                           flags.flag_frozen,
-                          flags.flag_locked));
+                          flags.flag_locked)?;
 
     init_git_transports(config);
     let _token = cargo::util::job::setup();
@@ -139,8 +139,8 @@ fn execute(flags: Flags, config: &Config) -> CliResult<Option<()>> {
     }
 
     if let Some(ref code) = flags.flag_explain {
-        let mut procss = try!(config.rustc()).process();
-        try!(procss.arg("--explain").arg(code).exec().map_err(human));
+        let mut procss = config.rustc()?.process();
+        procss.arg("--explain").arg(code).exec().map_err(human)?;
         return Ok(None)
     }
 
@@ -189,7 +189,7 @@ fn execute(flags: Flags, config: &Config) -> CliResult<Option<()>> {
         return Ok(None)
     }
 
-    let alias_list = try!(aliased_command(&config, &args[1]));
+    let alias_list = aliased_command(&config, &args[1])?;
     let args = match alias_list {
         Some(alias_command) => {
             let chain = args.iter().take(1)
@@ -205,7 +205,7 @@ fn execute(flags: Flags, config: &Config) -> CliResult<Option<()>> {
         }
         None => args,
     };
-    try!(execute_subcommand(config, &args[1], &args));
+    execute_subcommand(config, &args[1], &args)?;
     Ok(None)
 }
 
@@ -239,7 +239,7 @@ fn aliased_command(config: &Config, command: &String) -> CargoResult<Option<Vec<
             }
         },
         Err(_) => {
-            let value = try!(config.get_list(&alias_name));
+            let value = config.get_list(&alias_name)?;
             if let Some(record) = value {
                 let alias_commands: Vec<String> = record.val.iter()
                                 .map(|s| s.0.to_string()).collect();
diff --git a/src/bin/clean.rs b/src/bin/clean.rs
index c259e83c6f3d6bf6ebab3461ea9056d84bf17cbb..5a36aa6005cfdd89fc65f71d48066e4caef0adc6 100644 (file)
@@ -44,20 +44,20 @@ and its format, see the `cargo help pkgid` command.
 
 pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
     debug!("executing; cmd=cargo-clean; args={:?}", env::args().collect::<Vec<_>>());
-    try!(config.configure(options.flag_verbose,
+    config.configure(options.flag_verbose,
                           options.flag_quiet,
                           &options.flag_color,
                           options.flag_frozen,
-                          options.flag_locked));
+                          options.flag_locked)?;
 
-    let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd()));
+    let root = find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())?;
     let opts = ops::CleanOptions {
         config: config,
         spec: &options.flag_package,
         target: options.flag_target.as_ref().map(|s| &s[..]),
         release: options.flag_release,
     };
-    let ws = try!(Workspace::new(&root, config));
-    try!(ops::clean(&ws, &opts));
+    let ws = Workspace::new(&root, config)?;
+    ops::clean(&ws, &opts)?;
     Ok(None)
 }
diff --git a/src/bin/doc.rs b/src/bin/doc.rs
index c250dde227c03e9444c4b7ffdf1a64db8518c8ef..c4de72dc07823c5bc21dced090f0d3420d240269 100644 (file)
@@ -62,13 +62,13 @@ the `cargo help pkgid` command.
 ";
 
 pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
-    try!(config.configure(options.flag_verbose,
+    config.configure(options.flag_verbose,
                           options.flag_quiet,
                           &options.flag_color,
                           options.flag_frozen,
-                          options.flag_locked));
+                          options.flag_locked)?;
 
-    let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd()));
+    let root = find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())?;
 
     let empty = Vec::new();
     let doc_opts = ops::DocOptions {
@@ -96,7 +96,7 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
         },
     };
 
-    let ws = try!(Workspace::new(&root, config));
-    try!(ops::doc(&ws, &doc_opts));
+    let ws = Workspace::new(&root, config)?;
+    ops::doc(&ws, &doc_opts)?;
     Ok(None)
 }
diff --git a/src/bin/fetch.rs b/src/bin/fetch.rs
index 1a970b71f05594e63e0a97f6faa99227a0940e24..8d191c55a791945992d1b5ff5f94eef976624800 100644 (file)
@@ -39,14 +39,14 @@ all updated.
 ";
 
 pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
-    try!(config.configure(options.flag_verbose,
+    config.configure(options.flag_verbose,
                           options.flag_quiet,
                           &options.flag_color,
                           options.flag_frozen,
-                          options.flag_locked));
-    let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd()));
-    let ws = try!(Workspace::new(&root, config));
-    try!(ops::fetch(&ws));
+                          options.flag_locked)?;
+    let root = find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())?;
+    let ws = Workspace::new(&root, config)?;
+    ops::fetch(&ws)?;
     Ok(None)
 }
 
diff --git a/src/bin/generate_lockfile.rs b/src/bin/generate_lockfile.rs
index 36057c1bb6e7b8820e3326dabf3f2e4722ac9c0f..7c170d05bc22eb7cb1d5b69e4c2133cad4ad6991 100644 (file)
@@ -33,14 +33,14 @@ Options:
 
 pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
     debug!("executing; cmd=cargo-generate-lockfile; args={:?}", env::args().collect::<Vec<_>>());
-    try!(config.configure(options.flag_verbose,
+    config.configure(options.flag_verbose,
                           options.flag_quiet,
                           &options.flag_color,
                           options.flag_frozen,
-                          options.flag_locked));
-    let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd()));
+                          options.flag_locked)?;
+    let root = find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())?;
 
-    let ws = try!(Workspace::new(&root, config));
-    try!(ops::generate_lockfile(&ws));
+    let ws = Workspace::new(&root, config)?;
+    ops::generate_lockfile(&ws)?;
     Ok(None)
 }
diff --git a/src/bin/git_checkout.rs b/src/bin/git_checkout.rs
index f7762483ea6c658c7b1e587a754990531741ed61..ebfde445e53779694f94316388b06cc83886f946 100644 (file)
@@ -30,21 +30,21 @@ Options:
 ";
 
 pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
-    try!(config.configure(options.flag_verbose,
+    config.configure(options.flag_verbose,
                           options.flag_quiet,
                           &options.flag_color,
                           options.flag_frozen,
-                          options.flag_locked));
+                          options.flag_locked)?;
     let Options { flag_url: url, flag_reference: reference, .. } = options;
 
-    let url = try!(url.to_url());
+    let url = url.to_url()?;
 
     let reference = GitReference::Branch(reference.clone());
     let source_id = SourceId::for_git(&url, reference);
 
     let mut source = GitSource::new(&source_id, config);
 
-    try!(source.update());
+    source.update()?;
 
     Ok(None)
 }
diff --git a/src/bin/init.rs b/src/bin/init.rs
index 237d7663fb4fce7540d117d31bc0a0d8be5416df..c5d5017f57697795dede427f2dd9b8cd126597a1 100644 (file)
@@ -41,11 +41,11 @@ Options:
 
 pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
     debug!("executing; cmd=cargo-init; args={:?}", env::args().collect::<Vec<_>>());
-    try!(config.configure(options.flag_verbose,
+    config.configure(options.flag_verbose,
                           options.flag_quiet,
                           &options.flag_color,
                           options.flag_frozen,
-                          options.flag_locked));
+                          options.flag_locked)?;
 
     let Options { flag_bin, flag_lib, arg_path, flag_name, flag_vcs, .. } = options;
 
@@ -57,11 +57,11 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
                                      flag_name.as_ref().map(|s| s.as_ref()));
 
     let opts_lib = opts.lib;
-    try!(ops::init(opts, config));
+    ops::init(opts, config)?;
 
-    try!(config.shell().status("Created", format!("{} project",
+    config.shell().status("Created", format!("{} project",
                                                    if opts_lib { "library" }
-                                                   else {"binary (application)"})));
+                                                   else {"binary (application)"}))?;
 
     Ok(None)
 }
diff --git a/src/bin/install.rs b/src/bin/install.rs
index 319b9bdf6f47ab17c940ad968572e1caf1b63e79..c9e75ade072c160b232cb7b5118661b5c7c21d32 100644 (file)
@@ -95,11 +95,11 @@ The `--list` option will list all installed packages (and their versions).
 ";
 
 pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
-    try!(config.configure(options.flag_verbose,
+    config.configure(options.flag_verbose,
                           options.flag_quiet,
                           &options.flag_color,
                           options.flag_frozen,
-                          options.flag_locked));
+                          options.flag_locked)?;
 
     let compile_opts = ops::CompileOptions {
         config: config,
@@ -119,7 +119,7 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
     };
 
     let source = if let Some(url) = options.flag_git {
-        let url = try!(url.to_url());
+        let url = url.to_url()?;
         let gitref = if let Some(branch) = options.flag_branch {
             GitReference::Branch(branch)
         } else if let Some(tag) = options.flag_tag {
@@ -131,11 +131,11 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
         };
         SourceId::for_git(&url, gitref)
     } else if let Some(path) = options.flag_path {
-        try!(SourceId::for_path(&config.cwd().join(path)))
+        SourceId::for_path(&config.cwd().join(path))?
     } else if options.arg_crate == None {
-        try!(SourceId::for_path(&config.cwd()))
+        SourceId::for_path(&config.cwd())?
     } else {
-        try!(SourceId::crates_io(config))
+        SourceId::crates_io(config)?
     };
 
     let krate = options.arg_crate.as_ref().map(|s| &s[..]);
@@ -143,9 +143,9 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
     let root = options.flag_root.as_ref().map(|s| &s[..]);
 
     if options.flag_list {
-        try!(ops::install_list(root, config));
+        ops::install_list(root, config)?;
     } else {
-        try!(ops::install(root, krate, &source, vers, &compile_opts, options.flag_force));
+        ops::install(root, krate, &source, vers, &compile_opts, options.flag_force)?;
     }
     Ok(None)
 }
diff --git a/src/bin/locate_project.rs b/src/bin/locate_project.rs
index f162788fcbd20575152cd786f195018a2fdb2325..c9a352453f23b936aee3ed82124ccd47be20c805 100644 (file)
@@ -24,13 +24,13 @@ pub struct ProjectLocation {
 
 pub fn execute(flags: LocateProjectFlags,
                config: &Config) -> CliResult<Option<ProjectLocation>> {
-    let root = try!(find_root_manifest_for_wd(flags.flag_manifest_path, config.cwd()));
+    let root = find_root_manifest_for_wd(flags.flag_manifest_path, config.cwd())?;
 
-    let string = try!(root.to_str()
+    let string = root.to_str()
                       .chain_error(|| human("Your project path contains \
                                              characters not representable in \
                                              Unicode"))
-                      .map_err(|e| CliError::new(e, 1)));
+                      .map_err(|e| CliError::new(e, 1))?;
 
     Ok(Some(ProjectLocation { root: string.to_string() }))
 }
diff --git a/src/bin/login.rs b/src/bin/login.rs
index 53de98af66e8572fa4b53e3a38049efe4389f6ff..d93e11f8d3451f0e13609f343daac555afbb020b 100644 (file)
@@ -35,31 +35,31 @@ Options:
 ";
 
 pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
-    try!(config.configure(options.flag_verbose,
+    config.configure(options.flag_verbose,
                           options.flag_quiet,
                           &options.flag_color,
                           options.flag_frozen,
-                          options.flag_locked));
+                          options.flag_locked)?;
     let token = match options.arg_token.clone() {
         Some(token) => token,
         None => {
-            let src = try!(SourceId::crates_io(config));
+            let src = SourceId::crates_io(config)?;
             let mut src = RegistrySource::remote(&src, config);
-            try!(src.update());
-            let config = try!(src.config()).unwrap();
+            src.update()?;
+            let config = src.config()?.unwrap();
             let host = options.flag_host.clone().unwrap_or(config.api);
             println!("please visit {}me and paste the API Token below", host);
             let mut line = String::new();
             let input = io::stdin();
-            try!(input.lock().read_line(&mut line).chain_error(|| {
+            input.lock().read_line(&mut line).chain_error(|| {
                 human("failed to read stdin")
-            }));
+            })?;
             line
         }
     };
 
     let token = token.trim().to_string();
-    try!(ops::registry_login(config, token));
+    ops::registry_login(config, token)?;
     Ok(None)
 }
 
diff --git a/src/bin/metadata.rs b/src/bin/metadata.rs
index 4553711045b23590ba9aefe9ae10f718873d97cb..df3b21ad6b65ddd0d0f0dc44bc8c21880fbe1f3f 100644 (file)
@@ -43,12 +43,12 @@ Options:
 ";
 
 pub fn execute(options: Options, config: &Config) -> CliResult<Option<ExportInfo>> {
-    try!(config.configure(options.flag_verbose,
+    config.configure(options.flag_verbose,
                           options.flag_quiet,
                           &options.flag_color,
                           options.flag_frozen,
-                          options.flag_locked));
-    let manifest = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd()));
+                          options.flag_locked)?;
+    let manifest = find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())?;
 
     let options = OutputMetadataOptions {
         features: options.flag_features,
@@ -58,7 +58,7 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<ExportInfo
         version: options.flag_format_version,
     };
 
-    let ws = try!(Workspace::new(&manifest, config));
-    let result = try!(output_metadata(&ws, &options));
+    let ws = Workspace::new(&manifest, config)?;
+    let result = output_metadata(&ws, &options)?;
     Ok(Some(result))
 }
diff --git a/src/bin/new.rs b/src/bin/new.rs
index 1d7770d7a06f41e9061c09cfb6860c20cebf2d27..da505af4970bc5797ef5022f8c382be9c45a7d3a 100644 (file)
@@ -41,11 +41,11 @@ Options:
 
 pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
     debug!("executing; cmd=cargo-new; args={:?}", env::args().collect::<Vec<_>>());
-    try!(config.configure(options.flag_verbose,
+    config.configure(options.flag_verbose,
                           options.flag_quiet,
                           &options.flag_color,
                           options.flag_frozen,
-                          options.flag_locked));
+                          options.flag_locked)?;
 
     let Options { flag_bin, flag_lib, arg_path, flag_name, flag_vcs, .. } = options;
 
@@ -56,12 +56,12 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
                                     flag_name.as_ref().map(|s| s.as_ref()));
 
     let opts_lib = opts.lib;
-    try!(ops::new(opts, config));
+    ops::new(opts, config)?;
 
-    try!(config.shell().status("Created", format!("{} `{}` project",
+    config.shell().status("Created", format!("{} `{}` project",
                                                    if opts_lib { "library" }
                                                    else {"binary (application)"},
-                                                   arg_path)));
+                                                   arg_path))?;
 
     Ok(None)
 }
diff --git a/src/bin/owner.rs b/src/bin/owner.rs
index 9b666adfe87e721c08e81f4de44ebbe7a1b7eb71..f1232e99bfb543d785e72513010588f928b1ffa1 100644 (file)
@@ -45,11 +45,11 @@ and troubleshooting.
 ";
 
 pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
-    try!(config.configure(options.flag_verbose,
+    config.configure(options.flag_verbose,
                           options.flag_quiet,
                           &options.flag_color,
                           options.flag_frozen,
-                          options.flag_locked));
+                          options.flag_locked)?;
     let opts = ops::OwnersOptions {
         krate: options.arg_crate,
         token: options.flag_token,
@@ -58,7 +58,7 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
         to_remove: options.flag_remove,
         list: options.flag_list,
     };
-    try!(ops::modify_owners(config, &opts));
+    ops::modify_owners(config, &opts)?;
     Ok(None)
 }
 
diff --git a/src/bin/package.rs b/src/bin/package.rs
index 40eb7bac77803f77920567b87bb2f4bb0d80d959..66cf3ada7899ca0bd4edbfe24510e20a53345eba 100644 (file)
@@ -40,20 +40,20 @@ Options:
 ";
 
 pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
-    try!(config.configure(options.flag_verbose,
+    config.configure(options.flag_verbose,
                           options.flag_quiet,
                           &options.flag_color,
                           options.flag_frozen,
-                          options.flag_locked));
-    let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd()));
-    let ws = try!(Workspace::new(&root, config));
-    try!(ops::package(&ws, &ops::PackageOpts {
+                          options.flag_locked)?;
+    let root = find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())?;
+    let ws = Workspace::new(&root, config)?;
+    ops::package(&ws, &ops::PackageOpts {
         config: config,
         verify: !options.flag_no_verify,
         list: options.flag_list,
         check_metadata: !options.flag_no_metadata,
         allow_dirty: options.flag_allow_dirty,
         jobs: options.flag_jobs,
-    }));
+    })?;
     Ok(None)
 }
diff --git a/src/bin/pkgid.rs b/src/bin/pkgid.rs
index f9d90523ceae29cb2e7689767ca90ff3d76bcd97..e0588cae3acf947bc4a0a00a930142e1fefb01ac 100644 (file)
@@ -54,13 +54,13 @@ Example Package IDs
 
 pub fn execute(options: Options,
                config: &Config) -> CliResult<Option<()>> {
-    try!(config.configure(options.flag_verbose,
+    config.configure(options.flag_verbose,
                           options.flag_quiet,
                           &options.flag_color,
                           options.flag_frozen,
-                          options.flag_locked));
-    let root = try!(find_root_manifest_for_wd(options.flag_manifest_path.clone(), config.cwd()));
-    let ws = try!(Workspace::new(&root, config));
+                          options.flag_locked)?;
+    let root = find_root_manifest_for_wd(options.flag_manifest_path.clone(), config.cwd())?;
+    let ws = Workspace::new(&root, config)?;
 
     let spec = if options.arg_spec.is_some() {
         options.arg_spec
@@ -70,7 +70,7 @@ pub fn execute(options: Options,
         None
     };
     let spec = spec.as_ref().map(|s| &s[..]);
-    let spec = try!(ops::pkgid(&ws, spec));
+    let spec = ops::pkgid(&ws, spec)?;
     println!("{}", spec);
     Ok(None)
 }
diff --git a/src/bin/publish.rs b/src/bin/publish.rs
index 7c27704600688a11b2cb8643482ff65222c4df64..60534f4cf4dd2e4a88acd9531625bf8f4fb1e401 100644 (file)
@@ -43,11 +43,11 @@ Options:
 ";
 
 pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
-    try!(config.configure(options.flag_verbose,
+    config.configure(options.flag_verbose,
                           options.flag_quiet,
                           &options.flag_color,
                           options.flag_frozen,
-                          options.flag_locked));
+                          options.flag_locked)?;
     let Options {
         flag_token: token,
         flag_host: host,
@@ -59,9 +59,9 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
         ..
     } = options;
 
-    let root = try!(find_root_manifest_for_wd(flag_manifest_path.clone(), config.cwd()));
-    let ws = try!(Workspace::new(&root, config));
-    try!(ops::publish(&ws, &ops::PublishOpts {
+    let root = find_root_manifest_for_wd(flag_manifest_path.clone(), config.cwd())?;
+    let ws = Workspace::new(&root, config)?;
+    ops::publish(&ws, &ops::PublishOpts {
         config: config,
         token: token,
         index: host,
@@ -69,6 +69,6 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
         allow_dirty: allow_dirty,
         jobs: jobs,
         dry_run: dry_run,
-    }));
+    })?;
     Ok(None)
 }
diff --git a/src/bin/read_manifest.rs b/src/bin/read_manifest.rs
index bd44b0ad5816701b581a5140f24f4ad980715b64..dfff8a78e1b8cfe3ab978eeb05aad293cc6574a5 100644 (file)
@@ -28,10 +28,10 @@ Options:
 pub fn execute(options: Options, config: &Config) -> CliResult<Option<Package>> {
     debug!("executing; cmd=cargo-read-manifest; args={:?}",
            env::args().collect::<Vec<_>>());
-    try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..])));
+    config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..]))?;
 
-    let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd()));
+    let root = find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())?;
 
-    let pkg = try!(Package::for_path(&root, config));
+    let pkg = Package::for_path(&root, config)?;
     Ok(Some(pkg))
 }
diff --git a/src/bin/run.rs b/src/bin/run.rs
index f9ce057319ce4b58f50b41758dd661a8bc65e4a2..1de5ccbba78d4380d4bd2785e47e5c07ef8abef8 100644 (file)
@@ -58,13 +58,13 @@ the ones before go to Cargo.
 ";
 
 pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
-    try!(config.configure(options.flag_verbose,
+    config.configure(options.flag_verbose,
                           options.flag_quiet,
                           &options.flag_color,
                           options.flag_frozen,
-                          options.flag_locked));
+                          options.flag_locked)?;
 
-    let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd()));
+    let root = find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())?;
 
     let (mut examples, mut bins) = (Vec::new(), Vec::new());
     if let Some(s) = options.flag_bin {
@@ -97,8 +97,8 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
         target_rustc_args: None,
     };
 
-    let ws = try!(Workspace::new(&root, config));
-    match try!(ops::run(&ws, &compile_opts, &options.arg_args)) {
+    let ws = Workspace::new(&root, config)?;
+    match ops::run(&ws, &compile_opts, &options.arg_args)? {
         None => Ok(None),
         Some(err) => {
             // If we never actually spawned the process then that sounds pretty
diff --git a/src/bin/rustc.rs b/src/bin/rustc.rs
index 83103198496889d3c6ca444bcacce1d60d48719a..1268c7ba266ca54d55ea1c8498f23326fb172e97 100644 (file)
@@ -76,14 +76,14 @@ processes spawned by Cargo, use the $RUSTFLAGS environment variable or the
 pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
     debug!("executing; cmd=cargo-rustc; args={:?}",
            env::args().collect::<Vec<_>>());
-    try!(config.configure(options.flag_verbose,
+    config.configure(options.flag_verbose,
                           options.flag_quiet,
                           &options.flag_color,
                           options.flag_frozen,
-                          options.flag_locked));
+                          options.flag_locked)?;
 
-    let root = try!(find_root_manifest_for_wd(options.flag_manifest_path,
-                                              config.cwd()));
+    let root = find_root_manifest_for_wd(options.flag_manifest_path,
+                                              config.cwd())?;
     let mode = match options.flag_profile.as_ref().map(|t| &t[..]) {
         Some("dev") | None => CompileMode::Build,
         Some("test") => CompileMode::Test,
@@ -115,8 +115,8 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
         target_rustc_args: options.arg_opts.as_ref().map(|a| &a[..]),
     };
 
-    let ws = try!(Workspace::new(&root, config));
-    try!(ops::compile(&ws, &opts));
+    let ws = Workspace::new(&root, config)?;
+    ops::compile(&ws, &opts)?;
     Ok(None)
 }
 
diff --git a/src/bin/rustdoc.rs b/src/bin/rustdoc.rs
index df3e4886ba1b95fc9c795935284cd95de29de4cb..d993a48503acf8f3415ae374aef8ba7be5147cc8 100644 (file)
@@ -71,14 +71,14 @@ the `cargo help pkgid` command.
 ";
 
 pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
-    try!(config.configure(options.flag_verbose,
+    config.configure(options.flag_verbose,
                           options.flag_quiet,
                           &options.flag_color,
                           options.flag_frozen,
-                          options.flag_locked));
+                          options.flag_locked)?;
 
-    let root = try!(find_root_manifest_for_wd(options.flag_manifest_path,
-                                              config.cwd()));
+    let root = find_root_manifest_for_wd(options.flag_manifest_path,
+                                              config.cwd())?;
 
     let doc_opts = ops::DocOptions {
         open_result: options.flag_open,
@@ -103,8 +103,8 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
         },
     };
 
-    let ws = try!(Workspace::new(&root, config));
-    try!(ops::doc(&ws, &doc_opts));
+    let ws = Workspace::new(&root, config)?;
+    ops::doc(&ws, &doc_opts)?;
 
     Ok(None)
 }
diff --git a/src/bin/search.rs b/src/bin/search.rs
index 829039aaa0ef014da6236a07b86dc505d9753e5c..828356dc0d0702e63b7f904752bb3530b1e24748 100644 (file)
@@ -34,11 +34,11 @@ Options:
 ";
 
 pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
-    try!(config.configure(options.flag_verbose,
+    config.configure(options.flag_verbose,
                           options.flag_quiet,
                           &options.flag_color,
                           options.flag_frozen,
-                          options.flag_locked));
+                          options.flag_locked)?;
     let Options {
         flag_host: host,
         flag_limit: limit,
@@ -46,6 +46,6 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
         ..
     } = options;
 
-    try!(ops::search(&query.join("+"), config, host, cmp::min(100, limit.unwrap_or(10)) as u8));
+    ops::search(&query.join("+"), config, host, cmp::min(100, limit.unwrap_or(10)) as u8)?;
     Ok(None)
 }
diff --git a/src/bin/test.rs b/src/bin/test.rs
index ed487aa4226019ea6c296fe6a38a218f02e52f81..5ae7dee946aa963900d854f32e0c413cdedd4483 100644 (file)
@@ -89,13 +89,13 @@ To get the list of all options available for the test binaries use this:
 ";
 
 pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
-    try!(config.configure(options.flag_verbose,
+    config.configure(options.flag_verbose,
                           options.flag_quiet,
                           &options.flag_color,
                           options.flag_frozen,
-                          options.flag_locked));
+                          options.flag_locked)?;
 
-    let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd()));
+    let root = find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())?;
 
     let empty = Vec::new();
     let (mode, filter);
@@ -132,8 +132,8 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
         },
     };
 
-    let ws = try!(Workspace::new(&root, config));
-    let err = try!(ops::run_tests(&ws, &ops, &options.arg_args));
+    let ws = Workspace::new(&root, config)?;
+    let err = ops::run_tests(&ws, &ops, &options.arg_args)?;
     match err {
         None => Ok(None),
         Some(err) => {
diff --git a/src/bin/uninstall.rs b/src/bin/uninstall.rs
index 001abde41ef3137240cb7396ada4f246bd28f3a6..659702bd9f43f73ef70f0c97f14c560d26340c21 100644 (file)
@@ -38,14 +38,14 @@ only uninstall particular binaries.
 ";
 
 pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
-    try!(config.configure(options.flag_verbose,
+    config.configure(options.flag_verbose,
                           options.flag_quiet,
                           &options.flag_color,
                           options.flag_frozen,
-                          options.flag_locked));
+                          options.flag_locked)?;
 
     let root = options.flag_root.as_ref().map(|s| &s[..]);
-    try!(ops::uninstall(root, &options.arg_spec, &options.flag_bin, config));
+    ops::uninstall(root, &options.arg_spec, &options.flag_bin, config)?;
     Ok(None)
 }
 
diff --git a/src/bin/update.rs b/src/bin/update.rs
index 6d1d7935b9c1edf4ccc9f8ff070e34b47a07a076..064bb5ee991389a07ef278eab2a3a696dcf413de 100644 (file)
@@ -59,12 +59,12 @@ For more information about package id specifications, see `cargo help pkgid`.
 
 pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
     debug!("executing; cmd=cargo-update; args={:?}", env::args().collect::<Vec<_>>());
-    try!(config.configure(options.flag_verbose,
+    config.configure(options.flag_verbose,
                           options.flag_quiet,
                           &options.flag_color,
                           options.flag_frozen,
-                          options.flag_locked));
-    let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd()));
+                          options.flag_locked)?;
+    let root = find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())?;
 
     let update_opts = ops::UpdateOptions {
         aggressive: options.flag_aggressive,
@@ -73,7 +73,7 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
         config: config,
     };
 
-    let ws = try!(Workspace::new(&root, config));
-    try!(ops::update_lockfile(&ws, &update_opts));
+    let ws = Workspace::new(&root, config)?;
+    ops::update_lockfile(&ws, &update_opts)?;
     Ok(None)
 }
diff --git a/src/bin/verify_project.rs b/src/bin/verify_project.rs
index 27424f7a6813f3b561361b22c3cba0f794019235..447bf7a5dc870a0bc5cd9b5d0480034591ceb229 100644 (file)
@@ -38,11 +38,11 @@ Options:
 ";
 
 pub fn execute(args: Flags, config: &Config) -> CliResult<Option<Error>> {
-    try!(config.configure(args.flag_verbose,
+    config.configure(args.flag_verbose,
                           args.flag_quiet,
                           &args.flag_color,
                           args.flag_frozen,
-                          args.flag_locked));
+                          args.flag_locked)?;
 
     let mut contents = String::new();
     let filename = args.flag_manifest_path.unwrap_or("Cargo.toml".into());
diff --git a/src/bin/yank.rs b/src/bin/yank.rs
index 7971efbb37e2e6c44f6480e02bdb52c5e69fd9fa..bac383c4fcdcd64000f27a9f0a04d9f8c0d5741e 100644 (file)
@@ -43,17 +43,17 @@ crates to be locked to any yanked version.
 ";
 
 pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
-    try!(config.configure(options.flag_verbose,
+    config.configure(options.flag_verbose,
                           options.flag_quiet,
                           &options.flag_color,
                           options.flag_frozen,
-                          options.flag_locked));
-    try!(ops::yank(config,
+                          options.flag_locked)?;
+    ops::yank(config,
                    options.arg_crate,
                    options.flag_vers,
                    options.flag_token,
                    options.flag_index,
-                   options.flag_undo));
+                   options.flag_undo)?;
     Ok(None)
 }
 
diff --git a/src/cargo/core/dependency.rs b/src/cargo/core/dependency.rs
index b96fed73f1d1e869ce5482b70ff56a69ef18670b..23cb71034b4d73e7491d325a6723626f61672dfb 100644 (file)
@@ -102,7 +102,7 @@ impl DependencyInner {
                  deprecated_extra: Option<(&PackageId, &Config)>)
                  -> CargoResult<DependencyInner> {
         let (specified_req, version_req) = match version {
-            Some(v) => (true, try!(DependencyInner::parse_with_deprecated(v, deprecated_extra))),
+            Some(v) => (true, DependencyInner::parse_with_deprecated(v, deprecated_extra)?),
             None => (false, VersionReq::any())
         };
 
@@ -137,7 +137,7 @@ update to a fixed version or contact the upstream maintainer about
 this warning.
 ",
        req, inside.name(), inside.version(), requirement);
-                        try!(config.shell().warn(&msg));
+                        config.shell().warn(&msg)?;
 
                         Ok(requirement)
                     }
diff --git a/src/cargo/core/package.rs b/src/cargo/core/package.rs
index da14b04fc33c6d36b0328b3cfad981a337348770..6fa0bb7f3e0f8bbd15f0cc7f2f1fff83eaafab77 100644 (file)
@@ -72,9 +72,9 @@ impl Package {
 
     pub fn for_path(manifest_path: &Path, config: &Config) -> CargoResult<Package> {
         let path = manifest_path.parent().unwrap();
-        let source_id = try!(SourceId::for_path(path));
-        let (pkg, _) = try!(ops::read_package(&manifest_path, &source_id,
-                                              config));
+        let source_id = SourceId::for_path(path)?;
+        let (pkg, _) = ops::read_package(&manifest_path, &source_id,
+                                              config)?;
         Ok(pkg)
     }
 
@@ -157,20 +157,20 @@ impl<'cfg> PackageSet<'cfg> {
     }
 
     pub fn get(&self, id: &PackageId) -> CargoResult<&Package> {
-        let slot = try!(self.packages.iter().find(|p| p.0 == *id).chain_error(|| {
+        let slot = self.packages.iter().find(|p| p.0 == *id).chain_error(|| {
             internal(format!("couldn't find `{}` in package set", id))
-        }));
+        })?;
         let slot = &slot.1;
         if let Some(pkg) = slot.borrow() {
             return Ok(pkg)
         }
         let mut sources = self.sources.borrow_mut();
-        let source = try!(sources.get_mut(id.source_id()).chain_error(|| {
+        let source = sources.get_mut(id.source_id()).chain_error(|| {
             internal(format!("couldn't find source for `{}`", id))
-        }));
-        let pkg = try!(source.download(id).chain_error(|| {
+        })?;
+        let pkg = source.download(id).chain_error(|| {
             human("unable to get packages from source")
-        }));
+        })?;
         assert!(slot.fill(pkg).is_ok());
         Ok(slot.borrow().unwrap())
     }
diff --git a/src/cargo/core/package_id.rs b/src/cargo/core/package_id.rs
index b29b8ca11d5ee5d8d6d1c3848a4a07fa7b030fd4..8f1992a733e651bfa87925f20aeaa35b744bc1e1 100644 (file)
@@ -36,21 +36,21 @@ impl Encodable for PackageId {
 
 impl Decodable for PackageId {
     fn decode<D: Decoder>(d: &mut D) -> Result<PackageId, D::Error> {
-        let string: String = try!(Decodable::decode(d));
+        let string: String = Decodable::decode(d)?;
         let regex = Regex::new(r"^([^ ]+) ([^ ]+) \(([^\)]+)\)$").unwrap();
-        let captures = try!(regex.captures(&string).ok_or_else(|| {
+        let captures = regex.captures(&string).ok_or_else(|| {
             d.error("invalid serialized PackageId")
-        }));
+        })?;
 
         let name = captures.at(1).unwrap();
         let version = captures.at(2).unwrap();
         let url = captures.at(3).unwrap();
-        let version = try!(semver::Version::parse(version).map_err(|_| {
+        let version = semver::Version::parse(version).map_err(|_| {
             d.error("invalid version")
-        }));
-        let source_id = try!(SourceId::from_url(url).map_err(|e| {
+        })?;
+        let source_id = SourceId::from_url(url).map_err(|e| {
             d.error(&e.to_string())
-        }));
+        })?;
 
         Ok(PackageId {
             inner: Arc::new(PackageIdInner {
@@ -127,7 +127,7 @@ pub struct Metadata {
 impl PackageId {
     pub fn new<T: ToSemver>(name: &str, version: T,
                              sid: &SourceId) -> CargoResult<PackageId> {
-        let v = try!(version.to_semver().map_err(PackageIdError::InvalidVersion));
+        let v = version.to_semver().map_err(PackageIdError::InvalidVersion)?;
         Ok(PackageId {
             inner: Arc::new(PackageIdInner {
                 name: name.to_string(),
@@ -179,10 +179,10 @@ impl Metadata {
 
 impl fmt::Display for PackageId {
     fn fmt(&self, f: &mut Formatter) -> fmt::Result {
-        try!(write!(f, "{} v{}", self.inner.name, self.inner.version));
+        write!(f, "{} v{}", self.inner.name, self.inner.version)?;
 
         if !self.inner.source_id.is_default_registry() {
-            try!(write!(f, " ({})", self.inner.source_id));
+            write!(f, " ({})", self.inner.source_id)?;
         }
 
         Ok(())
diff --git a/src/cargo/core/package_id_spec.rs b/src/cargo/core/package_id_spec.rs
index a07b69e49f7ad0318c8f44232a733782c1ca163b..2af87fb6c01bdea1876c4bdad385254e0574bcb5 100644 (file)
@@ -29,7 +29,7 @@ impl PackageIdSpec {
         let mut parts = spec.splitn(2, ':');
         let name = parts.next().unwrap();
         let version = match parts.next() {
-            Some(version) => Some(try!(Version::parse(version).map_err(human))),
+            Some(version) => Some(Version::parse(version).map_err(human)?),
             None => None,
         };
         for ch in name.chars() {
@@ -47,9 +47,9 @@ impl PackageIdSpec {
     pub fn query_str<'a, I>(spec: &str, i: I) -> CargoResult<&'a PackageId>
         where I: IntoIterator<Item=&'a PackageId>
     {
-        let spec = try!(PackageIdSpec::parse(spec).chain_error(|| {
+        let spec = PackageIdSpec::parse(spec).chain_error(|| {
             human(format!("invalid package id specification: `{}`", spec))
-        }));
+        })?;
         spec.query(i)
     }
 
@@ -68,20 +68,20 @@ impl PackageIdSpec {
         let frag = url.fragment().map(|s| s.to_owned());
         url.set_fragment(None);
         let (name, version) = {
-            let mut path = try!(url.path_segments().chain_error(|| {
+            let mut path = url.path_segments().chain_error(|| {
                 human(format!("pkgid urls must have a path: {}", url))
-            }));
-            let path_name = try!(path.next_back().chain_error(|| {
+            })?;
+            let path_name = path.next_back().chain_error(|| {
                 human(format!("pkgid urls must have at least one path \
                                component: {}", url))
-            }));
+            })?;
             match frag {
                 Some(fragment) => {
                     let mut parts = fragment.splitn(2, ':');
                     let name_or_version = parts.next().unwrap();
                     match parts.next() {
                         Some(part) => {
-                            let version = try!(part.to_semver().map_err(human));
+                            let version = part.to_semver().map_err(human)?;
                             (name_or_version.to_string(), Some(version))
                         }
                         None => {
@@ -89,8 +89,8 @@ impl PackageIdSpec {
                                               .is_alphabetic() {
                                 (name_or_version.to_string(), None)
                             } else {
-                                let version = try!(name_or_version.to_semver()
-                                                                  .map_err(human));
+                                let version = name_or_version.to_semver()
+                                                                  .map_err(human)?;
                                 (path_name.to_string(), Some(version))
                             }
                         }
@@ -180,20 +180,20 @@ impl fmt::Display for PackageIdSpec {
         match self.url {
             Some(ref url) => {
                 if url.scheme() == "cargo" {
-                    try!(write!(f, "{}{}", url.host().unwrap(), url.path()));
+                    write!(f, "{}{}", url.host().unwrap(), url.path())?;
                 } else {
-                    try!(write!(f, "{}", url));
+                    write!(f, "{}", url)?;
                 }
                 if url.path_segments().unwrap().next_back().unwrap() != &self.name {
                     printed_name = true;
-                    try!(write!(f, "#{}", self.name));
+                    write!(f, "#{}", self.name)?;
                 }
             }
-            None => { printed_name = true; try!(write!(f, "{}", self.name)) }
+            None => { printed_name = true; write!(f, "{}", self.name)? }
         }
         match self.version {
             Some(ref v) => {
-                try!(write!(f, "{}{}", if printed_name {":"} else {"#"}, v));
+                write!(f, "{}{}", if printed_name {":"} else {"#"}, v)?;
             }
             None => {}
         }
diff --git a/src/cargo/core/registry.rs b/src/cargo/core/registry.rs
index 5b7c21ac35e41fc008148bc4d098721759a35877..037b87a8b62c34a8ff3b1eb4991b1d4dc6d99837 100644 (file)
@@ -91,7 +91,7 @@ enum Kind {
 
 impl<'cfg> PackageRegistry<'cfg> {
     pub fn new(config: &'cfg Config) -> CargoResult<PackageRegistry<'cfg>> {
-        let source_config = try!(SourceConfigMap::new(config));
+        let source_config = SourceConfigMap::new(config)?;
         Ok(PackageRegistry {
             sources: SourceMap::new(),
             source_ids: HashMap::new(),
@@ -138,13 +138,13 @@ impl<'cfg> PackageRegistry<'cfg> {
             }
         }
 
-        try!(self.load(namespace, kind));
+        self.load(namespace, kind)?;
         Ok(())
     }
 
     pub fn add_sources(&mut self, ids: &[SourceId]) -> CargoResult<()> {
         for id in ids.iter() {
-            try!(self.ensure_loaded(id, Kind::Locked));
+            self.ensure_loaded(id, Kind::Locked)?;
         }
         Ok(())
     }
@@ -178,7 +178,7 @@ impl<'cfg> PackageRegistry<'cfg> {
 
     fn load(&mut self, source_id: &SourceId, kind: Kind) -> CargoResult<()> {
         (|| {
-            let source = try!(self.source_config.load(source_id));
+            let source = self.source_config.load(source_id)?;
 
             if kind == Kind::Override {
                 self.overrides.push(source_id.clone());
@@ -196,7 +196,7 @@ impl<'cfg> PackageRegistry<'cfg> {
         for s in self.overrides.iter() {
             let src = self.sources.get_mut(s).unwrap();
             let dep = Dependency::new_override(dep.name(), s);
-            let mut results = try!(src.query(&dep));
+            let mut results = src.query(&dep)?;
             if results.len() > 0 {
                 return Ok(Some(results.remove(0)))
             }
@@ -291,17 +291,17 @@ impl<'cfg> PackageRegistry<'cfg> {
                          override_summary: &Summary,
                          real_summary: &Summary) -> CargoResult<()> {
         let real = real_summary.package_id();
-        let map = try!(self.locked.get(real.source_id()).chain_error(|| {
+        let map = self.locked.get(real.source_id()).chain_error(|| {
             human(format!("failed to find lock source of {}", real))
-        }));
-        let list = try!(map.get(real.name()).chain_error(|| {
+        })?;
+        let list = map.get(real.name()).chain_error(|| {
             human(format!("failed to find lock name of {}", real))
-        }));
-        let &(_, ref real_deps) = try!(list.iter().find(|&&(ref id, _)| {
+        })?;
+        let &(_, ref real_deps) = list.iter().find(|&&(ref id, _)| {
             real == id
         }).chain_error(|| {
             human(format!("failed to find lock version of {}", real))
-        }));
+        })?;
         let mut real_deps = real_deps.clone();
 
         let boilerplate = "\
@@ -327,7 +327,7 @@ http://doc.crates.io/specifying-dependencies.html#overriding-dependencies
                 dependencies; the dependency on `{}` was either added or\n\
                 modified to not match the previously resolved version\n\n\
                 {}", override_summary.package_id().name(), dep.name(), boilerplate);
-            try!(self.source_config.config().shell().warn(&msg));
+            self.source_config.config().shell().warn(&msg)?;
             return Ok(())
         }
 
@@ -336,7 +336,7 @@ http://doc.crates.io/specifying-dependencies.html#overriding-dependencies
                 path override for crate `{}` has altered the original list of
                 dependencies; the dependency on `{}` was removed\n\n
                 {}", override_summary.package_id().name(), id.name(), boilerplate);
-            try!(self.source_config.config().shell().warn(&msg));
+            self.source_config.config().shell().warn(&msg)?;
             return Ok(())
         }
 
@@ -347,14 +347,14 @@ http://doc.crates.io/specifying-dependencies.html#overriding-dependencies
 impl<'cfg> Registry for PackageRegistry<'cfg> {
     fn query(&mut self, dep: &Dependency) -> CargoResult<Vec<Summary>> {
         // Ensure the requested source_id is loaded
-        try!(self.ensure_loaded(dep.source_id(), Kind::Normal).chain_error(|| {
+        self.ensure_loaded(dep.source_id(), Kind::Normal).chain_error(|| {
             human(format!("failed to load source for a dependency \
                            on `{}`", dep.name()))
-        }));
+        })?;
 
-        let override_summary = try!(self.query_overrides(&dep));
+        let override_summary = self.query_overrides(&dep)?;
         let real_summaries = match self.sources.get_mut(dep.source_id()) {
-            Some(src) => Some(try!(src.query(&dep))),
+            Some(src) => Some(src.query(&dep)?),
             None => None,
         };
 
@@ -363,7 +363,7 @@ impl<'cfg> Registry for PackageRegistry<'cfg> {
                 if summaries.len() != 1 {
                     bail!("found an override with a non-locked list");
                 }
-                try!(self.warn_bad_override(&candidate, &summaries[0]));
+                self.warn_bad_override(&candidate, &summaries[0])?;
                 vec![candidate]
             }
             (Some(_), None) => bail!("override found but no real ones"),
diff --git a/src/cargo/core/resolver/encode.rs b/src/cargo/core/resolver/encode.rs
index 2e67ef6e5c7a166f9327995a2119f124500e3c74..47c20b89beaa08d52bdd75dbcafa57663ecafbfc 100644 (file)
@@ -52,7 +52,7 @@ impl EncodableResolve {
                     // We failed to find a local package in the workspace.
                     // It must have been removed and should be ignored.
                     None => continue,
-                    Some(source) => try!(PackageId::new(&pkg.name, &pkg.version, source))
+                    Some(source) => PackageId::new(&pkg.name, &pkg.version, source)?
                 };
 
                 assert!(live_pkgs.insert(enc_id, (id, pkg)).is_none())
@@ -88,7 +88,7 @@ impl EncodableResolve {
                 };
 
                 for edge in deps.iter() {
-                    if let Some(to_depend_on) = try!(lookup_id(edge)) {
+                    if let Some(to_depend_on) = lookup_id(edge)? {
                         g.link(id.clone(), to_depend_on);
                     }
                 }
@@ -101,7 +101,7 @@ impl EncodableResolve {
             for &(ref id, ref pkg) in live_pkgs.values() {
                 if let Some(ref replace) = pkg.replace {
                     assert!(pkg.dependencies.is_none());
-                    if let Some(replace_id) = try!(lookup_id(replace)) {
+                    if let Some(replace_id) = lookup_id(replace)? {
                         replacements.insert(id.clone(), replace_id);
                     }
                 }
@@ -132,9 +132,9 @@ impl EncodableResolve {
         for (k, v) in metadata.iter().filter(|p| p.0.starts_with(prefix)) {
             to_remove.push(k.to_string());
             let k = &k[prefix.len()..];
-            let enc_id: EncodablePackageId = try!(k.parse().chain_error(|| {
+            let enc_id: EncodablePackageId = k.parse().chain_error(|| {
                 internal("invalid encoding of checksum in lockfile")
-            }));
+            })?;
             let id = match lookup_id(&enc_id) {
                 Ok(Some(id)) => id,
                 _ => continue,
@@ -222,9 +222,9 @@ pub struct EncodablePackageId {
 
 impl fmt::Display for EncodablePackageId {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        try!(write!(f, "{} {}", self.name, self.version));
+        write!(f, "{} {}", self.name, self.version)?;
         if let Some(ref s) = self.source {
-            try!(write!(f, " ({})", s.to_url()));
+            write!(f, " ({})", s.to_url())?;
         }
         Ok(())
     }
@@ -235,15 +235,15 @@ impl FromStr for EncodablePackageId {
 
     fn from_str(s: &str) -> CargoResult<EncodablePackageId> {
         let regex = Regex::new(r"^([^ ]+) ([^ ]+)(?: \(([^\)]+)\))?$").unwrap();
-        let captures = try!(regex.captures(s).ok_or_else(|| {
+        let captures = regex.captures(s).ok_or_else(|| {
             internal("invalid serialized PackageId")
-        }));
+        })?;
 
         let name = captures.at(1).unwrap();
         let version = captures.at(2).unwrap();
 
         let source_id = match captures.at(3) {
-            Some(s) => Some(try!(SourceId::from_url(s))),
+            Some(s) => Some(SourceId::from_url(s)?),
             None => None,
         };
 
diff --git a/src/cargo/core/resolver/mod.rs b/src/cargo/core/resolver/mod.rs
index 9cc723d69dc90e747255626018bd22637d94fe7a..b67e32d56dc83492b233823f243e3eb982b5af58 100644 (file)
@@ -226,10 +226,10 @@ unable to verify that `{0}` is the same as when the lockfile was generated
 
 impl fmt::Debug for Resolve {
     fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
-        try!(write!(fmt, "graph: {:?}\n", self.graph));
-        try!(write!(fmt, "\nfeatures: {{\n"));
+        write!(fmt, "graph: {:?}\n", self.graph)?;
+        write!(fmt, "\nfeatures: {{\n")?;
         for (pkg, features) in &self.features {
-            try!(write!(fmt, "  {}: {:?}\n", pkg, features));
+            write!(fmt, "  {}: {:?}\n", pkg, features)?;
         }
         write!(fmt, "}}")
     }
@@ -274,7 +274,7 @@ pub fn resolve(summaries: &[(Summary, Method)],
         replacements: replacements,
     };
     let _p = profile::start(format!("resolving"));
-    let cx = try!(activate_deps_loop(cx, registry, summaries));
+    let cx = activate_deps_loop(cx, registry, summaries)?;
 
     let mut resolve = Resolve {
         graph: cx.resolve_graph,
@@ -289,7 +289,7 @@ pub fn resolve(summaries: &[(Summary, Method)],
         resolve.checksums.insert(summary.package_id().clone(), cksum);
     }
 
-    try!(check_cycles(&resolve, &cx.activations));
+    check_cycles(&resolve, &cx.activations)?;
 
     trace!("resolved: {:?}", resolve);
     Ok(resolve)
@@ -333,7 +333,7 @@ fn activate(cx: &mut Context,
         }
     };
 
-    let deps = try!(cx.build_deps(registry, &candidate, method));
+    let deps = cx.build_deps(registry, &candidate, method)?;
 
     Ok(Some(DepsFrame {
         parent: candidate,
@@ -450,8 +450,8 @@ fn activate_deps_loop<'a>(mut cx: Context<'a>,
         debug!("initial activation: {}", summary.package_id());
         let summary = Rc::new(summary.clone());
         let candidate = Candidate { summary: summary, replace: None };
-        remaining_deps.extend(try!(activate(&mut cx, registry, None, candidate,
-                                            method)));
+        remaining_deps.extend(activate(&mut cx, registry, None, candidate,
+                                            method)?);
     }
 
     // Main resolution loop, this is the workhorse of the resolution algorithm.
@@ -558,8 +558,8 @@ fn activate_deps_loop<'a>(mut cx: Context<'a>,
         };
         trace!("{}[{}]>{} trying {}", parent.name(), cur, dep.name(),
                candidate.summary.version());
-        remaining_deps.extend(try!(activate(&mut cx, registry, Some(&parent),
-                                            candidate, &method)));
+        remaining_deps.extend(activate(&mut cx, registry, Some(&parent),
+                                            candidate, &method)?);
     }
 
     Ok(cx)
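
As in `remaining_deps.extend(activate(...)?)` above, `?` may sit inside an argument expression; on Err the function returns before the outer call runs at all. A minimal sketch (hypothetical names, not the resolver's code):

    fn parse_all(raw: &[&str]) -> Result<Vec<i32>, std::num::ParseIntError> {
        let mut out = Vec::new();
        for r in raw {
            // If parse fails, the whole function returns Err before extend()
            // is called.
            out.extend(Some(r.parse::<i32>()?));
        }
        Ok(out)
    }

    fn main() {
        assert_eq!(parse_all(&["1", "2"]).unwrap(), vec![1, 2]);
        assert!(parse_all(&["1", "nope"]).is_err());
    }
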
@@ -709,16 +709,16 @@ fn build_features(s: &Summary, method: &Method)
     match *method {
         Method::Everything => {
             for key in s.features().keys() {
-                try!(add_feature(s, key, &mut deps, &mut used, &mut visited));
+                add_feature(s, key, &mut deps, &mut used, &mut visited)?;
             }
             for dep in s.dependencies().iter().filter(|d| d.is_optional()) {
-                try!(add_feature(s, dep.name(), &mut deps, &mut used,
-                                 &mut visited));
+                add_feature(s, dep.name(), &mut deps, &mut used,
+                                 &mut visited)?;
             }
         }
         Method::Required { features: requested_features, .. } =>  {
             for feat in requested_features.iter() {
-                try!(add_feature(s, feat, &mut deps, &mut used, &mut visited));
+                add_feature(s, feat, &mut deps, &mut used, &mut visited)?;
             }
         }
     }
@@ -726,8 +726,8 @@ fn build_features(s: &Summary, method: &Method)
         Method::Everything |
         Method::Required { uses_default_features: true, .. } => {
             if s.features().get("default").is_some() {
-                try!(add_feature(s, "default", &mut deps, &mut used,
-                                 &mut visited));
+                add_feature(s, "default", &mut deps, &mut used,
+                                 &mut visited)?;
             }
         }
         Method::Required { uses_default_features: false, .. } => {}
@@ -765,7 +765,7 @@ fn build_features(s: &Summary, method: &Method)
                 match s.features().get(feat) {
                     Some(recursive) => {
                         for f in recursive {
-                            try!(add_feature(s, f, deps, used, visited));
+                            add_feature(s, f, deps, used, visited)?;
                         }
                     }
                     None => {
@@ -820,19 +820,19 @@ impl<'a> Context<'a> {
         // First, figure out our set of dependencies based on the requested set
         // of features. This also calculates what features we're going to enable
         // for our own dependencies.
-        let deps = try!(self.resolve_features(candidate, method));
+        let deps = self.resolve_features(candidate, method)?;
 
         // Next, transform all dependencies into a list of possible candidates
         // which can satisfy that dependency.
-        let mut deps = try!(deps.into_iter().map(|(dep, features)| {
-            let mut candidates = try!(self.query(registry, &dep));
+        let mut deps = deps.into_iter().map(|(dep, features)| {
+            let mut candidates = self.query(registry, &dep)?;
             // When we attempt versions for a package, we'll want to start at
             // the maximum version and work our way down.
             candidates.sort_by(|a, b| {
                 b.summary.version().cmp(a.summary.version())
             });
             Ok((dep, candidates, features))
-        }).collect::<CargoResult<Vec<DepInfo>>>());
+        }).collect::<CargoResult<Vec<DepInfo>>>()?;
 
         // Attempt to resolve dependencies with fewer candidates before trying
         // dependencies with more candidates.  This way if the dependency with
@@ -852,7 +852,7 @@ impl<'a> Context<'a> {
     fn query(&self,
              registry: &mut Registry,
              dep: &Dependency) -> CargoResult<Vec<Candidate>> {
-        let summaries = try!(registry.query(dep));
+        let summaries = registry.query(dep)?;
         summaries.into_iter().map(Rc::new).map(|summary| {
             // get around lack of non-lexical lifetimes
             let summary2 = summary.clone();
@@ -866,13 +866,13 @@ impl<'a> Context<'a> {
             };
             debug!("found an override for {} {}", dep.name(), dep.version_req());
 
-            let mut summaries = try!(registry.query(dep)).into_iter();
-            let s = try!(summaries.next().chain_error(|| {
+            let mut summaries = registry.query(dep)?.into_iter();
+            let s = summaries.next().chain_error(|| {
                 human(format!("no matching package for override `{}` found\n\
                                location searched: {}\n\
                                version required: {}",
                               spec, dep.source_id(), dep.version_req()))
-            }));
+            })?;
             let summaries = summaries.collect::<Vec<_>>();
             if summaries.len() > 0 {
                 let bullets = summaries.iter().map(|s| {
@@ -928,8 +928,8 @@ impl<'a> Context<'a> {
         let deps = candidate.dependencies();
         let deps = deps.iter().filter(|d| d.is_transitive() || dev_deps);
 
-        let (mut feature_deps, used_features) = try!(build_features(candidate,
-                                                                    method));
+        let (mut feature_deps, used_features) = build_features(candidate,
+                                                                    method)?;
         let mut ret = Vec::new();
 
         // Next, sanitize all requested features by whitelisting all the
@@ -988,11 +988,11 @@ fn check_cycles(resolve: &Resolve,
     let mut checked = HashSet::new();
     for pkg in all_packages {
         if !checked.contains(pkg) {
-            try!(visit(resolve,
+            visit(resolve,
                        pkg,
                        &summaries,
                        &mut HashSet::new(),
-                       &mut checked))
+                       &mut checked)?
         }
     }
     return Ok(());
@@ -1024,7 +1024,7 @@ fn check_cycles(resolve: &Resolve,
                 });
                 let mut empty = HashSet::new();
                 let visited = if is_transitive {&mut *visited} else {&mut empty};
-                try!(visit(resolve, dep, summaries, visited, checked));
+                visit(resolve, dep, summaries, visited, checked)?;
             }
         }
 
index b7996a36a1e2764ef2bd39872ac3349b139c05c7..a28307874673285aebfff542544a3b629d53d955 100644 (file)
@@ -163,7 +163,7 @@ impl Shell {
     fn get_term(out: Box<Write + Send>) -> CargoResult<AdequateTerminal> {
         // Check if the creation of a console will succeed
         if ::term::WinConsole::new(vec![0u8; 0]).is_ok() {
-            let t = try!(::term::WinConsole::new(out));
+            let t = ::term::WinConsole::new(out)?;
             if !t.supports_color() {
                 Ok(NoColor(Box::new(t)))
             } else {
@@ -206,11 +206,11 @@ impl Shell {
     }
 
     pub fn say<T: ToString>(&mut self, message: T, color: Color) -> CargoResult<()> {
-        try!(self.reset());
-        if color != BLACK { try!(self.fg(color)); }
-        try!(write!(self, "{}\n", message.to_string()));
-        try!(self.reset());
-        try!(self.flush());
+        self.reset()?;
+        if color != BLACK { self.fg(color)?; }
+        write!(self, "{}\n", message.to_string())?;
+        self.reset()?;
+        self.flush()?;
         Ok(())
     }
 
@@ -222,17 +222,17 @@ impl Shell {
                             -> CargoResult<()>
         where T: fmt::Display, U: fmt::Display
     {
-        try!(self.reset());
-        if color != BLACK { try!(self.fg(color)); }
-        if self.supports_attr(Attr::Bold) { try!(self.attr(Attr::Bold)); }
+        self.reset()?;
+        if color != BLACK { self.fg(color)?; }
+        if self.supports_attr(Attr::Bold) { self.attr(Attr::Bold)?; }
         if justified {
-            try!(write!(self, "{:>12}", status.to_string()));
+            write!(self, "{:>12}", status.to_string())?;
         } else {
-            try!(write!(self, "{}", status));
+            write!(self, "{}", status)?;
         }
-        try!(self.reset());
-        try!(write!(self, " {}\n", message));
-        try!(self.flush());
+        self.reset()?;
+        write!(self, " {}\n", message)?;
+        self.flush()?;
         Ok(())
     }
 
@@ -240,7 +240,7 @@ impl Shell {
         let colored = self.colored();
 
         match self.terminal {
-            Colored(ref mut c) if colored => try!(c.fg(color)),
+            Colored(ref mut c) if colored => c.fg(color)?,
             _ => return Ok(false),
         }
         Ok(true)
@@ -250,7 +250,7 @@ impl Shell {
         let colored = self.colored();
 
         match self.terminal {
-            Colored(ref mut c) if colored => try!(c.attr(attr)),
+            Colored(ref mut c) if colored => c.attr(attr)?,
             _ => return Ok(false)
         }
         Ok(true)
@@ -269,7 +269,7 @@ impl Shell {
         let colored = self.colored();
 
         match self.terminal {
-            Colored(ref mut c) if colored => try!(c.reset()),
+            Colored(ref mut c) if colored => c.reset()?,
             _ => ()
         }
         Ok(())
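
Shell::say and Shell::status above thread every write!/reset/flush through `?`; the same shape works against any Write sink. A standalone sketch under made-up names:

    use std::io::{self, Write};

    fn print_listing<W: Write>(out: &mut W, entries: &[(&str, &[&str])]) -> io::Result<()> {
        for &(pkg, bins) in entries {
            writeln!(out, "{}:", pkg)?;   // any io::Error propagates to the caller
            for bin in bins {
                writeln!(out, "    {}", bin)?;
            }
        }
        Ok(())
    }

    fn main() {
        let mut buf = Vec::new();
        print_listing(&mut buf, &[("demo v0.1.0", &["demo"][..])]).unwrap();
        print!("{}", String::from_utf8_lossy(&buf));
    }
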
index 4a46c19294d26677af0d5842604bf1a935caf069..91d2acff789d8715f0b03a8be86e7f7df8a91194 100644 (file)
@@ -131,11 +131,11 @@ impl SourceId {
     pub fn from_url(string: &str) -> CargoResult<SourceId> {
         let mut parts = string.splitn(2, '+');
         let kind = parts.next().unwrap();
-        let url = try!(parts.next().ok_or(human(format!("invalid source `{}`", string))));
+        let url = parts.next().ok_or(human(format!("invalid source `{}`", string)))?;
 
         match kind {
             "git" => {
-                let mut url = try!(url.to_url());
+                let mut url = url.to_url()?;
                 let mut reference = GitReference::Branch("master".to_string());
                 for (k, v) in url.query_pairs() {
                     match &k[..] {
@@ -154,12 +154,12 @@ impl SourceId {
                 Ok(SourceId::for_git(&url, reference).with_precise(precise))
             },
             "registry" => {
-                let url = try!(url.to_url());
+                let url = url.to_url()?;
                 Ok(SourceId::new(Kind::Registry, url)
                             .with_precise(Some("locked".to_string())))
             }
             "path" => {
-                let url = try!(url.to_url());
+                let url = url.to_url()?;
                 Ok(SourceId::new(Kind::Path, url))
             }
             kind => Err(human(format!("unsupported source protocol: {}", kind)))
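
The `parts.next().ok_or(...)?` line above shows the usual way to fold an Option into the error path; a minimal sketch of the same splitn pattern, using a plain String error instead of Cargo's `human`:

    fn split_kind_and_url(s: &str) -> Result<(&str, &str), String> {
        let mut parts = s.splitn(2, '+');
        let kind = parts.next().unwrap();
        // The missing-half case becomes an Err that `?` can propagate.
        let url = parts
            .next()
            .ok_or_else(|| format!("invalid source `{}`", s))?;
        Ok((kind, url))
    }

    fn main() {
        assert_eq!(split_kind_and_url("git+https://example.invalid/repo").unwrap(),
                   ("git", "https://example.invalid/repo"));
        assert!(split_kind_and_url("no-plus-here").is_err());
    }
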
@@ -198,7 +198,7 @@ impl SourceId {
 
     // Pass absolute path
     pub fn for_path(path: &Path) -> CargoResult<SourceId> {
-        let url = try!(path.to_url());
+        let url = path.to_url()?;
         Ok(SourceId::new(Kind::Path, url))
     }
 
@@ -211,12 +211,12 @@ impl SourceId {
     }
 
     pub fn for_local_registry(path: &Path) -> CargoResult<SourceId> {
-        let url = try!(path.to_url());
+        let url = path.to_url()?;
         Ok(SourceId::new(Kind::LocalRegistry, url))
     }
 
     pub fn for_directory(path: &Path) -> CargoResult<SourceId> {
-        let url = try!(path.to_url());
+        let url = path.to_url()?;
         Ok(SourceId::new(Kind::Directory, url))
     }
 
@@ -225,20 +225,20 @@ impl SourceId {
     /// This is the main cargo registry by default, but it can be overridden in
     /// a `.cargo/config`.
     pub fn crates_io(config: &Config) -> CargoResult<SourceId> {
-        let cfg = try!(ops::registry_configuration(config));
+        let cfg = ops::registry_configuration(config)?;
         let url = if let Some(ref index) = cfg.index {
             static WARNED: AtomicBool = ATOMIC_BOOL_INIT;
             if !WARNED.swap(true, SeqCst) {
-                try!(config.shell().warn("custom registry support via \
+                config.shell().warn("custom registry support via \
                                           the `registry.index` configuration is \
                                           being removed, this functionality \
-                                          will not work in the future"));
+                                          will not work in the future")?;
             }
             &index[..]
         } else {
             CRATES_IO
         };
-        let url = try!(url.to_url());
+        let url = url.to_url()?;
         Ok(SourceId::for_registry(&url))
     }
 
@@ -348,7 +348,7 @@ impl Encodable for SourceId {
 
 impl Decodable for SourceId {
     fn decode<D: Decoder>(d: &mut D) -> Result<SourceId, D::Error> {
-        let string: String = try!(Decodable::decode(d));
+        let string: String = Decodable::decode(d)?;
         SourceId::from_url(&string).map_err(|e| {
             d.error(&e.to_string())
         })
@@ -363,11 +363,11 @@ impl fmt::Display for SourceId {
             }
             SourceIdInner { kind: Kind::Git(ref reference), ref url,
                             ref precise, .. } => {
-                try!(write!(f, "{}{}", url, reference.url_ref()));
+                write!(f, "{}{}", url, reference.url_ref())?;
 
                 if let Some(ref s) = *precise {
                     let len = cmp::min(s.len(), 8);
-                    try!(write!(f, "#{}", &s[..len]));
+                    write!(f, "#{}", &s[..len])?;
                 }
                 Ok(())
             }
index 65f1537647c7d535c507c8dbd2f8a5017fd955d5..95b2cd67fdb46d4a38096690946042f14d70945c 100644 (file)
@@ -82,7 +82,7 @@ impl<'cfg> Workspace<'cfg> {
     /// before returning it, so `Ok` is only returned for valid workspaces.
     pub fn new(manifest_path: &Path, config: &'cfg Config)
                -> CargoResult<Workspace<'cfg>> {
-        let target_dir = try!(config.target_dir());
+        let target_dir = config.target_dir()?;
 
         let mut ws = Workspace {
             config: config,
@@ -95,9 +95,9 @@ impl<'cfg> Workspace<'cfg> {
             target_dir: target_dir,
             members: Vec::new(),
         };
-        ws.root_manifest = try!(ws.find_root(manifest_path));
-        try!(ws.find_members());
-        try!(ws.validate());
+        ws.root_manifest = ws.find_root(manifest_path)?;
+        ws.find_members()?;
+        ws.validate()?;
         Ok(ws)
     }
 
@@ -130,7 +130,7 @@ impl<'cfg> Workspace<'cfg> {
             ws.target_dir = if let Some(dir) = target_dir {
                 Some(dir)
             } else {
-                try!(ws.config.target_dir())
+                ws.config.target_dir()?
             };
             ws.members.push(ws.current_manifest.clone());
         }
@@ -221,7 +221,7 @@ impl<'cfg> Workspace<'cfg> {
     fn find_root(&mut self, manifest_path: &Path)
                  -> CargoResult<Option<PathBuf>> {
         {
-            let current = try!(self.packages.load(&manifest_path));
+            let current = self.packages.load(&manifest_path)?;
             match *current.workspace_config() {
                 WorkspaceConfig::Root { .. } => {
                     debug!("find_root - is root {}", manifest_path.display());
@@ -274,7 +274,7 @@ impl<'cfg> Workspace<'cfg> {
             }
         };
         let members = {
-            let root = try!(self.packages.load(&root_manifest));
+            let root = self.packages.load(&root_manifest)?;
             match *root.workspace_config() {
                 WorkspaceConfig::Root { ref members } => members.clone(),
                 _ => bail!("root of a workspace inferred but wasn't a root: {}",
@@ -286,7 +286,7 @@ impl<'cfg> Workspace<'cfg> {
             let root = root_manifest.parent().unwrap();
             for path in list {
                 let manifest_path = root.join(path).join("Cargo.toml");
-                try!(self.find_path_deps(&manifest_path));
+                self.find_path_deps(&manifest_path)?;
             }
         }
 
@@ -302,7 +302,7 @@ impl<'cfg> Workspace<'cfg> {
         self.members.push(manifest_path.to_path_buf());
 
         let candidates = {
-            let pkg = match *try!(self.packages.load(manifest_path)) {
+            let pkg = match *self.packages.load(manifest_path)? {
                 MaybePackage::Package(ref p) => p,
                 MaybePackage::Virtual(_) => return Ok(()),
             };
@@ -315,7 +315,7 @@ impl<'cfg> Workspace<'cfg> {
                .collect::<Vec<_>>()
         };
         for candidate in candidates {
-            try!(self.find_path_deps(&candidate));
+            self.find_path_deps(&candidate)?;
         }
         Ok(())
     }
@@ -373,7 +373,7 @@ impl<'cfg> Workspace<'cfg> {
         }
 
         for member in self.members.clone() {
-            let root = try!(self.find_root(&member));
+            let root = self.find_root(&member)?;
             if root == self.root_manifest {
                 continue
             }
@@ -462,7 +462,7 @@ impl<'cfg> Workspace<'cfg> {
                                            root_manifest.display());
 
                     //TODO: remove `Eq` bound from `Profiles` when the warning is removed.
-                    try!(self.config.shell().warn(&message));
+                    self.config.shell().warn(&message)?;
                 }
             }
         }
@@ -481,9 +481,9 @@ impl<'cfg> Packages<'cfg> {
         match self.packages.entry(key.to_path_buf()) {
             Entry::Occupied(e) => Ok(e.into_mut()),
             Entry::Vacant(v) => {
-                let source_id = try!(SourceId::for_path(key));
-                let pair = try!(ops::read_manifest(&manifest_path, &source_id,
-                                                   self.config));
+                let source_id = SourceId::for_path(key)?;
+                let pair = ops::read_manifest(&manifest_path, &source_id,
+                                                   self.config)?;
                 let (manifest, _nested_paths) = pair;
                 Ok(v.insert(match manifest {
                     EitherManifest::Real(manifest) => {
index d1a8f908866371f2763663e1e9fc68e0dc75c39e..33098ee174918ab3b72c587ab01c33477f2f9681 100644 (file)
@@ -67,7 +67,7 @@ pub fn call_main_without_stdin<T, V>(
             options_first: bool) -> CliResult<Option<V>>
     where V: Encodable, T: Decodable
 {
-    let flags = try!(flags_from_args::<T>(usage, args, options_first));
+    let flags = flags_from_args::<T>(usage, args, options_first)?;
     exec(flags, config)
 }
 
@@ -77,7 +77,7 @@ fn process<V, F>(mut callback: F)
 {
     let mut config = None;
     let result = (|| {
-        config = Some(try!(Config::default()));
+        config = Some(Config::default()?);
         let args: Vec<_> = try!(env::args_os().map(|s| {
             s.into_string().map_err(|s| {
                 human(format!("invalid unicode in argument: {:?}", s))
index 6d544c19be24fc980662892d423094f8117ee7a9..4f6e6cf03531cbfc91430553d3cdb49554030037 100644 (file)
@@ -28,26 +28,26 @@ pub fn clean(ws: &Workspace, opts: &CleanOptions) -> CargoResult<()> {
         return rm_rf(&target_dir);
     }
 
-    let mut registry = try!(PackageRegistry::new(opts.config));
-    let resolve = try!(ops::resolve_ws(&mut registry, ws));
+    let mut registry = PackageRegistry::new(opts.config)?;
+    let resolve = ops::resolve_ws(&mut registry, ws)?;
     let packages = ops::get_resolved_packages(&resolve, registry);
 
     let profiles = ws.profiles();
-    let host_triple = try!(opts.config.rustc()).host.clone();
-    let mut cx = try!(Context::new(ws, &resolve, &packages, opts.config,
+    let host_triple = opts.config.rustc()?.host.clone();
+    let mut cx = Context::new(ws, &resolve, &packages, opts.config,
                                    BuildConfig {
                                        host_triple: host_triple,
                                        requested_target: opts.target.map(|s| s.to_owned()),
                                        release: opts.release,
                                        ..BuildConfig::default()
                                    },
-                                   profiles));
+                                   profiles)?;
     let mut units = Vec::new();
 
     for spec in opts.spec {
         // Translate the spec to a Package
-        let pkgid = try!(resolve.query(spec));
-        let pkg = try!(packages.get(&pkgid));
+        let pkgid = resolve.query(spec)?;
+        let pkg = packages.get(&pkgid)?;
 
         // Generate all relevant `Unit` targets for this package
         for target in pkg.targets() {
@@ -70,16 +70,16 @@ pub fn clean(ws: &Workspace, opts: &CleanOptions) -> CargoResult<()> {
         }
     }
 
-    try!(cx.probe_target_info(&units));
+    cx.probe_target_info(&units)?;
 
     for unit in units.iter() {
         let layout = cx.layout(unit);
-        try!(rm_rf(&layout.proxy().fingerprint(&unit.pkg)));
-        try!(rm_rf(&layout.build(&unit.pkg)));
+        rm_rf(&layout.proxy().fingerprint(&unit.pkg))?;
+        rm_rf(&layout.build(&unit.pkg))?;
 
         let root = cx.out_dir(&unit);
-        for (filename, _) in try!(cx.target_filenames(&unit)) {
-            try!(rm_rf(&root.join(&filename)));
+        for (filename, _) in cx.target_filenames(&unit)? {
+            rm_rf(&root.join(&filename))?;
         }
     }
 
@@ -89,13 +89,13 @@ pub fn clean(ws: &Workspace, opts: &CleanOptions) -> CargoResult<()> {
 fn rm_rf(path: &Path) -> CargoResult<()> {
     let m = fs::metadata(path);
     if m.as_ref().map(|s| s.is_dir()).unwrap_or(false) {
-        try!(fs::remove_dir_all(path).chain_error(|| {
+        fs::remove_dir_all(path).chain_error(|| {
             human("could not remove build directory")
-        }));
+        })?;
     } else if m.is_ok() {
-        try!(fs::remove_file(path).chain_error(|| {
+        fs::remove_file(path).chain_error(|| {
             human("failed to remove build artifact")
-        }));
+        })?;
     }
     Ok(())
 }
index 4c385c8d1698a145a92041a48829edb75d22c713..a8ebb30e2c8fc2ea615d949f4525ab189576e2af 100644 (file)
@@ -92,8 +92,8 @@ pub enum CompileFilter<'a> {
 
 pub fn compile<'a>(ws: &Workspace<'a>, options: &CompileOptions<'a>)
                    -> CargoResult<ops::Compilation<'a>> {
-    for key in try!(ws.current()).manifest().warnings().iter() {
-        try!(options.config.shell().warn(key))
+    for key in ws.current()?.manifest().warnings().iter() {
+        options.config.shell().warn(key)?
     }
     compile_ws(ws, None, options)
 }
@@ -109,23 +109,23 @@ pub fn resolve_dependencies<'a>(ws: &Workspace<'a>,
         s.split_whitespace()
     }).map(|s| s.to_string()).collect::<Vec<String>>();
 
-    let mut registry = try!(PackageRegistry::new(ws.config()));
+    let mut registry = PackageRegistry::new(ws.config())?;
 
     if let Some(source) = source {
-        registry.add_preloaded(try!(ws.current()).package_id().source_id(),
+        registry.add_preloaded(ws.current()?.package_id().source_id(),
                                source);
     }
 
     // First, resolve the root_package's *listed* dependencies, as well as
     // downloading and updating all remotes and such.
-    let resolve = try!(ops::resolve_ws(&mut registry, ws));
+    let resolve = ops::resolve_ws(&mut registry, ws)?;
 
     // Second, resolve with precisely what we're doing. Filter out
     // transitive dependencies if necessary, specify features, handle
     // overrides, etc.
     let _p = profile::start("resolving w/ overrides...");
 
-    try!(add_overrides(&mut registry, ws));
+    add_overrides(&mut registry, ws)?;
 
     let method = if all_features {
         Method::Everything
@@ -138,9 +138,9 @@ pub fn resolve_dependencies<'a>(ws: &Workspace<'a>,
     };
 
     let resolved_with_overrides =
-            try!(ops::resolve_with_previous(&mut registry, ws,
+            ops::resolve_with_previous(&mut registry, ws,
                                             method, Some(&resolve), None,
-                                            specs));
+                                            specs)?;
 
     let packages = ops::get_resolved_packages(&resolved_with_overrides,
                                               registry);
@@ -152,7 +152,7 @@ pub fn compile_ws<'a>(ws: &Workspace<'a>,
                       source: Option<Box<Source + 'a>>,
                       options: &CompileOptions<'a>)
                       -> CargoResult<ops::Compilation<'a>> {
-    let root_package = try!(ws.current());
+    let root_package = ws.current()?;
     let CompileOptions { config, jobs, target, spec, features,
                          all_features, no_default_features,
                          release, mode, message_format,
@@ -168,32 +168,32 @@ pub fn compile_ws<'a>(ws: &Workspace<'a>,
 
     let profiles = ws.profiles();
     if spec.len() == 0 {
-        try!(generate_targets(root_package, profiles, mode, filter, release));
+        generate_targets(root_package, profiles, mode, filter, release)?;
     }
 
-    let specs = try!(spec.iter().map(|p| PackageIdSpec::parse(p))
-                                .collect::<CargoResult<Vec<_>>>());
+    let specs = spec.iter().map(|p| PackageIdSpec::parse(p))
+                                .collect::<CargoResult<Vec<_>>>()?;
 
-    let pair = try!(resolve_dependencies(ws,
+    let pair = resolve_dependencies(ws,
                                          source,
                                          features,
                                          all_features,
                                          no_default_features,
-                                         &specs));
+                                         &specs)?;
     let (packages, resolve_with_overrides) = pair;
 
     let mut pkgids = Vec::new();
     if spec.len() > 0 {
         for p in spec {
-            pkgids.push(try!(resolve_with_overrides.query(&p)));
+            pkgids.push(resolve_with_overrides.query(&p)?);
         }
     } else {
         pkgids.push(root_package.package_id());
     };
 
-    let to_builds = try!(pkgids.iter().map(|id| {
+    let to_builds = pkgids.iter().map(|id| {
         packages.get(id)
-    }).collect::<CargoResult<Vec<_>>>());
+    }).collect::<CargoResult<Vec<_>>>()?;
 
     let mut general_targets = Vec::new();
     let mut package_targets = Vec::new();
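
The `.map(...).collect::<CargoResult<Vec<_>>>()?` lines above combine two uses of the operator: inside each closure `?` propagates to the closure's own Result, collect() stops at the first Err, and the trailing `?` hands that error to the caller. A minimal sketch with illustrative names:

    use std::num::ParseIntError;

    fn doubled(values: &[&str]) -> Result<Vec<i64>, ParseIntError> {
        let out = values
            .iter()
            .map(|v| {
                let n = v.parse::<i64>()?; // early-returns from the closure only
                Ok(n * 2)
            })
            .collect::<Result<Vec<_>, ParseIntError>>()?;
        Ok(out)
    }

    fn main() {
        assert_eq!(doubled(&["1", "2", "3"]).unwrap(), vec![2, 4, 6]);
        assert!(doubled(&["1", "x"]).is_err());
    }
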
@@ -204,8 +204,8 @@ pub fn compile_ws<'a>(ws: &Workspace<'a>,
             panic!("`rustc` and `rustdoc` should not accept multiple `-p` flags")
         }
         (Some(args), _) => {
-            let targets = try!(generate_targets(to_builds[0], profiles,
-                                                mode, filter, release));
+            let targets = generate_targets(to_builds[0], profiles,
+                                                mode, filter, release)?;
             if targets.len() == 1 {
                 let (target, profile) = targets[0];
                 let mut profile = profile.clone();
@@ -218,8 +218,8 @@ pub fn compile_ws<'a>(ws: &Workspace<'a>,
             }
         }
         (None, Some(args)) => {
-            let targets = try!(generate_targets(to_builds[0], profiles,
-                                                mode, filter, release));
+            let targets = generate_targets(to_builds[0], profiles,
+                                                mode, filter, release)?;
             if targets.len() == 1 {
                 let (target, profile) = targets[0];
                 let mut profile = profile.clone();
@@ -233,8 +233,8 @@ pub fn compile_ws<'a>(ws: &Workspace<'a>,
         }
         (None, None) => {
             for &to_build in to_builds.iter() {
-                let targets = try!(generate_targets(to_build, profiles, mode,
-                                                    filter, release));
+                let targets = generate_targets(to_build, profiles, mode,
+                                                    filter, release)?;
                 package_targets.push((to_build, targets));
             }
         }
@@ -248,7 +248,7 @@ pub fn compile_ws<'a>(ws: &Workspace<'a>,
 
     let mut ret = {
         let _p = profile::start("compiling");
-        let mut build_config = try!(scrape_build_config(config, jobs, target));
+        let mut build_config = scrape_build_config(config, jobs, target)?;
         build_config.release = release;
         build_config.test = mode == CompileMode::Test || mode == CompileMode::Bench;
         build_config.json_errors = message_format == MessageFormat::Json;
@@ -256,13 +256,13 @@ pub fn compile_ws<'a>(ws: &Workspace<'a>,
             build_config.doc_all = deps;
         }
 
-        try!(ops::compile_targets(ws,
+        ops::compile_targets(ws,
                                   &package_targets,
                                   &packages,
                                   &resolve_with_overrides,
                                   config,
                                   build_config,
-                                  profiles))
+                                  profiles)?
     };
 
     ret.to_doc_test = to_builds.iter().map(|&p| p.clone()).collect();
@@ -397,10 +397,10 @@ fn generate_targets<'a>(pkg: &'a Package,
                     }
                     Ok(())
                 };
-                try!(find(bins, "bin", TargetKind::Bin, profile));
-                try!(find(examples, "example", TargetKind::Example, build));
-                try!(find(tests, "test", TargetKind::Test, test));
-                try!(find(benches, "bench", TargetKind::Bench, &profiles.bench));
+                find(bins, "bin", TargetKind::Bin, profile)?;
+                find(examples, "example", TargetKind::Example, build)?;
+                find(tests, "test", TargetKind::Test, test)?;
+                find(benches, "bench", TargetKind::Bench, &profiles.bench)?;
             }
             Ok(targets)
         }
@@ -411,7 +411,7 @@ fn generate_targets<'a>(pkg: &'a Package,
 /// have been configured.
 fn add_overrides<'a>(registry: &mut PackageRegistry<'a>,
                      ws: &Workspace<'a>) -> CargoResult<()> {
-    let paths = match try!(ws.config().get_list("paths")) {
+    let paths = match ws.config().get_list("paths")? {
         Some(list) => list,
         None => return Ok(())
     };
@@ -424,13 +424,13 @@ fn add_overrides<'a>(registry: &mut PackageRegistry<'a>,
     });
 
     for (path, definition) in paths {
-        let id = try!(SourceId::for_path(&path));
+        let id = SourceId::for_path(&path)?;
         let mut source = PathSource::new_recursive(&path, &id, ws.config());
-        try!(source.update().chain_error(|| {
+        source.update().chain_error(|| {
             human(format!("failed to update path override `{}` \
                            (defined in `{}`)", path.display(),
                           definition.display()))
-        }));
+        })?;
         registry.add_override(&id, Box::new(source));
     }
     Ok(())
@@ -448,7 +448,7 @@ fn scrape_build_config(config: &Config,
                        jobs: Option<u32>,
                        target: Option<String>)
                        -> CargoResult<ops::BuildConfig> {
-    let cfg_jobs = match try!(config.get_i64("build.jobs")) {
+    let cfg_jobs = match config.get_i64("build.jobs")? {
         Some(v) => {
             if v.val <= 0 {
                 bail!("build.jobs must be positive, but found {} in {}",
@@ -463,17 +463,17 @@ fn scrape_build_config(config: &Config,
         None => None,
     };
     let jobs = jobs.or(cfg_jobs).unwrap_or(::num_cpus::get() as u32);
-    let cfg_target = try!(config.get_string("build.target")).map(|s| s.val);
+    let cfg_target = config.get_string("build.target")?.map(|s| s.val);
     let target = target.or(cfg_target);
     let mut base = ops::BuildConfig {
-        host_triple: try!(config.rustc()).host.clone(),
+        host_triple: config.rustc()?.host.clone(),
         requested_target: target.clone(),
         jobs: jobs,
         ..Default::default()
     };
-    base.host = try!(scrape_target_config(config, &base.host_triple));
+    base.host = scrape_target_config(config, &base.host_triple)?;
     base.target = match target.as_ref() {
-        Some(triple) => try!(scrape_target_config(config, &triple)),
+        Some(triple) => scrape_target_config(config, &triple)?,
         None => base.host.clone(),
     };
     Ok(base)
@@ -484,11 +484,11 @@ fn scrape_target_config(config: &Config, triple: &str)
 
     let key = format!("target.{}", triple);
     let mut ret = ops::TargetConfig {
-        ar: try!(config.get_path(&format!("{}.ar", key))).map(|v| v.val),
-        linker: try!(config.get_path(&format!("{}.linker", key))).map(|v| v.val),
+        ar: config.get_path(&format!("{}.ar", key))?.map(|v| v.val),
+        linker: config.get_path(&format!("{}.linker", key))?.map(|v| v.val),
         overrides: HashMap::new(),
     };
-    let table = match try!(config.get_table(&key)) {
+    let table = match config.get_table(&key)? {
         Some(table) => table.val,
         None => return Ok(ret),
     };
@@ -505,36 +505,35 @@ fn scrape_target_config(config: &Config, triple: &str)
             rerun_if_changed: Vec::new(),
             warnings: Vec::new(),
         };
-        for (k, value) in try!(value.table(&lib_name)).0 {
+        for (k, value) in value.table(&lib_name)?.0 {
             let key = format!("{}.{}", key, k);
             match &k[..] {
                 "rustc-flags" => {
-                    let (flags, definition) = try!(value.string(&k));
+                    let (flags, definition) = value.string(&k)?;
                     let whence = format!("in `{}` (in {})", key,
                                          definition.display());
-                    let (paths, links) = try!(
-                        BuildOutput::parse_rustc_flags(&flags, &whence)
-                    );
+                    let (paths, links) =
+                        BuildOutput::parse_rustc_flags(&flags, &whence)?;
                     output.library_paths.extend(paths);
                     output.library_links.extend(links);
                 }
                 "rustc-link-lib" => {
-                    let list = try!(value.list(&k));
+                    let list = value.list(&k)?;
                     output.library_links.extend(list.iter()
                                                     .map(|v| v.0.clone()));
                 }
                 "rustc-link-search" => {
-                    let list = try!(value.list(&k));
+                    let list = value.list(&k)?;
                     output.library_paths.extend(list.iter().map(|v| {
                         PathBuf::from(&v.0)
                     }));
                 }
                 "rustc-cfg" => {
-                    let list = try!(value.list(&k));
+                    let list = value.list(&k)?;
                     output.cfgs.extend(list.iter().map(|v| v.0.clone()));
                 }
                 _ => {
-                    let val = try!(value.string(&k)).0;
+                    let val = value.string(&k)?.0;
                     output.metadata.push((k.clone(), val.to_string()));
                 }
             }
index d9afaed03a94244d10a66bbbe834310be10023f7..8c27335745e299d14a50d6c461c47a9878be543b 100644 (file)
@@ -13,7 +13,7 @@ pub struct DocOptions<'a> {
 }
 
 pub fn doc(ws: &Workspace, options: &DocOptions) -> CargoResult<()> {
-    let package = try!(ws.current());
+    let package = ws.current()?;
 
     let mut lib_names = HashSet::new();
     let mut bin_names = HashSet::new();
@@ -34,13 +34,13 @@ pub fn doc(ws: &Workspace, options: &DocOptions) -> CargoResult<()> {
         }
     }
 
-    try!(ops::compile(ws, &options.compile_opts));
+    ops::compile(ws, &options.compile_opts)?;
 
     if options.open_result {
         let name = if options.compile_opts.spec.len() > 1 {
             bail!("Passing multiple packages and `open` is not supported")
         } else if options.compile_opts.spec.len() == 1 {
-            try!(PackageIdSpec::parse(&options.compile_opts.spec[0]))
+            PackageIdSpec::parse(&options.compile_opts.spec[0])?
                 .name()
                 .replace("-", "_")
         } else {
@@ -62,12 +62,12 @@ pub fn doc(ws: &Workspace, options: &DocOptions) -> CargoResult<()> {
         if fs::metadata(&path).is_ok() {
             let mut shell = options.compile_opts.config.shell();
             match open_docs(&path) {
-                Ok(m) => try!(shell.status("Launching", m)),
+                Ok(m) => shell.status("Launching", m)?,
                 Err(e) => {
-                    try!(shell.warn(
-                            "warning: could not determine a browser to open docs with, tried:"));
+                    shell.warn(
+                            "warning: could not determine a browser to open docs with, tried:")?;
                     for method in e {
-                        try!(shell.warn(format!("\t{}", method)));
+                        shell.warn(format!("\t{}", method))?;
                     }
                 }
             }
index a0144c8f4729c3f2758821bbdc30d6bf0ffd37da..1c30b0b7e8b81ac0cacb0b01f3749231de7e13d8 100644 (file)
@@ -5,11 +5,11 @@ use util::CargoResult;
 
 /// Executes `cargo fetch`.
 pub fn fetch<'a>(ws: &Workspace<'a>) -> CargoResult<(Resolve, PackageSet<'a>)> {
-    let mut registry = try!(PackageRegistry::new(ws.config()));
-    let resolve = try!(ops::resolve_ws(&mut registry, ws));
+    let mut registry = PackageRegistry::new(ws.config())?;
+    let resolve = ops::resolve_ws(&mut registry, ws)?;
     let packages = get_resolved_packages(&resolve, registry);
     for id in resolve.iter() {
-        try!(packages.get(id));
+        packages.get(id)?;
     }
     Ok((resolve, packages))
 }
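
Functions like fetch() return CargoResult, and both `try!` and `?` rely on a From conversion to lift the underlying error into that type. A self-contained sketch of the mechanism with a hypothetical error enum (not Cargo's):

    use std::fmt;
    use std::fs::File;
    use std::io::{self, Read};

    #[derive(Debug)]
    enum DemoError {
        Io(io::Error),
    }

    // This From impl is what `?` calls to convert io::Error on the way out.
    impl From<io::Error> for DemoError {
        fn from(e: io::Error) -> DemoError {
            DemoError::Io(e)
        }
    }

    impl fmt::Display for DemoError {
        fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
            match *self {
                DemoError::Io(ref e) => write!(f, "io error: {}", e),
            }
        }
    }

    fn read_file(path: &str) -> Result<String, DemoError> {
        let mut contents = String::new();
        File::open(path)?.read_to_string(&mut contents)?;
        Ok(contents)
    }

    fn main() {
        match read_file("Cargo.toml") {
            Ok(s) => println!("read {} bytes", s.len()),
            Err(e) => println!("{}", e),
        }
    }
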
index 25e2204ba9e12dcc94dfff963b17188a23ea6e55..c09ee67022f78bc36ef6b49bafffb8858bf68716 100644 (file)
@@ -16,11 +16,11 @@ pub struct UpdateOptions<'a> {
 }
 
 pub fn generate_lockfile(ws: &Workspace) -> CargoResult<()> {
-    let mut registry = try!(PackageRegistry::new(ws.config()));
-    let resolve = try!(ops::resolve_with_previous(&mut registry, ws,
+    let mut registry = PackageRegistry::new(ws.config())?;
+    let resolve = ops::resolve_with_previous(&mut registry, ws,
                                                   Method::Everything,
-                                                  None, None, &[]));
-    try!(ops::write_pkg_lockfile(ws, &resolve));
+                                                  None, None, &[])?;
+    ops::write_pkg_lockfile(ws, &resolve)?;
     Ok(())
 }
 
@@ -35,11 +35,11 @@ pub fn update_lockfile(ws: &Workspace, opts: &UpdateOptions)
         bail!("you can't generate a lockfile for an empty workspace.")
     }
 
-    let previous_resolve = match try!(ops::load_pkg_lockfile(ws)) {
+    let previous_resolve = match ops::load_pkg_lockfile(ws)? {
         Some(resolve) => resolve,
         None => return generate_lockfile(ws),
     };
-    let mut registry = try!(PackageRegistry::new(opts.config));
+    let mut registry = PackageRegistry::new(opts.config)?;
     let mut to_avoid = HashSet::new();
 
     if opts.to_update.is_empty() {
@@ -47,7 +47,7 @@ pub fn update_lockfile(ws: &Workspace, opts: &UpdateOptions)
     } else {
         let mut sources = Vec::new();
         for name in opts.to_update {
-            let dep = try!(previous_resolve.query(name));
+            let dep = previous_resolve.query(name)?;
             if opts.aggressive {
                 fill_with_deps(&previous_resolve, dep, &mut to_avoid,
                                &mut HashSet::new());
@@ -71,15 +71,15 @@ pub fn update_lockfile(ws: &Workspace, opts: &UpdateOptions)
                 });
             }
         }
-        try!(registry.add_sources(&sources));
+        registry.add_sources(&sources)?;
     }
 
-    let resolve = try!(ops::resolve_with_previous(&mut registry,
+    let resolve = ops::resolve_with_previous(&mut registry,
                                                   ws,
                                                   Method::Everything,
                                                   Some(&previous_resolve),
                                                   Some(&to_avoid),
-                                                  &[]));
+                                                  &[])?;
 
     // Summarize what is changing for the user.
     let print_change = |status: &str, msg: String| {
@@ -93,18 +93,18 @@ pub fn update_lockfile(ws: &Workspace, opts: &UpdateOptions)
             } else {
                 format!("{} -> v{}", removed[0], added[0].version())
             };
-            try!(print_change("Updating", msg));
+            print_change("Updating", msg)?;
         } else {
             for package in removed.iter() {
-                try!(print_change("Removing", format!("{}", package)));
+                print_change("Removing", format!("{}", package))?;
             }
             for package in added.iter() {
-                try!(print_change("Adding", format!("{}", package)));
+                print_change("Adding", format!("{}", package))?;
             }
         }
     }
 
-    try!(ops::write_pkg_lockfile(&ws, &resolve));
+    ops::write_pkg_lockfile(&ws, &resolve)?;
     return Ok(());
 
     fn fill_with_deps<'a>(resolve: &'a Resolve, dep: &'a PackageId,
index 93ceb40409a334b2eebeb443a19d73fe13ff9ca7..b9a5aa85b06fad6dddeeb70dc1f922932b266e08 100644 (file)
@@ -53,29 +53,29 @@ pub fn install(root: Option<&str>,
                opts: &ops::CompileOptions,
                force: bool) -> CargoResult<()> {
     let config = opts.config;
-    let root = try!(resolve_root(root, config));
-    let map = try!(SourceConfigMap::new(config));
+    let root = resolve_root(root, config)?;
+    let map = SourceConfigMap::new(config)?;
     let (pkg, source) = if source_id.is_git() {
-        try!(select_pkg(GitSource::new(source_id, config), source_id,
-                        krate, vers, &mut |git| git.read_packages()))
+        select_pkg(GitSource::new(source_id, config), source_id,
+                        krate, vers, &mut |git| git.read_packages())?
     } else if source_id.is_path() {
         let path = source_id.url().to_file_path().ok()
                             .expect("path sources must have a valid path");
         let mut src = PathSource::new(&path, source_id, config);
-        try!(src.update().chain_error(|| {
+        src.update().chain_error(|| {
             human(format!("`{}` is not a crate root; specify a crate to \
                            install from crates.io, or use --path or --git to \
                            specify an alternate source", path.display()))
-        }));
-        try!(select_pkg(PathSource::new(&path, source_id, config),
+        })?;
+        select_pkg(PathSource::new(&path, source_id, config),
                         source_id, krate, vers,
-                        &mut |path| path.read_packages()))
+                        &mut |path| path.read_packages())?
     } else {
-        try!(select_pkg(try!(map.load(source_id)),
+        select_pkg(map.load(source_id)?,
                         source_id, krate, vers,
                         &mut |_| Err(human("must specify a crate to install from \
                                             crates.io, or use --path or --git to \
-                                            specify alternate source"))))
+                                            specify alternate source")))?
     };
 
 
@@ -91,22 +91,22 @@ pub fn install(root: Option<&str>,
     };
 
     let ws = match overidden_target_dir {
-        Some(dir) => try!(Workspace::one(pkg, config, Some(dir))),
-        None => try!(Workspace::new(pkg.manifest_path(), config)),
+        Some(dir) => Workspace::one(pkg, config, Some(dir))?,
+        None => Workspace::new(pkg.manifest_path(), config)?,
     };
-    let pkg = try!(ws.current());
+    let pkg = ws.current()?;
 
     // Preflight checks to check up front whether we'll overwrite something.
     // We have to check this again afterwards, but may as well avoid building
     // anything if we're gonna throw it away anyway.
     {
-        let metadata = try!(metadata(config, &root));
-        let list = try!(read_crate_list(metadata.file()));
+        let metadata = metadata(config, &root)?;
+        let list = read_crate_list(metadata.file())?;
         let dst = metadata.parent().join("bin");
-        try!(check_overwrites(&dst, pkg, &opts.filter, &list, force));
+        check_overwrites(&dst, pkg, &opts.filter, &list, force)?;
     }
 
-    let compile = try!(ops::compile_ws(&ws, Some(source), opts).chain_error(|| {
+    let compile = ops::compile_ws(&ws, Some(source), opts).chain_error(|| {
         if let Some(td) = td_opt.take() {
             // preserve the temporary directory, so the user can inspect it
             td.into_path();
@@ -114,28 +114,28 @@ pub fn install(root: Option<&str>,
 
         human(format!("failed to compile `{}`, intermediate artifacts can be \
                        found at `{}`", pkg, ws.target_dir().display()))
-    }));
-    let binaries: Vec<(&str, &Path)> = try!(compile.binaries.iter().map(|bin| {
+    })?;
+    let binaries: Vec<(&str, &Path)> = compile.binaries.iter().map(|bin| {
         let name = bin.file_name().unwrap();
         if let Some(s) = name.to_str() {
             Ok((s, bin.as_ref()))
         } else {
             bail!("Binary `{:?}` name can't be serialized into string", name)
         }
-    }).collect::<CargoResult<_>>());
+    }).collect::<CargoResult<_>>()?;
 
-    let metadata = try!(metadata(config, &root));
-    let mut list = try!(read_crate_list(metadata.file()));
+    let metadata = metadata(config, &root)?;
+    let mut list = read_crate_list(metadata.file())?;
     let dst = metadata.parent().join("bin");
-    let duplicates = try!(check_overwrites(&dst, pkg, &opts.filter,
-                                           &list, force));
+    let duplicates = check_overwrites(&dst, pkg, &opts.filter,
+                                           &list, force)?;
 
-    try!(fs::create_dir_all(&dst));
+    fs::create_dir_all(&dst)?;
 
     // Copy all binaries to a temporary directory under `dst` first, catching
     // some failure modes (e.g. out of space) before touching the existing
     // binaries. This directory will get cleaned up via RAII.
-    let staging_dir = try!(TempDir::new_in(&dst, "cargo-install"));
+    let staging_dir = TempDir::new_in(&dst, "cargo-install")?;
     for &(bin, src) in binaries.iter() {
         let dst = staging_dir.path().join(bin);
         // Try to move if `target_dir` is transient.
@@ -144,10 +144,10 @@ pub fn install(root: Option<&str>,
                 continue
             }
         }
-        try!(fs::copy(src, &dst).chain_error(|| {
+        fs::copy(src, &dst).chain_error(|| {
             human(format!("failed to copy `{}` to `{}`", src.display(),
                           dst.display()))
-        }));
+        })?;
     }
 
     let (to_replace, to_install): (Vec<&str>, Vec<&str>) =
@@ -160,11 +160,11 @@ pub fn install(root: Option<&str>,
     for bin in to_install.iter() {
         let src = staging_dir.path().join(bin);
         let dst = dst.join(bin);
-        try!(config.shell().status("Installing", dst.display()));
-        try!(fs::rename(&src, &dst).chain_error(|| {
+        config.shell().status("Installing", dst.display())?;
+        fs::rename(&src, &dst).chain_error(|| {
             human(format!("failed to move `{}` to `{}`", src.display(),
                           dst.display()))
-        }));
+        })?;
         installed.bins.push(dst);
     }
 
@@ -176,11 +176,11 @@ pub fn install(root: Option<&str>,
             for &bin in to_replace.iter() {
                 let src = staging_dir.path().join(bin);
                 let dst = dst.join(bin);
-                try!(config.shell().status("Replacing", dst.display()));
-                try!(fs::rename(&src, &dst).chain_error(|| {
+                config.shell().status("Replacing", dst.display())?;
+                fs::rename(&src, &dst).chain_error(|| {
                     human(format!("failed to move `{}` to `{}`", src.display(),
                                   dst.display()))
-                }));
+                })?;
                 replaced_names.push(bin);
             }
             Ok(())
@@ -219,8 +219,8 @@ pub fn install(root: Option<&str>,
     match write_result {
         // Replacement error (if any) isn't actually caused by write error
         // but this seems to be the only way to show both.
-        Err(err) => try!(result.chain_error(|| err)),
-        Ok(_) => try!(result),
+        Err(err) => result.chain_error(|| err)?,
+        Ok(_) => result?,
     }
 
     // Reaching here means all actions have succeeded. Clean up.
@@ -229,7 +229,7 @@ pub fn install(root: Option<&str>,
         // Don't bother grabbing a lock as we're going to blow it all away
         // anyway.
         let target_dir = ws.target_dir().into_path_unlocked();
-        try!(fs::remove_dir_all(&target_dir));
+        fs::remove_dir_all(&target_dir)?;
     }
 
     // Print a warning that if this directory isn't in PATH that they won't be
@@ -241,9 +241,9 @@ pub fn install(root: Option<&str>,
         }
     }
 
-    try!(config.shell().warn(&format!("be sure to add `{}` to your PATH to be \
+    config.shell().warn(&format!("be sure to add `{}` to your PATH to be \
                                        able to run the installed binaries",
-                                      dst.display())));
+                                      dst.display()))?;
     Ok(())
 }
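
The write_result handling above uses `?` inside match arms; whichever arm runs, an Err still returns out of the whole function. A small sketch of that shape (illustrative types only):

    fn combine(a: Result<i32, String>, b: Result<i32, String>) -> Result<i32, String> {
        let total = match a {
            Ok(x) => x + b?, // b's error propagates from inside the arm
            Err(e) => return Err(e),
        };
        Ok(total)
    }

    fn main() {
        assert_eq!(combine(Ok(1), Ok(2)).unwrap(), 3);
        assert!(combine(Ok(1), Err("boom".to_string())).is_err());
    }
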
 
@@ -255,14 +255,14 @@ fn select_pkg<'a, T>(mut source: T,
                      -> CargoResult<(Package, Box<Source + 'a>)>
     where T: Source + 'a
 {
-    try!(source.update());
+    source.update()?;
     match name {
         Some(name) => {
-            let dep = try!(Dependency::parse_no_deprecated(name, vers, source_id));
-            let deps = try!(source.query(&dep));
+            let dep = Dependency::parse_no_deprecated(name, vers, source_id)?;
+            let deps = source.query(&dep)?;
             match deps.iter().map(|p| p.package_id()).max() {
                 Some(pkgid) => {
-                    let pkg = try!(source.download(pkgid));
+                    let pkg = source.download(pkgid)?;
                     Ok((pkg, Box::new(source)))
                 }
                 None => {
@@ -274,17 +274,17 @@ fn select_pkg<'a, T>(mut source: T,
             }
         }
         None => {
-            let candidates = try!(list_all(&mut source));
+            let candidates = list_all(&mut source)?;
             let binaries = candidates.iter().filter(|cand| {
                 cand.targets().iter().filter(|t| t.is_bin()).count() > 0
             });
             let examples = candidates.iter().filter(|cand| {
                 cand.targets().iter().filter(|t| t.is_example()).count() > 0
             });
-            let pkg = match try!(one(binaries, |v| multi_err("binaries", v))) {
+            let pkg = match one(binaries, |v| multi_err("binaries", v))? {
                 Some(p) => p,
                 None => {
-                    match try!(one(examples, |v| multi_err("examples", v))) {
+                    match one(examples, |v| multi_err("examples", v))? {
                         Some(p) => p,
                         None => bail!("no packages found with binaries or \
                                        examples"),
@@ -381,10 +381,10 @@ fn find_duplicates(dst: &Path,
 fn read_crate_list(mut file: &File) -> CargoResult<CrateListingV1> {
     (|| -> CargoResult<_> {
         let mut contents = String::new();
-        try!(file.read_to_string(&mut contents));
-        let listing = try!(toml::decode_str(&contents).chain_error(|| {
+        file.read_to_string(&mut contents)?;
+        let listing = toml::decode_str(&contents).chain_error(|| {
             internal("invalid TOML found for metadata")
-        }));
+        })?;
         match listing {
             CrateListing::V1(v1) => Ok(v1),
             CrateListing::Empty => {
@@ -398,10 +398,10 @@ fn read_crate_list(mut file: &File) -> CargoResult<CrateListingV1> {
 
 fn write_crate_list(mut file: &File, listing: CrateListingV1) -> CargoResult<()> {
     (|| -> CargoResult<_> {
-        try!(file.seek(SeekFrom::Start(0)));
-        try!(file.set_len(0));
+        file.seek(SeekFrom::Start(0))?;
+        file.set_len(0)?;
         let data = toml::encode_str::<CrateListing>(&CrateListing::V1(listing));
-        try!(file.write_all(data.as_bytes()));
+        file.write_all(data.as_bytes())?;
         Ok(())
     }).chain_error(|| {
         human("failed to write crate metadata")
@@ -409,15 +409,15 @@ fn write_crate_list(mut file: &File, listing: CrateListingV1) -> CargoResult<()>
 }
 
 pub fn install_list(dst: Option<&str>, config: &Config) -> CargoResult<()> {
-    let dst = try!(resolve_root(dst, config));
-    let dst = try!(metadata(config, &dst));
-    let list = try!(read_crate_list(dst.file()));
+    let dst = resolve_root(dst, config)?;
+    let dst = metadata(config, &dst)?;
+    let list = read_crate_list(dst.file())?;
     let mut shell = config.shell();
     let out = shell.out();
     for (k, v) in list.v1.iter() {
-        try!(writeln!(out, "{}:", k));
+        writeln!(out, "{}:", k)?;
         for bin in v {
-            try!(writeln!(out, "    {}", bin));
+            writeln!(out, "    {}", bin)?;
         }
     }
     Ok(())
@@ -427,12 +427,12 @@ pub fn uninstall(root: Option<&str>,
                  spec: &str,
                  bins: &[String],
                  config: &Config) -> CargoResult<()> {
-    let root = try!(resolve_root(root, config));
-    let crate_metadata = try!(metadata(config, &root));
-    let mut metadata = try!(read_crate_list(crate_metadata.file()));
+    let root = resolve_root(root, config)?;
+    let crate_metadata = metadata(config, &root)?;
+    let mut metadata = read_crate_list(crate_metadata.file())?;
     let mut to_remove = Vec::new();
     {
-        let result = try!(PackageIdSpec::query_str(spec, metadata.v1.keys()))
+        let result = PackageIdSpec::query_str(spec, metadata.v1.keys())?
                                         .clone();
         let mut installed = match metadata.v1.entry(result.clone()) {
             Entry::Occupied(e) => e,
@@ -474,10 +474,10 @@ pub fn uninstall(root: Option<&str>,
             installed.remove();
         }
     }
-    try!(write_crate_list(crate_metadata.file(), metadata));
+    write_crate_list(crate_metadata.file(), metadata)?;
     for bin in to_remove {
-        try!(config.shell().status("Removing", bin.display()));
-        try!(fs::remove_file(bin));
+        config.shell().status("Removing", bin.display())?;
+        fs::remove_file(bin)?;
     }
 
     Ok(())
@@ -489,7 +489,7 @@ fn metadata(config: &Config, root: &Filesystem) -> CargoResult<FileLock> {
 
 fn resolve_root(flag: Option<&str>,
                 config: &Config) -> CargoResult<Filesystem> {
-    let config_root = try!(config.get_path("install.root"));
+    let config_root = config.get_path("install.root")?;
     Ok(flag.map(PathBuf::from).or_else(|| {
         env::var_os("CARGO_INSTALL_ROOT").map(PathBuf::from)
     }).or_else(move || {
index fe00a74d9891c0218178be3421c68073257d6d05..6b41f3ed5d3a1a793e7640f2e97c0188dd6c39e4 100644 (file)
@@ -42,7 +42,7 @@ struct MkOptions<'a> {
 
 impl Decodable for VersionControl {
     fn decode<D: Decoder>(d: &mut D) -> Result<VersionControl, D::Error> {
-        Ok(match &try!(d.read_str())[..] {
+        Ok(match &d.read_str()?[..] {
             "git" => VersionControl::Git,
             "hg" => VersionControl::Hg,
             "none" => VersionControl::NoVcs,
@@ -95,10 +95,10 @@ fn get_name<'a>(path: &'a Path, opts: &'a NewOptions, config: &Config) -> CargoR
                               path.as_os_str());
     }
 
-    let dir_name = try!(path.file_name().and_then(|s| s.to_str()).chain_error(|| {
+    let dir_name = path.file_name().and_then(|s| s.to_str()).chain_error(|| {
         human(&format!("cannot create a project with a non-unicode name: {:?}",
                        path.file_name().unwrap()))
-    }));
+    })?;
 
     if opts.bin {
         Ok(dir_name)
@@ -108,7 +108,7 @@ fn get_name<'a>(path: &'a Path, opts: &'a NewOptions, config: &Config) -> CargoR
             let message = format!(
                 "note: package will be named `{}`; use --name to override",
                 new_name);
-            try!(config.shell().say(&message, BLACK));
+            config.shell().say(&message, BLACK)?;
         }
         Ok(new_name)
     }
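
As in the Decodable impl for VersionControl above, `?` can also sit inside the expression being matched on; the early return happens before any arm runs. A standalone sketch with a plain String error instead of a decoder:

    fn classify(raw: Result<String, String>) -> Result<&'static str, String> {
        Ok(match &raw?[..] {
            "git" => "git",
            "hg" => "hg",
            "none" => "no version control",
            other => return Err(format!("unknown vcs: {}", other)),
        })
    }

    fn main() {
        assert_eq!(classify(Ok("git".to_string())).unwrap(), "git");
        assert!(classify(Err("decode failed".to_string())).is_err());
        assert!(classify(Ok("svn".to_string())).is_err());
    }
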
@@ -196,7 +196,7 @@ fn detect_source_paths_and_types(project_path : &Path,
                 }
             }
             H::Detect => {
-                let content = try!(paths::read(&path.join(pp.clone())));
+                let content = paths::read(&path.join(pp.clone()))?;
                 let isbin = content.contains("fn main");
                 SourceFileInformation {
                     relative_path: pp,
@@ -265,8 +265,8 @@ pub fn new(opts: NewOptions, config: &Config) -> CargoResult<()> {
         bail!("can't specify both lib and binary outputs");
     }
 
-    let name = try!(get_name(&path, &opts, config));
-    try!(check_name(name));
+    let name = get_name(&path, &opts, config)?;
+    check_name(name)?;
 
     let mkopts = MkOptions {
         version_control: opts.version_control,
@@ -294,12 +294,12 @@ pub fn init(opts: NewOptions, config: &Config) -> CargoResult<()> {
         bail!("can't specify both lib and binary outputs");
     }
 
-    let name = try!(get_name(&path, &opts, config));
-    try!(check_name(name));
+    let name = get_name(&path, &opts, config)?;
+    check_name(name)?;
 
     let mut src_paths_types = vec![];
 
-    try!(detect_source_paths_and_types(&path, name, &mut src_paths_types));
+    detect_source_paths_and_types(&path, name, &mut src_paths_types)?;
 
     if src_paths_types.len() == 0 {
         src_paths_types.push(plan_new_source_file(opts.bin, name.to_string()));
@@ -369,7 +369,7 @@ fn existing_vcs_repo(path: &Path, cwd: &Path) -> bool {
 fn mk(config: &Config, opts: &MkOptions) -> CargoResult<()> {
     let path = opts.path;
     let name = opts.name;
-    let cfg = try!(global_config(config));
+    let cfg = global_config(config)?;
     let mut ignore = "target\n".to_string();
     let in_existing_vcs_repo = existing_vcs_repo(path.parent().unwrap(), config.cwd());
     if !opts.bin {
@@ -386,22 +386,22 @@ fn mk(config: &Config, opts: &MkOptions) -> CargoResult<()> {
     match vcs {
         VersionControl::Git => {
             if !fs::metadata(&path.join(".git")).is_ok() {
-                try!(GitRepo::init(path, config.cwd()));
+                GitRepo::init(path, config.cwd())?;
             }
-            try!(paths::append(&path.join(".gitignore"), ignore.as_bytes()));
+            paths::append(&path.join(".gitignore"), ignore.as_bytes())?;
         },
         VersionControl::Hg => {
             if !fs::metadata(&path.join(".hg")).is_ok() {
-                try!(HgRepo::init(path, config.cwd()));
+                HgRepo::init(path, config.cwd())?;
             }
-            try!(paths::append(&path.join(".hgignore"), ignore.as_bytes()));
+            paths::append(&path.join(".hgignore"), ignore.as_bytes())?;
         },
         VersionControl::NoVcs => {
-            try!(fs::create_dir_all(path));
+            fs::create_dir_all(path)?;
         },
     };
 
-    let (author_name, email) = try!(discover_author());
+    let (author_name, email) = discover_author()?;
     // Hoo boy, sure glad we've got exhaustiveness checking behind us.
     let author = match (cfg.name, cfg.email, author_name, email) {
         (Some(name), Some(email), _, _) |
@@ -438,14 +438,14 @@ path = {}
 
     // Create Cargo.toml file with necessary [lib] and [[bin]] sections, if needed
 
-    try!(paths::write(&path.join("Cargo.toml"), format!(
+    paths::write(&path.join("Cargo.toml"), format!(
 r#"[package]
 name = "{}"
 version = "0.1.0"
 authors = [{}]
 
 [dependencies]
-{}"#, name, toml::Value::String(author), cargotoml_path_specifier).as_bytes()));
+{}"#, name, toml::Value::String(author), cargotoml_path_specifier).as_bytes())?;
 
 
     // Create all specified source files
@@ -456,7 +456,7 @@ authors = [{}]
         let path_of_source_file = path.join(i.relative_path.clone());
 
         if let Some(src_dir) = path_of_source_file.parent() {
-            try!(fs::create_dir_all(src_dir));
+            fs::create_dir_all(src_dir)?;
         }
 
         let default_file_content : &[u8] = if i.bin {
@@ -477,14 +477,14 @@ mod tests {
         };
 
         if !fs::metadata(&path_of_source_file).map(|x| x.is_file()).unwrap_or(false) {
-            try!(paths::write(&path_of_source_file, default_file_content));
+            paths::write(&path_of_source_file, default_file_content)?;
         }
     }
 
     if let Err(e) = Workspace::new(&path.join("Cargo.toml"), config) {
         let msg = format!("compiling this new crate may not work due to invalid \
                            workspace configuration\n\n{}", e);
-        try!(config.shell().warn(msg));
+        config.shell().warn(msg)?;
     }
 
     Ok(())
@@ -526,9 +526,9 @@ fn discover_author() -> CargoResult<(String, Option<String>)> {
 }
 
 fn global_config(config: &Config) -> CargoResult<CargoNewConfig> {
-    let name = try!(config.get_string("cargo-new.name")).map(|s| s.val);
-    let email = try!(config.get_string("cargo-new.email")).map(|s| s.val);
-    let vcs = try!(config.get_string("cargo-new.vcs"));
+    let name = config.get_string("cargo-new.name")?.map(|s| s.val);
+    let email = config.get_string("cargo-new.email")?.map(|s| s.val);
+    let vcs = config.get_string("cargo-new.vcs")?;
 
     let vcs = match vcs.as_ref().map(|p| (&p.val[..], &p.definition)) {
         Some(("git", _)) => Some(VersionControl::Git),
index affaaa6a18eca51e202efe9eda5e8bfa004af12b..b8dfca5ef363cdf08974ca2210ce1fdf82de4a64 100644 (file)
@@ -46,12 +46,12 @@ fn metadata_full(ws: &Workspace,
     let specs = ws.members().map(|pkg| {
         PackageIdSpec::from_package_id(pkg.package_id())
     }).collect::<Vec<_>>();
-    let deps = try!(ops::resolve_dependencies(ws,
+    let deps = ops::resolve_dependencies(ws,
                                               None,
                                               &opt.features,
                                               opt.all_features,
                                               opt.no_default_features,
-                                              &specs));
+                                              &specs)?;
     let (packages, resolve) = deps;
 
     let packages = try!(packages.package_ids()
index 55f8dd64fc975e0c173413d6b80119ad120e9409..26841796ec821a9a21f1c566cf4f2ddde5af2bae 100644 (file)
@@ -24,22 +24,22 @@ pub struct PackageOpts<'cfg> {
 
 pub fn package(ws: &Workspace,
                opts: &PackageOpts) -> CargoResult<Option<FileLock>> {
-    let pkg = try!(ws.current());
+    let pkg = ws.current()?;
     let config = ws.config();
     let mut src = PathSource::new(pkg.root(),
                                   pkg.package_id().source_id(),
                                   config);
-    try!(src.update());
+    src.update()?;
 
     if opts.check_metadata {
-        try!(check_metadata(pkg, config));
+        check_metadata(pkg, config)?;
     }
 
-    try!(verify_dependencies(&pkg));
+    verify_dependencies(&pkg)?;
 
     if opts.list {
         let root = pkg.root();
-        let mut list: Vec<_> = try!(src.list_files(&pkg)).iter().map(|file| {
+        let mut list: Vec<_> = src.list_files(&pkg)?.iter().map(|file| {
             util::without_prefix(&file, &root).unwrap().to_path_buf()
         }).collect();
         list.sort();
@@ -50,38 +50,38 @@ pub fn package(ws: &Workspace,
     }
 
     if !opts.allow_dirty {
-        try!(check_not_dirty(&pkg, &src));
+        check_not_dirty(&pkg, &src)?;
     }
 
     let filename = format!("{}-{}.crate", pkg.name(), pkg.version());
     let dir = ws.target_dir().join("package");
     let mut dst = {
         let tmp = format!(".{}", filename);
-        try!(dir.open_rw(&tmp, config, "package scratch space"))
+        dir.open_rw(&tmp, config, "package scratch space")?
     };
 
     // Package up and test a temporary tarball and only move it to the final
     // location if it actually passes all our tests. Any previously existing
     // tarball can be assumed as corrupt or invalid, so we just blow it away if
     // it exists.
-    try!(config.shell().status("Packaging", pkg.package_id().to_string()));
-    try!(dst.file().set_len(0));
-    try!(tar(ws, &src, dst.file(), &filename).chain_error(|| {
+    config.shell().status("Packaging", pkg.package_id().to_string())?;
+    dst.file().set_len(0)?;
+    tar(ws, &src, dst.file(), &filename).chain_error(|| {
         human("failed to prepare local package for uploading")
-    }));
+    })?;
     if opts.verify {
-        try!(dst.seek(SeekFrom::Start(0)));
-        try!(run_verify(ws, dst.file(), opts).chain_error(|| {
+        dst.seek(SeekFrom::Start(0))?;
+        run_verify(ws, dst.file(), opts).chain_error(|| {
             human("failed to verify package tarball")
-        }))
+        })?
     }
-    try!(dst.seek(SeekFrom::Start(0)));
+    dst.seek(SeekFrom::Start(0))?;
     {
         let src_path = dst.path();
         let dst_path = dst.parent().join(&filename);
-        try!(fs::rename(&src_path, &dst_path).chain_error(|| {
+        fs::rename(&src_path, &dst_path).chain_error(|| {
             human("failed to move temporary tarball into final location")
-        }));
+        })?;
     }
     Ok(Some(dst))
 }
@@ -113,10 +113,10 @@ fn check_metadata(pkg: &Package, config: &Config) -> CargoResult<()> {
         }
         things.push_str(&missing.last().unwrap());
 
-        try!(config.shell().warn(
+        config.shell().warn(
             &format!("manifest has no {things}.\n\
                     See http://doc.crates.io/manifest.html#package-metadata for more info.",
-                    things = things)))
+                    things = things))?
     }
     Ok(())
 }
@@ -159,7 +159,7 @@ fn check_not_dirty(p: &Package, src: &PathSource) -> CargoResult<()> {
            src: &PathSource,
            repo: &git2::Repository) -> CargoResult<()> {
         let workdir = repo.workdir().unwrap();
-        let dirty = try!(src.list_files(p)).iter().filter(|file| {
+        let dirty = src.list_files(p)?.iter().filter(|file| {
             let relative = file.strip_prefix(workdir).unwrap();
             if let Ok(status) = repo.status_file(relative) {
                 status != git2::STATUS_CURRENT
@@ -185,27 +185,27 @@ fn tar(ws: &Workspace,
        filename: &str) -> CargoResult<()> {
     // Prepare the encoder and its header
     let filename = Path::new(filename);
-    let encoder = GzBuilder::new().filename(try!(util::path2bytes(filename)))
+    let encoder = GzBuilder::new().filename(util::path2bytes(filename)?)
                                   .write(dst, Compression::Best);
 
     // Put all package files into a compressed archive
     let mut ar = Builder::new(encoder);
-    let pkg = try!(ws.current());
+    let pkg = ws.current()?;
     let config = ws.config();
     let root = pkg.root();
-    for file in try!(src.list_files(pkg)).iter() {
+    for file in src.list_files(pkg)?.iter() {
         let relative = util::without_prefix(&file, &root).unwrap();
-        try!(check_filename(relative));
-        let relative = try!(relative.to_str().chain_error(|| {
+        check_filename(relative)?;
+        let relative = relative.to_str().chain_error(|| {
             human(format!("non-utf8 path in source directory: {}",
                           relative.display()))
-        }));
-        let mut file = try!(File::open(file).chain_error(|| {
+        })?;
+        let mut file = File::open(file).chain_error(|| {
             human(format!("failed to open for archiving: `{}`", file.display()))
-        }));
-        try!(config.shell().verbose(|shell| {
+        })?;
+        config.shell().verbose(|shell| {
             shell.status("Archiving", &relative)
-        }));
+        })?;
         let path = format!("{}-{}{}{}", pkg.name(), pkg.version(),
                            path::MAIN_SEPARATOR, relative);
 
@@ -228,38 +228,38 @@ fn tar(ws: &Workspace,
         // unpack the selectors 0.4.0 crate on crates.io. Either that or take a
         // look at rust-lang/cargo#2326
         let mut header = Header::new_ustar();
-        let metadata = try!(file.metadata().chain_error(|| {
+        let metadata = file.metadata().chain_error(|| {
             human(format!("could not learn metadata for: `{}`", relative))
-        }));
-        try!(header.set_path(&path).chain_error(|| {
+        })?;
+        header.set_path(&path).chain_error(|| {
             human(format!("failed to add to archive: `{}`", relative))
-        }));
+        })?;
         header.set_metadata(&metadata);
         header.set_cksum();
 
-        try!(ar.append(&header, &mut file).chain_error(|| {
+        ar.append(&header, &mut file).chain_error(|| {
             internal(format!("could not archive source file `{}`", relative))
-        }));
+        })?;
     }
-    let encoder = try!(ar.into_inner());
-    try!(encoder.finish());
+    let encoder = ar.into_inner()?;
+    encoder.finish()?;
     Ok(())
 }
 
 fn run_verify(ws: &Workspace, tar: &File, opts: &PackageOpts) -> CargoResult<()> {
     let config = ws.config();
-    let pkg = try!(ws.current());
+    let pkg = ws.current()?;
 
-    try!(config.shell().status("Verifying", pkg));
+    config.shell().status("Verifying", pkg)?;
 
-    let f = try!(GzDecoder::new(tar));
+    let f = GzDecoder::new(tar)?;
     let dst = pkg.root().join(&format!("target/package/{}-{}",
                                        pkg.name(), pkg.version()));
     if fs::metadata(&dst).is_ok() {
-        try!(fs::remove_dir_all(&dst));
+        fs::remove_dir_all(&dst)?;
     }
     let mut archive = Archive::new(f);
-    try!(archive.unpack(dst.parent().unwrap()));
+    archive.unpack(dst.parent().unwrap())?;
     let manifest_path = dst.join("Cargo.toml");
 
     // When packages are uploaded to a registry, all path dependencies are
@@ -270,10 +270,10 @@ fn run_verify(ws: &Workspace, tar: &File, opts: &PackageOpts) -> CargoResult<()>
     // location that the package was originally read from. In locking the
     // `SourceId` we're telling it that the corresponding `PathSource` will be
     // considered updated and we won't actually read any packages.
-    let cratesio = try!(SourceId::crates_io(config));
+    let cratesio = SourceId::crates_io(config)?;
     let precise = Some("locked".to_string());
-    let new_src = try!(SourceId::for_path(&dst)).with_precise(precise);
-    let new_pkgid = try!(PackageId::new(pkg.name(), pkg.version(), &new_src));
+    let new_src = SourceId::for_path(&dst)?.with_precise(precise);
+    let new_pkgid = PackageId::new(pkg.name(), pkg.version(), &new_src)?;
     let new_summary = pkg.summary().clone().map_dependencies(|d| {
         if !d.source_id().is_path() { return d }
         d.clone_inner().set_source_id(cratesio.clone()).into_dependency()
@@ -283,8 +283,8 @@ fn run_verify(ws: &Workspace, tar: &File, opts: &PackageOpts) -> CargoResult<()>
     let new_pkg = Package::new(new_manifest, &manifest_path);
 
     // Now that we've rewritten all our path dependencies, compile it!
-    let ws = try!(Workspace::one(new_pkg, config, None));
-    try!(ops::compile_ws(&ws, None, &ops::CompileOptions {
+    let ws = Workspace::one(new_pkg, config, None)?;
+    ops::compile_ws(&ws, None, &ops::CompileOptions {
         config: config,
         jobs: opts.jobs,
         target: None,
@@ -298,7 +298,7 @@ fn run_verify(ws: &Workspace, tar: &File, opts: &PackageOpts) -> CargoResult<()>
         mode: ops::CompileMode::Build,
         target_rustdoc_args: None,
         target_rustc_args: None,
-    }));
+    })?;
 
     Ok(())
 }
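The hunks above also show how the new operator composes with cargo's chain_error helper: the context-adding closure runs first, and the trailing ? then propagates the enriched error, as in tar(ws, &src, dst.file(), &filename).chain_error(|| human("failed to prepare local package for uploading"))?. A rough stand-alone sketch of that shape, using the standard library's map_err in place of chain_error (names and error types here are illustrative, not cargo's):

    use std::fs::File;

    // Attach context to the error, then let ? propagate it early.
    fn open_manifest(path: &str) -> Result<File, String> {
        let file = File::open(path)
            .map_err(|e| format!("failed to open `{}`: {}", path, e))?;
        Ok(file)
    }
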
index 94737d507d41739521fd35e153fb69608905a396..0461bc4c87fb056222327600ec720417504d39d6 100644 (file)
@@ -3,14 +3,14 @@ use core::{PackageIdSpec, Workspace};
 use util::CargoResult;
 
 pub fn pkgid(ws: &Workspace, spec: Option<&str>) -> CargoResult<PackageIdSpec> {
-    let resolve = match try!(ops::load_pkg_lockfile(ws)) {
+    let resolve = match ops::load_pkg_lockfile(ws)? {
         Some(resolve) => resolve,
         None => bail!("a Cargo.lock must exist for this command"),
     };
 
     let pkgid = match spec {
-        Some(spec) => try!(PackageIdSpec::query_str(spec, resolve.iter())),
-        None => try!(ws.current()).package_id(),
+        Some(spec) => PackageIdSpec::query_str(spec, resolve.iter())?,
+        None => ws.current()?.package_id(),
     };
     Ok(PackageIdSpec::from_package_id(pkgid))
 }
index 39028b798a2f583112e2f41a07e27a3ac54d6cbc..5c25edbf1c9ec20078e4f28b90533510c0f27d22 100644 (file)
@@ -11,7 +11,7 @@ use util::toml::Layout;
 pub fn read_manifest(path: &Path, source_id: &SourceId, config: &Config)
                      -> CargoResult<(EitherManifest, Vec<PathBuf>)> {
     trace!("read_package; path={}; source-id={}", path.display(), source_id);
-    let contents = try!(paths::read(path));
+    let contents = paths::read(path)?;
 
     let layout = Layout::from_project_path(path.parent().unwrap());
     let root = layout.root.clone();
@@ -24,7 +24,7 @@ pub fn read_manifest(path: &Path, source_id: &SourceId, config: &Config)
 pub fn read_package(path: &Path, source_id: &SourceId, config: &Config)
                     -> CargoResult<(Package, Vec<PathBuf>)> {
     trace!("read_package; path={}; source-id={}", path.display(), source_id);
-    let (manifest, nested) = try!(read_manifest(path, source_id, config));
+    let (manifest, nested) = read_manifest(path, source_id, config)?;
     let manifest = match manifest {
         EitherManifest::Real(manifest) => manifest,
         EitherManifest::Virtual(..) => {
@@ -43,7 +43,7 @@ pub fn read_packages(path: &Path, source_id: &SourceId, config: &Config)
 
     trace!("looking for root package: {}, source_id={}", path.display(), source_id);
 
-    try!(walk(path, &mut |dir| {
+    walk(path, &mut |dir| {
         trace!("looking for child package: {}", dir.display());
 
         // Don't recurse into hidden/dot directories unless we're at the toplevel
@@ -66,11 +66,11 @@ pub fn read_packages(path: &Path, source_id: &SourceId, config: &Config)
         }
 
         if has_manifest(dir) {
-            try!(read_nested_packages(dir, &mut all_packages, source_id, config,
-                                      &mut visited));
+            read_nested_packages(dir, &mut all_packages, source_id, config,
+                                      &mut visited)?;
         }
         Ok(true)
-    }));
+    })?;
 
     if all_packages.is_empty() {
         Err(human(format!("Could not find Cargo.toml in `{}`", path.display())))
@@ -81,7 +81,7 @@ pub fn read_packages(path: &Path, source_id: &SourceId, config: &Config)
 
 fn walk(path: &Path, callback: &mut FnMut(&Path) -> CargoResult<bool>)
         -> CargoResult<()> {
-    if !try!(callback(path)) {
+    if !callback(path)? {
         trace!("not processing {}", path.display());
         return Ok(())
     }
@@ -100,9 +100,9 @@ fn walk(path: &Path, callback: &mut FnMut(&Path) -> CargoResult<bool>)
         }
     };
     for dir in dirs {
-        let dir = try!(dir);
-        if try!(dir.file_type()).is_dir() {
-            try!(walk(&dir.path(), callback));
+        let dir = dir?;
+        if dir.file_type()?.is_dir() {
+            walk(&dir.path(), callback)?;
         }
     }
     Ok(())
@@ -119,9 +119,9 @@ fn read_nested_packages(path: &Path,
                         visited: &mut HashSet<PathBuf>) -> CargoResult<()> {
     if !visited.insert(path.to_path_buf()) { return Ok(()) }
 
-    let manifest_path = try!(find_project_manifest_exact(path, "Cargo.toml"));
+    let manifest_path = find_project_manifest_exact(path, "Cargo.toml")?;
 
-    let (manifest, nested) = try!(read_manifest(&manifest_path, source_id, config));
+    let (manifest, nested) = read_manifest(&manifest_path, source_id, config)?;
     let manifest = match manifest {
         EitherManifest::Real(manifest) => manifest,
         EitherManifest::Virtual(..) => return Ok(()),
@@ -147,8 +147,8 @@ fn read_nested_packages(path: &Path,
     if !source_id.is_registry() {
         for p in nested.iter() {
             let path = util::normalize_path(&path.join(p));
-            try!(read_nested_packages(&path, all_packages, source_id,
-                                      config, visited));
+            read_nested_packages(&path, all_packages, source_id,
+                                      config, visited)?;
         }
     }
 
index 897a3da5116820ed57b8bc2de5eb8470f587130d..1c4c4a485c113ac88dd3c8980fe012455847a49d 100644 (file)
@@ -8,7 +8,7 @@ pub fn run(ws: &Workspace,
            options: &ops::CompileOptions,
            args: &[String]) -> CargoResult<Option<ProcessError>> {
     let config = ws.config();
-    let root = try!(ws.current());
+    let root = ws.current()?;
 
     let mut bins = root.manifest().targets().iter().filter(|a| {
         !a.is_lib() && !a.is_custom_build() && match options.filter {
@@ -40,7 +40,7 @@ pub fn run(ws: &Workspace,
         }
     }
 
-    let compile = try!(ops::compile(ws, options));
+    let compile = ops::compile(ws, options)?;
     let exe = &compile.binaries[0];
     let exe = match util::without_prefix(&exe, config.cwd()) {
         Some(path) if path.file_name() == Some(path.as_os_str())
@@ -48,9 +48,9 @@ pub fn run(ws: &Workspace,
         Some(path) => path.to_path_buf(),
         None => exe.to_path_buf(),
     };
-    let mut process = try!(compile.target_process(exe, &root));
+    let mut process = compile.target_process(exe, &root)?;
     process.args(args).cwd(config.cwd());
 
-    try!(config.shell().status("Running", process.to_string()));
+    config.shell().status("Running", process.to_string())?;
     Ok(process.exec_replace().err())
 }
index f15032c2336626758c7a845663103822ece01219..afb8962589b1e1e4adcd2c8eee0f3566e4347212 100644 (file)
@@ -71,12 +71,12 @@ impl<'cfg> Compilation<'cfg> {
 
     /// See `process`.
     pub fn rustc_process(&self, pkg: &Package) -> CargoResult<ProcessBuilder> {
-        self.fill_env(try!(self.config.rustc()).process(), pkg, true)
+        self.fill_env(self.config.rustc()?.process(), pkg, true)
     }
 
     /// See `process`.
     pub fn rustdoc_process(&self, pkg: &Package) -> CargoResult<ProcessBuilder> {
-        self.fill_env(process(&*try!(self.config.rustdoc())), pkg, false)
+        self.fill_env(process(&*self.config.rustdoc()?), pkg, false)
     }
 
     /// See `process`.
@@ -128,7 +128,7 @@ impl<'cfg> Compilation<'cfg> {
         };
 
         search_path.extend(util::dylib_path().into_iter());
-        let search_path = try!(join_paths(&search_path, util::dylib_path_envvar()));
+        let search_path = join_paths(&search_path, util::dylib_path_envvar())?;
 
         cmd.env(util::dylib_path_envvar(), &search_path);
         if let Some(env) = self.extra_env.get(pkg.package_id()) {
index 4e0cf00db6af268a54c9906cdaa94e6d295e6501..fcdb3979583149c7d1a0b8e7f9e71b21cd170a22 100644 (file)
@@ -62,15 +62,15 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
                profiles: &'a Profiles) -> CargoResult<Context<'a, 'cfg>> {
 
         let dest = if build_config.release { "release" } else { "debug" };
-        let host_layout = try!(Layout::new(ws, None, &dest));
+        let host_layout = Layout::new(ws, None, &dest)?;
         let target_layout = match build_config.requested_target.as_ref() {
             Some(target) => {
-                Some(try!(Layout::new(ws, Some(&target), &dest)))
+                Some(Layout::new(ws, Some(&target), &dest)?)
             }
             None => None,
         };
 
-        let current_package = try!(ws.current()).package_id().clone();
+        let current_package = ws.current()?.package_id().clone();
         Ok(Context {
             host: host_layout,
             target: target_layout,
@@ -98,14 +98,14 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
     pub fn prepare(&mut self) -> CargoResult<()> {
         let _p = profile::start("preparing layout");
 
-        try!(self.host.prepare().chain_error(|| {
+        self.host.prepare().chain_error(|| {
             internal(format!("couldn't prepare build directories"))
-        }));
+        })?;
         match self.target {
             Some(ref mut target) => {
-                try!(target.prepare().chain_error(|| {
+                target.prepare().chain_error(|| {
                     internal(format!("couldn't prepare build directories"))
-                }));
+                })?;
             }
             None => {}
         }
@@ -128,13 +128,13 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
         crate_types.insert("bin".to_string());
         crate_types.insert("rlib".to_string());
         for unit in units {
-            try!(self.visit_crate_type(unit, &mut crate_types));
+            self.visit_crate_type(unit, &mut crate_types)?;
         }
-        try!(self.probe_target_info_kind(&crate_types, Kind::Target));
+        self.probe_target_info_kind(&crate_types, Kind::Target)?;
         if self.requested_target().is_none() {
             self.host_info = self.target_info.clone();
         } else {
-            try!(self.probe_target_info_kind(&crate_types, Kind::Host));
+            self.probe_target_info_kind(&crate_types, Kind::Host)?;
         }
         Ok(())
     }
@@ -152,8 +152,8 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
                 }
             }));
         }
-        for dep in try!(self.dep_targets(&unit)) {
-            try!(self.visit_crate_type(&dep, crate_types));
+        for dep in self.dep_targets(&unit)? {
+            self.visit_crate_type(&dep, crate_types)?;
         }
         Ok(())
     }
@@ -162,11 +162,11 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
                               crate_types: &BTreeSet<String>,
                               kind: Kind)
                               -> CargoResult<()> {
-        let rustflags = try!(env_args(self.config,
+        let rustflags = env_args(self.config,
                                       &self.build_config,
                                       kind,
-                                      "RUSTFLAGS"));
-        let mut process = try!(self.config.rustc()).process();
+                                      "RUSTFLAGS")?;
+        let mut process = self.config.rustc()?.process();
         process.arg("-")
                .arg("--crate-name").arg("_")
                .arg("--print=file-names")
@@ -184,13 +184,13 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
         with_cfg.arg("--print=cfg");
 
         let mut has_cfg = true;
-        let output = try!(with_cfg.exec_with_output().or_else(|_| {
+        let output = with_cfg.exec_with_output().or_else(|_| {
             has_cfg = false;
             process.exec_with_output()
         }).chain_error(|| {
             human(format!("failed to run `rustc` to learn about \
                            target-specific information"))
-        }));
+        })?;
 
         let error = str::from_utf8(&output.stderr).unwrap();
         let output = str::from_utf8(&output.stdout).unwrap();
@@ -245,9 +245,9 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
                                     -> CargoResult<()> {
         let mut visited = HashSet::new();
         for unit in units {
-            try!(self.walk_used_in_plugin_map(unit,
+            self.walk_used_in_plugin_map(unit,
                                               unit.target.for_host(),
-                                              &mut visited));
+                                              &mut visited)?;
         }
         Ok(())
     }
@@ -263,10 +263,10 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
         if is_plugin {
             self.used_in_plugin.insert(*unit);
         }
-        for unit in try!(self.dep_targets(unit)) {
-            try!(self.walk_used_in_plugin_map(&unit,
+        for unit in self.dep_targets(unit)? {
+            self.walk_used_in_plugin_map(&unit,
                                               is_plugin || unit.target.for_host(),
-                                              visited));
+                                              visited)?;
         }
         Ok(())
     }
@@ -407,14 +407,14 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
                 TargetKind::CustomBuild |
                 TargetKind::Bench |
                 TargetKind::Test => {
-                    try!(add("bin", false));
+                    add("bin", false)?;
                 }
                 TargetKind::Lib(..) if unit.profile.test => {
-                    try!(add("bin", false));
+                    add("bin", false)?;
                 }
                 TargetKind::Lib(ref libs) => {
                     for lib in libs {
-                        try!(add(lib.crate_type(), lib.linkable()));
+                        add(lib.crate_type(), lib.linkable())?;
                     }
                 }
             }
@@ -443,7 +443,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
 
         let id = unit.pkg.package_id();
         let deps = self.resolve.deps(id);
-        let mut ret = try!(deps.filter(|dep| {
+        let mut ret = deps.filter(|dep| {
             unit.pkg.dependencies().iter().filter(|d| {
                 d.name() == dep.name() && d.version_req().matches(dep.version())
             }).any(|d| {
@@ -494,7 +494,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
                 }
                 Err(e) => Some(Err(e))
             }
-        }).collect::<CargoResult<Vec<_>>>());
+        }).collect::<CargoResult<Vec<_>>>()?;
 
         // If this target is a build script, then what we've collected so far is
         // all we need. If this isn't a build script, then it depends on the
@@ -555,7 +555,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
             profile: &self.profiles.dev,
             ..*unit
         };
-        let deps = try!(self.dep_targets(&tmp));
+        let deps = self.dep_targets(&tmp)?;
         Ok(deps.iter().filter_map(|unit| {
             if !unit.target.linkable() || unit.pkg.manifest().links().is_none() {
                 return None
@@ -589,7 +589,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
         // the documentation of the library being built.
         let mut ret = Vec::new();
         for dep in deps {
-            let dep = try!(dep);
+            let dep = dep?;
             let lib = match dep.targets().iter().find(|t| t.is_lib()) {
                 Some(lib) => lib,
                 None => continue,
@@ -779,14 +779,14 @@ fn env_args(config: &Config,
     // Then the target.*.rustflags value
     let target = build_config.requested_target.as_ref().unwrap_or(&build_config.host_triple);
     let key = format!("target.{}.{}", target, name);
-    if let Some(args) = try!(config.get_list(&key)) {
+    if let Some(args) = config.get_list(&key)? {
         let args = args.val.into_iter().map(|a| a.0);
         return Ok(args.collect());
     }
 
     // Then the build.rustflags value
     let key = format!("build.{}", name);
-    if let Some(args) = try!(config.get_list(&key)) {
+    if let Some(args) = config.get_list(&key)? {
         let args = args.val.into_iter().map(|a| a.0);
         return Ok(args.collect());
     }
index 54c688134be57efea6940311ade8b6728f648187..375a6ea434ccc2da65a23263fd327f8773c170b6 100644 (file)
@@ -69,13 +69,13 @@ pub fn prepare<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
     let (work_dirty, work_fresh) = if overridden {
         (Work::new(|_| Ok(())), Work::new(|_| Ok(())))
     } else {
-        try!(build_work(cx, unit))
+        build_work(cx, unit)?
     };
 
     // Now that we've prep'd our work, build the work needed to manage the
     // fingerprint and then start returning that upwards.
     let (freshness, dirty, fresh) =
-            try!(fingerprint::prepare_build_cmd(cx, unit));
+            fingerprint::prepare_build_cmd(cx, unit)?;
 
     Ok((work_dirty.then(dirty), work_fresh.then(fresh), freshness))
 }
@@ -97,7 +97,7 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
     // package's library profile.
     let profile = cx.lib_profile(unit.pkg.package_id());
     let to_exec = to_exec.into_os_string();
-    let mut cmd = try!(cx.compilation.host_process(to_exec, unit.pkg));
+    let mut cmd = cx.compilation.host_process(to_exec, unit.pkg)?;
     cmd.env("OUT_DIR", &build_output)
        .env("CARGO_MANIFEST_DIR", unit.pkg.root())
        .env("NUM_JOBS", &cx.jobs().to_string())
@@ -109,8 +109,8 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
        .env("OPT_LEVEL", &profile.opt_level)
        .env("PROFILE", if cx.build_config.release { "release" } else { "debug" })
        .env("HOST", cx.host_triple())
-       .env("RUSTC", &try!(cx.config.rustc()).path)
-       .env("RUSTDOC", &*try!(cx.config.rustdoc()));
+       .env("RUSTC", &cx.config.rustc()?.path)
+       .env("RUSTDOC", &*cx.config.rustdoc()?);
 
     if let Some(links) = unit.pkg.manifest().links() {
         cmd.env("CARGO_MANIFEST_LINKS", links);
@@ -150,7 +150,7 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
     // This information will be used at build-time later on to figure out which
     // sorts of variables need to be discovered at that time.
     let lib_deps = {
-        try!(cx.dep_run_custom_build(unit)).iter().filter_map(|unit| {
+        cx.dep_run_custom_build(unit)?.iter().filter_map(|unit| {
             if unit.profile.run_custom_build {
                 Some((unit.pkg.manifest().links().unwrap().to_string(),
                       unit.pkg.package_id().clone()))
@@ -177,8 +177,8 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
     };
     cx.build_explicit_deps.insert(*unit, (output_file.clone(), rerun_if_changed));
 
-    try!(fs::create_dir_all(&cx.layout(&host_unit).build(unit.pkg)));
-    try!(fs::create_dir_all(&cx.layout(unit).build(unit.pkg)));
+    fs::create_dir_all(&cx.layout(&host_unit).build(unit.pkg))?;
+    fs::create_dir_all(&cx.layout(unit).build(unit.pkg))?;
 
     // Prepare the unit of "dirty work" which will actually run the custom build
     // command.
@@ -191,10 +191,10 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
         // If we have an old build directory, then just move it into place,
         // otherwise create it!
         if fs::metadata(&build_output).is_err() {
-            try!(fs::create_dir(&build_output).chain_error(|| {
+            fs::create_dir(&build_output).chain_error(|| {
                 internal("failed to create script output directory for \
                           build command")
-            }));
+            })?;
         }
 
         // For all our native lib dependencies, pick up their metadata to pass
@@ -205,10 +205,10 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
             let build_state = build_state.outputs.lock().unwrap();
             for (name, id) in lib_deps {
                 let key = (id.clone(), kind);
-                let state = try!(build_state.get(&key).chain_error(|| {
+                let state = build_state.get(&key).chain_error(|| {
                     internal(format!("failed to locate build state for env \
                                       vars: {}/{:?}", id, kind))
-                }));
+                })?;
                 let data = &state.metadata;
                 for &(ref key, ref value) in data.iter() {
                     cmd.env(&format!("DEP_{}_{}", super::envify(&name),
@@ -216,22 +216,22 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
                 }
             }
             if let Some(build_scripts) = build_scripts {
-                try!(super::add_plugin_deps(&mut cmd, &build_state,
-                                            &build_scripts));
+                super::add_plugin_deps(&mut cmd, &build_state,
+                                            &build_scripts)?;
             }
         }
 
         // And now finally, run the build command itself!
         state.running(&cmd);
-        let output = try!(cmd.exec_with_streaming(
+        let output = cmd.exec_with_streaming(
             &mut |out_line| { state.stdout(out_line); Ok(()) },
             &mut |err_line| { state.stderr(err_line); Ok(()) },
         ).map_err(|mut e| {
             e.desc = format!("failed to run custom build command for `{}`\n{}",
                              pkg_name, e.desc);
             Human(e)
-        }));
-        try!(paths::write(&output_file, &output.stdout));
+        })?;
+        paths::write(&output_file, &output.stdout)?;
 
         // After the build command has finished running, we need to be sure to
         // remember all of its output so we can later discover precisely what it
@@ -240,7 +240,7 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
         // This is also the location where we provide feedback into the build
         // state informing what variables were discovered via our script as
         // well.
-        let parsed_output = try!(BuildOutput::parse(&output.stdout, &pkg_name));
+        let parsed_output = BuildOutput::parse(&output.stdout, &pkg_name)?;
         build_state.insert(id, kind, parsed_output);
         Ok(())
     });
@@ -252,7 +252,7 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
         let (id, pkg_name, build_state, output_file) = all;
         let output = match prev_output {
             Some(output) => output,
-            None => try!(BuildOutput::parse_file(&output_file, &pkg_name)),
+            None => BuildOutput::parse_file(&output_file, &pkg_name)?,
         };
         build_state.insert(id, kind, output);
         Ok(())
@@ -294,7 +294,7 @@ impl BuildState {
 
 impl BuildOutput {
     pub fn parse_file(path: &Path, pkg_name: &str) -> CargoResult<BuildOutput> {
-        let contents = try!(paths::read_bytes(path));
+        let contents = paths::read_bytes(path)?;
         BuildOutput::parse(&contents, pkg_name)
     }
 
@@ -336,9 +336,8 @@ impl BuildOutput {
 
             match key {
                 "rustc-flags" => {
-                    let (libs, links) = try!(
-                        BuildOutput::parse_rustc_flags(value, &whence)
-                    );
+                    let (libs, links) =
+                        BuildOutput::parse_rustc_flags(value, &whence)?;
                     library_links.extend(links.into_iter());
                     library_paths.extend(libs.into_iter());
                 }
@@ -407,7 +407,7 @@ pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>,
                            -> CargoResult<()> {
     let mut ret = HashMap::new();
     for unit in units {
-        try!(build(&mut ret, cx, unit));
+        build(&mut ret, cx, unit)?;
     }
     cx.build_scripts.extend(ret.into_iter().map(|(k, v)| {
         (k, Arc::new(v))
@@ -431,8 +431,8 @@ pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>,
         if !unit.target.is_custom_build() && unit.pkg.has_custom_build() {
             add_to_link(&mut ret, unit.pkg.package_id(), unit.kind);
         }
-        for unit in try!(cx.dep_targets(unit)).iter() {
-            let dep_scripts = try!(build(out, cx, unit));
+        for unit in cx.dep_targets(unit)?.iter() {
+            let dep_scripts = build(out, cx, unit)?;
 
             if unit.target.for_host() {
                 ret.plugins.extend(dep_scripts.to_link.iter()
index b16cf7f90df4651bb7f94070e8d5682bececf37d..ae0497e716d46bf1f6508a7f1fb03f64e1279c7d 100644 (file)
@@ -53,7 +53,7 @@ pub fn prepare_target<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
 
     debug!("fingerprint at: {}", loc.display());
 
-    let fingerprint = try!(calculate(cx, unit));
+    let fingerprint = calculate(cx, unit)?;
     let compare = compare_old_fingerprint(&loc, &*fingerprint);
     log_compare(unit, &compare);
 
@@ -70,10 +70,10 @@ pub fn prepare_target<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
     if compare.is_err() {
         let source_id = unit.pkg.package_id().source_id();
         let sources = cx.packages.sources();
-        let source = try!(sources.get(source_id).chain_error(|| {
+        let source = sources.get(source_id).chain_error(|| {
             internal("missing package source")
-        }));
-        try!(source.verify(unit.pkg.package_id()));
+        })?;
+        source.verify(unit.pkg.package_id())?;
     }
 
     let root = cx.out_dir(unit);
@@ -82,7 +82,7 @@ pub fn prepare_target<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
         missing_outputs = !root.join(unit.target.crate_name())
                                .join("index.html").exists();
     } else {
-        for (filename, _) in try!(cx.target_filenames(unit)) {
+        for (filename, _) in cx.target_filenames(unit)? {
             missing_outputs |= fs::metadata(root.join(filename)).is_err();
         }
     }
@@ -145,9 +145,9 @@ impl Fingerprint {
     fn update_local(&self) -> CargoResult<()> {
         match self.local {
             LocalFingerprint::MtimeBased(ref slot, ref path) => {
-                let meta = try!(fs::metadata(path).chain_error(|| {
+                let meta = fs::metadata(path).chain_error(|| {
                     internal(format!("failed to stat `{}`", path.display()))
-                }));
+                })?;
                 let mtime = FileTime::from_last_modification_time(&meta);
                 *slot.0.lock().unwrap() = Some(mtime);
             }
@@ -242,19 +242,19 @@ impl hash::Hash for Fingerprint {
 impl Encodable for Fingerprint {
     fn encode<E: Encoder>(&self, e: &mut E) -> Result<(), E::Error> {
         e.emit_struct("Fingerprint", 6, |e| {
-            try!(e.emit_struct_field("rustc", 0, |e| self.rustc.encode(e)));
-            try!(e.emit_struct_field("target", 1, |e| self.target.encode(e)));
-            try!(e.emit_struct_field("profile", 2, |e| self.profile.encode(e)));
-            try!(e.emit_struct_field("local", 3, |e| self.local.encode(e)));
-            try!(e.emit_struct_field("features", 4, |e| {
+            e.emit_struct_field("rustc", 0, |e| self.rustc.encode(e))?;
+            e.emit_struct_field("target", 1, |e| self.target.encode(e))?;
+            e.emit_struct_field("profile", 2, |e| self.profile.encode(e))?;
+            e.emit_struct_field("local", 3, |e| self.local.encode(e))?;
+            e.emit_struct_field("features", 4, |e| {
                 self.features.encode(e)
-            }));
-            try!(e.emit_struct_field("deps", 5, |e| {
+            })?;
+            e.emit_struct_field("deps", 5, |e| {
                 self.deps.iter().map(|&(ref a, ref b)| {
                     (a, b.hash())
                 }).collect::<Vec<_>>().encode(e)
-            }));
-            try!(e.emit_struct_field("rustflags", 6, |e| self.rustflags.encode(e)));
+            })?;
+            e.emit_struct_field("rustflags", 6, |e| self.rustflags.encode(e))?;
             Ok(())
         })
     }
@@ -267,15 +267,15 @@ impl Decodable for Fingerprint {
         }
         d.read_struct("Fingerprint", 6, |d| {
             Ok(Fingerprint {
-                rustc: try!(d.read_struct_field("rustc", 0, decode)),
-                target: try!(d.read_struct_field("target", 1, decode)),
-                profile: try!(d.read_struct_field("profile", 2, decode)),
-                local: try!(d.read_struct_field("local", 3, decode)),
-                features: try!(d.read_struct_field("features", 4, decode)),
+                rustc: d.read_struct_field("rustc", 0, decode)?,
+                target: d.read_struct_field("target", 1, decode)?,
+                profile: d.read_struct_field("profile", 2, decode)?,
+                local: d.read_struct_field("local", 3, decode)?,
+                features: d.read_struct_field("features", 4, decode)?,
                 memoized_hash: Mutex::new(None),
                 deps: {
                     let decode = decode::<Vec<(String, u64)>, D>;
-                    let v = try!(d.read_struct_field("deps", 5, decode));
+                    let v = d.read_struct_field("deps", 5, decode)?;
                     v.into_iter().map(|(name, hash)| {
                         (name, Arc::new(Fingerprint {
                             rustc: 0,
@@ -289,7 +289,7 @@ impl Decodable for Fingerprint {
                         }))
                     }).collect()
                 },
-                rustflags: try!(d.read_struct_field("rustflags", 6, decode)),
+                rustflags: d.read_struct_field("rustflags", 6, decode)?,
             })
         })
     }
@@ -311,7 +311,7 @@ impl Encodable for MtimeSlot {
 
 impl Decodable for MtimeSlot {
     fn decode<D: Decoder>(e: &mut D) -> Result<MtimeSlot, D::Error> {
-        let kind: Option<(u64, u32)> = try!(Decodable::decode(e));
+        let kind: Option<(u64, u32)> = Decodable::decode(e)?;
         Ok(MtimeSlot(Mutex::new(kind.map(|(s, n)| {
             FileTime::from_seconds_since_1970(s, n)
         }))))
@@ -353,33 +353,33 @@ fn calculate<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
     // elsewhere. Also skip fingerprints of binaries because they don't actually
     // induce a recompile, they're just dependencies in the sense that they need
     // to be built.
-    let deps = try!(cx.dep_targets(unit));
-    let deps = try!(deps.iter().filter(|u| {
+    let deps = cx.dep_targets(unit)?;
+    let deps = deps.iter().filter(|u| {
         !u.target.is_custom_build() && !u.target.is_bin()
     }).map(|unit| {
         calculate(cx, unit).map(|fingerprint| {
             (unit.pkg.package_id().to_string(), fingerprint)
         })
-    }).collect::<CargoResult<Vec<_>>>());
+    }).collect::<CargoResult<Vec<_>>>()?;
 
     // And finally, calculate what our own local fingerprint is
     let local = if use_dep_info(unit) {
         let dep_info = dep_info_loc(cx, unit);
-        let mtime = try!(dep_info_mtime_if_fresh(&dep_info));
+        let mtime = dep_info_mtime_if_fresh(&dep_info)?;
         LocalFingerprint::MtimeBased(MtimeSlot(Mutex::new(mtime)), dep_info)
     } else {
-        let fingerprint = try!(pkg_fingerprint(cx, unit.pkg));
+        let fingerprint = pkg_fingerprint(cx, unit.pkg)?;
         LocalFingerprint::Precalculated(fingerprint)
     };
     let mut deps = deps;
     deps.sort_by(|&(ref a, _), &(ref b, _)| a.cmp(b));
     let extra_flags = if unit.profile.doc {
-        try!(cx.rustdocflags_args(unit))
+        cx.rustdocflags_args(unit)?
     } else {
-        try!(cx.rustflags_args(unit))
+        cx.rustflags_args(unit)?
     };
     let fingerprint = Arc::new(Fingerprint {
-        rustc: util::hash_u64(&try!(cx.config.rustc()).verbose_version),
+        rustc: util::hash_u64(&cx.config.rustc()?.verbose_version),
         target: util::hash_u64(&unit.target),
         profile: util::hash_u64(&unit.profile),
         features: format!("{:?}", features),
@@ -444,7 +444,7 @@ pub fn prepare_build_cmd<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
                 let &(ref output, ref deps) = &cx.build_explicit_deps[unit];
 
                 let local = if deps.is_empty() {
-                    let s = try!(pkg_fingerprint(cx, unit.pkg));
+                    let s = pkg_fingerprint(cx, unit.pkg)?;
                     LocalFingerprint::Precalculated(s)
                 } else {
                     let deps = deps.iter().map(|p| unit.pkg.root().join(p));
@@ -490,7 +490,7 @@ pub fn prepare_build_cmd<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
                 let slot = MtimeSlot(Mutex::new(None));
                 fingerprint.local = LocalFingerprint::MtimeBased(slot,
                                                                  output_path);
-                try!(fingerprint.update_local());
+                fingerprint.update_local()?;
             }
         }
         write_fingerprint(&loc, &fingerprint)
@@ -502,9 +502,9 @@ pub fn prepare_build_cmd<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
 fn write_fingerprint(loc: &Path, fingerprint: &Fingerprint) -> CargoResult<()> {
     let hash = fingerprint.hash();
     debug!("write fingerprint: {}", loc.display());
-    try!(paths::write(&loc, util::to_hex(hash).as_bytes()));
-    try!(paths::write(&loc.with_extension("json"),
-                      json::encode(&fingerprint).unwrap().as_bytes()));
+    paths::write(&loc, util::to_hex(hash).as_bytes())?;
+    paths::write(&loc.with_extension("json"),
+                      json::encode(&fingerprint).unwrap().as_bytes())?;
     Ok(())
 }
 
@@ -514,10 +514,10 @@ pub fn prepare_init(cx: &mut Context, unit: &Unit) -> CargoResult<()> {
     let new2 = new1.clone();
 
     if fs::metadata(&new1).is_err() {
-        try!(fs::create_dir(&new1));
+        fs::create_dir(&new1)?;
     }
     if fs::metadata(&new2).is_err() {
-        try!(fs::create_dir(&new2));
+        fs::create_dir(&new2)?;
     }
     Ok(())
 }
@@ -534,17 +534,17 @@ pub fn dep_info_loc(cx: &Context, unit: &Unit) -> PathBuf {
 
 fn compare_old_fingerprint(loc: &Path, new_fingerprint: &Fingerprint)
                            -> CargoResult<()> {
-    let old_fingerprint_short = try!(paths::read(loc));
+    let old_fingerprint_short = paths::read(loc)?;
     let new_hash = new_fingerprint.hash();
 
     if util::to_hex(new_hash) == old_fingerprint_short {
         return Ok(())
     }
 
-    let old_fingerprint_json = try!(paths::read(&loc.with_extension("json")));
-    let old_fingerprint = try!(json::decode(&old_fingerprint_json).chain_error(|| {
+    let old_fingerprint_json = paths::read(&loc.with_extension("json"))?;
+    let old_fingerprint = json::decode(&old_fingerprint_json).chain_error(|| {
         internal(format!("failed to deserialize json"))
-    }));
+    })?;
     new_fingerprint.compare(&old_fingerprint)
 }
 
@@ -575,15 +575,15 @@ fn dep_info_mtime_if_fresh(dep_info: &Path) -> CargoResult<Option<FileTime>> {
     if fs_try!(f.read_until(0, &mut cwd)) == 0 {
         return Ok(None)
     }
-    let cwd = try!(util::bytes2path(&cwd[..cwd.len()-1]));
+    let cwd = util::bytes2path(&cwd[..cwd.len()-1])?;
     let line = match f.lines().next() {
         Some(Ok(line)) => line,
         _ => return Ok(None),
     };
-    let pos = try!(line.find(": ").chain_error(|| {
+    let pos = line.find(": ").chain_error(|| {
         internal(format!("dep-info not in an understood format: {}",
                          dep_info.display()))
-    }));
+    })?;
     let deps = &line[pos + 2..];
 
     let mut paths = Vec::new();
@@ -596,9 +596,9 @@ fn dep_info_mtime_if_fresh(dep_info: &Path) -> CargoResult<Option<FileTime>> {
         while file.ends_with("\\") {
             file.pop();
             file.push(' ');
-            file.push_str(try!(deps.next().chain_error(|| {
+            file.push_str(deps.next().chain_error(|| {
                 internal(format!("malformed dep-info format, trailing \\"))
-            })));
+            })?);
         }
         paths.push(cwd.join(&file));
     }
@@ -609,9 +609,9 @@ fn dep_info_mtime_if_fresh(dep_info: &Path) -> CargoResult<Option<FileTime>> {
 fn pkg_fingerprint(cx: &Context, pkg: &Package) -> CargoResult<String> {
     let source_id = pkg.package_id().source_id();
     let sources = cx.packages.sources();
-    let source = try!(sources.get(source_id).chain_error(|| {
+    let source = sources.get(source_id).chain_error(|| {
         internal("missing package source")
-    }));
+    })?;
     source.fingerprint(pkg)
 }
 
@@ -676,12 +676,12 @@ fn filename(unit: &Unit) -> String {
 // next time.
 pub fn append_current_dir(path: &Path, cwd: &Path) -> CargoResult<()> {
     debug!("appending {} <- {}", path.display(), cwd.display());
-    let mut f = try!(OpenOptions::new().read(true).write(true).open(path));
+    let mut f = OpenOptions::new().read(true).write(true).open(path)?;
     let mut contents = Vec::new();
-    try!(f.read_to_end(&mut contents));
-    try!(f.seek(SeekFrom::Start(0)));
-    try!(f.write_all(try!(util::path2bytes(cwd))));
-    try!(f.write_all(&[0]));
-    try!(f.write_all(&contents));
+    f.read_to_end(&mut contents)?;
+    f.seek(SeekFrom::Start(0))?;
+    f.write_all(util::path2bytes(cwd)?)?;
+    f.write_all(&[0])?;
+    f.write_all(&contents)?;
     Ok(())
 }
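In the Fingerprint encode/decode hunks above, ? appears inside the closures passed to emit_struct and read_struct; that works because ? returns from the innermost function or closure whose return type is a compatible Result, not from the outer caller. A small self-contained illustration of that scoping (generic code, not the rustc_serialize API):

    use std::num::ParseIntError;

    fn parse_pair(s: &str) -> Result<(i32, i32), ParseIntError> {
        // ? inside this closure returns from the closure only.
        let parse_field = |field: &str| -> Result<i32, ParseIntError> {
            Ok(field.trim().parse::<i32>()?)
        };
        let mut it = s.split(',');
        let a = parse_field(it.next().unwrap_or("0"))?;
        let b = parse_field(it.next().unwrap_or("0"))?;
        Ok((a, b))
    }
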
index ae7ba303738fd359ab52a97e978c70a693963bfa..219a6d4374968b67d34204184b766141b2730c58 100644 (file)
@@ -38,7 +38,7 @@ impl Work {
 
     pub fn then(self, next: Work) -> Work {
         Work::new(move |state| {
-            try!(self.call(state));
+            self.call(state)?;
             next.call(state)
         })
     }
index 39d1c45a7852572de2265c0c11ce7efd20d81abe..e69bd33cd9157f32be5c93af68cfde29637697c8 100644 (file)
@@ -100,7 +100,7 @@ impl<'a> JobQueue<'a> {
                          job: Job,
                          fresh: Freshness) -> CargoResult<()> {
         let key = Key::new(unit);
-        let deps = try!(key.dependencies(cx));
+        let deps = key.dependencies(cx)?;
         self.queue.queue(Fresh, key, Vec::new(), &deps).push((job, fresh));
         *self.counts.entry(key.pkg).or_insert(0) += 1;
         Ok(())
@@ -141,7 +141,7 @@ impl<'a> JobQueue<'a> {
             while error.is_none() && self.active < self.jobs {
                 if !queue.is_empty() {
                     let (key, job, fresh) = queue.remove(0);
-                    try!(self.run(key, fresh, job, cx.config, scope));
+                    self.run(key, fresh, job, cx.config, scope)?;
                 } else if let Some((fresh, key, jobs)) = self.queue.dequeue() {
                     let total_fresh = jobs.iter().fold(fresh, |fresh, &(_, f)| {
                         f.combine(fresh)
@@ -165,28 +165,28 @@ impl<'a> JobQueue<'a> {
 
             match msg {
                 Message::Run(cmd) => {
-                    try!(cx.config.shell().verbose(|c| c.status("Running", &cmd)));
+                    cx.config.shell().verbose(|c| c.status("Running", &cmd))?;
                 }
                 Message::Stdout(out) => {
                     if cx.config.extra_verbose() {
-                        try!(writeln!(cx.config.shell().out(), "{}", out));
+                        writeln!(cx.config.shell().out(), "{}", out)?;
                     }
                 }
                 Message::Stderr(err) => {
                     if cx.config.extra_verbose() {
-                        try!(writeln!(cx.config.shell().err(), "{}", err));
+                        writeln!(cx.config.shell().err(), "{}", err)?;
                     }
                 }
                 Message::Finish(result) => {
                     info!("end: {:?}", key);
                     self.active -= 1;
                     match result {
-                        Ok(()) => try!(self.finish(key, cx)),
+                        Ok(()) => self.finish(key, cx)?,
                         Err(e) => {
                             if self.active > 0 {
-                                try!(cx.config.shell().say(
+                                cx.config.shell().say(
                                             "Build failed, waiting for other \
-                                             jobs to finish...", YELLOW));
+                                             jobs to finish...", YELLOW)?;
                             }
                             if error.is_none() {
                                 error = Some(e);
@@ -210,10 +210,10 @@ impl<'a> JobQueue<'a> {
                                    duration.subsec_nanos() / 10000000);
         if self.queue.is_empty() {
             if !self.is_doc_all {
-                try!(cx.config.shell().status("Finished", format!("{} [{}] target(s) in {}",
+                cx.config.shell().status("Finished", format!("{} [{}] target(s) in {}",
                                                                   build_type,
                                                                   opt_type,
-                                                                  time_elapsed)));
+                                                                  time_elapsed))?;
             }
             Ok(())
         } else if let Some(e) = error {
@@ -247,7 +247,7 @@ impl<'a> JobQueue<'a> {
         });
 
         // Print out some nice progress information
-        try!(self.note_working_on(config, &key, fresh));
+        self.note_working_on(config, &key, fresh)?;
 
         Ok(())
     }
@@ -257,7 +257,7 @@ impl<'a> JobQueue<'a> {
             let output = cx.build_state.outputs.lock().unwrap();
             if let Some(output) = output.get(&(key.pkg.clone(), key.kind)) {
                 for warning in output.warnings.iter() {
-                    try!(cx.config.shell().warn(warning));
+                    cx.config.shell().warn(warning)?;
                 }
             }
         }
@@ -293,15 +293,15 @@ impl<'a> JobQueue<'a> {
             Dirty => {
                 if key.profile.doc {
                     self.documented.insert(key.pkg);
-                    try!(config.shell().status("Documenting", key.pkg));
+                    config.shell().status("Documenting", key.pkg)?;
                 } else {
                     self.compiled.insert(key.pkg);
-                    try!(config.shell().status("Compiling", key.pkg));
+                    config.shell().status("Compiling", key.pkg)?;
                 }
             }
             Fresh if self.counts[key.pkg] == 0 => {
                 self.compiled.insert(key.pkg);
-                try!(config.shell().verbose(|c| c.status("Fresh", key.pkg)));
+                config.shell().verbose(|c| c.status("Fresh", key.pkg))?;
             }
             Fresh => {}
         }
@@ -322,12 +322,12 @@ impl<'a> Key<'a> {
     fn dependencies<'cfg>(&self, cx: &Context<'a, 'cfg>)
                           -> CargoResult<Vec<Key<'a>>> {
         let unit = Unit {
-            pkg: try!(cx.get_package(self.pkg)),
+            pkg: cx.get_package(self.pkg)?,
             target: self.target,
             profile: self.profile,
             kind: self.kind,
         };
-        let targets = try!(cx.dep_targets(&unit));
+        let targets = cx.dep_targets(&unit)?;
         Ok(targets.iter().filter_map(|unit| {
             // Binaries aren't actually needed to *compile* tests, just to run
             // them, so we don't include this dependency edge in the job graph.
index 18ae026e19396d1447ca38e643d50b7402179754..1e371a9517e1d07328c029c81542f1a2e5001f55 100644 (file)
@@ -78,8 +78,8 @@ impl Layout {
         // the target triple as a Path and then just use the file stem as the
         // component for the directory name.
         if let Some(triple) = triple {
-            path.push(try!(Path::new(triple).file_stem()
-                           .ok_or(human(format!("target was empty")))));
+            path.push(Path::new(triple).file_stem()
+                           .ok_or(human(format!("target was empty")))?);
         }
         path.push(dest);
         Layout::at(ws.config(), path)
@@ -89,7 +89,7 @@ impl Layout {
         // For now we don't do any more finer-grained locking on the artifact
         // directory, so just lock the entire thing for the duration of this
         // compile.
-        let lock = try!(root.open_rw(".cargo-lock", config, "build directory"));
+        let lock = root.open_rw(".cargo-lock", config, "build directory")?;
         let root = root.into_path_unlocked();
 
         Ok(Layout {
@@ -105,20 +105,20 @@ impl Layout {
 
     pub fn prepare(&mut self) -> io::Result<()> {
         if fs::metadata(&self.root).is_err() {
-            try!(fs::create_dir_all(&self.root));
+            fs::create_dir_all(&self.root)?;
         }
 
-        try!(mkdir(&self.deps));
-        try!(mkdir(&self.native));
-        try!(mkdir(&self.fingerprint));
-        try!(mkdir(&self.examples));
-        try!(mkdir(&self.build));
+        mkdir(&self.deps)?;
+        mkdir(&self.native)?;
+        mkdir(&self.fingerprint)?;
+        mkdir(&self.examples)?;
+        mkdir(&self.build)?;
 
         return Ok(());
 
         fn mkdir(dir: &Path) -> io::Result<()> {
             if fs::metadata(&dir).is_err() {
-                try!(fs::create_dir(dir));
+                fs::create_dir(dir)?;
             }
             Ok(())
         }
index 6db0bb6f35e34fc02492035a1479cf0aab5aea0a..a7aedbe6b755a7541024d70eb27cff841d43571f 100644 (file)
@@ -81,15 +81,15 @@ pub fn compile_targets<'a, 'cfg: 'a>(ws: &Workspace<'cfg>,
         })
     }).collect::<Vec<_>>();
 
-    let mut cx = try!(Context::new(ws, resolve, packages, config,
-                                   build_config, profiles));
+    let mut cx = Context::new(ws, resolve, packages, config,
+                                   build_config, profiles)?;
 
     let mut queue = JobQueue::new(&cx);
 
-    try!(cx.prepare());
-    try!(cx.probe_target_info(&units));
-    try!(cx.build_used_in_plugin_map(&units));
-    try!(custom_build::build_map(&mut cx, &units));
+    cx.prepare()?;
+    cx.probe_target_info(&units)?;
+    cx.build_used_in_plugin_map(&units)?;
+    custom_build::build_map(&mut cx, &units)?;
 
     for unit in units.iter() {
         // Build up a list of pending jobs, each of which represent
@@ -97,11 +97,11 @@ pub fn compile_targets<'a, 'cfg: 'a>(ws: &Workspace<'cfg>,
         // part of this, that's all done next as part of the `execute`
         // function which will run everything in order with proper
         // parallelism.
-        try!(compile(&mut cx, &mut queue, unit));
+        compile(&mut cx, &mut queue, unit)?;
     }
 
     // Now that we've figured out everything that we're going to do, do it!
-    try!(queue.execute(&mut cx));
+    queue.execute(&mut cx)?;
 
     for unit in units.iter() {
         let out_dir = cx.layout(unit).build_out(unit.pkg)
@@ -110,7 +110,7 @@ pub fn compile_targets<'a, 'cfg: 'a>(ws: &Workspace<'cfg>,
           .or_insert(Vec::new())
           .push(("OUT_DIR".to_string(), out_dir));
 
-        for (filename, _linkable) in try!(cx.target_filenames(unit)) {
+        for (filename, _linkable) in cx.target_filenames(unit)? {
             let dst = cx.out_dir(unit).join(filename);
             if unit.profile.test {
                 cx.compilation.tests.push((unit.pkg.clone(),
@@ -126,7 +126,7 @@ pub fn compile_targets<'a, 'cfg: 'a>(ws: &Workspace<'cfg>,
             if !unit.target.is_lib() { continue }
 
             // Include immediate lib deps as well
-            for unit in try!(cx.dep_targets(unit)).iter() {
+            for unit in cx.dep_targets(unit)?.iter() {
                 let pkgid = unit.pkg.package_id();
                 if !unit.target.is_lib() { continue }
                 if unit.profile.doc { continue }
@@ -134,7 +134,7 @@ pub fn compile_targets<'a, 'cfg: 'a>(ws: &Workspace<'cfg>,
                     continue
                 }
 
-                let v = try!(cx.target_filenames(unit));
+                let v = cx.target_filenames(unit)?;
                 let v = v.into_iter().map(|(f, _)| {
                     (unit.target.clone(), cx.out_dir(unit).join(f))
                 }).collect::<Vec<_>>();
@@ -173,39 +173,39 @@ fn compile<'a, 'cfg: 'a>(cx: &mut Context<'a, 'cfg>,
     // we've got everything constructed.
     let p = profile::start(format!("preparing: {}/{}", unit.pkg,
                                    unit.target.name()));
-    try!(fingerprint::prepare_init(cx, unit));
-    try!(cx.links.validate(unit));
+    fingerprint::prepare_init(cx, unit)?;
+    cx.links.validate(unit)?;
 
     let (dirty, fresh, freshness) = if unit.profile.run_custom_build {
-        try!(custom_build::prepare(cx, unit))
+        custom_build::prepare(cx, unit)?
     } else {
-        let (freshness, dirty, fresh) = try!(fingerprint::prepare_target(cx,
-                                                                         unit));
+        let (freshness, dirty, fresh) = fingerprint::prepare_target(cx,
+                                                                         unit)?;
         let work = if unit.profile.doc {
-            try!(rustdoc(cx, unit))
+            rustdoc(cx, unit)?
         } else {
-            try!(rustc(cx, unit))
+            rustc(cx, unit)?
         };
         let dirty = work.then(dirty);
         (dirty, fresh, freshness)
     };
-    try!(jobs.enqueue(cx, unit, Job::new(dirty, fresh), freshness));
+    jobs.enqueue(cx, unit, Job::new(dirty, fresh), freshness)?;
     drop(p);
 
     // Be sure to compile all dependencies of this target as well.
-    for unit in try!(cx.dep_targets(unit)).iter() {
-        try!(compile(cx, jobs, unit));
+    for unit in cx.dep_targets(unit)?.iter() {
+        compile(cx, jobs, unit)?;
     }
     Ok(())
 }
 
 fn rustc(cx: &mut Context, unit: &Unit) -> CargoResult<Work> {
     let crate_types = unit.target.rustc_crate_types();
-    let mut rustc = try!(prepare_rustc(cx, crate_types, unit));
+    let mut rustc = prepare_rustc(cx, crate_types, unit)?;
 
     let name = unit.pkg.name().to_string();
     if !cx.show_warnings(unit.pkg.package_id()) {
-        if try!(cx.config.rustc()).cap_lints {
+        if cx.config.rustc()?.cap_lints {
             rustc.arg("--cap-lints").arg("allow");
         } else {
             rustc.arg("-Awarnings");
@@ -213,7 +213,7 @@ fn rustc(cx: &mut Context, unit: &Unit) -> CargoResult<Work> {
     }
     let has_custom_args = unit.profile.rustc_args.is_some();
 
-    let filenames = try!(cx.target_filenames(unit));
+    let filenames = cx.target_filenames(unit)?;
     let root = cx.out_dir(unit);
 
     // Prepare the native lib state (extra -L and -l flags)
@@ -238,7 +238,7 @@ fn rustc(cx: &mut Context, unit: &Unit) -> CargoResult<Work> {
     let dep_info_loc = fingerprint::dep_info_loc(cx, unit);
     let cwd = cx.config.cwd().to_path_buf();
 
-    rustc.args(&try!(cx.rustflags_args(unit)));
+    rustc.args(&cx.rustflags_args(unit)?);
     let json_errors = cx.build_config.json_errors;
     let package_id = unit.pkg.package_id().clone();
     let target = unit.target.clone();
@@ -250,9 +250,9 @@ fn rustc(cx: &mut Context, unit: &Unit) -> CargoResult<Work> {
         // located somewhere in there.
         if let Some(build_deps) = build_deps {
             let build_state = build_state.outputs.lock().unwrap();
-            try!(add_native_deps(&mut rustc, &build_state, &build_deps,
-                                 pass_l_flag, &current_id));
-            try!(add_plugin_deps(&mut rustc, &build_state, &build_deps));
+            add_native_deps(&mut rustc, &build_state, &build_deps,
+                                 pass_l_flag, &current_id)?;
+            add_plugin_deps(&mut rustc, &build_state, &build_deps)?;
         }
 
         // FIXME(rust-lang/rust#18913): we probably shouldn't have to do
@@ -260,14 +260,14 @@ fn rustc(cx: &mut Context, unit: &Unit) -> CargoResult<Work> {
         for &(ref filename, _linkable) in filenames.iter() {
             let dst = root.join(filename);
             if fs::metadata(&dst).is_ok() {
-                try!(fs::remove_file(&dst).chain_error(|| {
+                fs::remove_file(&dst).chain_error(|| {
                     human(format!("Could not remove file: {}.", dst.display()))
-                }));
+                })?;
             }
         }
 
         state.running(&rustc);
-        try!(if json_errors {
+        if json_errors {
             rustc.exec_with_streaming(
                 &mut |line| if !line.is_empty() {
                     Err(internal(&format!("compiler stdout is not empty: `{}`", line)))
@@ -275,9 +275,9 @@ fn rustc(cx: &mut Context, unit: &Unit) -> CargoResult<Work> {
                     Ok(())
                 },
                 &mut |line| {
-                    let compiler_message = try!(json::Json::from_str(line).map_err(|_| {
+                    let compiler_message = json::Json::from_str(line).map_err(|_| {
                         internal(&format!("compiler produced invalid json: `{}`", line))
-                    }));
+                    })?;
 
                     machine_message::FromCompiler::new(
                         &package_id,
@@ -292,7 +292,7 @@ fn rustc(cx: &mut Context, unit: &Unit) -> CargoResult<Work> {
             rustc.exec()
         }.chain_error(|| {
             human(format!("Could not compile `{}`.", name))
-        }));
+        })?;
 
         if do_rename && real_name != crate_name {
             let dst = root.join(&filenames[0].0);
@@ -300,18 +300,18 @@ fn rustc(cx: &mut Context, unit: &Unit) -> CargoResult<Work> {
                                             .to_str().unwrap()
                                             .replace(&real_name, &crate_name));
             if !has_custom_args || src.exists() {
-                try!(fs::rename(&src, &dst).chain_error(|| {
+                fs::rename(&src, &dst).chain_error(|| {
                     internal(format!("could not rename crate {:?}", src))
-                }));
+                })?;
             }
         }
 
         if !has_custom_args || fs::metadata(&rustc_dep_info_loc).is_ok() {
-            try!(fs::rename(&rustc_dep_info_loc, &dep_info_loc).chain_error(|| {
+            fs::rename(&rustc_dep_info_loc, &dep_info_loc).chain_error(|| {
                 internal(format!("could not rename dep info: {:?}",
                               rustc_dep_info_loc))
-            }));
-            try!(fingerprint::append_current_dir(&dep_info_loc, &cwd));
+            })?;
+            fingerprint::append_current_dir(&dep_info_loc, &cwd)?;
         }
 
         // If we're a "root crate", e.g. the target of this compilation, then we
@@ -337,16 +337,16 @@ fn rustc(cx: &mut Context, unit: &Unit) -> CargoResult<Work> {
                 let dst = src_dir.parent().unwrap()
                                  .join(src.file_name().unwrap());
                 if dst.exists() {
-                    try!(fs::remove_file(&dst).chain_error(|| {
+                    fs::remove_file(&dst).chain_error(|| {
                         human(format!("failed to remove: {}", dst.display()))
-                    }));
+                    })?;
                 }
-                try!(fs::hard_link(&src, &dst)
+                fs::hard_link(&src, &dst)
                      .or_else(|_| fs::copy(&src, &dst).map(|_| ()))
                      .chain_error(|| {
                          human(format!("failed to link or copy `{}` to `{}`",
                                        src.display(), dst.display()))
-                }));
+                })?;
             }
         }
 
@@ -361,10 +361,10 @@ fn rustc(cx: &mut Context, unit: &Unit) -> CargoResult<Work> {
                        pass_l_flag: bool,
                        current_id: &PackageId) -> CargoResult<()> {
         for key in build_scripts.to_link.iter() {
-            let output = try!(build_state.get(key).chain_error(|| {
+            let output = build_state.get(key).chain_error(|| {
                 internal(format!("couldn't find build state for {}/{:?}",
                                  key.0, key.1))
-            }));
+            })?;
             for path in output.library_paths.iter() {
                 rustc.arg("-L").arg(path);
             }
@@ -399,14 +399,14 @@ fn add_plugin_deps(rustc: &mut ProcessBuilder,
     let mut search_path = env::split_paths(&search_path).collect::<Vec<_>>();
     for id in build_scripts.plugins.iter() {
         let key = (id.clone(), Kind::Host);
-        let output = try!(build_state.get(&key).chain_error(|| {
+        let output = build_state.get(&key).chain_error(|| {
             internal(format!("couldn't find libs for plugin dep {}", id))
-        }));
+        })?;
         for path in output.library_paths.iter() {
             search_path.push(path.clone());
         }
     }
-    let search_path = try!(join_paths(&search_path, var));
+    let search_path = join_paths(&search_path, var)?;
     rustc.env(var, &search_path);
     Ok(())
 }
@@ -414,16 +414,16 @@ fn add_plugin_deps(rustc: &mut ProcessBuilder,
 fn prepare_rustc(cx: &Context,
                  crate_types: Vec<&str>,
                  unit: &Unit) -> CargoResult<ProcessBuilder> {
-    let mut base = try!(cx.compilation.rustc_process(unit.pkg));
+    let mut base = cx.compilation.rustc_process(unit.pkg)?;
     build_base_args(cx, &mut base, unit, &crate_types);
     build_plugin_args(&mut base, cx, unit);
-    try!(build_deps_args(&mut base, cx, unit));
+    build_deps_args(&mut base, cx, unit)?;
     Ok(base)
 }
 
 
 fn rustdoc(cx: &mut Context, unit: &Unit) -> CargoResult<Work> {
-    let mut rustdoc = try!(cx.compilation.rustdoc_process(unit.pkg));
+    let mut rustdoc = cx.compilation.rustdoc_process(unit.pkg)?;
     rustdoc.arg(&root_path(cx, unit))
            .cwd(cx.config.cwd())
            .arg("--crate-name").arg(&unit.target.crate_name());
@@ -439,7 +439,7 @@ fn rustdoc(cx: &mut Context, unit: &Unit) -> CargoResult<Work> {
     // Create the documentation directory ahead of time as rustdoc currently has
     // a bug where concurrent invocations will race to create this directory if
     // it doesn't already exist.
-    try!(fs::create_dir_all(&doc_dir));
+    fs::create_dir_all(&doc_dir)?;
 
     rustdoc.arg("-o").arg(doc_dir);
 
@@ -453,13 +453,13 @@ fn rustdoc(cx: &mut Context, unit: &Unit) -> CargoResult<Work> {
         rustdoc.args(args);
     }
 
-    try!(build_deps_args(&mut rustdoc, cx, unit));
+    build_deps_args(&mut rustdoc, cx, unit)?;
 
     if unit.pkg.has_custom_build() {
         rustdoc.env("OUT_DIR", &cx.layout(unit).build_out(unit.pkg));
     }
 
-    rustdoc.args(&try!(cx.rustdocflags_args(unit)));
+    rustdoc.args(&cx.rustdocflags_args(unit)?);
 
     let name = unit.pkg.name().to_string();
     let build_state = cx.build_state.clone();
@@ -648,9 +648,9 @@ fn build_deps_args(cmd: &mut ProcessBuilder, cx: &Context, unit: &Unit)
         cmd.env("OUT_DIR", &layout.build_out(unit.pkg));
     }
 
-    for unit in try!(cx.dep_targets(unit)).iter() {
+    for unit in cx.dep_targets(unit)?.iter() {
         if unit.target.linkable() && !unit.profile.doc {
-            try!(link_to(cmd, cx, unit));
+            link_to(cmd, cx, unit)?;
         }
     }
 
@@ -658,7 +658,7 @@ fn build_deps_args(cmd: &mut ProcessBuilder, cx: &Context, unit: &Unit)
 
     fn link_to(cmd: &mut ProcessBuilder, cx: &Context, unit: &Unit)
                -> CargoResult<()> {
-        for (filename, linkable) in try!(cx.target_filenames(unit)) {
+        for (filename, linkable) in cx.target_filenames(unit)? {
             if !linkable {
                 continue
             }
index f382a1dd128e828adc3e423641a7ea80f00479cb..6c267efb60b033193e74d704da76a501b63ab086 100644 (file)
@@ -14,15 +14,15 @@ pub struct TestOptions<'a> {
 pub fn run_tests(ws: &Workspace,
                  options: &TestOptions,
                  test_args: &[String]) -> CargoResult<Option<CargoTestError>> {
-    let compilation = try!(compile_tests(ws, options));
+    let compilation = compile_tests(ws, options)?;
 
     if options.no_run {
         return Ok(None)
     }
     let mut errors = if options.only_doc {
-        try!(run_doc_tests(options, test_args, &compilation))
+        run_doc_tests(options, test_args, &compilation)?
     } else {
-        try!(run_unit_tests(options, test_args, &compilation))
+        run_unit_tests(options, test_args, &compilation)?
     };
 
     // If we have an error and want to fail fast, return
@@ -39,7 +39,7 @@ pub fn run_tests(ws: &Workspace,
         }
     }
 
-    errors.extend(try!(run_doc_tests(options, test_args, &compilation)));
+    errors.extend(run_doc_tests(options, test_args, &compilation)?);
     if errors.is_empty() {
         Ok(None)
     } else {
@@ -52,12 +52,12 @@ pub fn run_benches(ws: &Workspace,
                    args: &[String]) -> CargoResult<Option<CargoTestError>> {
     let mut args = args.to_vec();
     args.push("--bench".to_string());
-    let compilation = try!(compile_tests(ws, options));
+    let compilation = compile_tests(ws, options)?;
 
     if options.no_run {
         return Ok(None)
     }
-    let errors = try!(run_unit_tests(options, &args, &compilation));
+    let errors = run_unit_tests(options, &args, &compilation)?;
     match errors.len() {
         0 => Ok(None),
         _ => Ok(Some(CargoTestError::new(errors))),
@@ -67,7 +67,7 @@ pub fn run_benches(ws: &Workspace,
 fn compile_tests<'a>(ws: &Workspace<'a>,
                      options: &TestOptions<'a>)
                      -> CargoResult<Compilation<'a>> {
-    let mut compilation = try!(ops::compile(ws, &options.compile_opts));
+    let mut compilation = ops::compile(ws, &options.compile_opts)?;
     compilation.tests.sort_by(|a, b| {
         (a.0.package_id(), &a.1).cmp(&(b.0.package_id(), &b.1))
     });
@@ -89,14 +89,14 @@ fn run_unit_tests(options: &TestOptions,
             Some(path) => path,
             None => &**exe,
         };
-        let mut cmd = try!(compilation.target_process(exe, pkg));
+        let mut cmd = compilation.target_process(exe, pkg)?;
         cmd.args(test_args);
-        try!(config.shell().concise(|shell| {
+        config.shell().concise(|shell| {
             shell.status("Running", to_display.display().to_string())
-        }));
-        try!(config.shell().verbose(|shell| {
+        })?;
+        config.shell().verbose(|shell| {
             shell.status("Running", cmd.to_string())
-        }));
+        })?;
 
         if let Err(e) = cmd.exec() {
             errors.push(e);
@@ -116,7 +116,7 @@ fn run_doc_tests(options: &TestOptions,
     let config = options.compile_opts.config;
 
     // We don't build/run doctests if target != host
-    if try!(config.rustc()).host != compilation.target {
+    if config.rustc()?.host != compilation.target {
         return Ok(errors);
     }
 
@@ -127,8 +127,8 @@ fn run_doc_tests(options: &TestOptions,
 
     for (package, tests) in libs {
         for (lib, name, crate_name) in tests {
-            try!(config.shell().status("Doc-tests", name));
-            let mut p = try!(compilation.rustdoc_process(package));
+            config.shell().status("Doc-tests", name)?;
+            let mut p = compilation.rustdoc_process(package)?;
             p.arg("--test").arg(lib)
              .arg("--crate-name").arg(&crate_name);
 
@@ -174,9 +174,9 @@ fn run_doc_tests(options: &TestOptions,
                 }
             }
 
-            try!(config.shell().verbose(|shell| {
+            config.shell().verbose(|shell| {
                 shell.status("Running", p.to_string())
-            }));
+            })?;
             if let Err(e) = p.exec() {
                 errors.push(e);
                 if !options.no_fail_fast {
index ac9fcbddf311439534c092cf8975a9e4bb4a4087..30eec985a288addf4604eeb4f9da9ebbfd4dd6a0 100644 (file)
@@ -14,19 +14,19 @@ pub fn load_pkg_lockfile(ws: &Workspace) -> CargoResult<Option<Resolve>> {
     }
 
     let root = Filesystem::new(ws.root().to_path_buf());
-    let mut f = try!(root.open_ro("Cargo.lock", ws.config(), "Cargo.lock file"));
+    let mut f = root.open_ro("Cargo.lock", ws.config(), "Cargo.lock file")?;
 
     let mut s = String::new();
-    try!(f.read_to_string(&mut s).chain_error(|| {
+    f.read_to_string(&mut s).chain_error(|| {
         human(format!("failed to read file: {}", f.path().display()))
-    }));
+    })?;
 
     (|| {
-        let table = try!(cargo_toml::parse(&s, f.path(), ws.config()));
+        let table = cargo_toml::parse(&s, f.path(), ws.config())?;
         let table = toml::Value::Table(table);
         let mut d = toml::Decoder::new(table);
-        let v: resolver::EncodableResolve = try!(Decodable::decode(&mut d));
-        Ok(Some(try!(v.into_resolve(ws))))
+        let v: resolver::EncodableResolve = Decodable::decode(&mut d)?;
+        Ok(Some(v.into_resolve(ws)?))
     }).chain_error(|| {
         human(format!("failed to parse lock file at: {}", f.path().display()))
     })
@@ -38,7 +38,7 @@ pub fn write_pkg_lockfile(ws: &Workspace, resolve: &Resolve) -> CargoResult<()>
     let orig = ws_root.open_ro("Cargo.lock", ws.config(), "Cargo.lock file");
     let orig = orig.and_then(|mut f| {
         let mut s = String::new();
-        try!(f.read_to_string(&mut s));
+        f.read_to_string(&mut s)?;
         Ok(s)
     });
 
@@ -102,8 +102,8 @@ pub fn write_pkg_lockfile(ws: &Workspace, resolve: &Resolve) -> CargoResult<()>
 
     // Ok, if that didn't work just write it out
     ws_root.open_rw("Cargo.lock", ws.config(), "Cargo.lock file").and_then(|mut f| {
-        try!(f.file().set_len(0));
-        try!(f.write_all(out.as_bytes()));
+        f.file().set_len(0)?;
+        f.write_all(out.as_bytes())?;
         Ok(())
     }).chain_error(|| {
         human(format!("failed to write {}",
index 5866242fdab310a1d798d3d7b452eedb4e3217fd..d937976f867acdc86448fcc9c9a208a1e5ef7b83 100644 (file)
@@ -40,32 +40,32 @@ pub struct PublishOpts<'cfg> {
 }
 
 pub fn publish(ws: &Workspace, opts: &PublishOpts) -> CargoResult<()> {
-    let pkg = try!(ws.current());
+    let pkg = ws.current()?;
 
     if !pkg.publish() {
         bail!("some crates cannot be published.\n\
                `{}` is marked as unpublishable", pkg.name());
     }
 
-    let (mut registry, reg_id) = try!(registry(opts.config,
+    let (mut registry, reg_id) = registry(opts.config,
                                                opts.token.clone(),
-                                               opts.index.clone()));
-    try!(verify_dependencies(&pkg, &reg_id));
+                                               opts.index.clone())?;
+    verify_dependencies(&pkg, &reg_id)?;
 
     // Prepare a tarball, with a non-suppressable warning if metadata
     // is missing since this is being put online.
-    let tarball = try!(ops::package(ws, &ops::PackageOpts {
+    let tarball = ops::package(ws, &ops::PackageOpts {
         config: opts.config,
         verify: opts.verify,
         list: false,
         check_metadata: true,
         allow_dirty: opts.allow_dirty,
         jobs: opts.jobs,
-    })).unwrap();
+    })?.unwrap();
 
     // Upload said tarball to the specified destination
-    try!(opts.config.shell().status("Uploading", pkg.package_id().to_string()));
-    try!(transmit(opts.config, &pkg, tarball.file(), &mut registry, opts.dry_run));
+    opts.config.shell().status("Uploading", pkg.package_id().to_string())?;
+    transmit(opts.config, &pkg, tarball.file(), &mut registry, opts.dry_run)?;
 
     Ok(())
 }
@@ -114,7 +114,7 @@ fn transmit(config: &Config,
         ref keywords, ref readme, ref repository, ref license, ref license_file,
     } = *manifest.metadata();
     let readme = match *readme {
-        Some(ref readme) => Some(try!(paths::read(&pkg.root().join(readme)))),
+        Some(ref readme) => Some(paths::read(&pkg.root().join(readme))?),
         None => None,
     };
     match *license_file {
@@ -128,7 +128,7 @@ fn transmit(config: &Config,
 
     // Do not upload if performing a dry run
     if dry_run {
-        try!(config.shell().warn("aborting upload due to dry run"));
+        config.shell().warn("aborting upload due to dry run")?;
         return Ok(());
     }
 
@@ -152,8 +152,8 @@ fn transmit(config: &Config,
 }
 
 pub fn registry_configuration(config: &Config) -> CargoResult<RegistryConfig> {
-    let index = try!(config.get_string("registry.index")).map(|p| p.val);
-    let token = try!(config.get_string("registry.token")).map(|p| p.val);
+    let index = config.get_string("registry.index")?.map(|p| p.val);
+    let token = config.get_string("registry.token")?.map(|p| p.val);
     Ok(RegistryConfig { index: index, token: token })
 }
 
@@ -164,20 +164,20 @@ pub fn registry(config: &Config,
     let RegistryConfig {
         token: token_config,
         index: _index_config,
-    } = try!(registry_configuration(config));
+    } = registry_configuration(config)?;
     let token = token.or(token_config);
     let sid = match index {
-        Some(index) => SourceId::for_registry(&try!(index.to_url())),
-        None => try!(SourceId::crates_io(config)),
+        Some(index) => SourceId::for_registry(&index.to_url()?),
+        None => SourceId::crates_io(config)?,
     };
     let api_host = {
         let mut src = RegistrySource::remote(&sid, config);
-        try!(src.update().chain_error(|| {
+        src.update().chain_error(|| {
             human(format!("failed to update {}", sid))
-        }));
-        (try!(src.config())).unwrap().api
+        })?;
+        (src.config()?).unwrap().api
     };
-    let handle = try!(http_handle(config));
+    let handle = http_handle(config)?;
     Ok((Registry::new_handle(api_host, token, handle), sid))
 }
 
@@ -193,18 +193,18 @@ pub fn http_handle(config: &Config) -> CargoResult<Easy> {
     // connect phase as well as a "low speed" timeout so if we don't receive
     // many bytes in a large-ish period of time then we time out.
     let mut handle = Easy::new();
-    try!(handle.connect_timeout(Duration::new(30, 0)));
-    try!(handle.low_speed_limit(10 /* bytes per second */));
-    try!(handle.low_speed_time(Duration::new(30, 0)));
-    if let Some(proxy) = try!(http_proxy(config)) {
-        try!(handle.proxy(&proxy));
+    handle.connect_timeout(Duration::new(30, 0))?;
+    handle.low_speed_limit(10 /* bytes per second */)?;
+    handle.low_speed_time(Duration::new(30, 0))?;
+    if let Some(proxy) = http_proxy(config)? {
+        handle.proxy(&proxy)?;
     }
-    if let Some(cainfo) = try!(config.get_path("http.cainfo")) {
-        try!(handle.cainfo(&cainfo.val));
+    if let Some(cainfo) = config.get_path("http.cainfo")? {
+        handle.cainfo(&cainfo.val)?;
     }
-    if let Some(timeout) = try!(http_timeout(config)) {
-        try!(handle.connect_timeout(Duration::new(timeout as u64, 0)));
-        try!(handle.low_speed_time(Duration::new(timeout as u64, 0)));
+    if let Some(timeout) = http_timeout(config)? {
+        handle.connect_timeout(Duration::new(timeout as u64, 0))?;
+        handle.low_speed_time(Duration::new(timeout as u64, 0))?;
     }
     Ok(handle)
 }
@@ -214,7 +214,7 @@ pub fn http_handle(config: &Config) -> CargoResult<Easy> {
 /// Favor cargo's `http.proxy`, then git's `http.proxy`. Proxies specified
 /// via environment variables are picked up by libcurl.
 fn http_proxy(config: &Config) -> CargoResult<Option<String>> {
-    match try!(config.get_string("http.proxy")) {
+    match config.get_string("http.proxy")? {
         Some(s) => return Ok(Some(s.val)),
         None => {}
     }
@@ -241,7 +241,7 @@ fn http_proxy(config: &Config) -> CargoResult<Option<String>> {
 /// * https_proxy env var
 /// * HTTPS_PROXY env var
 pub fn http_proxy_exists(config: &Config) -> CargoResult<bool> {
-    if try!(http_proxy(config)).is_some() {
+    if http_proxy(config)?.is_some() {
         Ok(true)
     } else {
         Ok(["http_proxy", "HTTP_PROXY",
@@ -250,7 +250,7 @@ pub fn http_proxy_exists(config: &Config) -> CargoResult<bool> {
 }
 
 pub fn http_timeout(config: &Config) -> CargoResult<Option<i64>> {
-    match try!(config.get_i64("http.timeout")) {
+    match config.get_i64("http.timeout")? {
         Some(s) => return Ok(Some(s.val)),
         None => {}
     }
@@ -258,7 +258,7 @@ pub fn http_timeout(config: &Config) -> CargoResult<Option<i64>> {
 }
 
 pub fn registry_login(config: &Config, token: String) -> CargoResult<()> {
-    let RegistryConfig { index, token: _ } = try!(registry_configuration(config));
+    let RegistryConfig { index, token: _ } = registry_configuration(config)?;
     let mut map = HashMap::new();
     let p = config.cwd().to_path_buf();
     match index {
@@ -286,23 +286,23 @@ pub fn modify_owners(config: &Config, opts: &OwnersOptions) -> CargoResult<()> {
     let name = match opts.krate {
         Some(ref name) => name.clone(),
         None => {
-            let manifest_path = try!(find_root_manifest_for_wd(None, config.cwd()));
-            let pkg = try!(Package::for_path(&manifest_path, config));
+            let manifest_path = find_root_manifest_for_wd(None, config.cwd())?;
+            let pkg = Package::for_path(&manifest_path, config)?;
             pkg.name().to_string()
         }
     };
 
-    let (mut registry, _) = try!(registry(config, opts.token.clone(),
-                                          opts.index.clone()));
+    let (mut registry, _) = registry(config, opts.token.clone(),
+                                          opts.index.clone())?;
 
     match opts.to_add {
         Some(ref v) => {
             let v = v.iter().map(|s| &s[..]).collect::<Vec<_>>();
-            try!(config.shell().status("Owner", format!("adding {:?} to crate {}",
-                                                        v, name)));
-            try!(registry.add_owners(&name, &v).map_err(|e| {
+            config.shell().status("Owner", format!("adding {:?} to crate {}",
+                                                        v, name))?;
+            registry.add_owners(&name, &v).map_err(|e| {
                 human(format!("failed to add owners to crate {}: {}", name, e))
-            }));
+            })?;
         }
         None => {}
     }
@@ -310,19 +310,19 @@ pub fn modify_owners(config: &Config, opts: &OwnersOptions) -> CargoResult<()> {
     match opts.to_remove {
         Some(ref v) => {
             let v = v.iter().map(|s| &s[..]).collect::<Vec<_>>();
-            try!(config.shell().status("Owner", format!("removing {:?} from crate {}",
-                                                        v, name)));
-            try!(registry.remove_owners(&name, &v).map_err(|e| {
+            config.shell().status("Owner", format!("removing {:?} from crate {}",
+                                                        v, name))?;
+            registry.remove_owners(&name, &v).map_err(|e| {
                 human(format!("failed to remove owners from crate {}: {}", name, e))
-            }));
+            })?;
         }
         None => {}
     }
 
     if opts.list {
-        let owners = try!(registry.list_owners(&name).map_err(|e| {
+        let owners = registry.list_owners(&name).map_err(|e| {
             human(format!("failed to list owners of crate {}: {}", name, e))
-        }));
+        })?;
         for owner in owners.iter() {
             print!("{}", owner.login);
             match (owner.name.as_ref(), owner.email.as_ref()) {
@@ -346,8 +346,8 @@ pub fn yank(config: &Config,
     let name = match krate {
         Some(name) => name,
         None => {
-            let manifest_path = try!(find_root_manifest_for_wd(None, config.cwd()));
-            let pkg = try!(Package::for_path(&manifest_path, config));
+            let manifest_path = find_root_manifest_for_wd(None, config.cwd())?;
+            let pkg = Package::for_path(&manifest_path, config)?;
             pkg.name().to_string()
         }
     };
@@ -356,18 +356,18 @@ pub fn yank(config: &Config,
         None => bail!("a version must be specified to yank")
     };
 
-    let (mut registry, _) = try!(registry(config, token, index));
+    let (mut registry, _) = registry(config, token, index)?;
 
     if undo {
-        try!(config.shell().status("Unyank", format!("{}:{}", name, version)));
-        try!(registry.unyank(&name, &version).map_err(|e| {
+        config.shell().status("Unyank", format!("{}:{}", name, version))?;
+        registry.unyank(&name, &version).map_err(|e| {
             human(format!("failed to undo a yank: {}", e))
-        }));
+        })?;
     } else {
-        try!(config.shell().status("Yank", format!("{}:{}", name, version)));
-        try!(registry.yank(&name, &version).map_err(|e| {
+        config.shell().status("Yank", format!("{}:{}", name, version))?;
+        registry.yank(&name, &version).map_err(|e| {
             human(format!("failed to yank: {}", e))
-        }));
+        })?;
     }
 
     Ok(())
@@ -385,10 +385,10 @@ pub fn search(query: &str,
         }
     }
 
-    let (mut registry, _) = try!(registry(config, None, index));
-    let (crates, total_crates) = try!(registry.search(query, limit).map_err(|e| {
+    let (mut registry, _) = registry(config, None, index)?;
+    let (crates, total_crates) = registry.search(query, limit).map_err(|e| {
         human(format!("failed to retrieve search results from the registry: {}", e))
-    }));
+    })?;
 
     let list_items = crates.iter()
         .map(|krate| (
@@ -411,25 +411,25 @@ pub fn search(query: &str,
             }
             None => name
         };
-        try!(config.shell().say(line, BLACK));
+        config.shell().say(line, BLACK)?;
     }
 
     let search_max_limit = 100;
     if total_crates > limit as u32 && limit < search_max_limit {
-        try!(config.shell().say(
+        config.shell().say(
             format!("... and {} crates more (use --limit N to see more)",
                     total_crates - limit as u32),
             BLACK)
-        );
+        ?;
     } else if total_crates > limit as u32 && limit >= search_max_limit {
-        try!(config.shell().say(
+        config.shell().say(
             format!(
                 "... and {} crates more (go to http://crates.io/search?q={} to see more)",
                 total_crates - limit as u32,
                 percent_encode(query.as_bytes(), QUERY_ENCODE_SET)
             ),
             BLACK)
-        );
+        ?;
     }
 
     Ok(())
index 87936d3f91d2c0400d41cf5d464457280aefce94..42898b6afc93b7834b36ecd584fa254ddda7ac75 100644 (file)
@@ -13,14 +13,14 @@ use util::CargoResult;
 /// lockfile.
 pub fn resolve_ws(registry: &mut PackageRegistry, ws: &Workspace)
                    -> CargoResult<Resolve> {
-    let prev = try!(ops::load_pkg_lockfile(ws));
-    let resolve = try!(resolve_with_previous(registry, ws,
+    let prev = ops::load_pkg_lockfile(ws)?;
+    let resolve = resolve_with_previous(registry, ws,
                                              Method::Everything,
-                                             prev.as_ref(), None, &[]));
+                                             prev.as_ref(), None, &[])?;
 
     // Avoid writing a lockfile if we are `cargo install`ing a non local package.
     if ws.current_opt().map(|pkg| pkg.package_id().source_id().is_path()).unwrap_or(true) {
-        try!(ops::write_pkg_lockfile(ws, &resolve));
+        ops::write_pkg_lockfile(ws, &resolve)?;
     }
     Ok(resolve)
 }
@@ -88,8 +88,8 @@ pub fn resolve_with_previous<'a>(registry: &mut PackageRegistry,
 
     let mut summaries = Vec::new();
     for member in ws.members() {
-        try!(registry.add_sources(&[member.package_id().source_id()
-                                          .clone()]));
+        registry.add_sources(&[member.package_id().source_id()
+                                          .clone()])?;
         let method_to_resolve = match method {
             // When resolving everything for a workspace we want to be sure to resolve all
             // members in the workspace, so propagate the `Method::Everything`.
@@ -154,9 +154,9 @@ pub fn resolve_with_previous<'a>(registry: &mut PackageRegistry,
         None => root_replace.to_vec(),
     };
 
-    let mut resolved = try!(resolver::resolve(&summaries, &replace, registry));
+    let mut resolved = resolver::resolve(&summaries, &replace, registry)?;
     if let Some(previous) = previous {
-        try!(resolved.merge_from(previous));
+        resolved.merge_from(previous)?;
     }
     return Ok(resolved);
 
index ac254f147ecb9f19f7d033b4b90d341ffaebb482..80a13318f65141b4dce95fba0c64b65cf41fcbd4 100644 (file)
@@ -40,10 +40,10 @@ struct SourceConfig {
 
 impl<'cfg> SourceConfigMap<'cfg> {
     pub fn new(config: &'cfg Config) -> CargoResult<SourceConfigMap<'cfg>> {
-        let mut base = try!(SourceConfigMap::empty(config));
-        if let Some(table) = try!(config.get_table("source")) {
+        let mut base = SourceConfigMap::empty(config)?;
+        if let Some(table) = config.get_table("source")? {
             for (key, value) in table.val.iter() {
-                try!(base.add_config(key, value));
+                base.add_config(key, value)?;
             }
         }
         Ok(base)
@@ -56,7 +56,7 @@ impl<'cfg> SourceConfigMap<'cfg> {
             config: config,
         };
         base.add("crates-io", SourceConfig {
-            id: try!(SourceId::crates_io(config)),
+            id: SourceId::crates_io(config)?,
             replace_with: None,
         });
         Ok(base)
@@ -126,40 +126,40 @@ a lock file compatible with `{orig}` cannot be generated in this situation
     }
 
     fn add_config(&mut self, name: &str, cfg: &ConfigValue) -> CargoResult<()> {
-        let (table, _path) = try!(cfg.table(&format!("source.{}", name)));
+        let (table, _path) = cfg.table(&format!("source.{}", name))?;
         let mut srcs = Vec::new();
         if let Some(val) = table.get("registry") {
-            let url = try!(url(val, &format!("source.{}.registry", name)));
+            let url = url(val, &format!("source.{}.registry", name))?;
             srcs.push(SourceId::for_registry(&url));
         }
         if let Some(val) = table.get("local-registry") {
-            let (s, path) = try!(val.string(&format!("source.{}.local-registry",
-                                                     name)));
+            let (s, path) = val.string(&format!("source.{}.local-registry",
+                                                     name))?;
             let mut path = path.to_path_buf();
             path.pop();
             path.pop();
             path.push(s);
-            srcs.push(try!(SourceId::for_local_registry(&path)));
+            srcs.push(SourceId::for_local_registry(&path)?);
         }
         if let Some(val) = table.get("directory") {
-            let (s, path) = try!(val.string(&format!("source.{}.directory",
-                                                     name)));
+            let (s, path) = val.string(&format!("source.{}.directory",
+                                                     name))?;
             let mut path = path.to_path_buf();
             path.pop();
             path.pop();
             path.push(s);
-            srcs.push(try!(SourceId::for_directory(&path)));
+            srcs.push(SourceId::for_directory(&path)?);
         }
         if name == "crates-io" && srcs.is_empty() {
-            srcs.push(try!(SourceId::crates_io(self.config)));
+            srcs.push(SourceId::crates_io(self.config)?);
         }
 
         let mut srcs = srcs.into_iter();
-        let src = try!(srcs.next().chain_error(|| {
+        let src = srcs.next().chain_error(|| {
             human(format!("no source URL specified for `source.{}`, need \
                            either `registry` or `local-registry` defined",
                           name))
-        }));
+        })?;
         if srcs.next().is_some() {
             return Err(human(format!("more than one source URL specified for \
                                       `source.{}`", name)))
@@ -167,8 +167,8 @@ a lock file compatible with `{orig}` cannot be generated in this situation
 
         let mut replace_with = None;
         if let Some(val) = table.get("replace-with") {
-            let (s, path) = try!(val.string(&format!("source.{}.replace-with",
-                                                     name)));
+            let (s, path) = val.string(&format!("source.{}.replace-with",
+                                                     name))?;
             replace_with = Some((s.to_string(), path.to_path_buf()));
         }
 
@@ -180,7 +180,7 @@ a lock file compatible with `{orig}` cannot be generated in this situation
         return Ok(());
 
         fn url(cfg: &ConfigValue, key: &str) -> CargoResult<Url> {
-            let (url, path) = try!(cfg.string(key));
+            let (url, path) = cfg.string(key)?;
             url.to_url().chain_error(|| {
                 human(format!("configuration key `{}` specified an invalid \
                                URL (in {})", key, path.display()))
index 84a9501a03b59c8d4c7abb176337b6e412b9f291..fc7abf56c734d4f033ff457d4755afc24e0066c7 100644 (file)
@@ -59,34 +59,34 @@ impl<'cfg> Registry for DirectorySource<'cfg> {
 impl<'cfg> Source for DirectorySource<'cfg> {
     fn update(&mut self) -> CargoResult<()> {
         self.packages.clear();
-        let entries = try!(self.root.read_dir().chain_error(|| {
+        let entries = self.root.read_dir().chain_error(|| {
             human(format!("failed to read root of directory source: {}",
                           self.root.display()))
-        }));
+        })?;
 
         for entry in entries {
-            let entry = try!(entry);
+            let entry = entry?;
             let path = entry.path();
             let mut src = PathSource::new(&path,
                                           &self.id,
                                           self.config);
-            try!(src.update());
-            let pkg = try!(src.root_package());
+            src.update()?;
+            let pkg = src.root_package()?;
 
             let cksum_file = path.join(".cargo-checksum.json");
-            let cksum = try!(paths::read(&path.join(cksum_file)).chain_error(|| {
+            let cksum = paths::read(&path.join(cksum_file)).chain_error(|| {
                 human(format!("failed to load checksum `.cargo-checksum.json` \
                                of {} v{}",
                               pkg.package_id().name(),
                               pkg.package_id().version()))
 
-            }));
-            let cksum: Checksum = try!(json::decode(&cksum).chain_error(|| {
+            })?;
+            let cksum: Checksum = json::decode(&cksum).chain_error(|| {
                 human(format!("failed to decode `.cargo-checksum.json` of \
                                {} v{}",
                               pkg.package_id().name(),
                               pkg.package_id().version()))
-            }));
+            })?;
 
             let mut manifest = pkg.manifest().clone();
             let summary = manifest.summary().clone();
@@ -120,10 +120,10 @@ impl<'cfg> Source for DirectorySource<'cfg> {
             let mut h = Sha256::new();
             let file = pkg.root().join(file);
 
-            try!((|| -> CargoResult<()> {
-                let mut f = try!(File::open(&file));
+            (|| -> CargoResult<()> {
+                let mut f = File::open(&file)?;
                 loop {
-                    match try!(f.read(&mut buf)) {
+                    match f.read(&mut buf)? {
                         0 => return Ok(()),
                         n => h.update(&buf[..n]),
                     }
@@ -131,7 +131,7 @@ impl<'cfg> Source for DirectorySource<'cfg> {
             }).chain_error(|| {
                 human(format!("failed to calculate checksum of: {}",
                               file.display()))
-            }));
+            })?;
 
             let actual = h.finish().to_hex();
             if &*actual != cksum {
index b0f1053ef1dc234d735e273fa05999bb375c4bae..321fe4dfcd0d824c8e4187bd776404e13c1c0a99 100644 (file)
@@ -50,7 +50,7 @@ impl<'cfg> GitSource<'cfg> {
 
     pub fn read_packages(&mut self) -> CargoResult<Vec<Package>> {
         if self.path_source.is_none() {
-            try!(self.update());
+            self.update()?;
         }
         self.path_source.as_mut().unwrap().read_packages()
     }
@@ -104,7 +104,7 @@ pub fn canonicalize_url(url: &Url) -> Url {
 
 impl<'cfg> Debug for GitSource<'cfg> {
     fn fmt(&self, f: &mut Formatter) -> fmt::Result {
-        try!(write!(f, "git repo at {}", self.remote.url()));
+        write!(f, "git repo at {}", self.remote.url())?;
 
         match self.reference.to_ref_string() {
             Some(s) => write!(f, " ({})", s),
@@ -123,8 +123,8 @@ impl<'cfg> Registry for GitSource<'cfg> {
 
 impl<'cfg> Source for GitSource<'cfg> {
     fn update(&mut self) -> CargoResult<()> {
-        let lock = try!(self.config.git_path()
-            .open_rw(".cargo-lock-git", self.config, "the git checkouts"));
+        let lock = self.config.git_path()
+            .open_rw(".cargo-lock-git", self.config, "the git checkouts")?;
 
         let db_path = lock.parent().join("db").join(&self.ident);
 
@@ -137,16 +137,16 @@ impl<'cfg> Source for GitSource<'cfg> {
                             self.source_id.precise().is_none();
 
         let (repo, actual_rev) = if should_update {
-            try!(self.config.shell().status("Updating",
-                format!("git repository `{}`", self.remote.url())));
+            self.config.shell().status("Updating",
+                format!("git repository `{}`", self.remote.url()))?;
 
             trace!("updating git source `{:?}`", self.remote);
 
-            let repo = try!(self.remote.checkout(&db_path, &self.config));
-            let rev = try!(repo.rev_for(&self.reference));
+            let repo = self.remote.checkout(&db_path, &self.config)?;
+            let rev = repo.rev_for(&self.reference)?;
             (repo, rev)
         } else {
-            (try!(self.remote.db_at(&db_path)), actual_rev.unwrap())
+            (self.remote.db_at(&db_path)?, actual_rev.unwrap())
         };
 
         let checkout_path = lock.parent().join("checkouts")
@@ -157,7 +157,7 @@ impl<'cfg> Source for GitSource<'cfg> {
         // in scope so the destructors here won't tamper with too much.
         // Checkout is immutable, so we don't need to protect it with a lock once
         // it is created.
-        try!(repo.copy_to(actual_rev.clone(), &checkout_path, &self.config));
+        repo.copy_to(actual_rev.clone(), &checkout_path, &self.config)?;
 
         let source_id = self.source_id.with_precise(Some(actual_rev.to_string()));
         let path_source = PathSource::new_recursive(&checkout_path,
index f7dc89d7fba6b705d97517e314a8a9847abbf4db..7452c1dbbf8e2bb00e91ca90c721ab622140ea77 100644 (file)
@@ -105,22 +105,22 @@ impl GitRemote {
 
     pub fn rev_for(&self, path: &Path, reference: &GitReference)
                    -> CargoResult<GitRevision> {
-        let db = try!(self.db_at(path));
+        let db = self.db_at(path)?;
         db.rev_for(reference)
     }
 
     pub fn checkout(&self, into: &Path, cargo_config: &Config) -> CargoResult<GitDatabase> {
         let repo = match git2::Repository::open(into) {
             Ok(repo) => {
-                try!(self.fetch_into(&repo, &cargo_config).chain_error(|| {
+                self.fetch_into(&repo, &cargo_config).chain_error(|| {
                     human(format!("failed to fetch into {}", into.display()))
-                }));
+                })?;
                 repo
             }
             Err(..) => {
-                try!(self.clone_into(into, &cargo_config).chain_error(|| {
+                self.clone_into(into, &cargo_config).chain_error(|| {
                     human(format!("failed to clone into: {}", into.display()))
-                }))
+                })?
             }
         };
 
@@ -132,7 +132,7 @@ impl GitRemote {
     }
 
     pub fn db_at(&self, db_path: &Path) -> CargoResult<GitDatabase> {
-        let repo = try!(git2::Repository::open(db_path));
+        let repo = git2::Repository::open(db_path)?;
         Ok(GitDatabase {
             remote: self.clone(),
             path: db_path.to_path_buf(),
@@ -150,11 +150,11 @@ impl GitRemote {
     fn clone_into(&self, dst: &Path, cargo_config: &Config) -> CargoResult<git2::Repository> {
         let url = self.url.to_string();
         if fs::metadata(&dst).is_ok() {
-            try!(fs::remove_dir_all(dst));
+            fs::remove_dir_all(dst)?;
         }
-        try!(fs::create_dir_all(dst));
-        let repo = try!(git2::Repository::init_bare(dst));
-        try!(fetch(&repo, &url, "refs/heads/*:refs/heads/*", &cargo_config));
+        fs::create_dir_all(dst)?;
+        let repo = git2::Repository::init_bare(dst)?;
+        fetch(&repo, &url, "refs/heads/*:refs/heads/*", &cargo_config)?;
         Ok(repo)
     }
 }
@@ -170,45 +170,45 @@ impl GitDatabase {
             Ok(repo) => {
                 let checkout = GitCheckout::new(dest, self, rev, repo);
                 if !checkout.is_fresh() {
-                    try!(checkout.fetch(&cargo_config));
-                    try!(checkout.reset());
+                    checkout.fetch(&cargo_config)?;
+                    checkout.reset()?;
                     assert!(checkout.is_fresh());
                 }
                 checkout
             }
-            Err(..) => try!(GitCheckout::clone_into(dest, self, rev)),
+            Err(..) => GitCheckout::clone_into(dest, self, rev)?,
         };
-        try!(checkout.update_submodules(&cargo_config).chain_error(|| {
+        checkout.update_submodules(&cargo_config).chain_error(|| {
             internal("failed to update submodules")
-        }));
+        })?;
         Ok(checkout)
     }
 
     pub fn rev_for(&self, reference: &GitReference) -> CargoResult<GitRevision> {
         let id = match *reference {
             GitReference::Tag(ref s) => {
-                try!((|| {
+                (|| {
                     let refname = format!("refs/tags/{}", s);
-                    let id = try!(self.repo.refname_to_id(&refname));
-                    let obj = try!(self.repo.find_object(id, None));
-                    let obj = try!(obj.peel(ObjectType::Commit));
+                    let id = self.repo.refname_to_id(&refname)?;
+                    let obj = self.repo.find_object(id, None)?;
+                    let obj = obj.peel(ObjectType::Commit)?;
                     Ok(obj.id())
                 }).chain_error(|| {
                     human(format!("failed to find tag `{}`", s))
-                }))
+                })?
             }
             GitReference::Branch(ref s) => {
-                try!((|| {
-                    let b = try!(self.repo.find_branch(s, git2::BranchType::Local));
+                (|| {
+                    let b = self.repo.find_branch(s, git2::BranchType::Local)?;
                     b.get().target().chain_error(|| {
                         human(format!("branch `{}` did not have a target", s))
                     })
                 }).chain_error(|| {
                     human(format!("failed to find branch `{}`", s))
-                }))
+                })?
             }
             GitReference::Rev(ref s) => {
-                let obj = try!(self.repo.revparse_single(s));
+                let obj = self.repo.revparse_single(s)?;
                 obj.id()
             }
         };
@@ -216,7 +216,7 @@ impl GitDatabase {
     }
 
     pub fn has_ref(&self, reference: &str) -> CargoResult<()> {
-        try!(self.repo.revparse_single(reference));
+        self.repo.revparse_single(reference)?;
         Ok(())
     }
 }
@@ -238,31 +238,31 @@ impl<'a> GitCheckout<'a> {
                   revision: GitRevision)
                   -> CargoResult<GitCheckout<'a>>
     {
-        let repo = try!(GitCheckout::clone_repo(database.path(), into));
+        let repo = GitCheckout::clone_repo(database.path(), into)?;
         let checkout = GitCheckout::new(into, database, revision, repo);
-        try!(checkout.reset());
+        checkout.reset()?;
         Ok(checkout)
     }
 
     fn clone_repo(source: &Path, into: &Path) -> CargoResult<git2::Repository> {
         let dirname = into.parent().unwrap();
 
-        try!(fs::create_dir_all(&dirname).chain_error(|| {
+        fs::create_dir_all(&dirname).chain_error(|| {
             human(format!("Couldn't mkdir {}", dirname.display()))
-        }));
+        })?;
 
         if fs::metadata(&into).is_ok() {
-            try!(fs::remove_dir_all(into).chain_error(|| {
+            fs::remove_dir_all(into).chain_error(|| {
                 human(format!("Couldn't rmdir {}", into.display()))
-            }));
+            })?;
         }
 
-        let url = try!(source.to_url());
+        let url = source.to_url()?;
         let url = url.to_string();
-        let repo = try!(git2::Repository::clone(&url, into).chain_error(|| {
+        let repo = git2::Repository::clone(&url, into).chain_error(|| {
             internal(format!("failed to clone {} into {}", source.display(),
                              into.display()))
-        }));
+        })?;
         Ok(repo)
     }
 
@@ -278,10 +278,10 @@ impl<'a> GitCheckout<'a> {
 
     fn fetch(&self, cargo_config: &Config) -> CargoResult<()> {
         info!("fetch {}", self.repo.path().display());
-        let url = try!(self.database.path.to_url());
+        let url = self.database.path.to_url()?;
         let url = url.to_string();
         let refspec = "refs/heads/*:refs/heads/*";
-        try!(fetch(&self.repo, &url, refspec, &cargo_config));
+        fetch(&self.repo, &url, refspec, &cargo_config)?;
         Ok(())
     }
 
@@ -297,9 +297,9 @@ impl<'a> GitCheckout<'a> {
         let ok_file = self.location.join(".cargo-ok");
         let _ = fs::remove_file(&ok_file);
         info!("reset {} to {}", self.repo.path().display(), self.revision);
-        let object = try!(self.repo.find_object(self.revision.0, None));
-        try!(self.repo.reset(&object, git2::ResetType::Hard, None));
-        try!(File::create(ok_file));
+        let object = self.repo.find_object(self.revision.0, None)?;
+        self.repo.reset(&object, git2::ResetType::Hard, None)?;
+        File::create(ok_file)?;
         Ok(())
     }
 
@@ -309,11 +309,11 @@ impl<'a> GitCheckout<'a> {
         fn update_submodules(repo: &git2::Repository, cargo_config: &Config) -> CargoResult<()> {
             info!("update submodules for: {:?}", repo.workdir().unwrap());
 
-            for mut child in try!(repo.submodules()).into_iter() {
-                try!(child.init(false));
-                let url = try!(child.url().chain_error(|| {
+            for mut child in repo.submodules()?.into_iter() {
+                child.init(false)?;
+                let url = child.url().chain_error(|| {
                     internal("non-utf8 url for submodule")
-                }));
+                })?;
 
                 // A submodule which is listed in .gitmodules but not actually
                 // checked out will not have a head id, so we should ignore it.
@@ -327,7 +327,7 @@ impl<'a> GitCheckout<'a> {
                 // as the submodule's head, then we can bail out and go to the
                 // next submodule.
                 let head_and_repo = child.open().and_then(|repo| {
-                    let target = try!(repo.head()).target();
+                    let target = repo.head()?.target();
                     Ok((target, repo))
                 });
                 let repo = match head_and_repo {
@@ -340,20 +340,20 @@ impl<'a> GitCheckout<'a> {
                     Err(..) => {
                         let path = repo.workdir().unwrap().join(child.path());
                         let _ = fs::remove_dir_all(&path);
-                        try!(git2::Repository::clone(url, &path))
+                        git2::Repository::clone(url, &path)?
                     }
                 };
 
                 // Fetch data from origin and reset to the head commit
                 let refspec = "refs/heads/*:refs/heads/*";
-                try!(fetch(&repo, url, refspec, &cargo_config).chain_error(|| {
+                fetch(&repo, url, refspec, &cargo_config).chain_error(|| {
                     internal(format!("failed to fetch submodule `{}` from {}",
                                      child.name().unwrap_or(""), url))
-                }));
+                })?;
 
-                let obj = try!(repo.find_object(head, None));
-                try!(repo.reset(&obj, git2::ResetType::Hard, None));
-                try!(update_submodules(&repo, &cargo_config));
+                let obj = repo.find_object(head, None)?;
+                repo.reset(&obj, git2::ResetType::Hard, None)?;
+                update_submodules(&repo, &cargo_config)?;
             }
             Ok(())
         }
@@ -569,19 +569,19 @@ pub fn fetch(repo: &git2::Repository,
                was specified")
     }
 
-    with_authentication(url, &try!(repo.config()), |f| {
+    with_authentication(url, &repo.config()?, |f| {
         let mut cb = git2::RemoteCallbacks::new();
         cb.credentials(f);
 
         // Create a local anonymous remote in the repository to fetch the url
-        let mut remote = try!(repo.remote_anonymous(&url));
+        let mut remote = repo.remote_anonymous(&url)?;
         let mut opts = git2::FetchOptions::new();
         opts.remote_callbacks(cb)
             .download_tags(git2::AutotagOption::All);
 
-        try!(network::with_retry(config, ||{
+        network::with_retry(config, ||{
             remote.fetch(&[refspec], Some(&mut opts), None)
-        }));
+        })?;
         Ok(())
     })
 }
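
The hunks above (and throughout this commit) are a purely mechanical substitution: `try!(expr)` and `expr?` reduce to the same early return. As a reminder of why the two are interchangeable, here is a minimal standalone sketch, not taken from Cargo; the function names are invented for illustration:

    use std::fs::File;
    use std::io;

    // What try!(File::open(path)) spells out by hand: take the Ok value,
    // or convert the error with From::from and return it early.
    fn open_longhand(path: &str) -> Result<File, io::Error> {
        let file = match File::open(path) {
            Ok(f) => f,
            Err(e) => return Err(From::from(e)),
        };
        Ok(file)
    }

    // The same behaviour with the ? operator, stable since Rust 1.13.
    fn open_shorthand(path: &str) -> Result<File, io::Error> {
        let file = File::open(path)?;
        Ok(file)
    }

Because the expansion is identical, the substitution can be applied hunk by hunk without changing behaviour.
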
index 052d01c7dab0c755efe172975f6c0d6e5c3ef875..197d756d66fab5f72910b8717287194ad72023a8 100644 (file)
@@ -56,7 +56,7 @@ impl<'cfg> PathSource<'cfg> {
     pub fn root_package(&mut self) -> CargoResult<Package> {
         trace!("root_package; source={:?}", self);
 
-        try!(self.update());
+        self.update()?;
 
         match self.packages.iter().find(|p| p.root() == &*self.path) {
             Some(pkg) => Ok(pkg.clone()),
@@ -71,8 +71,8 @@ impl<'cfg> PathSource<'cfg> {
             ops::read_packages(&self.path, &self.id, self.config)
         } else {
             let path = self.path.join("Cargo.toml");
-            let (pkg, _) = try!(ops::read_package(&path, &self.id,
-                                                  self.config));
+            let (pkg, _) = ops::read_package(&path, &self.id,
+                                                  self.config)?;
             Ok(vec![pkg])
         }
     }
@@ -94,10 +94,10 @@ impl<'cfg> PathSource<'cfg> {
                 human(format!("could not parse pattern `{}`: {}", p, e))
             })
         };
-        let exclude = try!(pkg.manifest().exclude().iter()
-                              .map(|p| parse(p)).collect::<Result<Vec<_>, _>>());
-        let include = try!(pkg.manifest().include().iter()
-                              .map(|p| parse(p)).collect::<Result<Vec<_>, _>>());
+        let exclude = pkg.manifest().exclude().iter()
+                              .map(|p| parse(p)).collect::<Result<Vec<_>, _>>()?;
+        let include = pkg.manifest().include().iter()
+                              .map(|p| parse(p)).collect::<Result<Vec<_>, _>>()?;
 
         let mut filter = |p: &Path| {
             let relative_path = util::without_prefix(p, &root).unwrap();
@@ -122,7 +122,7 @@ impl<'cfg> PathSource<'cfg> {
                 // check to see if we are indeed part of the index. If not, then
                 // this is likely an unrelated git repo, so keep going.
                 if let Ok(repo) = git2::Repository::open(cur) {
-                    let index = try!(repo.index());
+                    let index = repo.index()?;
                     let path = util::without_prefix(root, cur)
                                     .unwrap().join("Cargo.toml");
                     if index.get_path(&path, 0).is_some() {
@@ -146,10 +146,10 @@ impl<'cfg> PathSource<'cfg> {
                       filter: &mut FnMut(&Path) -> bool)
                       -> CargoResult<Vec<PathBuf>> {
         warn!("list_files_git {}", pkg.package_id());
-        let index = try!(repo.index());
-        let root = try!(repo.workdir().chain_error(|| {
+        let index = repo.index()?;
+        let root = repo.workdir().chain_error(|| {
             internal_error("Can't list files on a bare repository.", "")
-        }));
+        })?;
         let pkg_path = pkg.root();
 
         let mut ret = Vec::<PathBuf>::new();
@@ -171,7 +171,7 @@ impl<'cfg> PathSource<'cfg> {
         if let Some(suffix) = util::without_prefix(pkg_path, &root) {
             opts.pathspec(suffix);
         }
-        let statuses = try!(repo.statuses(Some(&mut opts)));
+        let statuses = repo.statuses(Some(&mut opts))?;
         let untracked = statuses.iter().filter_map(|entry| {
             match entry.status() {
                 git2::STATUS_WT_NEW => Some((join(&root, entry.path_bytes()), None)),
@@ -182,7 +182,7 @@ impl<'cfg> PathSource<'cfg> {
         let mut subpackages_found = Vec::new();
 
         'outer: for (file_path, is_dir) in index_files.chain(untracked) {
-            let file_path = try!(file_path);
+            let file_path = file_path?;
 
             // Filter out files blatantly outside this package. This is helped a
             // bit above via the `pathspec` function call, but we need to filter
@@ -223,20 +223,20 @@ impl<'cfg> PathSource<'cfg> {
             if is_dir.unwrap_or_else(|| file_path.is_dir()) {
                 warn!("  found submodule {}", file_path.display());
                 let rel = util::without_prefix(&file_path, &root).unwrap();
-                let rel = try!(rel.to_str().chain_error(|| {
+                let rel = rel.to_str().chain_error(|| {
                     human(format!("invalid utf-8 filename: {}", rel.display()))
-                }));
+                })?;
                 // Git submodules are currently only named through `/` path
                 // separators, explicitly not `\` which windows uses. Who knew?
                 let rel = rel.replace(r"\", "/");
                 match repo.find_submodule(&rel).and_then(|s| s.open()) {
                     Ok(repo) => {
-                        let files = try!(self.list_files_git(pkg, repo, filter));
+                        let files = self.list_files_git(pkg, repo, filter)?;
                         ret.extend(files.into_iter());
                     }
                     Err(..) => {
-                        try!(PathSource::walk(&file_path, &mut ret, false,
-                                              filter));
+                        PathSource::walk(&file_path, &mut ret, false,
+                                              filter)?;
                     }
                 }
             } else if (*filter)(&file_path) {
@@ -267,7 +267,7 @@ impl<'cfg> PathSource<'cfg> {
     fn list_files_walk(&self, pkg: &Package, filter: &mut FnMut(&Path) -> bool)
                        -> CargoResult<Vec<PathBuf>> {
         let mut ret = Vec::new();
-        try!(PathSource::walk(pkg.root(), &mut ret, true, filter));
+        PathSource::walk(pkg.root(), &mut ret, true, filter)?;
         Ok(ret)
     }
 
@@ -284,8 +284,8 @@ impl<'cfg> PathSource<'cfg> {
         if !is_root && fs::metadata(&path.join("Cargo.toml")).is_ok() {
             return Ok(())
         }
-        for dir in try!(fs::read_dir(path)) {
-            let dir = try!(dir).path();
+        for dir in fs::read_dir(path)? {
+            let dir = dir?.path();
             let name = dir.file_name().and_then(|s| s.to_str());
             // Skip dotfile directories
             if name.map(|s| s.starts_with('.')) == Some(true) {
@@ -297,7 +297,7 @@ impl<'cfg> PathSource<'cfg> {
                     _ => {}
                 }
             }
-            try!(PathSource::walk(&dir, ret, false, filter));
+            PathSource::walk(&dir, ret, false, filter)?;
         }
         Ok(())
     }
@@ -318,7 +318,7 @@ impl<'cfg> Registry for PathSource<'cfg> {
 impl<'cfg> Source for PathSource<'cfg> {
     fn update(&mut self) -> CargoResult<()> {
         if !self.updated {
-            let packages = try!(self.read_packages());
+            let packages = self.read_packages()?;
             self.packages.extend(packages.into_iter());
             self.updated = true;
         }
@@ -342,7 +342,7 @@ impl<'cfg> Source for PathSource<'cfg> {
 
         let mut max = FileTime::zero();
         let mut max_path = PathBuf::from("");
-        for file in try!(self.list_files(pkg)) {
+        for file in self.list_files(pkg)? {
             // An fs::stat error here is either because path is a
             // broken symlink, a permissions error, or a race
             // condition where this path was rm'ed - either way,
index cda824676d47990a7d3f1e85f7230efda562f262..86c02802942871e9d28d19665c446714dd9ef428 100644 (file)
@@ -41,7 +41,7 @@ impl<'cfg> RegistryIndex<'cfg> {
             return Ok(s.clone())
         }
         // Ok, we're missing the key, so parse the index file to load it.
-        try!(self.summaries(pkg.name()));
+        self.summaries(pkg.name())?;
         self.hashes.get(&key).chain_error(|| {
             internal(format!("no hash listed for {}", pkg))
         }).map(|s| s.clone())
@@ -55,7 +55,7 @@ impl<'cfg> RegistryIndex<'cfg> {
         if self.cache.contains_key(name) {
             return Ok(self.cache.get(name).unwrap());
         }
-        let summaries = try!(self.load_summaries(name));
+        let summaries = self.load_summaries(name)?;
         let summaries = summaries.into_iter().filter(|summary| {
             summary.0.package_id().name() == name
         }).collect();
@@ -94,7 +94,7 @@ impl<'cfg> RegistryIndex<'cfg> {
         match File::open(&path) {
             Ok(mut f) => {
                 let mut contents = String::new();
-                try!(f.read_to_string(&mut contents));
+                f.read_to_string(&mut contents)?;
                 let ret: CargoResult<Vec<(Summary, bool)>>;
                 ret = contents.lines().filter(|l| l.trim().len() > 0)
                               .map(|l| self.parse_registry_package(l))
@@ -116,13 +116,13 @@ impl<'cfg> RegistryIndex<'cfg> {
                               -> CargoResult<(Summary, bool)> {
         let RegistryPackage {
             name, vers, cksum, deps, features, yanked
-        } = try!(json::decode::<RegistryPackage>(line));
-        let pkgid = try!(PackageId::new(&name, &vers, &self.source_id));
+        } = json::decode::<RegistryPackage>(line)?;
+        let pkgid = PackageId::new(&name, &vers, &self.source_id)?;
         let deps: CargoResult<Vec<Dependency>> = deps.into_iter().map(|dep| {
             self.parse_registry_dependency(dep)
         }).collect();
-        let deps = try!(deps);
-        let summary = try!(Summary::new(pkgid, deps, features));
+        let deps = deps?;
+        let summary = Summary::new(pkgid, deps, features)?;
         let summary = summary.set_checksum(cksum.clone());
         self.hashes.insert((name, vers), cksum);
         Ok((summary, yanked.unwrap_or(false)))
@@ -135,7 +135,7 @@ impl<'cfg> RegistryIndex<'cfg> {
             name, req, features, optional, default_features, target, kind
         } = dep;
 
-        let dep = try!(DependencyInner::parse(&name, Some(&req), &self.source_id, None));
+        let dep = DependencyInner::parse(&name, Some(&req), &self.source_id, None)?;
         let kind = match kind.as_ref().map(|s| &s[..]).unwrap_or("") {
             "dev" => Kind::Development,
             "build" => Kind::Build,
@@ -143,7 +143,7 @@ impl<'cfg> RegistryIndex<'cfg> {
         };
 
         let platform = match target {
-            Some(target) => Some(try!(target.parse())),
+            Some(target) => Some(target.parse()?),
             None => None,
         };
 
@@ -166,7 +166,7 @@ impl<'cfg> RegistryIndex<'cfg> {
 impl<'cfg> Registry for RegistryIndex<'cfg> {
     fn query(&mut self, dep: &Dependency) -> CargoResult<Vec<Summary>> {
         let mut summaries = {
-            let summaries = try!(self.summaries(dep.name()));
+            let summaries = self.summaries(dep.name())?;
             summaries.iter().filter(|&&(_, yanked)| {
                 dep.source_id().precise().is_some() || !yanked
             }).map(|s| s.0.clone()).collect::<Vec<_>>()
index 46387bb6841b3c2f721cb491c695fcafedfdc244..6c108cd90171c6d8a97fa6cbffdb50afb655a338 100644 (file)
@@ -60,9 +60,9 @@ impl<'cfg> RegistryData for LocalRegistry<'cfg> {
     fn download(&mut self, pkg: &PackageId, checksum: &str)
                 -> CargoResult<FileLock> {
         let crate_file = format!("{}-{}.crate", pkg.name(), pkg.version());
-        let mut crate_file = try!(self.root.open_ro(&crate_file,
+        let mut crate_file = self.root.open_ro(&crate_file,
                                                     self.config,
-                                                    "crate file"));
+                                                    "crate file")?;
 
         // If we've already got an unpacked version of this crate, then skip the
         // checksum below as it is in theory already verified.
@@ -71,16 +71,16 @@ impl<'cfg> RegistryData for LocalRegistry<'cfg> {
             return Ok(crate_file)
         }
 
-        try!(self.config.shell().status("Unpacking", pkg));
+        self.config.shell().status("Unpacking", pkg)?;
 
         // We don't actually need to download anything per-se, we just need to
         // verify the checksum matches the .crate file itself.
         let mut state = Sha256::new();
         let mut buf = [0; 64 * 1024];
         loop {
-            let n = try!(crate_file.read(&mut buf).chain_error(|| {
+            let n = crate_file.read(&mut buf).chain_error(|| {
                 human(format!("failed to read `{}`", crate_file.path().display()))
-            }));
+            })?;
             if n == 0 {
                 break
             }
@@ -90,7 +90,7 @@ impl<'cfg> RegistryData for LocalRegistry<'cfg> {
             bail!("failed to verify the checksum of `{}`", pkg)
         }
 
-        try!(crate_file.seek(SeekFrom::Start(0)));
+        crate_file.seek(SeekFrom::Start(0))?;
 
         Ok(crate_file)
     }
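
A pattern that recurs above is attaching context to an error before propagating it: `try!(expr.chain_error(|| ..))` becomes `expr.chain_error(|| ..)?`, i.e. the `?` simply moves to after the context-adding call. `chain_error` is a Cargo-internal helper; a rough std-only analogue using `map_err`, with invented names, looks like:

    use std::fs::File;
    use std::io::{self, Read};

    // Approximation of the chain_error-then-propagate shape with map_err:
    // add context to the error first, then let ? return it to the caller.
    fn read_manifest(path: &str) -> Result<String, io::Error> {
        let mut contents = String::new();
        File::open(path)
            .map_err(|e| io::Error::new(e.kind(), format!("failed to open `{}`: {}", path, e)))?
            .read_to_string(&mut contents)?;
        Ok(contents)
    }
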
index 13517fc08d57a0dfc677cb96435e2ee4315ed28f..dd5056be9802715b97e172d927d5c21c23661dee 100644 (file)
@@ -288,7 +288,7 @@ impl<'cfg> RegistrySource<'cfg> {
                       -> CargoResult<PathBuf> {
         let dst = self.src_path.join(&format!("{}-{}", pkg.name(),
                                               pkg.version()));
-        try!(dst.create_dir());
+        dst.create_dir()?;
         // Note that we've already got the `tarball` locked above, and that
         // implies a lock on the unpacked destination as well, so this access
         // via `into_path_unlocked` should be ok.
@@ -298,15 +298,15 @@ impl<'cfg> RegistrySource<'cfg> {
             return Ok(dst)
         }
 
-        let gz = try!(GzDecoder::new(tarball.file()));
+        let gz = GzDecoder::new(tarball.file())?;
         let mut tar = Archive::new(gz);
-        try!(tar.unpack(dst.parent().unwrap()));
-        try!(File::create(&ok));
+        tar.unpack(dst.parent().unwrap())?;
+        File::create(&ok)?;
         Ok(dst)
     }
 
     fn do_update(&mut self) -> CargoResult<()> {
-        try!(self.ops.update_index());
+        self.ops.update_index()?;
         let path = self.ops.index_path();
         self.index = index::RegistryIndex::new(&self.source_id,
                                                path,
@@ -323,8 +323,8 @@ impl<'cfg> Registry for RegistrySource<'cfg> {
         // come back with no summaries, then our registry may need to be
         // updated, so we fall back to performing a lazy update.
         if dep.source_id().precise().is_some() && !self.updated {
-            if try!(self.index.query(dep)).is_empty() {
-                try!(self.do_update());
+            if self.index.query(dep)?.is_empty() {
+                self.do_update()?;
             }
         }
 
@@ -346,26 +346,26 @@ impl<'cfg> Source for RegistrySource<'cfg> {
         // `Some("locked")` as other `Some` values indicate a `cargo update
         // --precise` request
         if self.source_id.precise() != Some("locked") {
-            try!(self.do_update());
+            self.do_update()?;
         }
         Ok(())
     }
 
     fn download(&mut self, package: &PackageId) -> CargoResult<Package> {
-        let hash = try!(self.index.hash(package));
-        let path = try!(self.ops.download(package, &hash));
-        let path = try!(self.unpack_package(package, &path).chain_error(|| {
+        let hash = self.index.hash(package)?;
+        let path = self.ops.download(package, &hash)?;
+        let path = self.unpack_package(package, &path).chain_error(|| {
             internal(format!("failed to unpack package `{}`", package))
-        }));
+        })?;
         let mut src = PathSource::new(&path, &self.source_id, self.config);
-        try!(src.update());
-        let pkg = try!(src.download(package));
+        src.update()?;
+        let pkg = src.download(package)?;
 
         // Unfortunately the index and the actual Cargo.toml in the index can
         // differ due to historical Cargo bugs. To paper over these we trash the
         // *summary* loaded from the Cargo.toml we just downloaded with the one
         // we loaded from the index.
-        let summaries = try!(self.index.summaries(package.name()));
+        let summaries = self.index.summaries(package.name())?;
         let summary = summaries.iter().map(|s| &s.0).find(|s| {
             s.package_id() == package
         }).expect("summary not found");
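
Unlike the statement-oriented `try!(..)`, `?` is an ordinary postfix expression, which is what allows forms above such as `self.index.query(dep)?.is_empty()` to chain directly off the unwrapped value. A small self-contained sketch of that shape, with invented names:

    use std::num::ParseIntError;

    fn parse_port(s: &str) -> Result<u16, ParseIntError> {
        // The unwrapped value feeds straight into the surrounding expression.
        Ok(s.trim().parse()?)
    }

    fn first_port(csv: &str) -> Result<Option<u16>, ParseIntError> {
        match csv.split(',').next() {
            // ? in the middle of a larger expression, as in Some(target.parse()?)
            Some(s) => Ok(Some(parse_port(s)?)),
            None => Ok(None),
        }
    }
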
index fd301b0b6f7b2bb68006283ba260df1ca625d087..d4bc144889ac2c469327a9634baf7e3128500455 100644 (file)
@@ -44,12 +44,12 @@ impl<'cfg> RegistryData for RemoteRegistry<'cfg> {
     }
 
     fn config(&self) -> CargoResult<Option<RegistryConfig>> {
-        let lock = try!(self.index_path.open_ro(Path::new(INDEX_LOCK),
+        let lock = self.index_path.open_ro(Path::new(INDEX_LOCK),
                                                 self.config,
-                                                "the registry index"));
+                                                "the registry index")?;
         let path = lock.path().parent().unwrap();
-        let contents = try!(paths::read(&path.join("config.json")));
-        let config = try!(json::decode(&contents));
+        let contents = paths::read(&path.join("config.json"))?;
+        let config = json::decode(&contents)?;
         Ok(Some(config))
     }
 
@@ -60,29 +60,29 @@ impl<'cfg> RegistryData for RemoteRegistry<'cfg> {
         //
         // This way if there's a problem the error gets printed before we even
         // hit the index, which may not actually read this configuration.
-        try!(ops::http_handle(self.config));
+        ops::http_handle(self.config)?;
 
         // Then we actually update the index
-        try!(self.index_path.create_dir());
-        let lock = try!(self.index_path.open_rw(Path::new(INDEX_LOCK),
+        self.index_path.create_dir()?;
+        let lock = self.index_path.open_rw(Path::new(INDEX_LOCK),
                                                 self.config,
-                                                "the registry index"));
+                                                "the registry index")?;
         let path = lock.path().parent().unwrap();
 
-        try!(self.config.shell().status("Updating",
-             format!("registry `{}`", self.source_id.url())));
+        self.config.shell().status("Updating",
+             format!("registry `{}`", self.source_id.url()))?;
 
-        let repo = try!(git2::Repository::open(path).or_else(|_| {
+        let repo = git2::Repository::open(path).or_else(|_| {
             let _ = lock.remove_siblings();
             git2::Repository::init(path)
-        }));
+        })?;
 
         if self.source_id.url().host_str() == Some("github.com") {
             if let Ok(oid) = repo.refname_to_id("refs/heads/master") {
                 let handle = match self.handle {
                     Some(ref mut handle) => handle,
                     None => {
-                        self.handle = Some(try!(ops::http_handle(self.config)));
+                        self.handle = Some(ops::http_handle(self.config)?);
                         self.handle.as_mut().unwrap()
                     }
                 };
@@ -99,16 +99,16 @@ impl<'cfg> RegistryData for RemoteRegistry<'cfg> {
         let url = self.source_id.url().to_string();
         let refspec = "refs/heads/*:refs/remotes/origin/*";
 
-        try!(git::fetch(&repo, &url, refspec, &self.config).chain_error(|| {
+        git::fetch(&repo, &url, refspec, &self.config).chain_error(|| {
             human(format!("failed to fetch `{}`", url))
-        }));
+        })?;
 
         // git reset --hard origin/master
         let reference = "refs/remotes/origin/master";
-        let oid = try!(repo.refname_to_id(reference));
+        let oid = repo.refname_to_id(reference)?;
         trace!("[{}] updating to rev {}", self.source_id, oid);
-        let object = try!(repo.find_object(oid, None));
-        try!(repo.reset(&object, git2::ResetType::Hard, None));
+        let object = repo.find_object(oid, None)?;
+        repo.reset(&object, git2::ResetType::Hard, None)?;
         Ok(())
     }
 
@@ -124,20 +124,20 @@ impl<'cfg> RegistryData for RemoteRegistry<'cfg> {
         // If this fails then we fall through to the exclusive path where we may
         // have to redownload the file.
         if let Ok(dst) = self.cache_path.open_ro(path, self.config, &filename) {
-            let meta = try!(dst.file().metadata());
+            let meta = dst.file().metadata()?;
             if meta.len() > 0 {
                 return Ok(dst)
             }
         }
-        let mut dst = try!(self.cache_path.open_rw(path, self.config, &filename));
-        let meta = try!(dst.file().metadata());
+        let mut dst = self.cache_path.open_rw(path, self.config, &filename)?;
+        let meta = dst.file().metadata()?;
         if meta.len() > 0 {
             return Ok(dst)
         }
-        try!(self.config.shell().status("Downloading", pkg));
+        self.config.shell().status("Downloading", pkg)?;
 
-        let config = try!(self.config()).unwrap();
-        let mut url = try!(config.dl.to_url());
+        let config = self.config()?.unwrap();
+        let mut url = config.dl.to_url()?;
         url.path_segments_mut().unwrap()
             .push(pkg.name())
             .push(&pkg.version().to_string())
@@ -146,30 +146,30 @@ impl<'cfg> RegistryData for RemoteRegistry<'cfg> {
         let handle = match self.handle {
             Some(ref mut handle) => handle,
             None => {
-                self.handle = Some(try!(ops::http_handle(self.config)));
+                self.handle = Some(ops::http_handle(self.config)?);
                 self.handle.as_mut().unwrap()
             }
         };
         // TODO: don't download into memory, but ensure that if we ctrl-c a
         //       download we should resume either from the start or the middle
         //       on the next time
-        try!(handle.get(true));
-        try!(handle.url(&url.to_string()));
-        try!(handle.follow_location(true));
+        handle.get(true)?;
+        handle.url(&url.to_string())?;
+        handle.follow_location(true)?;
         let mut state = Sha256::new();
         let mut body = Vec::new();
         {
             let mut handle = handle.transfer();
-            try!(handle.write_function(|buf| {
+            handle.write_function(|buf| {
                 state.update(buf);
                 body.extend_from_slice(buf);
                 Ok(buf.len())
-            }));
-            try!(network::with_retry(self.config, || {
+            })?;
+            network::with_retry(self.config, || {
                 handle.perform()
-            }))
+            })?
         }
-        let code = try!(handle.response_code());
+        let code = handle.response_code()?;
         if code != 200 && code != 0 {
             bail!("failed to get 200 response from `{}`, got {}", url, code)
         }
@@ -179,8 +179,8 @@ impl<'cfg> RegistryData for RemoteRegistry<'cfg> {
             bail!("failed to verify the checksum of `{}`", pkg)
         }
 
-        try!(dst.write_all(&body));
-        try!(dst.seek(SeekFrom::Start(0)));
+        dst.write_all(&body)?;
+        dst.seek(SeekFrom::Start(0))?;
         Ok(dst)
     }
 }
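
One constraint worth keeping in mind when reviewing these hunks: `?` only compiles inside a function whose return type can absorb the error (via `From`), which every converted call site here already satisfies by returning `CargoResult<..>`. A minimal sketch of the constraint, with invented function names:

    use std::fs;
    use std::io;

    // Fine: the return type is a Result whose error type matches what
    // fs::metadata produces, so ? has somewhere to send the failure.
    fn file_len(path: &str) -> Result<u64, io::Error> {
        Ok(fs::metadata(path)?.len())
    }

    // Would not compile: a plain u64 return type gives ? nowhere to put
    // the io::Error.
    //
    // fn file_len_bad(path: &str) -> u64 {
    //     fs::metadata(path)?.len()
    // }
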
index 7fb95bdf6c87ee9aca2f20c980bcc58ac8ad4da6..e164a1c7cf93179ebcaeb10e4ecc42f3e852edea 100644 (file)
@@ -22,10 +22,10 @@ impl<'cfg> ReplacedSource<'cfg> {
 impl<'cfg> Registry for ReplacedSource<'cfg> {
     fn query(&mut self, dep: &Dependency) -> CargoResult<Vec<Summary>> {
         let dep = dep.clone().map_source(&self.to_replace, &self.replace_with);
-        let ret = try!(self.inner.query(&dep).chain_error(|| {
+        let ret = self.inner.query(&dep).chain_error(|| {
             human(format!("failed to query replaced source `{}`",
                           self.to_replace))
-        }));
+        })?;
         Ok(ret.into_iter().map(|summary| {
             summary.map_source(&self.replace_with, &self.to_replace)
         }).collect())
@@ -42,10 +42,10 @@ impl<'cfg> Source for ReplacedSource<'cfg> {
 
     fn download(&mut self, id: &PackageId) -> CargoResult<Package> {
         let id = id.with_source_id(&self.replace_with);
-        let pkg = try!(self.inner.download(&id).chain_error(|| {
+        let pkg = self.inner.download(&id).chain_error(|| {
             human(format!("failed to download replaced source `{}`",
                           self.to_replace))
-        }));
+        })?;
         Ok(pkg.map_source(&self.replace_with, &self.to_replace))
     }
 
index fbdfb919eeddee6e7e971e1678b52c1930cb92f2..bd89586bc3822112bbfdaf14eff46afcf366f268 100644 (file)
@@ -42,7 +42,7 @@ impl FromStr for Cfg {
 
     fn from_str(s: &str) -> CargoResult<Cfg> {
         let mut p = Parser::new(s);
-        let e = try!(p.cfg());
+        let e = p.cfg()?;
         if p.t.next().is_some() {
             bail!("malformed cfg value or key/value pair")
         }
@@ -75,7 +75,7 @@ impl FromStr for CfgExpr {
 
     fn from_str(s: &str) -> CargoResult<CfgExpr> {
         let mut p = Parser::new(s);
-        let e = try!(p.expr());
+        let e = p.expr()?;
         if p.t.next().is_some() {
             bail!("can only have one cfg-expression, consider using all() or \
                    any() explicitly")
@@ -101,9 +101,9 @@ impl<'a, T: fmt::Display> fmt::Display for CommaSep<'a, T> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         for (i, v) in self.0.iter().enumerate() {
             if i > 0 {
-                try!(write!(f, ", "));
+                write!(f, ", ")?;
             }
-            try!(write!(f, "{}", v));
+            write!(f, "{}", v)?;
         }
         Ok(())
     }
@@ -125,11 +125,11 @@ impl<'a> Parser<'a> {
             Some(&Ok(Token::Ident(op @ "any"))) => {
                 self.t.next();
                 let mut e = Vec::new();
-                try!(self.eat(Token::LeftParen));
+                self.eat(Token::LeftParen)?;
                 while !self.try(Token::RightParen) {
-                    e.push(try!(self.expr()));
+                    e.push(self.expr()?);
                     if !self.try(Token::Comma) {
-                        try!(self.eat(Token::RightParen));
+                        self.eat(Token::RightParen)?;
                         break
                     }
                 }
@@ -141,9 +141,9 @@ impl<'a> Parser<'a> {
             }
             Some(&Ok(Token::Ident("not"))) => {
                 self.t.next();
-                try!(self.eat(Token::LeftParen));
-                let e = try!(self.expr());
-                try!(self.eat(Token::RightParen));
+                self.eat(Token::LeftParen)?;
+                let e = self.expr()?;
+                self.eat(Token::RightParen)?;
                 Ok(CfgExpr::Not(Box::new(e)))
             }
             Some(&Ok(..)) => self.cfg().map(CfgExpr::Value),
index e9988fe94966b126e0fe0160859db89db8163526..dc97acc0aac87493a5d2e0f7d1dd1f6a69d2dd21 100644 (file)
@@ -53,13 +53,13 @@ impl Config {
 
     pub fn default() -> CargoResult<Config> {
         let shell = ::shell(Verbosity::Verbose, ColorConfig::Auto);
-        let cwd = try!(env::current_dir().chain_error(|| {
+        let cwd = env::current_dir().chain_error(|| {
             human("couldn't get the current directory of the process")
-        }));
-        let homedir = try!(homedir(&cwd).chain_error(|| {
+        })?;
+        let homedir = homedir(&cwd).chain_error(|| {
             human("Cargo couldn't find your home directory. \
                   This probably means that $HOME was not set.")
-        }));
+        })?;
         Ok(Config::new(shell, cwd, homedir))
     }
 
@@ -90,7 +90,7 @@ impl Config {
     }
 
     pub fn rustc(&self) -> CargoResult<&Rustc> {
-        self.rustc.get_or_try_init(|| Rustc::new(try!(self.get_tool("rustc"))))
+        self.rustc.get_or_try_init(|| Rustc::new(self.get_tool("rustc")?))
     }
 
     pub fn values(&self) -> CargoResult<&HashMap<String, ConfigValue>> {
@@ -102,7 +102,7 @@ impl Config {
     pub fn target_dir(&self) -> CargoResult<Option<Filesystem>> {
         if let Some(dir) = env::var_os("CARGO_TARGET_DIR") {
             Ok(Some(Filesystem::new(self.cwd.join(dir))))
-        } else if let Some(val) = try!(self.get_path("build.target-dir")) {
+        } else if let Some(val) = self.get_path("build.target-dir")? {
             let val = self.cwd.join(val.val);
             Ok(Some(Filesystem::new(val)))
         } else {
@@ -111,7 +111,7 @@ impl Config {
     }
 
     fn get(&self, key: &str) -> CargoResult<Option<ConfigValue>> {
-        let vals = try!(self.values());
+        let vals = self.values()?;
         let mut parts = key.split('.').enumerate();
         let mut val = match vals.get(parts.next().unwrap().1) {
             Some(val) => val,
@@ -152,7 +152,7 @@ impl Config {
         match env::var(&format!("CARGO_{}", key)) {
             Ok(value) => {
                 Ok(Some(Value {
-                    val: try!(value.parse()),
+                    val: value.parse()?,
                     definition: Definition::Environment,
                 }))
             }
@@ -161,10 +161,10 @@ impl Config {
     }
 
     pub fn get_string(&self, key: &str) -> CargoResult<Option<Value<String>>> {
-        if let Some(v) = try!(self.get_env(key)) {
+        if let Some(v) = self.get_env(key)? {
             return Ok(Some(v))
         }
-        match try!(self.get(key)) {
+        match self.get(key)? {
             Some(CV::String(i, path)) => {
                 Ok(Some(Value {
                     val: i,
@@ -177,10 +177,10 @@ impl Config {
     }
 
     pub fn get_bool(&self, key: &str) -> CargoResult<Option<Value<bool>>> {
-        if let Some(v) = try!(self.get_env(key)) {
+        if let Some(v) = self.get_env(key)? {
             return Ok(Some(v))
         }
-        match try!(self.get(key)) {
+        match self.get(key)? {
             Some(CV::Boolean(b, path)) => {
                 Ok(Some(Value {
                     val: b,
@@ -193,7 +193,7 @@ impl Config {
     }
 
     pub fn get_path(&self, key: &str) -> CargoResult<Option<Value<PathBuf>>> {
-        if let Some(val) = try!(self.get_string(&key)) {
+        if let Some(val) = self.get_string(&key)? {
             let is_path = val.val.contains('/') ||
                           (cfg!(windows) && val.val.contains('\\'));
             let path = if is_path {
@@ -213,7 +213,7 @@ impl Config {
 
     pub fn get_list(&self, key: &str)
                     -> CargoResult<Option<Value<Vec<(String, PathBuf)>>>> {
-        match try!(self.get(key)) {
+        match self.get(key)? {
             Some(CV::List(i, path)) => {
                 Ok(Some(Value {
                     val: i,
@@ -227,7 +227,7 @@ impl Config {
 
     pub fn get_table(&self, key: &str)
                     -> CargoResult<Option<Value<HashMap<String, CV>>>> {
-        match try!(self.get(key)) {
+        match self.get(key)? {
             Some(CV::Table(i, path)) => {
                 Ok(Some(Value {
                     val: i,
@@ -240,10 +240,10 @@ impl Config {
     }
 
     pub fn get_i64(&self, key: &str) -> CargoResult<Option<Value<i64>>> {
-        if let Some(v) = try!(self.get_env(key)) {
+        if let Some(v) = self.get_env(key)? {
             return Ok(Some(v))
         }
-        match try!(self.get(key)) {
+        match self.get(key)? {
             Some(CV::Integer(i, path)) => {
                 Ok(Some(Value {
                     val: i,
@@ -256,7 +256,7 @@ impl Config {
     }
 
     pub fn net_retry(&self) -> CargoResult<i64> {
-        match try!(self.get_i64("net.retry")) {
+        match self.get_i64("net.retry")? {
             Some(v) => {
                 let value = v.val;
                 if value < 0 {
@@ -316,7 +316,7 @@ impl Config {
         };
 
         self.shell().set_verbosity(verbosity);
-        try!(self.shell().set_color_config(color.map(|s| &s[..])));
+        self.shell().set_color_config(color.map(|s| &s[..]))?;
         self.extra_verbose.set(extra_verbose);
         self.frozen.set(frozen);
         self.locked.set(locked);
@@ -339,23 +339,23 @@ impl Config {
     fn load_values(&self) -> CargoResult<HashMap<String, ConfigValue>> {
         let mut cfg = CV::Table(HashMap::new(), PathBuf::from("."));
 
-        try!(walk_tree(&self.cwd, |mut file, path| {
+        walk_tree(&self.cwd, |mut file, path| {
             let mut contents = String::new();
-            try!(file.read_to_string(&mut contents));
-            let table = try!(cargo_toml::parse(&contents,
+            file.read_to_string(&mut contents)?;
+            let table = cargo_toml::parse(&contents,
                                                &path,
                                                self).chain_error(|| {
                 human(format!("could not parse TOML configuration in `{}`",
                               path.display()))
-            }));
+            })?;
             let toml = toml::Value::Table(table);
-            let value = try!(CV::from_toml(&path, toml).chain_error(|| {
+            let value = CV::from_toml(&path, toml).chain_error(|| {
                 human(format!("failed to load TOML configuration from `{}`",
                               path.display()))
-            }));
-            try!(cfg.merge(value));
+            })?;
+            cfg.merge(value)?;
             Ok(())
-        }).chain_error(|| human("Couldn't load Cargo configuration")));
+        }).chain_error(|| human("Couldn't load Cargo configuration"))?;
 
 
         match cfg {
@@ -371,7 +371,7 @@ impl Config {
         }
 
         let var = format!("build.{}", tool);
-        if let Some(tool_path) = try!(self.get_path(&var)) {
+        if let Some(tool_path) = self.get_path(&var)? {
             return Ok(tool_path.val);
         }
 
@@ -414,10 +414,10 @@ impl fmt::Debug for ConfigValue {
             CV::String(ref s, ref path) => write!(f, "{} (from {})", s,
                                                   path.display()),
             CV::List(ref list, ref path) => {
-                try!(write!(f, "["));
+                write!(f, "[")?;
                 for (i, &(ref s, ref path)) in list.iter().enumerate() {
-                    if i > 0 { try!(write!(f, ", ")); }
-                    try!(write!(f, "{} (from {})", s, path.display()));
+                    if i > 0 { write!(f, ", ")?; }
+                    write!(f, "{} (from {})", s, path.display())?;
                 }
                 write!(f, "] (from {})", path.display())
             }
@@ -448,21 +448,21 @@ impl ConfigValue {
             toml::Value::Boolean(b) => Ok(CV::Boolean(b, path.to_path_buf())),
             toml::Value::Integer(i) => Ok(CV::Integer(i, path.to_path_buf())),
             toml::Value::Array(val) => {
-                Ok(CV::List(try!(val.into_iter().map(|toml| {
+                Ok(CV::List(val.into_iter().map(|toml| {
                     match toml {
                         toml::Value::String(val) => Ok((val, path.to_path_buf())),
                         v => Err(human(format!("expected string but found {} \
                                                 in list", v.type_str()))),
                     }
-                }).collect::<CargoResult<_>>()), path.to_path_buf()))
+                }).collect::<CargoResult<_>>()?, path.to_path_buf()))
             }
             toml::Value::Table(val) => {
-                Ok(CV::Table(try!(val.into_iter().map(|(key, value)| {
-                    let value = try!(CV::from_toml(path, value).chain_error(|| {
+                Ok(CV::Table(val.into_iter().map(|(key, value)| {
+                    let value = CV::from_toml(path, value).chain_error(|| {
                         human(format!("failed to parse key `{}`", key))
-                    }));
+                    })?;
                     Ok((key, value))
-                }).collect::<CargoResult<_>>()), path.to_path_buf()))
+                }).collect::<CargoResult<_>>()?, path.to_path_buf()))
             }
             v => bail!("found TOML configuration value of unknown type `{}`",
                        v.type_str()),
@@ -485,7 +485,7 @@ impl ConfigValue {
                         Occupied(mut entry) => {
                             let path = value.definition_path().to_path_buf();
                             let entry = entry.get_mut();
-                            try!(entry.merge(value).chain_error(|| {
+                            entry.merge(value).chain_error(|| {
                                 human(format!("failed to merge key `{}` between \
                                                files:\n  \
                                                file 1: {}\n  \
@@ -494,7 +494,7 @@ impl ConfigValue {
                                               entry.definition_path().display(),
                                               path.display()))
 
-                            }));
+                            })?;
                         }
                         Vacant(entry) => { entry.insert(value); }
                     };
@@ -664,9 +664,9 @@ fn walk_tree<F>(pwd: &Path, mut walk: F) -> CargoResult<()>
     loop {
         let possible = current.join(".cargo").join("config");
         if fs::metadata(&possible).is_ok() {
-            let file = try!(File::open(&possible));
+            let file = File::open(&possible)?;
 
-            try!(walk(file, &possible));
+            walk(file, &possible)?;
 
             stash.insert(possible);
         }
@@ -680,14 +680,14 @@ fn walk_tree<F>(pwd: &Path, mut walk: F) -> CargoResult<()>
     // Once we're done, also be sure to walk the home directory even if it's not
     // in our history to be sure we pick up that standard location for
     // information.
-    let home = try!(homedir(pwd).chain_error(|| {
+    let home = homedir(pwd).chain_error(|| {
         human("Cargo couldn't find your home directory. \
               This probably means that $HOME was not set.")
-    }));
+    })?;
     let config = home.join("config");
     if !stash.contains(&config) && fs::metadata(&config).is_ok() {
-        let file = try!(File::open(&config));
-        try!(walk(file, &config));
+        let file = File::open(&config)?;
+        walk(file, &config)?;
     }
 
     Ok(())
@@ -704,20 +704,20 @@ pub fn set_config(cfg: &Config,
     // 3. This blows away the previous ordering of a file.
     let mut file = match loc {
         Location::Global => {
-            try!(cfg.home_path.create_dir());
-            try!(cfg.home_path.open_rw(Path::new("config"), cfg,
-                                       "the global config file"))
+            cfg.home_path.create_dir()?;
+            cfg.home_path.open_rw(Path::new("config"), cfg,
+                                       "the global config file")?
         }
         Location::Project => unimplemented!(),
     };
     let mut contents = String::new();
     let _ = file.read_to_string(&mut contents);
-    let mut toml = try!(cargo_toml::parse(&contents, file.path(), cfg));
+    let mut toml = cargo_toml::parse(&contents, file.path(), cfg)?;
     toml.insert(key.to_string(), value.into_toml());
 
     let contents = toml::Value::Table(toml).to_string();
-    try!(file.seek(SeekFrom::Start(0)));
-    try!(file.write_all(contents.as_bytes()));
-    try!(file.file().set_len(contents.len() as u64));
+    file.seek(SeekFrom::Start(0))?;
+    file.write_all(contents.as_bytes())?;
+    file.file().set_len(contents.len() as u64)?;
     Ok(())
 }
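
In the `ConfigValue::from_toml` hunks above, `?` is applied to an entire `collect::<CargoResult<_>>()`: an iterator of per-item `Result`s is gathered into one `Result` over the whole collection, which stops at the first error, and `?` then unwraps it. A std-only sketch of the same shape, with an invented function name:

    use std::num::ParseIntError;

    // Collecting Result items into Result<Vec<_>, _> short-circuits on the
    // first Err; ? then propagates that single error to the caller.
    fn parse_all(raw: &[&str]) -> Result<Vec<i64>, ParseIntError> {
        let parsed = raw.iter()
                        .map(|s| s.parse::<i64>())
                        .collect::<Result<Vec<_>, _>>()?;
        Ok(parsed)
    }
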
index 0f1bfe9efb4b356ce134aa93c2a662ec79c258df..853b8bef9d5b77d025512532261bc781568bf115 100644 (file)
@@ -190,9 +190,9 @@ struct ConcreteCargoError {
 
 impl fmt::Display for ConcreteCargoError {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        try!(write!(f, "{}", self.description));
+        write!(f, "{}", self.description)?;
         if let Some(ref s) = self.detail {
-            try!(write!(f, " ({})", s));
+            write!(f, " ({})", s)?;
         }
         Ok(())
     }
index f90fa596f2889384ace491b637c91a1573ed12c3..a6f90afc33552442f089a5000e34a1ad4fd459ae 100644 (file)
@@ -50,16 +50,16 @@ impl FileLock {
     /// needs to be cleared out as it may be corrupt.
     pub fn remove_siblings(&self) -> io::Result<()> {
         let path = self.path();
-        for entry in try!(path.parent().unwrap().read_dir()) {
-            let entry = try!(entry);
+        for entry in path.parent().unwrap().read_dir()? {
+            let entry = entry?;
             if Some(&entry.file_name()[..]) == path.file_name() {
                 continue
             }
-            let kind = try!(entry.file_type());
+            let kind = entry.file_type()?;
             if kind.is_dir() {
-                try!(fs::remove_dir_all(entry.path()));
+                fs::remove_dir_all(entry.path())?;
             } else {
-                try!(fs::remove_file(entry.path()));
+                fs::remove_file(entry.path())?;
             }
         }
         Ok(())
@@ -204,26 +204,26 @@ impl Filesystem {
         // If we want an exclusive lock then if we fail because of NotFound it's
         // likely because an intermediate directory didn't exist, so try to
         // create the directory and then continue.
-        let f = try!(opts.open(&path).or_else(|e| {
+        let f = opts.open(&path).or_else(|e| {
             if e.kind() == io::ErrorKind::NotFound && state == State::Exclusive {
-                try!(create_dir_all(path.parent().unwrap()));
+                create_dir_all(path.parent().unwrap())?;
                 opts.open(&path)
             } else {
                 Err(e)
             }
         }).chain_error(|| {
             human(format!("failed to open: {}", path.display()))
-        }));
+        })?;
         match state {
             State::Exclusive => {
-                try!(acquire(config, msg, &path,
+                acquire(config, msg, &path,
                              &|| f.try_lock_exclusive(),
-                             &|| f.lock_exclusive()));
+                             &|| f.lock_exclusive())?;
             }
             State::Shared => {
-                try!(acquire(config, msg, &path,
+                acquire(config, msg, &path,
                              &|| f.try_lock_shared(),
-                             &|| f.lock_shared()));
+                             &|| f.lock_shared())?;
             }
             State::Unlocked => {}
 
@@ -285,7 +285,7 @@ fn acquire(config: &Config,
         }
     }
     let msg = format!("waiting for file lock on {}", msg);
-    try!(config.shell().err().say_status("Blocking", &msg, CYAN, true));
+    config.shell().err().say_status("Blocking", &msg, CYAN, true)?;
 
     return block().chain_error(|| {
         human(format!("failed to lock file: {}", path.display()))
index cc0414f6188cde0d405271f0bda85767b3416bb8..6543c8f91792582ef7757441123fc9469347d6ca 100644 (file)
@@ -69,17 +69,17 @@ impl<N: Eq + Hash + Clone> Graph<N> {
 
 impl<N: fmt::Display + Eq + Hash> fmt::Debug for Graph<N> {
     fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
-        try!(writeln!(fmt, "Graph {{"));
+        writeln!(fmt, "Graph {{")?;
 
         for (n, e) in self.nodes.iter() {
-            try!(writeln!(fmt, "  - {}", n));
+            writeln!(fmt, "  - {}", n)?;
 
             for n in e.iter() {
-                try!(writeln!(fmt, "    - {}", n));
+                writeln!(fmt, "    - {}", n)?;
             }
         }
 
-        try!(write!(fmt, "}}"));
+        write!(fmt, "}}")?;
 
         Ok(())
     }
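
The `fmt` hunks here and in the error and process-builder files follow the same shape: inside a `Display`/`Debug` impl the return type is `fmt::Result`, so each `write!(..)?` forwards the `fmt::Error` exactly as `try!(write!(..))` did. A minimal sketch with an invented type:

    use std::fmt;

    struct Pair {
        key: String,
        value: String,
    }

    impl fmt::Display for Pair {
        fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
            // Each write! may fail; ? propagates the fmt::Error upward.
            write!(f, "{}", self.key)?;
            write!(f, " = ")?;
            write!(f, "{}", self.value)
        }
    }
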
index 3cdaa641692d546e059984c2d9ec6185028e3b50..fc751dc3cf2b272a0df1b18e2e25320fdfd3e268 100644 (file)
@@ -58,7 +58,7 @@ impl<T> LazyCell<T> {
         where F: FnOnce() -> Result<T, Error>
     {
         if self.borrow().is_none() {
-            if let Err(_) = self.fill(try!(init())) {
+            if let Err(_) = self.fill(init()?) {
                 unreachable!();
             }
         }
index a53d04d284136ed1fbbe35d83feb448236648c17..4bc12b7b4e677be9f6b8e6003e7c83485b1f2432 100644 (file)
@@ -14,14 +14,14 @@ pub fn with_retry<T, E, F>(config: &Config, mut callback: F) -> CargoResult<T>
     where F: FnMut() -> Result<T, E>,
           E: errors::NetworkError
 {
-    let mut remaining = try!(config.net_retry());
+    let mut remaining = config.net_retry()?;
     loop {
         match callback() {
             Ok(ret) => return Ok(ret),
             Err(ref e) if e.maybe_spurious() && remaining > 0 => {
                 let msg = format!("spurious network error ({} tries \
                           remaining): {}", remaining, e);
-                try!(config.shell().warn(msg));
+                config.shell().warn(msg)?;
                 remaining -= 1;
             }
             Err(e) => return Err(Box::new(e)),
index 8444c3fd209eda055259da927e23a8b5b60ca7f8..23a42a84c706d8e007e1a06f24176b134c0ee5c2 100644 (file)
@@ -70,8 +70,8 @@ pub fn without_prefix<'a>(a: &'a Path, b: &'a Path) -> Option<&'a Path> {
 pub fn read(path: &Path) -> CargoResult<String> {
     (|| -> CargoResult<_> {
         let mut ret = String::new();
-        let mut f = try!(File::open(path));
-        try!(f.read_to_string(&mut ret));
+        let mut f = File::open(path)?;
+        f.read_to_string(&mut ret)?;
         Ok(ret)
     })().map_err(human).chain_error(|| {
         human(format!("failed to read `{}`", path.display()))
@@ -81,8 +81,8 @@ pub fn read(path: &Path) -> CargoResult<String> {
 pub fn read_bytes(path: &Path) -> CargoResult<Vec<u8>> {
     (|| -> CargoResult<_> {
         let mut ret = Vec::new();
-        let mut f = try!(File::open(path));
-        try!(f.read_to_end(&mut ret));
+        let mut f = File::open(path)?;
+        f.read_to_end(&mut ret)?;
         Ok(ret)
     })().map_err(human).chain_error(|| {
         human(format!("failed to read `{}`", path.display()))
@@ -91,8 +91,8 @@ pub fn read_bytes(path: &Path) -> CargoResult<Vec<u8>> {
 
 pub fn write(path: &Path, contents: &[u8]) -> CargoResult<()> {
     (|| -> CargoResult<()> {
-        let mut f = try!(File::create(path));
-        try!(f.write_all(contents));
+        let mut f = File::create(path)?;
+        f.write_all(contents)?;
         Ok(())
     })().map_err(human).chain_error(|| {
         human(format!("failed to write `{}`", path.display()))
@@ -101,13 +101,13 @@ pub fn write(path: &Path, contents: &[u8]) -> CargoResult<()> {
 
 pub fn append(path: &Path, contents: &[u8]) -> CargoResult<()> {
     (|| -> CargoResult<()> {
-        let mut f = try!(OpenOptions::new()
+        let mut f = OpenOptions::new()
                             .write(true)
                             .append(true)
                             .create(true)
-                            .open(path));
+                            .open(path)?;
 
-        try!(f.write_all(contents));
+        f.write_all(contents)?;
         Ok(())
     }).chain_error(|| {
         internal(format!("failed to write `{}`", path.display()))
index b66b26ec0a4ee6bb021ffa9f42b6c051916a9d80..ed92321896eec6b982773647c8c6289d5226198a 100644 (file)
@@ -18,10 +18,10 @@ pub struct ProcessBuilder {
 
 impl fmt::Display for ProcessBuilder {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        try!(write!(f, "`{}", self.program.to_string_lossy()));
+        write!(f, "`{}", self.program.to_string_lossy())?;
 
         for arg in self.args.iter() {
-            try!(write!(f, " {}", escape(arg.to_string_lossy())));
+            write!(f, " {}", escape(arg.to_string_lossy()))?;
         }
 
         write!(f, "`")
@@ -74,11 +74,11 @@ impl ProcessBuilder {
 
     pub fn exec(&self) -> Result<(), ProcessError> {
         let mut command = self.build_command();
-        let exit = try!(command.status().map_err(|e| {
+        let exit = command.status().map_err(|e| {
             process_error(&format!("could not execute process `{}`",
                                    self.debug_string()),
                           Some(Box::new(e)), None, None)
-        }));
+        })?;
 
         if exit.success() {
             Ok(())
@@ -108,11 +108,11 @@ impl ProcessBuilder {
     pub fn exec_with_output(&self) -> Result<Output, ProcessError> {
         let mut command = self.build_command();
 
-        let output = try!(command.output().map_err(|e| {
+        let output = command.output().map_err(|e| {
             process_error(&format!("could not execute process `{}`",
                                    self.debug_string()),
                           Some(Box::new(e)), None, None)
-        }));
+        })?;
 
         if output.status.success() {
             Ok(output)
@@ -136,11 +136,11 @@ impl ProcessBuilder {
             .stdin(Stdio::null());
 
         let mut callback_error = None;
-        let status = try!((|| {
-            let mut child = try!(cmd.spawn());
+        let status = (|| {
+            let mut child = cmd.spawn()?;
             let out = child.stdout.take().unwrap();
             let err = child.stderr.take().unwrap();
-            try!(read2(out, err, &mut |is_out, data, eof| {
+            read2(out, err, &mut |is_out, data, eof| {
                 let idx = if eof {
                     data.len()
                 } else {
@@ -164,13 +164,13 @@ impl ProcessBuilder {
                         callback_error = Some(e);
                     }
                 }
-            }));
+            })?;
             child.wait()
         })().map_err(|e| {
             process_error(&format!("could not execute process `{}`",
                                    self.debug_string()),
                           Some(Box::new(e)), None, None)
-        }));
+        })?;
         let output = Output {
             stdout: stdout,
             stderr: stderr,
index 4596b260a13b6d30f183330cc30fb02d5abb8d09..7eac02783f8c76f0465dab2f3286d5fb8c6a9221 100644 (file)
@@ -62,11 +62,11 @@ mod imp {
                     }
                 }
             };
-            if !out_done && try!(handle(out_pipe.read_to_end(&mut out))) {
+            if !out_done && handle(out_pipe.read_to_end(&mut out))? {
                 out_done = true;
             }
             data(true, &mut out, out_done);
-            if !err_done && try!(handle(err_pipe.read_to_end(&mut err))) {
+            if !err_done && handle(err_pipe.read_to_end(&mut err))? {
                 err_done = true;
             }
             data(false, &mut err, err_done);
@@ -116,29 +116,29 @@ mod imp {
         let mut out = Vec::new();
         let mut err = Vec::new();
 
-        let port = try!(CompletionPort::new(1));
-        try!(port.add_handle(0, &out_pipe));
-        try!(port.add_handle(1, &err_pipe));
+        let port = CompletionPort::new(1)?;
+        port.add_handle(0, &out_pipe)?;
+        port.add_handle(1, &err_pipe)?;
 
         unsafe {
             let mut out_pipe = Pipe::new(out_pipe, &mut out);
             let mut err_pipe = Pipe::new(err_pipe, &mut err);
 
-            try!(out_pipe.read());
-            try!(err_pipe.read());
+            out_pipe.read()?;
+            err_pipe.read()?;
 
             let mut status = [CompletionStatus::zero(), CompletionStatus::zero()];
 
             while !out_pipe.done || !err_pipe.done {
-                for status in try!(port.get_many(&mut status, None)) {
+                for status in port.get_many(&mut status, None)? {
                     if status.token() == 0 {
                         out_pipe.complete(status);
                         data(true, out_pipe.dst, out_pipe.done);
-                        try!(out_pipe.read());
+                        out_pipe.read()?;
                     } else {
                         err_pipe.complete(status);
                         data(false, err_pipe.dst, err_pipe.done);
-                        try!(err_pipe.read());
+                        err_pipe.read()?;
                     }
                 }
             }
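
Several hunks above use `?` inside closures: in an `or_else(|e| { .. })` handler and in immediately-invoked closures like `(|| -> CargoResult<_> { .. })()`. In those positions `?` returns from the closure, not from the enclosing function, which is what lets a group of fallible steps be wrapped with a single error message afterwards. A std-only sketch of that pattern, with invented names:

    use std::fs::File;
    use std::io::{self, Read};

    fn read_first_line(path: &str) -> Result<String, String> {
        // ? inside the closure propagates to the closure's io::Result
        // return type; one map_err then adds context to the whole group.
        (|| -> io::Result<String> {
            let mut contents = String::new();
            File::open(path)?.read_to_string(&mut contents)?;
            Ok(contents.lines().next().unwrap_or("").to_string())
        })().map_err(|e| format!("failed to read `{}`: {}", path, e))
    }
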
index 59a6e17e1fe56ec3e2706d39b09cc28485348c15..954f6d88d9599d97b23d156308f9b01fe56e9db0 100644 (file)
@@ -25,20 +25,20 @@ impl Rustc {
 
         let (cap_lints, output) = match first.exec_with_output() {
             Ok(output) => (true, output),
-            Err(..) => (false, try!(cmd.exec_with_output())),
+            Err(..) => (false, cmd.exec_with_output()?),
         };
 
-        let verbose_version = try!(String::from_utf8(output.stdout).map_err(|_| {
+        let verbose_version = String::from_utf8(output.stdout).map_err(|_| {
             internal("rustc -v didn't return utf8 output")
-        }));
+        })?;
 
         let host = {
             let triple = verbose_version.lines().find(|l| {
                 l.starts_with("host: ")
             }).map(|l| &l[6..]);
-            let triple = try!(triple.chain_error(|| {
+            let triple = triple.chain_error(|| {
                 internal("rustc -v didn't have a line for `host:`")
-            }));
+            })?;
             triple.to_string()
         };
 
index 2708abd90b0f7f519ea92bab375710c973d28660..c45ef1707a79e4f3cfb441d13ccbe7caab348bc1 100644 (file)
@@ -112,11 +112,11 @@ pub fn to_manifest(contents: &str,
         Some(path) => path.to_path_buf(),
         None => manifest.clone(),
     };
-    let root = try!(parse(contents, &manifest, config));
+    let root = parse(contents, &manifest, config)?;
     let mut d = toml::Decoder::new(toml::Value::Table(root));
-    let manifest: TomlManifest = try!(Decodable::decode(&mut d).map_err(|e| {
+    let manifest: TomlManifest = Decodable::decode(&mut d).map_err(|e| {
         human(e.to_string())
-    }));
+    })?;
 
     return match manifest.to_real_manifest(source_id, &layout, config) {
         Ok((mut manifest, paths)) => {
@@ -181,7 +181,7 @@ The TOML spec requires newlines after table definitions (e.g. `[a] b = 1` is
 invalid), but this file has a table header which does not have a newline after
 it. A newline needs to be added and this warning will soon become a hard error
 in the future.", file.display());
-        try!(config.shell().warn(&msg));
+        config.shell().warn(&msg)?;
         return Ok(toml)
     }
 
@@ -321,7 +321,7 @@ pub struct TomlVersion {
 
 impl Decodable for TomlVersion {
     fn decode<D: Decoder>(d: &mut D) -> Result<TomlVersion, D::Error> {
-        let s = try!(d.read_str());
+        let s = d.read_str()?;
         match s.to_semver() {
             Ok(s) => Ok(TomlVersion { version: s }),
             Err(e) => Err(d.error(&e)),
@@ -428,15 +428,15 @@ impl TomlManifest {
         let mut warnings = vec![];
 
         let project = self.project.as_ref().or_else(|| self.package.as_ref());
-        let project = try!(project.chain_error(|| {
+        let project = project.chain_error(|| {
             human("no `package` or `project` section found.")
-        }));
+        })?;
 
         if project.name.trim().is_empty() {
             bail!("package name cannot be an empty string.")
         }
 
-        let pkgid = try!(project.to_package_id(source_id));
+        let pkgid = project.to_package_id(source_id)?;
         let metadata = pkgid.generate_metadata();
 
         // If we have no lib at all, use the inferred lib if available
@@ -445,8 +445,8 @@ impl TomlManifest {
 
         let lib = match self.lib {
             Some(ref lib) => {
-                try!(lib.validate_library_name());
-                try!(lib.validate_crate_type());
+                lib.validate_library_name()?;
+                lib.validate_crate_type()?;
                 Some(
                     TomlTarget {
                         name: lib.name.clone().or(Some(project.name.clone())),
@@ -465,7 +465,7 @@ impl TomlManifest {
                 let bin = layout.main();
 
                 for target in bins {
-                    try!(target.validate_binary_name());
+                    target.validate_binary_name()?;
                 }
 
                 bins.iter().map(|t| {
@@ -494,7 +494,7 @@ impl TomlManifest {
         let examples = match self.example {
             Some(ref examples) => {
                 for target in examples {
-                    try!(target.validate_example_name());
+                    target.validate_example_name()?;
                 }
                 examples.clone()
             }
@@ -504,7 +504,7 @@ impl TomlManifest {
         let tests = match self.test {
             Some(ref tests) => {
                 for target in tests {
-                    try!(target.validate_test_name());
+                    target.validate_test_name()?;
                 }
                 tests.clone()
             }
@@ -514,7 +514,7 @@ impl TomlManifest {
         let benches = match self.bench {
             Some(ref benches) => {
                 for target in benches {
-                    try!(target.validate_bench_name());
+                    target.validate_bench_name()?;
                 }
                 benches.clone()
             }
@@ -589,7 +589,7 @@ impl TomlManifest {
                     None => return Ok(())
                 };
                 for (n, v) in dependencies.iter() {
-                    let dep = try!(v.to_dependency(n, cx, kind));
+                    let dep = v.to_dependency(n, cx, kind)?;
                     cx.deps.push(dep);
                 }
 
@@ -597,27 +597,27 @@ impl TomlManifest {
             }
 
             // Collect the deps
-            try!(process_dependencies(&mut cx, self.dependencies.as_ref(),
-                                      None));
-            try!(process_dependencies(&mut cx, self.dev_dependencies.as_ref(),
-                                      Some(Kind::Development)));
-            try!(process_dependencies(&mut cx, self.build_dependencies.as_ref(),
-                                      Some(Kind::Build)));
+            process_dependencies(&mut cx, self.dependencies.as_ref(),
+                                 None)?;
+            process_dependencies(&mut cx, self.dev_dependencies.as_ref(),
+                                 Some(Kind::Development))?;
+            process_dependencies(&mut cx, self.build_dependencies.as_ref(),
+                                 Some(Kind::Build))?;
 
             for (name, platform) in self.target.iter().flat_map(|t| t) {
-                cx.platform = Some(try!(name.parse()));
-                try!(process_dependencies(&mut cx,
+                cx.platform = Some(name.parse()?);
+                process_dependencies(&mut cx,
                                           platform.dependencies.as_ref(),
-                                          None));
-                try!(process_dependencies(&mut cx,
+                                          None)?;
+                process_dependencies(&mut cx,
                                           platform.build_dependencies.as_ref(),
-                                          Some(Kind::Build)));
-                try!(process_dependencies(&mut cx,
+                                          Some(Kind::Build))?;
+                process_dependencies(&mut cx,
                                           platform.dev_dependencies.as_ref(),
-                                          Some(Kind::Development)));
+                                          Some(Kind::Development))?;
             }
 
-            replace = try!(self.replace(&mut cx));
+            replace = self.replace(&mut cx)?;
         }
 
         {
@@ -635,9 +635,9 @@ impl TomlManifest {
         let exclude = project.exclude.clone().unwrap_or(Vec::new());
         let include = project.include.clone().unwrap_or(Vec::new());
 
-        let summary = try!(Summary::new(pkgid, deps,
+        let summary = Summary::new(pkgid, deps,
                                         self.features.clone()
-                                            .unwrap_or(HashMap::new())));
+                                            .unwrap_or(HashMap::new()))?;
         let metadata = ManifestMetadata {
             description: project.description.clone(),
             homepage: project.homepage.clone(),
@@ -716,7 +716,7 @@ impl TomlManifest {
         let mut nested_paths = Vec::new();
         let mut warnings = Vec::new();
         let mut deps = Vec::new();
-        let replace = try!(self.replace(&mut Context {
+        let replace = self.replace(&mut Context {
             pkgid: None,
             deps: &mut deps,
             source_id: source_id,
@@ -725,7 +725,7 @@ impl TomlManifest {
             warnings: &mut warnings,
             platform: None,
             layout: layout,
-        }));
+        })?;
         let profiles = build_profiles(&self.profile);
         let workspace_config = match self.workspace {
             Some(ref config) => {
@@ -742,11 +742,11 @@ impl TomlManifest {
                -> CargoResult<Vec<(PackageIdSpec, Dependency)>> {
         let mut replace = Vec::new();
         for (spec, replacement) in self.replace.iter().flat_map(|x| x) {
-            let mut spec = try!(PackageIdSpec::parse(spec).chain_error(|| {
+            let mut spec = PackageIdSpec::parse(spec).chain_error(|| {
                 human(format!("replacements must specify a valid semver \
                                version to replace, but `{}` does not",
                               spec))
-            }));
+            })?;
             if spec.url().is_none() {
                 spec.set_url(CRATES_IO.parse().unwrap());
             }
@@ -760,13 +760,13 @@ impl TomlManifest {
                        requirement, but found one for `{}`", spec);
             }
 
-            let dep = try!(replacement.to_dependency(spec.name(), cx, None));
+            let dep = replacement.to_dependency(spec.name(), cx, None)?;
             let dep = {
-                let version = try!(spec.version().chain_error(|| {
+                let version = spec.version().chain_error(|| {
                     human(format!("replacements must specify a version \
                                    to replace, but `{}` does not",
                                   spec))
-                }));
+                })?;
                 let req = VersionReq::exact(version);
                 dep.clone_inner().set_version_req(req)
                    .into_dependency()
@@ -866,7 +866,7 @@ impl TomlDependency {
                     .or_else(|| details.tag.clone().map(GitReference::Tag))
                     .or_else(|| details.rev.clone().map(GitReference::Rev))
                     .unwrap_or_else(|| GitReference::Branch("master".to_string()));
-                let loc = try!(git.to_url());
+                let loc = git.to_url()?;
                 SourceId::for_git(&loc, reference)
             },
             (None, Some(path)) => {
@@ -882,21 +882,21 @@ impl TomlDependency {
                 if cx.source_id.is_path() {
                     let path = cx.layout.root.join(path);
                     let path = util::normalize_path(&path);
-                    try!(SourceId::for_path(&path))
+                    SourceId::for_path(&path)?
                 } else {
                     cx.source_id.clone()
                 }
             },
-            (None, None) => try!(SourceId::crates_io(cx.config)),
+            (None, None) => SourceId::crates_io(cx.config)?,
         };
 
         let version = details.version.as_ref().map(|v| &v[..]);
         let mut dep = match cx.pkgid {
             Some(id) => {
-                try!(DependencyInner::parse(name, version, &new_source_id,
-                                            Some((id, cx.config))))
+                DependencyInner::parse(name, version, &new_source_id,
+                                       Some((id, cx.config)))?
             }
-            None => try!(DependencyInner::parse(name, version, &new_source_id, None)),
+            None => DependencyInner::parse(name, version, &new_source_id, None)?,
         };
         dep = dep.set_features(details.features.unwrap_or(Vec::new()))
                  .set_default_features(details.default_features.unwrap_or(true))
index ffd260680a26d9fc44170e9592c8f0cccff2db89..730200316a09334f4d4cc99cda46ace63178321d 100644 (file)
@@ -9,7 +9,7 @@ pub struct GitRepo;
 
 impl GitRepo {
     pub fn init(path: &Path, _: &Path) -> CargoResult<GitRepo> {
-        try!(git2::Repository::init(path));
+        git2::Repository::init(path)?;
         Ok(GitRepo)
     }
     pub fn discover(path: &Path, _: &Path) -> Result<git2::Repository,git2::Error> {
@@ -19,11 +19,11 @@ impl GitRepo {
 
 impl HgRepo {
     pub fn init(path: &Path, cwd: &Path) -> CargoResult<HgRepo> {
-        try!(process("hg").cwd(cwd).arg("init").arg(path).exec());
+        process("hg").cwd(cwd).arg("init").arg(path).exec()?;
         Ok(HgRepo)
     }
     pub fn discover(path: &Path, cwd: &Path) -> CargoResult<HgRepo> {
-        try!(process("hg").cwd(cwd).arg("root").cwd(path).exec_with_output());
+        process("hg").cwd(cwd).arg("root").cwd(path).exec_with_output()?;
         Ok(HgRepo)
     }
 }
index 8ecb932fbdee2208570b741e5be2227d7c9d0ff1..884460df2902f944e325cc22f96d575c257e841b 100644 (file)
@@ -127,35 +127,35 @@ impl Registry {
     }
 
     pub fn add_owners(&mut self, krate: &str, owners: &[&str]) -> Result<()> {
-        let body = try!(json::encode(&OwnersReq { users: owners }));
-        let body = try!(self.put(format!("/crates/{}/owners", krate),
-                                 body.as_bytes()));
-        assert!(try!(json::decode::<R>(&body)).ok);
+        let body = json::encode(&OwnersReq { users: owners })?;
+        let body = self.put(format!("/crates/{}/owners", krate),
+                            body.as_bytes())?;
+        assert!(json::decode::<R>(&body)?.ok);
         Ok(())
     }
 
     pub fn remove_owners(&mut self, krate: &str, owners: &[&str]) -> Result<()> {
-        let body = try!(json::encode(&OwnersReq { users: owners }));
-        let body = try!(self.delete(format!("/crates/{}/owners", krate),
-                                    Some(body.as_bytes())));
-        assert!(try!(json::decode::<R>(&body)).ok);
+        let body = json::encode(&OwnersReq { users: owners })?;
+        let body = self.delete(format!("/crates/{}/owners", krate),
+                               Some(body.as_bytes()))?;
+        assert!(json::decode::<R>(&body)?.ok);
         Ok(())
     }
 
     pub fn list_owners(&mut self, krate: &str) -> Result<Vec<User>> {
-        let body = try!(self.get(format!("/crates/{}/owners", krate)));
-        Ok(try!(json::decode::<Users>(&body)).users)
+        let body = self.get(format!("/crates/{}/owners", krate))?;
+        Ok(json::decode::<Users>(&body)?.users)
     }
 
     pub fn publish(&mut self, krate: &NewCrate, tarball: &File) -> Result<()> {
-        let json = try!(json::encode(krate));
+        let json = json::encode(krate)?;
         // Prepare the body. The format of the upload request is:
         //
         //      <le u32 of json>
         //      <json request> (metadata for the package)
         //      <le u32 of tarball>
         //      <source tarball>
-        let stat = try!(tarball.metadata().map_err(Error::Io));
+        let stat = tarball.metadata().map_err(Error::Io)?;
         let header = {
             let mut w = Vec::new();
             w.extend([
@@ -182,57 +182,57 @@ impl Registry {
             Some(s) => s,
             None => return Err(Error::TokenMissing),
         };
-        try!(self.handle.put(true));
-        try!(self.handle.url(&url));
-        try!(self.handle.in_filesize(size as u64));
+        self.handle.put(true)?;
+        self.handle.url(&url)?;
+        self.handle.in_filesize(size as u64)?;
         let mut headers = List::new();
-        try!(headers.append("Accept: application/json"));
-        try!(headers.append(&format!("Authorization: {}", token)));
-        try!(self.handle.http_headers(headers));
+        headers.append("Accept: application/json")?;
+        headers.append(&format!("Authorization: {}", token))?;
+        self.handle.http_headers(headers)?;
 
-        let _body = try!(handle(&mut self.handle, &mut |buf| {
+        let _body = handle(&mut self.handle, &mut |buf| {
             body.read(buf).unwrap_or(0)
-        }));
+        })?;
         Ok(())
     }
 
     pub fn search(&mut self, query: &str, limit: u8) -> Result<(Vec<Crate>, u32)> {
         let formated_query = percent_encode(query.as_bytes(), QUERY_ENCODE_SET);
-        let body = try!(self.req(
+        let body = self.req(
             format!("/crates?q={}&per_page={}", formated_query, limit),
             None, Auth::Unauthorized
-        ));
+        )?;
 
-        let crates = try!(json::decode::<Crates>(&body));
+        let crates = json::decode::<Crates>(&body)?;
         Ok((crates.crates, crates.meta.total))
     }
 
     pub fn yank(&mut self, krate: &str, version: &str) -> Result<()> {
-        let body = try!(self.delete(format!("/crates/{}/{}/yank", krate, version),
-                                    None));
-        assert!(try!(json::decode::<R>(&body)).ok);
+        let body = self.delete(format!("/crates/{}/{}/yank", krate, version),
+                               None)?;
+        assert!(json::decode::<R>(&body)?.ok);
         Ok(())
     }
 
     pub fn unyank(&mut self, krate: &str, version: &str) -> Result<()> {
-        let body = try!(self.put(format!("/crates/{}/{}/unyank", krate, version),
-                                 &[]));
-        assert!(try!(json::decode::<R>(&body)).ok);
+        let body = self.put(format!("/crates/{}/{}/unyank", krate, version),
+                            &[])?;
+        assert!(json::decode::<R>(&body)?.ok);
         Ok(())
     }
 
     fn put(&mut self, path: String, b: &[u8]) -> Result<String> {
-        try!(self.handle.put(true));
+        self.handle.put(true)?;
         self.req(path, Some(b), Auth::Authorized)
     }
 
     fn get(&mut self, path: String) -> Result<String> {
-        try!(self.handle.get(true));
+        self.handle.get(true)?;
         self.req(path, None, Auth::Authorized)
     }
 
     fn delete(&mut self, path: String, b: Option<&[u8]>) -> Result<String> {
-        try!(self.handle.custom_request("DELETE"));
+        self.handle.custom_request("DELETE")?;
         self.req(path, b, Auth::Authorized)
     }
 
@@ -240,23 +240,23 @@ impl Registry {
            path: String,
            body: Option<&[u8]>,
            authorized: Auth) -> Result<String> {
-        try!(self.handle.url(&format!("{}/api/v1{}", self.host, path)));
+        self.handle.url(&format!("{}/api/v1{}", self.host, path))?;
         let mut headers = List::new();
-        try!(headers.append("Accept: application/json"));
-        try!(headers.append("Content-Type: application/json"));
+        headers.append("Accept: application/json")?;
+        headers.append("Content-Type: application/json")?;
 
         if authorized == Auth::Authorized {
             let token = match self.token.as_ref() {
                 Some(s) => s,
                 None => return Err(Error::TokenMissing),
             };
-            try!(headers.append(&format!("Authorization: {}", token)));
+            headers.append(&format!("Authorization: {}", token))?;
         }
-        try!(self.handle.http_headers(headers));
+        self.handle.http_headers(headers)?;
         match body {
             Some(mut body) => {
-                try!(self.handle.upload(true));
-                try!(self.handle.in_filesize(body.len() as u64));
+                self.handle.upload(true)?;
+                self.handle.in_filesize(body.len() as u64)?;
                 handle(&mut self.handle, &mut |buf| body.read(buf).unwrap_or(0))
             }
             None => handle(&mut self.handle, &mut |_| 0),
@@ -270,19 +270,19 @@ fn handle(handle: &mut Easy,
     let mut body = Vec::new();
     {
         let mut handle = handle.transfer();
-        try!(handle.read_function(|buf| Ok(read(buf))));
-        try!(handle.write_function(|data| {
+        handle.read_function(|buf| Ok(read(buf)))?;
+        handle.write_function(|data| {
             body.extend_from_slice(data);
             Ok(data.len())
-        }));
-        try!(handle.header_function(|data| {
+        })?;
+        handle.header_function(|data| {
             headers.push(String::from_utf8_lossy(data).into_owned());
             true
-        }));
-        try!(handle.perform());
+        })?;
+        handle.perform()?;
     }
 
-    match try!(handle.response_code()) {
+    match handle.response_code()? {
         0 => {} // file upload url sometimes
         200 => {}
         403 => return Err(Error::Unauthorized),
@@ -310,13 +310,13 @@ impl fmt::Display for Error {
             Error::NonUtf8Body => write!(f, "response body was not utf-8"),
             Error::Curl(ref err) => write!(f, "http error: {}", err),
             Error::NotOkResponse(code, ref headers, ref body) => {
-                try!(writeln!(f, "failed to get a 200 OK response, got {}", code));
-                try!(writeln!(f, "headers:"));
+                writeln!(f, "failed to get a 200 OK response, got {}", code)?;
+                writeln!(f, "headers:")?;
                 for header in headers {
-                    try!(writeln!(f, "    {}", header));
+                    writeln!(f, "    {}", header)?;
                 }
-                try!(writeln!(f, "body:"));
-                try!(writeln!(f, "{}", String::from_utf8_lossy(body)));
+                writeln!(f, "body:")?;
+                writeln!(f, "{}", String::from_utf8_lossy(body))?;
                 Ok(())
             }
             Error::Api(ref errs) => {
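
The `?` in the `Display` impl above works because `writeln!` returns `fmt::Result`,
which is also the return type of `fmt`, so each write can propagate its error
directly instead of going through a nested `try!`. A minimal sketch of that
pattern outside cargo (the `Headers` type below is illustrative only, not part
of this change):

    use std::fmt;

    struct Headers(Vec<String>);

    impl fmt::Display for Headers {
        fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
            // `?` propagates the fmt::Error from each write and otherwise
            // continues, exactly as the try! calls it replaces did.
            writeln!(f, "headers:")?;
            for header in &self.0 {
                writeln!(f, "    {}", header)?;
            }
            Ok(())
        }
    }

    fn main() {
        println!("{}", Headers(vec!["Accept: application/json".to_string()]));
    }
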
index ea63261892beb98a76239dc2cc4fc9a744ea4038..6da0aa56cef253bec9185d3838cf9223054262e2 100644 (file)
@@ -315,15 +315,15 @@ impl Execs {
     }
 
     fn match_stdout(&self, actual: &Output) -> ham::MatchResult {
-        try!(self.match_std(self.expect_stdout.as_ref(), &actual.stdout,
-                            "stdout", &actual.stderr, false));
+        self.match_std(self.expect_stdout.as_ref(), &actual.stdout,
+                       "stdout", &actual.stderr, false)?;
         for expect in self.expect_stdout_contains.iter() {
-            try!(self.match_std(Some(expect), &actual.stdout, "stdout",
-                                &actual.stderr, true));
+            self.match_std(Some(expect), &actual.stdout, "stdout",
+                           &actual.stderr, true)?;
         }
         for expect in self.expect_stderr_contains.iter() {
-            try!(self.match_std(Some(expect), &actual.stderr, "stderr",
-                                &actual.stdout, true));
+            self.match_std(Some(expect), &actual.stderr, "stderr",
+                           &actual.stdout, true)?;
         }
 
         if let Some(ref objects) = self.expect_json {
@@ -336,7 +336,7 @@ impl Execs {
                                    objects.len(), lines.len()));
             }
             for (obj, line) in objects.iter().zip(lines) {
-                try!(self.match_json(obj, line));
+                self.match_json(obj, line)?;
             }
         }
         Ok(())
index 9129a1e6e9290e4e62270c03570831e68d99e615..b40c26f840a2b489cfb6575019a362db3d210aea 100644 (file)
@@ -18,9 +18,9 @@ fn resolve<R: Registry>(pkg: PackageId, deps: Vec<Dependency>,
                         -> CargoResult<Vec<PackageId>> {
     let summary = Summary::new(pkg.clone(), deps, HashMap::new()).unwrap();
     let method = Method::Everything;
-    Ok(try!(resolver::resolve(&[(summary, method)],
+    Ok(resolver::resolve(&[(summary, method)],
                               &[],
-                              registry)).iter().map(|p| {
+                              registry)?.iter().map(|p| {
         p.clone()
     }).collect())
 }
index 549070d41573287a8ba4ef1217df28f9812639d3..4b03aa6f6367a6dc2e525c889ccb2d89a3663959 100644 (file)
@@ -93,10 +93,10 @@ fn no_term() {
 
 fn colored_output(string: &str, color: color::Color) -> CargoResult<String> {
     let mut term = TerminfoTerminal::new(Vec::new()).unwrap();
-    try!(term.reset());
-    try!(term.fg(color));
-    try!(write!(&mut term, "{}", string));
-    try!(term.reset());
-    try!(term.flush());
+    term.reset()?;
+    term.fg(color)?;
+    write!(&mut term, "{}", string)?;
+    term.reset()?;
+    term.flush()?;
     Ok(String::from_utf8_lossy(term.get_ref()).to_string())
 }
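
For reference, `try!(expr)` and `expr?` are interchangeable at every call site
touched above: on `Err(e)` both return early from the enclosing function with
`Err(From::from(e))`, and on `Ok(v)` both evaluate to `v`. A standalone sketch
of the conversion, using a hypothetical helper rather than cargo code:

    use std::fs::File;
    use std::io::{self, Read};

    // Old style: let mut f = try!(File::open(path));
    // New style: let mut f = File::open(path)?;
    fn read_file(path: &str) -> io::Result<String> {
        let mut f = File::open(path)?;
        let mut buf = String::new();
        // On Err, `?` converts the error with From and returns it from
        // read_file; on Ok, it yields the contained value.
        f.read_to_string(&mut buf)?;
        Ok(buf)
    }

    fn main() {
        match read_file("Cargo.toml") {
            Ok(contents) => println!("read {} bytes", contents.len()),
            Err(e) => println!("error: {}", e),
        }
    }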