diff --git a/src/bin/bench.rs b/src/bin/bench.rs
index a4591c9112f..9ec67d3d4cd 100644
--- a/src/bin/bench.rs
+++ b/src/bin/bench.rs
@@ -62,7 +62,7 @@ Compilation can be customized with the `bench` profile in the manifest.
";
pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
- let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd()));
+ let root = find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())?;
try!(config.configure_shell(options.flag_verbose,
options.flag_quiet,
&options.flag_color));
@@ -90,7 +90,7 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
},
};
- let err = try!(ops::run_benches(&root, &ops, &options.arg_args));
+ let err = ops::run_benches(&root, &ops, &options.arg_args)?;
match err {
None => Ok(None),
Some(err) => {
diff --git a/src/bin/build.rs b/src/bin/build.rs
index 5a66ca75496..4a050df3e2d 100644
--- a/src/bin/build.rs
+++ b/src/bin/build.rs
@@ -65,7 +65,7 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
options.flag_quiet,
&options.flag_color));
- let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd()));
+ let root = find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())?;
let opts = CompileOptions {
config: config,
@@ -86,6 +86,6 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
target_rustc_args: None,
};
- try!(ops::compile(&root, &opts));
+ ops::compile(&root, &opts)?;
Ok(None)
}
diff --git a/src/bin/cargo.rs b/src/bin/cargo.rs
index 9e4af20cffd..a511b87dc54 100644
--- a/src/bin/cargo.rs
+++ b/src/bin/cargo.rs
@@ -1,3 +1,5 @@
+#![feature(question_mark)]
+
extern crate cargo;
extern crate url;
extern crate env_logger;
@@ -171,7 +173,7 @@ fn execute(flags: Flags, config: &Config) -> CliResult<Option<()>> {
}
each_subcommand!(cmd);
- try!(execute_subcommand(config, &args[1], &args));
+ execute_subcommand(config, &args[1], &args)?;
Ok(None)
}
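Note the `#![feature(question_mark)]` attribute added at the top of src/bin/cargo.rs above (a matching one is added to src/cargo/lib.rs later in this patch): at the time of this change the `?` operator was still unstable, so both crates have to opt into the feature gate on a nightly compiler. The rewrite itself is mechanical, because `?` expands to essentially the same early-return match that `try!` did. A standalone sketch of the equivalence (not code from this patch):

```rust
use std::fs::File;
use std::io;

// Roughly what `try!(expr)` expanded to, and what `expr?` does for Result:
// return early with the error (converted via From) or unwrap the Ok value.
fn open_desugared(path: &str) -> Result<File, io::Error> {
    let f = match File::open(path) {
        Ok(val) => val,
        Err(err) => return Err(From::from(err)),
    };
    Ok(f)
}

// The same function written with `?`, as this patch does throughout Cargo.
fn open_with_question_mark(path: &str) -> Result<File, io::Error> {
    let f = File::open(path)?;
    Ok(f)
}
```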
diff --git a/src/bin/clean.rs b/src/bin/clean.rs
index 5bcb10aeb25..0348aa8e313 100644
--- a/src/bin/clean.rs
+++ b/src/bin/clean.rs
@@ -43,13 +43,13 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
options.flag_quiet,
&options.flag_color));
- let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd()));
+ let root = find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())?;
let opts = ops::CleanOptions {
config: config,
spec: &options.flag_package,
target: options.flag_target.as_ref().map(|s| &s[..]),
release: options.flag_release,
};
- try!(ops::clean(&root, &opts));
+ ops::clean(&root, &opts)?;
Ok(None)
}
diff --git a/src/bin/doc.rs b/src/bin/doc.rs
index 3cb73273809..293c392ae24 100644
--- a/src/bin/doc.rs
+++ b/src/bin/doc.rs
@@ -53,7 +53,7 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
options.flag_quiet,
&options.flag_color));
- let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd()));
+ let root = find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())?;
let doc_opts = ops::DocOptions {
open_result: options.flag_open,
@@ -75,6 +75,6 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
},
};
- try!(ops::doc(&root, &doc_opts));
+ ops::doc(&root, &doc_opts)?;
Ok(None)
}
diff --git a/src/bin/fetch.rs b/src/bin/fetch.rs
index 1f32e874154..bbde6ffa48e 100644
--- a/src/bin/fetch.rs
+++ b/src/bin/fetch.rs
@@ -37,8 +37,8 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
try!(config.configure_shell(options.flag_verbose,
options.flag_quiet,
&options.flag_color));
- let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd()));
- try!(ops::fetch(&root, config));
+ let root = find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())?;
+ ops::fetch(&root, config)?;
Ok(None)
}
diff --git a/src/bin/generate_lockfile.rs b/src/bin/generate_lockfile.rs
index 7e50eff265f..7d5276f1d14 100644
--- a/src/bin/generate_lockfile.rs
+++ b/src/bin/generate_lockfile.rs
@@ -31,8 +31,8 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
try!(config.configure_shell(options.flag_verbose,
options.flag_quiet,
&options.flag_color));
- let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd()));
+ let root = find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())?;
- try!(ops::generate_lockfile(&root, config));
+ ops::generate_lockfile(&root, config)?;
Ok(None)
}
diff --git a/src/bin/init.rs b/src/bin/init.rs
index 8856f8b3599..259301c39e5 100644
--- a/src/bin/init.rs
+++ b/src/bin/init.rs
@@ -48,7 +48,7 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
name: flag_name.as_ref().map(|s| s.as_ref()),
};
- try!(ops::init(opts, config));
+ ops::init(opts, config)?;
Ok(None)
}
diff --git a/src/bin/install.rs b/src/bin/install.rs
index bd2c3191bf3..c472e94c814 100644
--- a/src/bin/install.rs
+++ b/src/bin/install.rs
@@ -104,7 +104,7 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
};
let source = if let Some(url) = options.flag_git {
- let url = try!(url.to_url().map_err(human));
+ let url = url.to_url().map_err(human)?;
let gitref = if let Some(branch) = options.flag_branch {
GitReference::Branch(branch)
} else if let Some(tag) = options.flag_tag {
@@ -116,11 +116,11 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
};
SourceId::for_git(&url, gitref)
} else if let Some(path) = options.flag_path {
- try!(SourceId::for_path(&config.cwd().join(path)))
+ SourceId::for_path(&config.cwd().join(path))?
} else if options.arg_crate == None {
- try!(SourceId::for_path(&config.cwd()))
+ SourceId::for_path(&config.cwd())?
} else {
- try!(SourceId::for_central(config))
+ SourceId::for_central(config)?
};
let krate = options.arg_crate.as_ref().map(|s| &s[..]);
@@ -128,9 +128,9 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
let root = options.flag_root.as_ref().map(|s| &s[..]);
if options.flag_list {
- try!(ops::install_list(root, config));
+ ops::install_list(root, config)?;
} else {
- try!(ops::install(root, krate, &source, vers, &compile_opts));
+ ops::install(root, krate, &source, vers, &compile_opts)?;
}
Ok(None)
}
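Hunks like the `url.to_url().map_err(human)?` line above keep their explicit error conversion even after the switch to `?`. The operator only inserts a `From::from` call on the error value, so when the source error does not convert automatically into the function's error type (Cargo wraps such errors with its `human` helper), the `map_err` closure still has to stay. A self-contained sketch of the pattern, using hypothetical stand-ins for Cargo's error type and `human`:

```rust
use std::fmt;

// Stand-in for Cargo's boxed error type; purely illustrative.
#[derive(Debug)]
struct AppError(String);

// Stand-in for Cargo's `human()` helper: wrap any displayable error.
fn human<E: fmt::Display>(err: E) -> AppError {
    AppError(err.to_string())
}

// There is no `From<ParseIntError> for AppError` impl, so `?` alone would
// not compile here; the explicit map_err does the conversion first.
fn parse_port(s: &str) -> Result<u16, AppError> {
    let port = s.parse::<u16>().map_err(human)?;
    Ok(port)
}

fn main() {
    println!("{:?}", parse_port("8080"));
    println!("{:?}", parse_port("not a number"));
}
```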
diff --git a/src/bin/locate_project.rs b/src/bin/locate_project.rs
index b6c7aa075ee..faa9608d529 100644
--- a/src/bin/locate_project.rs
+++ b/src/bin/locate_project.rs
@@ -24,7 +24,7 @@ pub struct ProjectLocation {
pub fn execute(flags: LocateProjectFlags,
config: &Config) -> CliResult<Option<ProjectLocation>> {
- let root = try!(find_root_manifest_for_wd(flags.flag_manifest_path, config.cwd()));
+ let root = find_root_manifest_for_wd(flags.flag_manifest_path, config.cwd())?;
let string = try!(root.to_str()
.chain_error(|| human("Your project path contains \
diff --git a/src/bin/login.rs b/src/bin/login.rs
index bfb8418e40e..1e91fc8a0c5 100644
--- a/src/bin/login.rs
+++ b/src/bin/login.rs
@@ -37,10 +37,10 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
let token = match options.arg_token.clone() {
Some(token) => token,
None => {
- let src = try!(SourceId::for_central(config));
+ let src = SourceId::for_central(config)?;
let mut src = RegistrySource::new(&src, config);
- try!(src.update());
- let config = try!(src.config());
+ src.update()?;
+ let config = src.config()?;
let host = options.flag_host.clone().unwrap_or(config.api);
println!("please visit {}me and paste the API Token below", host);
let mut line = String::new();
@@ -53,7 +53,7 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
};
let token = token.trim().to_string();
- try!(ops::registry_login(config, token));
+ ops::registry_login(config, token)?;
Ok(None)
}
diff --git a/src/bin/metadata.rs b/src/bin/metadata.rs
index c8375d95c95..de9d3a9129f 100644
--- a/src/bin/metadata.rs
+++ b/src/bin/metadata.rs
@@ -44,7 +44,7 @@ pub fn execute(options: Options, config: &Config) -> CliResult CliResult CliResult > {
name: flag_name.as_ref().map(|s| s.as_ref()),
};
- try!(ops::new(opts, config));
+ ops::new(opts, config)?;
Ok(None)
}
diff --git a/src/bin/owner.rs b/src/bin/owner.rs
index 33f49e51140..3d281e528eb 100644
--- a/src/bin/owner.rs
+++ b/src/bin/owner.rs
@@ -52,7 +52,7 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
to_remove: options.flag_remove,
list: options.flag_list,
};
- try!(ops::modify_owners(config, &opts));
+ ops::modify_owners(config, &opts)?;
Ok(None)
}
diff --git a/src/bin/package.rs b/src/bin/package.rs
index 37dd9a122d4..ca6ee869f45 100644
--- a/src/bin/package.rs
+++ b/src/bin/package.rs
@@ -35,7 +35,7 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
try!(config.configure_shell(options.flag_verbose,
options.flag_quiet,
&options.flag_color));
- let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd()));
+ let root = find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())?;
try!(ops::package(&root, config,
!options.flag_no_verify,
options.flag_list,
diff --git a/src/bin/pkgid.rs b/src/bin/pkgid.rs
index 68e53236f20..1eec2b376d8 100644
--- a/src/bin/pkgid.rs
+++ b/src/bin/pkgid.rs
@@ -50,10 +50,10 @@ pub fn execute(options: Options,
try!(config.configure_shell(options.flag_verbose,
options.flag_quiet,
&options.flag_color));
- let root = try!(find_root_manifest_for_wd(options.flag_manifest_path.clone(), config.cwd()));
+ let root = find_root_manifest_for_wd(options.flag_manifest_path.clone(), config.cwd())?;
let spec = options.arg_spec.as_ref().map(|s| &s[..]);
- let spec = try!(ops::pkgid(&root, spec, config));
+ let spec = ops::pkgid(&root, spec, config)?;
println!("{}", spec);
Ok(None)
}
diff --git a/src/bin/publish.rs b/src/bin/publish.rs
index dac40fae3e2..a3c7cda871b 100644
--- a/src/bin/publish.rs
+++ b/src/bin/publish.rs
@@ -43,7 +43,7 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
..
} = options;
- let root = try!(find_root_manifest_for_wd(flag_manifest_path.clone(), config.cwd()));
- try!(ops::publish(&root, config, token, host, !no_verify));
+ let root = find_root_manifest_for_wd(flag_manifest_path.clone(), config.cwd())?;
+ ops::publish(&root, config, token, host, !no_verify)?;
Ok(None)
}
diff --git a/src/bin/read_manifest.rs b/src/bin/read_manifest.rs
index 1cbb0cff8c4..5171e665886 100644
--- a/src/bin/read_manifest.rs
+++ b/src/bin/read_manifest.rs
@@ -27,10 +27,10 @@ Options:
pub fn execute(options: Options, config: &Config) -> CliResult<Option<Package>> {
debug!("executing; cmd=cargo-read-manifest; args={:?}",
env::args().collect::<Vec<_>>());
- try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..])));
+ config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..]))?;
- let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd()));
+ let root = find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())?;
- let pkg = try!(Package::for_path(&root, config));
+ let pkg = Package::for_path(&root, config)?;
Ok(Some(pkg))
}
diff --git a/src/bin/run.rs b/src/bin/run.rs
index 34f83c66941..695a0e2b118 100644
--- a/src/bin/run.rs
+++ b/src/bin/run.rs
@@ -53,7 +53,7 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
options.flag_quiet,
&options.flag_color));
- let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd()));
+ let root = find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())?;
let (mut examples, mut bins) = (Vec::new(), Vec::new());
if let Some(s) = options.flag_bin {
@@ -85,7 +85,7 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
target_rustc_args: None,
};
- match try!(ops::run(&root, &compile_opts, &options.arg_args)) {
+ match ops::run(&root, &compile_opts, &options.arg_args)? {
None => Ok(None),
Some(err) => {
Err(match err.exit.as_ref().and_then(|e| e.code()) {
diff --git a/src/bin/rustc.rs b/src/bin/rustc.rs
index aae58dfb81e..d2e920f2c7b 100644
--- a/src/bin/rustc.rs
+++ b/src/bin/rustc.rs
@@ -103,7 +103,7 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
target_rustc_args: options.arg_opts.as_ref().map(|a| &a[..]),
};
- try!(ops::compile(&root, &opts));
+ ops::compile(&root, &opts)?;
Ok(None)
}
diff --git a/src/bin/rustdoc.rs b/src/bin/rustdoc.rs
index ed53d769ba8..52ee735414f 100644
--- a/src/bin/rustdoc.rs
+++ b/src/bin/rustdoc.rs
@@ -91,7 +91,7 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
},
};
- try!(ops::doc(&root, &doc_opts));
+ ops::doc(&root, &doc_opts)?;
Ok(None)
}
diff --git a/src/bin/search.rs b/src/bin/search.rs
index 2eae9173c9c..cd7c9f92d9a 100644
--- a/src/bin/search.rs
+++ b/src/bin/search.rs
@@ -40,6 +40,6 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
..
} = options;
- try!(ops::search(&query.join("+"), config, host, cmp::min(100, limit.unwrap_or(10)) as u8));
+ ops::search(&query.join("+"), config, host, cmp::min(100, limit.unwrap_or(10)) as u8)?;
Ok(None)
}
diff --git a/src/bin/test.rs b/src/bin/test.rs
index 65f82490fb3..a2a78a4e2a0 100644
--- a/src/bin/test.rs
+++ b/src/bin/test.rs
@@ -77,7 +77,7 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
try!(config.configure_shell(options.flag_verbose,
options.flag_quiet,
&options.flag_color));
- let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd()));
+ let root = find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())?;
let ops = ops::TestOptions {
no_run: options.flag_no_run,
@@ -102,7 +102,7 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
},
};
- let err = try!(ops::run_tests(&root, &ops, &options.arg_args));
+ let err = ops::run_tests(&root, &ops, &options.arg_args)?;
match err {
None => Ok(None),
Some(err) => {
diff --git a/src/bin/uninstall.rs b/src/bin/uninstall.rs
index ace6a25007e..e50e8bbf1f0 100644
--- a/src/bin/uninstall.rs
+++ b/src/bin/uninstall.rs
@@ -39,7 +39,7 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
&options.flag_color));
let root = options.flag_root.as_ref().map(|s| &s[..]);
- try!(ops::uninstall(root, &options.arg_spec, &options.flag_bin, config));
+ ops::uninstall(root, &options.arg_spec, &options.flag_bin, config)?;
Ok(None)
}
diff --git a/src/bin/update.rs b/src/bin/update.rs
index 77e4c470cc4..f141e0eb78a 100644
--- a/src/bin/update.rs
+++ b/src/bin/update.rs
@@ -57,7 +57,7 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
try!(config.configure_shell(options.flag_verbose,
options.flag_quiet,
&options.flag_color));
- let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd()));
+ let root = find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())?;
let update_opts = ops::UpdateOptions {
aggressive: options.flag_aggressive,
@@ -66,6 +66,6 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
config: config,
};
- try!(ops::update_lockfile(&root, &update_opts));
+ ops::update_lockfile(&root, &update_opts)?;
Ok(None)
}
diff --git a/src/cargo/core/dependency.rs b/src/cargo/core/dependency.rs
index a030de626f9..876e0b0a9e5 100644
--- a/src/cargo/core/dependency.rs
+++ b/src/cargo/core/dependency.rs
@@ -91,7 +91,7 @@ impl DependencyInner {
version: Option<&str>,
source_id: &SourceId) -> CargoResult<DependencyInner> {
let version_req = match version {
- Some(v) => try!(VersionReq::parse(v)),
+ Some(v) => VersionReq::parse(v)?,
None => VersionReq::any()
};
diff --git a/src/cargo/core/package.rs b/src/cargo/core/package.rs
index c8362ffa286..1c6f920ee3b 100644
--- a/src/cargo/core/package.rs
+++ b/src/cargo/core/package.rs
@@ -65,7 +65,7 @@ impl Package {
pub fn for_path(manifest_path: &Path, config: &Config) -> CargoResult<Package> {
let path = manifest_path.parent().unwrap();
- let source_id = try!(SourceId::for_path(path));
+ let source_id = SourceId::for_path(path)?;
let (pkg, _) = try!(ops::read_package(&manifest_path, &source_id,
config));
Ok(pkg)
diff --git a/src/cargo/core/package_id.rs b/src/cargo/core/package_id.rs
index 0d968aeeef0..270249068e6 100644
--- a/src/cargo/core/package_id.rs
+++ b/src/cargo/core/package_id.rs
@@ -36,7 +36,7 @@ impl Encodable for PackageId {
impl Decodable for PackageId {
fn decode<D: Decoder>(d: &mut D) -> Result<PackageId, D::Error> {
- let string: String = try!(Decodable::decode(d));
+ let string: String = Decodable::decode(d)?;
let regex = Regex::new(r"^([^ ]+) ([^ ]+) \(([^\)]+)\)$").unwrap();
let captures = regex.captures(&string).expect("invalid serialized PackageId");
@@ -121,7 +121,7 @@ pub struct Metadata {
impl PackageId {
pub fn new<T: ToSemver>(name: &str, version: T,
sid: &SourceId) -> CargoResult<PackageId> {
- let v = try!(version.to_semver().map_err(PackageIdError::InvalidVersion));
+ let v = version.to_semver().map_err(PackageIdError::InvalidVersion)?;
Ok(PackageId {
inner: Arc::new(PackageIdInner {
name: name.to_string(),
@@ -163,10 +163,10 @@ impl Metadata {
impl fmt::Display for PackageId {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
- try!(write!(f, "{} v{}", self.inner.name, self.inner.version));
+ write!(f, "{} v{}", self.inner.name, self.inner.version)?;
if !self.inner.source_id.is_default_registry() {
- try!(write!(f, " ({})", self.inner.source_id));
+ write!(f, " ({})", self.inner.source_id)?;
}
Ok(())
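The `fmt::Display` hunk just above (and the similar ones in package_id_spec.rs, resolver/mod.rs, shell.rs, and source.rs below) shows that `?` works inside formatting impls with no extra machinery: `write!` against a Formatter returns `fmt::Result`, i.e. `Result<(), fmt::Error>`, so each call can be chained with `?`. A minimal sketch with a made-up type, not Cargo's:

```rust
use std::fmt;

struct PackageRef {
    name: String,
    version: String,
    source: Option<String>,
}

impl fmt::Display for PackageRef {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Every write! returns fmt::Result, so `?` simply propagates fmt::Error.
        write!(f, "{} v{}", self.name, self.version)?;
        if let Some(ref src) = self.source {
            write!(f, " ({})", src)?;
        }
        Ok(())
    }
}

fn main() {
    let p = PackageRef {
        name: "demo".to_string(),
        version: "0.1.0".to_string(),
        source: Some("registry".to_string()),
    };
    println!("{}", p); // demo v0.1.0 (registry)
}
```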
diff --git a/src/cargo/core/package_id_spec.rs b/src/cargo/core/package_id_spec.rs
index 1621ccfc261..d1311762ab7 100644
--- a/src/cargo/core/package_id_spec.rs
+++ b/src/cargo/core/package_id_spec.rs
@@ -31,7 +31,7 @@ impl PackageIdSpec {
let mut parts = spec.splitn(2, ':');
let name = parts.next().unwrap();
let version = match parts.next() {
- Some(version) => Some(try!(Version::parse(version).map_err(human))),
+ Some(version) => Some(Version::parse(version).map_err(human)?),
None => None,
};
for ch in name.chars() {
@@ -82,7 +82,7 @@ impl PackageIdSpec {
let name_or_version = parts.next().unwrap();
match parts.next() {
Some(part) => {
- let version = try!(part.to_semver().map_err(human));
+ let version = part.to_semver().map_err(human)?;
(name_or_version.to_string(), Some(version))
}
None => {
@@ -193,18 +193,18 @@ impl fmt::Display for PackageIdSpec {
try!(write!(f, "{}/{}", url.host().unwrap(),
url.path().unwrap().join("/")));
} else {
- try!(write!(f, "{}", url));
+ write!(f, "{}", url)?;
}
if url.path().unwrap().last().unwrap() != &self.name {
printed_name = true;
- try!(write!(f, "#{}", self.name));
+ write!(f, "#{}", self.name)?;
}
}
- None => { printed_name = true; try!(write!(f, "{}", self.name)) }
+ None => { printed_name = true; write!(f, "{}", self.name)? }
}
match self.version {
Some(ref v) => {
- try!(write!(f, "{}{}", if printed_name {":"} else {"#"}, v));
+ write!(f, "{}{}", if printed_name {":"} else {"#"}, v)?;
}
None => {}
}
diff --git a/src/cargo/core/registry.rs b/src/cargo/core/registry.rs
index c005703860a..12e203c6f30 100644
--- a/src/cargo/core/registry.rs
+++ b/src/cargo/core/registry.rs
@@ -122,13 +122,13 @@ impl<'cfg> PackageRegistry<'cfg> {
}
}
- try!(self.load(namespace, kind));
+ self.load(namespace, kind)?;
Ok(())
}
pub fn add_sources(&mut self, ids: &[SourceId]) -> CargoResult<()> {
for id in ids.iter() {
- try!(self.ensure_loaded(id, Kind::Locked));
+ self.ensure_loaded(id, Kind::Locked)?;
}
Ok(())
}
@@ -162,7 +162,7 @@ impl<'cfg> PackageRegistry<'cfg> {
// Ensure the source has fetched all necessary remote data.
let p = profile::start(format!("updating: {}", source_id));
- try!(source.update());
+ source.update()?;
drop(p);
if kind == Kind::Override {
@@ -183,7 +183,7 @@ impl<'cfg> PackageRegistry<'cfg> {
for s in self.overrides.iter() {
let src = self.sources.get_mut(s).unwrap();
let dep = Dependency::new_override(dep.name(), s);
- ret.extend(try!(src.query(&dep)).into_iter().filter(|s| {
+ ret.extend(src.query(&dep)?.into_iter().filter(|s| {
seen.insert(s.name().to_string())
}));
}
@@ -270,15 +270,15 @@ impl<'cfg> PackageRegistry<'cfg> {
impl<'cfg> Registry for PackageRegistry<'cfg> {
fn query(&mut self, dep: &Dependency) -> CargoResult<Vec<Summary>> {
- let overrides = try!(self.query_overrides(dep));
+ let overrides = self.query_overrides(dep)?;
let ret = if overrides.is_empty() {
// Ensure the requested source_id is loaded
- try!(self.ensure_loaded(dep.source_id(), Kind::Normal));
+ self.ensure_loaded(dep.source_id(), Kind::Normal)?;
let mut ret = Vec::new();
for (id, src) in self.sources.sources_mut() {
if id == dep.source_id() {
- ret.extend(try!(src.query(dep)).into_iter());
+ ret.extend(src.query(dep)?.into_iter());
}
}
ret
diff --git a/src/cargo/core/resolver/encode.rs b/src/cargo/core/resolver/encode.rs
index c2d187c3f09..df081d91b52 100644
--- a/src/cargo/core/resolver/encode.rs
+++ b/src/cargo/core/resolver/encode.rs
@@ -21,7 +21,7 @@ impl EncodableResolve {
pub fn to_resolve(&self, root: &Package, config: &Config)
-> CargoResult<Resolve> {
let mut path_deps = HashMap::new();
- try!(build_path_deps(root, &mut path_deps, config));
+ build_path_deps(root, &mut path_deps, config)?;
let default = root.package_id().source_id();
let mut g = Graph::new();
@@ -76,9 +76,9 @@ impl EncodableResolve {
Ok(())
};
- try!(add_dependencies(&root, &self.root));
+ add_dependencies(&root, &self.root)?;
for (id, pkg) in ids.iter().zip(packages) {
- try!(add_dependencies(id, pkg));
+ add_dependencies(id, pkg)?;
}
}
@@ -113,7 +113,7 @@ fn build_path_deps(root: &Package,
for pkg in deps {
let source_id = pkg.package_id().source_id();
if map.insert(pkg.name().to_string(), source_id.clone()).is_none() {
- try!(build_path_deps(&pkg, map, config));
+ build_path_deps(&pkg, map, config)?;
}
}
@@ -158,7 +158,7 @@ impl Encodable for EncodablePackageId {
impl Decodable for EncodablePackageId {
fn decode<D: Decoder>(d: &mut D) -> Result<EncodablePackageId, D::Error> {
- let string: String = try!(Decodable::decode(d));
+ let string: String = Decodable::decode(d)?;
let regex = Regex::new(r"^([^ ]+) ([^ ]+)(?: \(([^\)]+)\))?$").unwrap();
let captures = regex.captures(&string)
.expect("invalid serialized PackageId");
diff --git a/src/cargo/core/resolver/mod.rs b/src/cargo/core/resolver/mod.rs
index f1f2e100a82..bed43e83d33 100644
--- a/src/cargo/core/resolver/mod.rs
+++ b/src/cargo/core/resolver/mod.rs
@@ -129,10 +129,10 @@ impl Resolve {
impl fmt::Debug for Resolve {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
- try!(write!(fmt, "graph: {:?}\n", self.graph));
- try!(write!(fmt, "\nfeatures: {{\n"));
+ write!(fmt, "graph: {:?}\n", self.graph)?;
+ write!(fmt, "\nfeatures: {{\n")?;
for (pkg, features) in &self.features {
- try!(write!(fmt, " {}: {:?}\n", pkg, features));
+ write!(fmt, " {}: {:?}\n", pkg, features)?;
}
write!(fmt, "}}")
}
@@ -155,8 +155,8 @@ pub fn resolve(summary: &Summary, method: &Method,
activations: HashMap::new(),
};
let _p = profile::start(format!("resolving: {}", summary.package_id()));
- let cx = try!(activate_deps_loop(cx, registry, summary, method));
- try!(check_cycles(&cx));
+ let cx = activate_deps_loop(cx, registry, summary, method)?;
+ check_cycles(&cx)?;
Ok(cx.resolve)
}
@@ -181,7 +181,7 @@ fn activate(cx: &mut Context,
}
trace!("activating {}", parent.package_id());
- let deps = try!(cx.build_deps(registry, &parent, method));
+ let deps = cx.build_deps(registry, &parent, method)?;
Ok(Some(DepsFrame{
parent: parent,
@@ -296,7 +296,7 @@ fn activate_deps_loop(mut cx: Context,
// use (those with more candidates).
let mut backtrack_stack = Vec::new();
let mut remaining_deps = BinaryHeap::new();
- remaining_deps.extend(try!(activate(&mut cx, registry, top, &top_method)));
+ remaining_deps.extend(activate(&mut cx, registry, top, &top_method)?);
// Main resolution loop, this is the workhorse of the resolution algorithm.
//
@@ -555,7 +555,7 @@ fn build_features(s: &Summary, method: &Method)
match *method {
Method::Everything => {
for key in s.features().keys() {
- try!(add_feature(s, key, &mut deps, &mut used, &mut visited));
+ add_feature(s, key, &mut deps, &mut used, &mut visited)?;
}
for dep in s.dependencies().iter().filter(|d| d.is_optional()) {
try!(add_feature(s, dep.name(), &mut deps, &mut used,
@@ -564,7 +564,7 @@ fn build_features(s: &Summary, method: &Method)
}
Method::Required { features: requested_features, .. } => {
for feat in requested_features.iter() {
- try!(add_feature(s, feat, &mut deps, &mut used, &mut visited));
+ add_feature(s, feat, &mut deps, &mut used, &mut visited)?;
}
}
}
@@ -611,7 +611,7 @@ fn build_features(s: &Summary, method: &Method)
match s.features().get(feat) {
Some(recursive) => {
for f in recursive {
- try!(add_feature(s, f, deps, used, visited));
+ add_feature(s, f, deps, used, visited)?;
}
}
None => {
@@ -665,12 +665,12 @@ impl Context {
// First, figure out our set of dependencies based on the requsted set
// of features. This also calculates what features we're going to enable
// for our own dependencies.
- let deps = try!(self.resolve_features(parent, method));
+ let deps = self.resolve_features(parent, method)?;
// Next, transform all dependencies into a list of possible candidates
// which can satisfy that dependency.
let mut deps = try!(deps.into_iter().map(|(dep, features)| {
- let mut candidates = try!(registry.query(&dep));
+ let mut candidates = registry.query(&dep)?;
// When we attempt versions for a package, we'll want to start at
// the maximum version and work our way down.
candidates.sort_by(|a, b| {
@@ -793,7 +793,7 @@ fn check_cycles(cx: &Context) -> CargoResult<()> {
});
let mut empty = HashSet::new();
let visited = if is_transitive {&mut *visited} else {&mut empty};
- try!(visit(resolve, dep, summaries, visited, checked));
+ visit(resolve, dep, summaries, visited, checked)?;
}
}
diff --git a/src/cargo/core/shell.rs b/src/cargo/core/shell.rs
index 57876c97188..bd0852c61fc 100644
--- a/src/cargo/core/shell.rs
+++ b/src/cargo/core/shell.rs
@@ -171,11 +171,11 @@ impl Shell {
}
pub fn say<T: ToString>(&mut self, message: T, color: Color) -> CargoResult<()> {
- try!(self.reset());
- if color != BLACK { try!(self.fg(color)); }
- try!(write!(self, "{}\n", message.to_string()));
- try!(self.reset());
- try!(self.flush());
+ self.reset()?;
+ if color != BLACK { self.fg(color)?; }
+ write!(self, "{}\n", message.to_string())?;
+ self.reset()?;
+ self.flush()?;
Ok(())
}
@@ -187,17 +187,17 @@ impl Shell {
-> CargoResult<()>
where T: fmt::Display, U: fmt::Display
{
- try!(self.reset());
- if color != BLACK { try!(self.fg(color)); }
- if self.supports_attr(Attr::Bold) { try!(self.attr(Attr::Bold)); }
+ self.reset()?;
+ if color != BLACK { self.fg(color)?; }
+ if self.supports_attr(Attr::Bold) { self.attr(Attr::Bold)?; }
if justified {
- try!(write!(self, "{:>12}", status.to_string()));
+ write!(self, "{:>12}", status.to_string())?;
} else {
- try!(write!(self, "{}", status));
+ write!(self, "{}", status)?;
}
- try!(self.reset());
- try!(write!(self, " {}\n", message));
- try!(self.flush());
+ self.reset()?;
+ write!(self, " {}\n", message)?;
+ self.flush()?;
Ok(())
}
@@ -205,7 +205,7 @@ impl Shell {
let colored = self.colored();
match self.terminal {
- Colored(ref mut c) if colored => try!(c.fg(color)),
+ Colored(ref mut c) if colored => c.fg(color)?,
_ => return Ok(false),
}
Ok(true)
@@ -215,7 +215,7 @@ impl Shell {
let colored = self.colored();
match self.terminal {
- Colored(ref mut c) if colored => try!(c.attr(attr)),
+ Colored(ref mut c) if colored => c.attr(attr)?,
_ => return Ok(false)
}
Ok(true)
@@ -234,7 +234,7 @@ impl Shell {
let colored = self.colored();
match self.terminal {
- Colored(ref mut c) if colored => try!(c.reset()),
+ Colored(ref mut c) if colored => c.reset()?,
_ => ()
}
Ok(())
diff --git a/src/cargo/core/source.rs b/src/cargo/core/source.rs
index c1b6dd3156a..04619089029 100644
--- a/src/cargo/core/source.rs
+++ b/src/cargo/core/source.rs
@@ -156,7 +156,7 @@ impl SourceId {
// Pass absolute path
pub fn for_path(path: &Path) -> CargoResult<SourceId> {
- let url = try!(path.to_url().map_err(human));
+ let url = path.to_url().map_err(human)?;
Ok(SourceId::new(Kind::Path, url))
}
@@ -173,7 +173,7 @@ impl SourceId {
/// This is the main cargo registry by default, but it can be overridden in
/// a `.cargo/config`.
pub fn for_central(config: &Config) -> CargoResult<SourceId> {
- Ok(SourceId::for_registry(&try!(RegistrySource::url(config))))
+ Ok(SourceId::for_registry(&RegistrySource::url(config)?))
}
pub fn url(&self) -> &Url {
@@ -281,11 +281,11 @@ impl fmt::Display for SourceId {
}
SourceIdInner { kind: Kind::Git(ref reference), ref url,
ref precise, .. } => {
- try!(write!(f, "{}{}", url, url_ref(reference)));
+ write!(f, "{}{}", url, url_ref(reference))?;
if let Some(ref s) = *precise {
let len = cmp::min(s.len(), 8);
- try!(write!(f, "#{}", &s[..len]));
+ write!(f, "#{}", &s[..len])?;
}
Ok(())
}
diff --git a/src/cargo/lib.rs b/src/cargo/lib.rs
index 2d5db07fa11..1d73162b134 100644
--- a/src/cargo/lib.rs
+++ b/src/cargo/lib.rs
@@ -1,5 +1,6 @@
#![deny(unused)]
#![cfg_attr(test, deny(warnings))]
+#![feature(question_mark)]
#[cfg(test)] extern crate hamcrest;
#[macro_use] extern crate log;
@@ -69,8 +70,8 @@ pub fn call_main(
options_first: bool) -> CliResult<Option<V>>
where V: Encodable, T: Decodable, U: Decodable
{
- let flags = try!(flags_from_args::<T>(usage, args, options_first));
- let json = try!(json_from_stdin::<U>());
+ let flags = flags_from_args::<T>(usage, args, options_first)?;
+ let json = json_from_stdin::<U>()?;
exec(flags, json, shell)
}
@@ -94,7 +95,7 @@ pub fn call_main_without_stdin(
options_first: bool) -> CliResult<Option<V>>
where V: Encodable, T: Decodable
{
- let flags = try!(flags_from_args::<T>(usage, args, options_first));
+ let flags = flags_from_args::<T>(usage, args, options_first)?;
exec(flags, shell)
}
@@ -104,7 +105,7 @@ fn process(mut callback: F)
{
let mut config = None;
let result = (|| {
- config = Some(try!(Config::default()));
+ config = Some(Config::default()?);
let args: Vec<_> = try!(env::args_os().map(|s| {
s.into_string().map_err(|s| {
human(format!("invalid unicode in argument: {:?}", s))
diff --git a/src/cargo/ops/cargo_clean.rs b/src/cargo/ops/cargo_clean.rs
index c4918f9c352..e5e96adcf51 100644
--- a/src/cargo/ops/cargo_clean.rs
+++ b/src/cargo/ops/cargo_clean.rs
@@ -16,7 +16,7 @@ pub struct CleanOptions<'a> {
/// Cleans the project from build artifacts.
pub fn clean(manifest_path: &Path, opts: &CleanOptions) -> CargoResult<()> {
- let root = try!(Package::for_path(manifest_path, opts.config));
+ let root = Package::for_path(manifest_path, opts.config)?;
let target_dir = opts.config.target_dir(&root);
// If we have a spec, then we need to delete some packages, otherwise, just
@@ -25,7 +25,7 @@ pub fn clean(manifest_path: &Path, opts: &CleanOptions) -> CargoResult<()> {
return rm_rf(&target_dir);
}
- let (resolve, packages) = try!(ops::fetch(manifest_path, opts.config));
+ let (resolve, packages) = ops::fetch(manifest_path, opts.config)?;
let dest = if opts.release {"release"} else {"debug"};
let host_layout = Layout::new(opts.config, &root, None, dest);
@@ -41,15 +41,15 @@ pub fn clean(manifest_path: &Path, opts: &CleanOptions) -> CargoResult<()> {
// resolve package specs and remove the corresponding packages
for spec in opts.spec {
// Translate the spec to a Package
- let pkgid = try!(resolve.query(spec));
- let pkg = try!(packages.get(&pkgid));
+ let pkgid = resolve.query(spec)?;
+ let pkg = packages.get(&pkgid)?;
// And finally, clean everything out!
for target in pkg.targets() {
for kind in [Kind::Host, Kind::Target].iter() {
let layout = cx.layout(&pkg, *kind);
- try!(rm_rf(&layout.proxy().fingerprint(&pkg)));
- try!(rm_rf(&layout.build(&pkg)));
+ rm_rf(&layout.proxy().fingerprint(&pkg))?;
+ rm_rf(&layout.build(&pkg))?;
let Profiles {
ref release, ref dev, ref test, ref bench, ref doc,
ref custom_build,
@@ -62,8 +62,8 @@ pub fn clean(manifest_path: &Path, opts: &CleanOptions) -> CargoResult<()> {
kind: *kind,
};
let root = cx.out_dir(&unit);
- for filename in try!(cx.target_filenames(&unit)).iter() {
- try!(rm_rf(&root.join(&filename)));
+ for filename in cx.target_filenames(&unit)?.iter() {
+ rm_rf(&root.join(&filename))?;
}
}
}
diff --git a/src/cargo/ops/cargo_compile.rs b/src/cargo/ops/cargo_compile.rs
index fa0ff05818b..3bccbf2b671 100644
--- a/src/cargo/ops/cargo_compile.rs
+++ b/src/cargo/ops/cargo_compile.rs
@@ -89,11 +89,11 @@ pub fn compile<'a>(manifest_path: &Path,
-> CargoResult<ops::Compilation<'a>> {
debug!("compile; manifest-path={}", manifest_path.display());
- let package = try!(Package::for_path(manifest_path, options.config));
+ let package = Package::for_path(manifest_path, options.config)?;
debug!("loaded package; package={}", package);
for key in package.manifest().warnings().iter() {
- try!(options.config.shell().warn(key))
+ options.config.shell().warn(key)?
}
compile_pkg(&package, None, options)
}
@@ -113,14 +113,14 @@ pub fn resolve_dependencies<'a>(root_package: &Package,
// First, resolve the root_package's *listed* dependencies, as well as
// downloading and updating all remotes and such.
- let resolve = try!(ops::resolve_pkg(&mut registry, root_package, config));
+ let resolve = ops::resolve_pkg(&mut registry, root_package, config)?;
// Second, resolve with precisely what we're doing. Filter out
// transitive dependencies if necessary, specify features, handle
// overrides, etc.
let _p = profile::start("resolving w/ overrides...");
- try!(add_overrides(&mut registry, root_package.root(), config));
+ add_overrides(&mut registry, root_package.root(), config)?;
let method = Method::Required{
dev_deps: true, // TODO: remove this option?
@@ -237,7 +237,7 @@ pub fn compile_pkg<'a>(root_package: &Package,
let mut ret = {
let _p = profile::start("compiling");
- let mut build_config = try!(scrape_build_config(config, jobs, target));
+ let mut build_config = scrape_build_config(config, jobs, target)?;
build_config.exec_engine = exec_engine.clone();
build_config.release = release;
if let CompileMode::Doc { deps } = mode {
@@ -370,10 +370,10 @@ fn generate_targets<'a>(pkg: &'a Package,
}
Ok(())
};
- try!(find(bins, "bin", TargetKind::Bin, profile));
- try!(find(examples, "example", TargetKind::Example, build));
- try!(find(tests, "test", TargetKind::Test, test));
- try!(find(benches, "bench", TargetKind::Bench, &profiles.bench));
+ find(bins, "bin", TargetKind::Bin, profile)?;
+ find(examples, "example", TargetKind::Example, build)?;
+ find(tests, "test", TargetKind::Test, test)?;
+ find(benches, "bench", TargetKind::Bench, &profiles.bench)?;
}
Ok(targets)
}
@@ -385,7 +385,7 @@ fn generate_targets<'a>(pkg: &'a Package,
fn add_overrides<'a>(registry: &mut PackageRegistry<'a>,
cur_path: &Path,
config: &'a Config) -> CargoResult<()> {
- let paths = match try!(config.get_list("paths")) {
+ let paths = match config.get_list("paths")? {
Some(list) => list,
None => return Ok(())
};
@@ -401,7 +401,7 @@ fn add_overrides<'a>(registry: &mut PackageRegistry<'a>,
});
for (path, definition) in paths {
- let id = try!(SourceId::for_path(&path));
+ let id = SourceId::for_path(&path)?;
let mut source = PathSource::new_recursive(&path, &id, config);
try!(source.update().chain_error(|| {
human(format!("failed to update path override `{}` \
@@ -425,7 +425,7 @@ fn scrape_build_config(config: &Config,
jobs: Option<u32>,
target: Option<String>)
-> CargoResult<ops::BuildConfig> {
- let cfg_jobs = match try!(config.get_i64("build.jobs")) {
+ let cfg_jobs = match config.get_i64("build.jobs")? {
Some(v) => {
if v.val <= 0 {
bail!("build.jobs must be positive, but found {} in {}",
@@ -440,16 +440,16 @@ fn scrape_build_config(config: &Config,
None => None,
};
let jobs = jobs.or(cfg_jobs).unwrap_or(::num_cpus::get() as u32);
- let cfg_target = try!(config.get_string("build.target")).map(|s| s.val);
+ let cfg_target = config.get_string("build.target")?.map(|s| s.val);
let target = target.or(cfg_target);
let mut base = ops::BuildConfig {
jobs: jobs,
requested_target: target.clone(),
..Default::default()
};
- base.host = try!(scrape_target_config(config, &config.rustc_info().host));
+ base.host = scrape_target_config(config, &config.rustc_info().host)?;
base.target = match target.as_ref() {
- Some(triple) => try!(scrape_target_config(config, &triple)),
+ Some(triple) => scrape_target_config(config, &triple)?,
None => base.host.clone(),
};
Ok(base)
@@ -460,11 +460,11 @@ fn scrape_target_config(config: &Config, triple: &str)
let key = format!("target.{}", triple);
let mut ret = ops::TargetConfig {
- ar: try!(config.get_path(&format!("{}.ar", key))).map(|v| v.val),
- linker: try!(config.get_path(&format!("{}.linker", key))).map(|v| v.val),
+ ar: config.get_path(&format!("{}.ar", key))?.map(|v| v.val),
+ linker: config.get_path(&format!("{}.linker", key))?.map(|v| v.val),
overrides: HashMap::new(),
};
- let table = match try!(config.get_table(&key)) {
+ let table = match config.get_table(&key)? {
Some(table) => table.val,
None => return Ok(ret),
};
@@ -481,12 +481,12 @@ fn scrape_target_config(config: &Config, triple: &str)
rerun_if_changed: Vec::new(),
};
let key = format!("{}.{}", key, lib_name);
- let table = try!(config.get_table(&key)).unwrap().val;
+ let table = config.get_table(&key)?.unwrap().val;
for (k, _) in table.into_iter() {
let key = format!("{}.{}", key, k);
match &k[..] {
"rustc-flags" => {
- let flags = try!(config.get_string(&key)).unwrap();
+ let flags = config.get_string(&key)?.unwrap();
let whence = format!("in `{}` (in {})", key,
flags.definition);
let (paths, links) = try!(
@@ -496,22 +496,22 @@ fn scrape_target_config(config: &Config, triple: &str)
output.library_links.extend(links.into_iter());
}
"rustc-link-lib" => {
- let list = try!(config.get_list(&key)).unwrap();
+ let list = config.get_list(&key)?.unwrap();
output.library_links.extend(list.val.into_iter()
.map(|v| v.0));
}
"rustc-link-search" => {
- let list = try!(config.get_list(&key)).unwrap();
+ let list = config.get_list(&key)?.unwrap();
output.library_paths.extend(list.val.into_iter().map(|v| {
PathBuf::from(&v.0)
}));
}
"rustc-cfg" => {
- let list = try!(config.get_list(&key)).unwrap();
+ let list = config.get_list(&key)?.unwrap();
output.cfgs.extend(list.val.into_iter().map(|v| v.0));
}
_ => {
- let val = try!(config.get_string(&key)).unwrap();
+ let val = config.get_string(&key)?.unwrap();
output.metadata.push((k, val.val));
}
}
diff --git a/src/cargo/ops/cargo_doc.rs b/src/cargo/ops/cargo_doc.rs
index a818ec80e1d..fbba07c1c6f 100644
--- a/src/cargo/ops/cargo_doc.rs
+++ b/src/cargo/ops/cargo_doc.rs
@@ -14,7 +14,7 @@ pub struct DocOptions<'a> {
pub fn doc(manifest_path: &Path,
options: &DocOptions) -> CargoResult<()> {
- let package = try!(Package::for_path(manifest_path, options.compile_opts.config));
+ let package = Package::for_path(manifest_path, options.compile_opts.config)?;
let mut lib_names = HashSet::new();
let mut bin_names = HashSet::new();
@@ -35,13 +35,13 @@ pub fn doc(manifest_path: &Path,
}
}
- try!(ops::compile(manifest_path, &options.compile_opts));
+ ops::compile(manifest_path, &options.compile_opts)?;
if options.open_result {
let name = if options.compile_opts.spec.len() > 1 {
bail!("Passing multiple packages and `open` is not supported")
} else if options.compile_opts.spec.len() == 1 {
- try!(PackageIdSpec::parse(&options.compile_opts.spec[0]))
+ PackageIdSpec::parse(&options.compile_opts.spec[0])?
.name().replace("-", "_")
} else {
match lib_names.iter().chain(bin_names.iter()).nth(0) {
@@ -55,12 +55,12 @@ pub fn doc(manifest_path: &Path,
if fs::metadata(&path).is_ok() {
let mut shell = options.compile_opts.config.shell();
match open_docs(&path) {
- Ok(m) => try!(shell.status("Launching", m)),
+ Ok(m) => shell.status("Launching", m)?,
Err(e) => {
try!(shell.warn(
"warning: could not determine a browser to open docs with, tried:"));
for method in e {
- try!(shell.warn(format!("\t{}", method)));
+ shell.warn(format!("\t{}", method))?;
}
}
}
diff --git a/src/cargo/ops/cargo_fetch.rs b/src/cargo/ops/cargo_fetch.rs
index e8f401d5333..756f3ca86c1 100644
--- a/src/cargo/ops/cargo_fetch.rs
+++ b/src/cargo/ops/cargo_fetch.rs
@@ -9,12 +9,12 @@ use util::{CargoResult, Config};
pub fn fetch<'a>(manifest_path: &Path,
config: &'a Config)
-> CargoResult<(Resolve, PackageSet<'a>)> {
- let package = try!(Package::for_path(manifest_path, config));
+ let package = Package::for_path(manifest_path, config)?;
let mut registry = PackageRegistry::new(config);
- let resolve = try!(ops::resolve_pkg(&mut registry, &package, config));
+ let resolve = ops::resolve_pkg(&mut registry, &package, config)?;
let packages = get_resolved_packages(&resolve, registry);
for id in resolve.iter() {
- try!(packages.get(id));
+ packages.get(id)?;
}
Ok((resolve, packages))
}
diff --git a/src/cargo/ops/cargo_generate_lockfile.rs b/src/cargo/ops/cargo_generate_lockfile.rs
index 1b8bb0087f0..d7505bace43 100644
--- a/src/cargo/ops/cargo_generate_lockfile.rs
+++ b/src/cargo/ops/cargo_generate_lockfile.rs
@@ -18,18 +18,18 @@ pub struct UpdateOptions<'a> {
pub fn generate_lockfile(manifest_path: &Path, config: &Config)
-> CargoResult<()> {
- let package = try!(Package::for_path(manifest_path, config));
+ let package = Package::for_path(manifest_path, config)?;
let mut registry = PackageRegistry::new(config);
let resolve = try!(ops::resolve_with_previous(&mut registry, &package,
Method::Everything,
None, None));
- try!(ops::write_pkg_lockfile(&package, &resolve));
+ ops::write_pkg_lockfile(&package, &resolve)?;
Ok(())
}
pub fn update_lockfile(manifest_path: &Path,
opts: &UpdateOptions) -> CargoResult<()> {
- let package = try!(Package::for_path(manifest_path, opts.config));
+ let package = Package::for_path(manifest_path, opts.config)?;
let previous_resolve = match try!(ops::load_pkg_lockfile(&package,
opts.config)) {
@@ -49,7 +49,7 @@ pub fn update_lockfile(manifest_path: &Path,
} else {
let mut sources = Vec::new();
for name in opts.to_update {
- let dep = try!(previous_resolve.query(name));
+ let dep = previous_resolve.query(name)?;
if opts.aggressive {
fill_with_deps(&previous_resolve, dep, &mut to_avoid,
&mut HashSet::new());
@@ -73,7 +73,7 @@ pub fn update_lockfile(manifest_path: &Path,
});
}
}
- try!(registry.add_sources(&sources));
+ registry.add_sources(&sources)?;
}
let resolve = try!(ops::resolve_with_previous(&mut registry,
@@ -94,18 +94,18 @@ pub fn update_lockfile(manifest_path: &Path,
} else {
format!("{} -> v{}", removed[0], added[0].version())
};
- try!(print_change("Updating", msg));
+ print_change("Updating", msg)?;
} else {
for package in removed.iter() {
- try!(print_change("Removing", format!("{}", package)));
+ print_change("Removing", format!("{}", package))?;
}
for package in added.iter() {
- try!(print_change("Adding", format!("{}", package)));
+ print_change("Adding", format!("{}", package))?;
}
}
}
- try!(ops::write_pkg_lockfile(&package, &resolve));
+ ops::write_pkg_lockfile(&package, &resolve)?;
return Ok(());
fn fill_with_deps<'a>(resolve: &'a Resolve, dep: &'a PackageId,
diff --git a/src/cargo/ops/cargo_install.rs b/src/cargo/ops/cargo_install.rs
index dfb018e6ff8..394189ee8a5 100644
--- a/src/cargo/ops/cargo_install.rs
+++ b/src/cargo/ops/cargo_install.rs
@@ -45,7 +45,7 @@ pub fn install(root: Option<&str>,
vers: Option<&str>,
opts: &ops::CompileOptions) -> CargoResult<()> {
let config = opts.config;
- let root = try!(resolve_root(root, config));
+ let root = resolve_root(root, config)?;
let (pkg, source) = if source_id.is_git() {
try!(select_pkg(GitSource::new(source_id, config), source_id,
krate, vers, &mut |git| git.read_packages()))
@@ -73,10 +73,10 @@ pub fn install(root: Option<&str>,
// We have to check this again afterwards, but may as well avoid building
// anything if we're gonna throw it away anyway.
{
- let metadata = try!(metadata(config, &root));
- let list = try!(read_crate_list(metadata.file()));
+ let metadata = metadata(config, &root)?;
+ let list = read_crate_list(metadata.file())?;
let dst = metadata.parent().join("bin");
- try!(check_overwrites(&dst, &pkg, &opts.filter, &list));
+ check_overwrites(&dst, &pkg, &opts.filter, &list)?;
}
let target_dir = if source_id.is_path() {
@@ -90,16 +90,16 @@ pub fn install(root: Option<&str>,
found at `{}`", pkg, target_dir.display()))
}));
- let metadata = try!(metadata(config, &root));
- let mut list = try!(read_crate_list(metadata.file()));
+ let metadata = metadata(config, &root)?;
+ let mut list = read_crate_list(metadata.file())?;
let dst = metadata.parent().join("bin");
- try!(check_overwrites(&dst, &pkg, &opts.filter, &list));
+ check_overwrites(&dst, &pkg, &opts.filter, &list)?;
let mut t = Transaction { bins: Vec::new() };
- try!(fs::create_dir_all(&dst));
+ fs::create_dir_all(&dst)?;
for bin in compile.binaries.iter() {
let dst = dst.join(bin.file_name().unwrap());
- try!(config.shell().status("Installing", dst.display()));
+ config.shell().status("Installing", dst.display())?;
try!(fs::copy(&bin, &dst).chain_error(|| {
human(format!("failed to copy `{}` to `{}`", bin.display(),
dst.display()))
@@ -108,7 +108,7 @@ pub fn install(root: Option<&str>,
}
if !source_id.is_path() {
- try!(fs::remove_dir_all(&target_dir));
+ fs::remove_dir_all(&target_dir)?;
}
list.v1.entry(pkg.package_id().clone()).or_insert_with(|| {
@@ -116,7 +116,7 @@ pub fn install(root: Option<&str>,
}).extend(t.bins.iter().map(|t| {
t.file_name().unwrap().to_string_lossy().into_owned()
}));
- try!(write_crate_list(metadata.file(), list));
+ write_crate_list(metadata.file(), list)?;
t.bins.truncate(0);
@@ -143,14 +143,14 @@ fn select_pkg<'a, T>(mut source: T,
-> CargoResult<(Package, Box<Source + 'a>)>
where T: Source + 'a
{
- try!(source.update());
+ source.update()?;
match name {
Some(name) => {
- let dep = try!(Dependency::parse(name, vers, source_id));
- let deps = try!(source.query(&dep));
+ let dep = Dependency::parse(name, vers, source_id)?;
+ let deps = source.query(&dep)?;
match deps.iter().map(|p| p.package_id()).max() {
Some(pkgid) => {
- let pkg = try!(source.download(pkgid));
+ let pkg = source.download(pkgid)?;
Ok((pkg, Box::new(source)))
}
None => {
@@ -162,17 +162,17 @@ fn select_pkg<'a, T>(mut source: T,
}
}
None => {
- let candidates = try!(list_all(&mut source));
+ let candidates = list_all(&mut source)?;
let binaries = candidates.iter().filter(|cand| {
cand.targets().iter().filter(|t| t.is_bin()).count() > 0
});
let examples = candidates.iter().filter(|cand| {
cand.targets().iter().filter(|t| t.is_example()).count() > 0
});
- let pkg = match try!(one(binaries, |v| multi_err("binaries", v))) {
+ let pkg = match one(binaries, |v| multi_err("binaries", v))? {
Some(p) => p,
None => {
- match try!(one(examples, |v| multi_err("examples", v))) {
+ match one(examples, |v| multi_err("examples", v))? {
Some(p) => p,
None => bail!("no packages found with binaries or \
examples"),
@@ -231,12 +231,12 @@ fn check_overwrites(dst: &Path,
}
for target in pkg.targets().iter().filter(|t| t.is_bin()) {
- try!(check(target.name()));
+ check(target.name())?;
}
}
CompileFilter::Only { bins, examples, .. } => {
for bin in bins.iter().chain(examples) {
- try!(check(bin));
+ check(bin)?;
}
}
}
@@ -246,7 +246,7 @@ fn check_overwrites(dst: &Path,
fn read_crate_list(mut file: &File) -> CargoResult<CrateListingV1> {
(|| -> CargoResult<_> {
let mut contents = String::new();
- try!(file.read_to_string(&mut contents));
+ file.read_to_string(&mut contents)?;
let listing = try!(toml::decode_str(&contents).chain_error(|| {
internal("invalid TOML found for metadata")
}));
@@ -263,10 +263,10 @@ fn read_crate_list(mut file: &File) -> CargoResult<CrateListingV1> {
fn write_crate_list(mut file: &File, listing: CrateListingV1) -> CargoResult<()> {
(|| -> CargoResult<_> {
- try!(file.seek(SeekFrom::Start(0)));
- try!(file.set_len(0));
+ file.seek(SeekFrom::Start(0))?;
+ file.set_len(0)?;
let data = toml::encode_str::<CrateListing>(&CrateListing::V1(listing));
- try!(file.write_all(data.as_bytes()));
+ file.write_all(data.as_bytes())?;
Ok(())
}).chain_error(|| {
human("failed to write crate metadata")
@@ -274,15 +274,15 @@ fn write_crate_list(mut file: &File, listing: CrateListingV1) -> CargoResult<()>
}
pub fn install_list(dst: Option<&str>, config: &Config) -> CargoResult<()> {
- let dst = try!(resolve_root(dst, config));
- let dst = try!(metadata(config, &dst));
- let list = try!(read_crate_list(dst.file()));
+ let dst = resolve_root(dst, config)?;
+ let dst = metadata(config, &dst)?;
+ let list = read_crate_list(dst.file())?;
let mut shell = config.shell();
let out = shell.out();
for (k, v) in list.v1.iter() {
- try!(writeln!(out, "{}:", k));
+ writeln!(out, "{}:", k)?;
for bin in v {
- try!(writeln!(out, " {}", bin));
+ writeln!(out, " {}", bin)?;
}
}
Ok(())
@@ -292,12 +292,12 @@ pub fn uninstall(root: Option<&str>,
spec: &str,
bins: &[String],
config: &Config) -> CargoResult<()> {
- let root = try!(resolve_root(root, config));
- let crate_metadata = try!(metadata(config, &root));
- let mut metadata = try!(read_crate_list(crate_metadata.file()));
+ let root = resolve_root(root, config)?;
+ let crate_metadata = metadata(config, &root)?;
+ let mut metadata = read_crate_list(crate_metadata.file())?;
let mut to_remove = Vec::new();
{
- let result = try!(PackageIdSpec::query_str(spec, metadata.v1.keys()))
+ let result = PackageIdSpec::query_str(spec, metadata.v1.keys())?
.clone();
let mut installed = match metadata.v1.entry(result.clone()) {
Entry::Occupied(e) => e,
@@ -339,10 +339,10 @@ pub fn uninstall(root: Option<&str>,
installed.remove();
}
}
- try!(write_crate_list(crate_metadata.file(), metadata));
+ write_crate_list(crate_metadata.file(), metadata)?;
for bin in to_remove {
- try!(config.shell().status("Removing", bin.display()));
- try!(fs::remove_file(bin));
+ config.shell().status("Removing", bin.display())?;
+ fs::remove_file(bin)?;
}
Ok(())
@@ -354,7 +354,7 @@ fn metadata(config: &Config, root: &Filesystem) -> CargoResult<FileLock> {
fn resolve_root(flag: Option<&str>,
config: &Config) -> CargoResult<Filesystem> {
- let config_root = try!(config.get_path("install.root"));
+ let config_root = config.get_path("install.root")?;
Ok(flag.map(PathBuf::from).or_else(|| {
env::var_os("CARGO_INSTALL_ROOT").map(PathBuf::from)
}).or_else(move || {
diff --git a/src/cargo/ops/cargo_new.rs b/src/cargo/ops/cargo_new.rs
index 41fa01fc11f..5175d868ecd 100644
--- a/src/cargo/ops/cargo_new.rs
+++ b/src/cargo/ops/cargo_new.rs
@@ -41,7 +41,7 @@ struct MkOptions<'a> {
impl Decodable for VersionControl {
fn decode<D: Decoder>(d: &mut D) -> Result<VersionControl, D::Error> {
- Ok(match &try!(d.read_str())[..] {
+ Ok(match &d.read_str()?[..] {
"git" => VersionControl::Git,
"hg" => VersionControl::Hg,
"none" => VersionControl::NoVcs,
@@ -82,7 +82,7 @@ fn get_name<'a>(path: &'a Path, opts: &'a NewOptions, config: &Config) -> CargoR
let message = format!(
"note: package will be named `{}`; use --name to override",
new_name);
- try!(config.shell().say(&message, BLACK));
+ config.shell().say(&message, BLACK)?;
}
Ok(new_name)
}
@@ -150,7 +150,7 @@ fn detect_source_paths_and_types(project_path : &Path,
}
}
H::Detect => {
- let content = try!(paths::read(&path.join(pp.clone())));
+ let content = paths::read(&path.join(pp.clone()))?;
let isbin = content.contains("fn main");
SourceFileInformation {
relative_path: pp,
@@ -215,8 +215,8 @@ pub fn new(opts: NewOptions, config: &Config) -> CargoResult<()> {
path.display())
}
- let name = try!(get_name(&path, &opts, config));
- try!(check_name(name));
+ let name = get_name(&path, &opts, config)?;
+ check_name(name)?;
let mkopts = MkOptions {
version_control: opts.version_control,
@@ -240,12 +240,12 @@ pub fn init(opts: NewOptions, config: &Config) -> CargoResult<()> {
bail!("`cargo init` cannot be run on existing Cargo projects")
}
- let name = try!(get_name(&path, &opts, config));
- try!(check_name(name));
+ let name = get_name(&path, &opts, config)?;
+ check_name(name)?;
let mut src_paths_types = vec![];
- try!(detect_source_paths_and_types(&path, name, &mut src_paths_types));
+ detect_source_paths_and_types(&path, name, &mut src_paths_types)?;
if src_paths_types.len() == 0 {
src_paths_types.push(plan_new_source_file(opts.bin, name.to_string()));
@@ -315,7 +315,7 @@ fn existing_vcs_repo(path: &Path, cwd: &Path) -> bool {
fn mk(config: &Config, opts: &MkOptions) -> CargoResult<()> {
let path = opts.path;
let name = opts.name;
- let cfg = try!(global_config(config));
+ let cfg = global_config(config)?;
let mut ignore = "target\n".to_string();
let in_existing_vcs_repo = existing_vcs_repo(path.parent().unwrap(), config.cwd());
if !opts.bin {
@@ -332,22 +332,22 @@ fn mk(config: &Config, opts: &MkOptions) -> CargoResult<()> {
match vcs {
VersionControl::Git => {
if !fs::metadata(&path.join(".git")).is_ok() {
- try!(GitRepo::init(path, config.cwd()));
+ GitRepo::init(path, config.cwd())?;
}
- try!(paths::append(&path.join(".gitignore"), ignore.as_bytes()));
+ paths::append(&path.join(".gitignore"), ignore.as_bytes())?;
},
VersionControl::Hg => {
if !fs::metadata(&path.join(".hg")).is_ok() {
- try!(HgRepo::init(path, config.cwd()));
+ HgRepo::init(path, config.cwd())?;
}
- try!(paths::append(&path.join(".hgignore"), ignore.as_bytes()));
+ paths::append(&path.join(".hgignore"), ignore.as_bytes())?;
},
VersionControl::NoVcs => {
- try!(fs::create_dir_all(path));
+ fs::create_dir_all(path)?;
},
};
- let (author_name, email) = try!(discover_author());
+ let (author_name, email) = discover_author()?;
// Hoo boy, sure glad we've got exhaustivenes checking behind us.
let author = match (cfg.name, cfg.email, author_name, email) {
(Some(name), Some(email), _, _) |
@@ -402,7 +402,7 @@ authors = [{}]
let path_of_source_file = path.join(i.relative_path.clone());
if let Some(src_dir) = path_of_source_file.parent() {
- try!(fs::create_dir_all(src_dir));
+ fs::create_dir_all(src_dir)?;
}
let default_file_content : &[u8] = if i.bin {
@@ -454,9 +454,9 @@ fn discover_author() -> CargoResult<(String, Option<String>)> {
}
fn global_config(config: &Config) -> CargoResult<CargoNewConfig> {
- let name = try!(config.get_string("cargo-new.name")).map(|s| s.val);
- let email = try!(config.get_string("cargo-new.email")).map(|s| s.val);
- let vcs = try!(config.get_string("cargo-new.vcs"));
+ let name = config.get_string("cargo-new.name")?.map(|s| s.val);
+ let email = config.get_string("cargo-new.email")?.map(|s| s.val);
+ let vcs = config.get_string("cargo-new.vcs")?;
let vcs = match vcs.as_ref().map(|p| (&p.val[..], &p.definition)) {
Some(("git", _)) => Some(VersionControl::Git),
diff --git a/src/cargo/ops/cargo_output_metadata.rs b/src/cargo/ops/cargo_output_metadata.rs
index 79b308053d4..01d8b7c2b65 100644
--- a/src/cargo/ops/cargo_output_metadata.rs
+++ b/src/cargo/ops/cargo_output_metadata.rs
@@ -34,7 +34,7 @@ pub fn output_metadata(opt: OutputMetadataOptions, config: &Config) -> CargoResu
}
fn metadata_no_deps(opt: OutputMetadataOptions, config: &Config) -> CargoResult<ExportInfo> {
- let root = try!(Package::for_path(opt.manifest_path, config));
+ let root = Package::for_path(opt.manifest_path, config)?;
Ok(ExportInfo {
packages: vec![root],
resolve: None,
@@ -110,7 +110,7 @@ fn resolve_dependencies<'a>(manifest: &Path,
features: Vec<String>,
no_default_features: bool)
-> CargoResult<(PackageSet<'a>, Resolve)> {
- let package = try!(Package::for_path(manifest, config));
+ let package = Package::for_path(manifest, config)?;
ops::resolve_dependencies(&package,
config,
None,
diff --git a/src/cargo/ops/cargo_package.rs b/src/cargo/ops/cargo_package.rs
index 277b88eae1b..a1a7e83ee9a 100644
--- a/src/cargo/ops/cargo_package.rs
+++ b/src/cargo/ops/cargo_package.rs
@@ -17,17 +17,17 @@ pub fn package(manifest_path: &Path,
list: bool,
metadata: bool) -> CargoResult<Option<FileLock>> {
let path = manifest_path.parent().unwrap();
- let id = try!(SourceId::for_path(path));
+ let id = SourceId::for_path(path)?;
let mut src = PathSource::new(path, &id, config);
- let pkg = try!(src.root_package());
+ let pkg = src.root_package()?;
if metadata {
- try!(check_metadata(&pkg, config));
+ check_metadata(&pkg, config)?;
}
if list {
let root = pkg.root();
- let mut list: Vec<_> = try!(src.list_files(&pkg)).iter().map(|file| {
+ let mut list: Vec<_> = src.list_files(&pkg)?.iter().map(|file| {
util::without_prefix(&file, &root).unwrap().to_path_buf()
}).collect();
list.sort();
@@ -48,7 +48,7 @@ pub fn package(manifest_path: &Path,
// location if it actually passes all our tests. Any previously existing
// tarball can be assumed as corrupt or invalid, so we just blow it away if
// it exists.
- try!(config.shell().status("Packaging", pkg.package_id().to_string()));
+ config.shell().status("Packaging", pkg.package_id().to_string())?;
let tmp_dst = dir.join(format!(".{}", filename));
let _ = fs::remove_file(&tmp_dst);
try!(tar(&pkg, &src, config, &tmp_dst, &filename).chain_error(|| {
@@ -109,27 +109,27 @@ fn tar(pkg: &Package,
bail!("destination already exists: {}", dst.display())
}
- try!(fs::create_dir_all(dst.parent().unwrap()));
+ fs::create_dir_all(dst.parent().unwrap())?;
- let tmpfile = try!(File::create(dst));
+ let tmpfile = File::create(dst)?;
// Prepare the encoder and its header
let filename = Path::new(filename);
- let encoder = GzBuilder::new().filename(try!(util::path2bytes(filename)))
+ let encoder = GzBuilder::new().filename(util::path2bytes(filename)?)
.write(tmpfile, Compression::Best);
// Put all package files into a compressed archive
let mut ar = Builder::new(encoder);
let root = pkg.root();
- for file in try!(src.list_files(pkg)).iter() {
+ for file in src.list_files(pkg)?.iter() {
if &**file == dst { continue }
let relative = util::without_prefix(&file, &root).unwrap();
- try!(check_filename(relative));
+ check_filename(relative)?;
let relative = try!(relative.to_str().chain_error(|| {
human(format!("non-utf8 path in source directory: {}",
relative.display()))
}));
- let mut file = try!(File::open(file));
+ let mut file = File::open(file)?;
try!(config.shell().verbose(|shell| {
shell.status("Archiving", &relative)
}));
@@ -168,23 +168,23 @@ fn tar(pkg: &Package,
internal(format!("could not archive source file `{}`", relative))
}));
}
- let encoder = try!(ar.into_inner());
- try!(encoder.finish());
+ let encoder = ar.into_inner()?;
+ encoder.finish()?;
Ok(())
}
fn run_verify(config: &Config, pkg: &Package, tar: &Path)
-> CargoResult<()> {
- try!(config.shell().status("Verifying", pkg));
+ config.shell().status("Verifying", pkg)?;
- let f = try!(GzDecoder::new(try!(File::open(tar))));
+ let f = GzDecoder::new(File::open(tar)?)?;
let dst = pkg.root().join(&format!("target/package/{}-{}",
pkg.name(), pkg.version()));
if fs::metadata(&dst).is_ok() {
- try!(fs::remove_dir_all(&dst));
+ fs::remove_dir_all(&dst)?;
}
let mut archive = Archive::new(f);
- try!(archive.unpack(dst.parent().unwrap()));
+ archive.unpack(dst.parent().unwrap())?;
let manifest_path = dst.join("Cargo.toml");
// When packages are uploaded to the registry, all path dependencies are
@@ -195,10 +195,10 @@ fn run_verify(config: &Config, pkg: &Package, tar: &Path)
// location that the package was originally read from. In locking the
// `SourceId` we're telling it that the corresponding `PathSource` will be
// considered updated and we won't actually read any packages.
- let registry = try!(SourceId::for_central(config));
+ let registry = SourceId::for_central(config)?;
let precise = Some("locked".to_string());
- let new_src = try!(SourceId::for_path(&dst)).with_precise(precise);
- let new_pkgid = try!(PackageId::new(pkg.name(), pkg.version(), &new_src));
+ let new_src = SourceId::for_path(&dst)?.with_precise(precise);
+ let new_pkgid = PackageId::new(pkg.name(), pkg.version(), &new_src)?;
let new_summary = pkg.summary().clone().map_dependencies(|d| {
if !d.source_id().is_path() { return d }
d.clone_inner().set_source_id(registry.clone()).into_dependency()
diff --git a/src/cargo/ops/cargo_pkgid.rs b/src/cargo/ops/cargo_pkgid.rs
index 2bf32e627f4..13fa544681a 100644
--- a/src/cargo/ops/cargo_pkgid.rs
+++ b/src/cargo/ops/cargo_pkgid.rs
@@ -7,16 +7,16 @@ use util::{CargoResult, Config};
pub fn pkgid(manifest_path: &Path,
spec: Option<&str>,
config: &Config) -> CargoResult<PackageIdSpec> {
- let package = try!(Package::for_path(manifest_path, config));
+ let package = Package::for_path(manifest_path, config)?;
let lockfile = package.root().join("Cargo.lock");
- let resolve = match try!(ops::load_lockfile(&lockfile, &package, config)) {
+ let resolve = match ops::load_lockfile(&lockfile, &package, config)? {
Some(resolve) => resolve,
None => bail!("a Cargo.lock must exist for this command"),
};
let pkgid = match spec {
- Some(spec) => try!(PackageIdSpec::query_str(spec, resolve.iter())),
+ Some(spec) => PackageIdSpec::query_str(spec, resolve.iter())?,
None => package.package_id(),
};
Ok(PackageIdSpec::from_package_id(pkgid))
diff --git a/src/cargo/ops/cargo_read_manifest.rs b/src/cargo/ops/cargo_read_manifest.rs
index 445a356c426..b38dd6d76c4 100644
--- a/src/cargo/ops/cargo_read_manifest.rs
+++ b/src/cargo/ops/cargo_read_manifest.rs
@@ -21,11 +21,11 @@ pub fn read_manifest(contents: &[u8], layout: Layout, source_id: &SourceId,
pub fn read_package(path: &Path, source_id: &SourceId, config: &Config)
-> CargoResult<(Package, Vec<PathBuf>)> {
trace!("read_package; path={}; source-id={}", path.display(), source_id);
- let data = try!(paths::read(path));
+ let data = paths::read(path)?;
let layout = project_layout(path.parent().unwrap());
let (manifest, nested) =
- try!(read_manifest(data.as_bytes(), layout, source_id, config));
+ read_manifest(data.as_bytes(), layout, source_id, config)?;
Ok((Package::new(manifest, path), nested))
}
@@ -75,7 +75,7 @@ pub fn read_packages(path: &Path, source_id: &SourceId, config: &Config)
fn walk(path: &Path, callback: &mut FnMut(&Path) -> CargoResult<bool>)
-> CargoResult<()> {
- if !try!(callback(path)) {
+ if !callback(path)? {
trace!("not processing {}", path.display());
return Ok(())
}
@@ -94,9 +94,9 @@ fn walk(path: &Path, callback: &mut FnMut(&Path) -> CargoResult)
}
};
for dir in dirs {
- let dir = try!(dir);
- if try!(dir.file_type()).is_dir() {
- try!(walk(&dir.path(), callback));
+ let dir = dir?;
+ if dir.file_type()?.is_dir() {
+ walk(&dir.path(), callback)?;
}
}
Ok(())
@@ -113,9 +113,9 @@ fn read_nested_packages(path: &Path,
visited: &mut HashSet<PathBuf>) -> CargoResult<()> {
if !visited.insert(path.to_path_buf()) { return Ok(()) }
- let manifest = try!(find_project_manifest_exact(path, "Cargo.toml"));
+ let manifest = find_project_manifest_exact(path, "Cargo.toml")?;
- let (pkg, nested) = try!(read_package(&manifest, source_id, config));
+ let (pkg, nested) = read_package(&manifest, source_id, config)?;
let pkg_id = pkg.package_id().clone();
if !all_packages.contains_key(&pkg_id) {
all_packages.insert(pkg_id, pkg);
diff --git a/src/cargo/ops/cargo_run.rs b/src/cargo/ops/cargo_run.rs
index 05646e7e2cf..8ec0875c1ff 100644
--- a/src/cargo/ops/cargo_run.rs
+++ b/src/cargo/ops/cargo_run.rs
@@ -8,7 +8,7 @@ pub fn run(manifest_path: &Path,
options: &ops::CompileOptions,
args: &[String]) -> CargoResult<Option<ProcessError>> {
let config = options.config;
- let root = try!(Package::for_path(manifest_path, config));
+ let root = Package::for_path(manifest_path, config)?;
let mut bins = root.manifest().targets().iter().filter(|a| {
!a.is_lib() && !a.is_custom_build() && match options.filter {
@@ -40,7 +40,7 @@ pub fn run(manifest_path: &Path,
}
}
- let compile = try!(ops::compile(manifest_path, options));
+ let compile = ops::compile(manifest_path, options)?;
let exe = &compile.binaries[0];
let exe = match util::without_prefix(&exe, config.cwd()) {
Some(path) if path.file_name() == Some(path.as_os_str())
@@ -48,10 +48,10 @@ pub fn run(manifest_path: &Path,
Some(path) => path.to_path_buf(),
None => exe.to_path_buf(),
};
- let mut process = try!(compile.target_process(exe, &root))
+ let mut process = compile.target_process(exe, &root)?
.into_process_builder();
process.args(args).cwd(config.cwd());
- try!(config.shell().status("Running", process.to_string()));
+ config.shell().status("Running", process.to_string())?;
Ok(process.exec().err())
}
diff --git a/src/cargo/ops/cargo_rustc/compilation.rs b/src/cargo/ops/cargo_rustc/compilation.rs
index 9e15288e434..0fe12d52288 100644
--- a/src/cargo/ops/cargo_rustc/compilation.rs
+++ b/src/cargo/ops/cargo_rustc/compilation.rs
@@ -101,7 +101,7 @@ impl<'cfg> Compilation<'cfg> {
search_path.push(self.deps_output.clone());
let search_path = try!(util::join_paths(&search_path,
util::dylib_path_envvar()));
- let mut cmd = try!(CommandPrototype::new(cmd, self.config));
+ let mut cmd = CommandPrototype::new(cmd, self.config)?;
cmd.env(util::dylib_path_envvar(), &search_path);
if let Some(env) = self.extra_env.get(pkg.package_id()) {
for &(ref k, ref v) in env {
diff --git a/src/cargo/ops/cargo_rustc/context.rs b/src/cargo/ops/cargo_rustc/context.rs
index b400745644c..8f0fed3a51d 100644
--- a/src/cargo/ops/cargo_rustc/context.rs
+++ b/src/cargo/ops/cargo_rustc/context.rs
@@ -67,11 +67,11 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
profiles: &'a Profiles) -> CargoResult<Context<'a, 'cfg>> {
let target = build_config.requested_target.clone();
let target = target.as_ref().map(|s| &s[..]);
- let target_info = try!(Context::target_info(target, config));
+ let target_info = Context::target_info(target, config)?;
let host_info = if build_config.requested_target.is_none() {
target_info.clone()
} else {
- try!(Context::target_info(None, config))
+ Context::target_info(None, config)?
};
let target_triple = target.unwrap_or_else(|| {
&config.rustc_info().host[..]
@@ -162,7 +162,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
};
let cfg = if has_cfg {
- Some(try!(lines.map(Cfg::from_str).collect()))
+ Some(lines.map(Cfg::from_str).collect()?)
} else {
None
};
@@ -464,7 +464,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
profile: &self.profiles.dev,
..*unit
};
- let deps = try!(self.dep_targets(&tmp));
+ let deps = self.dep_targets(&tmp)?;
Ok(deps.iter().filter_map(|unit| {
if !unit.target.linkable() || unit.pkg.manifest().links().is_none() {
return None
@@ -499,7 +499,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
// the documentation of the library being built.
let mut ret = Vec::new();
for dep in deps {
- let dep = try!(dep);
+ let dep = dep?;
let lib = match dep.targets().iter().find(|t| t.is_lib()) {
Some(lib) => lib,
None => continue,
@@ -660,7 +660,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
}
// Then the build.rustflags value
- if let Some(args) = try!(self.config.get_list("build.rustflags")) {
+ if let Some(args) = self.config.get_list("build.rustflags")? {
let args = args.val.into_iter().map(|a| a.0);
return Ok(args.collect());
}
diff --git a/src/cargo/ops/cargo_rustc/custom_build.rs b/src/cargo/ops/cargo_rustc/custom_build.rs
index 929e54b932a..996c6849ac7 100644
--- a/src/cargo/ops/cargo_rustc/custom_build.rs
+++ b/src/cargo/ops/cargo_rustc/custom_build.rs
@@ -70,13 +70,13 @@ pub fn prepare<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
let (work_dirty, work_fresh) = if overridden {
(Work::new(|_| Ok(())), Work::new(|_| Ok(())))
} else {
- try!(build_work(cx, unit))
+ build_work(cx, unit)?
};
// Now that we've prep'd our work, build the work needed to manage the
// fingerprint and then start returning that upwards.
let (freshness, dirty, fresh) =
- try!(fingerprint::prepare_build_cmd(cx, unit));
+ fingerprint::prepare_build_cmd(cx, unit)?;
Ok((work_dirty.then(dirty), work_fresh.then(fresh), freshness))
}
@@ -97,7 +97,7 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
// package's library profile.
let profile = cx.lib_profile(unit.pkg.package_id());
let to_exec = to_exec.into_os_string();
- let mut p = try!(super::process(CommandType::Host(to_exec), unit.pkg, cx));
+ let mut p = super::process(CommandType::Host(to_exec), unit.pkg, cx)?;
p.env("OUT_DIR", &build_output)
.env("CARGO_MANIFEST_DIR", unit.pkg.root())
.env("NUM_JOBS", &cx.jobs().to_string())
@@ -124,7 +124,7 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
// This information will be used at build-time later on to figure out which
// sorts of variables need to be discovered at that time.
let lib_deps = {
- try!(cx.dep_run_custom_build(unit)).iter().filter_map(|unit| {
+ cx.dep_run_custom_build(unit)?.iter().filter_map(|unit| {
if unit.profile.run_custom_build {
Some((unit.pkg.manifest().links().unwrap().to_string(),
unit.pkg.package_id().clone()))
@@ -151,8 +151,8 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
};
cx.build_explicit_deps.insert(*unit, (output_file.clone(), rerun_if_changed));
- try!(fs::create_dir_all(&cx.layout(unit.pkg, Kind::Host).build(unit.pkg)));
- try!(fs::create_dir_all(&cx.layout(unit.pkg, unit.kind).build(unit.pkg)));
+ fs::create_dir_all(&cx.layout(unit.pkg, Kind::Host).build(unit.pkg))?;
+ fs::create_dir_all(&cx.layout(unit.pkg, unit.kind).build(unit.pkg))?;
let exec_engine = cx.exec_engine.clone();
@@ -204,7 +204,7 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
pkg_name, e.desc);
Human(e)
}));
- try!(paths::write(&output_file, &output.stdout));
+ paths::write(&output_file, &output.stdout)?;
// After the build command has finished running, we need to be sure to
// remember all of its output so we can later discover precisely what it
@@ -216,7 +216,7 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
let output = try!(str::from_utf8(&output.stdout).map_err(|_| {
human("build script output was not valid utf-8")
}));
- let parsed_output = try!(BuildOutput::parse(output, &pkg_name));
+ let parsed_output = BuildOutput::parse(output, &pkg_name)?;
build_state.insert(id, kind, parsed_output);
Ok(())
});
@@ -228,7 +228,7 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
let (id, pkg_name, build_state, output_file) = all;
let output = match prev_output {
Some(output) => output,
- None => try!(BuildOutput::parse_file(&output_file, &pkg_name)),
+ None => BuildOutput::parse_file(&output_file, &pkg_name)?,
};
build_state.insert(id, kind, output);
Ok(())
@@ -270,7 +270,7 @@ impl BuildState {
impl BuildOutput {
pub fn parse_file(path: &Path, pkg_name: &str) -> CargoResult<BuildOutput> {
- let contents = try!(paths::read(path));
+ let contents = paths::read(path)?;
BuildOutput::parse(&contents, pkg_name)
}
@@ -376,7 +376,7 @@ pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>,
-> CargoResult<()> {
let mut ret = HashMap::new();
for unit in units {
- try!(build(&mut ret, cx, unit));
+ build(&mut ret, cx, unit)?;
}
cx.build_scripts.extend(ret.into_iter().map(|(k, v)| {
(k, Arc::new(v))
@@ -400,8 +400,8 @@ pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>,
if !unit.target.is_custom_build() && unit.pkg.has_custom_build() {
add_to_link(&mut ret, unit.pkg.package_id(), unit.kind);
}
- for unit in try!(cx.dep_targets(unit)).iter() {
- let dep_scripts = try!(build(out, cx, unit));
+ for unit in cx.dep_targets(unit)?.iter() {
+ let dep_scripts = build(out, cx, unit)?;
if unit.target.for_host() {
ret.plugins.extend(dep_scripts.to_link.iter()
diff --git a/src/cargo/ops/cargo_rustc/fingerprint.rs b/src/cargo/ops/cargo_rustc/fingerprint.rs
index 042e223b3cb..0c17299edb3 100644
--- a/src/cargo/ops/cargo_rustc/fingerprint.rs
+++ b/src/cargo/ops/cargo_rustc/fingerprint.rs
@@ -53,7 +53,7 @@ pub fn prepare_target<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
debug!("fingerprint at: {}", loc.display());
- let fingerprint = try!(calculate(cx, unit));
+ let fingerprint = calculate(cx, unit)?;
let compare = compare_old_fingerprint(&loc, &*fingerprint);
log_compare(unit, &compare);
@@ -63,7 +63,7 @@ pub fn prepare_target<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
missing_outputs = !root.join(unit.target.crate_name())
.join("index.html").exists();
} else {
- for filename in try!(cx.target_filenames(unit)).iter() {
+ for filename in cx.target_filenames(unit)?.iter() {
missing_outputs |= fs::metadata(root.join(filename)).is_err();
}
}
@@ -215,10 +215,10 @@ impl hash::Hash for Fingerprint {
impl Encodable for Fingerprint {
fn encode<E: Encoder>(&self, e: &mut E) -> Result<(), E::Error> {
e.emit_struct("Fingerprint", 6, |e| {
- try!(e.emit_struct_field("rustc", 0, |e| self.rustc.encode(e)));
- try!(e.emit_struct_field("target", 1, |e| self.target.encode(e)));
- try!(e.emit_struct_field("profile", 2, |e| self.profile.encode(e)));
- try!(e.emit_struct_field("local", 3, |e| self.local.encode(e)));
+ e.emit_struct_field("rustc", 0, |e| self.rustc.encode(e))?;
+ e.emit_struct_field("target", 1, |e| self.target.encode(e))?;
+ e.emit_struct_field("profile", 2, |e| self.profile.encode(e))?;
+ e.emit_struct_field("local", 3, |e| self.local.encode(e))?;
try!(e.emit_struct_field("features", 4, |e| {
self.features.encode(e)
}));
@@ -227,7 +227,7 @@ impl Encodable for Fingerprint {
(a, b.hash())
}).collect::<Vec<_>>().encode(e)
}));
- try!(e.emit_struct_field("rustflags", 6, |e| self.rustflags.encode(e)));
+ e.emit_struct_field("rustflags", 6, |e| self.rustflags.encode(e))?;
Ok(())
})
}
@@ -240,15 +240,15 @@ impl Decodable for Fingerprint {
}
d.read_struct("Fingerprint", 6, |d| {
Ok(Fingerprint {
- rustc: try!(d.read_struct_field("rustc", 0, decode)),
- target: try!(d.read_struct_field("target", 1, decode)),
- profile: try!(d.read_struct_field("profile", 2, decode)),
- local: try!(d.read_struct_field("local", 3, decode)),
- features: try!(d.read_struct_field("features", 4, decode)),
+ rustc: d.read_struct_field("rustc", 0, decode)?,
+ target: d.read_struct_field("target", 1, decode)?,
+ profile: d.read_struct_field("profile", 2, decode)?,
+ local: d.read_struct_field("local", 3, decode)?,
+ features: d.read_struct_field("features", 4, decode)?,
memoized_hash: Mutex::new(None),
deps: {
let decode = decode::<Vec<(String, u64)>, D>;
- let v = try!(d.read_struct_field("deps", 5, decode));
+ let v = d.read_struct_field("deps", 5, decode)?;
v.into_iter().map(|(name, hash)| {
(name, Arc::new(Fingerprint {
rustc: 0,
@@ -262,7 +262,7 @@ impl Decodable for Fingerprint {
}))
}).collect()
},
- rustflags: try!(d.read_struct_field("rustflags", 6, decode)),
+ rustflags: d.read_struct_field("rustflags", 6, decode)?,
})
})
}
@@ -284,7 +284,7 @@ impl Encodable for MtimeSlot {
impl Decodable for MtimeSlot {
fn decode<D: Decoder>(e: &mut D) -> Result<MtimeSlot, D::Error> {
- let kind: Option<(u64, u32)> = try!(Decodable::decode(e));
+ let kind: Option<(u64, u32)> = Decodable::decode(e)?;
Ok(MtimeSlot(Mutex::new(kind.map(|(s, n)| {
FileTime::from_seconds_since_1970(s, n)
}))))
@@ -326,7 +326,7 @@ fn calculate<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
// elsewhere. Also skip fingerprints of binaries because they don't actually
// induce a recompile, they're just dependencies in the sense that they need
// to be built.
- let deps = try!(cx.dep_targets(unit));
+ let deps = cx.dep_targets(unit)?;
let deps = try!(deps.iter().filter(|u| {
!u.target.is_custom_build() && !u.target.is_bin()
}).map(|unit| {
@@ -338,10 +338,10 @@ fn calculate<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
// And finally, calculate what our own local fingerprint is
let local = if use_dep_info(unit) {
let dep_info = dep_info_loc(cx, unit);
- let mtime = try!(dep_info_mtime_if_fresh(&dep_info));
+ let mtime = dep_info_mtime_if_fresh(&dep_info)?;
LocalFingerprint::MtimeBased(MtimeSlot(Mutex::new(mtime)), dep_info)
} else {
- let fingerprint = try!(pkg_fingerprint(cx, unit.pkg));
+ let fingerprint = pkg_fingerprint(cx, unit.pkg)?;
LocalFingerprint::Precalculated(fingerprint)
};
let mut deps = deps;
@@ -354,7 +354,7 @@ fn calculate<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
deps: deps,
local: local,
memoized_hash: Mutex::new(None),
- rustflags: try!(cx.rustflags_args(unit)),
+ rustflags: cx.rustflags_args(unit)?,
});
cx.fingerprints.insert(*unit, fingerprint.clone());
Ok(fingerprint)
@@ -412,7 +412,7 @@ pub fn prepare_build_cmd<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
let &(ref output, ref deps) = &cx.build_explicit_deps[unit];
let local = if deps.is_empty() {
- let s = try!(pkg_fingerprint(cx, unit.pkg));
+ let s = pkg_fingerprint(cx, unit.pkg)?;
LocalFingerprint::Precalculated(s)
} else {
let deps = deps.iter().map(|p| unit.pkg.root().join(p));
@@ -458,7 +458,7 @@ pub fn prepare_build_cmd<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
let slot = MtimeSlot(Mutex::new(None));
fingerprint.local = LocalFingerprint::MtimeBased(slot,
output_path);
- try!(fingerprint.update_local());
+ fingerprint.update_local()?;
}
}
write_fingerprint(&loc, &fingerprint)
@@ -470,7 +470,7 @@ pub fn prepare_build_cmd<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
fn write_fingerprint(loc: &Path, fingerprint: &Fingerprint) -> CargoResult<()> {
let hash = fingerprint.hash();
debug!("write fingerprint: {}", loc.display());
- try!(paths::write(&loc, util::to_hex(hash).as_bytes()));
+ paths::write(&loc, util::to_hex(hash).as_bytes())?;
try!(paths::write(&loc.with_extension("json"),
json::encode(&fingerprint).unwrap().as_bytes()));
Ok(())
@@ -482,10 +482,10 @@ pub fn prepare_init(cx: &mut Context, unit: &Unit) -> CargoResult<()> {
let new2 = new1.clone();
if fs::metadata(&new1).is_err() {
- try!(fs::create_dir(&new1));
+ fs::create_dir(&new1)?;
}
if fs::metadata(&new2).is_err() {
- try!(fs::create_dir(&new2));
+ fs::create_dir(&new2)?;
}
Ok(())
}
@@ -502,14 +502,14 @@ pub fn dep_info_loc(cx: &Context, unit: &Unit) -> PathBuf {
fn compare_old_fingerprint(loc: &Path, new_fingerprint: &Fingerprint)
-> CargoResult<()> {
- let old_fingerprint_short = try!(paths::read(loc));
+ let old_fingerprint_short = paths::read(loc)?;
let new_hash = new_fingerprint.hash();
if util::to_hex(new_hash) == old_fingerprint_short {
return Ok(())
}
- let old_fingerprint_json = try!(paths::read(&loc.with_extension("json")));
+ let old_fingerprint_json = paths::read(&loc.with_extension("json"))?;
let old_fingerprint = try!(json::decode(&old_fingerprint_json).chain_error(|| {
internal(format!("failed to deserialize json"))
}));
@@ -543,7 +543,7 @@ fn dep_info_mtime_if_fresh(dep_info: &Path) -> CargoResult<Option<FileTime>> {
if fs_try!(f.read_until(0, &mut cwd)) == 0 {
return Ok(None)
}
- let cwd = try!(util::bytes2path(&cwd[..cwd.len()-1]));
+ let cwd = util::bytes2path(&cwd[..cwd.len()-1])?;
let line = match f.lines().next() {
Some(Ok(line)) => line,
_ => return Ok(None),
@@ -644,12 +644,12 @@ fn filename(unit: &Unit) -> String {
// next time.
pub fn append_current_dir(path: &Path, cwd: &Path) -> CargoResult<()> {
debug!("appending {} <- {}", path.display(), cwd.display());
- let mut f = try!(OpenOptions::new().read(true).write(true).open(path));
+ let mut f = OpenOptions::new().read(true).write(true).open(path)?;
let mut contents = Vec::new();
- try!(f.read_to_end(&mut contents));
- try!(f.seek(SeekFrom::Start(0)));
- try!(f.write_all(try!(util::path2bytes(cwd))));
- try!(f.write_all(&[0]));
- try!(f.write_all(&contents));
+ f.read_to_end(&mut contents)?;
+ f.seek(SeekFrom::Start(0))?;
+ f.write_all(util::path2bytes(cwd)?)?;
+ f.write_all(&[0])?;
+ f.write_all(&contents)?;
Ok(())
}
diff --git a/src/cargo/ops/cargo_rustc/job.rs b/src/cargo/ops/cargo_rustc/job.rs
index 0c270cfdf23..e98d70a2aec 100644
--- a/src/cargo/ops/cargo_rustc/job.rs
+++ b/src/cargo/ops/cargo_rustc/job.rs
@@ -38,7 +38,7 @@ impl Work {
pub fn then(self, next: Work) -> Work {
Work::new(move |tx| {
- try!(self.call(tx.clone()));
+ self.call(tx.clone())?;
next.call(tx)
})
}
diff --git a/src/cargo/ops/cargo_rustc/job_queue.rs b/src/cargo/ops/cargo_rustc/job_queue.rs
index ff35daac2eb..515efcfeb1f 100644
--- a/src/cargo/ops/cargo_rustc/job_queue.rs
+++ b/src/cargo/ops/cargo_rustc/job_queue.rs
@@ -74,7 +74,7 @@ impl<'a> JobQueue<'a> {
job: Job,
fresh: Freshness) -> CargoResult<()> {
let key = Key::new(unit);
- let deps = try!(key.dependencies(cx));
+ let deps = key.dependencies(cx)?;
self.queue.queue(Fresh, key, Vec::new(), &deps).push((job, fresh));
*self.counts.entry(key.pkg).or_insert(0) += 1;
Ok(())
@@ -111,7 +111,7 @@ impl<'a> JobQueue<'a> {
while self.active < self.jobs {
if !queue.is_empty() {
let (key, job, fresh) = queue.remove(0);
- try!(self.run(key, fresh, job, config, scope));
+ self.run(key, fresh, job, config, scope)?;
} else if let Some((fresh, key, jobs)) = self.queue.dequeue() {
let total_fresh = jobs.iter().fold(fresh, |fresh, &(_, f)| {
f.combine(fresh)
@@ -188,11 +188,11 @@ impl<'a> JobQueue<'a> {
});
// Print out some nice progress information
- try!(self.note_working_on(config, &key, fresh));
+ self.note_working_on(config, &key, fresh)?;
// only the first message of each job is processed
if let Ok(msg) = desc_rx.recv() {
- try!(config.shell().verbose(|c| c.status("Running", &msg)));
+ config.shell().verbose(|c| c.status("Running", &msg))?;
}
Ok(())
}
@@ -219,15 +219,15 @@ impl<'a> JobQueue<'a> {
Dirty => {
if key.profile.doc {
self.documented.insert(key.pkg);
- try!(config.shell().status("Documenting", key.pkg));
+ config.shell().status("Documenting", key.pkg)?;
} else {
self.compiled.insert(key.pkg);
- try!(config.shell().status("Compiling", key.pkg));
+ config.shell().status("Compiling", key.pkg)?;
}
}
Fresh if self.counts[key.pkg] == 0 => {
self.compiled.insert(key.pkg);
- try!(config.shell().verbose(|c| c.status("Fresh", key.pkg)));
+ config.shell().verbose(|c| c.status("Fresh", key.pkg))?;
}
Fresh => {}
}
@@ -248,12 +248,12 @@ impl<'a> Key<'a> {
fn dependencies<'cfg>(&self, cx: &Context<'a, 'cfg>)
-> CargoResult<Vec<Key<'a>>> {
let unit = Unit {
- pkg: try!(cx.get_package(self.pkg)),
+ pkg: cx.get_package(self.pkg)?,
target: self.target,
profile: self.profile,
kind: self.kind,
};
- let targets = try!(cx.dep_targets(&unit));
+ let targets = cx.dep_targets(&unit)?;
Ok(targets.iter().filter_map(|unit| {
// Binaries aren't actually needed to *compile* tests, just to run
// them, so we don't include this dependency edge in the job graph.
diff --git a/src/cargo/ops/cargo_rustc/layout.rs b/src/cargo/ops/cargo_rustc/layout.rs
index 4add3a75863..c447d8d06ac 100644
--- a/src/cargo/ops/cargo_rustc/layout.rs
+++ b/src/cargo/ops/cargo_rustc/layout.rs
@@ -94,20 +94,20 @@ impl Layout {
pub fn prepare(&mut self) -> io::Result<()> {
if fs::metadata(&self.root).is_err() {
- try!(fs::create_dir_all(&self.root));
+ fs::create_dir_all(&self.root)?;
}
- try!(mkdir(&self.deps));
- try!(mkdir(&self.native));
- try!(mkdir(&self.fingerprint));
- try!(mkdir(&self.examples));
- try!(mkdir(&self.build));
+ mkdir(&self.deps)?;
+ mkdir(&self.native)?;
+ mkdir(&self.fingerprint)?;
+ mkdir(&self.examples)?;
+ mkdir(&self.build)?;
return Ok(());
fn mkdir(dir: &Path) -> io::Result<()> {
if fs::metadata(&dir).is_err() {
- try!(fs::create_dir(dir));
+ fs::create_dir(dir)?;
}
Ok(())
}
diff --git a/src/cargo/ops/cargo_rustc/mod.rs b/src/cargo/ops/cargo_rustc/mod.rs
index 80bff8c3ef1..adafc11dd55 100644
--- a/src/cargo/ops/cargo_rustc/mod.rs
+++ b/src/cargo/ops/cargo_rustc/mod.rs
@@ -79,7 +79,7 @@ pub fn compile_targets<'a, 'cfg: 'a>(pkg_targets: &'a PackagesToBuild<'a>,
}).collect::<Vec<_>>();
let dest = if build_config.release {"release"} else {"debug"};
- let root = try!(packages.get(resolve.root()));
+ let root = packages.get(resolve.root())?;
let host_layout = Layout::new(config, root, None, &dest);
let target_layout = build_config.requested_target.as_ref().map(|target| {
layout::Layout::new(config, root, Some(&target), &dest)
@@ -90,7 +90,7 @@ pub fn compile_targets<'a, 'cfg: 'a>(pkg_targets: &'a PackagesToBuild<'a>,
// compile.
let fs = Filesystem::new(host_layout.root().to_path_buf());
let path = Path::new(".cargo-lock");
- let _lock = try!(fs.open_rw(path, config, "build directory"));
+ let _lock = fs.open_rw(path, config, "build directory")?;
let mut cx = try!(Context::new(resolve, packages, config,
host_layout, target_layout,
@@ -98,8 +98,8 @@ pub fn compile_targets<'a, 'cfg: 'a>(pkg_targets: &'a PackagesToBuild<'a>,
let mut queue = JobQueue::new(&cx);
- try!(cx.prepare(root));
- try!(custom_build::build_map(&mut cx, &units));
+ cx.prepare(root)?;
+ custom_build::build_map(&mut cx, &units)?;
for unit in units.iter() {
// Build up a list of pending jobs, each of which represent
@@ -107,11 +107,11 @@ pub fn compile_targets<'a, 'cfg: 'a>(pkg_targets: &'a PackagesToBuild<'a>,
// part of this, that's all done next as part of the `execute`
// function which will run everything in order with proper
// parallelism.
- try!(compile(&mut cx, &mut queue, unit));
+ compile(&mut cx, &mut queue, unit)?;
}
// Now that we've figured out everything that we're going to do, do it!
- try!(queue.execute(cx.config));
+ queue.execute(cx.config)?;
for unit in units.iter() {
let out_dir = cx.layout(unit.pkg, unit.kind).build_out(unit.pkg)
@@ -120,7 +120,7 @@ pub fn compile_targets<'a, 'cfg: 'a>(pkg_targets: &'a PackagesToBuild<'a>,
.or_insert(Vec::new())
.push(("OUT_DIR".to_string(), out_dir));
- for filename in try!(cx.target_filenames(unit)).iter() {
+ for filename in cx.target_filenames(unit)?.iter() {
let dst = cx.out_dir(unit).join(filename);
if unit.profile.test {
cx.compilation.tests.push((unit.pkg.clone(),
@@ -136,7 +136,7 @@ pub fn compile_targets<'a, 'cfg: 'a>(pkg_targets: &'a PackagesToBuild<'a>,
if !unit.target.is_lib() { continue }
// Include immediate lib deps as well
- for unit in try!(cx.dep_targets(unit)).iter() {
+ for unit in cx.dep_targets(unit)?.iter() {
let pkgid = unit.pkg.package_id();
if !unit.target.is_lib() { continue }
if unit.profile.doc { continue }
@@ -144,7 +144,7 @@ pub fn compile_targets<'a, 'cfg: 'a>(pkg_targets: &'a PackagesToBuild<'a>,
continue
}
- let v = try!(cx.target_filenames(unit));
+ let v = cx.target_filenames(unit)?;
let v = v.into_iter().map(|f| {
(unit.target.clone(), cx.out_dir(unit).join(f))
}).collect::<Vec<_>>();
@@ -182,35 +182,35 @@ fn compile<'a, 'cfg: 'a>(cx: &mut Context<'a, 'cfg>,
// we've got everything constructed.
let p = profile::start(format!("preparing: {}/{}", unit.pkg,
unit.target.name()));
- try!(fingerprint::prepare_init(cx, unit));
- try!(cx.links.validate(unit));
+ fingerprint::prepare_init(cx, unit)?;
+ cx.links.validate(unit)?;
let (dirty, fresh, freshness) = if unit.profile.run_custom_build {
- try!(custom_build::prepare(cx, unit))
+ custom_build::prepare(cx, unit)?
} else {
let (freshness, dirty, fresh) = try!(fingerprint::prepare_target(cx,
unit));
let work = if unit.profile.doc {
- try!(rustdoc(cx, unit))
+ rustdoc(cx, unit)?
} else {
- try!(rustc(cx, unit))
+ rustc(cx, unit)?
};
let dirty = work.then(dirty);
(dirty, fresh, freshness)
};
- try!(jobs.enqueue(cx, unit, Job::new(dirty, fresh), freshness));
+ jobs.enqueue(cx, unit, Job::new(dirty, fresh), freshness)?;
drop(p);
// Be sure to compile all dependencies of this target as well.
- for unit in try!(cx.dep_targets(unit)).iter() {
- try!(compile(cx, jobs, unit));
+ for unit in cx.dep_targets(unit)?.iter() {
+ compile(cx, jobs, unit)?;
}
Ok(())
}
fn rustc(cx: &mut Context, unit: &Unit) -> CargoResult<Work> {
let crate_types = unit.target.rustc_crate_types();
- let mut rustc = try!(prepare_rustc(cx, crate_types, unit));
+ let mut rustc = prepare_rustc(cx, crate_types, unit)?;
let name = unit.pkg.name().to_string();
let is_path_source = unit.pkg.package_id().source_id().is_path();
@@ -226,7 +226,7 @@ fn rustc(cx: &mut Context, unit: &Unit) -> CargoResult<Work> {
let has_custom_args = unit.profile.rustc_args.is_some();
let exec_engine = cx.exec_engine.clone();
- let filenames = try!(cx.target_filenames(unit));
+ let filenames = cx.target_filenames(unit)?;
let root = cx.out_dir(unit);
// Prepare the native lib state (extra -L and -l flags)
@@ -250,7 +250,7 @@ fn rustc(cx: &mut Context, unit: &Unit) -> CargoResult<Work> {
let dep_info_loc = fingerprint::dep_info_loc(cx, unit);
let cwd = cx.config.cwd().to_path_buf();
- let rustflags = try!(cx.rustflags_args(unit));
+ let rustflags = cx.rustflags_args(unit)?;
return Ok(Work::new(move |desc_tx| {
// Only at runtime have we discovered what the extra -L and -l
@@ -262,7 +262,7 @@ fn rustc(cx: &mut Context, unit: &Unit) -> CargoResult<Work> {
let build_state = build_state.outputs.lock().unwrap();
try!(add_native_deps(&mut rustc, &build_state, &build_deps,
pass_l_flag, &current_id));
- try!(add_plugin_deps(&mut rustc, &build_state, &build_deps));
+ add_plugin_deps(&mut rustc, &build_state, &build_deps)?;
}
// FIXME(rust-lang/rust#18913): we probably shouldn't have to do
@@ -270,7 +270,7 @@ fn rustc(cx: &mut Context, unit: &Unit) -> CargoResult<Work> {
for filename in filenames.iter() {
let dst = root.join(filename);
if fs::metadata(&dst).is_ok() {
- try!(fs::remove_file(&dst));
+ fs::remove_file(&dst)?;
}
}
@@ -299,7 +299,7 @@ fn rustc(cx: &mut Context, unit: &Unit) -> CargoResult<Work> {
internal(format!("could not rename dep info: {:?}",
rustc_dep_info_loc))
}));
- try!(fingerprint::append_current_dir(&dep_info_loc, &cwd));
+ fingerprint::append_current_dir(&dep_info_loc, &cwd)?;
}
Ok(())
@@ -358,7 +358,7 @@ fn add_plugin_deps(rustc: &mut CommandPrototype,
search_path.push(path.clone());
}
}
- let search_path = try!(join_paths(&search_path, var));
+ let search_path = join_paths(&search_path, var)?;
rustc.env(var, &search_path);
Ok(())
}
@@ -366,16 +366,16 @@ fn add_plugin_deps(rustc: &mut CommandPrototype,
fn prepare_rustc(cx: &Context,
crate_types: Vec<&str>,
unit: &Unit) -> CargoResult<CommandPrototype> {
- let mut base = try!(process(CommandType::Rustc, unit.pkg, cx));
+ let mut base = process(CommandType::Rustc, unit.pkg, cx)?;
build_base_args(cx, &mut base, unit, &crate_types);
build_plugin_args(&mut base, cx, unit);
- try!(build_deps_args(&mut base, cx, unit));
+ build_deps_args(&mut base, cx, unit)?;
Ok(base)
}
fn rustdoc(cx: &mut Context, unit: &Unit) -> CargoResult<Work> {
- let mut rustdoc = try!(process(CommandType::Rustdoc, unit.pkg, cx));
+ let mut rustdoc = process(CommandType::Rustdoc, unit.pkg, cx)?;
rustdoc.arg(&root_path(cx, unit))
.cwd(cx.config.cwd())
.arg("--crate-name").arg(&unit.target.crate_name());
@@ -389,7 +389,7 @@ fn rustdoc(cx: &mut Context, unit: &Unit) -> CargoResult<Work> {
// Create the documentation directory ahead of time as rustdoc currently has
// a bug where concurrent invocations will race to create this directory if
// it doesn't already exist.
- try!(fs::create_dir_all(&doc_dir));
+ fs::create_dir_all(&doc_dir)?;
rustdoc.arg("-o").arg(doc_dir);
@@ -403,7 +403,7 @@ fn rustdoc(cx: &mut Context, unit: &Unit) -> CargoResult<Work> {
rustdoc.args(args);
}
- try!(build_deps_args(&mut rustdoc, cx, unit));
+ build_deps_args(&mut rustdoc, cx, unit)?;
if unit.pkg.has_custom_build() {
rustdoc.env("OUT_DIR", &cx.layout(unit.pkg, unit.kind)
@@ -569,9 +569,9 @@ fn build_deps_args(cmd: &mut CommandPrototype, cx: &Context, unit: &Unit)
cmd.env("OUT_DIR", &layout.build_out(unit.pkg));
}
- for unit in try!(cx.dep_targets(unit)).iter() {
+ for unit in cx.dep_targets(unit)?.iter() {
if unit.target.linkable() {
- try!(link_to(cmd, cx, unit));
+ link_to(cmd, cx, unit)?;
}
}
@@ -581,7 +581,7 @@ fn build_deps_args(cmd: &mut CommandPrototype, cx: &Context, unit: &Unit)
-> CargoResult<()> {
let layout = cx.layout(unit.pkg, unit.kind);
- for filename in try!(cx.target_filenames(unit)) {
+ for filename in cx.target_filenames(unit)? {
if let Ok((prefix, suffix)) = cx.staticlib(unit.kind) {
if filename.starts_with(prefix) && filename.ends_with(suffix) {
continue
@@ -609,8 +609,8 @@ pub fn process(cmd: CommandType, pkg: &Package,
// We want to use the same environment and such as normal processes, but we
// want to override the dylib search path with the one we just calculated.
- let search_path = try!(join_paths(&search_path, util::dylib_path_envvar()));
- let mut cmd = try!(cx.compilation.process(cmd, pkg));
+ let search_path = join_paths(&search_path, util::dylib_path_envvar())?;
+ let mut cmd = cx.compilation.process(cmd, pkg)?;
cmd.env(util::dylib_path_envvar(), &search_path);
Ok(cmd)
}
diff --git a/src/cargo/ops/cargo_test.rs b/src/cargo/ops/cargo_test.rs
index d4d20fea7d7..dba961d47be 100644
--- a/src/cargo/ops/cargo_test.rs
+++ b/src/cargo/ops/cargo_test.rs
@@ -15,12 +15,12 @@ pub struct TestOptions<'a> {
pub fn run_tests(manifest_path: &Path,
options: &TestOptions,
test_args: &[String]) -> CargoResult<Option<CargoTestError>> {
- let compilation = try!(compile_tests(manifest_path, options));
+ let compilation = compile_tests(manifest_path, options)?;
if options.no_run {
return Ok(None)
}
- let mut errors = try!(run_unit_tests(options, test_args, &compilation));
+ let mut errors = run_unit_tests(options, test_args, &compilation)?;
// If we have an error and want to fail fast, return
if !errors.is_empty() && !options.no_fail_fast {
@@ -36,7 +36,7 @@ pub fn run_tests(manifest_path: &Path,
}
}
- errors.extend(try!(run_doc_tests(options, test_args, &compilation)));
+ errors.extend(run_doc_tests(options, test_args, &compilation)?);
if errors.is_empty() {
Ok(None)
} else {
@@ -49,12 +49,12 @@ pub fn run_benches(manifest_path: &Path,
args: &[String]) -> CargoResult<Option<CargoTestError>> {
let mut args = args.to_vec();
args.push("--bench".to_string());
- let compilation = try!(compile_tests(manifest_path, options));
+ let compilation = compile_tests(manifest_path, options)?;
if options.no_run {
return Ok(None)
}
- let errors = try!(run_unit_tests(options, &args, &compilation));
+ let errors = run_unit_tests(options, &args, &compilation)?;
match errors.len() {
0 => Ok(None),
_ => Ok(Some(CargoTestError::new(errors))),
@@ -87,7 +87,7 @@ fn run_unit_tests(options: &TestOptions,
Some(path) => path,
None => &**exe,
};
- let mut cmd = try!(compilation.target_process(exe, pkg));
+ let mut cmd = compilation.target_process(exe, pkg)?;
cmd.args(test_args);
try!(config.shell().concise(|shell| {
shell.status("Running", to_display.display().to_string())
@@ -127,8 +127,8 @@ fn run_doc_tests(options: &TestOptions,
for (package, tests) in libs {
for (lib, name, crate_name) in tests {
- try!(config.shell().status("Doc-tests", name));
- let mut p = try!(compilation.rustdoc_process(package));
+ config.shell().status("Doc-tests", name)?;
+ let mut p = compilation.rustdoc_process(package)?;
p.arg("--test").arg(lib)
.arg("--crate-name").arg(&crate_name);
diff --git a/src/cargo/ops/lockfile.rs b/src/cargo/ops/lockfile.rs
index e959203726f..99c9b4bc2e2 100644
--- a/src/cargo/ops/lockfile.rs
+++ b/src/cargo/ops/lockfile.rs
@@ -26,12 +26,12 @@ pub fn load_lockfile(path: &Path, pkg: &Package, config: &Config)
};
let mut s = String::new();
- try!(f.read_to_string(&mut s));
+ f.read_to_string(&mut s)?;
- let table = toml::Value::Table(try!(cargo_toml::parse(&s, path)));
+ let table = toml::Value::Table(cargo_toml::parse(&s, path)?);
let mut d = toml::Decoder::new(table);
- let v: resolver::EncodableResolve = try!(Decodable::decode(&mut d));
- Ok(Some(try!(v.to_resolve(pkg, config))))
+ let v: resolver::EncodableResolve = Decodable::decode(&mut d)?;
+ Ok(Some(v.to_resolve(pkg, config)?))
}
pub fn write_pkg_lockfile(pkg: &Package, resolve: &Resolve) -> CargoResult<()> {
@@ -81,7 +81,7 @@ pub fn write_lockfile(dst: &Path, resolve: &Resolve) -> CargoResult<()> {
}
}
- try!(paths::write(dst, out.as_bytes()));
+ paths::write(dst, out.as_bytes())?;
Ok(())
}
diff --git a/src/cargo/ops/registry.rs b/src/cargo/ops/registry.rs
index a02b80a6ba2..86d143ab152 100644
--- a/src/cargo/ops/registry.rs
+++ b/src/cargo/ops/registry.rs
@@ -34,15 +34,15 @@ pub fn publish(manifest_path: &Path,
token: Option<String>,
index: Option<String>,
verify: bool) -> CargoResult<()> {
- let pkg = try!(Package::for_path(&manifest_path, config));
+ let pkg = Package::for_path(&manifest_path, config)?;
if !pkg.publish() {
bail!("some crates cannot be published.\n\
`{}` is marked as unpublishable", pkg.name());
}
- let (mut registry, reg_id) = try!(registry(config, token, index));
- try!(verify_dependencies(&pkg, ®_id));
+ let (mut registry, reg_id) = registry(config, token, index)?;
+ verify_dependencies(&pkg, ®_id)?;
// Prepare a tarball, with a non-surpressable warning if metadata
// is missing since this is being put online.
@@ -50,8 +50,8 @@ pub fn publish(manifest_path: &Path,
false, true)).unwrap();
// Upload said tarball to the specified destination
- try!(config.shell().status("Uploading", pkg.package_id().to_string()));
- try!(transmit(&pkg, &tarball, &mut registry));
+ config.shell().status("Uploading", pkg.package_id().to_string())?;
+ transmit(&pkg, &tarball, &mut registry)?;
Ok(())
}
@@ -97,7 +97,7 @@ fn transmit(pkg: &Package, tarball: &Path, registry: &mut Registry)
ref keywords, ref readme, ref repository, ref license, ref license_file,
} = *manifest.metadata();
let readme = match *readme {
- Some(ref readme) => Some(try!(paths::read(&pkg.root().join(readme)))),
+ Some(ref readme) => Some(paths::read(&pkg.root().join(readme))?),
None => None,
};
match *license_file {
@@ -128,8 +128,8 @@ fn transmit(pkg: &Package, tarball: &Path, registry: &mut Registry)
}
pub fn registry_configuration(config: &Config) -> CargoResult<RegistryConfig> {
- let index = try!(config.get_string("registry.index")).map(|p| p.val);
- let token = try!(config.get_string("registry.token")).map(|p| p.val);
+ let index = config.get_string("registry.index")?.map(|p| p.val);
+ let token = config.get_string("registry.token")?.map(|p| p.val);
Ok(RegistryConfig { index: index, token: token })
}
@@ -140,19 +140,19 @@ pub fn registry(config: &Config,
let RegistryConfig {
token: token_config,
index: index_config,
- } = try!(registry_configuration(config));
+ } = registry_configuration(config)?;
let token = token.or(token_config);
let index = index.or(index_config).unwrap_or(RegistrySource::default_url());
- let index = try!(index.to_url().map_err(human));
+ let index = index.to_url().map_err(human)?;
let sid = SourceId::for_registry(&index);
let api_host = {
let mut src = RegistrySource::new(&sid, config);
try!(src.update().chain_error(|| {
human(format!("failed to update registry {}", index))
}));
- (try!(src.config())).api
+ (src.config()?).api
};
- let handle = try!(http_handle(config));
+ let handle = http_handle(config)?;
Ok((Registry::new_handle(api_host, token, handle), sid))
}
@@ -166,11 +166,11 @@ pub fn http_handle(config: &Config) -> CargoResult {
.connect_timeout(30_000 /* milliseconds */)
.low_speed_limit(10 /* bytes per second */)
.low_speed_timeout(30 /* seconds */);
- let handle = match try!(http_proxy(config)) {
+ let handle = match http_proxy(config)? {
Some(proxy) => handle.proxy(proxy),
None => handle,
};
- let handle = match try!(http_timeout(config)) {
+ let handle = match http_timeout(config)? {
Some(timeout) => handle.connect_timeout(timeout as usize)
.low_speed_timeout((timeout as usize) / 1000),
None => handle,
@@ -183,7 +183,7 @@ pub fn http_handle(config: &Config) -> CargoResult {
/// Favor cargo's `http.proxy`, then git's `http.proxy`. Proxies specified
/// via environment variables are picked up by libcurl.
fn http_proxy(config: &Config) -> CargoResult<Option<String>> {
- match try!(config.get_string("http.proxy")) {
+ match config.get_string("http.proxy")? {
Some(s) => return Ok(Some(s.val)),
None => {}
}
@@ -210,7 +210,7 @@ fn http_proxy(config: &Config) -> CargoResult<Option<String>> {
/// * https_proxy env var
/// * HTTPS_PROXY env var
pub fn http_proxy_exists(config: &Config) -> CargoResult<bool> {
- if try!(http_proxy(config)).is_some() {
+ if http_proxy(config)?.is_some() {
Ok(true)
} else {
Ok(["http_proxy", "HTTP_PROXY",
@@ -219,7 +219,7 @@ pub fn http_proxy_exists(config: &Config) -> CargoResult<bool> {
}
pub fn http_timeout(config: &Config) -> CargoResult<Option<i64>> {
- match try!(config.get_i64("http.timeout")) {
+ match config.get_i64("http.timeout")? {
Some(s) => return Ok(Some(s.val)),
None => {}
}
@@ -227,7 +227,7 @@ pub fn http_timeout(config: &Config) -> CargoResult<Option<i64>> {
}
pub fn registry_login(config: &Config, token: String) -> CargoResult<()> {
- let RegistryConfig { index, token: _ } = try!(registry_configuration(config));
+ let RegistryConfig { index, token: _ } = registry_configuration(config)?;
let mut map = HashMap::new();
let p = config.cwd().to_path_buf();
match index {
@@ -255,8 +255,8 @@ pub fn modify_owners(config: &Config, opts: &OwnersOptions) -> CargoResult<()> {
let name = match opts.krate {
Some(ref name) => name.clone(),
None => {
- let manifest_path = try!(find_root_manifest_for_wd(None, config.cwd()));
- let pkg = try!(Package::for_path(&manifest_path, config));
+ let manifest_path = find_root_manifest_for_wd(None, config.cwd())?;
+ let pkg = Package::for_path(&manifest_path, config)?;
pkg.name().to_string()
}
};
@@ -315,8 +315,8 @@ pub fn yank(config: &Config,
let name = match krate {
Some(name) => name,
None => {
- let manifest_path = try!(find_root_manifest_for_wd(None, config.cwd()));
- let pkg = try!(Package::for_path(&manifest_path, config));
+ let manifest_path = find_root_manifest_for_wd(None, config.cwd())?;
+ let pkg = Package::for_path(&manifest_path, config)?;
pkg.name().to_string()
}
};
@@ -325,15 +325,15 @@ pub fn yank(config: &Config,
None => bail!("a version must be specified to yank")
};
- let (mut registry, _) = try!(registry(config, token, index));
+ let (mut registry, _) = registry(config, token, index)?;
if undo {
- try!(config.shell().status("Unyank", format!("{}:{}", name, version)));
+ config.shell().status("Unyank", format!("{}:{}", name, version))?;
try!(registry.unyank(&name, &version).map_err(|e| {
human(format!("failed to undo a yank: {}", e))
}));
} else {
- try!(config.shell().status("Yank", format!("{}:{}", name, version)));
+ config.shell().status("Yank", format!("{}:{}", name, version))?;
try!(registry.yank(&name, &version).map_err(|e| {
human(format!("failed to yank: {}", e))
}));
@@ -354,7 +354,7 @@ pub fn search(query: &str,
}
}
- let (mut registry, _) = try!(registry(config, None, index));
+ let (mut registry, _) = registry(config, None, index)?;
let (crates, total_crates) = try!(registry.search(query, limit).map_err(|e| {
human(format!("failed to retrieve search results from the registry: {}", e))
}));
@@ -380,7 +380,7 @@ pub fn search(query: &str,
}
None => name
};
- try!(config.shell().say(line, BLACK));
+ config.shell().say(line, BLACK)?;
}
let search_max_limit = 100;
diff --git a/src/cargo/ops/resolve.rs b/src/cargo/ops/resolve.rs
index f5925a46c50..ef407757174 100644
--- a/src/cargo/ops/resolve.rs
+++ b/src/cargo/ops/resolve.rs
@@ -15,12 +15,12 @@ pub fn resolve_pkg(registry: &mut PackageRegistry,
package: &Package,
config: &Config)
-> CargoResult<Resolve> {
- let prev = try!(ops::load_pkg_lockfile(package, config));
+ let prev = ops::load_pkg_lockfile(package, config)?;
let resolve = try!(resolve_with_previous(registry, package,
Method::Everything,
prev.as_ref(), None));
if package.package_id().source_id().is_path() {
- try!(ops::write_pkg_lockfile(package, &resolve));
+ ops::write_pkg_lockfile(package, &resolve)?;
}
Ok(resolve)
}
@@ -110,7 +110,7 @@ pub fn resolve_with_previous<'a>(registry: &mut PackageRegistry,
None => summary,
};
- let mut resolved = try!(resolver::resolve(&summary, &method, registry));
+ let mut resolved = resolver::resolve(&summary, &method, registry)?;
match previous {
Some(r) => resolved.copy_metadata(r),
None => {}
diff --git a/src/cargo/sources/git/source.rs b/src/cargo/sources/git/source.rs
index 1ac3f183fd6..974bc4b5305 100644
--- a/src/cargo/sources/git/source.rs
+++ b/src/cargo/sources/git/source.rs
@@ -53,7 +53,7 @@ impl<'cfg> GitSource<'cfg> {
pub fn read_packages(&mut self) -> CargoResult<Vec<Package>> {
if self.path_source.is_none() {
- try!(self.update());
+ self.update()?;
}
self.path_source.as_mut().unwrap().read_packages()
}
@@ -124,7 +124,7 @@ pub fn canonicalize_url(url: &Url) -> Url {
impl<'cfg> Debug for GitSource<'cfg> {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
- try!(write!(f, "git repo at {}", self.remote.url()));
+ write!(f, "git repo at {}", self.remote.url())?;
match self.reference.to_ref_string() {
Some(s) => write!(f, " ({})", s),
@@ -179,17 +179,17 @@ impl<'cfg> Source for GitSource<'cfg> {
format!("git repository `{}`", self.remote.url())));
trace!("updating git source `{:?}`", self.remote);
- let repo = try!(self.remote.checkout(&db_path));
- let rev = try!(repo.rev_for(&self.reference));
+ let repo = self.remote.checkout(&db_path)?;
+ let rev = repo.rev_for(&self.reference)?;
(repo, rev)
} else {
- (try!(self.remote.db_at(&db_path)), actual_rev.unwrap())
+ (self.remote.db_at(&db_path)?, actual_rev.unwrap())
};
// Copy the database to the checkout location. After this we could drop
// the lock on the database as we no longer needed it, but we leave it
// in scope so the destructors here won't tamper with too much.
- try!(repo.copy_to(actual_rev.clone(), &checkout_path));
+ repo.copy_to(actual_rev.clone(), &checkout_path)?;
let source_id = self.source_id.with_precise(Some(actual_rev.to_string()));
let path_source = PathSource::new_recursive(&checkout_path,
diff --git a/src/cargo/sources/git/utils.rs b/src/cargo/sources/git/utils.rs
index d865262d080..b2ed15fd0f3 100644
--- a/src/cargo/sources/git/utils.rs
+++ b/src/cargo/sources/git/utils.rs
@@ -105,7 +105,7 @@ impl GitRemote {
pub fn rev_for(&self, path: &Path, reference: &GitReference)
-> CargoResult<GitRevision> {
- let db = try!(self.db_at(path));
+ let db = self.db_at(path)?;
db.rev_for(reference)
}
@@ -132,7 +132,7 @@ impl GitRemote {
}
pub fn db_at(&self, db_path: &Path) -> CargoResult<GitDatabase> {
- let repo = try!(git2::Repository::open(db_path));
+ let repo = git2::Repository::open(db_path)?;
Ok(GitDatabase {
remote: self.clone(),
path: db_path.to_path_buf(),
@@ -150,11 +150,11 @@ impl GitRemote {
fn clone_into(&self, dst: &Path) -> CargoResult<git2::Repository> {
let url = self.url.to_string();
if fs::metadata(&dst).is_ok() {
- try!(fs::remove_dir_all(dst));
+ fs::remove_dir_all(dst)?;
}
- try!(fs::create_dir_all(dst));
- let repo = try!(git2::Repository::init_bare(dst));
- try!(fetch(&repo, &url, "refs/heads/*:refs/heads/*"));
+ fs::create_dir_all(dst)?;
+ let repo = git2::Repository::init_bare(dst)?;
+ fetch(&repo, &url, "refs/heads/*:refs/heads/*")?;
Ok(repo)
}
}
@@ -170,13 +170,13 @@ impl GitDatabase {
Ok(repo) => {
let checkout = GitCheckout::new(dest, self, rev, repo);
if !checkout.is_fresh() {
- try!(checkout.fetch());
- try!(checkout.reset());
+ checkout.fetch()?;
+ checkout.reset()?;
assert!(checkout.is_fresh());
}
checkout
}
- Err(..) => try!(GitCheckout::clone_into(dest, self, rev)),
+ Err(..) => GitCheckout::clone_into(dest, self, rev)?,
};
try!(checkout.update_submodules().chain_error(|| {
internal("failed to update submodules")
@@ -189,9 +189,9 @@ impl GitDatabase {
GitReference::Tag(ref s) => {
try!((|| {
let refname = format!("refs/tags/{}", s);
- let id = try!(self.repo.refname_to_id(&refname));
- let obj = try!(self.repo.find_object(id, None));
- let obj = try!(obj.peel(ObjectType::Commit));
+ let id = self.repo.refname_to_id(&refname)?;
+ let obj = self.repo.find_object(id, None)?;
+ let obj = obj.peel(ObjectType::Commit)?;
Ok(obj.id())
}).chain_error(|| {
human(format!("failed to find tag `{}`", s))
@@ -199,7 +199,7 @@ impl GitDatabase {
}
GitReference::Branch(ref s) => {
try!((|| {
- let b = try!(self.repo.find_branch(s, git2::BranchType::Local));
+ let b = self.repo.find_branch(s, git2::BranchType::Local)?;
b.get().target().chain_error(|| {
human(format!("branch `{}` did not have a target", s))
})
@@ -208,7 +208,7 @@ impl GitDatabase {
}))
}
GitReference::Rev(ref s) => {
- let obj = try!(self.repo.revparse_single(s));
+ let obj = self.repo.revparse_single(s)?;
obj.id()
}
};
@@ -216,7 +216,7 @@ impl GitDatabase {
}
pub fn has_ref(&self, reference: &str) -> CargoResult<()> {
- try!(self.repo.revparse_single(reference));
+ self.repo.revparse_single(reference)?;
Ok(())
}
}
@@ -238,9 +238,9 @@ impl<'a> GitCheckout<'a> {
revision: GitRevision)
-> CargoResult<GitCheckout<'a>>
{
- let repo = try!(GitCheckout::clone_repo(database.path(), into));
+ let repo = GitCheckout::clone_repo(database.path(), into)?;
let checkout = GitCheckout::new(into, database, revision, repo);
- try!(checkout.reset());
+ checkout.reset()?;
Ok(checkout)
}
@@ -257,7 +257,7 @@ impl<'a> GitCheckout<'a> {
}));
}
- let url = try!(source.to_url().map_err(human));
+ let url = source.to_url().map_err(human)?;
let url = url.to_string();
let repo = try!(git2::Repository::clone(&url, into).chain_error(|| {
internal(format!("failed to clone {} into {}", source.display(),
@@ -278,10 +278,10 @@ impl<'a> GitCheckout<'a> {
fn fetch(&self) -> CargoResult<()> {
info!("fetch {}", self.repo.path().display());
- let url = try!(self.database.path.to_url().map_err(human));
+ let url = self.database.path.to_url().map_err(human)?;
let url = url.to_string();
let refspec = "refs/heads/*:refs/heads/*";
- try!(fetch(&self.repo, &url, refspec));
+ fetch(&self.repo, &url, refspec)?;
Ok(())
}
@@ -297,9 +297,9 @@ impl<'a> GitCheckout<'a> {
let ok_file = self.location.join(".cargo-ok");
let _ = fs::remove_file(&ok_file);
info!("reset {} to {}", self.repo.path().display(), self.revision);
- let object = try!(self.repo.find_object(self.revision.0, None));
- try!(self.repo.reset(&object, git2::ResetType::Hard, None));
- try!(File::create(ok_file));
+ let object = self.repo.find_object(self.revision.0, None)?;
+ self.repo.reset(&object, git2::ResetType::Hard, None)?;
+ File::create(ok_file)?;
Ok(())
}
@@ -309,8 +309,8 @@ impl<'a> GitCheckout<'a> {
fn update_submodules(repo: &git2::Repository) -> CargoResult<()> {
info!("update submodules for: {:?}", repo.workdir().unwrap());
- for mut child in try!(repo.submodules()).into_iter() {
- try!(child.init(false));
+ for mut child in repo.submodules()?.into_iter() {
+ child.init(false)?;
let url = try!(child.url().chain_error(|| {
internal("non-utf8 url for submodule")
}));
@@ -327,7 +327,7 @@ impl<'a> GitCheckout<'a> {
// as the submodule's head, then we can bail out and go to the
// next submodule.
let head_and_repo = child.open().and_then(|repo| {
- let target = try!(repo.head()).target();
+ let target = repo.head()?.target();
Ok((target, repo))
});
let repo = match head_and_repo {
@@ -339,7 +339,7 @@ impl<'a> GitCheckout<'a> {
}
Err(..) => {
let path = repo.workdir().unwrap().join(child.path());
- try!(git2::Repository::clone(url, &path))
+ git2::Repository::clone(url, &path)?
}
};
@@ -350,9 +350,9 @@ impl<'a> GitCheckout<'a> {
child.name().unwrap_or(""), url))
}));
- let obj = try!(repo.find_object(head, None));
- try!(repo.reset(&obj, git2::ResetType::Hard, None));
- try!(update_submodules(&repo));
+ let obj = repo.find_object(head, None)?;
+ repo.reset(&obj, git2::ResetType::Hard, None)?;
+ update_submodules(&repo)?;
}
Ok(())
}
@@ -528,14 +528,14 @@ pub fn fetch(repo: &git2::Repository, url: &str,
refspec: &str) -> CargoResult<()> {
// Create a local anonymous remote in the repository to fetch the url
- with_authentication(url, &try!(repo.config()), |f| {
+ with_authentication(url, &repo.config()?, |f| {
let mut cb = git2::RemoteCallbacks::new();
cb.credentials(f);
- let mut remote = try!(repo.remote_anonymous(&url));
+ let mut remote = repo.remote_anonymous(&url)?;
let mut opts = git2::FetchOptions::new();
opts.remote_callbacks(cb)
.download_tags(git2::AutotagOption::All);
- try!(remote.fetch(&[refspec], Some(&mut opts), None));
+ remote.fetch(&[refspec], Some(&mut opts), None)?;
Ok(())
})
}
diff --git a/src/cargo/sources/path.rs b/src/cargo/sources/path.rs
index 1492d99a70b..df6d16644b0 100644
--- a/src/cargo/sources/path.rs
+++ b/src/cargo/sources/path.rs
@@ -57,7 +57,7 @@ impl<'cfg> PathSource<'cfg> {
pub fn root_package(&mut self) -> CargoResult<Package> {
trace!("root_package; source={:?}", self);
- try!(self.update());
+ self.update()?;
match self.packages.iter().find(|p| p.root() == &*self.path) {
Some(pkg) => Ok(pkg.clone()),
@@ -123,7 +123,7 @@ impl<'cfg> PathSource<'cfg> {
// check to see if we are indeed part of the index. If not, then
// this is likely an unrelated git repo, so keep going.
if let Ok(repo) = git2::Repository::open(cur) {
- let index = try!(repo.index());
+ let index = repo.index()?;
let path = util::without_prefix(root, cur)
.unwrap().join("Cargo.toml");
if index.get_path(&path, 0).is_some() {
@@ -147,7 +147,7 @@ impl<'cfg> PathSource<'cfg> {
filter: &mut FnMut(&Path) -> bool)
-> CargoResult<Vec<PathBuf>> {
warn!("list_files_git {}", pkg.package_id());
- let index = try!(repo.index());
+ let index = repo.index()?;
let root = try!(repo.workdir().chain_error(|| {
internal_error("Can't list files on a bare repository.", "")
}));
@@ -172,7 +172,7 @@ impl<'cfg> PathSource<'cfg> {
if let Some(suffix) = util::without_prefix(pkg_path, &root) {
opts.pathspec(suffix);
}
- let statuses = try!(repo.statuses(Some(&mut opts)));
+ let statuses = repo.statuses(Some(&mut opts))?;
let untracked = statuses.iter().filter_map(|entry| {
match entry.status() {
git2::STATUS_WT_NEW => Some((join(&root, entry.path_bytes()), None)),
@@ -183,7 +183,7 @@ impl<'cfg> PathSource<'cfg> {
let mut subpackages_found = Vec::new();
'outer: for (file_path, is_dir) in index_files.chain(untracked) {
- let file_path = try!(file_path);
+ let file_path = file_path?;
// Filter out files blatantly outside this package. This is helped a
// bit obove via the `pathspec` function call, but we need to filter
@@ -232,7 +232,7 @@ impl<'cfg> PathSource<'cfg> {
let rel = rel.replace(r"\", "/");
match repo.find_submodule(&rel).and_then(|s| s.open()) {
Ok(repo) => {
- let files = try!(self.list_files_git(pkg, repo, filter));
+ let files = self.list_files_git(pkg, repo, filter)?;
ret.extend(files.into_iter());
}
Err(..) => {
@@ -268,7 +268,7 @@ impl<'cfg> PathSource<'cfg> {
fn list_files_walk(&self, pkg: &Package, filter: &mut FnMut(&Path) -> bool)
-> CargoResult<Vec<PathBuf>> {
let mut ret = Vec::new();
- try!(PathSource::walk(pkg.root(), &mut ret, true, filter));
+ PathSource::walk(pkg.root(), &mut ret, true, filter)?;
Ok(ret)
}
@@ -285,8 +285,8 @@ impl<'cfg> PathSource<'cfg> {
if !is_root && fs::metadata(&path.join("Cargo.toml")).is_ok() {
return Ok(())
}
- for dir in try!(fs::read_dir(path)) {
- let dir = try!(dir).path();
+ for dir in fs::read_dir(path)? {
+ let dir = dir?.path();
let name = dir.file_name().and_then(|s| s.to_str());
// Skip dotfile directories
if name.map(|s| s.starts_with(".")) == Some(true) {
@@ -298,7 +298,7 @@ impl<'cfg> PathSource<'cfg> {
_ => {}
}
}
- try!(PathSource::walk(&dir, ret, false, filter));
+ PathSource::walk(&dir, ret, false, filter)?;
}
Ok(())
}
@@ -319,7 +319,7 @@ impl<'cfg> Registry for PathSource<'cfg> {
impl<'cfg> Source for PathSource<'cfg> {
fn update(&mut self) -> CargoResult<()> {
if !self.updated {
- let packages = try!(self.read_packages());
+ let packages = self.read_packages()?;
self.packages.extend(packages.into_iter());
self.updated = true;
}
@@ -343,7 +343,7 @@ impl<'cfg> Source for PathSource<'cfg> {
let mut max = FileTime::zero();
let mut max_path = PathBuf::from("");
- for file in try!(self.list_files(pkg)) {
+ for file in self.list_files(pkg)? {
// An fs::stat error here is either because path is a
// broken symlink, a permissions error, or a race
// condition where this path was rm'ed - either way,
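The path.rs hunks above are the whole migration in miniature: every single-expression `try!(expr)` becomes `expr?`. Both forms return early from the enclosing function on `Err`, converting the error with `From::from` on the way out. A small self-contained sketch of the equivalence (the file names and helpers here are illustrative, not Cargo code):

```rust
use std::fs::File;
use std::io::{self, Read};

// New form: `?` propagates the error, converting it with `From::from`.
fn read_file(path: &str) -> io::Result<String> {
    let mut f = File::open(path)?; // was: let mut f = try!(File::open(path));
    let mut s = String::new();
    f.read_to_string(&mut s)?;     // was: try!(f.read_to_string(&mut s));
    Ok(s)
}

// Roughly what either form expands to for a single call.
fn open_expanded(path: &str) -> io::Result<File> {
    match File::open(path) {
        Ok(f) => Ok(f),
        Err(e) => Err(From::from(e)),
    }
}

fn main() {
    let _ = open_expanded("Cargo.toml");
    println!("{:?}", read_file("Cargo.toml").map(|s| s.len()));
}
```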
diff --git a/src/cargo/sources/registry.rs b/src/cargo/sources/registry.rs
index 9652d8145e8..9d6194a1598 100644
--- a/src/cargo/sources/registry.rs
+++ b/src/cargo/sources/registry.rs
@@ -251,7 +251,7 @@ impl<'cfg> RegistrySource<'cfg> {
/// This is the main cargo registry by default, but it can be overridden in
/// a .cargo/config
pub fn url(config: &Config) -> CargoResult<Url> {
- let config = try!(ops::registry_configuration(config));
+ let config = ops::registry_configuration(config)?;
let url = config.index.unwrap_or(DEFAULT.to_string());
url.to_url().map_err(human)
}
@@ -269,8 +269,8 @@ impl<'cfg> RegistrySource<'cfg> {
self.config,
"the registry index"));
let path = lock.path().parent().unwrap();
- let contents = try!(paths::read(&path.join("config.json")));
- let config = try!(json::decode(&contents));
+ let contents = paths::read(&path.join("config.json"))?;
+ let config = json::decode(&contents)?;
Ok(config)
}
@@ -285,23 +285,23 @@ impl<'cfg> RegistrySource<'cfg> {
-> CargoResult<FileLock> {
let filename = format!("{}-{}.crate", pkg.name(), pkg.version());
let path = Path::new(&filename);
- let mut dst = try!(self.cache_path.open_rw(path, self.config, &filename));
- let meta = try!(dst.file().metadata());
+ let mut dst = self.cache_path.open_rw(path, self.config, &filename)?;
+ let meta = dst.file().metadata()?;
if meta.len() > 0 {
return Ok(dst)
}
- try!(self.config.shell().status("Downloading", pkg));
+ self.config.shell().status("Downloading", pkg)?;
- let expected_hash = try!(self.hash(pkg));
+ let expected_hash = self.hash(pkg)?;
let handle = match self.handle {
Some(ref mut handle) => handle,
None => {
- self.handle = Some(try!(ops::http_handle(self.config)));
+ self.handle = Some(ops::http_handle(self.config)?);
self.handle.as_mut().unwrap()
}
};
// TODO: don't download into memory (curl-rust doesn't expose it)
- let resp = try!(handle.get(url.to_string()).follow_redirects(true).exec());
+ let resp = handle.get(url.to_string()).follow_redirects(true).exec()?;
if resp.get_code() != 200 && resp.get_code() != 0 {
return Err(internal(format!("failed to get 200 response from {}\n{}",
url, resp)))
@@ -317,8 +317,8 @@ impl<'cfg> RegistrySource<'cfg> {
bail!("failed to verify the checksum of `{}`", pkg)
}
- try!(dst.write_all(resp.get_body()));
- try!(dst.seek(SeekFrom::Start(0)));
+ dst.write_all(resp.get_body())?;
+ dst.seek(SeekFrom::Start(0))?;
Ok(dst)
}
@@ -329,7 +329,7 @@ impl<'cfg> RegistrySource<'cfg> {
return Ok(s.clone())
}
// Ok, we're missing the key, so parse the index file to load it.
- try!(self.summaries(pkg.name()));
+ self.summaries(pkg.name())?;
self.hashes.get(&key).chain_error(|| {
internal(format!("no hash listed for {}", pkg))
}).map(|s| s.clone())
@@ -345,7 +345,7 @@ impl<'cfg> RegistrySource<'cfg> {
-> CargoResult {
let dst = self.src_path.join(&format!("{}-{}", pkg.name(),
pkg.version()));
- try!(dst.create_dir());
+ dst.create_dir()?;
// Note that we've already got the `tarball` locked above, and that
// implies a lock on the unpacked destination as well, so this access
// via `into_path_unlocked` should be ok.
@@ -355,10 +355,10 @@ impl<'cfg> RegistrySource<'cfg> {
return Ok(dst)
}
- let gz = try!(GzDecoder::new(tarball.file()));
+ let gz = GzDecoder::new(tarball.file())?;
let mut tar = Archive::new(gz);
- try!(tar.unpack(dst.parent().unwrap()));
- try!(File::create(&ok));
+ tar.unpack(dst.parent().unwrap())?;
+ File::create(&ok)?;
Ok(dst)
}
@@ -390,7 +390,7 @@ impl<'cfg> RegistrySource<'cfg> {
let summaries = match file {
Ok(mut f) => {
let mut contents = String::new();
- try!(f.read_to_string(&mut contents));
+ f.read_to_string(&mut contents)?;
let ret: CargoResult<Vec<(Summary, bool)>>;
ret = contents.lines().filter(|l| l.trim().len() > 0)
.map(|l| self.parse_registry_package(l))
@@ -417,14 +417,14 @@ impl<'cfg> RegistrySource<'cfg> {
-> CargoResult<(Summary, bool)> {
let RegistryPackage {
name, vers, cksum, deps, features, yanked
- } = try!(json::decode::<RegistryPackage>(line));
- let pkgid = try!(PackageId::new(&name, &vers, &self.source_id));
+ } = json::decode::<RegistryPackage>(line)?;
+ let pkgid = PackageId::new(&name, &vers, &self.source_id)?;
let deps: CargoResult<Vec<Dependency>> = deps.into_iter().map(|dep| {
self.parse_registry_dependency(dep)
}).collect();
- let deps = try!(deps);
+ let deps = deps?;
self.hashes.insert((name, vers), cksum);
- Ok((try!(Summary::new(pkgid, deps, features)), yanked.unwrap_or(false)))
+ Ok((Summary::new(pkgid, deps, features)?, yanked.unwrap_or(false)))
}
/// Converts an encoded dependency in the registry to a cargo dependency
@@ -443,7 +443,7 @@ impl<'cfg> RegistrySource<'cfg> {
};
let platform = match target {
- Some(target) => Some(try!(target.parse())),
+ Some(target) => Some(target.parse()?),
None => None,
};
@@ -467,7 +467,7 @@ impl<'cfg> RegistrySource<'cfg> {
if self.updated {
return Ok(())
}
- try!(self.checkout_path.create_dir());
+ self.checkout_path.create_dir()?;
let lock = try!(self.checkout_path.open_rw(Path::new(INDEX_LOCK),
self.config,
"the registry index"));
@@ -489,10 +489,10 @@ impl<'cfg> RegistrySource<'cfg> {
// git reset --hard origin/master
let reference = "refs/remotes/origin/master";
- let oid = try!(repo.refname_to_id(reference));
+ let oid = repo.refname_to_id(reference)?;
trace!("[{}] updating to rev {}", self.source_id, oid);
- let object = try!(repo.find_object(oid, None));
- try!(repo.reset(&object, git2::ResetType::Hard, None));
+ let object = repo.find_object(oid, None)?;
+ repo.reset(&object, git2::ResetType::Hard, None)?;
self.updated = true;
self.cache.clear();
Ok(())
@@ -506,16 +506,16 @@ impl<'cfg> Registry for RegistrySource<'cfg> {
// come back with no summaries, then our registry may need to be
// updated, so we fall back to performing a lazy update.
if dep.source_id().precise().is_some() {
- let mut summaries = try!(self.summaries(dep.name())).iter().map(|s| {
+ let mut summaries = self.summaries(dep.name())?.iter().map(|s| {
s.0.clone()
}).collect::<Vec<_>>();
- if try!(summaries.query(dep)).is_empty() {
- try!(self.do_update());
+ if summaries.query(dep)?.is_empty() {
+ self.do_update()?;
}
}
let mut summaries = {
- let summaries = try!(self.summaries(dep.name()));
+ let summaries = self.summaries(dep.name())?;
summaries.iter().filter(|&&(_, yanked)| {
dep.source_id().precise().is_some() || !yanked
}).map(|s| s.0.clone()).collect::<Vec<_>>()
@@ -549,14 +549,14 @@ impl<'cfg> Source for RegistrySource<'cfg> {
// `Some("locked")` as other `Some` values indicate a `cargo update
// --precise` request
if self.source_id.precise() != Some("locked") {
- try!(self.do_update());
+ self.do_update()?;
}
Ok(())
}
fn download(&mut self, package: &PackageId) -> CargoResult<Package> {
- let config = try!(self.config());
- let url = try!(config.dl.to_url().map_err(internal));
+ let config = self.config()?;
+ let url = config.dl.to_url().map_err(internal)?;
let mut url = url.clone();
url.path_mut().unwrap().push(package.name().to_string());
url.path_mut().unwrap().push(package.version().to_string());
@@ -570,7 +570,7 @@ impl<'cfg> Source for RegistrySource<'cfg> {
}));
let mut src = PathSource::new(&path, &self.source_id, self.config);
- try!(src.update());
+ src.update()?;
src.download(package)
}
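Note how `?` composes with explicit conversions in this file, e.g. `config.dl.to_url().map_err(internal)?` in `download`: convert the error first, then propagate the converted value. A hedged standalone sketch of the same shape, with `DownloadError` as a hypothetical stand-in for Cargo's internal boxed errors:

```rust
use std::fs;

// Hypothetical error type standing in for Cargo's internal error types.
#[derive(Debug)]
struct DownloadError(String);

// Convert explicitly with map_err, then let `?` propagate the result.
fn tarball_size(path: &str) -> Result<u64, DownloadError> {
    let meta = fs::metadata(path)
        .map_err(|e| DownloadError(format!("failed to stat {}: {}", path, e)))?;
    Ok(meta.len())
}

fn main() {
    match tarball_size("Cargo.toml") {
        Ok(n) => println!("{} bytes", n),
        Err(e) => println!("{:?}", e),
    }
}
```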
diff --git a/src/cargo/util/cfg.rs b/src/cargo/util/cfg.rs
index 95b20e508f2..eb2b2a520fd 100644
--- a/src/cargo/util/cfg.rs
+++ b/src/cargo/util/cfg.rs
@@ -42,7 +42,7 @@ impl FromStr for Cfg {
fn from_str(s: &str) -> CargoResult<Cfg> {
let mut p = Parser::new(s);
- let e = try!(p.cfg());
+ let e = p.cfg()?;
if p.t.next().is_some() {
bail!("malformed cfg value or key/value pair")
}
@@ -75,7 +75,7 @@ impl FromStr for CfgExpr {
fn from_str(s: &str) -> CargoResult<CfgExpr> {
let mut p = Parser::new(s);
- let e = try!(p.expr());
+ let e = p.expr()?;
if p.t.next().is_some() {
bail!("can only have one cfg-expression, consider using all() or \
any() explicitly")
@@ -101,9 +101,9 @@ impl<'a, T: fmt::Display> fmt::Display for CommaSep<'a, T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
for (i, v) in self.0.iter().enumerate() {
if i > 0 {
- try!(write!(f, ", "));
+ write!(f, ", ")?;
}
- try!(write!(f, "{}", v));
+ write!(f, "{}", v)?;
}
Ok(())
}
@@ -125,11 +125,11 @@ impl<'a> Parser<'a> {
Some(&Ok(Token::Ident(op @ "any"))) => {
self.t.next();
let mut e = Vec::new();
- try!(self.eat(Token::LeftParen));
+ self.eat(Token::LeftParen)?;
while !self.try(Token::RightParen) {
- e.push(try!(self.expr()));
+ e.push(self.expr()?);
if !self.try(Token::Comma) {
- try!(self.eat(Token::RightParen));
+ self.eat(Token::RightParen)?;
break
}
}
@@ -141,9 +141,9 @@ impl<'a> Parser<'a> {
}
Some(&Ok(Token::Ident("not"))) => {
self.t.next();
- try!(self.eat(Token::LeftParen));
- let e = try!(self.expr());
- try!(self.eat(Token::RightParen));
+ self.eat(Token::LeftParen)?;
+ let e = self.expr()?;
+ self.eat(Token::RightParen)?;
Ok(CfgExpr::Not(Box::new(e)))
}
Some(&Ok(..)) => self.cfg().map(CfgExpr::Value),
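The `CommaSep` hunk above shows the operator inside a `fmt::Display` impl: `write!` returns `fmt::Result`, so `?` threads formatting errors out of `fmt` exactly as `try!` did. A minimal sketch with an illustrative type of my own (not Cargo's):

```rust
use std::fmt;

struct CommaSeparated<'a>(&'a [&'a str]);

impl<'a> fmt::Display for CommaSeparated<'a> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        for (i, item) in self.0.iter().enumerate() {
            if i > 0 {
                write!(f, ", ")?; // bail out on the first formatting error
            }
            write!(f, "{}", item)?;
        }
        Ok(())
    }
}

fn main() {
    println!("{}", CommaSeparated(&["any", "unix", "windows"]));
}
```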
diff --git a/src/cargo/util/config.rs b/src/cargo/util/config.rs
index 66268be8792..230c3a73e74 100644
--- a/src/cargo/util/config.rs
+++ b/src/cargo/util/config.rs
@@ -49,9 +49,9 @@ impl Config {
target_dir: RefCell::new(None),
};
- try!(cfg.scrape_tool_config());
- try!(cfg.scrape_rustc_version());
- try!(cfg.scrape_target_dir_config());
+ cfg.scrape_tool_config()?;
+ cfg.scrape_rustc_version()?;
+ cfg.scrape_target_dir_config()?;
Ok(cfg)
}
@@ -102,7 +102,7 @@ impl Config {
pub fn values(&self) -> CargoResult<Ref<HashMap<String, ConfigValue>>> {
if !self.values_loaded.get() {
- try!(self.load_values());
+ self.load_values()?;
self.values_loaded.set(true);
}
Ok(self.values.borrow())
@@ -121,7 +121,7 @@ impl Config {
}
fn get(&self, key: &str) -> CargoResult<Option<ConfigValue>> {
- let vals = try!(self.values());
+ let vals = self.values()?;
let mut parts = key.split('.').enumerate();
let mut val = match vals.get(parts.next().unwrap().1) {
Some(val) => val,
@@ -162,7 +162,7 @@ impl Config {
match env::var(&format!("CARGO_{}", key)) {
Ok(value) => {
Ok(Some(Value {
- val: try!(value.parse()),
+ val: value.parse()?,
definition: Definition::Environment,
}))
}
@@ -171,10 +171,10 @@ impl Config {
}
pub fn get_string(&self, key: &str) -> CargoResult<Option<Value<String>>> {
- if let Some(v) = try!(self.get_env(key)) {
+ if let Some(v) = self.get_env(key)? {
return Ok(Some(v))
}
- match try!(self.get(key)) {
+ match self.get(key)? {
Some(CV::String(i, path)) => {
Ok(Some(Value {
val: i,
@@ -187,10 +187,10 @@ impl Config {
}
pub fn get_bool(&self, key: &str) -> CargoResult<Option<Value<bool>>> {
- if let Some(v) = try!(self.get_env(key)) {
+ if let Some(v) = self.get_env(key)? {
return Ok(Some(v))
}
- match try!(self.get(key)) {
+ match self.get(key)? {
Some(CV::Boolean(b, path)) => {
Ok(Some(Value {
val: b,
@@ -203,7 +203,7 @@ impl Config {
}
pub fn get_path(&self, key: &str) -> CargoResult<Option<Value<PathBuf>>> {
- if let Some(val) = try!(self.get_string(&key)) {
+ if let Some(val) = self.get_string(&key)? {
let is_path = val.val.contains("/") ||
(cfg!(windows) && val.val.contains("\\"));
let path = if is_path {
@@ -223,7 +223,7 @@ impl Config {
pub fn get_list(&self, key: &str)
-> CargoResult<Option<Value<Vec<(String, PathBuf)>>>> {
- match try!(self.get(key)) {
+ match self.get(key)? {
Some(CV::List(i, path)) => {
Ok(Some(Value {
val: i,
@@ -237,7 +237,7 @@ impl Config {
pub fn get_table(&self, key: &str)
-> CargoResult<Option<Value<HashMap<String, ConfigValue>>>> {
- match try!(self.get(key)) {
+ match self.get(key)? {
Some(CV::Table(i, path)) => {
Ok(Some(Value {
val: i,
@@ -250,10 +250,10 @@ impl Config {
}
pub fn get_i64(&self, key: &str) -> CargoResult<Option<Value<i64>>> {
- if let Some(v) = try!(self.get_env(key)) {
+ if let Some(v) = self.get_env(key)? {
return Ok(Some(v))
}
- match try!(self.get(key)) {
+ match self.get(key)? {
Some(CV::Integer(i, path)) => {
Ok(Some(Value {
val: i,
@@ -275,14 +275,14 @@ impl Config {
verbose: Option<bool>,
quiet: Option<bool>,
color: &Option<String>) -> CargoResult<()> {
- let cfg_verbose = try!(self.get_bool("term.verbose")).map(|v| v.val);
- let cfg_color = try!(self.get_string("term.color")).map(|v| v.val);
+ let cfg_verbose = self.get_bool("term.verbose")?.map(|v| v.val);
+ let cfg_color = self.get_string("term.color")?.map(|v| v.val);
let verbose = verbose.or(cfg_verbose).unwrap_or(false);
let quiet = quiet.unwrap_or(false);
let color = color.as_ref().or(cfg_color.as_ref());
- try!(self.shell().set_verbosity(verbose, quiet));
- try!(self.shell().set_color_config(color.map(|s| &s[..])));
+ self.shell().set_verbosity(verbose, quiet)?;
+ self.shell().set_color_config(color.map(|s| &s[..]))?;
Ok(())
}
@@ -292,7 +292,7 @@ impl Config {
try!(walk_tree(&self.cwd, |mut file, path| {
let mut contents = String::new();
- try!(file.read_to_string(&mut contents));
+ file.read_to_string(&mut contents)?;
let table = try!(cargo_toml::parse(&contents, &path).chain_error(|| {
human(format!("could not parse TOML configuration in `{}`",
path.display()))
@@ -302,7 +302,7 @@ impl Config {
human(format!("failed to load TOML configuration from `{}`",
path.display()))
}));
- try!(cfg.merge(value));
+ cfg.merge(value)?;
Ok(())
}).chain_error(|| human("Couldn't load Cargo configuration")));
@@ -315,20 +315,20 @@ impl Config {
}
fn scrape_tool_config(&mut self) -> CargoResult<()> {
- self.rustc = try!(self.get_tool("rustc"));
- self.rustdoc = try!(self.get_tool("rustdoc"));
+ self.rustc = self.get_tool("rustc")?;
+ self.rustdoc = self.get_tool("rustdoc")?;
Ok(())
}
fn scrape_rustc_version(&mut self) -> CargoResult<()> {
- self.rustc_info = try!(Rustc::new(&self.rustc));
+ self.rustc_info = Rustc::new(&self.rustc)?;
Ok(())
}
fn scrape_target_dir_config(&mut self) -> CargoResult<()> {
if let Some(dir) = env::var_os("CARGO_TARGET_DIR") {
*self.target_dir.borrow_mut() = Some(self.cwd.join(dir));
- } else if let Some(val) = try!(self.get_path("build.target-dir")) {
+ } else if let Some(val) = self.get_path("build.target-dir")? {
*self.target_dir.borrow_mut() = Some(val.val);
}
Ok(())
@@ -341,7 +341,7 @@ impl Config {
}
let var = format!("build.{}", tool);
- if let Some(tool_path) = try!(self.get_path(&var)) {
+ if let Some(tool_path) = self.get_path(&var)? {
return Ok(tool_path.val);
}
@@ -384,10 +384,10 @@ impl fmt::Debug for ConfigValue {
CV::String(ref s, ref path) => write!(f, "{} (from {})", s,
path.display()),
CV::List(ref list, ref path) => {
- try!(write!(f, "["));
+ write!(f, "[")?;
for (i, &(ref s, ref path)) in list.iter().enumerate() {
- if i > 0 { try!(write!(f, ", ")); }
- try!(write!(f, "{} (from {})", s, path.display()));
+ if i > 0 { write!(f, ", ")?; }
+ write!(f, "{} (from {})", s, path.display())?;
}
write!(f, "] (from {})", path.display())
}
@@ -590,9 +590,9 @@ fn walk_tree<F>(pwd: &Path, mut walk: F) -> CargoResult<()>
loop {
let possible = current.join(".cargo").join("config");
if fs::metadata(&possible).is_ok() {
- let file = try!(File::open(&possible));
+ let file = File::open(&possible)?;
- try!(walk(file, &possible));
+ walk(file, &possible)?;
}
match current.parent() {
Some(p) => current = p,
@@ -610,8 +610,8 @@ fn walk_tree<F>(pwd: &Path, mut walk: F) -> CargoResult<()>
if !pwd.starts_with(&home) {
let config = home.join("config");
if fs::metadata(&config).is_ok() {
- let file = try!(File::open(&config));
- try!(walk(file, &config));
+ let file = File::open(&config)?;
+ walk(file, &config)?;
}
}
@@ -629,7 +629,7 @@ pub fn set_config(cfg: &Config,
// 3. This blows away the previous ordering of a file.
let mut file = match loc {
Location::Global => {
- try!(cfg.home_path.create_dir());
+ cfg.home_path.create_dir()?;
try!(cfg.home_path.open_rw(Path::new("config"), cfg,
"the global config file"))
}
@@ -637,11 +637,11 @@ pub fn set_config(cfg: &Config,
};
let mut contents = String::new();
let _ = file.read_to_string(&mut contents);
- let mut toml = try!(cargo_toml::parse(&contents, file.path()));
+ let mut toml = cargo_toml::parse(&contents, file.path())?;
toml.insert(key.to_string(), value.into_toml());
let contents = toml::Value::Table(toml).to_string();
- try!(file.seek(SeekFrom::Start(0)));
- try!(file.write_all(contents.as_bytes()));
+ file.seek(SeekFrom::Start(0))?;
+ file.write_all(contents.as_bytes())?;
Ok(())
}
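A detail worth keeping in mind for config.rs: `?` performs the same implicit `From::from` conversion that `try!` did, which is what lets a single `CargoResult` function propagate failures from different underlying error types. A minimal sketch of that mechanism with a hypothetical error enum (Cargo itself uses a boxed error type rather than an enum like this):

```rust
use std::fs::File;
use std::io::{self, Read};
use std::num::ParseIntError;

#[derive(Debug)]
enum ConfigError {
    Io(io::Error),
    Parse(ParseIntError),
}

impl From<io::Error> for ConfigError {
    fn from(e: io::Error) -> ConfigError { ConfigError::Io(e) }
}

impl From<ParseIntError> for ConfigError {
    fn from(e: ParseIntError) -> ConfigError { ConfigError::Parse(e) }
}

// Both `?`s compile because `From` bridges each error type into ConfigError.
fn read_port(path: &str) -> Result<u16, ConfigError> {
    let mut s = String::new();
    File::open(path)?.read_to_string(&mut s)?;
    Ok(s.trim().parse::<u16>()?)
}

fn main() {
    println!("{:?}", read_port("port.txt"));
}
```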
diff --git a/src/cargo/util/errors.rs b/src/cargo/util/errors.rs
index bb213b25ee8..9168b4c1c4e 100644
--- a/src/cargo/util/errors.rs
+++ b/src/cargo/util/errors.rs
@@ -189,9 +189,9 @@ struct ConcreteCargoError {
impl fmt::Display for ConcreteCargoError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- try!(write!(f, "{}", self.description));
+ write!(f, "{}", self.description)?;
if let Some(ref s) = self.detail {
- try!(write!(f, " ({})", s));
+ write!(f, " ({})", s)?;
}
Ok(())
}
diff --git a/src/cargo/util/flock.rs b/src/cargo/util/flock.rs
index d40978dbc7c..fb75a14393c 100644
--- a/src/cargo/util/flock.rs
+++ b/src/cargo/util/flock.rs
@@ -48,16 +48,16 @@ impl FileLock {
/// needs to be cleared out as it may be corrupt.
pub fn remove_siblings(&self) -> io::Result<()> {
let path = self.path();
- for entry in try!(path.parent().unwrap().read_dir()) {
- let entry = try!(entry);
+ for entry in path.parent().unwrap().read_dir()? {
+ let entry = entry?;
if Some(&entry.file_name()[..]) == path.file_name() {
continue
}
- let kind = try!(entry.file_type());
+ let kind = entry.file_type()?;
if kind.is_dir() {
- try!(fs::remove_dir_all(entry.path()));
+ fs::remove_dir_all(entry.path())?;
} else {
- try!(fs::remove_file(entry.path()));
+ fs::remove_file(entry.path())?;
}
}
Ok(())
@@ -193,7 +193,7 @@ impl Filesystem {
// create the directory and then continue.
let f = try!(opts.open(&path).or_else(|e| {
if e.kind() == io::ErrorKind::NotFound && state == State::Exclusive {
- try!(create_dir_all(path.parent().unwrap()));
+ create_dir_all(path.parent().unwrap())?;
opts.open(&path)
} else {
Err(e)
@@ -250,7 +250,7 @@ fn acquire(config: &Config,
}
}
let msg = format!("waiting for file lock on {}", msg);
- try!(config.shell().err().say_status("Blocking", &msg, CYAN, true));
+ config.shell().err().say_status("Blocking", &msg, CYAN, true)?;
block().chain_error(|| {
human(format!("failed to lock file: {}", path.display()))
diff --git a/src/cargo/util/graph.rs b/src/cargo/util/graph.rs
index cc0414f6188..6543c8f9179 100644
--- a/src/cargo/util/graph.rs
+++ b/src/cargo/util/graph.rs
@@ -69,17 +69,17 @@ impl Graph {
impl fmt::Debug for Graph {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
- try!(writeln!(fmt, "Graph {{"));
+ writeln!(fmt, "Graph {{")?;
for (n, e) in self.nodes.iter() {
- try!(writeln!(fmt, " - {}", n));
+ writeln!(fmt, " - {}", n)?;
for n in e.iter() {
- try!(writeln!(fmt, " - {}", n));
+ writeln!(fmt, " - {}", n)?;
}
}
- try!(write!(fmt, "}}"));
+ write!(fmt, "}}")?;
Ok(())
}
diff --git a/src/cargo/util/paths.rs b/src/cargo/util/paths.rs
index 5770b80ea6f..d8ee37c161d 100644
--- a/src/cargo/util/paths.rs
+++ b/src/cargo/util/paths.rs
@@ -70,8 +70,8 @@ pub fn without_prefix<'a>(a: &'a Path, b: &'a Path) -> Option<&'a Path> {
pub fn read(path: &Path) -> CargoResult<String> {
(|| -> CargoResult<String> {
let mut ret = String::new();
- let mut f = try!(File::open(path));
- try!(f.read_to_string(&mut ret));
+ let mut f = File::open(path)?;
+ f.read_to_string(&mut ret)?;
Ok(ret)
})().map_err(human).chain_error(|| {
human(format!("failed to read `{}`", path.display()))
@@ -80,8 +80,8 @@ pub fn read(path: &Path) -> CargoResult<String> {
pub fn write(path: &Path, contents: &[u8]) -> CargoResult<()> {
(|| -> CargoResult<()> {
- let mut f = try!(File::create(path));
- try!(f.write_all(contents));
+ let mut f = File::create(path)?;
+ f.write_all(contents)?;
Ok(())
})().map_err(human).chain_error(|| {
human(format!("failed to write `{}`", path.display()))
@@ -96,7 +96,7 @@ pub fn append(path: &Path, contents: &[u8]) -> CargoResult<()> {
.create(true)
.open(path));
- try!(f.write_all(contents));
+ f.write_all(contents)?;
Ok(())
}).chain_error(|| {
internal(format!("failed to write `{}`", path.display()))
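The `read`/`write` helpers above use an immediately-invoked closure so that `?` returns from the closure rather than the outer function, letting one trailing `map_err`/`chain_error` attach the same context to every failure inside. A standalone sketch of that idiom, with the error type simplified to `String`:

```rust
use std::fs::File;
use std::io::{self, Read};
use std::path::Path;

fn read(path: &Path) -> Result<String, String> {
    // `?` inside the closure returns from the closure, not from `read`,
    // so the single map_err below decorates every failure uniformly.
    (|| -> io::Result<String> {
        let mut ret = String::new();
        let mut f = File::open(path)?;
        f.read_to_string(&mut ret)?;
        Ok(ret)
    })()
    .map_err(|e| format!("failed to read `{}`: {}", path.display(), e))
}

fn main() {
    println!("{:?}", read(Path::new("Cargo.toml")).map(|s| s.len()));
}
```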
diff --git a/src/cargo/util/process_builder.rs b/src/cargo/util/process_builder.rs
index d86df727fa3..0ab8402b850 100644
--- a/src/cargo/util/process_builder.rs
+++ b/src/cargo/util/process_builder.rs
@@ -18,10 +18,10 @@ pub struct ProcessBuilder {
impl fmt::Display for ProcessBuilder {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- try!(write!(f, "`{}", self.program.to_string_lossy()));
+ write!(f, "`{}", self.program.to_string_lossy())?;
for arg in self.args.iter() {
- try!(write!(f, " {}", escape(arg.to_string_lossy())));
+ write!(f, " {}", escape(arg.to_string_lossy()))?;
}
write!(f, "`")
diff --git a/src/cargo/util/rustc.rs b/src/cargo/util/rustc.rs
index aedf6e19c67..d77139a1b8a 100644
--- a/src/cargo/util/rustc.rs
+++ b/src/cargo/util/rustc.rs
@@ -23,7 +23,7 @@ impl Rustc {
first.arg("--cap-lints").arg("allow");
let output = match first.exec_with_output() {
Ok(output) => { ret.cap_lints = true; output }
- Err(..) => try!(cmd.exec_with_output()),
+ Err(..) => cmd.exec_with_output()?,
};
ret.verbose_version = try!(String::from_utf8(output.stdout).map_err(|_| {
internal("rustc -v didn't return utf8 output")
diff --git a/src/cargo/util/toml.rs b/src/cargo/util/toml.rs
index 17f043cab15..b7fcdb4d828 100644
--- a/src/cargo/util/toml.rs
+++ b/src/cargo/util/toml.rs
@@ -115,13 +115,13 @@ pub fn to_manifest(contents: &[u8],
let contents = try!(str::from_utf8(contents).map_err(|_| {
human(format!("{} is not valid UTF-8", manifest.display()))
}));
- let root = try!(parse(contents, &manifest));
+ let root = parse(contents, &manifest)?;
let mut d = toml::Decoder::new(toml::Value::Table(root));
let manifest: TomlManifest = try!(Decodable::decode(&mut d).map_err(|e| {
human(e.to_string())
}));
- let pair = try!(manifest.to_manifest(source_id, &layout, config));
+ let pair = manifest.to_manifest(source_id, &layout, config)?;
let (mut manifest, paths) = pair;
match d.toml {
Some(ref toml) => add_unused_keys(&mut manifest, toml, "".to_string()),
@@ -269,7 +269,7 @@ pub struct TomlVersion {
impl Decodable for TomlVersion {
fn decode<D: Decoder>(d: &mut D) -> Result<TomlVersion, D::Error> {
- let s = try!(d.read_str());
+ let s = d.read_str()?;
match s.to_semver() {
Ok(s) => Ok(TomlVersion { version: s }),
Err(e) => Err(d.error(&e)),
@@ -381,7 +381,7 @@ impl TomlManifest {
bail!("package name cannot be an empty string.")
}
- let pkgid = try!(project.to_package_id(source_id));
+ let pkgid = project.to_package_id(source_id)?;
let metadata = pkgid.generate_metadata();
// If we have no lib at all, use the inferred lib if available
@@ -390,7 +390,7 @@ impl TomlManifest {
let lib = match self.lib {
Some(ref lib) => {
- try!(validate_library_name(lib));
+ validate_library_name(lib)?;
Some(
TomlTarget {
name: lib.name.clone().or(Some(project.name.clone())),
@@ -409,7 +409,7 @@ impl TomlManifest {
let bin = layout.main();
for target in bins {
- try!(validate_binary_name(target));
+ validate_binary_name(target)?;
}
bins.iter().map(|t| {
@@ -438,7 +438,7 @@ impl TomlManifest {
let examples = match self.example {
Some(ref examples) => {
for target in examples {
- try!(validate_example_name(target));
+ validate_example_name(target)?;
}
examples.clone()
}
@@ -448,7 +448,7 @@ impl TomlManifest {
let tests = match self.test {
Some(ref tests) => {
for target in tests {
- try!(validate_test_name(target));
+ validate_test_name(target)?;
}
tests.clone()
}
@@ -458,7 +458,7 @@ impl TomlManifest {
let benches = match self.bench {
Some(ref benches) => {
for target in benches {
- try!(validate_bench_name(target));
+ validate_bench_name(target)?;
}
benches.clone()
}
@@ -526,7 +526,7 @@ impl TomlManifest {
if let Some(targets) = self.target.as_ref() {
for (name, platform) in targets.iter() {
- cx.platform = Some(try!(name.parse()));
+ cx.platform = Some(name.parse()?);
try!(process_dependencies(&mut cx,
platform.dependencies.as_ref(),
None));
@@ -714,7 +714,7 @@ fn process_dependencies(cx: &mut Context,
if cx.source_id.is_path() {
let path = cx.layout.root.join(path);
let path = util::normalize_path(&path);
- Some(try!(SourceId::for_path(&path)))
+ Some(SourceId::for_path(&path)?)
} else {
Some(cx.source_id.clone())
}
@@ -722,10 +722,10 @@ fn process_dependencies(cx: &mut Context,
None => None,
}
}
- }.unwrap_or(try!(SourceId::for_central(cx.config)));
+ }.unwrap_or(SourceId::for_central(cx.config)?);
let version = details.version.as_ref().map(|v| &v[..]);
- let mut dep = try!(DependencyInner::parse(&n, version, &new_source_id));
+ let mut dep = DependencyInner::parse(&n, version, &new_source_id)?;
dep = dep.set_features(details.features.unwrap_or(Vec::new()))
.set_default_features(details.default_features.unwrap_or(true))
.set_optional(details.optional.unwrap_or(false))
diff --git a/src/cargo/util/vcs.rs b/src/cargo/util/vcs.rs
index ffd260680a2..730200316a0 100644
--- a/src/cargo/util/vcs.rs
+++ b/src/cargo/util/vcs.rs
@@ -9,7 +9,7 @@ pub struct GitRepo;
impl GitRepo {
pub fn init(path: &Path, _: &Path) -> CargoResult<GitRepo> {
- try!(git2::Repository::init(path));
+ git2::Repository::init(path)?;
Ok(GitRepo)
}
pub fn discover(path: &Path, _: &Path) -> Result<git2::Repository, git2::Error> {
@@ -19,11 +19,11 @@ impl GitRepo {
impl HgRepo {
pub fn init(path: &Path, cwd: &Path) -> CargoResult<HgRepo> {
- try!(process("hg").cwd(cwd).arg("init").arg(path).exec());
+ process("hg").cwd(cwd).arg("init").arg(path).exec()?;
Ok(HgRepo)
}
pub fn discover(path: &Path, cwd: &Path) -> CargoResult<HgRepo> {
- try!(process("hg").cwd(cwd).arg("root").cwd(path).exec_with_output());
+ process("hg").cwd(cwd).arg("root").cwd(path).exec_with_output()?;
Ok(HgRepo)
}
}
diff --git a/src/crates-io/lib.rs b/src/crates-io/lib.rs
index fd98a772c6d..33d167f5929 100644
--- a/src/crates-io/lib.rs
+++ b/src/crates-io/lib.rs
@@ -1,3 +1,5 @@
+#![feature(question_mark)]
+
extern crate curl;
extern crate url;
extern crate rustc_serialize;
@@ -123,35 +125,35 @@ impl Registry {
}
pub fn add_owners(&mut self, krate: &str, owners: &[&str]) -> Result<()> {
- let body = try!(json::encode(&OwnersReq { users: owners }));
+ let body = json::encode(&OwnersReq { users: owners })?;
let body = try!(self.put(format!("/crates/{}/owners", krate),
body.as_bytes()));
- assert!(try!(json::decode::<R>(&body)).ok);
+ assert!(json::decode::<R>(&body)?.ok);
Ok(())
}
pub fn remove_owners(&mut self, krate: &str, owners: &[&str]) -> Result<()> {
- let body = try!(json::encode(&OwnersReq { users: owners }));
+ let body = json::encode(&OwnersReq { users: owners })?;
let body = try!(self.delete(format!("/crates/{}/owners", krate),
Some(body.as_bytes())));
- assert!(try!(json::decode::<R>(&body)).ok);
+ assert!(json::decode::<R>(&body)?.ok);
Ok(())
}
pub fn list_owners(&mut self, krate: &str) -> Result<Vec<User>> {
- let body = try!(self.get(format!("/crates/{}/owners", krate)));
- Ok(try!(json::decode::<Users>(&body)).users)
+ let body = self.get(format!("/crates/{}/owners", krate))?;
+ Ok(json::decode::<Users>(&body)?.users)
}
pub fn publish(&mut self, krate: &NewCrate, tarball: &Path) -> Result<()> {
- let json = try!(json::encode(krate));
+ let json = json::encode(krate)?;
// Prepare the body. The format of the upload request is:
//
// <le u32 of json>
// <json request> (metadata for the package)
// <le u32 of tarball>
// <source tarball>
- let stat = try!(fs::metadata(tarball).map_err(Error::Io));
+ let stat = fs::metadata(tarball).map_err(Error::Io)?;
let header = {
let mut w = Vec::new();
w.extend([
@@ -169,7 +171,7 @@ impl Registry {
].iter().map(|x| *x));
w
};
- let tarball = try!(File::open(tarball).map_err(Error::Io));
+ let tarball = File::open(tarball).map_err(Error::Io)?;
let size = stat.len() as usize + header.len();
let mut body = Cursor::new(header).chain(tarball);
@@ -184,7 +186,7 @@ impl Registry {
.header("Accept", "application/json")
.header("Authorization", &token);
let response = handle(request.exec());
- let _body = try!(response);
+ let _body = response?;
Ok(())
}
@@ -195,21 +197,21 @@ impl Registry {
None, Get, Auth::Unauthorized
));
- let crates = try!(json::decode::<Crates>(&body));
+ let crates = json::decode::<Crates>(&body)?;
Ok((crates.crates, crates.meta.total))
}
pub fn yank(&mut self, krate: &str, version: &str) -> Result<()> {
let body = try!(self.delete(format!("/crates/{}/{}/yank", krate, version),
None));
- assert!(try!(json::decode::<R>(&body)).ok);
+ assert!(json::decode::<R>(&body)?.ok);
Ok(())
}
pub fn unyank(&mut self, krate: &str, version: &str) -> Result<()> {
let body = try!(self.put(format!("/crates/{}/{}/unyank", krate, version),
&[]));
- assert!(try!(json::decode::<R>(&body)).ok);
+ assert!(json::decode::<R>(&body)?.ok);
Ok(())
}
@@ -249,7 +251,7 @@ impl Registry {
fn handle(response: result::Result)
-> Result {
- let response = try!(response.map_err(Error::Curl));
+ let response = response.map_err(Error::Curl)?;
match response.get_code() {
0 => {} // file upload url sometimes
200 => {}
diff --git a/src/rustversion.txt b/src/rustversion.txt
index b6ebe1778eb..dda1260d5a9 100644
--- a/src/rustversion.txt
+++ b/src/rustversion.txt
@@ -1 +1 @@
-2016-02-12
+2016-03-09
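The nightly date bump above goes hand in hand with the `#![feature(question_mark)]` attributes added to the crate roots: at this point (March 2016) the `?` operator was still unstable, so the pinned compiler had to be recent enough to recognize the feature gate. Once `?` was stabilized (Rust 1.13, later that year), both the attribute and this kind of pin became unnecessary. The opt-in, as it appears at the top of the affected crate roots:

```rust
// Crate-root attribute enabling the then-unstable `?` operator; accepted only
// by nightly compilers of that era and removed after stabilization.
#![feature(question_mark)]
```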
diff --git a/tests/resolve.rs b/tests/resolve.rs
index 933996a37a7..f6242a4f615 100644
--- a/tests/resolve.rs
+++ b/tests/resolve.rs
@@ -18,7 +18,7 @@ fn resolve(pkg: PackageId, deps: Vec<Dependency>,
-> CargoResult<Vec<PackageId>> {
let summary = Summary::new(pkg, deps, HashMap::new()).unwrap();
let method = Method::Everything;
- Ok(try!(resolver::resolve(&summary, &method, registry)).iter().map(|p| {
+ Ok(resolver::resolve(&summary, &method, registry)?.iter().map(|p| {
p.clone()
}).collect())
}
diff --git a/tests/support/mod.rs b/tests/support/mod.rs
index 9a55c1f53cd..79b765a8123 100644
--- a/tests/support/mod.rs
+++ b/tests/support/mod.rs
@@ -41,7 +41,7 @@ impl FileBuilder {
}
fn mk(&self) -> Result<(), String> {
- try!(mkdir_recursive(&self.dirname()));
+ mkdir_recursive(&self.dirname())?;
let mut file = try!(
fs::File::create(&self.path)
@@ -71,7 +71,7 @@ impl SymlinkBuilder {
#[cfg(unix)]
fn mk(&self) -> Result<(), String> {
- try!(mkdir_recursive(&self.dirname()));
+ mkdir_recursive(&self.dirname())?;
os::unix::fs::symlink(&self.dst, &self.src)
.with_err_msg(format!("Could not create symlink; dst={} src={}",
@@ -80,7 +80,7 @@ impl SymlinkBuilder {
#[cfg(windows)]
fn mk(&self) -> Result<(), String> {
- try!(mkdir_recursive(&self.dirname()));
+ mkdir_recursive(&self.dirname())?;
os::windows::fs::symlink_file(&self.dst, &self.src)
.with_err_msg(format!("Could not create symlink; dst={} src={}",
@@ -175,17 +175,17 @@ impl ProjectBuilder {
pub fn build_with_result(&self) -> Result<(), String> {
// First, clean the directory if it already exists
- try!(self.rm_root());
+ self.rm_root()?;
// Create the empty directory
- try!(mkdir_recursive(&self.root));
+ mkdir_recursive(&self.root)?;
for file in self.files.iter() {
- try!(file.mk());
+ file.mk()?;
}
for symlink in self.symlinks.iter() {
- try!(symlink.mk());
+ symlink.mk()?;
}
Ok(())
@@ -339,7 +339,7 @@ impl Execs {
}
if let Some(ref expect_json) = self.expect_json {
- try!(self.match_json(expect_json, &actual.stdout));
+ self.match_json(expect_json, &actual.stdout)?;
}
Ok(())
}
diff --git a/tests/support/paths.rs b/tests/support/paths.rs
index 2a25d93b1f3..f55449a6774 100644
--- a/tests/support/paths.rs
+++ b/tests/support/paths.rs
@@ -52,10 +52,10 @@ impl CargoPathExt for Path {
fn rm_rf(&self) -> io::Result<()> {
if self.c_exists() {
for file in fs::read_dir(self).unwrap() {
- let file = try!(file).path();
+ let file = file?.path();
if file.c_is_dir() {
- try!(file.rm_rf());
+ file.rm_rf()?;
} else {
// On windows we can't remove a readonly file, and git will
// often clone files as readonly. As a result, we have some
@@ -67,7 +67,7 @@ impl CargoPathExt for Path {
let mut p = file.c_metadata().unwrap().permissions();
p.set_readonly(false);
fs::set_permissions(&file, p).unwrap();
- try!(fs::remove_file(&file));
+ fs::remove_file(&file)?;
}
Err(e) => return Err(e)
}
@@ -85,9 +85,9 @@ impl CargoPathExt for Path {
fn move_into_the_past(&self) -> io::Result<()> {
if self.c_is_file() {
- try!(time_travel(self));
+ time_travel(self)?;
} else {
- try!(recurse(self, &self.join("target")));
+ recurse(self, &self.join("target"))?;
}
return Ok(());
@@ -97,16 +97,16 @@ impl CargoPathExt for Path {
} else if p.starts_with(bad) {
Ok(())
} else {
- for f in try!(fs::read_dir(p)) {
- let f = try!(f).path();
- try!(recurse(&f, bad));
+ for f in fs::read_dir(p)? {
+ let f = f?.path();
+ recurse(&f, bad)?;
}
Ok(())
}
}
fn time_travel(path: &Path) -> io::Result<()> {
- let stat = try!(path.c_metadata());
+ let stat = path.c_metadata()?;
let mtime = FileTime::from_last_modification_time(&stat);
let newtime = mtime.seconds_relative_to_1970() - 3600;
@@ -121,7 +121,7 @@ impl CargoPathExt for Path {
}
let mut perms = stat.permissions();
perms.set_readonly(false);
- try!(fs::set_permissions(path, perms));
+ fs::set_permissions(path, perms)?;
filetime::set_file_times(path, newtime, newtime)
}
}
diff --git a/tests/test_shell.rs b/tests/test_shell.rs
index 931c7b818ee..d62a6b696e8 100644
--- a/tests/test_shell.rs
+++ b/tests/test_shell.rs
@@ -87,10 +87,10 @@ test!(no_term {
fn colored_output(string: &str, color: color::Color) -> CargoResult<String> {
let mut term = TerminfoTerminal::new(Vec::new()).unwrap();
- try!(term.reset());
- try!(term.fg(color));
- try!(write!(&mut term, "{}", string));
- try!(term.reset());
- try!(term.flush());
+ term.reset()?;
+ term.fg(color)?;
+ write!(&mut term, "{}", string)?;
+ term.reset()?;
+ term.flush()?;
Ok(String::from_utf8_lossy(term.get_ref()).to_string())
}
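One constraint the test changes rely on throughout: `?` only compiles inside a function whose return type can absorb the error, which is why every converted call site here sits in a `CargoResult<_>`, `io::Result<_>`, or similar `Result`-returning helper. A small standalone illustration (names hypothetical):

```rust
use std::num::ParseIntError;

// `?` propagates ParseIntError because the signature returns a matching Result.
fn parse_pair(a: &str, b: &str) -> Result<(i32, i32), ParseIntError> {
    Ok((a.parse()?, b.parse()?))
}

fn main() {
    // `main` returned `()` at the time, so errors are handled here explicitly
    // rather than propagated with `?`.
    match parse_pair("4", "2") {
        Ok(pair) => println!("parsed {:?}", pair),
        Err(e) => println!("parse error: {}", e),
    }
}
```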