"docopt 0.6.35 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"flate2 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)",
- "git2 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)",
+ "git2 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)",
"glob 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
"hamcrest 0.1.0 (git+https://github.com/carllerche/hamcrest-rust.git)",
"log 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
[[package]]
name = "git2"
-version = "0.1.15"
+version = "0.1.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"bitflags 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
try!(source.update().map_err(|err| CliError::new(err.description(), 1)));
- source
- .get_root_package()
- .map(|pkg| Some(pkg))
- .map_err(|err| CliError::from_boxed(err, 1))
+ source.root_package()
+ .map(|pkg| Some(pkg))
+ .map_err(|err| CliError::from_boxed(err, 1))
}
}
}
- /// Returns the version of the dependency that is being requested.
- pub fn get_version_req(&self) -> &VersionReq {
- &self.req
- }
-
- pub fn get_specified_req(&self) -> Option<&str> {
+ pub fn version_req(&self) -> &VersionReq { &self.req }
+ pub fn name(&self) -> &str { &self.name }
+ pub fn source_id(&self) -> &SourceId { &self.source_id }
+ pub fn kind(&self) -> Kind { self.kind }
+ pub fn specified_req(&self) -> Option<&str> {
self.specified_req.as_ref().map(|s| s.as_slice())
}
- pub fn get_name(&self) -> &str { &self.name }
-
- /// Returns the place where this dependency must be searched for.
- pub fn get_source_id(&self) -> &SourceId {
- &self.source_id
+ /// If none, this dependencies must be built for all platforms.
+ /// If some, it must only be built for the specified platform.
+ pub fn only_for_platform(&self) -> Option<&str> {
+ self.only_for_platform.as_ref().map(|s| s.as_slice())
}
- pub fn get_kind(&self) -> Kind { self.kind }
-
- pub fn kind(mut self, kind: Kind) -> Dependency {
+ pub fn set_kind(mut self, kind: Kind) -> Dependency {
self.kind = kind;
self
}
/// Sets the list of features requested for the package.
- pub fn features(mut self, features: Vec<String>) -> Dependency {
+ pub fn set_features(mut self, features: Vec<String>) -> Dependency {
self.features = features;
self
}
/// Sets whether the dependency requests default features of the package.
- pub fn default_features(mut self, default_features: bool) -> Dependency {
+ pub fn set_default_features(mut self, default_features: bool) -> Dependency {
self.default_features = default_features;
self
}
/// Sets whether the dependency is optional.
- pub fn optional(mut self, optional: bool) -> Dependency {
+ pub fn set_optional(mut self, optional: bool) -> Dependency {
self.optional = optional;
self
}
/// Set the source id for this dependency
- pub fn source_id(mut self, id: SourceId) -> Dependency {
+ pub fn set_source_id(mut self, id: SourceId) -> Dependency {
self.source_id = id;
self
}
/// Set the version requirement for this dependency
- pub fn version_req(mut self, req: VersionReq) -> Dependency {
+ pub fn set_version_req(mut self, req: VersionReq) -> Dependency {
self.req = req;
self
}
- /// Lock this dependency to depending on the specified package id
- pub fn lock_to(self, id: &PackageId) -> Dependency {
- assert_eq!(self.source_id, *id.get_source_id());
- assert!(self.req.matches(id.get_version()));
- self.version_req(VersionReq::exact(id.get_version()))
- .source_id(id.get_source_id().clone())
- }
-
- pub fn only_for_platform(mut self, platform: Option<String>) -> Dependency {
+ pub fn set_only_for_platform(mut self, platform: Option<String>)
+ -> Dependency {
self.only_for_platform = platform;
self
}
+ /// Lock this dependency to depending on the specified package id
+ pub fn lock_to(self, id: &PackageId) -> Dependency {
+ assert_eq!(self.source_id, *id.source_id());
+ assert!(self.req.matches(id.version()));
+ self.set_version_req(VersionReq::exact(id.version()))
+ .set_source_id(id.source_id().clone())
+ }
+
/// Returns false if the dependency is only used to build the local package.
pub fn is_transitive(&self) -> bool {
match self.kind {
/// Returns true if the default features of the dependency are requested.
pub fn uses_default_features(&self) -> bool { self.default_features }
/// Returns the list of features that are requested by the dependency.
- pub fn get_features(&self) -> &[String] { &self.features }
+ pub fn features(&self) -> &[String] { &self.features }
/// Returns true if the package (`sum`) can fulfill this dependency request.
pub fn matches(&self, sum: &Summary) -> bool {
- self.matches_id(sum.get_package_id())
+ self.matches_id(sum.package_id())
}
/// Returns true if the package (`id`) can fulfill this dependency request.
pub fn matches_id(&self, id: &PackageId) -> bool {
- self.name == id.get_name() &&
- (self.only_match_name || (self.req.matches(id.get_version()) &&
- &self.source_id == id.get_source_id()))
- }
-
- /// If none, this dependencies must be built for all platforms.
- /// If some, it must only be built for the specified platform.
- pub fn get_only_for_platform(&self) -> Option<&str> {
- self.only_for_platform.as_ref().map(|s| s.as_slice())
+ self.name == id.name() &&
+ (self.only_match_name || (self.req.matches(id.version()) &&
+ &self.source_id == id.source_id()))
}
/// Returns true if the dependency should be built for this platform.
impl SerializedDependency {
pub fn from_dependency(dep: &Dependency) -> SerializedDependency {
SerializedDependency {
- name: dep.get_name().to_string(),
- req: dep.get_version_req().to_string()
+ name: dep.name().to_string(),
+ req: dep.version_req().to_string()
}
}
}
impl Encodable for Manifest {
fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
SerializedManifest {
- name: self.summary.get_name().to_string(),
- version: self.summary.get_version().to_string(),
- dependencies: self.summary.get_dependencies().iter().map(|d| {
+ name: self.summary.name().to_string(),
+ version: self.summary.version().to_string(),
+ dependencies: self.summary.dependencies().iter().map(|d| {
SerializedDependency::from_dependency(d)
}).collect(),
targets: self.targets.clone(),
}
}
- pub fn is_compile(&self) -> bool {
- self.env == "compile"
- }
-
- pub fn is_doc(&self) -> bool {
- self.doc
- }
-
- pub fn is_test(&self) -> bool {
- self.test
- }
-
- pub fn uses_test_harness(&self) -> bool {
- self.harness
- }
-
- pub fn is_doctest(&self) -> bool {
- self.doctest
- }
-
- pub fn is_custom_build(&self) -> bool {
- self.custom_build
- }
-
- /// Returns true if the target must be built for the host instead of the target.
- pub fn is_for_host(&self) -> bool {
- self.for_host
- }
-
- pub fn get_opt_level(&self) -> u32 {
- self.opt_level
- }
-
- pub fn get_lto(&self) -> bool {
- self.lto
- }
-
- pub fn get_codegen_units(&self) -> Option<u32> {
- self.codegen_units
- }
-
- pub fn get_debug(&self) -> bool {
- self.debug
- }
-
- pub fn get_rpath(&self) -> bool {
- self.rpath
- }
-
- pub fn get_env(&self) -> &str {
- &self.env
- }
-
- pub fn get_dest(&self) -> Option<&str> {
+ pub fn codegen_units(&self) -> Option<u32> { self.codegen_units }
+ pub fn debug(&self) -> bool { self.debug }
+ pub fn env(&self) -> &str { &self.env }
+ pub fn is_compile(&self) -> bool { self.env == "compile" }
+ pub fn is_custom_build(&self) -> bool { self.custom_build }
+ pub fn is_doc(&self) -> bool { self.doc }
+ pub fn is_doctest(&self) -> bool { self.doctest }
+ pub fn is_for_host(&self) -> bool { self.for_host }
+ pub fn is_test(&self) -> bool { self.test }
+ pub fn lto(&self) -> bool { self.lto }
+ pub fn opt_level(&self) -> u32 { self.opt_level }
+ pub fn rpath(&self) -> bool { self.rpath }
+ pub fn uses_test_harness(&self) -> bool { self.harness }
+
+ pub fn dest(&self) -> Option<&str> {
self.dest.as_ref().map(|d| d.as_slice())
}
- pub fn opt_level(mut self, level: u32) -> Profile {
+ pub fn set_opt_level(mut self, level: u32) -> Profile {
self.opt_level = level;
self
}
- pub fn lto(mut self, lto: bool) -> Profile {
+ pub fn set_lto(mut self, lto: bool) -> Profile {
self.lto = lto;
self
}
- pub fn codegen_units(mut self, units: Option<u32>) -> Profile {
+ pub fn set_codegen_units(mut self, units: Option<u32>) -> Profile {
self.codegen_units = units;
self
}
- pub fn debug(mut self, debug: bool) -> Profile {
+ pub fn set_debug(mut self, debug: bool) -> Profile {
self.debug = debug;
self
}
- pub fn rpath(mut self, rpath: bool) -> Profile {
+ pub fn set_rpath(mut self, rpath: bool) -> Profile {
self.rpath = rpath;
self
}
- pub fn test(mut self, test: bool) -> Profile {
+ pub fn set_test(mut self, test: bool) -> Profile {
self.test = test;
self
}
- pub fn doctest(mut self, doctest: bool) -> Profile {
+ pub fn set_doctest(mut self, doctest: bool) -> Profile {
self.doctest = doctest;
self
}
- pub fn doc(mut self, doc: bool) -> Profile {
+ pub fn set_doc(mut self, doc: bool) -> Profile {
self.doc = doc;
self
}
- /// Sets whether the `Target` must be compiled for the host instead of the target platform.
- pub fn for_host(mut self, for_host: bool) -> Profile {
+ /// Sets whether the `Target` must be compiled for the host instead of the
+ /// target platform.
+ pub fn set_for_host(mut self, for_host: bool) -> Profile {
self.for_host = for_host;
self
}
- pub fn harness(mut self, harness: bool) -> Profile {
+ pub fn set_harness(mut self, harness: bool) -> Profile {
self.harness = harness;
self
}
/// Sets whether the `Target` is a custom build script.
- pub fn custom_build(mut self, custom_build: bool) -> Profile {
+ pub fn set_custom_build(mut self, custom_build: bool) -> Profile {
self.custom_build = custom_build;
self
}
}
}
-/// Informations about a binary, a library, an example, etc. that is part of the package.
+/// Informations about a binary, a library, an example, etc. that is part of the
+/// package.
#[derive(Clone, Hash, PartialEq, Eq, Debug)]
pub struct Target {
kind: TargetKind,
impl Encodable for Target {
fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
let kind = match self.kind {
- TargetKind::Lib(ref kinds) => kinds.iter().map(|k| k.crate_type()).collect(),
+ TargetKind::Lib(ref kinds) => {
+ kinds.iter().map(|k| k.crate_type()).collect()
+ }
TargetKind::Bin => vec!("bin"),
TargetKind::Example => vec!["example"],
};
}
}
- pub fn get_summary(&self) -> &Summary {
- &self.summary
- }
-
- pub fn get_package_id(&self) -> &PackageId {
- self.get_summary().get_package_id()
- }
-
- pub fn get_name(&self) -> &str {
- self.get_package_id().get_name()
- }
-
- pub fn get_version(&self) -> &Version {
- self.get_summary().get_package_id().get_version()
- }
-
- pub fn get_dependencies(&self) -> &[Dependency] {
- self.get_summary().get_dependencies()
- }
-
- pub fn get_targets(&self) -> &[Target] {
- &self.targets
- }
-
- pub fn get_target_dir(&self) -> &Path {
- &self.target_dir
- }
-
- pub fn get_doc_dir(&self) -> &Path {
- &self.doc_dir
- }
-
- pub fn get_links(&self) -> Option<&str> {
+ pub fn dependencies(&self) -> &[Dependency] { self.summary.dependencies() }
+ pub fn doc_dir(&self) -> &Path { &self.doc_dir }
+ pub fn exclude(&self) -> &[String] { &self.exclude }
+ pub fn include(&self) -> &[String] { &self.include }
+ pub fn metadata(&self) -> &ManifestMetadata { &self.metadata }
+ pub fn name(&self) -> &str { self.package_id().name() }
+ pub fn package_id(&self) -> &PackageId { self.summary.package_id() }
+ pub fn summary(&self) -> &Summary { &self.summary }
+ pub fn target_dir(&self) -> &Path { &self.target_dir }
+ pub fn targets(&self) -> &[Target] { &self.targets }
+ pub fn version(&self) -> &Version { self.package_id().version() }
+ pub fn warnings(&self) -> &[String] { &self.warnings }
+ pub fn links(&self) -> Option<&str> {
self.links.as_ref().map(|s| s.as_slice())
}
self.warnings.push(s)
}
- pub fn get_warnings(&self) -> &[String] {
- &self.warnings
- }
-
- pub fn get_exclude(&self) -> &[String] {
- &self.exclude
- }
-
- pub fn get_include(&self) -> &[String] {
- &self.include
- }
-
- pub fn get_metadata(&self) -> &ManifestMetadata { &self.metadata }
-
pub fn set_summary(&mut self, summary: Summary) {
self.summary = summary;
}
}
}
- pub fn get_name(&self) -> &str {
- &self.name
- }
-
- pub fn get_src_path(&self) -> &Path {
- &self.src_path
- }
+ pub fn name(&self) -> &str { &self.name }
+ pub fn src_path(&self) -> &Path { &self.src_path }
+ pub fn profile(&self) -> &Profile { &self.profile }
+ pub fn metadata(&self) -> Option<&Metadata> { self.metadata.as_ref() }
pub fn is_lib(&self) -> bool {
match self.kind {
}
}
- pub fn get_profile(&self) -> &Profile {
- &self.profile
- }
-
- pub fn get_metadata(&self) -> Option<&Metadata> {
- self.metadata.as_ref()
- }
-
/// Returns the arguments suitable for `--crate-type` to pass to rustc.
pub fn rustc_crate_types(&self) -> Vec<&'static str> {
match self.kind {
impl Encodable for Package {
fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
- let manifest = self.get_manifest();
- let summary = manifest.get_summary();
- let package_id = summary.get_package_id();
+ let manifest = self.manifest();
+ let summary = manifest.summary();
+ let package_id = summary.package_id();
SerializedPackage {
- name: package_id.get_name().to_string(),
- version: package_id.get_version().to_string(),
- dependencies: summary.get_dependencies().iter().map(|d| {
+ name: package_id.name().to_string(),
+ version: package_id.version().to_string(),
+ dependencies: summary.dependencies().iter().map(|d| {
SerializedDependency::from_dependency(d)
}).collect(),
- targets: manifest.get_targets().to_vec(),
+ targets: manifest.targets().to_vec(),
manifest_path: self.manifest_path.display().to_string()
}.encode(s)
}
}
}
- pub fn get_manifest(&self) -> &Manifest {
- &self.manifest
- }
-
- pub fn get_summary(&self) -> &Summary {
- self.manifest.get_summary()
- }
-
- pub fn get_package_id(&self) -> &PackageId {
- self.manifest.get_package_id()
- }
-
- pub fn get_name(&self) -> &str {
- self.get_package_id().get_name()
- }
-
- pub fn get_version(&self) -> &Version {
- self.get_package_id().get_version()
- }
-
- pub fn get_dependencies(&self) -> &[Dependency] {
- self.get_manifest().get_dependencies()
- }
-
- pub fn get_targets(&self) -> &[Target] {
- self.get_manifest().get_targets()
- }
-
- pub fn get_manifest_path(&self) -> &Path {
- &self.manifest_path
- }
+ pub fn dependencies(&self) -> &[Dependency] { self.manifest.dependencies() }
+ pub fn manifest(&self) -> &Manifest { &self.manifest }
+ pub fn manifest_path(&self) -> &Path { &self.manifest_path }
+ pub fn name(&self) -> &str { self.package_id().name() }
+ pub fn package_id(&self) -> &PackageId { self.manifest.package_id() }
+ pub fn root(&self) -> Path { self.manifest_path.dir_path() }
+ pub fn summary(&self) -> &Summary { self.manifest.summary() }
+ pub fn target_dir(&self) -> &Path { self.manifest.target_dir() }
+ pub fn targets(&self) -> &[Target] { self.manifest().targets() }
+ pub fn version(&self) -> &Version { self.package_id().version() }
- pub fn get_root(&self) -> Path {
- self.manifest_path.dir_path()
- }
-
- pub fn get_target_dir(&self) -> &Path {
- self.manifest.get_target_dir()
- }
-
- pub fn get_absolute_target_dir(&self) -> Path {
- self.get_root().join(self.get_target_dir())
+ pub fn absolute_target_dir(&self) -> Path {
+ self.root().join(self.target_dir())
}
pub fn has_custom_build(&self) -> bool {
- self.get_targets().iter().any(|t| t.get_profile().is_custom_build())
+ self.targets().iter().any(|t| t.profile().is_custom_build())
}
}
impl fmt::Display for Package {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- write!(f, "{}", self.get_summary().get_package_id())
+ write!(f, "{}", self.summary().package_id())
}
}
impl PartialEq for Package {
fn eq(&self, other: &Package) -> bool {
- self.get_package_id() == other.get_package_id()
+ self.package_id() == other.package_id()
}
}
impl<H: hash::Writer + hash::Hasher> hash::Hash<H> for Package {
fn hash(&self, into: &mut H) {
- self.get_package_id().hash(into)
+ self.package_id().hash(into)
}
}
/// Get a package by name out of the set
pub fn get(&self, name: &str) -> &Package {
- self.packages.iter().find(|pkg| name == pkg.get_name())
+ self.packages.iter().find(|pkg| name == pkg.name())
.expect("PackageSet.get: empty set")
}
names.iter().map(|name| self.get(*name) ).collect()
}
- pub fn get_packages(&self) -> &[Package] {
- &self.packages
- }
+ pub fn packages(&self) -> &[Package] { &self.packages }
// For now, assume that the package set contains only one package with a
// given name
let mut graph = graph::Graph::new();
for pkg in self.packages.iter() {
- let deps: Vec<&str> = pkg.get_dependencies().iter()
- .map(|dep| dep.get_name())
+ let deps: Vec<&str> = pkg.dependencies().iter()
+ .map(|dep| dep.name())
.collect();
- graph.add(pkg.get_name(), &deps);
+ graph.add(pkg.name(), &deps);
}
let pkgs = match graph.sort() {
impl Registry for PackageSet {
fn query(&mut self, name: &Dependency) -> CargoResult<Vec<Summary>> {
Ok(self.packages.iter()
- .filter(|pkg| name.get_name() == pkg.get_name())
- .map(|pkg| pkg.get_summary().clone())
+ .filter(|pkg| name.name() == pkg.name())
+ .map(|pkg| pkg.summary().clone())
.collect())
}
}
})
}
- pub fn get_name(&self) -> &str {
- &self.inner.name
- }
-
- pub fn get_version(&self) -> &semver::Version {
- &self.inner.version
- }
-
- pub fn get_source_id(&self) -> &SourceId {
- &self.inner.source_id
- }
+ pub fn name(&self) -> &str { &self.inner.name }
+ pub fn version(&self) -> &semver::Version { &self.inner.version }
+ pub fn source_id(&self) -> &SourceId { &self.inner.source_id }
pub fn generate_metadata(&self) -> Metadata {
let metadata = short_hash(
pub fn from_package_id(package_id: &PackageId) -> PackageIdSpec {
PackageIdSpec {
- name: package_id.get_name().to_string(),
- version: Some(package_id.get_version().clone()),
- url: Some(package_id.get_source_id().get_url().clone()),
+ name: package_id.name().to_string(),
+ version: Some(package_id.version().clone()),
+ url: Some(package_id.source_id().url().clone()),
}
}
})
}
- pub fn get_name(&self) -> &str { &self.name }
- pub fn get_version(&self) -> Option<&Version> { self.version.as_ref() }
- pub fn get_url(&self) -> Option<&Url> { self.url.as_ref() }
+ pub fn name(&self) -> &str { &self.name }
+ pub fn version(&self) -> Option<&Version> { self.version.as_ref() }
+ pub fn url(&self) -> Option<&Url> { self.url.as_ref() }
pub fn matches(&self, package_id: &PackageId) -> bool {
- if self.get_name() != package_id.get_name() { return false }
+ if self.name() != package_id.name() { return false }
match self.version {
- Some(ref v) => if v != package_id.get_version() { return false },
+ Some(ref v) => if v != package_id.version() { return false },
None => {}
}
match self.url {
- Some(ref u) => u == package_id.get_source_id().get_url(),
+ Some(ref u) => u == package_id.source_id().url(),
None => true
}
}
// If the previous source was not a precise source, then we can be
// sure that it's already been updated if we've already loaded it.
- Some(&(ref previous, _)) if previous.get_precise().is_none() => {
+ Some(&(ref previous, _)) if previous.precise().is_none() => {
return Ok(())
}
// then we're done, otherwise we need to need to move forward
// updating this source.
Some(&(ref previous, _)) => {
- if previous.get_precise() == namespace.get_precise() {
+ if previous.precise() == namespace.precise() {
return Ok(())
}
}
}
pub fn register_lock(&mut self, id: PackageId, deps: Vec<PackageId>) {
- let sub_map = match self.locked.entry(id.get_source_id().clone()) {
+ let sub_map = match self.locked.entry(id.source_id().clone()) {
Occupied(e) => e.into_mut(),
Vacant(e) => e.insert(HashMap::new()),
};
- let sub_vec = match sub_map.entry(id.get_name().to_string()) {
+ let sub_vec = match sub_map.entry(id.name().to_string()) {
Occupied(e) => e.into_mut(),
Vacant(e) => e.insert(Vec::new()),
};
let mut ret = Vec::new();
for s in self.overrides.iter() {
let src = self.sources.get_mut(s).unwrap();
- let dep = Dependency::new_override(dep.get_name(), s);
+ let dep = Dependency::new_override(dep.name(), s);
ret.extend(try!(src.query(&dep)).into_iter().filter(|s| {
- seen.insert(s.get_name().to_string())
+ seen.insert(s.name().to_string())
}));
}
Ok(ret)
// to be rewritten to a locked version wherever possible. If we're unable to
// map a dependency though, we just pass it on through.
fn lock(&self, summary: Summary) -> Summary {
- let pair = self.locked.get(summary.get_source_id()).and_then(|map| {
- map.get(summary.get_name())
+ let pair = self.locked.get(summary.source_id()).and_then(|map| {
+ map.get(summary.name())
}).and_then(|vec| {
- vec.iter().find(|&&(ref id, _)| id == summary.get_package_id())
+ vec.iter().find(|&&(ref id, _)| id == summary.package_id())
});
// Lock the summary's id if possible
// case it was likely an optional dependency which wasn't
// included previously so we just pass it through anyway.
Some(&(_, ref deps)) => {
- match deps.iter().find(|d| d.get_name() == dep.get_name()) {
+ match deps.iter().find(|d| d.name() == dep.name()) {
Some(lock) => {
if dep.matches_id(lock) {
dep.lock_to(lock)
// dependency. If anything does then we lock it to that and move
// on.
None => {
- let v = self.locked.get(dep.get_source_id()).and_then(|map| {
- map.get(dep.get_name())
+ let v = self.locked.get(dep.source_id()).and_then(|map| {
+ map.get(dep.name())
}).and_then(|vec| {
vec.iter().find(|&&(ref id, _)| dep.matches_id(id))
});
let ret = if overrides.len() == 0 {
// Ensure the requested source_id is loaded
- try!(self.ensure_loaded(dep.get_source_id()));
+ try!(self.ensure_loaded(dep.source_id()));
let mut ret = Vec::new();
for (id, src) in self.sources.sources_mut() {
- if id == dep.get_source_id() {
+ if id == dep.source_id() {
ret.extend(try!(src.query(dep)).into_iter());
}
}
fn query_overrides(&self, dep: &Dependency) -> Vec<Summary> {
self.overrides.iter()
- .filter(|s| s.get_name() == dep.get_name())
+ .filter(|s| s.name() == dep.name())
.map(|s| s.clone())
.collect()
}
let mut register_pkg = |&mut: pkg: &EncodableDependency|
-> CargoResult<()> {
let pkgid = try!(pkg.to_package_id(default));
- let precise = pkgid.get_source_id().get_precise()
+ let precise = pkgid.source_id().precise()
.map(|s| s.to_string());
assert!(tmp.insert(pkgid.clone(), precise).is_none(),
"a package was referenced twice in the lockfile");
deps
});
- let source = if id.get_source_id() == root.get_source_id() {
+ let source = if id.source_id() == root.source_id() {
None
} else {
- Some(id.get_source_id().clone())
+ Some(id.source_id().clone())
};
EncodableDependency {
- name: id.get_name().to_string(),
- version: id.get_version().to_string(),
+ name: id.name().to_string(),
+ version: id.version().to_string(),
source: source,
dependencies: deps,
}
}
fn encodable_package_id(id: &PackageId, root: &PackageId) -> EncodablePackageId {
- let source = if id.get_source_id() == root.get_source_id() {
+ let source = if id.source_id() == root.source_id() {
None
} else {
- Some(id.get_source_id().with_precise(None))
+ Some(id.source_id().with_precise(None))
};
EncodablePackageId {
- name: id.get_name().to_string(),
- version: id.get_version().to_string(),
+ name: id.name().to_string(),
+ version: id.version().to_string(),
source: source,
}
}
Please re-run this command \
with `-p <spec>` where `<spec>` is one \
of the following:",
- spec.get_name(), spec);
+ spec.name(), spec);
let mut vec = vec![ret, other];
vec.extend(ids);
minimize(&mut msg, vec, &spec);
spec: &PackageIdSpec) {
let mut version_cnt = HashMap::new();
for id in ids.iter() {
- match version_cnt.entry(id.get_version()) {
+ match version_cnt.entry(id.version()) {
Vacant(e) => { e.insert(1); }
Occupied(e) => *e.into_mut() += 1,
}
}
for id in ids.iter() {
- if version_cnt[id.get_version()] == 1 {
- msg.push_str(&format!("\n {}:{}", spec.get_name(),
- id.get_version()));
+ if version_cnt[id.version()] == 1 {
+ msg.push_str(&format!("\n {}:{}", spec.name(),
+ id.version()));
} else {
msg.push_str(&format!("\n {}",
PackageIdSpec::from_package_id(*id)));
/// Builds the list of all packages required to build the first argument.
pub fn resolve(summary: &Summary, method: Method,
registry: &mut Registry) -> CargoResult<Resolve> {
- trace!("resolve; summary={}", summary.get_package_id());
+ trace!("resolve; summary={}", summary.package_id());
let summary = Rc::new(summary.clone());
let cx = Box::new(Context {
- resolve: Resolve::new(summary.get_package_id().clone()),
+ resolve: Resolve::new(summary.package_id().clone()),
activations: HashMap::new(),
visited: Rc::new(RefCell::new(HashSet::new())),
});
-> CargoResult<CargoResult<Box<Context>>> {
// Dependency graphs are required to be a DAG, so we keep a set of
// packages we're visiting and bail if we hit a dupe.
- let id = parent.get_package_id();
+ let id = parent.package_id();
if !cx.visited.borrow_mut().insert(id.clone()) {
return Err(human(format!("cyclic package dependency: package `{}` \
depends on itself", id)))
cx.visited.borrow_mut().remove(id);
return Ok(Ok(cx))
}
- debug!("activating {}", parent.get_package_id());
+ debug!("activating {}", parent.package_id());
// Extracting the platform request.
let platform = match method {
// When we attempt versions for a package, we'll want to start at the
// maximum version and work our way down.
candidates.as_mut_slice().sort_by(|a, b| {
- b.get_version().cmp(a.get_version())
+ b.version().cmp(a.version())
});
let candidates = candidates.into_iter().map(Rc::new).collect::<Vec<_>>();
Ok((dep, candidates, features))
Ok(match try!(activate_deps(cx, registry, parent, platform, &deps, 0)) {
Ok(cx) => {
- cx.visited.borrow_mut().remove(parent.get_package_id());
+ cx.visited.borrow_mut().remove(parent.package_id());
Ok(cx)
}
Err(e) => Err(e),
fn flag_activated(cx: &mut Context,
summary: &Rc<Summary>,
method: &Method) -> bool {
- let id = summary.get_package_id();
- let key = (id.get_name().to_string(), id.get_source_id().clone());
+ let id = summary.package_id();
+ let key = (id.name().to_string(), id.source_id().clone());
let prev = cx.activations.entry(key).get().unwrap_or_else(|e| {
e.insert(Vec::new())
});
prev.push(summary.clone());
return false
}
- debug!("checking if {} is already activated", summary.get_package_id());
+ debug!("checking if {} is already activated", summary.package_id());
let features = match *method {
Method::Required(_, features, _, _) => features,
Method::Everything => return false,
let method = Method::Required(false, &features,
dep.uses_default_features(), platform);
- let key = (dep.get_name().to_string(), dep.get_source_id().clone());
+ let key = (dep.name().to_string(), dep.source_id().clone());
let prev_active = cx.activations.get(&key)
.map(|v| v.as_slice()).unwrap_or(&[]);
- trace!("{}[{}]>{} {} candidates", parent.get_name(), cur, dep.get_name(),
+ trace!("{}[{}]>{} {} candidates", parent.name(), cur, dep.name(),
candidates.len());
- trace!("{}[{}]>{} {} prev activations", parent.get_name(), cur,
- dep.get_name(), prev_active.len());
+ trace!("{}[{}]>{} {} prev activations", parent.name(), cur,
+ dep.name(), prev_active.len());
// Filter the set of candidates based on the previously activated
// versions for this dependency. We can actually use a version if it
let my_candidates = candidates.iter().filter(|&b| {
prev_active.iter().any(|a| a == b) ||
prev_active.iter().all(|a| {
- !compatible(a.get_version(), b.get_version())
+ !compatible(a.version(), b.version())
})
});
// each one in turn.
let mut last_err = None;
for candidate in my_candidates {
- trace!("{}[{}]>{} trying {}", parent.get_name(), cur, dep.get_name(),
- candidate.get_version());
+ trace!("{}[{}]>{} trying {}", parent.name(), cur, dep.name(),
+ candidate.version());
let mut my_cx = cx.clone();
- my_cx.resolve.graph.link(parent.get_package_id().clone(),
- candidate.get_package_id().clone());
+ my_cx.resolve.graph.link(parent.package_id().clone(),
+ candidate.package_id().clone());
// If we hit an intransitive dependency then clear out the visitation
// list as we can't induce a cycle through transitive dependencies.
Err(e) => { last_err = Some(e); }
}
}
- trace!("{}[{}]>{} -- {:?}", parent.get_name(), cur, dep.get_name(),
+ trace!("{}[{}]>{} -- {:?}", parent.name(), cur, dep.name(),
last_err);
// Oh well, we couldn't activate any of the candidates, so we just can't
(required by `{}`):\n\
all possible versions conflict with \
previously selected versions of `{}`",
- dep.get_name(), parent.get_name(),
- dep.get_name());
+ dep.name(), parent.name(),
+ dep.name());
'outer: for v in prev_active.iter() {
for node in cx.resolve.graph.iter() {
let edges = match cx.resolve.graph.edges(node) {
None => continue,
};
for edge in edges {
- if edge != v.get_package_id() { continue }
+ if edge != v.package_id() { continue }
msg.push_str(&format!("\n version {} in use by {}",
- v.get_version(), edge));
+ v.version(), edge));
continue 'outer;
}
}
msg.push_str(&format!("\n version {} in use by ??",
- v.get_version()));
+ v.version()));
}
msg.push_str(&format!("\n possible versions to select: {}",
candidates.iter()
- .map(|v| v.get_version())
+ .map(|v| v.version())
.map(|v| v.to_string())
.collect::<Vec<_>>()
.connect(", "))[]);
(required by `{}`)\n\
location searched: {}\n\
version required: {}",
- dep.get_name(), parent.get_name(),
- dep.get_source_id(),
- dep.get_version_req());
+ dep.name(), parent.name(),
+ dep.source_id(),
+ dep.version_req());
let mut msg = msg;
let all_req = semver::VersionReq::parse("*").unwrap();
- let new_dep = dep.clone().version_req(all_req);
+ let new_dep = dep.clone().set_version_req(all_req);
let mut candidates = try!(registry.query(&new_dep));
candidates.sort_by(|a, b| {
- b.get_version().cmp(a.get_version())
+ b.version().cmp(a.version())
});
if candidates.len() > 0 {
msg.push_str("\nversions found: ");
for (i, c) in candidates.iter().take(3).enumerate() {
if i != 0 { msg.push_str(", "); }
- msg.push_str(&c.get_version().to_string());
+ msg.push_str(&c.version().to_string());
}
if candidates.len() > 3 {
msg.push_str(", ...");
// If we have a path dependency with a locked version, then this may
// indicate that we updated a sub-package and forgot to run `cargo
// update`. In this case try to print a helpful error!
- if dep.get_source_id().is_path() &&
- dep.get_version_req().to_string().starts_with("=") &&
+ if dep.source_id().is_path() &&
+ dep.version_req().to_string().starts_with("=") &&
candidates.len() > 0 {
msg.push_str("\nconsider running `cargo update` to update \
a path dependency's locked version");
};
// First, filter by dev-dependencies
- let deps = parent.get_dependencies();
+ let deps = parent.dependencies();
let deps = deps.iter().filter(|d| d.is_transitive() || dev_deps);
// Second, ignoring dependencies that should not be compiled for this platform
// features that correspond to optional dependencies
for dep in deps {
// weed out optional dependencies, but not those required
- if dep.is_optional() && !feature_deps.contains_key(dep.get_name()) {
+ if dep.is_optional() && !feature_deps.contains_key(dep.name()) {
continue
}
- let mut base = feature_deps.remove(dep.get_name()).unwrap_or(vec![]);
- for feature in dep.get_features().iter() {
+ let mut base = feature_deps.remove(dep.name()).unwrap_or(vec![]);
+ for feature in dep.features().iter() {
base.push(feature.clone());
if feature.contains("/") {
return Err(human(format!("features in dependencies \
feature)));
}
}
- ret.insert(dep.get_name(), (dep, base));
+ ret.insert(dep.name(), (dep, base));
}
// All features can only point to optional dependencies, in which case they
if unknown.len() > 0 {
let features = unknown.connect(", ");
return Err(human(format!("Package `{}` does not have these features: \
- `{}`", parent.get_package_id(), features)))
+ `{}`", parent.package_id(), features)))
}
}
// Record what list of features is active for this package.
if used_features.len() > 0 {
- let pkgid = parent.get_package_id();
+ let pkgid = parent.package_id();
match cx.resolve.features.entry(pkgid.clone()) {
Occupied(entry) => entry.into_mut(),
Vacant(entry) => entry.insert(HashSet::new()),
let mut visited = HashSet::new();
match method {
Method::Everything => {
- for key in s.get_features().keys() {
+ for key in s.features().keys() {
try!(add_feature(s, key, &mut deps, &mut used, &mut visited));
}
- for dep in s.get_dependencies().iter().filter(|d| d.is_optional()) {
- try!(add_feature(s, dep.get_name(), &mut deps, &mut used,
+ for dep in s.dependencies().iter().filter(|d| d.is_optional()) {
+ try!(add_feature(s, dep.name(), &mut deps, &mut used,
&mut visited));
}
}
}
match method {
Method::Everything | Method::Required(_, _, true, _) => {
- if s.get_features().get("default").is_some() &&
+ if s.features().get("default").is_some() &&
!visited.contains("default") {
try!(add_feature(s, "default", &mut deps, &mut used,
&mut visited));
feat)))
}
used.insert(feat.to_string());
- match s.get_features().get(feat) {
+ match s.features().get(feat) {
Some(recursive) => {
for f in recursive.iter() {
try!(add_feature(s, f, deps, used,
Ok(SourceId::for_registry(&try!(RegistrySource::url(config))))
}
- pub fn get_url(&self) -> &Url { &self.inner.url }
+ pub fn url(&self) -> &Url { &self.inner.url }
pub fn is_path(&self) -> bool { self.inner.kind == Kind::Path }
pub fn is_registry(&self) -> bool { self.inner.kind == Kind::Registry }
}
}
- pub fn get_precise(&self) -> Option<&str> {
+ pub fn precise(&self) -> Option<&str> {
self.inner.precise.as_ref().map(|s| s.as_slice())
}
}
pub fn get_by_package_id(&self, pkg_id: &PackageId) -> Option<&(Source+'src)> {
- self.get(pkg_id.get_source_id())
+ self.get(pkg_id.source_id())
}
pub fn insert(&mut self, id: &SourceId, source: Box<Source+'src>) {
dependencies: Vec<Dependency>,
features: HashMap<String, Vec<String>>) -> CargoResult<Summary> {
for dep in dependencies.iter() {
- if features.get(dep.get_name()).is_some() {
+ if features.get(dep.name()).is_some() {
return Err(human(format!("Features and dependencies cannot have \
- the same name: `{}`", dep.get_name())))
+ the same name: `{}`", dep.name())))
}
if dep.is_optional() && !dep.is_transitive() {
return Err(human(format!("Dev-dependencies are not allowed \
to be optional: `{}`",
- dep.get_name())))
+ dep.name())))
}
}
for (feature, list) in features.iter() {
let dep = parts.next().unwrap();
let is_reexport = parts.next().is_some();
if !is_reexport && features.get(dep).is_some() { continue }
- match dependencies.iter().find(|d| d.get_name() == dep) {
+ match dependencies.iter().find(|d| d.name() == dep) {
Some(d) => {
if d.is_optional() || is_reexport { continue }
return Err(human(format!("Feature `{}` depends on `{}` \
})
}
- pub fn get_package_id(&self) -> &PackageId {
- &self.package_id
- }
-
- pub fn get_name(&self) -> &str {
- self.get_package_id().get_name()
- }
-
- pub fn get_version(&self) -> &Version {
- self.get_package_id().get_version()
- }
-
- pub fn get_source_id(&self) -> &SourceId {
- self.package_id.get_source_id()
- }
-
- pub fn get_dependencies(&self) -> &[Dependency] {
- &self.dependencies
- }
-
- pub fn get_features(&self) -> &HashMap<String, Vec<String>> {
- &self.features
- }
+ pub fn package_id(&self) -> &PackageId { &self.package_id }
+ pub fn name(&self) -> &str { self.package_id().name() }
+ pub fn version(&self) -> &Version { self.package_id().version() }
+ pub fn source_id(&self) -> &SourceId { self.package_id.source_id() }
+ pub fn dependencies(&self) -> &[Dependency] { &self.dependencies }
+ pub fn features(&self) -> &HashMap<String, Vec<String>> { &self.features }
pub fn override_id(mut self, id: PackageId) -> Summary {
self.package_id = id;
impl SummaryVec for Vec<Summary> {
// TODO: Move to Registry
fn names(&self) -> Vec<String> {
- self.iter().map(|summary| summary.get_name().to_string()).collect()
+ self.iter().map(|summary| summary.name().to_string()).collect()
}
}
let mut src = try!(PathSource::for_path(&manifest_path.dir_path(),
opts.config));
try!(src.update());
- let root = try!(src.get_root_package());
- let manifest = root.get_manifest();
+ let root = try!(src.root_package());
+ let manifest = root.manifest();
// If we have a spec, then we need to delete some packages, otherwise, just
// remove the whole target directory and be done with it!
let spec = match opts.spec {
Some(spec) => spec,
- None => return rm_rf(manifest.get_target_dir()),
+ None => return rm_rf(manifest.target_dir()),
};
// Load the lockfile (if one's available), and resolve spec to a pkgid
- let lockfile = root.get_root().join("Cargo.lock");
- let source_id = root.get_package_id().get_source_id();
+ let lockfile = root.root().join("Cargo.lock");
+ let source_id = root.package_id().source_id();
let resolve = match try!(ops::load_lockfile(&lockfile, source_id)) {
Some(resolve) => resolve,
None => return Err(human("A Cargo.lock must exist before cleaning"))
// Translate the PackageId to a Package
let pkg = {
- let mut source = pkgid.get_source_id().load(opts.config);
+ let mut source = pkgid.source_id().load(opts.config);
try!(source.update());
(try!(source.get(&[pkgid.clone()]))).into_iter().next().unwrap()
};
let srcs = SourceMap::new();
let pkgs = PackageSet::new(&[]);
let cx = try!(Context::new("compile", &resolve, &srcs, &pkgs, opts.config,
- Layout::at(root.get_absolute_target_dir()),
+ Layout::at(root.absolute_target_dir()),
None, &pkg, Default::default()));
// And finally, clean everything out!
- for target in pkg.get_targets().iter() {
- let layout = Layout::new(&root, opts.target,
- target.get_profile().get_dest());
+ for target in pkg.targets().iter() {
+ let layout = Layout::new(&root, opts.target, target.profile().dest());
try!(rm_rf(&layout.native(&pkg)));
try!(rm_rf(&layout.fingerprint(&pkg)));
for filename in try!(cx.target_filenames(target)).iter() {
try!(source.update());
// TODO: Move this into PathSource
- let package = try!(source.get_root_package());
+ let package = try!(source.root_package());
debug!("loaded package; package={}", package);
- for key in package.get_manifest().get_warnings().iter() {
+ for key in package.manifest().warnings().iter() {
try!(options.config.shell().warn(key))
}
compile_pkg(&package, options)
return Err(human("jobs must be at least 1"))
}
- let override_ids = try!(source_ids_from_config(config,
- package.get_root()));
+ let override_ids = try!(source_ids_from_config(config, package.root()));
let (packages, resolve_with_overrides, sources) = {
let rustc_host = config.rustc_host().to_string();
let to_build = match spec {
Some(spec) => {
let pkgid = try!(resolve_with_overrides.query(spec));
- packages.iter().find(|p| p.get_package_id() == pkgid).unwrap()
+ packages.iter().find(|p| p.package_id() == pkgid).unwrap()
}
None => package,
};
- let targets = to_build.get_targets().iter().filter(|target| {
- target.get_profile().is_custom_build() || match env {
+ let targets = to_build.targets().iter().filter(|target| {
+ target.profile().is_custom_build() || match env {
// doc-all == document everything, so look for doc targets
- "doc" | "doc-all" => target.get_profile().get_env() == "doc",
- env => target.get_profile().get_env() == env,
+ "doc" | "doc-all" => target.profile().env() == "doc",
+ env => target.profile().env() == env,
}
}).filter(|target| !lib_only || target.is_lib()).collect::<Vec<&Target>>();
let mut source = try!(PathSource::for_path(&manifest_path.dir_path(),
options.compile_opts.config));
try!(source.update());
- let package = try!(source.get_root_package());
+ let package = try!(source.root_package());
let mut lib_names = HashSet::new();
let mut bin_names = HashSet::new();
if options.compile_opts.spec.is_none() {
- for target in package.get_targets().iter().filter(|t| t.get_profile().is_doc()) {
+ for target in package.targets().iter().filter(|t| t.profile().is_doc()) {
if target.is_lib() {
- assert!(lib_names.insert(target.get_name()));
+ assert!(lib_names.insert(target.name()));
} else {
- assert!(bin_names.insert(target.get_name()));
+ assert!(bin_names.insert(target.name()));
}
}
for bin in bin_names.iter() {
if options.open_result {
let name = match options.compile_opts.spec {
- Some(spec) => try!(PackageIdSpec::parse(spec)).get_name().to_string(),
+ Some(spec) => try!(PackageIdSpec::parse(spec)).name().to_string(),
None => {
match lib_names.iter().nth(0) {
Some(s) => s.to_string(),
}
};
- let path = package.get_absolute_target_dir().join("doc").join(name)
+ let path = package.absolute_target_dir().join("doc").join(name)
.join("index.html");
if path.exists() {
open_docs(&path);
let mut source = try!(PathSource::for_path(&manifest_path.dir_path(),
config));
try!(source.update());
- let package = try!(source.get_root_package());
+ let package = try!(source.root_package());
let mut registry = PackageRegistry::new(config);
try!(ops::resolve_pkg(&mut registry, &package));
let mut source = try!(PathSource::for_path(&manifest_path.dir_path(),
config));
try!(source.update());
- let package = try!(source.get_root_package());
+ let package = try!(source.root_package());
let mut registry = PackageRegistry::new(config);
let resolve = try!(ops::resolve_with_previous(&mut registry, &package,
Method::Everything,
let mut source = try!(PathSource::for_path(&manifest_path.dir_path(),
opts.config));
try!(source.update());
- let package = try!(source.get_root_package());
+ let package = try!(source.root_package());
let previous_resolve = match try!(ops::load_pkg_lockfile(&package)) {
Some(resolve) => resolve,
// TODO: see comment in `resolve.rs` as well, but this
// seems like a pretty hokey reason to single out
// the registry as well.
- let precise = if dep.get_source_id().is_registry() {
- format!("{}={}", dep.get_name(), precise)
+ let precise = if dep.source_id().is_registry() {
+ format!("{}={}", dep.name(), precise)
} else {
precise.to_string()
};
- let precise = dep.get_source_id().clone()
+ let precise = dep.source_id().clone()
.with_precise(Some(precise));
try!(registry.add_sources(&[precise]));
}
let mut src = try!(PathSource::for_path(&manifest_path.dir_path(),
config));
try!(src.update());
- let pkg = try!(src.get_root_package());
+ let pkg = try!(src.root_package());
if metadata {
try!(check_metadata(&pkg, config));
}
if list {
- let root = pkg.get_manifest_path().dir_path();
+ let root = pkg.manifest_path().dir_path();
let mut list: Vec<_> = try!(src.list_files(&pkg)).iter().map(|file| {
file.path_relative_from(&root).unwrap()
}).collect();
return Ok(None)
}
- let filename = format!("package/{}-{}.crate", pkg.get_name(),
- pkg.get_version());
- let dst = pkg.get_absolute_target_dir().join(filename);
+ let filename = format!("package/{}-{}.crate", pkg.name(), pkg.version());
+ let dst = pkg.absolute_target_dir().join(filename);
if dst.exists() { return Ok(Some(dst)) }
let mut bomb = Bomb { path: Some(dst.clone()) };
- try!(config.shell().status("Packaging", pkg.get_package_id().to_string()));
+ try!(config.shell().status("Packaging", pkg.package_id().to_string()));
try!(tar(&pkg, &src, config, &dst).chain_error(|| {
human("failed to prepare local package for uploading")
}));
// check that the package has some piece of metadata that a human can
// use to tell what the package is about.
fn check_metadata(pkg: &Package, config: &Config) -> CargoResult<()> {
- let md = pkg.get_manifest().get_metadata();
+ let md = pkg.manifest().metadata();
let mut missing = vec![];
// Put all package files into a compressed archive
let ar = Archive::new(encoder);
- let root = pkg.get_manifest_path().dir_path();
+ let root = pkg.manifest_path().dir_path();
for file in try!(src.list_files(pkg)).iter() {
if file == dst { continue }
let relative = file.path_relative_from(&root).unwrap();
try!(config.shell().verbose(|shell| {
shell.status("Archiving", &relative)
}));
- let path = format!("{}-{}{}{}", pkg.get_name(),
- pkg.get_version(), old_path::SEP, relative);
+ let path = format!("{}-{}{}{}", pkg.name(), pkg.version(),
+ old_path::SEP, relative);
try!(ar.append(&path, &mut file).chain_error(|| {
internal(format!("could not archive source file `{}`", relative))
}));
try!(config.shell().status("Verifying", pkg));
let f = try!(GzDecoder::new(try!(File::open(tar))));
- let dst = pkg.get_root().join(format!("target/package/{}-{}",
- pkg.get_name(), pkg.get_version()));
+ let dst = pkg.root().join(format!("target/package/{}-{}",
+ pkg.name(), pkg.version()));
if dst.exists() {
try!(fs::rmdir_recursive(&dst));
}
// implicitly converted to registry-based dependencies, so we rewrite those
// dependencies here.
let registry = try!(SourceId::for_central(config));
- let new_summary = pkg.get_summary().clone().map_dependencies(|d| {
- if !d.get_source_id().is_path() { return d }
- d.source_id(registry.clone())
+ let new_summary = pkg.summary().clone().map_dependencies(|d| {
+ if !d.source_id().is_path() { return d }
+ d.set_source_id(registry.clone())
});
- let mut new_manifest = pkg.get_manifest().clone();
+ let mut new_manifest = pkg.manifest().clone();
new_manifest.set_summary(new_summary);
new_manifest.set_target_dir(dst.join("target"));
let new_pkg = Package::new(new_manifest, &manifest_path,
- pkg.get_package_id().get_source_id());
+ pkg.package_id().source_id());
// Now that we've rewritten all our path dependencies, compile it!
try!(ops::compile_pkg(&new_pkg, &ops::CompileOptions {
let mut source = try!(PathSource::for_path(&manifest_path.dir_path(),
config));
try!(source.update());
- let package = try!(source.get_root_package());
+ let package = try!(source.root_package());
- let lockfile = package.get_root().join("Cargo.lock");
- let source_id = package.get_package_id().get_source_id();
+ let lockfile = package.root().join("Cargo.lock");
+ let source_id = package.package_id().source_id();
let resolve = match try!(ops::load_lockfile(&lockfile, source_id)) {
Some(resolve) => resolve,
None => return Err(human("A Cargo.lock must exist for this command"))
let pkgid = match spec {
Some(spec) => try!(resolve.query(spec)),
- None => package.get_package_id(),
+ None => package.package_id(),
};
Ok(PackageIdSpec::from_package_id(pkgid))
}
let config = options.config;
let mut src = try!(PathSource::for_path(&manifest_path.dir_path(), config));
try!(src.update());
- let root = try!(src.get_root_package());
+ let root = try!(src.root_package());
let env = options.env;
- let mut bins = root.get_manifest().get_targets().iter().filter(|a| {
+ let mut bins = root.manifest().targets().iter().filter(|a| {
let matches_kind = match target_kind {
TargetKind::Bin => a.is_bin(),
TargetKind::Example => a.is_example(),
TargetKind::Lib(_) => false,
};
- let matches_name = name.as_ref().map_or(true, |n| *n == a.get_name());
- matches_kind && matches_name && a.get_profile().get_env() == env &&
- !a.get_profile().is_custom_build()
+ let matches_name = name.as_ref().map_or(true, |n| *n == a.name());
+ matches_kind && matches_name && a.profile().env() == env &&
+ !a.profile().is_custom_build()
});
let bin = try!(bins.next().chain_error(|| {
human("a bin target must be available for `cargo run`")
Some(target) => dst.join(target),
None => dst,
};
- let exe = match (bin.get_profile().get_dest(), bin.is_example()) {
- (Some(s), true) => dst.join(s).join("examples").join(bin.get_name()),
- (Some(s), false) => dst.join(s).join(bin.get_name()),
- (None, true) => dst.join("examples").join(bin.get_name()),
- (None, false) => dst.join(bin.get_name()),
+ let exe = match (bin.profile().dest(), bin.is_example()) {
+ (Some(s), true) => dst.join(s).join("examples").join(bin.name()),
+ (Some(s), false) => dst.join(s).join(bin.name()),
+ (None, true) => dst.join("examples").join(bin.name()),
+ (None, false) => dst.join(bin.name()),
};
let exe = match exe.path_relative_from(config.cwd()) {
Some(path) => path,
cmd = cmd.env(k, v.as_ref());
}
- Ok(cmd.env("CARGO_MANIFEST_DIR", Some(pkg.get_manifest_path().dir_path()))
+ Ok(cmd.env("CARGO_MANIFEST_DIR", Some(pkg.manifest_path().dir_path()))
.env("CARGO_PKG_VERSION_MAJOR",
- Some(pkg.get_version().major.to_string()))
+ Some(pkg.version().major.to_string()))
.env("CARGO_PKG_VERSION_MINOR",
- Some(pkg.get_version().minor.to_string()))
+ Some(pkg.version().minor.to_string()))
.env("CARGO_PKG_VERSION_PATCH",
- Some(pkg.get_version().patch.to_string()))
+ Some(pkg.version().patch.to_string()))
.env("CARGO_PKG_VERSION_PRE",
- pre_version_component(pkg.get_version()))
+ pre_version_component(pkg.version()))
.env("CARGO_PKG_VERSION",
- Some(pkg.get_version().to_string()))
- .cwd(pkg.get_root()))
+ Some(pkg.version().to_string()))
+ .cwd(pkg.root()))
}
}
try!(self.host.prepare().chain_error(|| {
internal(format!("couldn't prepare build directories for `{}`",
- pkg.get_name()))
+ pkg.name()))
}));
match self.target {
Some(ref mut target) => {
try!(target.prepare().chain_error(|| {
internal(format!("couldn't prepare build directories \
- for `{}`", pkg.get_name()))
+ for `{}`", pkg.name()))
}));
}
None => {}
}
- let targets = pkg.get_targets().iter();
- for target in targets.filter(|t| t.get_profile().is_compile()) {
+ let targets = pkg.targets().iter();
+ for target in targets.filter(|t| t.profile().is_compile()) {
self.build_requirements(pkg, target, Platform::Target);
}
fn build_requirements(&mut self, pkg: &'a Package, target: &'a Target,
req: Platform) {
- let req = if target.get_profile().is_for_host() {Platform::Plugin} else {req};
- match self.requirements.entry((pkg.get_package_id(), target.get_name())) {
+ let req = if target.profile().is_for_host() {Platform::Plugin} else {req};
+ match self.requirements.entry((pkg.package_id(), target.name())) {
Occupied(mut entry) => match (*entry.get(), req) {
(Platform::Plugin, Platform::Plugin) |
(Platform::PluginAndTarget, Platform::Plugin) |
self.build_requirements(pkg, dep, req);
}
- match pkg.get_targets().iter().find(|t| t.get_profile().is_custom_build()) {
+ match pkg.targets().iter().find(|t| t.profile().is_custom_build()) {
Some(custom_build) => {
self.build_requirements(pkg, custom_build, Platform::Plugin);
}
pub fn get_requirement(&self, pkg: &'a Package,
target: &'a Target) -> Platform {
- let default = if target.get_profile().is_for_host() {
+ let default = if target.profile().is_for_host() {
Platform::Plugin
} else {
Platform::Target
};
- self.requirements.get(&(pkg.get_package_id(), target.get_name()))
+ self.requirements.get(&(pkg.package_id(), target.name()))
.map(|a| *a).unwrap_or(default)
}
/// Returns the appropriate directory layout for either a plugin or not.
pub fn layout(&self, pkg: &Package, kind: Kind) -> LayoutProxy {
- let primary = pkg.get_package_id() == self.resolve.root();
+ let primary = pkg.package_id() == self.resolve.root();
match kind {
Kind::Host => LayoutProxy::new(&self.host, primary),
Kind::Target => LayoutProxy::new(self.target.as_ref()
/// target.
pub fn out_dir(&self, pkg: &Package, kind: Kind, target: &Target) -> Path {
let out_dir = self.layout(pkg, kind);
- if target.get_profile().is_custom_build() {
+ if target.profile().is_custom_build() {
out_dir.build(pkg)
} else if target.is_example() {
out_dir.examples().clone()
let mut ret = Vec::new();
if target.is_example() || target.is_bin() ||
- target.get_profile().is_test() {
+ target.profile().is_test() {
ret.push(format!("{}{}", stem,
- if target.get_profile().is_for_host() {
+ if target.profile().is_for_host() {
&self.host_exe
} else {
&self.target_exe
}));
} else {
if target.is_dylib() {
- let plugin = target.get_profile().is_for_host();
+ let plugin = target.profile().is_for_host();
let kind = if plugin {Kind::Host} else {Kind::Target};
let (prefix, suffix) = try!(self.dylib(kind));
ret.push(format!("{}{}{}", prefix, stem, suffix));
/// for that package.
pub fn dep_targets(&self, pkg: &Package, target: &Target)
-> Vec<(&'a Package, &'a Target)> {
- let deps = match self.resolve.deps(pkg.get_package_id()) {
+ let deps = match self.resolve.deps(pkg.package_id()) {
None => return vec!(),
Some(deps) => deps,
};
deps.map(|id| self.get_package(id)).filter(|dep| {
- let pkg_dep = pkg.get_dependencies().iter().find(|d| {
- d.get_name() == dep.get_name()
+ let pkg_dep = pkg.dependencies().iter().find(|d| {
+ d.name() == dep.name()
}).unwrap();
// If this target is a build command, then we only want build
// dependencies, otherwise we want everything *other than* build
// dependencies.
let is_correct_dep =
- target.get_profile().is_custom_build() == pkg_dep.is_build();
+ target.profile().is_custom_build() == pkg_dep.is_build();
// If this dependency is *not* a transitive dependency, then it
// only applies to test/example targets
let is_actual_dep = pkg_dep.is_transitive() ||
- target.get_profile().is_test() ||
+ target.profile().is_test() ||
target.is_example();
is_correct_dep && is_actual_dep
}).filter_map(|pkg| {
- pkg.get_targets().iter().find(|&t| self.is_relevant_target(t))
+ pkg.targets().iter().find(|&t| self.is_relevant_target(t))
.map(|t| (pkg, t))
}).collect()
}
/// Gets a package for the given package id.
pub fn get_package(&self, id: &PackageId) -> &'a Package {
self.package_set.iter()
- .find(|pkg| id == pkg.get_package_id())
+ .find(|pkg| id == pkg.package_id())
.expect("Should have found package")
}
pub fn is_relevant_target(&self, target: &Target) -> bool {
target.is_lib() && match self.env {
- "doc" | "test" => target.get_profile().is_compile(),
+ "doc" | "test" => target.profile().is_compile(),
// doc-all == document everything, so look for doc targets and
// compile targets in dependencies
- "doc-all" => target.get_profile().is_compile() ||
- (target.get_profile().get_env() == "doc" &&
- target.get_profile().is_doc()),
- _ => target.get_profile().get_env() == self.env &&
- !target.get_profile().is_test(),
+ "doc-all" => target.profile().is_compile() ||
+ (target.profile().env() == "doc" &&
+ target.profile().is_doc()),
+ _ => target.profile().env() == self.env &&
+ !target.profile().is_test(),
}
}
/// This may involve overriding some options such as debug information,
/// rpath, opt level, etc.
pub fn profile(&self, target: &Target) -> Profile {
- let mut profile = target.get_profile().clone();
+ let mut profile = target.profile().clone();
let root_package = self.get_package(self.resolve.root());
- for target in root_package.get_manifest().get_targets().iter() {
- let root_profile = target.get_profile();
- if root_profile.get_env() != profile.get_env() { continue }
- profile = profile.opt_level(root_profile.get_opt_level())
- .debug(root_profile.get_debug())
- .rpath(root_profile.get_rpath())
+ for target in root_package.manifest().targets().iter() {
+ let root_profile = target.profile();
+ if root_profile.env() != profile.env() { continue }
+ profile = profile.set_opt_level(root_profile.opt_level())
+ .set_debug(root_profile.debug())
+ .set_rpath(root_profile.rpath())
}
profile
}
// Start preparing the process to execute, starting out with some
// environment variables.
- let profile = target.get_profile();
+ let profile = target.profile();
let to_exec = CString::from_slice(to_exec.as_vec());
let p = try!(super::process(CommandType::Host(to_exec), pkg, target, cx));
let mut p = p.env("OUT_DIR", Some(&build_output))
- .env("CARGO_MANIFEST_DIR", Some(pkg.get_manifest_path()
+ .env("CARGO_MANIFEST_DIR", Some(pkg.manifest_path()
.dir_path()
.display().to_string()))
.env("NUM_JOBS", Some(cx.jobs().to_string()))
Kind::Host => cx.config.rustc_host(),
Kind::Target => cx.target_triple(),
}))
- .env("DEBUG", Some(profile.get_debug().to_string()))
- .env("OPT_LEVEL", Some(profile.get_opt_level().to_string()))
- .env("PROFILE", Some(profile.get_env()))
+ .env("DEBUG", Some(profile.debug().to_string()))
+ .env("OPT_LEVEL", Some(profile.opt_level().to_string()))
+ .env("PROFILE", Some(profile.env()))
.env("HOST", Some(cx.config.rustc_host()));
// Be sure to pass along all enabled features for this package, this is the
// last piece of statically known information that we have.
- match cx.resolve.features(pkg.get_package_id()) {
+ match cx.resolve.features(pkg.package_id()) {
Some(features) => {
for feat in features.iter() {
p = p.env(&format!("CARGO_FEATURE_{}", super::envify(feat)),
// This information will be used at build-time later on to figure out which
// sorts of variables need to be discovered at that time.
let lib_deps = {
- let non_build_target = pkg.get_targets().iter().find(|t| {
- !t.get_profile().is_custom_build()
+ let non_build_target = pkg.targets().iter().find(|t| {
+ !t.profile().is_custom_build()
}).unwrap();
cx.dep_targets(pkg, non_build_target).iter().filter_map(|&(pkg, _)| {
- pkg.get_manifest().get_links().map(|links| {
- (links.to_string(), pkg.get_package_id().clone())
+ pkg.manifest().links().map(|links| {
+ (links.to_string(), pkg.package_id().clone())
})
}).collect::<Vec<_>>()
};
let pkg_name = pkg.to_string();
let build_state = cx.build_state.clone();
- let id = pkg.get_package_id().clone();
+ let id = pkg.package_id().clone();
let all = (id.clone(), pkg_name.clone(), build_state.clone(),
build_output.clone());
let plugin_deps = super::crawl_build_deps(cx, pkg, target, Kind::Host);
packages: &PackageSet) -> BuildState {
let mut sources = HashMap::new();
for package in packages.iter() {
- match package.get_manifest().get_links() {
+ match package.manifest().links() {
Some(links) => {
sources.insert(links.to_string(),
- package.get_package_id().clone());
+ package.package_id().clone());
}
None => {}
}
target: &'a Target,
kind: Kind) -> CargoResult<Preparation> {
let _p = profile::start(format!("fingerprint: {} / {:?}",
- pkg.get_package_id(), target));
+ pkg.package_id(), target));
let new = dir(cx, pkg, kind);
let loc = new.join(filename(target));
let root = cx.out_dir(pkg, kind, target);
let mut missing_outputs = false;
- if !target.get_profile().is_doc() {
+ if !target.profile().is_doc() {
for filename in try!(cx.target_filenames(target)).iter() {
let dst = root.join(filename);
missing_outputs |= !dst.exists();
- if target.get_profile().is_test() {
- cx.compilation.tests.push((target.get_name().to_string(), dst));
+ if target.profile().is_test() {
+ cx.compilation.tests.push((target.name().to_string(), dst));
} else if target.is_bin() {
cx.compilation.binaries.push(dst);
} else if target.is_lib() {
- let pkgid = pkg.get_package_id().clone();
+ let pkgid = pkg.package_id().clone();
match cx.compilation.libraries.entry(pkgid) {
Occupied(entry) => entry.into_mut(),
Vacant(entry) => entry.insert(Vec::new()),
target: &'a Target,
kind: Kind)
-> CargoResult<Fingerprint> {
- let key = (pkg.get_package_id(), target, kind);
+ let key = (pkg.package_id(), target, kind);
match cx.fingerprints.get(&key) {
Some(s) => return Ok(s.clone()),
None => {}
// First, calculate all statically known "salt data" such as the profile
// information (compiler flags), the compiler version, activated features,
// and target configuration.
- let features = cx.resolve.features(pkg.get_package_id());
+ let features = cx.resolve.features(pkg.package_id());
let features = features.map(|s| {
let mut v = s.iter().collect::<Vec<&String>>();
v.sort();
let deps = try!(cx.dep_targets(pkg, target).into_iter().map(|(p, t)| {
let kind = match kind {
Kind::Host => Kind::Host,
- Kind::Target if t.get_profile().is_for_host() => Kind::Host,
+ Kind::Target if t.profile().is_for_host() => Kind::Host,
Kind::Target => Kind::Target,
};
calculate(cx, p, t, kind)
// change so long as the source itself remains constant (which is the
// responsibility of the source)
fn use_dep_info(pkg: &Package, target: &Target) -> bool {
- let doc = target.get_profile().is_doc();
- let path = pkg.get_summary().get_source_id().is_path();
+ let doc = target.profile().is_doc();
+ let path = pkg.summary().source_id().is_path();
!doc && path
}
}
let _p = profile::start(format!("fingerprint build cmd: {}",
- pkg.get_package_id()));
+ pkg.package_id()));
let new = dir(cx, pkg, kind);
let loc = new.join("build");
// directory as part of freshness.
if target.is_none() {
let native_dir = cx.layout(pkg, kind).native(pkg);
- cx.compilation.native_dirs.insert(pkg.get_package_id().clone(),
+ cx.compilation.native_dirs.insert(pkg.package_id().clone(),
native_dir);
}
fn calculate_pkg_fingerprint(cx: &Context, pkg: &Package) -> CargoResult<String> {
let source = cx.sources
- .get(pkg.get_package_id().get_source_id())
+ .get(pkg.package_id().source_id())
.expect("BUG: Missing package source");
source.fingerprint(pkg)
fn filename(target: &Target) -> String {
let kind = if target.is_lib() {"lib"} else {"bin"};
- let flavor = if target.get_profile().is_test() {
+ let flavor = if target.profile().is_test() {
"test-"
- } else if target.get_profile().is_doc() {
+ } else if target.profile().is_doc() {
"doc-"
} else {
""
};
- format!("{}{}-{}", flavor, kind, target.get_name())
+ format!("{}{}-{}", flavor, kind, target.name())
}
// The dep-info files emitted by the compiler all have their listed paths
// Record the freshness state of this package as dirty if any job is
// dirty or fresh otherwise
let fresh = jobs.iter().fold(Fresh, |f1, &(_, f2)| f1.combine(f2));
- match self.state.entry(pkg.get_package_id()) {
+ match self.state.entry(pkg.package_id()) {
Occupied(mut entry) => { *entry.get_mut() = entry.get().combine(fresh); }
Vacant(entry) => { entry.insert(fresh); }
};
// Add the package to the dependency graph
self.queue.enqueue(&(self.resolve, self.packages), Fresh,
- (pkg.get_package_id(), stage),
+ (pkg.package_id(), stage),
(pkg, jobs));
}
pub fn ignore(&mut self, pkg: &'a Package) {
- self.ignored.insert(pkg.get_package_id());
+ self.ignored.insert(pkg.package_id());
}
/// Execute all jobs necessary to build the dependency graph.
jobs: Vec<(Job, Freshness)>, config: &Config) -> CargoResult<()> {
let njobs = jobs.len();
let amt = if njobs == 0 {1} else {njobs as u32};
- let id = pkg.get_package_id().clone();
+ let id = pkg.package_id().clone();
// While the jobs are all running, we maintain some metadata about how
// many are running, the current state of freshness (of all the combined
// jobs), and the stage to pass to finish() later on.
self.active += amt;
- self.pending.insert((pkg.get_package_id(), stage), PendingBuild {
+ self.pending.insert((pkg.package_id(), stage), PendingBuild {
amt: amt,
fresh: fresh,
});
- let mut total_fresh = fresh.combine(self.state[pkg.get_package_id()]);
+ let mut total_fresh = fresh.combine(self.state[pkg.package_id()]);
let mut running = Vec::new();
for (job, job_freshness) in jobs.into_iter() {
let fresh = job_freshness.combine(fresh);
// In general, we try to print "Compiling" for the first nontrivial task
// run for a package, regardless of when that is. We then don't print
// out any more information for a package after we've printed it once.
- let print = !self.ignored.contains(&pkg.get_package_id());
- let print = print && !self.printed.contains(&pkg.get_package_id());
+ let print = !self.ignored.contains(&pkg.package_id());
+ let print = print && !self.printed.contains(&pkg.package_id());
if print && (stage == Stage::Libraries ||
(total_fresh == Dirty && running.len() > 0)) {
- self.printed.insert(pkg.get_package_id());
+ self.printed.insert(pkg.package_id());
match total_fresh {
Fresh => try!(config.shell().verbose(|c| {
c.status("Fresh", pkg)
// the start state which depends on the ending state of all dependent
// packages (as determined by the resolve context).
let (id, stage) = *self;
- let pkg = packages.iter().find(|p| p.get_package_id() == id).unwrap();
+ let pkg = packages.iter().find(|p| p.package_id() == id).unwrap();
let deps = resolve.deps(id).into_iter().flat_map(|a| a)
.filter(|dep| *dep != id)
.map(|dep| {
- (dep, pkg.get_dependencies().iter().find(|d| {
- d.get_name() == dep.get_name()
+ (dep, pkg.dependencies().iter().find(|d| {
+ d.name() == dep.name()
}).unwrap())
});
match stage {
impl Layout {
pub fn new(pkg: &Package, triple: Option<&str>, dest: Option<&str>) -> Layout {
- let mut path = pkg.get_absolute_target_dir();
+ let mut path = pkg.absolute_target_dir();
match triple {
Some(s) => path.push(s),
None => {}
}
fn pkg_dir(&self, pkg: &Package) -> String {
- format!("{}-{}", pkg.get_name(), short_hash(pkg.get_package_id()))
+ format!("{}-{}", pkg.name(), short_hash(pkg.package_id()))
}
}
let mut map = HashMap::new();
for dep in deps.iter() {
- let lib = match dep.get_manifest().get_links() {
+ let lib = match dep.manifest().links() {
Some(lib) => lib,
None => continue,
};
to by more than one package, and \
can only be linked to by one \
package\n\n {}\n {}",
- lib, previous, dep.get_package_id())))
+ lib, previous, dep.package_id())))
}
None => {}
}
- if !dep.get_manifest().get_targets().iter().any(|t| {
- t.get_profile().is_custom_build()
+ if !dep.manifest().targets().iter().any(|t| {
+ t.profile().is_custom_build()
}) {
return Err(human(format!("package `{}` specifies that it links to \
`{}` but does not have a custom build \
- script", dep.get_package_id(), lib)))
+ script", dep.package_id(), lib)))
}
- map.insert(lib, dep.get_package_id());
+ map.insert(lib, dep.package_id());
}
Ok(())
fn uniq_target_dest<'a>(targets: &[&'a Target]) -> Option<&'a str> {
let mut curr: Option<Option<&str>> = None;
- for t in targets.iter().filter(|t| !t.get_profile().is_custom_build()) {
- let dest = t.get_profile().get_dest();
+ for t in targets.iter().filter(|t| !t.profile().is_custom_build()) {
+ let dest = t.profile().dest();
match curr {
Some(curr) => assert!(curr == dest),
try!(links::validate(deps));
let dest = uniq_target_dest(targets);
- let root = if resolve.root() == pkg.get_package_id() {
+ let root = if resolve.root() == pkg.package_id() {
pkg
} else {
- deps.iter().find(|p| p.get_package_id() == resolve.root()).unwrap()
+ deps.iter().find(|p| p.package_id() == resolve.root()).unwrap()
};
let host_layout = Layout::new(root, None, dest);
let target_layout = build_config.requested_target.as_ref().map(|target| {
// everything in order with proper parallelism.
let mut compiled = HashSet::new();
each_dep(pkg, &cx, |dep| {
- compiled.insert(dep.get_package_id().clone());
+ compiled.insert(dep.package_id().clone());
});
for dep in deps.iter() {
if dep == pkg { continue }
// Only compile lib targets for dependencies
- let targets = dep.get_targets().iter().filter(|target| {
- target.get_profile().is_custom_build() ||
+ let targets = dep.targets().iter().filter(|target| {
+ target.profile().is_custom_build() ||
cx.is_relevant_target(*target)
}).collect::<Vec<&Target>>();
- if targets.len() == 0 && dep.get_package_id() != resolve.root() {
+ if targets.len() == 0 && dep.package_id() != resolve.root() {
return Err(human(format!("Package `{}` has no library targets", dep)))
}
- let compiled = compiled.contains(dep.get_package_id());
+ let compiled = compiled.contains(dep.package_id());
try!(compile(&targets, dep, compiled, &mut cx, &mut queue));
}
(Vec::new(), Vec::new(), Vec::new(), Vec::new());
let (mut build_custom, mut run_custom) = (Vec::new(), Vec::new());
for &target in targets.iter() {
- let work = if target.get_profile().is_doc() {
+ let work = if target.profile().is_doc() {
let rustdoc = try!(rustdoc(pkg, target, cx));
vec![(rustdoc, Kind::Target)]
} else {
// Figure out what stage this work will go into
let dst = match (target.is_lib(),
- target.get_profile().is_test(),
- target.get_profile().is_custom_build()) {
+ target.profile().is_test(),
+ target.profile().is_custom_build()) {
(_, _, true) => &mut build_custom,
(true, true, _) => &mut lib_tests,
(false, true, _) => &mut bin_tests,
(true, false, _) => &mut libs,
- (false, false, _) if target.get_profile().get_env() == "test" => &mut bin_tests,
+ (false, false, _) if target.profile().env() == "test" => &mut bin_tests,
(false, false, _) => &mut bins,
};
for (work, kind) in work.into_iter() {
// because we may need to run the build script multiple times. If the
// package is needed in both a host and target context, we need to run
// it once per context.
- if !target.get_profile().is_custom_build() { continue }
+ if !target.profile().is_custom_build() { continue }
let mut reqs = Vec::new();
let requirement = targets.iter().fold(None::<Platform>, |req, t| {
- if !t.get_profile().is_custom_build() && !t.get_profile().is_doc() {
+ if !t.profile().is_custom_build() && !t.profile().is_doc() {
let r2 = cx.get_requirement(pkg, *t);
req.map(|r| r.combine(r2)).or(Some(r2))
} else {
let before = run_custom.len();
for &req in reqs.iter() {
let kind = match req { Platform::Plugin => Kind::Host, _ => Kind::Target };
- let key = (pkg.get_package_id().clone(), kind);
- if pkg.get_manifest().get_links().is_some() &&
+ let key = (pkg.package_id().clone(), kind);
+ if pkg.manifest().links().is_some() &&
cx.build_state.outputs.lock().unwrap().contains_key(&key) {
continue
}
let plugin_deps = crawl_build_deps(cx, package, target, Kind::Host);
return rustcs.into_iter().map(|(rustc, kind)| {
- let name = package.get_name().to_string();
- let is_path_source = package.get_package_id().get_source_id().is_path();
- let show_warnings = package.get_package_id() == cx.resolve.root() ||
+ let name = package.name().to_string();
+ let is_path_source = package.package_id().source_id().is_path();
+ let show_warnings = package.package_id() == cx.resolve.root() ||
is_path_source;
let rustc = if show_warnings {rustc} else {rustc.arg("-Awarnings")};
let exec_engine = cx.exec_engine.clone();
// Prepare the native lib state (extra -L and -l flags)
let build_state = cx.build_state.clone();
- let current_id = package.get_package_id().clone();
+ let current_id = package.package_id().clone();
let plugin_deps = plugin_deps.clone();
let mut native_lib_deps = crawl_build_deps(cx, package, target, kind);
- if package.has_custom_build() && !target.get_profile().is_custom_build() {
+ if package.has_custom_build() && !target.profile().is_custom_build() {
native_lib_deps.insert(0, current_id.clone());
}
// If we are a binary and the package also contains a library, then we
// don't pass the `-l` flags.
- let pass_l_flag = target.is_lib() || !package.get_targets().iter().any(|t| {
+ let pass_l_flag = target.is_lib() || !package.targets().iter().any(|t| {
t.is_lib()
});
for &(pkg, target) in cx.dep_targets(pkg, target).iter() {
let req = cx.get_requirement(pkg, target);
if !req.includes(kind) { continue }
- if !visiting.insert(pkg.get_package_id()) { continue }
+ if !visiting.insert(pkg.package_id()) { continue }
if pkg.has_custom_build() {
- libs.insert(pkg.get_package_id().clone());
+ libs.insert(pkg.package_id().clone());
}
visit(cx, pkg, target, kind, visiting, libs);
- visiting.remove(&pkg.get_package_id());
+ visiting.remove(&pkg.package_id());
}
}
}
let mut rustdoc = rustdoc.arg(root_path(cx, package, target))
.cwd(cx.config.cwd().clone())
.arg("-o").arg(cx_root)
- .arg("--crate-name").arg(target.get_name());
+ .arg("--crate-name").arg(target.name());
- match cx.resolve.features(package.get_package_id()) {
+ match cx.resolve.features(package.package_id()) {
Some(features) => {
for feat in features.iter() {
rustdoc = rustdoc.arg("--cfg").arg(format!("feature=\"{}\"", feat));
trace!("commands={}", rustdoc);
- let primary = package.get_package_id() == cx.resolve.root();
- let name = package.get_name().to_string();
+ let primary = package.package_id() == cx.resolve.root();
+ let name = package.name().to_string();
let desc = rustdoc.to_string();
let exec_engine = cx.exec_engine.clone();
// This means that non-path dependencies (git/registry) will likely be shown as
// absolute paths instead of relative paths.
fn root_path(cx: &Context, pkg: &Package, target: &Target) -> Path {
- let absolute = pkg.get_root().join(target.get_src_path());
+ let absolute = pkg.root().join(target.src_path());
let cwd = cx.config.cwd();
if cwd.is_ancestor_of(&absolute) {
absolute.path_relative_from(cwd).unwrap_or(absolute)
pkg: &Package,
target: &Target,
crate_types: &[&str]) -> CommandPrototype {
- let metadata = target.get_metadata();
+ let metadata = target.metadata();
// Move to cwd so the root_path() passed below is actually correct
cmd = cmd.cwd(cx.config.cwd().clone());
// TODO: Handle errors in converting paths into args
cmd = cmd.arg(root_path(cx, pkg, target));
- cmd = cmd.arg("--crate-name").arg(target.get_name());
+ cmd = cmd.arg("--crate-name").arg(target.name());
for crate_type in crate_types.iter() {
cmd = cmd.arg("--crate-type").arg(*crate_type);
let prefer_dynamic = profile.is_for_host() ||
(crate_types.contains(&"dylib") &&
- pkg.get_package_id() != cx.resolve.root());
+ pkg.package_id() != cx.resolve.root());
if prefer_dynamic {
cmd = cmd.arg("-C").arg("prefer-dynamic");
}
- if profile.get_opt_level() != 0 {
- cmd = cmd.arg("-C").arg(format!("opt-level={}", profile.get_opt_level()));
+ if profile.opt_level() != 0 {
+ cmd = cmd.arg("-C").arg(format!("opt-level={}", profile.opt_level()));
}
- if (target.is_bin() || target.is_staticlib()) && profile.get_lto() {
+ if (target.is_bin() || target.is_staticlib()) && profile.lto() {
cmd = cmd.args(&["-C", "lto"]);
} else {
// There are some restrictions with LTO and codegen-units, so we
// only add codegen units when LTO is not used.
- match profile.get_codegen_units() {
+ match profile.codegen_units() {
Some(n) => cmd = cmd.arg("-C").arg(format!("codegen-units={}", n)),
None => {},
}
}
- if profile.get_debug() {
+ if profile.debug() {
cmd = cmd.arg("-g");
} else {
cmd = cmd.args(&["--cfg", "ndebug"]);
cmd = cmd.arg("--test");
}
- match cx.resolve.features(pkg.get_package_id()) {
+ match cx.resolve.features(pkg.package_id()) {
Some(features) => {
for feat in features.iter() {
cmd = cmd.arg("--cfg").arg(format!("feature=\"{}\"", feat));
None => {}
}
- if profile.get_rpath() {
+ if profile.rpath() {
cmd = cmd.arg("-C").arg("rpath");
}
cmd = try!(link_to(cmd, pkg, target, cx, kind));
}
- let targets = package.get_targets().iter().filter(|target| {
- target.is_lib() && target.get_profile().is_compile()
+ let targets = package.targets().iter().filter(|target| {
+ target.is_lib() && target.profile().is_compile()
});
if (target.is_bin() || target.is_example()) &&
- !target.get_profile().is_custom_build() {
+ !target.profile().is_custom_build() {
for target in targets.filter(|f| f.is_rlib() || f.is_dylib()) {
cmd = try!(link_to(cmd, package, target, cx, kind));
}
// target directory (hence the || here).
let layout = cx.layout(pkg, match kind {
Kind::Host => Kind::Host,
- Kind::Target if target.get_profile().is_for_host() => Kind::Host,
+ Kind::Target if target.profile().is_for_host() => Kind::Host,
Kind::Target => Kind::Target,
});
for filename in try!(cx.target_filenames(target)).iter() {
if filename.as_bytes().ends_with(b".a") { continue }
let mut v = Vec::new();
- v.push_all(target.get_name().as_bytes());
+ v.push_all(target.name().as_bytes());
v.push(b'=');
v.push_all(layout.root().as_vec());
v.push(old_path::SEP_BYTE);
where F: FnMut(&'a Package)
{
let mut visited = HashSet::new();
- let pkg = cx.get_package(pkg.get_package_id());
+ let pkg = cx.get_package(pkg.package_id());
visit_deps(pkg, cx, &mut visited, &mut f);
fn visit_deps<'a, F>(pkg: &'a Package, cx: &'a Context,
visited: &mut HashSet<&'a PackageId>, f: &mut F)
where F: FnMut(&'a Package)
{
- if !visited.insert(pkg.get_package_id()) { return }
+ if !visited.insert(pkg.package_id()) { return }
f(pkg);
- let deps = match cx.resolve.deps(pkg.get_package_id()) {
+ let deps = match cx.resolve.deps(pkg.package_id()) {
Some(deps) => deps,
None => return,
};
if options.no_run { return Ok(None) }
compile.tests.sort();
- let target_name = options.name;
+ let target_name = options.name;
let tests_to_run = compile.tests.iter().filter(|&&(ref test_name, _)| {
- target_name.map_or(true, |target_name| target_name == *test_name)
+ target_name.map_or(true, |target_name| target_name == *test_name)
});
let cwd = config.cwd();
if options.compile_opts.env == "bench" { return Ok(None) }
- let libs = compile.package.get_targets().iter().filter_map(|target| {
- if !target.get_profile().is_doctest() || !target.is_lib() {
+ let libs = compile.package.targets().iter().filter_map(|target| {
+ if !target.profile().is_doctest() || !target.is_lib() {
return None
}
- Some((target.get_src_path(), target.get_name()))
+ Some((target.src_path(), target.name()))
});
for (lib, name) in libs {
.arg("--crate-name").arg(name)
.arg("-L").arg(&compile.root_output)
.arg("-L").arg(&compile.deps_output)
- .cwd(compile.package.get_root());
+ .cwd(compile.package.root());
// FIXME(rust-lang/rust#16272): this should just always be passed.
if test_args.len() > 0 {
for (pkg, libs) in compile.libraries.iter() {
for lib in libs.iter() {
- let mut arg = pkg.get_name().as_bytes().to_vec();
+ let mut arg = pkg.name().as_bytes().to_vec();
arg.push(b'=');
arg.push_all(lib.as_vec());
p = p.arg("--extern").arg(arg);
use util::toml as cargo_toml;
pub fn load_pkg_lockfile(pkg: &Package) -> CargoResult<Option<Resolve>> {
- let lockfile = pkg.get_manifest_path().dir_path().join("Cargo.lock");
- let source_id = pkg.get_package_id().get_source_id();
+ let lockfile = pkg.manifest_path().dir_path().join("Cargo.lock");
+ let source_id = pkg.package_id().source_id();
load_lockfile(&lockfile, source_id).chain_error(|| {
human(format!("failed to parse lock file at: {}", lockfile.display()))
})
}
pub fn write_pkg_lockfile(pkg: &Package, resolve: &Resolve) -> CargoResult<()> {
- let loc = pkg.get_root().join("Cargo.lock");
+ let loc = pkg.root().join("Cargo.lock");
write_lockfile(&loc, resolve)
}
let mut src = try!(PathSource::for_path(&manifest_path.dir_path(),
config));
try!(src.update());
- let pkg = try!(src.get_root_package());
+ let pkg = try!(src.root_package());
let (mut registry, reg_id) = try!(registry(config, token, index));
try!(verify_dependencies(&pkg, ®_id));
false, true)).unwrap();
// Upload said tarball to the specified destination
- try!(config.shell().status("Uploading", pkg.get_package_id().to_string()));
+ try!(config.shell().status("Uploading", pkg.package_id().to_string()));
try!(transmit(&pkg, &tarball, &mut registry));
Ok(())
fn verify_dependencies(pkg: &Package, registry_src: &SourceId)
-> CargoResult<()> {
- for dep in pkg.get_dependencies().iter() {
- if dep.get_source_id().is_path() {
- if dep.get_specified_req().is_none() {
+ for dep in pkg.dependencies().iter() {
+ if dep.source_id().is_path() {
+ if dep.specified_req().is_none() {
return Err(human(format!("all path dependencies must have \
a version specified when \
publishing.\n\
dependency `{}` does not specify \
- a version", dep.get_name())))
+ a version", dep.name())))
}
- } else if dep.get_source_id() != registry_src {
+ } else if dep.source_id() != registry_src {
return Err(human(format!("all dependencies must come from the \
same source.\ndependency `{}` comes \
- from {} instead", dep.get_name(),
- dep.get_source_id())))
+ from {} instead", dep.name(),
+ dep.source_id())))
}
}
Ok(())
fn transmit(pkg: &Package, tarball: &Path, registry: &mut Registry)
-> CargoResult<()> {
- let deps = pkg.get_dependencies().iter().map(|dep| {
+ let deps = pkg.dependencies().iter().map(|dep| {
NewCrateDependency {
optional: dep.is_optional(),
default_features: dep.uses_default_features(),
- name: dep.get_name().to_string(),
- features: dep.get_features().to_vec(),
- version_req: dep.get_version_req().to_string(),
- target: dep.get_only_for_platform().map(|s| s.to_string()),
- kind: match dep.get_kind() {
+ name: dep.name().to_string(),
+ features: dep.features().to_vec(),
+ version_req: dep.version_req().to_string(),
+ target: dep.only_for_platform().map(|s| s.to_string()),
+ kind: match dep.kind() {
Kind::Normal => "normal",
Kind::Build => "build",
Kind::Development => "dev",
}.to_string(),
}
}).collect::<Vec<NewCrateDependency>>();
- let manifest = pkg.get_manifest();
+ let manifest = pkg.manifest();
let ManifestMetadata {
ref authors, ref description, ref homepage, ref documentation,
ref keywords, ref readme, ref repository, ref license, ref license_file,
- } = *manifest.get_metadata();
+ } = *manifest.metadata();
let readme = match *readme {
Some(ref readme) => {
- let path = pkg.get_root().join(readme);
+ let path = pkg.root().join(readme);
Some(try!(File::open(&path).read_to_string().chain_error(|| {
human("failed to read the specified README")
})))
};
match *license_file {
Some(ref file) => {
- if !pkg.get_root().join(file).exists() {
+ if !pkg.root().join(file).exists() {
return Err(human(format!("the license file `{}` does not exist",
file)))
}
None => {}
}
registry.publish(&NewCrate {
- name: pkg.get_name().to_string(),
- vers: pkg.get_version().to_string(),
+ name: pkg.name().to_string(),
+ vers: pkg.version().to_string(),
deps: deps,
- features: pkg.get_summary().get_features().clone(),
+ features: pkg.summary().features().clone(),
authors: authors.clone(),
description: description.clone(),
homepage: homepage.clone(),
let mut src = try!(PathSource::for_path(&manifest_path.dir_path(),
config));
try!(src.update());
- let pkg = try!(src.get_root_package());
- pkg.get_name().to_string()
+ let pkg = try!(src.root_package());
+ pkg.name().to_string()
}
};
let mut src = try!(PathSource::for_path(&manifest_path.dir_path(),
config));
try!(src.update());
- let pkg = try!(src.get_root_package());
- pkg.get_name().to_string()
+ let pkg = try!(src.root_package());
+ pkg.name().to_string()
}
};
let version = match version {
previous: Option<&'a Resolve>,
to_avoid: Option<&HashSet<&'a PackageId>>)
-> CargoResult<Resolve> {
- let root = package.get_package_id().get_source_id().clone();
+ let root = package.package_id().source_id().clone();
try!(registry.add_sources(&[root]));
// Here we place an artificial limitation that all non-registry sources
match to_avoid {
Some(set) => {
for package_id in set.iter() {
- let source = package_id.get_source_id();
+ let source = package_id.source_id();
if !source.is_registry() {
to_avoid_sources.insert(source);
}
None => {}
}
- let summary = package.get_summary().clone();
+ let summary = package.summary().clone();
let summary = match previous {
Some(r) => {
// In the case where a previous instance of resolve is available, we
let map = r.deps(r.root()).into_iter().flat_map(|i| i).filter(|p| {
keep(p, to_avoid, &to_avoid_sources)
}).map(|d| {
- (d.get_name(), d)
+ (d.name(), d)
}).collect::<HashMap<_, _>>();
summary.map_dependencies(|d| {
- match map.get(d.get_name()) {
+ match map.get(d.name()) {
Some(&lock) if d.matches_id(lock) => d.lock_to(lock),
_ => d,
}
to_avoid_packages: Option<&HashSet<&'a PackageId>>,
to_avoid_sources: &HashSet<&'a SourceId>)
-> bool {
- !to_avoid_sources.contains(&p.get_source_id()) && match to_avoid_packages {
+ !to_avoid_sources.contains(&p.source_id()) && match to_avoid_packages {
Some(set) => !set.contains(p),
None => true,
}
None => panic!("Not a git source; id={}", source_id),
};
- let remote = GitRemote::new(source_id.get_url());
- let ident = ident(source_id.get_url());
+ let remote = GitRemote::new(source_id.url());
+ let ident = ident(source_id.url());
let db_path = config.git_db_path().join(&ident);
.join(ident)
.join(reference_path);
- let reference = match source_id.get_precise() {
+ let reference = match source_id.precise() {
Some(s) => GitReference::Rev(s.to_string()),
None => source_id.git_reference().unwrap().clone(),
};
}
}
- pub fn get_url(&self) -> &Url {
- self.remote.get_url()
- }
+ pub fn url(&self) -> &Url { self.remote.url() }
}
fn ident(url: &Url) -> String {
impl<'a, 'b> Debug for GitSource<'a, 'b> {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
- try!(write!(f, "git repo at {}", self.remote.get_url()));
+ try!(write!(f, "git repo at {}", self.remote.url()));
match self.reference.to_ref_string() {
Some(s) => write!(f, " ({})", s),
fn update(&mut self) -> CargoResult<()> {
let actual_rev = self.remote.rev_for(&self.db_path, &self.reference);
let should_update = actual_rev.is_err() ||
- self.source_id.get_precise().is_none();
+ self.source_id.precise().is_none();
let (repo, actual_rev) = if should_update {
try!(self.config.shell().status("Updating",
- format!("git repository `{}`", self.remote.get_url())));
+ format!("git repository `{}`", self.remote.url())));
trace!("updating git source `{:?}`", self.remote);
let repo = try!(self.remote.checkout(&self.db_path));
GitRemote { url: url.clone() }
}
- pub fn get_url(&self) -> &Url {
+ pub fn url(&self) -> &Url {
&self.url
}
}
impl GitDatabase {
- fn get_path<'a>(&'a self) -> &'a Path {
+ fn path<'a>(&'a self) -> &'a Path {
&self.path
}
revision: GitRevision)
-> CargoResult<GitCheckout<'a>>
{
- let repo = try!(GitCheckout::clone_repo(database.get_path(), into));
+ let repo = try!(GitCheckout::clone_repo(database.path(), into));
let checkout = GitCheckout::new(into, database, revision, repo);
try!(checkout.reset());
Ok(checkout)
}
}
- pub fn get_root_package(&self) -> CargoResult<Package> {
- trace!("get_root_package; source={:?}", self);
+ pub fn root_package(&self) -> CargoResult<Package> {
+ trace!("root_package; source={:?}", self);
if !self.updated {
return Err(internal("source has not been updated"))
}
- match self.packages.iter().find(|p| p.get_root() == self.path) {
+ match self.packages.iter().find(|p| p.root() == self.path) {
Some(pkg) => Ok(pkg.clone()),
None => Err(internal("no package found in source"))
}
/// are relevant for building this package, but it also contains logic to
/// use other methods like .gitignore to filter the list of files.
pub fn list_files(&self, pkg: &Package) -> CargoResult<Vec<Path>> {
- let root = pkg.get_manifest_path().dir_path();
+ let root = pkg.manifest_path().dir_path();
let parse = |&: p: &String| {
Pattern::new(p).map_err(|e| {
human(format!("could not parse pattern `{}`: {}", p, e))
})
};
- let exclude = try!(pkg.get_manifest().get_exclude().iter()
+ let exclude = try!(pkg.manifest().exclude().iter()
.map(|p| parse(p)).collect::<Result<Vec<_>, _>>());
- let include = try!(pkg.get_manifest().get_include().iter()
+ let include = try!(pkg.manifest().include().iter()
.map(|p| parse(p)).collect::<Result<Vec<_>, _>>());
let mut filter = |&mut: p: &Path| {
// the root of the git repository. This isn't always true, but it'll get
// us there most of the time!.
let repo = self.packages.iter()
- .map(|pkg| pkg.get_root())
+ .map(|pkg| pkg.root())
.filter(|path| path.is_ancestor_of(&root))
.filter_map(|path| git2::Repository::open(&path).ok())
.next();
-> CargoResult<Vec<Path>>
where F: FnMut(&Path) -> bool
{
- warn!("list_files_git {}", pkg.get_package_id());
+ warn!("list_files_git {}", pkg.package_id());
let index = try!(repo.index());
let root = match repo.workdir() {
Some(dir) => dir,
None => return Err(internal_error("Can't list files on a bare repository.", "")),
};
- let pkg_path = pkg.get_manifest_path().dir_path();
+ let pkg_path = pkg.manifest_path().dir_path();
let mut ret = Vec::new();
'outer: for entry in index.iter() {
// Filter out sub-packages of this package
for other_pkg in self.packages.iter().filter(|p| *p != pkg) {
- let other_path = other_pkg.get_manifest_path().dir_path();
+ let other_path = other_pkg.manifest_path().dir_path();
if pkg_path.is_ancestor_of(&other_path) &&
other_path.is_ancestor_of(&file_path) {
continue 'outer;
{
let mut ret = Vec::new();
for pkg in self.packages.iter().filter(|p| *p == pkg) {
- let loc = pkg.get_manifest_path().dir_path();
+ let loc = pkg.manifest_path().dir_path();
try!(walk(&loc, &mut ret, true, &mut filter));
}
return Ok(ret);
impl<'a, 'b> Registry for PathSource<'a, 'b> {
fn query(&mut self, dep: &Dependency) -> CargoResult<Vec<Summary>> {
let mut summaries: Vec<Summary> = self.packages.iter()
- .map(|p| p.get_summary().clone())
+ .map(|p| p.summary().clone())
.collect();
summaries.query(dep)
}
trace!("getting packages; ids={:?}", ids);
Ok(self.packages.iter()
- .filter(|pkg| ids.iter().any(|id| pkg.get_package_id() == id))
+ .filter(|pkg| ids.iter().any(|id| pkg.package_id() == id))
.map(|pkg| pkg.clone())
.collect())
}
pub fn new(source_id: &SourceId,
config: &'a Config<'b>) -> RegistrySource<'a, 'b> {
let hash = hex::short_hash(source_id);
- let ident = source_id.get_url().host().unwrap().to_string();
+ let ident = source_id.url().host().unwrap().to_string();
let part = format!("{}-{}", ident, hash);
RegistrySource {
checkout_path: config.registry_index_path().join(&part),
fn download_package(&mut self, pkg: &PackageId, url: &Url)
-> CargoResult<Path> {
// TODO: should discover from the S3 redirect
- let filename = format!("{}-{}.crate", pkg.get_name(), pkg.get_version());
+ let filename = format!("{}-{}.crate", pkg.name(), pkg.version());
let dst = self.cache_path.join(filename);
if dst.exists() { return Ok(dst) }
try!(self.config.shell().status("Downloading", pkg));
}
// Verify what we just downloaded
- let expected = self.hashes.get(&(pkg.get_name().to_string(),
- pkg.get_version().to_string()));
+ let expected = self.hashes.get(&(pkg.name().to_string(),
+ pkg.version().to_string()));
let expected = try!(expected.chain_error(|| {
internal(format!("no hash listed for {}", pkg))
}));
/// No action is taken if the source looks like it's already unpacked.
fn unpack_package(&self, pkg: &PackageId, tarball: Path)
-> CargoResult<Path> {
- let dst = self.src_path.join(format!("{}-{}", pkg.get_name(),
- pkg.get_version()));
+ let dst = self.src_path.join(format!("{}-{}", pkg.name(),
+ pkg.version()));
if dst.join(".cargo-ok").exists() { return Ok(dst) }
try!(fs::mkdir_recursive(&dst.dir_path(), old_io::USER_DIR));
Err(..) => Vec::new(),
};
let summaries = summaries.into_iter().filter(|summary| {
- summary.0.get_package_id().get_name() == name
+ summary.0.package_id().name() == name
}).collect();
self.cache.insert(name.to_string(), summaries);
Ok(self.cache.get(name).unwrap())
_ => Kind::Normal,
};
- Ok(dep.optional(optional)
- .default_features(default_features)
- .features(features)
- .only_for_platform(target)
- .kind(kind))
+ Ok(dep.set_optional(optional)
+ .set_default_features(default_features)
+ .set_features(features)
+ .set_only_for_platform(target)
+ .set_kind(kind))
}
/// Actually perform network operations to update the registry
if self.updated { return Ok(()) }
try!(self.config.shell().status("Updating",
- format!("registry `{}`", self.source_id.get_url())));
+ format!("registry `{}`", self.source_id.url())));
let repo = try!(self.open());
// git fetch origin
- let url = self.source_id.get_url().to_string();
+ let url = self.source_id.url().to_string();
let refspec = "refs/heads/*:refs/remotes/origin/*";
try!(git::fetch(&repo, &url, refspec).chain_error(|| {
internal(format!("failed to fetch `{}`", url))
// theory the registry is known to contain this version. If, however, we
// come back with no summaries, then our registry may need to be
// updated, so we fall back to performing a lazy update.
- if dep.get_source_id().get_precise().is_some() {
- let mut summaries = try!(self.summaries(dep.get_name())).iter().map(|s| {
+ if dep.source_id().precise().is_some() {
+ let mut summaries = try!(self.summaries(dep.name())).iter().map(|s| {
s.0.clone()
}).collect::<Vec<_>>();
if try!(summaries.query(dep)).len() == 0 {
}
let mut summaries = {
- let summaries = try!(self.summaries(dep.get_name()));
+ let summaries = try!(self.summaries(dep.name()));
summaries.iter().filter(|&&(_, yanked)| {
- dep.get_source_id().get_precise().is_some() || !yanked
+ dep.source_id().precise().is_some() || !yanked
}).map(|s| s.0.clone()).collect::<Vec<_>>()
};
// `<pkg>` is the name of a crate on this source and `<req>` is the
// version requested (agument to `--precise`).
summaries.retain(|s| {
- match self.source_id.get_precise() {
- Some(p) if p.starts_with(dep.get_name()) => {
- let vers = &p[dep.get_name().len() + 1..];
- s.get_version().to_string() == vers
+ match self.source_id.precise() {
+ Some(p) if p.starts_with(dep.name()) => {
+ let vers = &p[dep.name().len() + 1..];
+ s.version().to_string() == vers
}
_ => true,
}
// querying phase. Note that precise in this case is only
// `Some("locked")` as other `Some` values indicate a `cargo update
// --precise` request
- if self.source_id.get_precise() != Some("locked") {
+ if self.source_id.precise() != Some("locked") {
try!(self.do_update());
}
Ok(())
let config = try!(self.config());
let url = try!(config.dl.to_url().map_err(internal));
for package in packages.iter() {
- if self.source_id != *package.get_source_id() { continue }
+ if self.source_id != *package.source_id() { continue }
let mut url = url.clone();
- url.path_mut().unwrap().push(package.get_name().to_string());
- url.path_mut().unwrap().push(package.get_version().to_string());
+ url.path_mut().unwrap().push(package.name().to_string());
+ url.path_mut().unwrap().push(package.version().to_string());
url.path_mut().unwrap().push("download".to_string());
let path = try!(self.download_package(package, &url).chain_error(|| {
internal(format!("Failed to download package `{}` from {}",
}
fn fingerprint(&self, pkg: &Package) -> CargoResult<String> {
- Ok(pkg.get_package_id().get_version().to_string())
+ Ok(pkg.package_id().version().to_string())
}
}
Some(ref toml) => add_unused_keys(&mut manifest, toml, "".to_string()),
None => {}
}
- if manifest.get_targets().iter()
- .filter(|t| !t.get_profile().is_custom_build() )
- .next().is_none() {
+ if manifest.targets().iter()
+ .filter(|t| !t.profile().is_custom_build() )
+ .next().is_none() {
return Err(human(format!("either a [lib] or [[bin]] section must \
be present")))
}
try!(process_dependencies(&mut cx, self.dependencies.as_ref(),
|dep| dep));
try!(process_dependencies(&mut cx, self.dev_dependencies.as_ref(),
- |dep| dep.kind(Kind::Development)));
+ |dep| dep.set_kind(Kind::Development)));
try!(process_dependencies(&mut cx, self.build_dependencies.as_ref(),
- |dep| dep.kind(Kind::Build)));
+ |dep| dep.set_kind(Kind::Build)));
if let Some(targets) = self.target.as_ref() {
for (name, platform) in targets.iter() {
try!(process_dependencies(&mut cx,
platform.dependencies.as_ref(),
|dep| {
- dep.only_for_platform(Some(name.clone()))
+ dep.set_only_for_platform(Some(name.clone()))
}));
}
}
.map(|v| v.as_slice()),
&new_source_id));
let dep = f(dep)
- .features(details.features.unwrap_or(Vec::new()))
- .default_features(details.default_features.unwrap_or(true))
- .optional(details.optional.unwrap_or(false));
+ .set_features(details.features.unwrap_or(Vec::new()))
+ .set_default_features(details.default_features.unwrap_or(true))
+ .set_optional(details.optional.unwrap_or(false));
cx.deps.push(dep);
}
Some(ref toml) => toml,
None => return profile,
};
- let opt_level = toml.opt_level.unwrap_or(profile.get_opt_level());
- let lto = toml.lto.unwrap_or(profile.get_lto());
+ let opt_level = toml.opt_level.unwrap_or(profile.opt_level());
+ let lto = toml.lto.unwrap_or(profile.lto());
let codegen_units = toml.codegen_units;
- let debug = toml.debug.unwrap_or(profile.get_debug());
- let rpath = toml.rpath.unwrap_or(profile.get_rpath());
- profile.opt_level(opt_level).lto(lto).codegen_units(codegen_units)
- .debug(debug).rpath(rpath)
+ let debug = toml.debug.unwrap_or(profile.debug());
+ let rpath = toml.rpath.unwrap_or(profile.rpath());
+ profile.set_opt_level(opt_level).set_lto(lto)
+ .set_codegen_units(codegen_units)
+ .set_debug(debug).set_rpath(rpath)
}
fn target_profiles(target: &TomlTarget, profiles: &TomlProfiles,
let doctest = target.doctest.unwrap_or(true);
match target.doc {
Some(true) | None => {
- ret.push(merge(Profile::default_doc().doctest(doctest),
+ ret.push(merge(Profile::default_doc().set_doctest(doctest),
&profiles.doc));
}
Some(false) => {}
match dep {
TestDep::Needed => {
- ret.push(merge(Profile::default_test().test(false),
+ ret.push(merge(Profile::default_test().set_test(false),
&profiles.test));
- ret.push(merge(Profile::default_doc().doc(false),
+ ret.push(merge(Profile::default_doc().set_doc(false),
&profiles.doc));
- ret.push(merge(Profile::default_bench().test(false),
+ ret.push(merge(Profile::default_bench().set_test(false),
&profiles.bench));
}
_ => {}
}
if target.plugin == Some(true) {
- ret = ret.into_iter().map(|p| p.for_host(true)).collect();
+ ret = ret.into_iter().map(|p| p.set_for_host(true)).collect();
}
ret
fn custom_build_target(dst: &mut Vec<Target>, cmd: &Path,
profiles: &TomlProfiles) {
let profiles = [
- merge(Profile::default_dev().for_host(true).custom_build(true),
+ merge(Profile::default_dev().set_for_host(true).set_custom_build(true),
&profiles.dev),
];
let mut metadata = metadata.clone();
metadata.mix(&format!("test-{}", test.name));
- let profile = Profile::default_test().harness(harness);
+ let profile = Profile::default_test().set_harness(harness);
let profile = merge(profile, &profiles.test);
dst.push(Target::test_target(&test.name,
&path.to_path(),
let mut metadata = metadata.clone();
metadata.mix(&format!("bench-{}", bench.name));
- let profile = Profile::default_bench().harness(harness);
+ let profile = Profile::default_bench().set_harness(harness);
let profile = merge(profile, &profiles.bench);
dst.push(Target::bench_target(&bench.name,
&path.to_path(),
#[test]
fn test_resolving_with_dev_deps() {
let mut reg = registry(vec!(
- pkg!("foo" => ["bar", dep("baz").kind(Development)]),
- pkg!("baz" => ["bat", dep("bam").kind(Development)]),
+ pkg!("foo" => ["bar", dep("baz").set_kind(Development)]),
+ pkg!("baz" => ["bat", dep("bam").set_kind(Development)]),
pkg!("bar"),
pkg!("bat")
));
let res = resolve(pkg_id("root"),
- vec![dep("foo"), dep("baz").kind(Development)],
+ vec![dep("foo"), dep("baz").set_kind(Development)],
&mut reg).unwrap();
assert_that(&res, contains(names(&["root", "foo", "bar", "baz"])));
RUNNING)));
});
-test!(bench_target_name {
+test!(bench_target_name {
let prj = project("foo")
.file("Cargo.toml" , r#"
[package]
let subpackage = paths::root().join("foo").join("components");
fs::mkdir(&subpackage, USER_RWX).unwrap();
- assert_that(cargo_process("new").arg("foo/components/subcomponent"),
+ assert_that(cargo_process("new").arg("foo/components/subcomponent")
+ .env("USER", Some("foo")),
execs().with_status(0));
assert_that(&paths::root().join("foo/components/subcomponent/.git"),