let root = try!(find_root_manifest_for_cwd(options.flag_manifest_path));
config.shell().set_verbose(options.flag_verbose);
+ let mut benches = Vec::new();
+ if let Some(s) = options.flag_bench {
+ benches.push(s);
+ }
+
let ops = ops::TestOptions {
- name: options.flag_bench.as_ref().map(|s| &s[..]),
no_run: options.flag_no_run,
compile_opts: ops::CompileOptions {
- env: "bench",
config: config,
jobs: options.flag_jobs,
target: options.flag_target.as_ref().map(|s| &s[..]),
- dev_deps: true,
features: &options.flag_features,
no_default_features: options.flag_no_default_features,
spec: options.flag_package.as_ref().map(|s| &s[..]),
- lib_only: false,
exec_engine: None,
+ release: true,
+ mode: ops::CompileMode::Bench,
+ filter: if benches.len() == 0 {
+ ops::CompileFilter::Everything
+ } else {
+ ops::CompileFilter::Only {
+ lib: false, bins: &[], examples: &[], tests: &[],
+ benches: &benches,
+ }
+ },
},
};
";
pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
- debug!("executing; cmd=cargo-build; args={:?}", env::args().collect::<Vec<_>>());
+ debug!("executing; cmd=cargo-build; args={:?}",
+ env::args().collect::<Vec<_>>());
config.shell().set_verbose(options.flag_verbose);
let root = try!(find_root_manifest_for_cwd(options.flag_manifest_path));
- let env = if options.flag_release {
- "release"
- } else {
- "compile"
- };
-
let opts = CompileOptions {
- env: env,
config: config,
jobs: options.flag_jobs,
target: options.flag_target.as_ref().map(|t| &t[..]),
- dev_deps: false,
features: &options.flag_features,
no_default_features: options.flag_no_default_features,
spec: options.flag_package.as_ref().map(|s| &s[..]),
- lib_only: options.flag_lib,
exec_engine: None,
+ mode: ops::CompileMode::Build,
+ release: options.flag_release,
+ filter: if options.flag_lib {
+ ops::CompileFilter::Only {
+ lib: true, bins: &[], examples: &[], benches: &[], tests: &[]
+ }
+ } else {
+ ops::CompileFilter::Everything
+ },
};
ops::compile(&root, &opts).map(|_| None).map_err(|err| {
all: !options.flag_no_deps,
open_result: options.flag_open,
compile_opts: ops::CompileOptions {
- env: if options.flag_no_deps {"doc"} else {"doc-all"},
config: config,
jobs: options.flag_jobs,
target: None,
- dev_deps: false,
features: &options.flag_features,
no_default_features: options.flag_no_default_features,
spec: options.flag_package.as_ref().map(|s| &s[..]),
- lib_only: false,
exec_engine: None,
+ filter: ops::CompileFilter::Everything,
+ release: false,
+ mode: ops::CompileMode::Build,
},
};
use cargo::ops;
-use cargo::core::manifest::TargetKind;
-use cargo::util::{CliResult, CliError, human, Config};
+use cargo::util::{CliResult, CliError, Config};
use cargo::util::important_paths::{find_root_manifest_for_cwd};
#[derive(RustcDecodable)]
config.shell().set_verbose(options.flag_verbose);
let root = try!(find_root_manifest_for_cwd(options.flag_manifest_path));
- let env = match (options.flag_release, options.flag_example.is_some()) {
- (true, _) => "release",
- (false, true) => "test",
- (false, false) => "compile"
- };
+ let (mut examples, mut bins) = (Vec::new(), Vec::new());
+ if let Some(s) = options.flag_bin {
+ bins.push(s);
+ }
+ if let Some(s) = options.flag_example {
+ examples.push(s);
+ }
let compile_opts = ops::CompileOptions {
- env: env,
config: config,
jobs: options.flag_jobs,
target: options.flag_target.as_ref().map(|t| &t[..]),
- dev_deps: true,
features: &options.flag_features,
no_default_features: options.flag_no_default_features,
spec: None,
- lib_only: false,
exec_engine: None,
- };
-
- let (target_kind, name) = match (options.flag_bin, options.flag_example) {
- (Some(bin), None) => (TargetKind::Bin, Some(bin)),
- (None, Some(example)) => (TargetKind::Example, Some(example)),
- (None, None) => (TargetKind::Bin, None),
- (Some(_), Some(_)) => return Err(CliError::from_boxed(
- human("specify either `--bin` or `--example`, not both"), 1)),
+ release: options.flag_release,
+ mode: ops::CompileMode::Build,
+ filter: if examples.len() == 0 && bins.len() == 0 {
+ ops::CompileFilter::Everything
+ } else {
+ ops::CompileFilter::Only {
+ lib: false, tests: &[], benches: &[],
+ bins: &bins, examples: &examples,
+ }
+ },
};
let err = try!(ops::run(&root,
- target_kind,
- name,
&compile_opts,
&options.arg_args).map_err(|err| {
CliError::from_boxed(err, 101)
let root = try!(find_root_manifest_for_cwd(options.flag_manifest_path));
config.shell().set_verbose(options.flag_verbose);
+ let mut tests = Vec::new();
+ if let Some(s) = options.flag_test {
+ tests.push(s);
+ }
+
let ops = ops::TestOptions {
- name: options.flag_test.as_ref().map(|s| &s[..]),
no_run: options.flag_no_run,
compile_opts: ops::CompileOptions {
- env: "test",
config: config,
jobs: options.flag_jobs,
target: options.flag_target.as_ref().map(|s| &s[..]),
- dev_deps: true,
features: &options.flag_features,
no_default_features: options.flag_no_default_features,
spec: options.flag_package.as_ref().map(|s| &s[..]),
- lib_only: false,
exec_engine: None,
+ release: false,
+ mode: ops::CompileMode::Test,
+ filter: if tests.len() == 0 {
+ ops::CompileFilter::Everything
+ } else {
+ ops::CompileFilter::Only {
+ lib: false, bins: &[], examples: &[], benches: &[],
+ tests: &tests,
+ }
+ }
},
};
-use std::hash;
+use std::default::Default;
use std::path::{PathBuf, Path};
use semver::Version;
use util::{CargoResult, human};
/// Contains all the information about a package, as loaded from a Cargo.toml.
-#[derive(PartialEq,Clone, Debug)]
+#[derive(Clone, Debug)]
pub struct Manifest {
summary: Summary,
targets: Vec<Target>,
exclude: Vec<String>,
include: Vec<String>,
metadata: ManifestMetadata,
+ profiles: Profiles,
}
/// General metadata about a package which is just blindly uploaded to the
pub enum TargetKind {
Lib(Vec<LibKind>),
Bin,
+ Test,
+ Bench,
Example,
+ CustomBuild,
}
-#[derive(RustcEncodable, RustcDecodable, Clone, PartialEq, Eq, Debug)]
+#[derive(RustcEncodable, RustcDecodable, Clone, PartialEq, Eq, Debug, Hash)]
pub struct Profile {
- env: String, // compile, test, dev, bench, etc.
- opt_level: u32,
- lto: bool,
- codegen_units: Option<u32>, // None = use rustc default
- debug: bool,
- rpath: bool,
- test: bool,
- doctest: bool,
- doc: bool,
- dest: String,
- for_host: bool,
- harness: bool, // whether to use the test harness (--test)
- custom_build: bool,
-}
-
-impl Profile {
- fn default() -> Profile {
- Profile {
- env: String::new(),
- opt_level: 0,
- lto: false,
- codegen_units: None,
- debug: false,
- rpath: false,
- test: false,
- doc: false,
- dest: "debug".to_string(),
- for_host: false,
- doctest: false,
- custom_build: false,
- harness: true,
- }
- }
-
- pub fn default_dev() -> Profile {
- Profile {
- env: "compile".to_string(), // run in the default environment only
- opt_level: 0,
- debug: true,
- .. Profile::default()
- }
- }
-
- pub fn default_test() -> Profile {
- Profile {
- env: "test".to_string(),
- debug: true,
- test: true,
- .. Profile::default()
- }
- }
-
- pub fn default_example() -> Profile {
- Profile {
- test: false,
- .. Profile::default_test()
- }
- }
-
- pub fn default_bench() -> Profile {
- Profile {
- env: "bench".to_string(),
- test: true,
- .. Profile::default_release()
- }
- }
-
- pub fn default_release() -> Profile {
- Profile {
- env: "release".to_string(),
- opt_level: 3,
- dest: "release".to_string(),
- .. Profile::default()
- }
- }
-
- pub fn default_doc() -> Profile {
- Profile {
- env: "doc".to_string(),
- doc: true,
- .. Profile::default()
- }
- }
-
- pub fn codegen_units(&self) -> Option<u32> { self.codegen_units }
- pub fn debug(&self) -> bool { self.debug }
- pub fn env(&self) -> &str { &self.env }
- pub fn is_compile(&self) -> bool { self.env == "compile" }
- pub fn is_custom_build(&self) -> bool { self.custom_build }
- pub fn is_doc(&self) -> bool { self.doc }
- pub fn is_doctest(&self) -> bool { self.doctest }
- pub fn is_for_host(&self) -> bool { self.for_host }
- pub fn is_test(&self) -> bool { self.test }
- pub fn lto(&self) -> bool { self.lto }
- pub fn opt_level(&self) -> u32 { self.opt_level }
- pub fn rpath(&self) -> bool { self.rpath }
- pub fn uses_test_harness(&self) -> bool { self.harness }
- pub fn dest(&self) -> &str { &self.dest }
-
- pub fn set_opt_level(mut self, level: u32) -> Profile {
- self.opt_level = level;
- self
- }
-
- pub fn set_lto(mut self, lto: bool) -> Profile {
- self.lto = lto;
- self
- }
-
- pub fn set_codegen_units(mut self, units: Option<u32>) -> Profile {
- self.codegen_units = units;
- self
- }
-
- pub fn set_debug(mut self, debug: bool) -> Profile {
- self.debug = debug;
- self
- }
-
- pub fn set_rpath(mut self, rpath: bool) -> Profile {
- self.rpath = rpath;
- self
- }
-
- pub fn set_test(mut self, test: bool) -> Profile {
- self.test = test;
- self
- }
-
- pub fn set_doctest(mut self, doctest: bool) -> Profile {
- self.doctest = doctest;
- self
- }
-
- pub fn set_doc(mut self, doc: bool) -> Profile {
- self.doc = doc;
- self
- }
-
- /// Sets whether the `Target` must be compiled for the host instead of the
- /// target platform.
- pub fn set_for_host(mut self, for_host: bool) -> Profile {
- self.for_host = for_host;
- self
- }
-
- pub fn set_harness(mut self, harness: bool) -> Profile {
- self.harness = harness;
- self
- }
-
- /// Sets whether the `Target` is a custom build script.
- pub fn set_custom_build(mut self, custom_build: bool) -> Profile {
- self.custom_build = custom_build;
- self
- }
+ pub opt_level: u32,
+ pub lto: bool,
+ pub codegen_units: Option<u32>, // None = use rustc default
+ pub debuginfo: bool,
+ pub ndebug: bool,
+ pub rpath: bool,
+ pub test: bool,
+ pub doc: bool,
}
-impl hash::Hash for Profile {
- fn hash<H: hash::Hasher>(&self, into: &mut H) {
- // Be sure to match all fields explicitly, but ignore those not relevant
- // to the actual hash of a profile.
- let Profile {
- opt_level,
- lto,
- codegen_units,
- debug,
- rpath,
- for_host,
- ref dest,
- harness,
-
- // test flags are separated by file, not by profile hash, and
- // env/doc also don't matter for the actual contents of the output
- // file, just where the output file is located.
- doc: _,
- env: _,
- test: _,
- doctest: _,
-
- custom_build: _,
- } = *self;
- (opt_level, lto, codegen_units, debug,
- rpath, for_host, dest, harness).hash(into)
- }
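+/// The set of profiles carried by a `Manifest`, one per build flavor
+/// (dev, release, test, bench, doc).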
+#[derive(Default, Clone, Debug)]
+pub struct Profiles {
+ pub release: Profile,
+ pub dev: Profile,
+ pub test: Profile,
+ pub bench: Profile,
+ pub doc: Profile,
}
/// Information about a binary, a library, an example, etc. that is part of the
kind: TargetKind,
name: String,
src_path: PathBuf,
- profile: Profile,
metadata: Option<Metadata>,
+ tested: bool,
+ benched: bool,
+ doc: bool,
+ doctest: bool,
+ harness: bool, // whether to use the test harness (--test)
+ for_host: bool,
}
#[derive(RustcEncodable)]
kind: Vec<&'static str>,
name: String,
src_path: String,
- profile: Profile,
metadata: Option<Metadata>
}
}
TargetKind::Bin => vec!("bin"),
TargetKind::Example => vec!["example"],
+ TargetKind::Test => vec!["test"],
+ TargetKind::CustomBuild => vec!["custom-build"],
+ TargetKind::Bench => vec!["bench"],
};
SerializedTarget {
kind: kind,
name: self.name.clone(),
src_path: self.src_path.display().to_string(),
- profile: self.profile.clone(),
metadata: self.metadata.clone()
}.encode(s)
}
exclude: Vec<String>,
include: Vec<String>,
links: Option<String>,
- metadata: ManifestMetadata) -> Manifest {
+ metadata: ManifestMetadata,
+ profiles: Profiles) -> Manifest {
Manifest {
summary: summary,
targets: targets,
include: include,
links: links,
metadata: metadata,
+ profiles: profiles,
}
}
pub fn targets(&self) -> &[Target] { &self.targets }
pub fn version(&self) -> &Version { self.package_id().version() }
pub fn warnings(&self) -> &[String] { &self.warnings }
+ pub fn profiles(&self) -> &Profiles { &self.profiles }
pub fn links(&self) -> Option<&str> {
self.links.as_ref().map(|s| s.as_slice())
}
}
impl Target {
- pub fn file_stem(&self) -> String {
- match self.metadata {
- Some(ref metadata) => format!("{}{}", self.name,
- metadata.extra_filename),
- None => self.name.clone()
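+ /// Baseline target used by the constructors below: a bin target with doc,
+ /// doctest, the test harness, testing, and benching all enabled; each
+ /// constructor overrides only the fields it needs.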
+ fn blank() -> Target {
+ Target {
+ kind: TargetKind::Bin,
+ name: String::new(),
+ src_path: PathBuf::new(""),
+ metadata: None,
+ doc: true,
+ doctest: true,
+ harness: true,
+ for_host: false,
+ tested: true,
+ benched: true,
}
}
pub fn lib_target(name: &str, crate_targets: Vec<LibKind>,
- src_path: &Path, profile: &Profile,
+ src_path: &Path,
metadata: Metadata) -> Target {
Target {
kind: TargetKind::Lib(crate_targets),
name: name.to_string(),
src_path: src_path.to_path_buf(),
- profile: profile.clone(),
- metadata: Some(metadata)
+ metadata: Some(metadata),
+ ..Target::blank()
}
}
- pub fn bin_target(name: &str, src_path: &Path, profile: &Profile,
+ pub fn bin_target(name: &str, src_path: &Path,
metadata: Option<Metadata>) -> Target {
Target {
kind: TargetKind::Bin,
name: name.to_string(),
src_path: src_path.to_path_buf(),
- profile: profile.clone(),
metadata: metadata,
+ ..Target::blank()
}
}
/// Builds a `Target` corresponding to the `build = "build.rs"` entry.
- pub fn custom_build_target(name: &str, src_path: &Path, profile: &Profile,
+ pub fn custom_build_target(name: &str, src_path: &Path,
metadata: Option<Metadata>) -> Target {
Target {
- kind: TargetKind::Bin,
+ kind: TargetKind::CustomBuild,
name: name.to_string(),
src_path: src_path.to_path_buf(),
- profile: profile.clone(),
metadata: metadata,
+ for_host: true,
+ benched: false,
+ tested: false,
+ ..Target::blank()
}
}
- pub fn example_target(name: &str, src_path: &Path, profile: &Profile) -> Target {
+ pub fn example_target(name: &str, src_path: &Path) -> Target {
Target {
kind: TargetKind::Example,
name: name.to_string(),
src_path: src_path.to_path_buf(),
- profile: profile.clone(),
- metadata: None,
+ benched: false,
+ ..Target::blank()
}
}
pub fn test_target(name: &str, src_path: &Path,
- profile: &Profile, metadata: Metadata) -> Target {
+ metadata: Metadata) -> Target {
Target {
- kind: TargetKind::Bin,
+ kind: TargetKind::Test,
name: name.to_string(),
src_path: src_path.to_path_buf(),
- profile: profile.clone(),
metadata: Some(metadata),
+ benched: false,
+ ..Target::blank()
}
}
pub fn bench_target(name: &str, src_path: &Path,
- profile: &Profile, metadata: Metadata) -> Target {
+ metadata: Metadata) -> Target {
Target {
- kind: TargetKind::Bin,
+ kind: TargetKind::Bench,
name: name.to_string(),
src_path: src_path.to_path_buf(),
- profile: profile.clone(),
metadata: Some(metadata),
+ tested: false,
+ ..Target::blank()
}
}
pub fn name(&self) -> &str { &self.name }
pub fn src_path(&self) -> &Path { &self.src_path }
- pub fn profile(&self) -> &Profile { &self.profile }
pub fn metadata(&self) -> Option<&Metadata> { self.metadata.as_ref() }
+ pub fn kind(&self) -> &TargetKind { &self.kind }
+ pub fn tested(&self) -> bool { self.tested }
+ pub fn harness(&self) -> bool { self.harness }
+ pub fn documented(&self) -> bool { self.doc }
+ pub fn doctested(&self) -> bool { self.doctest }
+ pub fn for_host(&self) -> bool { self.for_host }
+ pub fn benched(&self) -> bool { self.benched }
pub fn is_lib(&self) -> bool {
match self.kind {
}
}
- pub fn is_dylib(&self) -> bool {
+ pub fn linkable(&self) -> bool {
match self.kind {
- TargetKind::Lib(ref kinds) => kinds.iter().any(|&k| k == LibKind::Dylib),
+ TargetKind::Lib(ref kinds) => {
+ kinds.iter().any(|k| {
+ match *k {
+ LibKind::Lib | LibKind::Rlib | LibKind::Dylib => true,
+ LibKind::StaticLib => false,
+ }
+ })
+ }
_ => false
}
}
- pub fn is_rlib(&self) -> bool {
+ pub fn is_bin(&self) -> bool { self.kind == TargetKind::Bin }
+ pub fn is_example(&self) -> bool { self.kind == TargetKind::Example }
+ pub fn is_test(&self) -> bool { self.kind == TargetKind::Test }
+ pub fn is_bench(&self) -> bool { self.kind == TargetKind::Bench }
+ pub fn is_custom_build(&self) -> bool { self.kind == TargetKind::CustomBuild }
+
+ /// Returns the arguments suitable for `--crate-type` to pass to rustc.
+ pub fn rustc_crate_types(&self) -> Vec<&'static str> {
match self.kind {
- TargetKind::Lib(ref kinds) =>
- kinds.iter().any(|&k| k == LibKind::Rlib || k == LibKind::Lib),
- _ => false
+ TargetKind::Lib(ref kinds) => {
+ kinds.iter().map(|kind| kind.crate_type()).collect()
+ },
+ TargetKind::CustomBuild |
+ TargetKind::Bench |
+ TargetKind::Test |
+ TargetKind::Example |
+ TargetKind::Bin => vec!("bin"),
}
}
- pub fn is_staticlib(&self) -> bool {
+ pub fn can_lto(&self) -> bool {
match self.kind {
- TargetKind::Lib(ref kinds) => kinds.iter().any(|&k| k == LibKind::StaticLib),
- _ => false
+ TargetKind::Lib(ref v) => *v == [LibKind::StaticLib],
+ _ => true,
}
}
- /// Returns true for binary, bench, and tests.
- pub fn is_bin(&self) -> bool {
- match self.kind {
- TargetKind::Bin => true,
- _ => false
+ pub fn set_tested(&mut self, tested: bool) -> &mut Target {
+ self.tested = tested;
+ self
+ }
+ pub fn set_benched(&mut self, benched: bool) -> &mut Target {
+ self.benched = benched;
+ self
+ }
+ pub fn set_doctest(&mut self, doctest: bool) -> &mut Target {
+ self.doctest = doctest;
+ self
+ }
+ pub fn set_for_host(&mut self, for_host: bool) -> &mut Target {
+ self.for_host = for_host;
+ self
+ }
+ pub fn set_harness(&mut self, harness: bool) -> &mut Target {
+ self.harness = harness;
+ self
+ }
+ pub fn set_doc(&mut self, doc: bool) -> &mut Target {
+ self.doc = doc;
+ self
+ }
+}
+
+impl Profile {
+ pub fn default_dev() -> Profile {
+ Profile {
+ debuginfo: true,
+ ..Profile::default()
}
}
- /// Returns true for exampels
- pub fn is_example(&self) -> bool {
- match self.kind {
- TargetKind::Example => true,
- _ => false
+ pub fn default_release() -> Profile {
+ Profile {
+ opt_level: 3,
+ debuginfo: false,
+ ndebug: true,
+ ..Profile::default()
}
}
- /// Returns the arguments suitable for `--crate-type` to pass to rustc.
- pub fn rustc_crate_types(&self) -> Vec<&'static str> {
- match self.kind {
- TargetKind::Lib(ref kinds) => {
- kinds.iter().map(|kind| kind.crate_type()).collect()
- },
- TargetKind::Example |
- TargetKind::Bin => vec!("bin"),
+ pub fn default_test() -> Profile {
+ Profile {
+ test: true,
+ ..Profile::default_dev()
+ }
+ }
+
+ pub fn default_bench() -> Profile {
+ Profile {
+ test: true,
+ ..Profile::default_release()
+ }
+ }
+
+ pub fn default_doc() -> Profile {
+ Profile {
+ doc: true,
+ ..Profile::default_dev()
+ }
+ }
+}
+
+impl Default for Profile {
+ fn default() -> Profile {
+ Profile {
+ opt_level: 0,
+ lto: false,
+ codegen_units: None,
+ debuginfo: false,
+ ndebug: false,
+ rpath: false,
+ test: false,
+ doc: false,
}
}
}
pub use self::dependency::Dependency;
-pub use self::manifest::{Manifest, Target, TargetKind, Profile};
+pub use self::manifest::{Manifest, Target, TargetKind, Profile, LibKind, Profiles};
pub use self::package::{Package, PackageSet};
-pub use self::package_id::PackageId;
+pub use self::package_id::{PackageId, Metadata};
pub use self::package_id_spec::PackageIdSpec;
pub use self::registry::Registry;
pub use self::resolver::Resolve;
}
pub fn has_custom_build(&self) -> bool {
- self.targets().iter().any(|t| t.profile().is_custom_build())
+ self.targets().iter().any(|t| t.is_custom_build())
}
}
use std::io::prelude::*;
use std::path::Path;
-use core::PackageSet;
+use core::{PackageSet, Profiles, Profile};
use core::source::{Source, SourceMap};
use sources::PathSource;
use util::{CargoResult, human, ChainError, Config};
-use ops::{self, Layout, Context};
+use ops::{self, Layout, Context, BuildConfig};
pub struct CleanOptions<'a, 'b: 'a> {
pub spec: Option<&'a str>,
// filenames and such
let srcs = SourceMap::new();
let pkgs = PackageSet::new(&[]);
- let cx = try!(Context::new("compile", &resolve, &srcs, &pkgs, opts.config,
+ let profiles = Profiles::default();
+ let cx = try!(Context::new(&resolve, &srcs, &pkgs, opts.config,
Layout::at(root.absolute_target_dir()),
- None, &pkg, Default::default()));
+ None, &pkg, BuildConfig::default(),
+ &profiles));
// And finally, clean everything out!
for target in pkg.targets().iter() {
- let layout = Layout::new(&root, opts.target, target.profile().dest());
+ // TODO: `cargo clean --release`
+ let layout = Layout::new(&root, opts.target, "debug");
try!(rm_rf(&layout.fingerprint(&pkg)));
- for filename in try!(cx.target_filenames(target)).iter() {
- try!(rm_rf(&layout.dest().join(&filename)));
- try!(rm_rf(&layout.deps().join(&filename)));
+ let profiles = [Profile::default_dev(), Profile::default_test()];
+ for profile in profiles.iter() {
+ for filename in try!(cx.target_filenames(target, profile)).iter() {
+ try!(rm_rf(&layout.dest().join(&filename)));
+ try!(rm_rf(&layout.deps().join(&filename)));
+ }
}
}
use core::registry::PackageRegistry;
use core::{Source, SourceId, PackageSet, Package, Target, PackageId};
+use core::{Profile, TargetKind};
use core::resolver::Method;
use ops::{self, BuildOutput, ExecEngine};
use sources::{PathSource};
/// Contains information about how a package should be compiled.
pub struct CompileOptions<'a, 'b: 'a> {
- pub env: &'a str,
pub config: &'a Config<'b>,
/// Number of concurrent jobs to use.
pub jobs: Option<u32>,
/// The target platform to compile for (example: `i686-unknown-linux-gnu`).
pub target: Option<&'a str>,
- /// True if dev-dependencies must be compiled.
- pub dev_deps: bool,
+ /// Extra features to build for the root package
pub features: &'a [String],
+ /// Flag if the default feature should be built for the root package
pub no_default_features: bool,
+ /// Root package to build (if None it's the current one)
pub spec: Option<&'a str>,
- pub lib_only: bool,
+ /// Filter to apply to the root package to select which targets will be
+ /// built.
+ pub filter: CompileFilter<'a>,
+ /// Engine which drives compilation
pub exec_engine: Option<Arc<Box<ExecEngine>>>,
+ /// Whether this is a release build or not
+ pub release: bool,
+ /// Mode for this compile.
+ pub mode: CompileMode,
+}
+
+#[derive(Copy, PartialEq)]
+pub enum CompileMode {
+ Test,
+ Build,
+ Bench,
+}
+
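+/// Filter applied to the root package to select which of its targets are
+/// built (see the `filter` field above).
+///
+/// For example, `cargo build --lib` is expressed roughly as:
+///
+///     CompileFilter::Only {
+///         lib: true, bins: &[], examples: &[], tests: &[], benches: &[],
+///     }
+///
+/// while a bare `cargo build` maps to `CompileFilter::Everything`.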
+pub enum CompileFilter<'a> {
+ Everything,
+ Only {
+ lib: bool,
+ bins: &'a [String],
+ examples: &'a [String],
+ tests: &'a [String],
+ benches: &'a [String],
+ }
}
pub fn compile(manifest_path: &Path,
pub fn compile_pkg(package: &Package, options: &CompileOptions)
-> CargoResult<ops::Compilation> {
- let CompileOptions { env, config, jobs, target, spec,
- dev_deps, features, no_default_features,
- lib_only, ref exec_engine } = *options;
+ let CompileOptions { config, jobs, target, spec, features,
+ no_default_features, release, mode,
+ ref filter, ref exec_engine } = *options;
let target = target.map(|s| s.to_string());
let features = features.iter().flat_map(|s| {
try!(registry.add_overrides(override_ids));
- let platform = target.as_ref().map(|e| e.as_slice()).or(Some(rustc_host.as_slice()));
+ let platform = target.as_ref().map(|e| &e[..]).or(Some(&rustc_host[..]));
let method = Method::Required{
- dev_deps: dev_deps,
+ dev_deps: true, // TODO: remove this option?
features: &features,
uses_default_features: !no_default_features,
target_platform: platform};
None => package.package_id(),
};
let to_build = packages.iter().find(|p| p.package_id() == pkgid).unwrap();
- let targets = to_build.targets().iter().filter(|target| {
- target.profile().is_custom_build() || match env {
- // doc-all == document everything, so look for doc targets
- "doc" | "doc-all" => target.profile().env() == "doc",
- env => target.profile().env() == env,
- }
- }).filter(|target| !lib_only || target.is_lib()).collect::<Vec<&Target>>();
-
- if lib_only && targets.len() == 0 {
- return Err(human("There is no lib to build, remove `--lib` flag".to_string()));
- }
+ let targets = try!(generate_targets(to_build, mode, filter, release));
let ret = {
let _p = profile::start("compiling");
- let lib_overrides = try!(scrape_build_config(config, jobs, target));
+ let mut build_config = try!(scrape_build_config(config, jobs, target));
+ build_config.exec_engine = exec_engine.clone();
+ build_config.release = release;
- try!(ops::compile_targets(&env, &targets, to_build,
+ try!(ops::compile_targets(&targets, to_build,
&PackageSet::new(&packages),
&resolve_with_overrides, &sources,
- config, lib_overrides, exec_engine.clone()))
+ config,
+ build_config,
+ to_build.manifest().profiles()))
};
return Ok(ret);
}
+impl<'a> CompileFilter<'a> {
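+ /// Returns whether `target` is selected by this filter (custom build
+ /// scripts never match).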
+ pub fn matches(&self, target: &Target) -> bool {
+ match *self {
+ CompileFilter::Everything => true,
+ CompileFilter::Only { lib, bins, examples, tests, benches } => {
+ let list = match *target.kind() {
+ TargetKind::Bin => bins,
+ TargetKind::Test => tests,
+ TargetKind::Bench => benches,
+ TargetKind::Example => examples,
+ TargetKind::Lib(..) => return lib,
+ TargetKind::CustomBuild => return false,
+ };
+ list.iter().any(|x| *x == target.name())
+ }
+ }
+ }
+}
+
+/// Given the configuration for a build, this function will generate all
+/// target/profile combinations needed to be built.
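+///
+/// For example (per the match below), `cargo bench` pairs every benchable
+/// target with the `bench` profile, and `cargo test --release` also selects
+/// the `bench` profile so that test binaries are optimized.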
+fn generate_targets<'a>(pkg: &'a Package,
+ mode: CompileMode,
+ filter: &CompileFilter,
+ release: bool)
+ -> CargoResult<Vec<(&'a Target, &'a Profile)>> {
+ let profiles = pkg.manifest().profiles();
+ let build = if release {&profiles.release} else {&profiles.dev};
+ let profile = match mode {
+ CompileMode::Test => if release {&profiles.bench} else {&profiles.test},
+ CompileMode::Bench => &profiles.bench,
+ CompileMode::Build => build,
+ };
+ return match *filter {
+ CompileFilter::Everything => {
+ match mode {
+ CompileMode::Bench => {
+ Ok(pkg.targets().iter().filter(|t| t.benched()).map(|t| {
+ (t, profile)
+ }).collect::<Vec<_>>())
+ }
+ CompileMode::Test => {
+ let mut base = pkg.targets().iter().filter(|t| {
+ t.tested()
+ }).map(|t| {
+ (t, if t.is_example() {build} else {profile})
+ }).collect::<Vec<_>>();
+
+ // Always compile the library if we're testing everything as
+ // it'll be needed for doctests
+ if let Some(t) = pkg.targets().iter().find(|t| t.is_lib()) {
+ if t.doctested() {
+ base.push((t, build));
+ }
+ }
+ Ok(base)
+ }
+ CompileMode::Build => {
+ Ok(pkg.targets().iter().filter(|t| {
+ t.is_bin() || t.is_lib()
+ }).map(|t| (t, profile)).collect())
+ }
+ }
+ }
+ CompileFilter::Only { lib, bins, examples, tests, benches } => {
+ let mut targets = Vec::new();
+
+ if lib {
+ if let Some(t) = pkg.targets().iter().find(|t| t.is_lib()) {
+ targets.push((t, profile));
+ } else {
+ return Err(human(format!("no library targets found")))
+ }
+ }
+
+ {
+ let mut find = |names: &[String], desc, kind, profile| {
+ for name in names {
+ let target = pkg.targets().iter().find(|t| {
+ t.name() == *name && *t.kind() == kind
+ });
+ let t = match target {
+ Some(t) => t,
+ None => return Err(human(format!("no {} target \
+ named `{}`",
+ desc, name))),
+ };
+ targets.push((t, profile));
+ }
+ Ok(())
+ };
+ try!(find(bins, "bin", TargetKind::Bin, profile));
+ try!(find(examples, "example", TargetKind::Example,
+ &profiles.dev));
+ try!(find(tests, "test", TargetKind::Test, &profiles.test));
+ try!(find(benches, "bench", TargetKind::Bench, &profiles.bench));
+ }
+ Ok(targets)
+ }
+ };
+}
+
+/// Read the `paths` configuration variable to discover all path overrides that
+/// have been configured.
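+///
+/// A path override in `.cargo/config` looks roughly like:
+///
+///     paths = ["/path/to/local/override"]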
fn source_ids_from_config(config: &Config, cur_path: &Path)
-> CargoResult<Vec<SourceId>> {
}).map(|p| SourceId::for_path(&p)).collect()
}
+/// Parse all config files to learn about build configuration. Currently
+/// configured options are:
+///
+/// * build.jobs
+/// * target.$target.ar
+/// * target.$target.linker
+/// * target.$target.libfoo.metadata
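+///
+/// For example, in `.cargo/config` (values illustrative):
+///
+///     [build]
+///     jobs = 4
+///
+///     [target.x86_64-unknown-linux-gnu]
+///     ar = "ar"
+///     linker = "cc"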
fn scrape_build_config(config: &Config,
jobs: Option<u32>,
- target: Option<String>) -> CargoResult<ops::BuildConfig> {
+ target: Option<String>)
+ -> CargoResult<ops::BuildConfig> {
let cfg_jobs = match try!(config.get_i64("build.jobs")) {
Some((n, p)) => {
match n.to_u32() {
match try!(config.get(&key)).unwrap() {
ConfigValue::String(v, path) => {
if k == "rustc-flags" {
- let whence = format!("in `{}` (in {:?})", key, path);
+ let whence = format!("in `{}` (in {})", key,
+ path.display());
let (paths, links) = try!(
BuildOutput::parse_rustc_flags(&v, &whence)
);
},
ConfigValue::List(a, p) => {
if k == "rustc-link-lib" {
- output.library_links.extend(a.into_iter().map(|(v, _)| v));
+ output.library_links.extend(a.into_iter().map(|v| v.0));
} else if k == "rustc-link-search" {
- output.library_paths.extend(a.into_iter().map(|(v, _)| PathBuf::new(&v)));
+ output.library_paths.extend(a.into_iter().map(|v| {
+ PathBuf::new(&v.0)
+ }));
} else {
- try!(config.expected("string", &k, ConfigValue::List(a, p)));
+ try!(config.expected("string", &k,
+ ConfigValue::List(a, p)));
}
},
- // technically could be a list too, but that's the exception to the rule...
+ // technically could be a list too, but that's the exception to
+ // the rule...
cv => { try!(config.expected("string", &k, cv)); }
}
}
let mut lib_names = HashSet::new();
let mut bin_names = HashSet::new();
if options.compile_opts.spec.is_none() {
- for target in package.targets().iter().filter(|t| t.profile().is_doc()) {
+ for target in package.targets() {
if target.is_lib() {
assert!(lib_names.insert(target.name()));
} else {
// Now that we've rewritten all our path dependencies, compile it!
try!(ops::compile_pkg(&new_pkg, &ops::CompileOptions {
- env: "compile",
config: config,
jobs: None,
target: None,
- dev_deps: false,
features: &[],
no_default_features: false,
spec: None,
- lib_only: false,
+ filter: ops::CompileFilter::Everything,
exec_engine: None,
+ release: false,
+ mode: ops::CompileMode::Build,
}));
Ok(())
use std::path::Path;
-use ops::{self, ExecEngine};
-use util::{CargoResult, human, process, ProcessError, ChainError};
-use core::manifest::TargetKind;
+use ops::{self, ExecEngine, CompileFilter};
+use util::{CargoResult, human, process, ProcessError};
use core::source::Source;
use sources::PathSource;
pub fn run(manifest_path: &Path,
- target_kind: TargetKind,
- name: Option<String>,
options: &ops::CompileOptions,
args: &[String]) -> CargoResult<Option<ProcessError>> {
let config = options.config;
config));
try!(src.update());
let root = try!(src.root_package());
- let env = options.env;
+
+ // Make sure that we're only running at most one binary. The `compile` step
+ // will verify that we're building at least one binary, so we don't check
+ // for that form of existence here.
let mut bins = root.manifest().targets().iter().filter(|a| {
- let matches_kind = match target_kind {
- TargetKind::Bin => a.is_bin(),
- TargetKind::Example => a.is_example(),
- TargetKind::Lib(_) => false,
- };
- let matches_name = name.as_ref().map_or(true, |n| *n == a.name());
- matches_kind && matches_name && a.profile().env() == env &&
- !a.profile().is_custom_build()
+ options.filter.matches(a) && !a.is_lib() && !a.is_custom_build()
});
- let bin = try!(bins.next().chain_error(|| {
- match (name.as_ref(), &target_kind) {
- (Some(name), &TargetKind::Bin) => {
- human(format!("no bin target named `{}` to run", name))
+ let _ = bins.next();
+ if bins.next().is_some() {
+ match options.filter {
+ CompileFilter::Everything => {
+ return Err(human("`cargo run` requires that a project only have \
+ one executable; use the `--bin` option to \
+ specify which one to run"))
}
- (Some(name), &TargetKind::Example) => {
- human(format!("no example target named `{}` to run", name))
+ CompileFilter::Only { .. } => {
+ return Err(human("`cargo run` can run at most one executable, \
+ but multiple were specified"))
}
- (Some(_), &TargetKind::Lib(..)) => unreachable!(),
- (None, _) => human("a bin target must be available for `cargo run`"),
}
- }));
- match bins.next() {
- Some(..) => return Err(
- human("`cargo run` requires that a project only have one executable. \
- Use the `--bin` option to specify which one to run")),
- None => {}
}
let compile = try!(ops::compile(manifest_path, options));
- let dst = manifest_path.parent().unwrap().join("target");
- let dst = match options.target {
- Some(target) => dst.join(target),
- None => dst,
- };
- let exe = match (bin.profile().dest(), bin.is_example()) {
- (s, true) => dst.join(s).join("examples").join(bin.name()),
- (s, false) => dst.join(s).join(bin.name()),
- };
+ let exe = &compile.binaries[0];
let exe = match exe.relative_from(config.cwd()) {
Some(path) => path,
- None => &*exe,
+ None => &**exe,
};
let mut process = try!(compile.target_process(exe, &root))
.into_process_builder();
use std::collections::hash_map::Entry::{Occupied, Vacant};
-use std::collections::hash_map::HashMap;
+use std::collections::{HashSet, HashMap};
use std::str;
use std::sync::Arc;
use std::path::PathBuf;
use regex::Regex;
use core::{SourceMap, Package, PackageId, PackageSet, Resolve, Target, Profile};
+use core::{TargetKind, LibKind, Profiles, Metadata};
use util::{self, CargoResult, ChainError, internal, Config, profile};
use util::human;
pub compilation: Compilation,
pub build_state: Arc<BuildState>,
pub exec_engine: Arc<Box<ExecEngine>>,
- pub fingerprints: HashMap<(&'a PackageId, &'a Target, Kind), Fingerprint>,
+ pub fingerprints: HashMap<(&'a PackageId, &'a Target, &'a Profile, Kind),
+ Fingerprint>,
+ pub initialized: HashSet<&'a PackageId>,
+ pub compiled: HashSet<(&'a PackageId, &'a Target, &'a Profile)>,
+ pub build_config: BuildConfig,
- env: &'a str,
host: Layout,
target: Option<Layout>,
target_triple: String,
target_dylib: Option<(String, String)>,
target_exe: String,
requirements: HashMap<(&'a PackageId, &'a str), Platform>,
- build_config: BuildConfig,
+ profiles: &'a Profiles,
}
impl<'a, 'b: 'a> Context<'a, 'b> {
- pub fn new(env: &'a str,
- resolve: &'a Resolve,
+ pub fn new(resolve: &'a Resolve,
sources: &'a SourceMap<'a>,
deps: &'a PackageSet,
config: &'a Config<'b>,
host: Layout,
target_layout: Option<Layout>,
root_pkg: &Package,
- build_config: BuildConfig) -> CargoResult<Context<'a, 'b>> {
+ build_config: BuildConfig,
+ profiles: &'a Profiles) -> CargoResult<Context<'a, 'b>> {
let target = build_config.requested_target.clone();
let target = target.as_ref().map(|s| &s[..]);
let (target_dylib, target_exe) = try!(Context::filename_parts(target));
try!(Context::filename_parts(None))
};
let target_triple = target.unwrap_or(config.rustc_host()).to_string();
+ let engine = build_config.exec_engine.as_ref().cloned().unwrap_or({
+ Arc::new(Box::new(ProcessEngine) as Box<ExecEngine>)
+ });
Ok(Context {
target_triple: target_triple,
- env: env,
host: host,
target: target_layout,
resolve: resolve,
host_exe: host_exe,
requirements: HashMap::new(),
compilation: Compilation::new(root_pkg),
- build_state: Arc::new(BuildState::new(build_config.clone(), deps)),
+ build_state: Arc::new(BuildState::new(&build_config, deps)),
build_config: build_config,
- exec_engine: Arc::new(Box::new(ProcessEngine) as Box<ExecEngine>),
+ exec_engine: engine,
fingerprints: HashMap::new(),
+ profiles: profiles,
+ compiled: HashSet::new(),
+ initialized: HashSet::new(),
})
}
/// Prepare this context, ensuring that all filesystem directories are in
/// place.
- pub fn prepare(&mut self, pkg: &'a Package) -> CargoResult<()> {
+ pub fn prepare(&mut self, pkg: &'a Package,
+ targets: &[(&'a Target, &'a Profile)])
+ -> CargoResult<()> {
let _p = profile::start("preparing layout");
try!(self.host.prepare().chain_error(|| {
None => {}
}
- let targets = pkg.targets().iter();
- for target in targets.filter(|t| t.profile().is_compile()) {
- self.build_requirements(pkg, target, Platform::Target);
+ for &(target, profile) in targets {
+ self.build_requirements(pkg, target, profile, Platform::Target);
}
let jobs = self.jobs();
}
fn build_requirements(&mut self, pkg: &'a Package, target: &'a Target,
- req: Platform) {
- let req = if target.profile().is_for_host() {Platform::Plugin} else {req};
+ profile: &Profile, req: Platform) {
+ let req = if target.for_host() {Platform::Plugin} else {req};
match self.requirements.entry((pkg.package_id(), target.name())) {
Occupied(mut entry) => match (*entry.get(), req) {
(Platform::Plugin, Platform::Plugin) |
Vacant(entry) => { entry.insert(req); }
};
- for &(pkg, dep) in self.dep_targets(pkg, target).iter() {
- self.build_requirements(pkg, dep, req);
+ for &(pkg, dep, profile) in self.dep_targets(pkg, target, profile).iter() {
+ self.build_requirements(pkg, dep, profile, req);
}
- match pkg.targets().iter().find(|t| t.profile().is_custom_build()) {
+ match pkg.targets().iter().find(|t| t.is_custom_build()) {
Some(custom_build) => {
- self.build_requirements(pkg, custom_build, Platform::Plugin);
+ let profile = self.build_script_profile(pkg.package_id());
+ self.build_requirements(pkg, custom_build, profile,
+ Platform::Plugin);
}
None => {}
}
pub fn get_requirement(&self, pkg: &'a Package,
target: &'a Target) -> Platform {
- let default = if target.profile().is_for_host() {
+ let default = if target.for_host() {
Platform::Plugin
} else {
Platform::Target
/// target.
pub fn out_dir(&self, pkg: &Package, kind: Kind, target: &Target) -> PathBuf {
let out_dir = self.layout(pkg, kind);
- if target.profile().is_custom_build() {
+ if target.is_custom_build() {
out_dir.build(pkg)
} else if target.is_example() {
out_dir.examples().to_path_buf()
&self.target_triple
}
- /// Return the exact filename of the target.
- pub fn target_filenames(&self, target: &Target) -> CargoResult<Vec<String>> {
- let stem = target.file_stem();
+ /// Get the metadata for a target in a specific profile
+ pub fn target_metadata(&self, target: &Target, profile: &Profile)
+ -> Option<Metadata> {
+ let metadata = target.metadata();
+ if target.is_lib() && profile.test {
+ // Libs and their tests are built in parallel, so we need to make
+ // sure that their metadata is different.
+ metadata.map(|m| m.clone()).map(|mut m| {
+ m.mix(&"test");
+ m
+ })
+ } else if target.is_bin() && profile.test {
+ // Make sure that the name of this test executable doesn't
+ // conflict with a library that has the same name and is
+ // being tested
+ let mut metadata = self.resolve.root().generate_metadata();
+ metadata.mix(&format!("bin-{}", target.name()));
+ Some(metadata)
+ } else {
+ metadata.map(|m| m.clone())
+ }
+ }
+
+ /// Returns the file stem for a given target/profile combo
+ pub fn file_stem(&self, target: &Target, profile: &Profile) -> String {
+ match self.target_metadata(target, profile) {
+ Some(ref metadata) => format!("{}{}", target.name(),
+ metadata.extra_filename),
+ None => target.name().to_string(),
+ }
+ }
+
+ /// Return the filenames that the given target for the given profile will
+ /// generate.
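+ ///
+ /// For example, a library `foo` built as `["dylib", "rlib"]` would yield
+ /// names like `libfoo.so` and `libfoo.rlib` on Linux (possibly with a
+ /// metadata hash appended to the stem; the dylib prefix and suffix are
+ /// platform-specific).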
+ pub fn target_filenames(&self, target: &Target, profile: &Profile)
+ -> CargoResult<Vec<String>> {
+ let stem = self.file_stem(target, profile);
+ let suffix = if target.for_host() {&self.host_exe} else {&self.target_exe};
let mut ret = Vec::new();
- if target.is_example() || target.is_bin() ||
- target.profile().is_test() {
- ret.push(format!("{}{}", stem,
- if target.profile().is_for_host() {
- &self.host_exe
- } else {
- &self.target_exe
- }));
- } else {
- if target.is_dylib() {
- let plugin = target.profile().is_for_host();
- let kind = if plugin {Kind::Host} else {Kind::Target};
- let (prefix, suffix) = try!(self.dylib(kind));
- ret.push(format!("{}{}{}", prefix, stem, suffix));
+ match *target.kind() {
+ TargetKind::Example | TargetKind::Bin | TargetKind::CustomBuild |
+ TargetKind::Bench | TargetKind::Test => {
+ ret.push(format!("{}{}", stem, suffix));
}
- if target.is_rlib() {
- ret.push(format!("lib{}.rlib", stem));
+ TargetKind::Lib(..) if profile.test => {
+ ret.push(format!("{}{}", stem, suffix));
}
- if target.is_staticlib() {
- ret.push(format!("lib{}.a", stem));
+ TargetKind::Lib(ref libs) => {
+ for lib in libs.iter() {
+ match *lib {
+ LibKind::Dylib => {
+ let plugin = target.for_host();
+ let kind = if plugin {Kind::Host} else {Kind::Target};
+ let (prefix, suffix) = try!(self.dylib(kind));
+ ret.push(format!("{}{}{}", prefix, stem, suffix));
+ }
+ LibKind::Lib |
+ LibKind::Rlib => ret.push(format!("lib{}.rlib", stem)),
+ LibKind::StaticLib => ret.push(format!("lib{}.a", stem)),
+ }
+ }
}
}
assert!(ret.len() > 0);
/// For a package, return all targets which are registered as dependencies
/// for that package.
- pub fn dep_targets(&self, pkg: &Package, target: &Target)
- -> Vec<(&'a Package, &'a Target)> {
+ pub fn dep_targets(&self, pkg: &Package, target: &Target,
+ profile: &Profile)
+ -> Vec<(&'a Package, &'a Target, &'a Profile)> {
let deps = match self.resolve.deps(pkg.package_id()) {
- None => return vec!(),
+ None => return Vec::new(),
Some(deps) => deps,
};
let mut ret = deps.map(|id| self.get_package(id)).filter(|dep| {
// If this target is a build command, then we only want build
// dependencies, otherwise we want everything *other than* build
// dependencies.
- let is_correct_dep =
- target.profile().is_custom_build() == pkg_dep.is_build();
+ let is_correct_dep = target.is_custom_build() == pkg_dep.is_build();
// If this dependency is *not* a transitive dependency, then it
// only applies to test/example targets
let is_actual_dep = pkg_dep.is_transitive() ||
- target.profile().is_test() ||
- target.is_example();
+ target.is_test() ||
+ target.is_example() ||
+ profile.test;
is_correct_dep && is_actual_dep
}).filter_map(|pkg| {
- pkg.targets().iter().find(|&t| self.is_relevant_target(t))
- .map(|t| (pkg, t))
+ pkg.targets().iter().find(|t| t.is_lib()).map(|t| {
+ (pkg, t, self.lib_profile(pkg.package_id()))
+ })
}).collect::<Vec<_>>();
+ // If a target isn't actually a build script itself, then it depends on
+ // the build script if there is one.
+ if target.is_custom_build() { return ret }
+ let pkg = self.get_package(pkg.package_id());
+ if let Some(t) = pkg.targets().iter().find(|t| t.is_custom_build()) {
+ ret.push((pkg, t, self.build_script_profile(pkg.package_id())));
+ }
+
// If this target is a binary, test, example, etc, then it depends on
// the library of the same package. The call to `resolve.deps` above
// didn't include `pkg` in the return values, so we need to special case
// it here and see if we need to push `(pkg, pkg_lib_target)`.
- if !target.profile().is_custom_build() &&
- (target.is_bin() || target.is_example()) {
- let pkg = self.get_package(pkg.package_id());
- let target = pkg.targets().iter().filter(|t| {
- t.is_lib() && t.profile().is_compile() &&
- (t.is_rlib() || t.is_dylib())
- }).next();
- if let Some(t) = target {
- ret.push((pkg, t));
- }
+ if target.is_lib() { return ret }
+ if let Some(t) = pkg.targets().iter().find(|t| t.linkable()) {
+ ret.push((pkg, t, self.lib_profile(pkg.package_id())));
}
- return ret;
+
+ // If this is a test profile, then we need to ensure that all binaries
+ // are built.
+ if profile.test {
+ ret.extend(pkg.targets().iter().filter(|t| t.is_bin())
+ .map(|t| (pkg, t, self.lib_profile(pkg.package_id()))));
+ }
+ return ret
}
/// Gets a package for the given package id.
.expect("Should have found package")
}
- pub fn env(&self) -> &str {
- // The "doc-all" environment just means to document everything (see
- // below), but we want to canonicalize that the the "doc" profile
- // environment, so do that here.
- if self.env == "doc-all" {"doc"} else {self.env}
- }
-
- pub fn is_relevant_target(&self, target: &Target) -> bool {
- target.is_lib() && match self.env {
- "doc" | "test" => target.profile().is_compile(),
- // doc-all == document everything, so look for doc targets and
- // compile targets in dependencies
- "doc-all" => target.profile().is_compile() ||
- (target.profile().env() == "doc" &&
- target.profile().is_doc()),
- _ => target.profile().env() == self.env &&
- !target.profile().is_test(),
- }
- }
-
/// Get the user-specified linker for a particular host or target
pub fn linker(&self, kind: Kind) -> Option<&str> {
self.target_config(kind).linker.as_ref().map(|s| s.as_slice())
self.build_config.requested_target.as_ref().map(|s| &s[..])
}
- /// Calculate the actual profile to use for a target's compliation.
- ///
- /// This may involve overriding some options such as debug information,
- /// rpath, opt level, etc.
- pub fn profile(&self, target: &Target) -> Profile {
- let mut profile = target.profile().clone();
- let root_package = self.get_package(self.resolve.root());
- for target in root_package.manifest().targets().iter() {
- let root_profile = target.profile();
- if root_profile.env() != profile.env() { continue }
- profile = profile.set_opt_level(root_profile.opt_level())
- .set_debug(root_profile.debug())
- .set_rpath(root_profile.rpath())
+ pub fn lib_profile(&self, _pkg: &PackageId) -> &'a Profile {
+ if self.build_config.release {
+ &self.profiles.release
+ } else {
+ &self.profiles.dev
}
- profile
+ }
+
+ pub fn build_script_profile(&self, _pkg: &PackageId) -> &'a Profile {
+ // TODO: should build scripts always be built with a dev
+ // profile? How is this controlled at the CLI layer?
+ &self.profiles.dev
}
}
};
// Building the command to execute
- let to_exec = try!(cx.target_filenames(target))[0].clone();
+ let profile = cx.build_script_profile(pkg.package_id());
+ let to_exec = try!(cx.target_filenames(target, profile))[0].clone();
let to_exec = script_output.join(&to_exec);
// Start preparing the process to execute, starting out with some
// environment variables. Note that the profile-related environment
// variables are not set with the build script's profile but rather the
- // package's profile (some target which isn't a build script).
- let profile_target = pkg.targets().iter().find(|t| {
- cx.is_relevant_target(t) && !t.profile().is_custom_build()
- }).unwrap_or(target);
- let profile = cx.profile(profile_target);
+ // package's library profile.
+ let profile = cx.lib_profile(pkg.package_id());
let to_exec = to_exec.into_os_string();
let mut p = try!(super::process(CommandType::Host(to_exec), pkg, target, cx));
p.env("OUT_DIR", &build_output)
Kind::Host => cx.config.rustc_host(),
Kind::Target => cx.target_triple(),
})
- .env("DEBUG", &profile.debug().to_string())
- .env("OPT_LEVEL", &profile.opt_level().to_string())
- .env("PROFILE", &profile.env())
+ .env("DEBUG", &profile.debuginfo.to_string())
+ .env("OPT_LEVEL", &profile.opt_level.to_string())
+ .env("PROFILE", if cx.build_config.release {"release"} else {"debug"})
.env("HOST", &cx.config.rustc_host());
// Be sure to pass along all enabled features for this package, this is the
// This information will be used at build-time later on to figure out which
// sorts of variables need to be discovered at that time.
let lib_deps = {
- let non_build_target = pkg.targets().iter().find(|t| {
- !t.profile().is_custom_build()
+ let not_custom = pkg.targets().iter().find(|t| {
+ !t.is_custom_build()
}).unwrap();
- cx.dep_targets(pkg, non_build_target).iter().filter_map(|&(pkg, _)| {
+ cx.dep_targets(pkg, not_custom, profile).iter().filter_map(|&(pkg, t, _)| {
+ if !t.linkable() { return None }
pkg.manifest().links().map(|links| {
(links.to_string(), pkg.package_id().clone())
})
let id = pkg.package_id().clone();
let all = (id.clone(), pkg_name.clone(), build_state.clone(),
build_output.clone());
- let plugin_deps = super::crawl_build_deps(cx, pkg, target, Kind::Host);
+ let plugin_deps = super::crawl_build_deps(cx, pkg, target, profile,
+ Kind::Host);
try!(fs::create_dir_all(&cx.layout(pkg, Kind::Target).build(pkg)));
try!(fs::create_dir_all(&cx.layout(pkg, Kind::Host).build(pkg)));
}
impl BuildState {
- pub fn new(config: super::BuildConfig,
+ pub fn new(config: &super::BuildConfig,
packages: &PackageSet) -> BuildState {
let mut sources = HashMap::new();
for package in packages.iter() {
}
}
let mut outputs = HashMap::new();
- let i1 = config.host.overrides.into_iter().map(|p| (p, Kind::Host));
- let i2 = config.target.overrides.into_iter().map(|p| (p, Kind::Target));
+ let i1 = config.host.overrides.iter().map(|p| (p, Kind::Host));
+ let i2 = config.target.overrides.iter().map(|p| (p, Kind::Target));
for ((name, output), kind) in i1.chain(i2) {
- match sources.get(&name) {
- Some(id) => { outputs.insert((id.clone(), kind), output); }
-
- // If no package is using the library named `name`, then this is
- // just an override that we ignore.
- None => {}
+ // If no package is using the library named `name`, then this is
+ // just an override that we ignore.
+ if let Some(id) = sources.get(name) {
+ outputs.insert((id.clone(), kind), output.clone());
}
}
BuildState { outputs: Mutex::new(outputs) }
use std::io::{BufReader, SeekFrom};
use std::path::{Path, PathBuf};
-use core::{Package, Target};
+use core::{Package, Target, Profile};
use util;
use util::{CargoResult, Fresh, Dirty, Freshness, internal, profile, ChainError};
pub fn prepare_target<'a, 'b>(cx: &mut Context<'a, 'b>,
pkg: &'a Package,
target: &'a Target,
+ profile: &'a Profile,
kind: Kind) -> CargoResult<Preparation> {
let _p = profile::start(format!("fingerprint: {} / {}",
pkg.package_id(), target.name()));
let new = dir(cx, pkg, kind);
- let loc = new.join(&filename(target));
+ let loc = new.join(&filename(target, profile));
info!("fingerprint at: {}", loc.display());
- let fingerprint = try!(calculate(cx, pkg, target, kind));
+ let fingerprint = try!(calculate(cx, pkg, target, profile, kind));
let is_fresh = try!(is_fresh(&loc, &fingerprint));
let root = cx.out_dir(pkg, kind, target);
let mut missing_outputs = false;
- if !target.profile().is_doc() {
- for filename in try!(cx.target_filenames(target)).iter() {
+ if !profile.doc {
+ for filename in try!(cx.target_filenames(target, profile)).iter() {
let dst = root.join(filename);
missing_outputs |= fs::metadata(&dst).is_err();
- if target.profile().is_test() {
+ if target.is_test() || profile.test {
cx.compilation.tests.push((target.name().to_string(), dst));
} else if target.is_bin() {
cx.compilation.binaries.push(dst);
fn calculate<'a, 'b>(cx: &mut Context<'a, 'b>,
pkg: &'a Package,
target: &'a Target,
+ profile: &'a Profile,
kind: Kind)
-> CargoResult<Fingerprint> {
- let key = (pkg.package_id(), target, kind);
+ let key = (pkg.package_id(), target, profile, kind);
match cx.fingerprints.get(&key) {
Some(s) => return Ok(s.clone()),
None => {}
v
});
let extra = util::short_hash(&(cx.config.rustc_version(), target, &features,
- cx.profile(target)));
+ profile));
// Next, recursively calculate the fingerprint for all of our dependencies.
- let deps = try!(cx.dep_targets(pkg, target).into_iter().map(|(p, t)| {
+ let deps = try!(cx.dep_targets(pkg, target, profile).into_iter()
+ .map(|(pkg, target, profile)| {
let kind = match kind {
Kind::Host => Kind::Host,
- Kind::Target if t.profile().is_for_host() => Kind::Host,
+ Kind::Target if target.for_host() => Kind::Host,
Kind::Target => Kind::Target,
};
- calculate(cx, p, t, kind)
+ calculate(cx, pkg, target, profile, kind)
}).collect::<CargoResult<Vec<_>>>());
// And finally, calculate what our own local fingerprint is
- let local = if use_dep_info(pkg, target) {
- let dep_info = dep_info_loc(cx, pkg, target, kind);
+ let local = if use_dep_info(pkg, profile) {
+ let dep_info = dep_info_loc(cx, pkg, target, profile, kind);
let mtime = try!(calculate_target_mtime(&dep_info));
// if the mtime listed is not fresh, then remove the `dep_info` file to
// git/registry source, then the mtime of files may fluctuate, but they won't
// change so long as the source itself remains constant (which is the
// responsibility of the source)
-fn use_dep_info(pkg: &Package, target: &Target) -> bool {
- let doc = target.profile().is_doc();
+fn use_dep_info(pkg: &Package, profile: &Profile) -> bool {
let path = pkg.summary().source_id().is_path();
- !doc && path
+ !profile.doc && path
}
/// Prepare the necessary work for the fingerprint of a build command.
/// Returns the (old, new) location for the dep info file of a target.
pub fn dep_info_loc(cx: &Context, pkg: &Package, target: &Target,
- kind: Kind) -> PathBuf {
- dir(cx, pkg, kind).join(&format!("dep-{}", filename(target)))
+ profile: &Profile, kind: Kind) -> PathBuf {
+ dir(cx, pkg, kind).join(&format!("dep-{}", filename(target, profile)))
}
fn is_fresh(loc: &Path, new_fingerprint: &Fingerprint) -> CargoResult<bool> {
source.fingerprint(pkg)
}
-fn filename(target: &Target) -> String {
+fn filename(target: &Target, profile: &Profile) -> String {
let kind = if target.is_lib() {"lib"} else {"bin"};
- let flavor = if target.profile().is_test() {
+ let flavor = if target.is_test() || profile.test {
"test-"
- } else if target.profile().is_doc() {
+ } else if profile.doc {
"doc-"
} else {
""
use std::collections::HashSet;
use std::collections::hash_map::HashMap;
-use std::collections::hash_map::Entry::{Occupied, Vacant};
+// use std::collections::hash_map::Entry::{Occupied, Vacant};
use std::sync::mpsc::{channel, Sender, Receiver};
use threadpool::ThreadPool;
packages: &'a PackageSet,
active: u32,
pending: HashMap<(&'a PackageId, Stage), PendingBuild>,
- state: HashMap<&'a PackageId, Freshness>,
+ pkgids: HashSet<&'a PackageId>,
printed: HashSet<&'a PackageId>,
}
packages: packages,
active: 0,
pending: HashMap::new(),
- state: HashMap::new(),
+ pkgids: HashSet::new(),
printed: HashSet::new(),
}
}
- pub fn enqueue(&mut self, pkg: &'a Package, stage: Stage,
- jobs: Vec<(Job, Freshness)>) {
- // Record the freshness state of this package as dirty if any job is
- // dirty or fresh otherwise
- let fresh = jobs.iter().fold(Fresh, |f1, &(_, f2)| f1.combine(f2));
- match self.state.entry(pkg.package_id()) {
- Occupied(mut entry) => { *entry.get_mut() = entry.get().combine(fresh); }
- Vacant(entry) => { entry.insert(fresh); }
- };
-
- // Add the package to the dependency graph
- self.queue.enqueue(&(self.resolve, self.packages), Fresh,
- (pkg.package_id(), stage),
- (pkg, jobs));
+ pub fn queue(&mut self, pkg: &'a Package, stage: Stage)
+ -> &mut Vec<(Job, Freshness)> {
+ self.pkgids.insert(pkg.package_id());
+ &mut self.queue.queue(&(self.resolve, self.packages), Fresh,
+ (pkg.package_id(), stage),
+ (pkg, Vec::new())).1
}
/// Execute all jobs necessary to build the dependency graph.
// scheduling work as quickly as possible.
let (id, stage, fresh, result) = self.rx.recv().unwrap();
info!(" end: {} {:?}", id, stage);
- let id = *self.state.keys().find(|&k| *k == &id).unwrap();
+ let id = *self.pkgids.iter().find(|&k| *k == &id).unwrap();
self.active -= 1;
match result {
Ok(()) => {
- let state = &mut self.pending[(id, stage)];
+ let state = self.pending.get_mut(&(id, stage)).unwrap();
state.amt -= 1;
state.fresh = state.fresh.combine(fresh);
if state.amt == 0 {
fresh: fresh,
});
- let mut total_fresh = fresh.combine(self.state[pkg.package_id()]);
+ let mut total_fresh = fresh;
let mut running = Vec::new();
+ debug!("start {:?} at {:?} for {}", total_fresh, stage, pkg);
for (job, job_freshness) in jobs.into_iter() {
+ debug!("job: {:?} ({:?})", job_freshness, total_fresh);
let fresh = job_freshness.combine(fresh);
total_fresh = total_fresh.combine(fresh);
let my_tx = self.tx.clone();
// run for a package, regardless of when that is. We then don't print
// out any more information for a package after we've printed it once.
let print = !self.printed.contains(&pkg.package_id());
- if print && (stage == Stage::Libraries ||
- (total_fresh == Dirty && running.len() > 0)) {
+ if print && total_fresh == Dirty && running.len() > 0 {
self.printed.insert(pkg.package_id());
match total_fresh {
Fresh => try!(config.shell().verbose(|c| {
}
None => {}
}
- if !dep.manifest().targets().iter().any(|t| {
- t.profile().is_custom_build()
- }) {
+ if !dep.manifest().targets().iter().any(|t| t.is_custom_build()) {
return Err(human(format!("package `{}` specifies that it links to \
`{}` but does not have a custom build \
script", dep.package_id(), lib)))
use std::sync::Arc;
use core::{SourceMap, Package, PackageId, PackageSet, Target, Resolve};
+use core::{Profile, Profiles};
use util::{self, CargoResult, human, caused_human};
use util::{Config, internal, ChainError, Fresh, profile, join_paths};
pub target: TargetConfig,
pub jobs: u32,
pub requested_target: Option<String>,
+ pub exec_engine: Option<Arc<Box<ExecEngine>>>,
+ pub release: bool,
}
#[derive(Clone, Default)]
Ok((output, triple))
}
-// This is a temporary assert that ensures the consistency of the arguments
-// given the current limitations of Cargo. The long term fix is to have each
-// Target know the absolute path to the build location.
-fn uniq_target_dest<'a>(targets: &[&'a Target]) -> &'a str {
- let mut curr: Option<&str> = None;
-
- for t in targets.iter().filter(|t| !t.profile().is_custom_build()) {
- let dest = t.profile().dest();
-
- match curr {
- Some(curr) => assert_eq!(curr, dest),
- None => curr = Some(dest)
- }
- }
-
- curr.unwrap()
-}
-
// Returns a mapping of the root package plus its immediate dependencies to
// where the compiled libraries are all located.
-pub fn compile_targets<'a, 'b>(env: &str,
- targets: &[&'a Target],
+pub fn compile_targets<'a, 'b>(targets: &[(&'a Target, &'a Profile)],
pkg: &'a Package,
deps: &PackageSet,
resolve: &'a Resolve,
sources: &'a SourceMap<'a>,
config: &'a Config<'b>,
build_config: BuildConfig,
- exec_engine: Option<Arc<Box<ExecEngine>>>)
+ profiles: &'a Profiles)
-> CargoResult<Compilation> {
if targets.is_empty() {
return Ok(Compilation::new(pkg))
try!(links::validate(deps));
- let dest = uniq_target_dest(targets);
+ let dest = if build_config.release {"release"} else {"debug"};
let root = if resolve.root() == pkg.package_id() {
pkg
} else {
deps.iter().find(|p| p.package_id() == resolve.root()).unwrap()
};
- let host_layout = Layout::new(root, None, dest);
+ let host_layout = Layout::new(root, None, &dest);
let target_layout = build_config.requested_target.as_ref().map(|target| {
- layout::Layout::new(root, Some(&target), dest)
+ layout::Layout::new(root, Some(&target), &dest)
});
- let mut cx = try!(Context::new(env, resolve, sources, deps, config,
+ let mut cx = try!(Context::new(resolve, sources, deps, config,
host_layout, target_layout, pkg,
- build_config));
- if let Some(exec_engine) = exec_engine {
- cx.exec_engine = exec_engine.clone();
- }
+ build_config, profiles));
let mut queue = JobQueue::new(cx.resolve, deps, cx.jobs());
- // First ensure that the destination directory exists
- try!(cx.prepare(pkg));
+ // Prep the context's build requirements and seed the job graph for all
+ // packages initially.
+ {
+ let _p = profile::start("preparing build directories");
+ try!(cx.prepare(pkg, targets));
+ prepare_init(&mut cx, pkg, &mut queue, &mut HashSet::new());
+ }
// Build up a list of pending jobs, each of which represents compiling a
// particular package. No actual work is executed as part of this, that's
- // all done later as part of the `execute` function which will run
+ // all done next as part of the `execute` function which will run
// everything in order with proper parallelism.
- let mut compiled = HashSet::new();
- each_dep(pkg, &cx, |dep| {
- compiled.insert(dep.package_id().clone());
- });
- for dep in deps.iter() {
- if dep == pkg || !compiled.contains(dep.package_id()) { continue }
-
- // Only compile lib targets for dependencies
- let targets = dep.targets().iter().filter(|target| {
- target.profile().is_custom_build() ||
- cx.is_relevant_target(*target)
- }).collect::<Vec<&Target>>();
-
- if targets.len() == 0 && dep.package_id() != resolve.root() {
- return Err(human(format!("Package `{}` has no library targets", dep)))
- }
-
- try!(compile(&targets, dep, &mut cx, &mut queue));
- }
-
try!(compile(targets, pkg, &mut cx, &mut queue));
// Now that we've figured out everything that we're going to do, do it!
for (&(ref pkg, _), output) in cx.build_state.outputs.lock().unwrap().iter() {
let any_dylib = output.library_links.iter().any(|l| {
- !l.ends_with(":static") && !l.ends_with(":framework")
+ !l.starts_with("static=") && !l.starts_with("framework=")
});
if !any_dylib { continue }
for dir in output.library_paths.iter() {
Ok(cx.compilation)
}
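// A minimal, self-contained sketch of the dylib check above, assuming the
// `KIND=NAME` link syntax (`static=foo`, `framework=Foo`, plain `foo`); the
// helper name is hypothetical and only for illustration.
fn links_any_dylib(library_links: &[String]) -> bool {
    library_links.iter().any(|l| {
        // Only plain (dynamic) libraries require extending the dynamic
        // loader's search path; static libs and frameworks do not.
        !l.starts_with("static=") && !l.starts_with("framework=")
    })
}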
-fn compile<'a, 'b>(targets: &[&'a Target], pkg: &'a Package,
+fn compile<'a, 'b>(targets: &[(&'a Target, &'a Profile)],
+ pkg: &'a Package,
cx: &mut Context<'a, 'b>,
jobs: &mut JobQueue<'a>) -> CargoResult<()> {
debug!("compile_pkg; pkg={}", pkg);
- let _p = profile::start(format!("preparing: {}", pkg));
-
- if targets.is_empty() {
- return Ok(())
- }
-
- // Prepare the fingerprint directory as the first step of building a package
- let (target1, target2) = fingerprint::prepare_init(cx, pkg, Kind::Target);
- let mut init = vec![(Job::new(target1, target2), Fresh)];
- if cx.requested_target().is_some() {
- let (plugin1, plugin2) = fingerprint::prepare_init(cx, pkg, Kind::Host);
- init.push((Job::new(plugin1, plugin2), Fresh));
- }
- jobs.enqueue(pkg, Stage::Start, init);
+ let profiling_marker = profile::start(format!("preparing: {}", pkg));
- // After the custom command has run, execute rustc for all targets of our
- // package.
+ // For each target/profile run the compiler or rustdoc accordingly. After
+ // having done so we enqueue the job in the right portion of the dependency
+ // graph and then move on to the next.
//
- // Each target has its own concept of freshness to ensure incremental
- // rebuilds on the *target* granularity, not the *package* granularity.
- let (mut libs, mut bins, mut lib_tests, mut bin_tests) =
- (Vec::new(), Vec::new(), Vec::new(), Vec::new());
- let (mut build_custom, mut run_custom) = (Vec::new(), Vec::new());
- for &target in targets.iter() {
- let work = if target.profile().is_doc() {
- let rustdoc = try!(rustdoc(pkg, target, cx));
+ // This loop also takes care of enqueueing the work needed to actually run
+ // the custom build commands as well.
+ for &(target, profile) in targets {
+ if !cx.compiled.insert((pkg.package_id(), target, profile)) {
+ continue
+ }
+
+ let work = if profile.doc {
+ let rustdoc = try!(rustdoc(pkg, target, profile, cx));
vec![(rustdoc, Kind::Target)]
} else {
let req = cx.get_requirement(pkg, target);
- try!(rustc(pkg, target, cx, req))
+ try!(rustc(pkg, target, profile, cx, req))
};
- // Figure out what stage this work will go into
- let dst = match (target.is_lib(),
- target.profile().is_test(),
- target.profile().is_custom_build()) {
- (_, _, true) => &mut build_custom,
- (true, true, _) => &mut lib_tests,
- (false, true, _) => &mut bin_tests,
- (true, false, _) => &mut libs,
- (false, false, _) if target.profile().env() == "test" => &mut bin_tests,
- (false, false, _) => &mut bins,
- };
for (work, kind) in work.into_iter() {
let (freshness, dirty, fresh) =
- try!(fingerprint::prepare_target(cx, pkg, target, kind));
+ try!(fingerprint::prepare_target(cx, pkg, target, profile, kind));
let dirty = Work::new(move |desc_tx| {
try!(work.call(desc_tx.clone()));
dirty.call(desc_tx)
});
+
+ // Figure out what stage this work will go into
+ let dst = match (target.is_lib(),
+ profile.test,
+ target.is_custom_build()) {
+ (_, _, true) => jobs.queue(pkg, Stage::BuildCustomBuild),
+ (true, true, _) => jobs.queue(pkg, Stage::LibraryTests),
+ (false, true, _) => jobs.queue(pkg, Stage::BinaryTests),
+ (true, false, _) => jobs.queue(pkg, Stage::Libraries),
+ (false, false, _) if !target.is_bin() => {
+ jobs.queue(pkg, Stage::BinaryTests)
+ }
+ (false, false, _) => jobs.queue(pkg, Stage::Binaries),
+ };
dst.push((Job::new(dirty, fresh), freshness));
}
// because we may need to run the build script multiple times. If the
// package is needed in both a host and target context, we need to run
// it once per context.
- if !target.profile().is_custom_build() { continue }
+ if !target.is_custom_build() { continue }
+ let run_custom = jobs.queue(pkg, Stage::RunCustomBuild);
let mut reqs = Vec::new();
- let requirement = targets.iter().fold(None::<Platform>, |req, t| {
- if !t.profile().is_custom_build() && !t.profile().is_doc() {
- let r2 = cx.get_requirement(pkg, *t);
- req.map(|r| r.combine(r2)).or(Some(r2))
- } else {
- req
- }
+ let requirement = pkg.targets().iter().filter(|t| !t.is_custom_build())
+ .fold(None::<Platform>, |req, t| {
+ let r2 = cx.get_requirement(pkg, t);
+ req.map(|r| r.combine(r2)).or(Some(r2))
}).unwrap_or(Platform::Target);
match requirement {
Platform::Target => reqs.push(Platform::Target),
}
}
}
- let before = run_custom.len();
for &req in reqs.iter() {
let kind = match req { Platform::Plugin => Kind::Host, _ => Kind::Target };
let key = (pkg.package_id().clone(), kind);
try!(custom_build::prepare(pkg, target, req, cx));
run_custom.push((Job::new(dirty, fresh), freshness));
}
+ }
+ drop(profiling_marker);
- // If no build scripts were run, no need to compile the build script!
- if run_custom.len() == before {
- dst.pop();
+ // Be sure to compile all dependencies of this target as well. The check at
+ // the top of `compile` ensures each (package, target, profile) combination
+ // is only enqueued once, so this recursion terminates.
+ for &(target, profile) in targets {
+ for &(pkg, target, p) in cx.dep_targets(pkg, target, profile).iter() {
+ try!(compile(&[(target, p)], pkg, cx, jobs));
}
}
- jobs.enqueue(pkg, Stage::BuildCustomBuild, build_custom);
- jobs.enqueue(pkg, Stage::RunCustomBuild, run_custom);
- jobs.enqueue(pkg, Stage::Libraries, libs);
- jobs.enqueue(pkg, Stage::Binaries, bins);
- jobs.enqueue(pkg, Stage::BinaryTests, bin_tests);
- jobs.enqueue(pkg, Stage::LibraryTests, lib_tests);
Ok(())
}
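// Orientation only (assumed ordering, not part of this change): a package's
// queued work is expected to drain roughly as
//
//     Start -> BuildCustomBuild -> RunCustomBuild -> Libraries
//           -> Binaries / LibraryTests / BinaryTests
//
// with freshness propagated along each edge by the job queue above.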
-fn rustc(package: &Package, target: &Target,
+fn prepare_init<'a, 'b>(cx: &mut Context<'a, 'b>,
+ pkg: &'a Package,
+ jobs: &mut JobQueue<'a>,
+ visited: &mut HashSet<&'a PackageId>) {
+ if !visited.insert(pkg.package_id()) { return }
+
+ // Set up all dependencies
+ for dep in cx.resolve.deps(pkg.package_id()).into_iter().flat_map(|a| a) {
+ let dep = cx.get_package(dep);
+ prepare_init(cx, dep, jobs, visited);
+ }
+
+ // Initialize blank queues for each stage
+ jobs.queue(pkg, Stage::BuildCustomBuild);
+ jobs.queue(pkg, Stage::RunCustomBuild);
+ jobs.queue(pkg, Stage::Libraries);
+ jobs.queue(pkg, Stage::Binaries);
+ jobs.queue(pkg, Stage::LibraryTests);
+ jobs.queue(pkg, Stage::BinaryTests);
+
+ // Prepare the fingerprint directory as the first step of building a package
+ let (target1, target2) = fingerprint::prepare_init(cx, pkg, Kind::Target);
+ let init = jobs.queue(pkg, Stage::Start);
+ if cx.requested_target().is_some() {
+ let (plugin1, plugin2) = fingerprint::prepare_init(cx, pkg,
+ Kind::Host);
+ init.push((Job::new(plugin1, plugin2), Fresh));
+ }
+ init.push((Job::new(target1, target2), Fresh));
+}
+
+fn rustc(package: &Package, target: &Target, profile: &Profile,
cx: &mut Context, req: Platform)
-> CargoResult<Vec<(Work, Kind)>> {
let crate_types = target.rustc_crate_types();
- let rustcs = try!(prepare_rustc(package, target, crate_types, cx, req));
+ let rustcs = try!(prepare_rustc(package, target, profile, crate_types,
+ cx, req));
- let plugin_deps = crawl_build_deps(cx, package, target, Kind::Host);
+ let plugin_deps = crawl_build_deps(cx, package, target, profile, Kind::Host);
return rustcs.into_iter().map(|(mut rustc, kind)| {
let name = package.name().to_string();
}
let exec_engine = cx.exec_engine.clone();
- let filenames = try!(cx.target_filenames(target));
+ let filenames = try!(cx.target_filenames(target, profile));
let root = cx.out_dir(package, kind, target);
// Prepare the native lib state (extra -L and -l flags)
let build_state = cx.build_state.clone();
let current_id = package.package_id().clone();
let plugin_deps = plugin_deps.clone();
- let mut native_lib_deps = crawl_build_deps(cx, package, target, kind);
- if package.has_custom_build() && !target.profile().is_custom_build() {
+ let mut native_lib_deps = crawl_build_deps(cx, package, target,
+ profile, kind);
+ if package.has_custom_build() && !target.is_custom_build() {
native_lib_deps.insert(0, current_id.clone());
}
t.is_lib()
});
- let rustc_dep_info_loc = root.join(&target.file_stem())
+ let rustc_dep_info_loc = root.join(&cx.file_stem(target, profile))
.with_extension("d");
- let dep_info_loc = fingerprint::dep_info_loc(cx, package, target, kind);
+ let dep_info_loc = fingerprint::dep_info_loc(cx, package, target,
+ profile, kind);
let cwd = cx.config.cwd().to_path_buf();
Ok((Work::new(move |desc_tx| {
}
}
-fn crawl_build_deps<'a>(cx: &'a Context, pkg: &'a Package,
- target: &Target, kind: Kind) -> Vec<PackageId> {
+fn crawl_build_deps<'a>(cx: &'a Context,
+ pkg: &'a Package,
+ target: &Target,
+ profile: &Profile,
+ kind: Kind) -> Vec<PackageId> {
let mut deps = HashSet::new();
- visit(cx, pkg, target, kind, &mut HashSet::new(), &mut deps);
+ visit(cx, pkg, target, profile, kind, &mut HashSet::new(), &mut deps);
let mut ret: Vec<_> = deps.into_iter().collect();
ret.sort();
return ret;
- fn visit<'a>(cx: &'a Context, pkg: &'a Package, target: &Target,
+ fn visit<'a>(cx: &'a Context,
+ pkg: &'a Package, target: &Target, profile: &Profile,
kind: Kind,
visiting: &mut HashSet<&'a PackageId>,
libs: &mut HashSet<PackageId>) {
- for &(pkg, target) in cx.dep_targets(pkg, target).iter() {
+ for &(pkg, target, p) in cx.dep_targets(pkg, target, profile).iter() {
+ if !target.linkable() { continue }
let req = cx.get_requirement(pkg, target);
if !req.includes(kind) { continue }
if !visiting.insert(pkg.package_id()) { continue }
if pkg.has_custom_build() {
libs.insert(pkg.package_id().clone());
}
- visit(cx, pkg, target, kind, visiting, libs);
+ visit(cx, pkg, target, p, kind, visiting, libs);
visiting.remove(&pkg.package_id());
}
}
let search_path = rustc.get_env(var).unwrap_or(OsString::new());
let mut search_path = env::split_paths(&search_path).collect::<Vec<_>>();
for id in plugin_deps.into_iter() {
+ debug!("adding libs for plugin dep: {}", id);
let output = &build_state[(id, Kind::Host)];
for path in output.library_paths.iter() {
search_path.push(path.clone());
Ok(())
}
-fn prepare_rustc(package: &Package, target: &Target, crate_types: Vec<&str>,
+fn prepare_rustc(package: &Package, target: &Target, profile: &Profile,
+ crate_types: Vec<&str>,
cx: &Context, req: Platform)
-> CargoResult<Vec<(CommandPrototype, Kind)>> {
let mut base = try!(process(CommandType::Rustc, package, target, cx));
- build_base_args(cx, &mut base, package, target, &crate_types);
+ build_base_args(cx, &mut base, package, target, profile, &crate_types);
- let mut target_cmd = base.clone();
- let mut plugin_cmd = base;
- build_plugin_args(&mut target_cmd, cx, package, target, Kind::Target);
- build_plugin_args(&mut plugin_cmd, cx, package, target, Kind::Host);
- try!(build_deps_args(&mut target_cmd, target, package, cx, Kind::Target));
- try!(build_deps_args(&mut plugin_cmd, target, package, cx, Kind::Host));
+ let mut targ_cmd = base.clone();
+ let mut host_cmd = base;
+ build_plugin_args(&mut targ_cmd, cx, package, target, Kind::Target);
+ build_plugin_args(&mut host_cmd, cx, package, target, Kind::Host);
+ try!(build_deps_args(&mut targ_cmd, target, profile, package, cx, Kind::Target));
+ try!(build_deps_args(&mut host_cmd, target, profile, package, cx, Kind::Host));
Ok(match req {
- Platform::Target => vec![(target_cmd, Kind::Target)],
- Platform::Plugin => vec![(plugin_cmd, Kind::Host)],
- Platform::PluginAndTarget if cx.requested_target().is_none() =>
- vec![(target_cmd, Kind::Target)],
- Platform::PluginAndTarget => vec![(target_cmd, Kind::Target),
- (plugin_cmd, Kind::Host)],
+ Platform::Target => vec![(targ_cmd, Kind::Target)],
+ Platform::Plugin => vec![(host_cmd, Kind::Host)],
+ Platform::PluginAndTarget if cx.requested_target().is_none() => {
+ vec![(targ_cmd, Kind::Target)]
+ }
+ Platform::PluginAndTarget => vec![(targ_cmd, Kind::Target),
+ (host_cmd, Kind::Host)],
})
}
-fn rustdoc(package: &Package, target: &Target,
+fn rustdoc(package: &Package, target: &Target, profile: &Profile,
cx: &mut Context) -> CargoResult<Work> {
let kind = Kind::Target;
let cx_root = cx.get_package(cx.resolve.root()).absolute_target_dir()
None => {}
}
- try!(build_deps_args(&mut rustdoc, target, package, cx, kind));
+ try!(build_deps_args(&mut rustdoc, target, profile, package, cx, kind));
if package.has_custom_build() {
rustdoc.env("OUT_DIR", &cx.layout(package, kind).build_out(package));
cmd: &mut CommandPrototype,
pkg: &Package,
target: &Target,
+ profile: &Profile,
crate_types: &[&str]) {
- let metadata = target.metadata();
+ let Profile {
+ opt_level, lto, codegen_units, debuginfo, ndebug, rpath, test,
+ doc: _doc,
+ } = *profile;
// Move to cwd so the root_path() passed below is actually correct
cmd.cwd(cx.config.cwd());
cmd.arg("--crate-type").arg(crate_type);
}
- // Despite whatever this target's profile says, we need to configure it
- // based off the profile found in the root package's targets.
- let profile = cx.profile(target);
-
- let prefer_dynamic = profile.is_for_host() ||
+ let prefer_dynamic = target.for_host() ||
(crate_types.contains(&"dylib") &&
pkg.package_id() != cx.resolve.root());
if prefer_dynamic {
cmd.arg("-C").arg("prefer-dynamic");
}
- if profile.opt_level() != 0 {
- cmd.arg("-C").arg(&format!("opt-level={}", profile.opt_level()));
+ if opt_level != 0 {
+ cmd.arg("-C").arg(&format!("opt-level={}", opt_level));
}
// Disable LTO for host builds, since LTO and prefer-dynamic are mutually
// exclusive.
- let lto = (target.is_bin() || target.is_staticlib()) && profile.lto() &&
- !profile.is_for_host();
- if lto {
+ if target.can_lto() && lto && !target.for_host() {
cmd.args(&["-C", "lto"]);
} else {
// There are some restrictions with LTO and codegen-units, so we
// only add codegen units when LTO is not used.
- match profile.codegen_units() {
+ match codegen_units {
Some(n) => { cmd.arg("-C").arg(&format!("codegen-units={}", n)); }
None => {},
}
}
- if profile.debug() {
+ if debuginfo {
cmd.arg("-g");
- } else {
+ }
+
+ if ndebug {
cmd.args(&["--cfg", "ndebug"]);
}
- if profile.is_test() && profile.uses_test_harness() {
+ if test && target.harness() {
cmd.arg("--test");
}
None => {}
}
- match metadata {
+ match cx.target_metadata(target, profile) {
Some(m) => {
cmd.arg("-C").arg(&format!("metadata={}", m.metadata));
cmd.arg("-C").arg(&format!("extra-filename={}", m.extra_filename));
None => {}
}
- if profile.rpath() {
+ if rpath {
cmd.arg("-C").arg("rpath");
}
}
}
}
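// A minimal sketch, under the assumptions above, of how the destructured
// profile fields interact when picking codegen flags; `SketchProfile` and
// `sketch_codegen_flags` are hypothetical names used only for illustration.
struct SketchProfile {
    opt_level: u32,
    lto: bool,
    codegen_units: Option<u32>,
    for_host: bool,
}

fn sketch_codegen_flags(p: &SketchProfile, can_lto: bool) -> Vec<String> {
    let mut flags = Vec::new();
    if p.opt_level != 0 {
        flags.push(format!("-C opt-level={}", p.opt_level));
    }
    // LTO is skipped for host (plugin) builds because it conflicts with
    // `-C prefer-dynamic`; codegen-units are only emitted when LTO is off.
    if can_lto && p.lto && !p.for_host {
        flags.push("-C lto".to_string());
    } else if let Some(n) = p.codegen_units {
        flags.push(format!("-C codegen-units={}", n));
    }
    flags
}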
-fn build_deps_args(cmd: &mut CommandPrototype, target: &Target,
- package: &Package, cx: &Context, kind: Kind)
+fn build_deps_args(cmd: &mut CommandPrototype,
+ target: &Target,
+ profile: &Profile,
+ package: &Package,
+ cx: &Context,
+ kind: Kind)
-> CargoResult<()> {
let layout = cx.layout(package, kind);
cmd.arg("-L").arg(&{
cmd.env("OUT_DIR", &layout.build_out(package));
}
- for &(pkg, target) in cx.dep_targets(package, target).iter() {
- try!(link_to(cmd, pkg, target, cx, kind));
+ for &(pkg, target, p) in cx.dep_targets(package, target, profile).iter() {
+ if target.linkable() {
+ try!(link_to(cmd, pkg, target, p, cx, kind));
+ }
}
return Ok(());
fn link_to(cmd: &mut CommandPrototype, pkg: &Package, target: &Target,
- cx: &Context, kind: Kind) -> CargoResult<()> {
+ profile: &Profile, cx: &Context, kind: Kind) -> CargoResult<()> {
// If this target is itself a plugin *or* if it's being linked to a
// plugin, then we want the plugin directory. Otherwise we want the
// target directory (hence the || here).
let layout = cx.layout(pkg, match kind {
Kind::Host => Kind::Host,
- Kind::Target if target.profile().is_for_host() => Kind::Host,
+ Kind::Target if target.for_host() => Kind::Host,
Kind::Target => Kind::Target,
});
- for filename in try!(cx.target_filenames(target)).iter() {
+ for filename in try!(cx.target_filenames(target, profile)).iter() {
if filename.ends_with(".a") { continue }
let mut v = OsString::new();
v.push(target.name());
Ok(cmd)
}
-fn each_dep<'a, F>(pkg: &Package, cx: &'a Context, mut f: F)
- where F: FnMut(&'a Package)
-{
- let mut visited = HashSet::new();
- let pkg = cx.get_package(pkg.package_id());
- visit_deps(pkg, cx, &mut visited, &mut f);
-
- fn visit_deps<'a, F>(pkg: &'a Package, cx: &'a Context,
- visited: &mut HashSet<&'a PackageId>, f: &mut F)
- where F: FnMut(&'a Package)
- {
- if !visited.insert(pkg.package_id()) { return }
- f(pkg);
- let deps = match cx.resolve.deps(pkg.package_id()) {
- Some(deps) => deps,
- None => return,
- };
- for dep_id in deps {
- visit_deps(cx.get_package(dep_id), cx, visited, f);
- }
- }
-}
-
fn envify(s: &str) -> String {
s.chars()
.flat_map(|c| c.to_uppercase())
use core::Source;
use sources::PathSource;
-use ops::{self, ExecEngine, ProcessEngine};
+use ops::{self, ExecEngine, ProcessEngine, Compilation};
use util::{CargoResult, ProcessError};
pub struct TestOptions<'a, 'b: 'a> {
pub compile_opts: ops::CompileOptions<'a, 'b>,
pub no_run: bool,
- pub name: Option<&'a str>,
}
pub fn run_tests(manifest_path: &Path,
options: &TestOptions,
test_args: &[String]) -> CargoResult<Option<ProcessError>> {
let config = options.compile_opts.config;
- let mut source = try!(PathSource::for_path(&manifest_path.parent().unwrap(),
- config));
- try!(source.update());
-
- let mut compile = try!(ops::compile(manifest_path, &options.compile_opts));
- if options.no_run { return Ok(None) }
- compile.tests.sort();
-
- let tarname = options.name;
- let tests_to_run = compile.tests.iter().filter(|&&(ref test_name, _)| {
- tarname.map_or(true, |tarname| tarname == *test_name)
- });
-
- let cwd = config.cwd();
- for &(_, ref exe) in tests_to_run {
- let to_display = match exe.relative_from(&cwd) {
- Some(path) => path,
- None => &**exe,
- };
- let mut cmd = try!(compile.target_process(exe, &compile.package));
- cmd.args(test_args);
- try!(config.shell().concise(|shell| {
- shell.status("Running", to_display.display().to_string())
- }));
- try!(config.shell().verbose(|shell| {
- shell.status("Running", cmd.to_string())
- }));
- match ExecEngine::exec(&mut ProcessEngine, cmd) {
- Ok(()) => {}
- Err(e) => return Ok(Some(e))
- }
- }
-
- if options.name.is_some() { return Ok(None) }
-
- if options.compile_opts.env == "bench" { return Ok(None) }
+ let compile = match try!(build_and_run(manifest_path, options, test_args)) {
+ Ok(compile) => compile,
+ Err(e) => return Ok(Some(e)),
+ };
let libs = compile.package.targets().iter().filter_map(|target| {
- if !target.profile().is_doctest() || !target.is_lib() {
+ if !target.doctested() || !target.is_lib() {
return None
}
Some((target.src_path(), target.name()))
let mut args = args.to_vec();
args.push("--bench".to_string());
- run_tests(manifest_path, options, &args)
+ Ok(try!(build_and_run(manifest_path, options, &args)).err())
+}
+
+fn build_and_run(manifest_path: &Path,
+ options: &TestOptions,
+ test_args: &[String])
+ -> CargoResult<Result<Compilation, ProcessError>> {
+ let config = options.compile_opts.config;
+ let mut source = try!(PathSource::for_path(&manifest_path.parent().unwrap(),
+ config));
+ try!(source.update());
+
+ let mut compile = try!(ops::compile(manifest_path, &options.compile_opts));
+ if options.no_run { return Ok(Ok(compile)) }
+ compile.tests.sort();
+
+ let cwd = config.cwd();
+ for &(_, ref exe) in &compile.tests {
+ let to_display = match exe.relative_from(&cwd) {
+ Some(path) => path,
+ None => &**exe,
+ };
+ let mut cmd = try!(compile.target_process(exe, &compile.package));
+ cmd.args(test_args);
+ try!(config.shell().concise(|shell| {
+ shell.status("Running", to_display.display().to_string())
+ }));
+ try!(config.shell().verbose(|shell| {
+ shell.status("Running", cmd.to_string())
+ }));
+ match ExecEngine::exec(&mut ProcessEngine, cmd) {
+ Ok(()) => {}
+ Err(e) => return Ok(Err(e))
+ }
+ }
+
+ Ok(Ok(compile))
}
pub use self::cargo_clean::{clean, CleanOptions};
pub use self::cargo_compile::{compile, compile_pkg, CompileOptions};
+pub use self::cargo_compile::{CompileFilter, CompileMode};
pub use self::cargo_read_manifest::{read_manifest,read_package,read_packages};
pub use self::cargo_rustc::{compile_targets, Compilation, Layout, Kind, rustc_version};
pub use self::cargo_rustc::{Context, LayoutProxy};
//! This structure is used to store the dependency graph and dynamically update
//! it to figure out when a dependency should be built.
-use std::collections::hash_set::HashSet;
-use std::collections::hash_map::HashMap;
use std::collections::hash_map::Entry::{Occupied, Vacant};
+use std::collections::{HashMap, HashSet};
use std::hash::Hash;
pub use self::Freshness::{Fresh, Dirty};
///
/// It is assumed that any dependencies of this package will eventually also
/// be added to the dependency queue.
- pub fn enqueue(&mut self, cx: &K::Context, fresh: Freshness, key: K,
- value: V) {
- // ignore self-deps
- if self.dep_map.contains_key(&key) { return }
+ pub fn queue(&mut self, cx: &K::Context, fresh: Freshness,
+ key: K, value: V) -> &mut V {
+ let slot = match self.dep_map.entry(key.clone()) {
+ Occupied(v) => return &mut v.into_mut().1,
+ Vacant(v) => v,
+ };
if fresh == Dirty {
self.dirty.insert(key.clone());
};
assert!(rev.insert(key.clone()));
}
- assert!(self.dep_map.insert(key, (my_dependencies, value)).is_none());
+ &mut slot.insert((my_dependencies, value)).1
}
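// Minimal sketch (illustration only, not part of this change) of the
// entry-based "insert or reuse" pattern that `queue` relies on, using a
// plain HashMap<String, Vec<u32>> in place of `dep_map` and the
// `Occupied`/`Vacant` imports above.
fn slot_for<'a>(map: &'a mut HashMap<String, Vec<u32>>,
                key: &str) -> &'a mut Vec<u32> {
    match map.entry(key.to_string()) {
        Occupied(e) => e.into_mut(),
        Vacant(e) => e.insert(Vec::new()),
    }
}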
/// Dequeues a package that is ready to be built.
use semver;
use rustc_serialize::{Decodable, Decoder};
-use core::SourceId;
+use core::{SourceId, Profiles};
use core::{Summary, Manifest, Target, Dependency, PackageId, GitReference};
use core::dependency::Kind;
use core::manifest::{LibKind, Profile, ManifestMetadata};
Some(ref toml) => add_unused_keys(&mut manifest, toml, "".to_string()),
None => {}
}
- if manifest.targets().iter()
- .filter(|t| !t.profile().is_custom_build() )
- .next().is_none() {
+ if !manifest.targets().iter().any(|t| !t.is_custom_build()) {
return Err(human(format!("either a [lib] or [[bin]] section must \
be present")))
}
let new_build = project.build.as_ref().map(PathBuf::new);
// Get targets
- let profiles = self.profile.clone().unwrap_or(Default::default());
let targets = normalize(&lib,
&bins,
new_build,
&examples,
&tests,
&benches,
- &metadata,
- &profiles);
+ &metadata);
if targets.is_empty() {
debug!("manifest has no build targets");
repository: project.repository.clone(),
keywords: project.keywords.clone().unwrap_or(Vec::new()),
};
+ let profiles = build_profiles(&self.profile);
let mut manifest = Manifest::new(summary,
targets,
layout.root.join("target"),
exclude,
include,
project.links.clone(),
- metadata);
+ metadata,
+ profiles);
if used_deprecated_lib {
manifest.add_warning(format!("the [[lib]] section has been \
deprecated in favor of [lib]"));
examples: &[TomlExampleTarget],
tests: &[TomlTestTarget],
benches: &[TomlBenchTarget],
- metadata: &Metadata,
- profiles: &TomlProfiles) -> Vec<Target> {
- #[derive(Copy)]
- enum TestDep { Needed, NotNeeded }
-
- fn merge(profile: Profile, toml: &Option<TomlProfile>) -> Profile {
- let toml = match *toml {
- Some(ref toml) => toml,
- None => return profile,
- };
- let opt_level = toml.opt_level.unwrap_or(profile.opt_level());
- let lto = toml.lto.unwrap_or(profile.lto());
- let codegen_units = toml.codegen_units;
- let debug = toml.debug.unwrap_or(profile.debug());
- let rpath = toml.rpath.unwrap_or(profile.rpath());
- profile.set_opt_level(opt_level).set_lto(lto)
- .set_codegen_units(codegen_units)
- .set_debug(debug).set_rpath(rpath)
- }
-
- fn target_profiles(target: &TomlTarget, profiles: &TomlProfiles,
- dep: TestDep) -> Vec<Profile> {
- let mut ret = vec![
- merge(Profile::default_dev(), &profiles.dev),
- merge(Profile::default_release(), &profiles.release),
- ];
-
- match target.test {
- Some(true) | None => {
- ret.push(merge(Profile::default_test(), &profiles.test));
- }
- Some(false) => {}
- }
-
- let doctest = target.doctest.unwrap_or(true);
- match target.doc {
- Some(true) | None => {
- ret.push(merge(Profile::default_doc().set_doctest(doctest),
- &profiles.doc));
- }
- Some(false) => {}
- }
-
- match target.bench {
- Some(true) | None => {
- ret.push(merge(Profile::default_bench(), &profiles.bench));
- }
- Some(false) => {}
- }
-
- match dep {
- TestDep::Needed => {
- ret.push(merge(Profile::default_test().set_test(false),
- &profiles.test));
- ret.push(merge(Profile::default_doc().set_doc(false),
- &profiles.doc));
- ret.push(merge(Profile::default_bench().set_test(false),
- &profiles.bench));
- }
- _ => {}
- }
-
- if target.plugin == Some(true) {
- ret = ret.into_iter().map(|p| p.set_for_host(true)).collect();
- }
-
- ret
+ metadata: &Metadata) -> Vec<Target> {
+ fn configure(toml: &TomlTarget, target: &mut Target) {
+ let t2 = target.clone();
+ target.set_tested(toml.test.unwrap_or(t2.tested()))
+ .set_doc(toml.doc.unwrap_or(t2.documented()))
+ .set_doctest(toml.doctest.unwrap_or(t2.doctested()))
+ .set_benched(toml.bench.unwrap_or(t2.benched()))
+ .set_harness(toml.harness.unwrap_or(t2.harness()))
+ .set_for_host(toml.plugin.unwrap_or(t2.for_host()));
}
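// For example (illustrative manifest snippet), a target section such as
//
//     [[bin]]
//     name = "tool"
//     test = false
//     bench = false
//
// only flips the `tested`/`benched` flags here and leaves the remaining
// defaults untouched.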
fn lib_targets(dst: &mut Vec<Target>, libs: &[TomlLibTarget],
- dep: TestDep, metadata: &Metadata, profiles: &TomlProfiles) {
+ metadata: &Metadata) {
let l = &libs[0];
let path = l.path.clone().unwrap_or_else(|| {
PathValue::Path(Path::new("src").join(&format!("{}.rs", l.name)))
vec![if l.plugin == Some(true) {LibKind::Dylib} else {LibKind::Lib}]
});
- for profile in target_profiles(l, profiles, dep).iter() {
- let mut metadata = metadata.clone();
- // Libs and their tests are built in parallel, so we need to make
- // sure that their metadata is different.
- if profile.is_test() {
- metadata.mix(&"test");
- }
- dst.push(Target::lib_target(&l.name, crate_types.clone(),
- &path.to_path(), profile,
- metadata));
- }
+ dst.push(Target::lib_target(&l.name, crate_types.clone(),
+ &path.to_path(),
+ metadata.clone()));
}
fn bin_targets(dst: &mut Vec<Target>, bins: &[TomlBinTarget],
- dep: TestDep, metadata: &Metadata,
- profiles: &TomlProfiles,
default: &mut FnMut(&TomlBinTarget) -> PathBuf) {
for bin in bins.iter() {
let path = bin.path.clone().unwrap_or_else(|| {
PathValue::Path(default(bin))
});
-
- for profile in target_profiles(bin, profiles, dep).iter() {
- let metadata = if profile.is_test() {
- // Make sure that the name of this test executable doesn't
- // conflicts with a library that has the same name and is
- // being tested
- let mut metadata = metadata.clone();
- metadata.mix(&format!("bin-{}", bin.name));
- Some(metadata)
- } else {
- None
- };
- dst.push(Target::bin_target(&bin.name,
- &path.to_path(),
- profile,
- metadata));
- }
+ let mut target = Target::bin_target(&bin.name, &path.to_path(),
+ None);
+ configure(bin, &mut target);
+ dst.push(target);
}
}
- fn custom_build_target(dst: &mut Vec<Target>, cmd: &Path,
- profiles: &TomlProfiles) {
- let profiles = [
- merge(Profile::default_dev().set_for_host(true).set_custom_build(true),
- &profiles.dev),
- ];
-
+ fn custom_build_target(dst: &mut Vec<Target>, cmd: &Path) {
let name = format!("build-script-{}",
cmd.file_stem().and_then(|s| s.to_str()).unwrap_or(""));
- for profile in profiles.iter() {
- dst.push(Target::custom_build_target(&name, cmd, profile, None));
- }
+ dst.push(Target::custom_build_target(&name, cmd, None));
}
- fn example_targets(dst: &mut Vec<Target>, examples: &[TomlExampleTarget],
- profiles: &TomlProfiles,
+ fn example_targets(dst: &mut Vec<Target>,
+ examples: &[TomlExampleTarget],
default: &mut FnMut(&TomlExampleTarget) -> PathBuf) {
for ex in examples.iter() {
let path = ex.path.clone().unwrap_or_else(|| {
PathValue::Path(default(ex))
});
- let profile = merge(Profile::default_example(), &profiles.test);
- let profile_release = merge(Profile::default_release(), &profiles.release);
- dst.push(Target::example_target(&ex.name,
- &path.to_path(),
- &profile));
- dst.push(Target::example_target(&ex.name,
- &path.to_path(),
- &profile_release));
+ let mut target = Target::example_target(&ex.name, &path.to_path());
+ configure(ex, &mut target);
+ dst.push(target);
}
}
fn test_targets(dst: &mut Vec<Target>, tests: &[TomlTestTarget],
- metadata: &Metadata, profiles: &TomlProfiles,
+ metadata: &Metadata,
default: &mut FnMut(&TomlTestTarget) -> PathBuf) {
for test in tests.iter() {
let path = test.path.clone().unwrap_or_else(|| {
PathValue::Path(default(test))
});
- let harness = test.harness.unwrap_or(true);
// make sure this metadata is different from any same-named libs.
let mut metadata = metadata.clone();
metadata.mix(&format!("test-{}", test.name));
- let profile = Profile::default_test().set_harness(harness);
- let profile = merge(profile, &profiles.test);
- dst.push(Target::test_target(&test.name,
- &path.to_path(),
- &profile,
- metadata));
+ let mut target = Target::test_target(&test.name, &path.to_path(),
+ metadata);
+ configure(test, &mut target);
+ dst.push(target);
}
}
fn bench_targets(dst: &mut Vec<Target>, benches: &[TomlBenchTarget],
- metadata: &Metadata, profiles: &TomlProfiles,
+ metadata: &Metadata,
default: &mut FnMut(&TomlBenchTarget) -> PathBuf) {
for bench in benches.iter() {
let path = bench.path.clone().unwrap_or_else(|| {
PathValue::Path(default(bench))
});
- let harness = bench.harness.unwrap_or(true);
// make sure this metadata is different from any same-named libs.
let mut metadata = metadata.clone();
metadata.mix(&format!("bench-{}", bench.name));
- let profile = Profile::default_bench().set_harness(harness);
- let profile = merge(profile, &profiles.bench);
- dst.push(Target::bench_target(&bench.name,
- &path.to_path(),
- &profile,
- metadata));
+ let mut target = Target::bench_target(&bench.name,
+ &path.to_path(),
+ metadata);
+ configure(bench, &mut target);
+ dst.push(target);
}
}
let mut ret = Vec::new();
- let test_dep = if examples.len() > 0 || tests.len() > 0 || benches.len() > 0 {
- TestDep::Needed
- } else {
- TestDep::NotNeeded
- };
-
match (libs, bins) {
([_, ..], [_, ..]) => {
- lib_targets(&mut ret, libs, TestDep::Needed, metadata, profiles);
- bin_targets(&mut ret, bins, test_dep, metadata, profiles,
+ lib_targets(&mut ret, libs, metadata);
+ bin_targets(&mut ret, bins,
&mut |bin| Path::new("src").join("bin")
.join(&format!("{}.rs", bin.name)));
},
([_, ..], []) => {
- lib_targets(&mut ret, libs, TestDep::Needed, metadata, profiles);
+ lib_targets(&mut ret, libs, metadata);
},
([], [_, ..]) => {
- bin_targets(&mut ret, bins, test_dep, metadata, profiles,
+ bin_targets(&mut ret, bins,
&mut |bin| Path::new("src")
.join(&format!("{}.rs", bin.name)));
},
}
if let Some(custom_build) = custom_build {
- custom_build_target(&mut ret, &custom_build, profiles);
+ custom_build_target(&mut ret, &custom_build);
}
- example_targets(&mut ret, examples, profiles,
+ example_targets(&mut ret, examples,
&mut |ex| Path::new("examples")
.join(&format!("{}.rs", ex.name)));
- test_targets(&mut ret, tests, metadata, profiles, &mut |test| {
+ test_targets(&mut ret, tests, metadata, &mut |test| {
if test.name == "test" {
Path::new("src").join("test.rs")
} else {
}
});
- bench_targets(&mut ret, benches, metadata, profiles, &mut |bench| {
+ bench_targets(&mut ret, benches, metadata, &mut |bench| {
if bench.name == "bench" {
Path::new("src").join("bench.rs")
} else {
ret
}
+
+fn build_profiles(profiles: &Option<TomlProfiles>) -> Profiles {
+ let profiles = profiles.as_ref();
+ return Profiles {
+ release: merge(Profile::default_release(),
+ profiles.and_then(|p| p.release.as_ref())),
+ dev: merge(Profile::default_dev(),
+ profiles.and_then(|p| p.dev.as_ref())),
+ test: merge(Profile::default_test(),
+ profiles.and_then(|p| p.test.as_ref())),
+ bench: merge(Profile::default_bench(),
+ profiles.and_then(|p| p.bench.as_ref())),
+ doc: merge(Profile::default_doc(),
+ profiles.and_then(|p| p.doc.as_ref())),
+ };
+
+ fn merge(profile: Profile, toml: Option<&TomlProfile>) -> Profile {
+ let &TomlProfile {
+ opt_level, lto, codegen_units, debug, rpath
+ } = match toml {
+ Some(toml) => toml,
+ None => return profile,
+ };
+ Profile {
+ opt_level: opt_level.unwrap_or(profile.opt_level),
+ lto: lto.unwrap_or(profile.lto),
+ codegen_units: codegen_units,
+ debuginfo: debug.unwrap_or(profile.debuginfo),
+ ndebug: !debug.unwrap_or(!profile.ndebug),
+ rpath: rpath.unwrap_or(profile.rpath),
+ test: profile.test,
+ doc: profile.doc,
+ }
+ }
+}
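// For reference, a manifest section like the following (values are
// illustrative) is what feeds `merge` above; any key that is omitted falls
// back to the corresponding built-in default profile:
//
//     [profile.release]
//     opt-level = 3
//     lto = true
//     debug = false
//     rpath = false
//     codegen-units = 1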
name = "foo"
version = "0.0.1"
authors = []
-
- [[bin]]
- name="bin1"
- path="src/bin1.rs"
-
- [[bin]]
- name="bin2"
- path="src/bin2.rs"
"#)
- .file("src/bin1.rs", r#"
+ .file("benches/bin1.rs", r#"
extern crate test;
#[bench] fn run1(_ben: &mut test::Bencher) { }"#)
- .file("src/bin2.rs", r#"
+ .file("benches/bin2.rs", r#"
extern crate test;
#[bench] fn run2(_ben: &mut test::Bencher) { }"#);
}
"#);
- assert_that(p.cargo_process("build"), execs().with_status(0));
+ println!("build");
+ assert_that(p.cargo_process("build").arg("-v"), execs().with_status(0));
+ println!("bin");
assert_that(process(&p.bin("foo")).unwrap(),
execs().with_stdout(format!("0-5-1 @ alpha.1 in {}\n",
p.root().display()).as_slice()));
- assert_that(p.cargo("test"),
+ println!("test");
+ assert_that(p.cargo("test").arg("-v"),
execs().with_status(0));
});
fn main() { println!("{}, {}!", world::get_goodbye(), world::get_world()); }
"#);
- assert_that(p.cargo_process("test"), execs().with_status(0));
+ assert_that(p.cargo_process("test").arg("-v"), execs().with_status(0));
assert_that(process(&p.bin("examples/hello")).unwrap(),
execs().with_stdout("Hello, World!\n"));
assert_that(process(&p.bin("examples/goodbye")).unwrap(),
.file("src/main.rs", "fn main() {}")
.file("examples/foo.rs", "fn main() {}");
- p.cargo_process("test").arg("--no-run")
+ p.cargo_process("test").arg("--no-run").arg("-v")
.exec_with_output()
.unwrap();
assert_that(&p.bin("foo"), existing_file());
assert_that(&p.bin("examples/foo"), existing_file());
- p.cargo("test").arg("--no-run")
- .exec_with_output()
- .unwrap();
+ p.cargo("test").arg("--no-run").arg("-v")
+ .exec_with_output()
+ .unwrap();
assert_that(&p.bin("foo"), existing_file());
assert_that(&p.bin("examples/foo"), existing_file());
.file("bar/src/lib.rs", r#"
#![feature(macro_rules)]
// make sure this file takes a while to compile
- macro_rules! f0( () => (1u) );
+ macro_rules! f0( () => (1) );
macro_rules! f1( () => ({(f0!()) + (f0!())}) );
macro_rules! f2( () => ({(f1!()) + (f1!())}) );
macro_rules! f3( () => ({(f2!()) + (f2!())}) );
"#);
assert_that(p.cargo_process("test"),
execs().with_status(0));
+ assert_that(p.cargo("run")
+ .arg("--example").arg("e1").arg("--release").arg("-v"),
+ execs().with_status(0));
});
test!(selective_testing_with_docs {