// src/cargo/ops/vendor.rs
use crate::core::shell::Verbosity;
use crate::core::{GitReference, Workspace};
use crate::ops;
use crate::sources::path::PathSource;
use crate::util::{CargoResult, Config};
use anyhow::{bail, Context as _};
use cargo_util::{paths, Sha256};
use serde::Serialize;
use std::collections::HashSet;
use std::collections::{BTreeMap, BTreeSet, HashMap};
use std::fs::{self, File, OpenOptions};
use std::io::{Read, Write};
use std::path::{Path, PathBuf};

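/// Options controlling how `cargo vendor` runs: whether stale entries in the
/// destination are kept (`no_delete`), whether directory names always carry a
/// version suffix (`versioned_dirs`), where the vendored sources go
/// (`destination`), and any extra workspace manifests to sync (`extra`).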
pub struct VendorOptions<'a> {
    pub no_delete: bool,
    pub versioned_dirs: bool,
    pub destination: &'a Path,
    pub extra: Vec<PathBuf>,
}

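/// Entry point for `cargo vendor`: resolves the current workspace (plus any
/// extra workspaces passed via `opts.extra`), copies every non-path dependency
/// into `opts.destination` through `sync`, and then prints the `[source]`
/// replacement snippet that makes Cargo use the vendored copies.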
pub fn vendor(ws: &Workspace<'_>, opts: &VendorOptions<'_>) -> CargoResult<()> {
    let config = ws.config();
    let mut extra_workspaces = Vec::new();
    for extra in opts.extra.iter() {
        let extra = config.cwd().join(extra);
        let ws = Workspace::new(&extra, config)?;
        extra_workspaces.push(ws);
    }
    let workspaces = extra_workspaces.iter().chain(Some(ws)).collect::<Vec<_>>();
    let vendor_config = sync(config, &workspaces, opts).with_context(|| "failed to sync")?;

    if config.shell().verbosity() != Verbosity::Quiet {
        crate::drop_eprint!(
            config,
            "To use vendored sources, add this to your .cargo/config.toml for this project:\n\n"
        );
        crate::drop_print!(config, "{}", &toml::to_string(&vendor_config).unwrap());
    }

    Ok(())
}

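/// The `[source]` replacement tables that `vendor` prints for the user to add
/// to `.cargo/config.toml`. With only crates.io dependencies and the default
/// `vendor` destination, the serialized output looks roughly like:
///
///     [source.crates-io]
///     replace-with = "vendored-sources"
///
///     [source.vendored-sources]
///     directory = "vendor"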
#[derive(Serialize)]
struct VendorConfig {
    source: BTreeMap<String, VendorSource>,
}

#[derive(Serialize)]
#[serde(rename_all = "lowercase", untagged)]
enum VendorSource {
    Directory {
        directory: PathBuf,
    },
    Registry {
        registry: Option<String>,
        #[serde(rename = "replace-with")]
        replace_with: String,
    },
    Git {
        git: String,
        branch: Option<String>,
        tag: Option<String>,
        rev: Option<String>,
        #[serde(rename = "replace-with")]
        replace_with: String,
    },
}

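/// Does the actual work of vendoring: wipes Cargo's unpacked copies of the
/// crates (a workaround for rust-lang/cargo#5956), downloads every non-path
/// dependency of the given workspaces, copies each one into the destination
/// directory alongside a `.cargo-checksum.json`, and returns the
/// source-replacement configuration describing the result.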
fn sync(
    config: &Config,
    workspaces: &[&Workspace<'_>],
    opts: &VendorOptions<'_>,
) -> CargoResult<VendorConfig> {
    let canonical_destination = opts.destination.canonicalize();
    let canonical_destination = canonical_destination.as_deref().unwrap_or(opts.destination);

    paths::create_dir_all(&canonical_destination)?;
    let mut to_remove = HashSet::new();
    if !opts.no_delete {
        for entry in canonical_destination.read_dir()? {
            let entry = entry?;
            if !entry
                .file_name()
                .to_str()
                .map_or(false, |s| s.starts_with('.'))
            {
                to_remove.insert(entry.path());
            }
        }
    }

    // First up attempt to work around rust-lang/cargo#5956. Apparently build
    // artifacts sprout up in Cargo's global cache for whatever reason, although
    // it's unclear what tool is causing these issues at this time. For now we
    // apply a heavy-hammer approach which is to delete Cargo's unpacked version
    // of each crate to start off with. After we do this we'll re-resolve and
    // redownload again, which should trigger Cargo to re-extract all the
    // crates.
    //
    // Note that errors are largely ignored here as this is a best-effort
    // attempt. If anything fails here we basically just move on to the next
    // crate to work with.
    for ws in workspaces {
        let (packages, resolve) =
            ops::resolve_ws(ws).with_context(|| "failed to load pkg lockfile")?;

        packages
            .get_many(resolve.iter())
            .with_context(|| "failed to download packages")?;

        for pkg in resolve.iter() {
            // Don't delete actual source code!
            if pkg.source_id().is_path() {
                if let Ok(path) = pkg.source_id().url().to_file_path() {
                    if let Ok(path) = path.canonicalize() {
                        to_remove.remove(&path);
                    }
                }
                continue;
            }
            if pkg.source_id().is_git() {
                continue;
            }
            if let Ok(pkg) = packages.get_one(pkg) {
                drop(fs::remove_dir_all(pkg.manifest_path().parent().unwrap()));
            }
        }
    }

    let mut checksums = HashMap::new();
    let mut ids = BTreeMap::new();

    // Next up let's actually download all crates and start storing internal
    // tables about them.
    for ws in workspaces {
        let (packages, resolve) =
            ops::resolve_ws(ws).with_context(|| "failed to load pkg lockfile")?;

        packages
            .get_many(resolve.iter())
            .with_context(|| "failed to download packages")?;

        for pkg in resolve.iter() {
            // No need to vendor path crates since they're already in the
            // repository
            if pkg.source_id().is_path() {
                continue;
            }
            ids.insert(
                pkg,
                packages
                    .get_one(pkg)
                    .with_context(|| "failed to fetch package")?
                    .clone(),
            );

            checksums.insert(pkg, resolve.checksums().get(&pkg).cloned());
        }
    }

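    // Record which source every (name, version) pair came from, both to reject
    // the same version vendored from two different sources and to find the
    // newest version of each crate (the only one eligible for an unsuffixed
    // directory name below).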
    let mut versions = HashMap::new();
    for id in ids.keys() {
        let map = versions.entry(id.name()).or_insert_with(BTreeMap::default);
        if let Some(prev) = map.get(&id.version()) {
            bail!(
                "found duplicate version of package `{} v{}` \
                 vendored from two sources:\n\
                 \n\
                 \tsource 1: {}\n\
                 \tsource 2: {}",
                id.name(),
                id.version(),
                prev,
                id.source_id()
            );
        }
        map.insert(id.version(), id.source_id());
    }

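    // Copy each vendored package into the destination: pick a directory name
    // (optionally suffixed with the version), skip versioned directories that
    // already have a `.cargo-checksum.json`, copy the files, and write the
    // checksum metadata next to them.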
    let mut sources = BTreeSet::new();
    let mut tmp_buf = [0; 64 * 1024];
    for (id, pkg) in ids.iter() {
        // Next up, copy it to the vendor directory
        let src = pkg
            .manifest_path()
            .parent()
            .expect("manifest_path should point to a file");
        let max_version = *versions[&id.name()].iter().rev().next().unwrap().0;
        let dir_has_version_suffix = opts.versioned_dirs || id.version() != max_version;
        let dst_name = if dir_has_version_suffix {
            // Eg vendor/futures-0.1.13
            format!("{}-{}", id.name(), id.version())
        } else {
            // Eg vendor/futures
            id.name().to_string()
        };

        sources.insert(id.source_id());
        let dst = canonical_destination.join(&dst_name);
        to_remove.remove(&dst);
        let cksum = dst.join(".cargo-checksum.json");
        if dir_has_version_suffix && cksum.exists() {
            // Always re-copy directory without version suffix in case the version changed
            continue;
        }

        config.shell().status(
            "Vendoring",
            &format!("{} ({}) to {}", id, src.to_string_lossy(), dst.display()),
        )?;

        let _ = fs::remove_dir_all(&dst);
        let pathsource = PathSource::new(src, id.source_id(), config);
        let paths = pathsource.list_files(pkg)?;
        let mut map = BTreeMap::new();
        cp_sources(src, &paths, &dst, &mut map, &mut tmp_buf)
            .with_context(|| format!("failed to copy over vendored sources for: {}", id))?;

        // Finally, emit the metadata about this package
        let json = serde_json::json!({
            "package": checksums.get(id),
            "files": map,
        });

        paths::write(&cksum, json.to_string())?;
    }

    for path in to_remove {
        if path.is_dir() {
            paths::remove_dir_all(&path)?;
        } else {
            paths::remove_file(&path)?;
        }
    }

    // add our vendored source
    let mut config = BTreeMap::new();

    let merged_source_name = "vendored-sources";
    config.insert(
        merged_source_name.to_string(),
        VendorSource::Directory {
            directory: opts.destination.to_path_buf(),
        },
    );

    // replace original sources with vendor
    for source_id in sources {
        let name = if source_id.is_default_registry() {
            "crates-io".to_string()
        } else {
            source_id.url().to_string()
        };

        let source = if source_id.is_default_registry() {
            VendorSource::Registry {
                registry: None,
                replace_with: merged_source_name.to_string(),
            }
        } else if source_id.is_remote_registry() {
            let registry = source_id.url().to_string();
            VendorSource::Registry {
                registry: Some(registry),
                replace_with: merged_source_name.to_string(),
            }
        } else if source_id.is_git() {
            let mut branch = None;
            let mut tag = None;
            let mut rev = None;
            if let Some(reference) = source_id.git_reference() {
                match *reference {
                    GitReference::Branch(ref b) => branch = Some(b.clone()),
                    GitReference::Tag(ref t) => tag = Some(t.clone()),
                    GitReference::Rev(ref r) => rev = Some(r.clone()),
                    GitReference::DefaultBranch => {}
                }
            }
            VendorSource::Git {
                git: source_id.url().to_string(),
                branch,
                tag,
                rev,
                replace_with: merged_source_name.to_string(),
            }
        } else {
            panic!("Invalid source ID: {}", source_id)
        };
        config.insert(name, source);
    }

    Ok(VendorConfig { source: config })
}

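/// Copies the listed files from `src` into `dst`, skipping VCS metadata,
/// `.cargo-ok`, and patch `.orig`/`.rej` leftovers, and records the SHA-256 of
/// every copied file in `cksums` keyed by its forward-slash relative path.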
fn cp_sources(
    src: &Path,
    paths: &[PathBuf],
    dst: &Path,
    cksums: &mut BTreeMap<String, String>,
    tmp_buf: &mut [u8],
) -> CargoResult<()> {
    for p in paths {
        let relative = p.strip_prefix(&src).unwrap();

        match relative.to_str() {
            // Skip git config files as they're not relevant to builds most of
            // the time and if we respect them (e.g. in git) then it'll
            // probably mess with the checksums when a vendor dir is checked
            // into someone else's source control
            Some(".gitattributes") | Some(".gitignore") | Some(".git") => continue,

            // Temporary Cargo files
            Some(".cargo-ok") => continue,

            // Skip patch-style orig/rej files. Published crates on crates.io
            // have `Cargo.toml.orig` which we don't want to use here and
            // otherwise these are rarely used as part of the build process.
            Some(filename) => {
                if filename.ends_with(".orig") || filename.ends_with(".rej") {
                    continue;
                }
            }
            _ => {}
        };

        // Join pathname components individually to make sure that the joined
        // path uses the correct directory separators everywhere, since
        // `relative` may use Unix-style and `dst` may require Windows-style
        // backslashes.
        let dst = relative
            .iter()
            .fold(dst.to_owned(), |acc, component| acc.join(&component));

        paths::create_dir_all(dst.parent().unwrap())?;

        let cksum = copy_and_checksum(p, &dst, tmp_buf)?;
        cksums.insert(relative.to_str().unwrap().replace("\\", "/"), cksum);
    }
    Ok(())
}

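/// Streams `src_path` into `dst_path` through `buf`, preserving the source's
/// Unix permission bits where available, and returns the hex-encoded SHA-256
/// of the copied contents.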
fn copy_and_checksum(src_path: &Path, dst_path: &Path, buf: &mut [u8]) -> CargoResult<String> {
    let mut src = File::open(src_path).with_context(|| format!("failed to open {:?}", src_path))?;
    let mut dst_opts = OpenOptions::new();
    dst_opts.write(true).create(true).truncate(true);
    #[cfg(unix)]
    {
        use std::os::unix::fs::{MetadataExt, OpenOptionsExt};
        let src_metadata = src
            .metadata()
            .with_context(|| format!("failed to stat {:?}", src_path))?;
        dst_opts.mode(src_metadata.mode());
    }
    let mut dst = dst_opts
        .open(dst_path)
        .with_context(|| format!("failed to create {:?}", dst_path))?;
    // Not going to bother setting mode on pre-existing files, since there
    // shouldn't be any under normal conditions.
    let mut cksum = Sha256::new();
    loop {
        let n = src
            .read(buf)
            .with_context(|| format!("failed to read from {:?}", src_path))?;
        if n == 0 {
            break Ok(cksum.finish_hex());
        }
        let data = &buf[..n];
        cksum.update(data);
        dst.write_all(data)
            .with_context(|| format!("failed to write to {:?}", dst_path))?;
    }
}