// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! Script to check the validity of `href` links in our HTML documentation.
//!
//! In the past we've been quite prone to introducing broken links, as most of
//! them are added manually rather than automatically. As files move over time
//! or APIs change, old links become stale or broken. The purpose of this
//! script is to check all relative links in our documentation to make sure
//! they actually point to a valid place.
//!
//! Currently this doesn't do any real HTML parsing or anything fancy like
//! that; it just uses a simple "regex"-style scan to find `href` and `id`
//! attributes. These values are then translated to file paths if possible and
//! the destination is asserted to exist.
//!
//! A few whitelisted exceptions are allowed, as there are known bugs in
//! rustdoc, but this should catch the majority of "broken link" cases.
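//!
//! The only command-line argument is the root of the generated documentation
//! to check (see `main` below); for example, a doc output directory such as
//! `build/<target>/doc` (that path is only illustrative).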

use std::env;
use std::fs::File;
use std::io::prelude::*;
use std::path::{Path, PathBuf, Component};
use std::collections::{HashMap, HashSet};
use std::collections::hash_map::Entry;

use Redirect::*;

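/// Unwrap a `Result`, panicking with the stringified expression and the error
/// value if it is an `Err`.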
macro_rules! t {
    ($e:expr) => (match $e {
        Ok(e) => e,
        Err(e) => panic!("{} failed with {:?}", stringify!($e), e),
    })
}

fn main() {
    let docs = env::args_os().nth(1).unwrap();
    let docs = env::current_dir().unwrap().join(docs);
    let mut errors = false;
    walk(&mut HashMap::new(), &docs, &docs, &mut errors);
    if errors {
        panic!("found some broken links");
    }
}

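/// Ways in which loading a file for checking can fail.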
#[derive(Debug)]
pub enum LoadError {
    IOError(std::io::Error),
    BrokenRedirect(PathBuf, std::io::Error),
    IsRedirect,
}

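/// How `load_file` should treat rustdoc redirect pages: refuse them outright
/// (`SkipRedirect`) or follow them, tracking whether the current file was
/// itself reached via a redirect (`FromRedirect`).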
enum Redirect {
    SkipRedirect,
    FromRedirect(bool),
}

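/// Cached information about a single HTML file: its raw source and the set of
/// `id` anchors defined in it.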
struct FileEntry {
    source: String,
    ids: HashSet<String>,
}

type Cache = HashMap<PathBuf, FileEntry>;

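/// Percent-encode the handful of special characters that may appear in an
/// `id` or URL fragment, e.g. `small_url_encode("Vec<T>")` yields
/// `"Vec%3CT%3E"`.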
fn small_url_encode(s: &str) -> String {
    s.replace("<", "%3C")
     .replace(">", "%3E")
     .replace(" ", "%20")
     .replace("?", "%3F")
     .replace("'", "%27")
     .replace("&", "%26")
     .replace(",", "%2C")
     .replace(":", "%3A")
     .replace(";", "%3B")
     .replace("[", "%5B")
     .replace("]", "%5D")
     .replace("\"", "%22")
}

impl FileEntry {
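    /// Populate `self.ids` with every `id` attribute found in `contents`,
    /// reporting duplicate ids as errors. Each id is also stored in its
    /// percent-encoded form.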
    fn parse_ids(&mut self, file: &Path, contents: &str, errors: &mut bool) {
        if self.ids.is_empty() {
            with_attrs_in_source(contents, " id", |fragment, i, _| {
                let frag = fragment.trim_left_matches("#").to_owned();
                let encoded = small_url_encode(&frag);
                if !self.ids.insert(frag) {
                    *errors = true;
                    println!("{}:{}: id is not unique: `{}`", file.display(), i, fragment);
                }
                // Just in case, we also add the encoded id.
                self.ids.insert(encoded);
            });
        }
    }
}

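/// Recursively walk `dir`, checking every file found; once a file has been
/// checked its cached source is dropped to reduce memory usage.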
fn walk(cache: &mut Cache, root: &Path, dir: &Path, errors: &mut bool) {
    for entry in t!(dir.read_dir()).map(|e| t!(e)) {
        let path = entry.path();
        let kind = t!(entry.file_type());
        if kind.is_dir() {
            walk(cache, root, &path, errors);
        } else {
            let pretty_path = check(cache, root, &path, errors);
            if let Some(pretty_path) = pretty_path {
                let entry = cache.get_mut(&pretty_path).unwrap();
                // we don't need the source anymore,
                // so drop to reduce memory-usage
                entry.source = String::new();
            }
        }
    }
}

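/// Check a single HTML file: record its `id` anchors, then validate every
/// relative `href` it contains, flagging broken links, absolute paths,
/// directory links, broken redirects, and missing fragments. Returns the
/// root-relative path of the file if it was actually checked.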
fn check(cache: &mut Cache,
         root: &Path,
         file: &Path,
         errors: &mut bool)
         -> Option<PathBuf> {
    // Ignore non-HTML files.
    if file.extension().and_then(|s| s.to_str()) != Some("html") {
        return None;
    }

    // Unfortunately we're not 100% full of valid links today, so we need a few
    // whitelists to get this past `make check`.
    // FIXME(#32129)
    if file.ends_with("std/string/struct.String.html") ||
       file.ends_with("interpret/struct.ValTy.html") ||
       file.ends_with("symbol/struct.InternedString.html") ||
       file.ends_with("ast/struct.ThinVec.html") ||
       file.ends_with("util/struct.ThinVec.html") ||
       file.ends_with("util/struct.RcSlice.html") ||
       file.ends_with("layout/struct.TyLayout.html") ||
       file.ends_with("humantime/struct.Timestamp.html") ||
       file.ends_with("log/index.html") ||
       file.ends_with("ty/struct.Slice.html") ||
       file.ends_with("ty/enum.Attributes.html") ||
       file.ends_with("ty/struct.SymbolName.html") {
        return None;
    }
    // FIXME(#32553)
    if file.ends_with("string/struct.String.html") {
        return None;
    }
    // FIXME(#32130)
    if file.ends_with("btree_set/struct.BTreeSet.html") ||
       file.ends_with("struct.BTreeSet.html") ||
       file.ends_with("btree_map/struct.BTreeMap.html") ||
       file.ends_with("hash_map/struct.HashMap.html") ||
       file.ends_with("hash_set/struct.HashSet.html") ||
       file.ends_with("sync/struct.Lrc.html") ||
       file.ends_with("sync/struct.RwLock.html") {
        return None;
    }

    let res = load_file(cache, root, file, SkipRedirect);
    let (pretty_file, contents) = match res {
        Ok(res) => res,
        Err(_) => return None,
    };
    {
        cache.get_mut(&pretty_file)
             .unwrap()
             .parse_ids(&pretty_file, &contents, errors);
    }

    // Search for anything that's the regex 'href[ ]*=[ ]*".*?"'
    with_attrs_in_source(&contents, " href", |url, i, base| {
        // Ignore external URLs
        if url.starts_with("http:") || url.starts_with("https:") ||
           url.starts_with("javascript:") || url.starts_with("ftp:") ||
           url.starts_with("irc:") || url.starts_with("data:") {
            return;
        }
        let mut parts = url.splitn(2, "#");
        let url = parts.next().unwrap();
        let fragment = parts.next();
        let mut parts = url.splitn(2, "?");
        let url = parts.next().unwrap();

        // Once we've plucked out the URL, parse it using our base url and
        // then try to extract a file path.
        let mut path = file.to_path_buf();
        if !base.is_empty() || !url.is_empty() {
            path.pop();
            for part in Path::new(base).join(url).components() {
                match part {
                    Component::Prefix(_) |
                    Component::RootDir => {
                        // Avoid absolute paths as they make the docs not
                        // relocatable by making assumptions on where the docs
                        // are hosted relative to the site root.
                        *errors = true;
                        println!("{}:{}: absolute path - {}",
                                 pretty_file.display(),
                                 i + 1,
                                 Path::new(base).join(url).display());
                        return;
                    }
                    Component::CurDir => {}
                    Component::ParentDir => { path.pop(); }
                    Component::Normal(s) => { path.push(s); }
                }
            }
        }

        // Alright, if we've found a file name then this file had better
        // exist! If it doesn't then we register and print an error.
        if path.exists() {
            if path.is_dir() {
                // Links to directories show as directory listings when viewing
                // the docs offline so it's best to avoid them.
                *errors = true;
                let pretty_path = path.strip_prefix(root).unwrap_or(&path);
                println!("{}:{}: directory link - {}",
                         pretty_file.display(),
                         i + 1,
                         pretty_path.display());
                return;
            }
            if let Some(extension) = path.extension() {
                // Ignore non-HTML files.
                if extension != "html" {
                    return;
                }
            }
            let res = load_file(cache, root, &path, FromRedirect(false));
            let (pretty_path, contents) = match res {
                Ok(res) => res,
                Err(LoadError::IOError(err)) => {
                    panic!("error loading {}: {}", path.display(), err);
                }
                Err(LoadError::BrokenRedirect(target, _)) => {
                    *errors = true;
                    println!("{}:{}: broken redirect to {}",
                             pretty_file.display(),
                             i + 1,
                             target.display());
                    return;
                }
                Err(LoadError::IsRedirect) => unreachable!(),
            };

            if let Some(ref fragment) = fragment {
                // Fragments like `#1-6` are most likely line numbers to be
                // interpreted by javascript, so we're ignoring these
                if fragment.splitn(2, '-')
                           .all(|f| f.chars().all(|c| c.is_numeric())) {
                    return;
                }

                // These appear to be broken in mdbook right now?
                if fragment.starts_with("-") {
                    return;
                }

                let entry = &mut cache.get_mut(&pretty_path).unwrap();
                entry.parse_ids(&pretty_path, &contents, errors);

                if !entry.ids.contains(*fragment) {
                    *errors = true;
                    print!("{}:{}: broken link fragment ",
                           pretty_file.display(),
                           i + 1);
                    println!("`#{}` pointing to `{}`", fragment, pretty_path.display());
                };
            }
        } else {
            *errors = true;
            print!("{}:{}: broken link - ", pretty_file.display(), i + 1);
            let pretty_path = path.strip_prefix(root).unwrap_or(&path);
            println!("{}", pretty_path.display());
        }
    });
    Some(pretty_file)
}

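/// Load `file` through the cache, following rustdoc redirect pages according
/// to `redirect`, and return its root-relative path together with its
/// contents.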
fn load_file(cache: &mut Cache,
             root: &Path,
             file: &Path,
             redirect: Redirect)
             -> Result<(PathBuf, String), LoadError> {
    let mut contents = String::new();
    let pretty_file = PathBuf::from(file.strip_prefix(root).unwrap_or(&file));

    let maybe_redirect = match cache.entry(pretty_file.clone()) {
        Entry::Occupied(entry) => {
            contents = entry.get().source.clone();
            None
        }
        Entry::Vacant(entry) => {
            let mut fp = File::open(file).map_err(|err| {
                if let FromRedirect(true) = redirect {
                    LoadError::BrokenRedirect(file.to_path_buf(), err)
                } else {
                    LoadError::IOError(err)
                }
            })?;
            fp.read_to_string(&mut contents).map_err(|err| LoadError::IOError(err))?;

            let maybe = maybe_redirect(&contents);
            if maybe.is_some() {
                if let SkipRedirect = redirect {
                    return Err(LoadError::IsRedirect);
                }
            } else {
                entry.insert(FileEntry {
                    source: contents.clone(),
                    ids: HashSet::new(),
                });
            }
            maybe
        }
    };
    match maybe_redirect.map(|url| file.parent().unwrap().join(url)) {
        Some(redirect_file) => {
            load_file(cache, root, &redirect_file, FromRedirect(true))
        }
        None => Ok((pretty_file, contents)),
    }
}

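/// If `source` looks like a rustdoc-generated redirect page, return the URL it
/// redirects to.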
fn maybe_redirect(source: &str) -> Option<String> {
    const REDIRECT: &'static str = "<p>Redirecting to <a href=";

    let mut lines = source.lines();
    let redirect_line = match lines.nth(6) {
        Some(l) => l,
        None => return None,
    };

    redirect_line.find(REDIRECT).map(|i| {
        let rest = &redirect_line[(i + REDIRECT.len() + 1)..];
        let pos_quote = rest.find('"').unwrap();
        rest[..pos_quote].to_owned()
    })
}

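/// Scan `contents` line by line for occurrences of `attr` (e.g. `" href"` or
/// `" id"`), extract each quoted attribute value, and call `f` with the value,
/// the zero-based line index, and the current `<base>` URL (empty if none).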
fn with_attrs_in_source<F: FnMut(&str, usize, &str)>(contents: &str, attr: &str, mut f: F) {
    let mut base = "";
    for (i, mut line) in contents.lines().enumerate() {
        while let Some(j) = line.find(attr) {
            let rest = &line[j + attr.len()..];
            // The base tag should always be the first link in the document so
            // we can get away with using one pass.
            let is_base = line[..j].ends_with("<base");
            line = rest;
            let pos_equals = match rest.find("=") {
                Some(i) => i,
                None => continue,
            };
            if rest[..pos_equals].trim_left_matches(" ") != "" {
                continue;
            }

            let rest = &rest[pos_equals + 1..];

            let pos_quote = match rest.find(&['"', '\''][..]) {
                Some(i) => i,
                None => continue,
            };
            let quote_delim = rest.as_bytes()[pos_quote] as char;

            if rest[..pos_quote].trim_left_matches(" ") != "" {
                continue;
            }
            let rest = &rest[pos_quote + 1..];
            let url = match rest.find(quote_delim) {
                Some(i) => &rest[..i],
                None => continue,
            };
            if is_base {
                base = url;
                continue;
            }
            f(url, i, base)
        }
    }
}