//! Script to check the validity of `href` links in our HTML documentation.
//!
//! In the past we've been quite prone to writing broken links, as most of them
//! are added manually rather than automatically. As files move over time or
//! APIs change, old links become stale or broken. The purpose of this script
//! is to check all relative links in our documentation to make sure they
//! actually point to a valid place.
//!
//! Currently this doesn't actually do any HTML parsing or anything fancy like
//! that; it just has a simple "regex" to search for `href` and `id` tags.
//! These values are then translated to file URLs if possible, and then the
//! destination is asserted to exist.
//!
//! A few exceptions are allowed as there are known bugs in rustdoc, but this
//! should catch the majority of "broken link" cases.
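//!
//! Usage: pass the path to the generated documentation as the first
//! command-line argument; the script walks it recursively and panics if any
//! broken links are found.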

use std::collections::hash_map::Entry;
use std::collections::{HashMap, HashSet};
use std::env;
use std::fs;
use std::path::{Component, Path, PathBuf};
use std::rc::Rc;

use crate::Redirect::*;

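// Unwraps a `Result`, panicking with the stringified expression and the error
// on failure.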
macro_rules! t {
    ($e:expr) => {
        match $e {
            Ok(e) => e,
            Err(e) => panic!("{} failed with {:?}", stringify!($e), e),
        }
    };
}

fn main() {
    let docs = env::args_os().nth(1).unwrap();
    let docs = env::current_dir().unwrap().join(docs);
    let mut errors = false;
    walk(&mut HashMap::new(), &docs, &docs, &mut errors);
    if errors {
        panic!("found some broken links");
    }
}

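// Errors that can occur while loading an HTML file.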
#[derive(Debug)]
pub enum LoadError {
    IOError(std::io::Error),
    BrokenRedirect(PathBuf, std::io::Error),
    IsRedirect,
}

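// How redirect pages are treated when loading a file.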
enum Redirect {
    SkipRedirect,
    FromRedirect(bool),
}

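// Cached source and `id` attributes of a single HTML file.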
struct FileEntry {
    source: Rc<String>,
    ids: HashSet<String>,
}

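// Maps root-relative ("pretty") paths to their cached file entries.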
type Cache = HashMap<PathBuf, FileEntry>;

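// Percent-encodes the handful of characters that show up in generated ids, so
// a fragment can be looked up in either its raw or encoded form.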
fn small_url_encode(s: &str) -> String {
    s.replace("<", "%3C")
        .replace(">", "%3E")
        .replace(" ", "%20")
        .replace("?", "%3F")
        .replace("'", "%27")
        .replace("&", "%26")
        .replace(",", "%2C")
        .replace(":", "%3A")
        .replace(";", "%3B")
        .replace("[", "%5B")
        .replace("]", "%5D")
        .replace("\"", "%22")
}

impl FileEntry {
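    // Collects every `id` attribute in `contents` (only on the first call per
    // file), reporting duplicate ids as errors.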
    fn parse_ids(&mut self, file: &Path, contents: &str, errors: &mut bool) {
        if self.ids.is_empty() {
            with_attrs_in_source(contents, " id", |fragment, i, _| {
                let frag = fragment.trim_start_matches("#").to_owned();
                let encoded = small_url_encode(&frag);
                if !self.ids.insert(frag) {
                    *errors = true;
                    println!("{}:{}: id is not unique: `{}`", file.display(), i, fragment);
                }
                // Just in case, we also add the encoded id.
                self.ids.insert(encoded);
            });
        }
    }
}

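// Recursively walks `dir`, checking every file and dropping each checked
// file's cached source to reduce memory usage.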
fn walk(cache: &mut Cache, root: &Path, dir: &Path, errors: &mut bool) {
    for entry in t!(dir.read_dir()).map(|e| t!(e)) {
        let path = entry.path();
        let kind = t!(entry.file_type());
        if kind.is_dir() {
            walk(cache, root, &path, errors);
        } else {
            let pretty_path = check(cache, root, &path, errors);
            if let Some(pretty_path) = pretty_path {
                let entry = cache.get_mut(&pretty_path).unwrap();
                // we don't need the source anymore,
                // so drop to reduce memory-usage
                entry.source = Rc::new(String::new());
            }
        }
    }
}

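// Checks a single HTML file, returning its pretty (root-relative) path if it
// was loaded into the cache, or `None` if it was skipped.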
fn check(cache: &mut Cache, root: &Path, file: &Path, errors: &mut bool) -> Option<PathBuf> {
    // Ignore non-HTML files.
    if file.extension().and_then(|s| s.to_str()) != Some("html") {
        return None;
    }

    // Unfortunately not all of our links are valid today, so we need a few
    // exceptions to get this past `make check`.
    // FIXME(#32129)
    if file.ends_with("std/io/struct.IoSlice.html")
        || file.ends_with("std/string/struct.String.html")
    {
        return None;
    }
    // FIXME(#32553)
    if file.ends_with("alloc/string/struct.String.html") {
        return None;
    }
    // FIXME(#32130)
    if file.ends_with("alloc/collections/btree_map/struct.BTreeMap.html")
        || file.ends_with("alloc/collections/btree_set/struct.BTreeSet.html")
        || file.ends_with("std/collections/btree_map/struct.BTreeMap.html")
        || file.ends_with("std/collections/btree_set/struct.BTreeSet.html")
        || file.ends_with("std/collections/hash_map/struct.HashMap.html")
        || file.ends_with("std/collections/hash_set/struct.HashSet.html")
    {
        return None;
    }

    let res = load_file(cache, root, file, SkipRedirect);
    let (pretty_file, contents) = match res {
        Ok(res) => res,
        Err(_) => return None,
    };
    {
        cache.get_mut(&pretty_file).unwrap().parse_ids(&pretty_file, &contents, errors);
    }

    // Search for anything that's the regex 'href[ ]*=[ ]*".*?"'
    with_attrs_in_source(&contents, " href", |url, i, base| {
        // Ignore external URLs
        if url.starts_with("http:")
            || url.starts_with("https:")
            || url.starts_with("javascript:")
            || url.starts_with("ftp:")
            || url.starts_with("irc:")
            || url.starts_with("data:")
        {
            return;
        }
        let mut parts = url.splitn(2, "#");
        let url = parts.next().unwrap();
        let fragment = parts.next();
        let mut parts = url.splitn(2, "?");
        let url = parts.next().unwrap();

        // Once we've plucked out the URL, parse it using our base url and
        // then try to extract a file path.
        let mut path = file.to_path_buf();
        if !base.is_empty() || !url.is_empty() {
            path.pop();
            for part in Path::new(base).join(url).components() {
                match part {
                    Component::Prefix(_) | Component::RootDir => {
                        // Avoid absolute paths as they make the docs not
                        // relocatable by making assumptions on where the docs
                        // are hosted relative to the site root.
                        *errors = true;
                        println!(
                            "{}:{}: absolute path - {}",
                            pretty_file.display(),
                            i + 1,
                            Path::new(base).join(url).display()
                        );
                        return;
                    }
                    Component::CurDir => {}
                    Component::ParentDir => {
                        path.pop();
                    }
                    Component::Normal(s) => {
                        path.push(s);
                    }
                }
            }
        }

        // Alright, if we've found a file name then this file had better
        // exist! If it doesn't then we register and print an error.
        if path.exists() {
            if path.is_dir() {
                // Links to directories show as directory listings when viewing
                // the docs offline so it's best to avoid them.
                *errors = true;
                let pretty_path = path.strip_prefix(root).unwrap_or(&path);
                println!(
                    "{}:{}: directory link - {}",
                    pretty_file.display(),
                    i + 1,
                    pretty_path.display()
                );
                return;
            }
            if let Some(extension) = path.extension() {
                // Ignore non-HTML files.
                if extension != "html" {
                    return;
                }
            }
            let res = load_file(cache, root, &path, FromRedirect(false));
            let (pretty_path, contents) = match res {
                Ok(res) => res,
                Err(LoadError::IOError(err)) => {
                    panic!("error loading {}: {}", path.display(), err);
                }
                Err(LoadError::BrokenRedirect(target, _)) => {
                    *errors = true;
                    println!(
                        "{}:{}: broken redirect to {}",
                        pretty_file.display(),
                        i + 1,
                        target.display()
                    );
                    return;
                }
                Err(LoadError::IsRedirect) => unreachable!(),
            };

            if let Some(ref fragment) = fragment {
                // Fragments like `#1-6` are most likely line numbers to be
                // interpreted by javascript, so we're ignoring these
                if fragment.splitn(2, '-').all(|f| f.chars().all(|c| c.is_numeric())) {
                    return;
                }

                // These appear to be broken in mdbook right now?
                if fragment.starts_with("-") {
                    return;
                }

                let entry = &mut cache.get_mut(&pretty_path).unwrap();
                entry.parse_ids(&pretty_path, &contents, errors);

                if !entry.ids.contains(*fragment) {
                    *errors = true;
                    print!("{}:{}: broken link fragment ", pretty_file.display(), i + 1);
                    println!("`#{}` pointing to `{}`", fragment, pretty_path.display());
                };
            }
        } else {
            *errors = true;
            print!("{}:{}: broken link - ", pretty_file.display(), i + 1);
            let pretty_path = path.strip_prefix(root).unwrap_or(&path);
            println!("{}", pretty_path.display());
        }
    });
    Some(pretty_file)
}

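// Loads `file` into the cache, following redirect pages unless `SkipRedirect`
// was requested, and returns its pretty path and contents.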
fn load_file(
    cache: &mut Cache,
    root: &Path,
    file: &Path,
    redirect: Redirect,
) -> Result<(PathBuf, Rc<String>), LoadError> {
    let pretty_file = PathBuf::from(file.strip_prefix(root).unwrap_or(&file));

    let (maybe_redirect, contents) = match cache.entry(pretty_file.clone()) {
        Entry::Occupied(entry) => (None, entry.get().source.clone()),
        Entry::Vacant(entry) => {
            let contents = match fs::read_to_string(file) {
                Ok(s) => Rc::new(s),
                Err(err) => {
                    return Err(if let FromRedirect(true) = redirect {
                        LoadError::BrokenRedirect(file.to_path_buf(), err)
                    } else {
                        LoadError::IOError(err)
                    });
                }
            };

            let maybe = maybe_redirect(&contents);
            if maybe.is_some() {
                if let SkipRedirect = redirect {
                    return Err(LoadError::IsRedirect);
                }
            } else {
                entry.insert(FileEntry { source: contents.clone(), ids: HashSet::new() });
            }
            (maybe, contents)
        }
    };
    match maybe_redirect.map(|url| file.parent().unwrap().join(url)) {
        Some(redirect_file) => load_file(cache, root, &redirect_file, FromRedirect(true)),
        None => Ok((pretty_file, contents)),
    }
}

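// Detects a generated redirect page by looking for the "Redirecting to"
// marker on the seventh line of the file, returning the redirect target URL.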
fn maybe_redirect(source: &str) -> Option<String> {
    const REDIRECT: &'static str = "<p>Redirecting to <a href=";

    let mut lines = source.lines();
    let redirect_line = lines.nth(6)?;

    redirect_line.find(REDIRECT).map(|i| {
        let rest = &redirect_line[(i + REDIRECT.len() + 1)..];
        let pos_quote = rest.find('"').unwrap();
        rest[..pos_quote].to_owned()
    })
}

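// Scans `contents` line by line for occurrences of `attr="..."` (or
// single-quoted values), tracking any `<base href>` value and calling
// `f(url, line_index, base)` for every other match.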
fn with_attrs_in_source<F: FnMut(&str, usize, &str)>(contents: &str, attr: &str, mut f: F) {
    let mut base = "";
    for (i, mut line) in contents.lines().enumerate() {
        while let Some(j) = line.find(attr) {
            let rest = &line[j + attr.len()..];
            // The base tag should always be the first link in the document so
            // we can get away with using one pass.
            let is_base = line[..j].ends_with("<base");
            line = rest;
            let pos_equals = match rest.find("=") {
                Some(i) => i,
                None => continue,
            };
            if rest[..pos_equals].trim_start_matches(" ") != "" {
                continue;
            }

            let rest = &rest[pos_equals + 1..];

            let pos_quote = match rest.find(&['"', '\''][..]) {
                Some(i) => i,
                None => continue,
            };
            let quote_delim = rest.as_bytes()[pos_quote] as char;

            if rest[..pos_quote].trim_start_matches(" ") != "" {
                continue;
            }
            let rest = &rest[pos_quote + 1..];
            let url = match rest.find(quote_delim) {
                Some(i) => &rest[..i],
                None => continue,
            };
            if is_base {
                base = url;
                continue;
            }
            f(url, i, base)
        }
    }
}