src/tools/clippy/clippy_lints/src/doc.rs (upstream version 1.66.0+dfsg1)
1 use clippy_utils::attrs::is_doc_hidden;
2 use clippy_utils::diagnostics::{span_lint, span_lint_and_help, span_lint_and_note, span_lint_and_then};
3 use clippy_utils::macros::{is_panic, root_macro_call_first_node};
4 use clippy_utils::source::{first_line_of_span, snippet_with_applicability};
5 use clippy_utils::ty::{implements_trait, is_type_diagnostic_item};
6 use clippy_utils::{is_entrypoint_fn, method_chain_args, return_ty};
7 use if_chain::if_chain;
8 use itertools::Itertools;
9 use rustc_ast::ast::{Async, AttrKind, Attribute, Fn, FnRetTy, ItemKind};
10 use rustc_ast::token::CommentKind;
11 use rustc_data_structures::fx::FxHashSet;
12 use rustc_data_structures::sync::Lrc;
13 use rustc_errors::emitter::EmitterWriter;
14 use rustc_errors::{Applicability, Handler, MultiSpan, SuggestionStyle};
15 use rustc_hir as hir;
16 use rustc_hir::intravisit::{self, Visitor};
17 use rustc_hir::{AnonConst, Expr};
18 use rustc_lint::{LateContext, LateLintPass};
19 use rustc_middle::hir::nested_filter;
20 use rustc_middle::lint::in_external_macro;
21 use rustc_middle::ty;
22 use rustc_parse::maybe_new_parser_from_source_str;
23 use rustc_parse::parser::ForceCollect;
24 use rustc_session::parse::ParseSess;
25 use rustc_session::{declare_tool_lint, impl_lint_pass};
26 use rustc_span::def_id::LocalDefId;
27 use rustc_span::edition::Edition;
28 use rustc_span::source_map::{BytePos, FilePathMapping, SourceMap, Span};
29 use rustc_span::{sym, FileName, Pos};
30 use std::io;
31 use std::ops::Range;
32 use std::thread;
33 use url::Url;
34
35 declare_clippy_lint! {
36 /// ### What it does
37 /// Checks for the presence of `_`, `::` or camel-case words
38 /// outside ticks in documentation.
39 ///
40 /// ### Why is this bad?
41 /// *Rustdoc* supports markdown formatting; `_`, `::` and
42 /// camel-case probably indicate code which should be included between
43 /// ticks. `_` can also be used for emphasis in markdown; this lint tries to
44 /// account for that.
45 ///
46 /// ### Known problems
47 /// Lots of bad docs won’t be fixed; what the lint checks
48 /// for is limited, and there are still false positives. HTML elements and their
49 /// content are not linted.
50 ///
51 /// In addition, when writing documentation comments, including `[]` brackets
52 /// inside link text would trip the parser. Therefore, documenting a link with
53 /// `[`SmallVec<[T; INLINE_CAPACITY]>`]` and then [`SmallVec<[T; INLINE_CAPACITY]>`]: SmallVec
54 /// would fail.
55 ///
56 /// ### Examples
57 /// ```rust
58 /// /// Do something with the foo_bar parameter. See also
59 /// /// that::other::module::foo.
60 /// // ^ `foo_bar` and `that::other::module::foo` should be ticked.
61 /// fn doit(foo_bar: usize) {}
62 /// ```
63 ///
64 /// ```rust
65 /// // Link text with `[]` brackets should be written as follows:
66 /// /// Consume the array and return the inner
67 /// /// [`SmallVec<[T; INLINE_CAPACITY]>`][SmallVec].
68 /// /// [SmallVec]: SmallVec
69 /// fn main() {}
70 /// ```
71 #[clippy::version = "pre 1.29.0"]
72 pub DOC_MARKDOWN,
73 pedantic,
74 "presence of `_`, `::` or camel-case outside backticks in documentation"
75 }
76
77 declare_clippy_lint! {
78 /// ### What it does
79 /// Checks for the doc comments of publicly visible
80 /// unsafe functions and warns if there is no `# Safety` section.
81 ///
82 /// ### Why is this bad?
83 /// Unsafe functions should document their safety
84 /// preconditions, so that users can be sure they are using them safely.
85 ///
86 /// ### Examples
87 /// ```rust
88 ///# type Universe = ();
89 /// /// This function should really be documented
90 /// pub unsafe fn start_apocalypse(u: &mut Universe) {
91 /// unimplemented!();
92 /// }
93 /// ```
94 ///
95 /// At least write a line about safety:
96 ///
97 /// ```rust
98 ///# type Universe = ();
99 /// /// # Safety
100 /// ///
101 /// /// This function should not be called before the horsemen are ready.
102 /// pub unsafe fn start_apocalypse(u: &mut Universe) {
103 /// unimplemented!();
104 /// }
105 /// ```
106 #[clippy::version = "1.39.0"]
107 pub MISSING_SAFETY_DOC,
108 style,
109 "`pub unsafe fn` without `# Safety` docs"
110 }
111
112 declare_clippy_lint! {
113 /// ### What it does
114 /// Checks the doc comments of publicly visible functions that
115 /// return a `Result` type and warns if there is no `# Errors` section.
116 ///
117 /// ### Why is this bad?
118 /// Documenting the type of errors that can be returned from a
119 /// function can help callers write code to handle the errors appropriately.
120 ///
121 /// ### Examples
122 /// Since the following function returns a `Result`, it has an `# Errors` section in
123 /// its doc comment:
124 ///
125 /// ```rust
126 ///# use std::io;
127 /// /// # Errors
128 /// ///
129 /// /// Will return `Err` if `filename` does not exist or the user does not have
130 /// /// permission to read it.
131 /// pub fn read(filename: String) -> io::Result<String> {
132 /// unimplemented!();
133 /// }
134 /// ```
135 #[clippy::version = "1.41.0"]
136 pub MISSING_ERRORS_DOC,
137 pedantic,
138 "`pub fn` returns `Result` without `# Errors` in doc comment"
139 }
140
141 declare_clippy_lint! {
142 /// ### What it does
143 /// Checks the doc comments of publicly visible functions that
144 /// may panic and warns if there is no `# Panics` section.
145 ///
146 /// ### Why is this bad?
147 /// Documenting the scenarios in which panicking occurs
148 /// can help callers who do not want to panic to avoid those situations.
149 ///
150 /// ### Examples
151 /// Since the following function may panic, it has a `# Panics` section in
152 /// its doc comment:
153 ///
154 /// ```rust
155 /// /// # Panics
156 /// ///
157 /// /// Will panic if y is 0
158 /// pub fn divide_by(x: i32, y: i32) -> i32 {
159 /// if y == 0 {
160 /// panic!("Cannot divide by 0")
161 /// } else {
162 /// x / y
163 /// }
164 /// }
165 /// ```
166 #[clippy::version = "1.51.0"]
167 pub MISSING_PANICS_DOC,
168 pedantic,
169 "`pub fn` may panic without `# Panics` in doc comment"
170 }
171
172 declare_clippy_lint! {
173 /// ### What it does
174 /// Checks for `fn main() { .. }` in doctests
175 ///
176 /// ### Why is this bad?
177 /// The test can be shorter (and likely more readable)
178 /// if the `fn main()` is left implicit.
179 ///
180 /// ### Examples
181 /// ```rust
182 /// /// An example of a doctest with a `main()` function
183 /// ///
184 /// /// # Examples
185 /// ///
186 /// /// ```
187 /// /// fn main() {
188 /// /// // this need not be in an `fn`
189 /// /// }
190 /// /// ```
191 /// fn needless_main() {
192 /// unimplemented!();
193 /// }
194 /// ```
195 #[clippy::version = "1.40.0"]
196 pub NEEDLESS_DOCTEST_MAIN,
197 style,
198 "presence of `fn main() {` in code examples"
199 }
200
201 declare_clippy_lint! {
202 /// ### What it does
203 /// Detects the syntax `['foo']` in documentation comments (notice quotes instead of backticks)
204 /// outside of code blocks.
205 /// ### Why is this bad?
206 /// It is likely a typo when defining an intra-doc link.
207 ///
208 /// ### Example
209 /// ```rust
210 /// /// See also: ['foo']
211 /// fn bar() {}
212 /// ```
213 /// Use instead:
214 /// ```rust
215 /// /// See also: [`foo`]
216 /// fn bar() {}
217 /// ```
218 #[clippy::version = "1.63.0"]
219 pub DOC_LINK_WITH_QUOTES,
220 pedantic,
221 "possible typo for an intra-doc link"
222 }
223
224 #[expect(clippy::module_name_repetitions)]
225 #[derive(Clone)]
226 pub struct DocMarkdown {
227 valid_idents: FxHashSet<String>,
228 in_trait_impl: bool,
229 }
230
231 impl DocMarkdown {
232 pub fn new(valid_idents: FxHashSet<String>) -> Self {
233 Self {
234 valid_idents,
235 in_trait_impl: false,
236 }
237 }
238 }
239
240 impl_lint_pass!(DocMarkdown => [
241 DOC_LINK_WITH_QUOTES,
242 DOC_MARKDOWN,
243 MISSING_SAFETY_DOC,
244 MISSING_ERRORS_DOC,
245 MISSING_PANICS_DOC,
246 NEEDLESS_DOCTEST_MAIN
247 ]);
248
249 impl<'tcx> LateLintPass<'tcx> for DocMarkdown {
250 fn check_crate(&mut self, cx: &LateContext<'tcx>) {
251 let attrs = cx.tcx.hir().attrs(hir::CRATE_HIR_ID);
252 check_attrs(cx, &self.valid_idents, attrs);
253 }
254
255 fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::Item<'_>) {
256 let attrs = cx.tcx.hir().attrs(item.hir_id());
257 let headers = check_attrs(cx, &self.valid_idents, attrs);
258 match item.kind {
259 hir::ItemKind::Fn(ref sig, _, body_id) => {
260 if !(is_entrypoint_fn(cx, item.owner_id.to_def_id()) || in_external_macro(cx.tcx.sess, item.span)) {
261 let body = cx.tcx.hir().body(body_id);
262 let mut fpu = FindPanicUnwrap {
263 cx,
264 typeck_results: cx.tcx.typeck(item.owner_id.def_id),
265 panic_span: None,
266 };
267 fpu.visit_expr(body.value);
268 lint_for_missing_headers(
269 cx,
270 item.owner_id.def_id,
271 item.span,
272 sig,
273 headers,
274 Some(body_id),
275 fpu.panic_span,
276 );
277 }
278 },
279 hir::ItemKind::Impl(impl_) => {
280 self.in_trait_impl = impl_.of_trait.is_some();
281 },
282 hir::ItemKind::Trait(_, unsafety, ..) => {
283 if !headers.safety && unsafety == hir::Unsafety::Unsafe {
284 span_lint(
285 cx,
286 MISSING_SAFETY_DOC,
287 item.span,
288 "docs for unsafe trait missing `# Safety` section",
289 );
290 }
291 },
292 _ => (),
293 }
294 }
295
296 fn check_item_post(&mut self, _cx: &LateContext<'tcx>, item: &'tcx hir::Item<'_>) {
297 if let hir::ItemKind::Impl { .. } = item.kind {
298 self.in_trait_impl = false;
299 }
300 }
301
302 fn check_trait_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::TraitItem<'_>) {
303 let attrs = cx.tcx.hir().attrs(item.hir_id());
304 let headers = check_attrs(cx, &self.valid_idents, attrs);
305 if let hir::TraitItemKind::Fn(ref sig, ..) = item.kind {
306 if !in_external_macro(cx.tcx.sess, item.span) {
307 lint_for_missing_headers(cx, item.owner_id.def_id, item.span, sig, headers, None, None);
308 }
309 }
310 }
311
312 fn check_impl_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::ImplItem<'_>) {
313 let attrs = cx.tcx.hir().attrs(item.hir_id());
314 let headers = check_attrs(cx, &self.valid_idents, attrs);
315 if self.in_trait_impl || in_external_macro(cx.tcx.sess, item.span) {
316 return;
317 }
318 if let hir::ImplItemKind::Fn(ref sig, body_id) = item.kind {
319 let body = cx.tcx.hir().body(body_id);
320 let mut fpu = FindPanicUnwrap {
321 cx,
322 typeck_results: cx.tcx.typeck(item.owner_id.def_id),
323 panic_span: None,
324 };
325 fpu.visit_expr(body.value);
326 lint_for_missing_headers(
327 cx,
328 item.owner_id.def_id,
329 item.span,
330 sig,
331 headers,
332 Some(body_id),
333 fpu.panic_span,
334 );
335 }
336 }
337 }
338
339 fn lint_for_missing_headers<'tcx>(
340 cx: &LateContext<'tcx>,
341 def_id: LocalDefId,
342 span: impl Into<MultiSpan> + Copy,
343 sig: &hir::FnSig<'_>,
344 headers: DocHeaders,
345 body_id: Option<hir::BodyId>,
346 panic_span: Option<Span>,
347 ) {
348 if !cx.effective_visibilities.is_exported(def_id) {
349 return; // Private functions do not require doc comments
350 }
351
352 // do not lint if any parent has `#[doc(hidden)]` attribute (#7347)
353 if cx
354 .tcx
355 .hir()
356 .parent_iter(cx.tcx.hir().local_def_id_to_hir_id(def_id))
357 .any(|(id, _node)| is_doc_hidden(cx.tcx.hir().attrs(id)))
358 {
359 return;
360 }
361
362 if !headers.safety && sig.header.unsafety == hir::Unsafety::Unsafe {
363 span_lint(
364 cx,
365 MISSING_SAFETY_DOC,
366 span,
367 "unsafe function's docs miss `# Safety` section",
368 );
369 }
370 if !headers.panics && panic_span.is_some() {
371 span_lint_and_note(
372 cx,
373 MISSING_PANICS_DOC,
374 span,
375 "docs for function which may panic missing `# Panics` section",
376 panic_span,
377 "first possible panic found here",
378 );
379 }
380 if !headers.errors {
381 let hir_id = cx.tcx.hir().local_def_id_to_hir_id(def_id);
382 if is_type_diagnostic_item(cx, return_ty(cx, hir_id), sym::Result) {
383 span_lint(
384 cx,
385 MISSING_ERRORS_DOC,
386 span,
387 "docs for function returning `Result` missing `# Errors` section",
388 );
389 } else {
390 if_chain! {
391 if let Some(body_id) = body_id;
392 if let Some(future) = cx.tcx.lang_items().future_trait();
393 let typeck = cx.tcx.typeck_body(body_id);
394 let body = cx.tcx.hir().body(body_id);
395 let ret_ty = typeck.expr_ty(body.value);
396 if implements_trait(cx, ret_ty, future, &[]);
397 if let ty::Opaque(_, subs) = ret_ty.kind();
398 if let Some(gen) = subs.types().next();
399 if let ty::Generator(_, subs, _) = gen.kind();
400 if is_type_diagnostic_item(cx, subs.as_generator().return_ty(), sym::Result);
401 then {
402 span_lint(
403 cx,
404 MISSING_ERRORS_DOC,
405 span,
406 "docs for function returning `Result` missing `# Errors` section",
407 );
408 }
409 }
410 }
411 }
412 }
413
414 /// Clean up documentation decoration.
415 ///
416 /// We can't use `rustc_ast::attr::AttributeMethods::with_desugared_doc` or
417 /// `rustc_ast::parse::lexer::comments::strip_doc_comment_decoration` because we
418 /// need to keep track of
419 /// the spans, but this function is inspired by the latter.
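///
/// An illustrative sketch with hypothetical input: for a block doc comment
/// whose body contains the lines `*` and ` * Does a thing.`, each leading `*`
/// is replaced with a space, giving ` ` and `   Does a thing.`, while the
/// returned `Vec` holds one `(line_length + 1, span)` entry per original line
/// so that offsets into the stripped text can later be mapped back to source
/// spans.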
420 #[expect(clippy::cast_possible_truncation)]
421 #[must_use]
422 pub fn strip_doc_comment_decoration(doc: &str, comment_kind: CommentKind, span: Span) -> (String, Vec<(usize, Span)>) {
423 // one-line comments lose their prefix
424 if comment_kind == CommentKind::Line {
425 let mut doc = doc.to_owned();
426 doc.push('\n');
427 let len = doc.len();
428 // +3 skips the opening delimiter
429 return (doc, vec![(len, span.with_lo(span.lo() + BytePos(3)))]);
430 }
431
432 let mut sizes = vec![];
433 let mut contains_initial_stars = false;
434 for line in doc.lines() {
435 let offset = line.as_ptr() as usize - doc.as_ptr() as usize;
436 debug_assert_eq!(offset as u32 as usize, offset);
437 contains_initial_stars |= line.trim_start().starts_with('*');
438 // +1 adds the newline, +3 skips the opening delimiter
439 sizes.push((line.len() + 1, span.with_lo(span.lo() + BytePos(3 + offset as u32))));
440 }
441 if !contains_initial_stars {
442 return (doc.to_string(), sizes);
443 }
444 // remove the initial '*'s if any
445 let mut no_stars = String::with_capacity(doc.len());
446 for line in doc.lines() {
447 let mut chars = line.chars();
448 for c in &mut chars {
449 if c.is_whitespace() {
450 no_stars.push(c);
451 } else {
452 no_stars.push(if c == '*' { ' ' } else { c });
453 break;
454 }
455 }
456 no_stars.push_str(chars.as_str());
457 no_stars.push('\n');
458 }
459
460 (no_stars, sizes)
461 }
462
463 #[derive(Copy, Clone, Default)]
464 struct DocHeaders {
465 safety: bool,
466 errors: bool,
467 panics: bool,
468 }
469
470 fn check_attrs<'a>(cx: &LateContext<'_>, valid_idents: &FxHashSet<String>, attrs: &'a [Attribute]) -> DocHeaders {
471 use pulldown_cmark::{BrokenLink, CowStr, Options};
472 /// We don't want the parser to choke on intra-doc links. Since we don't
473 /// actually care about rendering them, just pretend that all broken links
474 /// point to a fake address.
475 #[expect(clippy::unnecessary_wraps)] // we're following a type signature
476 fn fake_broken_link_callback<'a>(_: BrokenLink<'_>) -> Option<(CowStr<'a>, CowStr<'a>)> {
477 Some(("fake".into(), "fake".into()))
478 }
479
480 let mut doc = String::new();
481 let mut spans = vec![];
482
483 for attr in attrs {
484 if let AttrKind::DocComment(comment_kind, comment) = attr.kind {
485 let (comment, current_spans) = strip_doc_comment_decoration(comment.as_str(), comment_kind, attr.span);
486 spans.extend_from_slice(&current_spans);
487 doc.push_str(&comment);
488 } else if attr.has_name(sym::doc) {
489 // ignore mix of sugared and non-sugared doc
490 // don't trigger the safety, panics, or errors checks
491 return DocHeaders {
492 safety: true,
493 errors: true,
494 panics: true,
495 };
496 }
497 }
498
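// A sketch of the rewrite below, assuming three doc lines of lengths 12, 8
// and 20: `spans` starts as [(12, s0), (8, s1), (20, s2)] (per-line lengths
// from `strip_doc_comment_decoration`) and ends as [(0, s0), (12, s1),
// (20, s2)], i.e. each entry now carries the cumulative start offset of its
// line within `doc`.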
499 let mut current = 0;
500 for &mut (ref mut offset, _) in &mut spans {
501 let offset_copy = *offset;
502 *offset = current;
503 current += offset_copy;
504 }
505
506 if doc.is_empty() {
507 return DocHeaders::default();
508 }
509
510 let mut cb = fake_broken_link_callback;
511
512 let parser =
513 pulldown_cmark::Parser::new_with_broken_link_callback(&doc, Options::empty(), Some(&mut cb)).into_offset_iter();
514 // Iterate over all `Event`s and combine consecutive `Text` events into one
515 let events = parser.coalesce(|previous, current| {
516 use pulldown_cmark::Event::Text;
517
518 let previous_range = previous.1;
519 let current_range = current.1;
520
521 match (previous.0, current.0) {
522 (Text(previous), Text(current)) => {
523 let mut previous = previous.to_string();
524 previous.push_str(&current);
525 Ok((Text(previous.into()), previous_range))
526 },
527 (previous, current) => Err(((previous, previous_range), (current, current_range))),
528 }
529 });
530 check_doc(cx, valid_idents, events, &spans)
531 }
532
533 const RUST_CODE: &[&str] = &["rust", "no_run", "should_panic", "compile_fail"];
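// A few illustrative fence strings (hypothetical doc input): `rust,edition2018`
// marks a block as Rust and records the edition, `ignore` stops it from being
// treated as Rust, and an unrecognized tag such as `text` leaves `is_rust`
// false so the block's contents are never parsed as code.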
534
535 fn check_doc<'a, Events: Iterator<Item = (pulldown_cmark::Event<'a>, Range<usize>)>>(
536 cx: &LateContext<'_>,
537 valid_idents: &FxHashSet<String>,
538 events: Events,
539 spans: &[(usize, Span)],
540 ) -> DocHeaders {
541 // true if a safety header was found
542 use pulldown_cmark::Event::{
543 Code, End, FootnoteReference, HardBreak, Html, Rule, SoftBreak, Start, TaskListMarker, Text,
544 };
545 use pulldown_cmark::Tag::{CodeBlock, Heading, Item, Link, Paragraph};
546 use pulldown_cmark::{CodeBlockKind, CowStr};
547
548 let mut headers = DocHeaders::default();
549 let mut in_code = false;
550 let mut in_link = None;
551 let mut in_heading = false;
552 let mut is_rust = false;
553 let mut edition = None;
554 let mut ticks_unbalanced = false;
555 let mut text_to_check: Vec<(CowStr<'_>, Span)> = Vec::new();
556 let mut paragraph_span = spans.get(0).expect("function isn't called if doc comment is empty").1;
557 for (event, range) in events {
558 match event {
559 Start(CodeBlock(ref kind)) => {
560 in_code = true;
561 if let CodeBlockKind::Fenced(lang) = kind {
562 for item in lang.split(',') {
563 if item == "ignore" {
564 is_rust = false;
565 break;
566 }
567 if let Some(stripped) = item.strip_prefix("edition") {
568 is_rust = true;
569 edition = stripped.parse::<Edition>().ok();
570 } else if item.is_empty() || RUST_CODE.contains(&item) {
571 is_rust = true;
572 }
573 }
574 }
575 },
576 End(CodeBlock(_)) => {
577 in_code = false;
578 is_rust = false;
579 },
580 Start(Link(_, url, _)) => in_link = Some(url),
581 End(Link(..)) => in_link = None,
582 Start(Heading(_, _, _) | Paragraph | Item) => {
583 if let Start(Heading(_, _, _)) = event {
584 in_heading = true;
585 }
586 ticks_unbalanced = false;
587 let (_, span) = get_current_span(spans, range.start);
588 paragraph_span = first_line_of_span(cx, span);
589 },
590 End(Heading(_, _, _) | Paragraph | Item) => {
591 if let End(Heading(_, _, _)) = event {
592 in_heading = false;
593 }
594 if ticks_unbalanced {
595 span_lint_and_help(
596 cx,
597 DOC_MARKDOWN,
598 paragraph_span,
599 "backticks are unbalanced",
600 None,
601 "a backtick may be missing a pair",
602 );
603 } else {
604 for (text, span) in text_to_check {
605 check_text(cx, valid_idents, &text, span);
606 }
607 }
608 text_to_check = Vec::new();
609 },
610 Start(_tag) | End(_tag) => (), // We don't care about other tags
611 Html(_html) => (), // HTML is weird, just ignore it
612 SoftBreak | HardBreak | TaskListMarker(_) | Code(_) | Rule => (),
613 FootnoteReference(text) | Text(text) => {
614 let (begin, span) = get_current_span(spans, range.start);
615 paragraph_span = paragraph_span.with_hi(span.hi());
616 ticks_unbalanced |= text.contains('`') && !in_code;
617 if Some(&text) == in_link.as_ref() || ticks_unbalanced {
618 // Probably a link of the form `<http://example.com>`
619 // which is represented as a link to "http://example.com" with
620 // text "http://example.com" by pulldown-cmark
621 continue;
622 }
623 let trimmed_text = text.trim();
624 headers.safety |= in_heading && trimmed_text == "Safety";
625 headers.safety |= in_heading && trimmed_text == "Implementation safety";
626 headers.safety |= in_heading && trimmed_text == "Implementation Safety";
627 headers.errors |= in_heading && trimmed_text == "Errors";
628 headers.panics |= in_heading && trimmed_text == "Panics";
629 if in_code {
630 if is_rust {
631 let edition = edition.unwrap_or_else(|| cx.tcx.sess.edition());
632 check_code(cx, &text, edition, span);
633 }
634 } else {
635 check_link_quotes(cx, in_link.is_some(), trimmed_text, span, &range, begin, text.len());
636 // Adjust for the beginning of the current `Event`
637 let span = span.with_lo(span.lo() + BytePos::from_usize(range.start - begin));
638 text_to_check.push((text, span));
639 }
640 },
641 }
642 }
643 headers
644 }
645
646 fn check_link_quotes(
647 cx: &LateContext<'_>,
648 in_link: bool,
649 trimmed_text: &str,
650 span: Span,
651 range: &Range<usize>,
652 begin: usize,
653 text_len: usize,
654 ) {
655 if in_link && trimmed_text.starts_with('\'') && trimmed_text.ends_with('\'') {
656 // fix the span to only point at the text within the link
657 let lo = span.lo() + BytePos::from_usize(range.start - begin);
658 span_lint(
659 cx,
660 DOC_LINK_WITH_QUOTES,
661 span.with_lo(lo).with_hi(lo + BytePos::from_usize(text_len)),
662 "possible intra-doc link using quotes instead of backticks",
663 );
664 }
665 }
666
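/// Maps an offset into the combined markdown text back to the doc line that
/// contains it. A sketch with hypothetical values: given cumulative offsets
/// [(0, s0), (12, s1), (30, s2)], an `idx` of 20 makes the binary search
/// return `Err(2)`, so the entry at index 1, `(12, s1)`, is returned.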
667 fn get_current_span(spans: &[(usize, Span)], idx: usize) -> (usize, Span) {
668 let index = match spans.binary_search_by(|c| c.0.cmp(&idx)) {
669 Ok(o) => o,
670 Err(e) => e - 1,
671 };
672 spans[index]
673 }
674
675 fn check_code(cx: &LateContext<'_>, text: &str, edition: Edition, span: Span) {
676 fn has_needless_main(code: String, edition: Edition) -> bool {
677 rustc_driver::catch_fatal_errors(|| {
678 rustc_span::create_session_globals_then(edition, || {
679 let filename = FileName::anon_source_code(&code);
680
681 let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
682 let fallback_bundle =
683 rustc_errors::fallback_fluent_bundle(rustc_errors::DEFAULT_LOCALE_RESOURCES, false);
684 let emitter = EmitterWriter::new(
685 Box::new(io::sink()),
686 None,
687 None,
688 fallback_bundle,
689 false,
690 false,
691 false,
692 None,
693 false,
694 );
695 let handler = Handler::with_emitter(false, None, Box::new(emitter));
696 let sess = ParseSess::with_span_handler(handler, sm);
697
698 let mut parser = match maybe_new_parser_from_source_str(&sess, filename, code) {
699 Ok(p) => p,
700 Err(errs) => {
701 drop(errs);
702 return false;
703 },
704 };
705
706 let mut relevant_main_found = false;
707 loop {
708 match parser.parse_item(ForceCollect::No) {
709 Ok(Some(item)) => match &item.kind {
710 ItemKind::Fn(box Fn {
711 sig, body: Some(block), ..
712 }) if item.ident.name == sym::main => {
713 let is_async = matches!(sig.header.asyncness, Async::Yes { .. });
714 let returns_nothing = match &sig.decl.output {
715 FnRetTy::Default(..) => true,
716 FnRetTy::Ty(ty) if ty.kind.is_unit() => true,
717 FnRetTy::Ty(_) => false,
718 };
719
720 if returns_nothing && !is_async && !block.stmts.is_empty() {
721 // This main function should be linted, but only if there are no other functions
722 relevant_main_found = true;
723 } else {
724 // This main function should not be linted; we're done
725 return false;
726 }
727 },
728 // Tests with one of these items are ignored
729 ItemKind::Static(..)
730 | ItemKind::Const(..)
731 | ItemKind::ExternCrate(..)
732 | ItemKind::ForeignMod(..)
733 // Another function was found; this case is ignored
734 | ItemKind::Fn(..) => return false,
735 _ => {},
736 },
737 Ok(None) => break,
738 Err(e) => {
739 e.cancel();
740 return false;
741 },
742 }
743 }
744
745 relevant_main_found
746 })
747 })
748 .ok()
749 .unwrap_or_default()
750 }
751
752 // Because of the global session, we need to create a new session in a different thread with
753 // the edition we need.
754 let text = text.to_owned();
755 if thread::spawn(move || has_needless_main(text, edition))
756 .join()
757 .expect("thread::spawn failed")
758 {
759 span_lint(cx, NEEDLESS_DOCTEST_MAIN, span, "needless `fn main` in doctest");
760 }
761 }
762
763 fn check_text(cx: &LateContext<'_>, valid_idents: &FxHashSet<String>, text: &str, span: Span) {
764 for word in text.split(|c: char| c.is_whitespace() || c == '\'') {
765 // Trim punctuation as in `some comment (see foo::bar).`
766 // ^^
767 // Or even as in `_foo bar_` which is emphasized. Also preserve `::` as a prefix/suffix.
768 let mut word = word.trim_matches(|c: char| !c.is_alphanumeric() && c != ':');
769
770 // Remove leading or trailing single `:` which may be part of a sentence.
771 if word.starts_with(':') && !word.starts_with("::") {
772 word = word.trim_start_matches(':');
773 }
774 if word.ends_with(':') && !word.ends_with("::") {
775 word = word.trim_end_matches(':');
776 }
777
778 if valid_idents.contains(word) || word.chars().all(|c| c == ':') {
779 continue;
780 }
781
782 // Adjust for the current word
783 let offset = word.as_ptr() as usize - text.as_ptr() as usize;
784 let span = Span::new(
785 span.lo() + BytePos::from_usize(offset),
786 span.lo() + BytePos::from_usize(offset + word.len()),
787 span.ctxt(),
788 span.parent(),
789 );
790
791 check_word(cx, word, span);
792 }
793 }
794
795 fn check_word(cx: &LateContext<'_>, word: &str, span: Span) {
796 /// Checks if a string is camel-case, i.e., contains at least two uppercase
797 /// letters (`Clippy` is ok) and one lower-case letter (`NASA` is ok).
798 /// Plurals are also excluded (`IDs` is ok).
799 fn is_camel_case(s: &str) -> bool {
800 if s.starts_with(|c: char| c.is_ascii_digit()) {
801 return false;
802 }
803
804 let s = s.strip_suffix('s').unwrap_or(s);
805
806 s.chars().all(char::is_alphanumeric)
807 && s.chars().filter(|&c| c.is_uppercase()).take(2).count() > 1
808 && s.chars().filter(|&c| c.is_lowercase()).take(1).count() > 0
809 }
810
811 fn has_underscore(s: &str) -> bool {
812 s != "_" && !s.contains("\\_") && s.contains('_')
813 }
814
815 fn has_hyphen(s: &str) -> bool {
816 s != "-" && s.contains('-')
817 }
818
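// Illustrative cases (hypothetical words): `foo_bar` and
// `that::other::module::foo` reach the backtick suggestion below via
// `has_underscore` and the `::` check, `DocMarkdown` does via `is_camel_case`,
// a bare `https://example.com` gets the URL message instead, and a word with
// both an underscore and a hyphen (e.g. `foo_bar-baz`) is skipped as a
// probable compound word.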
819 if let Ok(url) = Url::parse(word) {
820 // try to get around the fact that `foo::bar` parses as a valid URL
821 if !url.cannot_be_a_base() {
822 span_lint(
823 cx,
824 DOC_MARKDOWN,
825 span,
826 "you should put bare URLs between `<`/`>` or make a proper Markdown link",
827 );
828
829 return;
830 }
831 }
832
833 // We assume that mixed-case words are not meant to be put inside backticks. (Issue #2343)
834 if has_underscore(word) && has_hyphen(word) {
835 return;
836 }
837
838 if has_underscore(word) || word.contains("::") || is_camel_case(word) {
839 let mut applicability = Applicability::MachineApplicable;
840
841 span_lint_and_then(
842 cx,
843 DOC_MARKDOWN,
844 span,
845 "item in documentation is missing backticks",
846 |diag| {
847 let snippet = snippet_with_applicability(cx, span, "..", &mut applicability);
848 diag.span_suggestion_with_style(
849 span,
850 "try",
851 format!("`{snippet}`"),
852 applicability,
853 // always show the suggestion on a separate line, since the
854 // inline presentation adds another pair of backticks
855 SuggestionStyle::ShowAlways,
856 );
857 },
858 );
859 }
860 }
861
862 struct FindPanicUnwrap<'a, 'tcx> {
863 cx: &'a LateContext<'tcx>,
864 panic_span: Option<Span>,
865 typeck_results: &'tcx ty::TypeckResults<'tcx>,
866 }
867
868 impl<'a, 'tcx> Visitor<'tcx> for FindPanicUnwrap<'a, 'tcx> {
869 type NestedFilter = nested_filter::OnlyBodies;
870
871 fn visit_expr(&mut self, expr: &'tcx Expr<'_>) {
872 if self.panic_span.is_some() {
873 return;
874 }
875
876 if let Some(macro_call) = root_macro_call_first_node(self.cx, expr) {
877 if is_panic(self.cx, macro_call.def_id)
878 || matches!(
879 self.cx.tcx.item_name(macro_call.def_id).as_str(),
880 "assert" | "assert_eq" | "assert_ne" | "todo"
881 )
882 {
883 self.panic_span = Some(macro_call.span);
884 }
885 }
886
887 // check for `unwrap`
888 if let Some(arglists) = method_chain_args(expr, &["unwrap"]) {
889 let receiver_ty = self.typeck_results.expr_ty(arglists[0].0).peel_refs();
890 if is_type_diagnostic_item(self.cx, receiver_ty, sym::Option)
891 || is_type_diagnostic_item(self.cx, receiver_ty, sym::Result)
892 {
893 self.panic_span = Some(expr.span);
894 }
895 }
896
897 // and check sub-expressions
898 intravisit::walk_expr(self, expr);
899 }
900
901 // Panics in const blocks will cause compilation to fail.
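// (For instance, an anonymous const such as an array length of `{ panic!() }`
// is a const-evaluation error at compile time, so there is no runtime panic to
// document here.)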
902 fn visit_anon_const(&mut self, _: &'tcx AnonConst) {}
903
904 fn nested_visit_map(&mut self) -> Self::Map {
905 self.cx.tcx.hir()
906 }
907 }