// Source: git.proxmox.com rustc.git — src/librustdoc/html/highlight.rs
// Imported Upstream version 1.9.0+dfsg1
1 // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
4 //
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
10
11 //! Basic html highlighting functionality
12 //!
13 //! This module uses libsyntax's lexer to provide token-based highlighting for
14 //! the HTML documentation generated by rustdoc.
15
16 use html::escape::Escape;
17
18 use std::io;
19 use std::io::prelude::*;
20 use syntax::parse::lexer::{self, Reader};
21 use syntax::parse::token;
22 use syntax::parse;
23
24 /// Highlights `src`, returning the HTML output.
25 pub fn render_with_highlighting(src: &str, class: Option<&str>, id: Option<&str>) -> String {
26 debug!("highlighting: ================\n{}\n==============", src);
27 let sess = parse::ParseSess::new();
28 let fm = sess.codemap().new_filemap("<stdin>".to_string(), src.to_string());
29
30 let mut out = Vec::new();
31 write_header(class, id, &mut out).unwrap();
32 write_source(&sess,
33 lexer::StringReader::new(&sess.span_diagnostic, fm),
34 &mut out).unwrap();
35 write_footer(&mut out).unwrap();
36 String::from_utf8_lossy(&out[..]).into_owned()
37 }
38
39 /// Highlights `src`, returning the HTML output. Returns only the inner html to
40 /// be inserted into an element. C.f., `render_with_highlighting` which includes
41 /// an enclosing `<pre>` block.
42 pub fn render_inner_with_highlighting(src: &str) -> String {
43 let sess = parse::ParseSess::new();
44 let fm = sess.codemap().new_filemap("<stdin>".to_string(), src.to_string());
45
46 let mut out = Vec::new();
47 write_source(&sess,
48 lexer::StringReader::new(&sess.span_diagnostic, fm),
49 &mut out).unwrap();
50 String::from_utf8_lossy(&out[..]).into_owned()
51 }
52
/// Exhausts the `lexer` writing the output into `out`.
///
/// The general structure for this method is to iterate over each token,
/// possibly giving it an HTML span with a class specifying what flavor of token
/// it's used. All source code emission is done as slices from the source map,
/// not from the tokens themselves, in order to stay true to the original
/// source.
fn write_source(sess: &parse::ParseSess,
                mut lexer: lexer::StringReader,
                out: &mut Write)
                -> io::Result<()> {
    // Highlighting state threaded across tokens:
    // - `is_attribute`: inside a `#[...]` span (opened on `#`, closed on `]`).
    // - `is_macro`: set when an identifier is directly followed by `!`, so
    //   the `!` itself is styled as part of the macro name.
    // - `is_macro_nonterminal`: set on `$` so the following identifier is
    //   styled as a macro nonterminal (`$foo`).
    let mut is_attribute = false;
    let mut is_macro = false;
    let mut is_macro_nonterminal = false;
    loop {
        let next = lexer.next_token();

        // Slice the exact original text of a span out of the codemap.
        let snip = |sp| sess.codemap().span_to_snippet(sp).unwrap();

        if next.tok == token::Eof { break }

        // Map the token to a CSS class name; arms that `continue` have
        // already emitted their own (possibly multi-token) output.
        let klass = match next.tok {
            token::Whitespace => {
                write!(out, "{}", Escape(&snip(next.sp)))?;
                continue
            },
            token::Comment => {
                write!(out, "<span class='comment'>{}</span>",
                       Escape(&snip(next.sp)))?;
                continue
            },
            token::Shebang(s) => {
                write!(out, "{}", Escape(&s.as_str()))?;
                continue
            },
            // If this '&' token is directly adjacent to another token, assume
            // that it's the address-of operator instead of the and-operator.
            // This allows us to give all pointers their own class (`Box` and
            // `@` are below).
            token::BinOp(token::And) if lexer.peek().sp.lo == next.sp.hi => "kw-2",
            token::At | token::Tilde => "kw-2",

            // consider this as part of a macro invocation if there was a
            // leading identifier
            token::Not if is_macro => { is_macro = false; "macro" }

            // operators
            token::Eq | token::Lt | token::Le | token::EqEq | token::Ne | token::Ge | token::Gt |
            token::AndAnd | token::OrOr | token::Not | token::BinOp(..) | token::RArrow |
            token::BinOpEq(..) | token::FatArrow => "op",

            // miscellaneous, no highlighting
            token::Dot | token::DotDot | token::DotDotDot | token::Comma | token::Semi |
            token::Colon | token::ModSep | token::LArrow | token::OpenDelim(_) |
            token::CloseDelim(token::Brace) | token::CloseDelim(token::Paren) |
            token::Question => "",
            token::Dollar => {
                // `$` followed by an identifier is a macro nonterminal; the
                // flag makes the identifier itself pick up the same class.
                if lexer.peek().tok.is_ident() {
                    is_macro_nonterminal = true;
                    "macro-nonterminal"
                } else {
                    ""
                }
            }

            // This is the start of an attribute. We're going to want to
            // continue highlighting it as an attribute until the ending ']' is
            // seen, so skip out early. Down below we terminate the attribute
            // span when we see the ']'.
            token::Pound => {
                is_attribute = true;
                write!(out, r"<span class='attribute'>#")?;
                continue
            }
            token::CloseDelim(token::Bracket) => {
                if is_attribute {
                    // Close the attribute span opened at the `#` above.
                    is_attribute = false;
                    write!(out, "]</span>")?;
                    continue
                } else {
                    ""
                }
            }

            token::Literal(lit, _suf) => {
                match lit {
                    // text literals
                    token::Byte(..) | token::Char(..) |
                    token::ByteStr(..) | token::ByteStrRaw(..) |
                    token::Str_(..) | token::StrRaw(..) => "string",

                    // number literals
                    token::Integer(..) | token::Float(..) => "number",
                }
            }

            // keywords are also included in the identifier set
            token::Ident(ident, _is_mod_sep) => {
                match &*ident.name.as_str() {
                    "ref" | "mut" => "kw-2",

                    "self" => "self",
                    "false" | "true" => "boolval",

                    "Option" | "Result" => "prelude-ty",
                    "Some" | "None" | "Ok" | "Err" => "prelude-val",

                    _ if next.tok.is_any_keyword() => "kw",
                    _ => {
                        if is_macro_nonterminal {
                            is_macro_nonterminal = false;
                            "macro-nonterminal"
                        } else if lexer.peek().tok == token::Not {
                            // An identifier directly followed by `!` names a
                            // macro invocation.
                            is_macro = true;
                            "macro"
                        } else {
                            "ident"
                        }
                    }
                }
            }

            // Special macro vars are like keywords
            token::SpecialVarNt(_) => "kw-2",

            token::Lifetime(..) => "lifetime",
            token::DocComment(..) => "doccomment",
            // NOTE: `Eof` here is unreachable — the loop breaks on it above.
            token::Underscore | token::Eof | token::Interpolated(..) |
            token::MatchNt(..) | token::SubstNt(..) => "",
        };

        // as mentioned above, use the original source code instead of
        // stringifying this token
        let snip = sess.codemap().span_to_snippet(next.sp).unwrap();
        if klass == "" {
            write!(out, "{}", Escape(&snip))?;
        } else {
            write!(out, "<span class='{}'>{}</span>", klass, Escape(&snip))?;
        }
    }

    Ok(())
}
196
197 fn write_header(class: Option<&str>,
198 id: Option<&str>,
199 out: &mut Write)
200 -> io::Result<()> {
201 write!(out, "<pre ")?;
202 match id {
203 Some(id) => write!(out, "id='{}' ", id)?,
204 None => {}
205 }
206 write!(out, "class='rust {}'>\n", class.unwrap_or(""))
207 }
208
209 fn write_footer(out: &mut Write) -> io::Result<()> {
210 write!(out, "</pre>\n")
211 }