// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use core::prelude::*;

use ast;
use codemap::{BytePos, spanned};
use parse::lexer::reader;
use parse::parser::Parser;
use parse::token;

use core::option::{None, Option, Some};

use opt_vec;
use opt_vec::OptVec;

// SeqSep: a sequence separator (token) and whether a trailing
// separator is allowed.
pub struct SeqSep {
    sep: Option<token::Token>,
    trailing_sep_allowed: bool
}

pub fn seq_sep_trailing_disallowed(+t: token::Token) -> SeqSep {
    SeqSep {
        sep: Some(t),
        trailing_sep_allowed: false,
    }
}
pub fn seq_sep_trailing_allowed(+t: token::Token) -> SeqSep {
    SeqSep {
        sep: Some(t),
        trailing_sep_allowed: true,
    }
}
pub fn seq_sep_none() -> SeqSep {
    SeqSep {
        sep: None,
        trailing_sep_allowed: false,
    }
}
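
// A SeqSep value is what the parse_seq* functions below consult: the
// separator is expected between elements, and `trailing_sep_allowed`
// decides whether one extra separator may appear before the closing
// delimiter. A sketch for a comma-separated list that tolerates a
// trailing comma (assuming token::COMMA is the usual comma token):
//
//     let sep = seq_sep_trailing_allowed(token::COMMA);
//
// seq_sep_none() describes sequences whose elements carry no separator
// at all.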

pub fn token_to_str(reader: @reader, token: &token::Token) -> ~str {
    token::to_str(reader.interner(), token)
}

pub impl Parser {
    fn unexpected_last(&self, t: &token::Token) -> ! {
        self.span_fatal(
            *self.last_span,
            fmt!(
                "unexpected token: `%s`",
                token_to_str(self.reader, t)
            )
        );
    }

    fn unexpected(&self) -> ! {
        self.fatal(
            fmt!(
                "unexpected token: `%s`",
                token_to_str(self.reader, &copy *self.token)
            )
        );
    }

    // expect and consume the token t. Signal an error if
    // the next token is not t.
    fn expect(&self, t: &token::Token) {
        if *self.token == *t {
            self.bump();
        } else {
            self.fatal(
                fmt!(
                    "expected `%s` but found `%s`",
                    token_to_str(self.reader, t),
                    token_to_str(self.reader, &copy *self.token)
                )
            )
        }
    }
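
    // A usage sketch for expect(): a caller that must see an opening
    // brace next would write (assuming `p` is a Parser and token::LBRACE
    // is the usual opening-brace token):
    //
    //     p.expect(&token::LBRACE);
    //
    // On a mismatch the expected/found pair is reported via fatal(), so
    // callers need no error handling of their own.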

    fn parse_ident(&self) -> ast::ident {
        self.check_strict_keywords();
        self.check_reserved_keywords();
        match *self.token {
            token::IDENT(i, _) => {
                self.bump();
                i
            }
            token::INTERPOLATED(token::nt_ident(*)) => {
                self.bug(
                    ~"ident interpolation not converted to real token"
                );
            }
            _ => {
                self.fatal(
                    fmt!(
                        "expected ident, found `%s`",
                        token_to_str(self.reader, &copy *self.token)
                    )
                );
            }
        }
    }
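
    // The usual calling pattern is simply (a sketch, assuming `p` is a
    // Parser positioned on an identifier token):
    //
    //     let name: ast::ident = p.parse_ident();
    //
    // Strict and reserved keywords are rejected up front, so a keyword
    // in identifier position is reported here rather than later.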

    fn parse_path_list_ident(&self) -> ast::path_list_ident {
        let lo = self.span.lo;
        let ident = self.parse_ident();
        let hi = self.span.hi;
        spanned(lo, hi, ast::path_list_ident_ { name: ident,
                                                id: self.get_id() })
    }

    // Consume the token 'tok' if it is the current token. Returns true
    // if the given token was present and consumed, false otherwise.
    fn eat(&self, tok: &token::Token) -> bool {
        return if *self.token == *tok { self.bump(); true } else { false };
    }
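
    // Typical use is for optional punctuation (a sketch, assuming `p` is
    // a Parser and token::COMMA is the usual comma token):
    //
    //     if p.eat(&token::COMMA) {
    //         // a comma was present and has now been consumed
    //     }
    //
    // Unlike expect(), eat() never signals an error.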

    // Storing keywords as interned idents instead of strings would be nifty.

    // A sanity check that the word we are asking for is a known keyword
    fn require_keyword(&self, word: &~str) {
        if !self.keywords.contains(word) {
            self.bug(fmt!("unknown keyword: %s", *word));
        }
    }

    fn token_is_word(&self, word: &~str, tok: &token::Token) -> bool {
        match *tok {
            token::IDENT(sid, false) => { *self.id_to_str(sid) == *word }
            _ => { false }
        }
    }

    fn token_is_keyword(&self, word: &~str, tok: &token::Token) -> bool {
        self.require_keyword(word);
        self.token_is_word(word, tok)
    }

    fn is_keyword(&self, word: &~str) -> bool {
        self.token_is_keyword(word, &copy *self.token)
    }

    fn is_any_keyword(&self, tok: &token::Token) -> bool {
        match *tok {
            token::IDENT(sid, false) => {
                self.keywords.contains(self.id_to_str(sid))
            }
            _ => false
        }
    }

    fn eat_keyword(&self, word: &~str) -> bool {
        self.require_keyword(word);
        let is_kw = match *self.token {
            token::IDENT(sid, false) => *word == *self.id_to_str(sid),
            _ => false
        };
        if is_kw { self.bump() }
        is_kw
    }

    fn expect_keyword(&self, word: &~str) {
        self.require_keyword(word);
        if !self.eat_keyword(word) {
            self.fatal(
                fmt!(
                    "expected `%s`, found `%s`",
                    *word,
                    token_to_str(self.reader, &copy *self.token)
                )
            );
        }
    }
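
    // The eat/expect pair mirrors the one for plain tokens. A sketch
    // (assuming `p` is a Parser and that "pub" and "fn" are entries in
    // p.keywords):
    //
    //     if p.eat_keyword(&~"pub") { /* the item is public */ }
    //     p.expect_keyword(&~"fn");
    //
    // require_keyword() turns a misspelled keyword string at a call site
    // into a compiler bug report rather than a silent parse failure.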

    fn is_strict_keyword(&self, word: &~str) -> bool {
        self.strict_keywords.contains(word)
    }

    fn check_strict_keywords(&self) {
        match *self.token {
            token::IDENT(_, false) => {
                let w = token_to_str(self.reader, &copy *self.token);
                self.check_strict_keywords_(&w);
            }
            _ => ()
        }
    }

    fn check_strict_keywords_(&self, w: &~str) {
        if self.is_strict_keyword(w) {
            self.fatal(fmt!("found `%s` in ident position", *w));
        }
    }

    fn is_reserved_keyword(&self, word: &~str) -> bool {
        self.reserved_keywords.contains(word)
    }

    fn check_reserved_keywords(&self) {
        match *self.token {
            token::IDENT(_, false) => {
                let w = token_to_str(self.reader, &copy *self.token);
                self.check_reserved_keywords_(&w);
            }
            _ => ()
        }
    }

    fn check_reserved_keywords_(&self, w: &~str) {
        if self.is_reserved_keyword(w) {
            self.fatal(fmt!("`%s` is a reserved keyword", *w));
        }
    }

    // Expect and consume a GT. If a >> is seen, replace it
    // with a single > and continue.
    fn expect_gt(&self) {
        if *self.token == token::GT {
            self.bump();
        } else if *self.token == token::BINOP(token::SHR) {
            self.replace_token(
                token::GT,
                self.span.lo + BytePos(1u),
                self.span.hi
            );
        } else {
            let mut s: ~str = ~"expected `";
            s += token_to_str(self.reader, &token::GT);
            s += ~"`, found `";
            s += token_to_str(self.reader, &copy *self.token);
            s += ~"`";
            self.fatal(s);
        }
    }
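
    // This splitting is what lets a nested generic such as
    // `Option<Option<int>>` parse: the lexer hands back the trailing `>>`
    // as a single SHR binop token, and expect_gt() consumes only its
    // first `>` by rewriting the token to a GT whose span starts one
    // byte later, leaving the second `>` for the enclosing list.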

    // parse a sequence bracketed by '<' and '>', stopping
    // before the '>'.
    fn parse_seq_to_before_gt<T: Copy>(
        &self,
        sep: Option<token::Token>,
        f: &fn(&Parser) -> T
    ) -> OptVec<T> {
        let mut first = true;
        let mut v = opt_vec::Empty;
        while *self.token != token::GT
            && *self.token != token::BINOP(token::SHR) {
            match sep {
                Some(ref t) => {
                    if first { first = false; }
                    else { self.expect(t); }
                }
                _ => ()
            }
            v.push(f(self));
        }
        return v;
    }

    fn parse_seq_to_gt<T: Copy>(
        &self,
        sep: Option<token::Token>,
        f: &fn(&Parser) -> T
    ) -> OptVec<T> {
        let v = self.parse_seq_to_before_gt(sep, f);
        self.expect_gt();
        return v;
    }
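
    // A sketch of consuming a list of type-parameter names, once the
    // opening `<` has already been eaten (assuming `p` is a Parser and
    // token::COMMA is the usual comma token):
    //
    //     let names = p.parse_seq_to_gt(
    //         Some(token::COMMA),
    //         |p| p.parse_ident()
    //     );
    //
    // The separator is optional, and the closing `>` (or the first half
    // of a `>>`) is consumed by expect_gt().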

    // parse a sequence, including the closing delimiter. The function
    // f must consume tokens until reaching the next separator or
    // closing bracket.
    fn parse_seq_to_end<T: Copy>(
        &self,
        ket: &token::Token,
        sep: SeqSep,
        f: &fn(&Parser) -> T
    ) -> ~[T] {
        let val = self.parse_seq_to_before_end(ket, sep, f);
        self.bump();
        val
    }

    // parse a sequence, not including the closing delimiter. The function
    // f must consume tokens until reaching the next separator or
    // closing bracket.
    fn parse_seq_to_before_end<T: Copy>(
        &self,
        ket: &token::Token,
        sep: SeqSep,
        f: &fn(&Parser) -> T
    ) -> ~[T] {
        let mut first: bool = true;
        let mut v: ~[T] = ~[];
        while *self.token != *ket {
            match sep.sep {
                Some(ref t) => {
                    if first { first = false; }
                    else { self.expect(t); }
                }
                _ => ()
            }
            if sep.trailing_sep_allowed && *self.token == *ket { break; }
            v.push(f(self));
        }
        return v;
    }
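
    // The SeqSep flags drive the loop above: the separator is expected
    // before every element except the first, and when
    // trailing_sep_allowed is set, re-checking for `ket` right after the
    // separator lets a list ending in `..., c,)` stop cleanly instead of
    // attempting to parse one element too many.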

    // parse a sequence bracketed by the tokens `bra` and `ket`, consuming
    // both delimiters but returning only the elements. The function f
    // must consume tokens until reaching the next separator or closing
    // bracket.
    fn parse_unspanned_seq<T: Copy>(
        &self,
        bra: &token::Token,
        ket: &token::Token,
        sep: SeqSep,
        f: &fn(&Parser) -> T
    ) -> ~[T] {
        self.expect(bra);
        let result = self.parse_seq_to_before_end(ket, sep, f);
        self.bump();
        result
    }
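
    // A sketch of parsing a parenthesized, comma-separated list of names
    // in one call (assuming `p` is a Parser and LPAREN/RPAREN/COMMA are
    // the usual delimiter tokens):
    //
    //     let names = p.parse_unspanned_seq(
    //         &token::LPAREN,
    //         &token::RPAREN,
    //         seq_sep_trailing_disallowed(token::COMMA),
    //         |p| p.parse_ident()
    //     );
    //
    // Both parentheses are consumed; only the parsed elements come back.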

    // NB: Do not use this function unless you actually plan to place the
    // spanned list in the AST.
    fn parse_seq<T: Copy>(
        &self,
        bra: &token::Token,
        ket: &token::Token,
        sep: SeqSep,
        f: &fn(&Parser) -> T
    ) -> spanned<~[T]> {
        let lo = self.span.lo;
        self.expect(bra);
        let result = self.parse_seq_to_before_end(ket, sep, f);
        let hi = self.span.hi;
        self.bump();
        spanned(lo, hi, result)
    }
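
    // parse_seq differs from parse_unspanned_seq only in that it records
    // the span from the opening delimiter's lo to the closing delimiter's
    // hi and wraps the result with spanned(); hence the NB above: build
    // that span only when it will actually be stored in the AST.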
}