// Copyright (c) 2001-2011 Hartmut Kaiser
//
// Distributed under the Boost Software License, Version 1.0. (See accompanying
// file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)

// #define BOOST_SPIRIT_LEXERTL_DEBUG 1
#include <boost/config/warning_disable.hpp>

#include <boost/spirit/include/lex_lexertl.hpp>
#include <boost/spirit/include/qi.hpp>
#include <boost/spirit/include/phoenix.hpp>

#include <iostream>
#include <string>
14 namespace lex
= boost::spirit::lex
;
15 namespace qi
= boost::spirit::qi
;
16 namespace phoenix
= boost::phoenix
;
18 ///////////////////////////////////////////////////////////////////////////////
19 template <typename Lexer
>
20 struct language_tokens
: lex::lexer
<Lexer
>
26 floatlit
= "[0-9]+\\.[0-9]*";
29 identifier
= "[a-zA-Z_][a-zA-Z_0-9]*";
31 this->self
= ws
[lex::_pass
= lex::pass_flags::pass_ignore
];
32 this->self
+= tok_float
| tok_int
| floatlit
| intlit
| identifier
;
33 this->self
+= lex::char_('=');
36 lex::token_def
<> tok_float
, tok_int
;
38 lex::token_def
<double> floatlit
;
39 lex::token_def
<int> intlit
;
40 lex::token_def
<> identifier
;
43 ///////////////////////////////////////////////////////////////////////////////
44 template <typename Iterator
>
45 struct language_grammar
: qi::grammar
<Iterator
>
47 template <typename Lexer
>
48 language_grammar(language_tokens
<Lexer
> const& tok
)
49 : language_grammar::base_type(declarations
)
51 declarations
= +number
;
53 tok
.tok_float
>> tok
.identifier
>> '=' >> tok
.floatlit
54 | tok
.tok_int
>> tok
.identifier
>> '=' >> tok
.intlit
57 declarations
.name("declarations");
58 number
.name("number");
63 qi::rule
<Iterator
> declarations
;
64 qi::rule
<Iterator
> number
;
67 ///////////////////////////////////////////////////////////////////////////////
68 int main(int argc
, char* argv
[])
70 // iterator type used to expose the underlying input stream
71 typedef std::string::iterator base_iterator_type
;
74 typedef lex::lexertl::actor_lexer
<
76 base_iterator_type
, boost::mpl::vector2
<double, int>
79 // iterator type exposed by the lexer
80 typedef language_tokens
<lexer_type
>::iterator_type iterator_type
;
82 // now we use the types defined above to create the lexer and grammar
83 // object instances needed to invoke the parsing process
84 language_tokens
<lexer_type
> tokenizer
; // Our lexer
85 language_grammar
<iterator_type
> g (tokenizer
); // Our parser
87 // Parsing is done based on the token stream, not the character
88 // stream read from the input.
89 std::string
str ("float f = 3.4\nint i = 6\n");
90 base_iterator_type first
= str
.begin();
92 bool r
= lex::tokenize_and_parse(first
, str
.end(), tokenizer
, g
);
95 std::cout
<< "-------------------------\n";
96 std::cout
<< "Parsing succeeded\n";
97 std::cout
<< "-------------------------\n";
100 std::string
rest(first
, str
.end());
101 std::cout
<< "-------------------------\n";
102 std::cout
<< "Parsing failed\n";
103 std::cout
<< "stopped at: \"" << rest
<< "\"\n";
104 std::cout
<< "-------------------------\n";
107 std::cout
<< "Bye... :-) \n\n";