]> git.proxmox.com Git - ceph.git/blob - ceph/src/boost/libs/spirit/example/lex/lexer_debug_support.cpp
update ceph source to reef 18.1.2
[ceph.git] / ceph / src / boost / libs / spirit / example / lex / lexer_debug_support.cpp
1 // Copyright (c) 2001-2011 Hartmut Kaiser
2 //
3 // Distributed under the Boost Software License, Version 1.0. (See accompanying
4 // file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
5
6 // #define BOOST_SPIRIT_LEXERTL_DEBUG 1
7
#include <iostream>
#include <string>

#include <boost/mpl/vector.hpp>
#include <boost/phoenix.hpp>
#include <boost/spirit/include/lex_lexertl.hpp>
#include <boost/spirit/include/qi.hpp>
11
12 namespace lex = boost::spirit::lex;
13 namespace qi = boost::spirit::qi;
14 namespace phoenix = boost::phoenix;
15
16 ///////////////////////////////////////////////////////////////////////////////
template <typename Lexer>
struct language_tokens : lex::lexer<Lexer>
{
    // Token definitions for the toy language: the keywords "float" and
    // "int", numeric literals, identifiers, '=' and whitespace (which is
    // matched but discarded via a lexer semantic action).
    language_tokens()
    {
        // Token patterns (lexertl regular expressions).
        tok_float = "float";
        tok_int = "int";
        floatlit = "[0-9]+\\.[0-9]*";
        intlit = "[0-9]+";
        ws = "[ \t\n]+";
        identifier = "[a-zA-Z_][a-zA-Z_0-9]*";

        // Whitespace is recognized but skipped: the attached action sets
        // the pass flag to pass_ignore (this requires an actor_lexer, see
        // main() below — TODO confirm it is instantiated with one).
        this->self = ws [lex::_pass = lex::pass_flags::pass_ignore];
        // Registration order matters: the keyword tokens are added before
        // the catch-all 'identifier' pattern so that keywords win.
        this->self += tok_float | tok_int | floatlit | intlit | identifier;
        this->self += lex::char_('=');
    }

    lex::token_def<> tok_float, tok_int;   // keyword tokens, no attribute
    lex::token_def<> ws;                   // whitespace, ignored while lexing
    lex::token_def<double> floatlit;       // literal exposed as double
    lex::token_def<int> intlit;            // literal exposed as int
    lex::token_def<> identifier;           // variable names
};
40
41 ///////////////////////////////////////////////////////////////////////////////
template <typename Iterator>
struct language_grammar : qi::grammar<Iterator>
{
    // Grammar recognizing one or more declarations of the form
    //     float <identifier> = <float-literal>
    //     int   <identifier> = <int-literal>
    // It is parameterized by the token set so the rules can refer to the
    // token definitions directly.
    template <typename Lexer>
    language_grammar(language_tokens<Lexer> const& tok)
      : language_grammar::base_type(declarations)
    {
        declarations = +number;
        number =
                tok.tok_float >> tok.identifier >> '=' >> tok.floatlit
            |   tok.tok_int >> tok.identifier >> '=' >> tok.intlit
            ;

        // Name the rules and enable qi's rule-level debug tracing for them
        // (output is produced during parsing).
        declarations.name("declarations");
        number.name("number");
        debug(declarations);
        debug(number);
    }

    qi::rule<Iterator> declarations;   // start rule: one or more 'number's
    qi::rule<Iterator> number;         // a single declaration
};
64
65 ///////////////////////////////////////////////////////////////////////////////
66 int main()
67 {
68 // iterator type used to expose the underlying input stream
69 typedef std::string::iterator base_iterator_type;
70
71 // lexer type
72 typedef lex::lexertl::actor_lexer<
73 lex::lexertl::token<
74 base_iterator_type, boost::mpl::vector2<double, int>
75 > > lexer_type;
76
77 // iterator type exposed by the lexer
78 typedef language_tokens<lexer_type>::iterator_type iterator_type;
79
80 // now we use the types defined above to create the lexer and grammar
81 // object instances needed to invoke the parsing process
82 language_tokens<lexer_type> tokenizer; // Our lexer
83 language_grammar<iterator_type> g (tokenizer); // Our parser
84
85 // Parsing is done based on the token stream, not the character
86 // stream read from the input.
87 std::string str ("float f = 3.4\nint i = 6\n");
88 base_iterator_type first = str.begin();
89
90 bool r = lex::tokenize_and_parse(first, str.end(), tokenizer, g);
91
92 if (r) {
93 std::cout << "-------------------------\n";
94 std::cout << "Parsing succeeded\n";
95 std::cout << "-------------------------\n";
96 }
97 else {
98 std::string rest(first, str.end());
99 std::cout << "-------------------------\n";
100 std::cout << "Parsing failed\n";
101 std::cout << "stopped at: \"" << rest << "\"\n";
102 std::cout << "-------------------------\n";
103 }
104
105 std::cout << "Bye... :-) \n\n";
106 return 0;
107 }