/*=============================================================================
    Boost.Wave: A Standard compliant C++ preprocessor library
    http://www.boost.org/

    Copyright (c) 2001-2012 Hartmut Kaiser. Distributed under the Boost
    Software License, Version 1.0. (See accompanying file
    LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
=============================================================================*/

// system headers
#include <string>
#include <limits>
#if defined(TESTLEXERS_TIMING)
#include <iostream>
#endif

#include <boost/wave/wave_config.hpp>
#undef BOOST_WAVE_SEPARATE_LEXER_INSTANTIATION

#include <boost/detail/lightweight_test.hpp>
#if defined(TESTLEXERS_TIMING)
#include "high_resolution_timer.hpp"
#endif

// include the lexertl lexer related stuff
#include <boost/wave/cpplexer/cpp_lex_token.hpp>                     // token type
#include <libs/wave/samples/list_includes/lexertl/lexertl_lexer.hpp> // lexer type

typedef boost::wave::cpplexer::lex_token<> token_type;
typedef boost::wave::cpplexer::lexertl::lex_iterator<token_type> lexer_type;

///////////////////////////////////////////////////////////////////////////////
// include test data
#include "cpp_tokens.hpp"

///////////////////////////////////////////////////////////////////////////////
int
main(int argc, char *argv[])
{
    try {
        token_type::position_type pos("<testdata>");

#if defined(TESTLEXERS_TIMING)
        boost::high_resolution_timer tim;
        for (int i = 0; i < 1000; ++i) {
#endif

        for (lexem const* data = lexems; NULL != data->token; ++data) {
            // feed the test input to the lexer
            token_type::string_type instr(data->token);

            // construct the lexer iterator pair over the input; support_cpp2a
            // enables C++20 (C++2a) language support, a default constructed
            // iterator marks the end of the token stream
            lexer_type it = lexer_type(instr.begin(), instr.end(), pos,
                boost::wave::support_cpp2a);
            lexer_type end = lexer_type();

            // verify the correct outcome of the tokenization
#if defined(TESTLEXERS_VERBOSE)
            std::cerr << boost::wave::get_token_name(data->id) << std::endl;
#endif

            if (data->id != boost::wave::token_id(*it)) {
                BOOST_TEST(data->id == boost::wave::token_id(*it));
                std::cerr << data->token << ": expected: "
                    << boost::wave::get_token_name(data->id);
                std::cerr << ", found: "
                    << boost::wave::get_token_name(boost::wave::token_id(*it))
                    << std::endl;
            }

            // the whole input must have been consumed as a single token,
            // i.e. the next token has to be T_EOF
            BOOST_TEST(++it != end);
            if (boost::wave::T_EOF != boost::wave::token_id(*it)) {
                BOOST_TEST(boost::wave::T_EOF == boost::wave::token_id(*it));
                std::cerr << data->token << ": not fully matched, "
                    << "first non-matched token was: " << (*it).get_value()
                    << std::endl;
            }
        }

#if defined(TESTLEXERS_TIMING)
        }
        std::cout << tim.elapsed() << " [s]" << std::endl;
#endif
    }
    catch (boost::wave::cpplexer::lexing_exception &e) {
        // some lexing error
        std::cerr
            << "test_lexertl_lexer: "
            << e.description() << std::endl;
        return (std::numeric_limits<int>::max)() - 1;
    }

    return boost::report_errors();
}