//  Copyright (c) 2001-2010 Hartmut Kaiser
//
//  Distributed under the Boost Software License, Version 1.0. (See accompanying
//  file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)

#if !defined(SPIRIT_LEXER_EXAMPLE_WORD_COUNT_TOKENS_FEB_10_2008_0739PM)
#define SPIRIT_LEXER_EXAMPLE_WORD_COUNT_TOKENS_FEB_10_2008_0739PM

#include <string>

///////////////////////////////////////////////////////////////////////////////
//  Token definition: We keep the base class for the token definition as a
//                    template parameter to allow this class to be used for
//                    both code generation and lexical analysis.
///////////////////////////////////////////////////////////////////////////////
//[wc_static_tokenids
enum tokenids
{
    IDANY = boost::spirit::lex::min_token_id + 1,
};
//]

//[wc_static_tokendef
// This token definition class can be used without any change for all three
// possible use cases: a dynamic lexical analyzer, a code generator, and a
// static lexical analyzer.
template <typename BaseLexer>
struct word_count_tokens : boost::spirit::lex::lexer<BaseLexer>
{
    word_count_tokens()
      : word_count_tokens::base_type(
          boost::spirit::lex::match_flags::match_not_dot_newline)
    {
        // define the tokens and associate them with the lexer: 'word'
        // matches any sequence of characters that is neither a space, a
        // tab, nor a newline; single newline characters and any other
        // single character (reported with the token id IDANY) are matched
        // as well, so every character of the input is covered
        word = "[^ \t\n]+";
        this->self = word | '\n' | boost::spirit::lex::token_def<>(".", IDANY);
    }

    boost::spirit::lex::token_def<std::string> word;
};
//]

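///////////////////////////////////////////////////////////////////////////////
//  Usage sketch (illustrative only; the names below assume the Boost.Spirit.Lex
//  lexertl headers have been included by the translation unit, as in the
//  accompanying word count examples):
//
//      // instantiate the token definition with the dynamic lexertl lexer,
//      // e.g. for tokenizing input directly or for driving the static code
//      // generator
//      typedef boost::spirit::lex::lexertl::token<char const*> token_type;
//      typedef boost::spirit::lex::lexertl::lexer<token_type> lexer_type;
//      word_count_tokens<lexer_type> word_count;
//
//  For the static analyzer the same template would be instantiated with
//  boost::spirit::lex::lexertl::static_lexer<> (parameterized with the
//  generated token tables) instead of lexertl::lexer<>.
///////////////////////////////////////////////////////////////////////////////
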
#endif