#include <boost/config/warning_disable.hpp>
#include <boost/spirit/include/lex.hpp>
#include <boost/bind.hpp>
#include <boost/ref.hpp>
#include <iostream>
#include <string>
namespace lex = boost::spirit::lex;
// Token identifiers for the three kinds of tokens the lexer produces.
enum token_ids {
    ID_WORD = 1000, ID_EOL, ID_CHAR
};

// Lexer definition: associates a regular expression with each token id.
template <typename Lexer>
struct word_count_tokens : lex::lexer<Lexer> {
    word_count_tokens() {
        this->self.add
            ("[^ \t\n]+", ID_WORD)   // a run of non-whitespace characters
            ("\n", ID_EOL)           // end of line
            (".", ID_CHAR)           // any other single character
        ;
    }
};
// Functor called for every matched token; updates the character, word, and line counts.
struct counter {
    typedef bool result_type;   // needed so boost::bind can deduce the return type

    template <typename Token>
    bool operator()(Token const& t, std::size_t& c, std::size_t& w, std::size_t& l) const {
        switch (t.id()) {
        case ID_WORD:                   // a word: count it and its characters
            ++w;
            c += t.value().size();
            break;
        case ID_EOL:                    // a newline: one more line, one more character
            ++l;
            ++c;
            break;
        case ID_CHAR:                   // any other character
            ++c;
            break;
        }
        return true;                    // continue tokenizing
    }
};
int main(int argc, char* argv[]) {
    std::size_t c = 0, w = 0, l = 0;
    std::string str("some text\n and a new line");

    // Problematic line: apparently lex::lexer<>'s template argument isn't defaulted,
    // so this instantiation does not compile.
    word_count_tokens< lex::lexer<> > word_count_functor;

    char const* first = str.c_str();
    char const* last = &first[str.size()];

    // Tokenize the input, invoking counter() for every token that is matched.
    bool r = lex::tokenize(first, last, word_count_functor,
        boost::bind(counter(), _1, boost::ref(c), boost::ref(w), boost::ref(l)));

    if (r) {
        std::cout << "lines: " << l << ", words: " << w << ", characters: " << c << "\n";
    } else {
        std::string rest(first, last);
        std::cout << "Lexical analysis failed\n" << "stopped at: \"" << rest << "\"\n";
    }
    return 0;
}
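
For comparison, below is a minimal sketch of the same program built against the lexertl-based lexer, which is what the Boost.Spirit word-count example uses; lex::lexertl::lexer<> defaults its token type, so the instantiation that fails above compiles here. This assumes boost/spirit/include/lex_lexertl.hpp is available in your Boost installation.

// Sketch: same word-count program, but instantiated with lex::lexertl::lexer<>,
// whose template parameters are defaulted (unlike lex::lexer<>).
#include <boost/config/warning_disable.hpp>
#include <boost/spirit/include/lex_lexertl.hpp>
#include <boost/bind.hpp>
#include <boost/ref.hpp>
#include <iostream>
#include <string>

namespace lex = boost::spirit::lex;

enum token_ids { ID_WORD = 1000, ID_EOL, ID_CHAR };

template <typename Lexer>
struct word_count_tokens : lex::lexer<Lexer> {
    word_count_tokens() {
        this->self.add("[^ \t\n]+", ID_WORD)("\n", ID_EOL)(".", ID_CHAR);
    }
};

struct counter {
    typedef bool result_type;
    template <typename Token>
    bool operator()(Token const& t, std::size_t& c, std::size_t& w, std::size_t& l) const {
        switch (t.id()) {
        case ID_WORD: ++w; c += t.value().size(); break;
        case ID_EOL:  ++l; ++c; break;
        case ID_CHAR: ++c; break;
        }
        return true;
    }
};

int main() {
    std::size_t c = 0, w = 0, l = 0;
    std::string str("some text\n and a new line");

    // lexertl::lexer<> supplies a default token type, so this compiles.
    word_count_tokens< lex::lexertl::lexer<> > word_count_functor;

    char const* first = str.c_str();
    char const* last = &first[str.size()];
    bool r = lex::tokenize(first, last, word_count_functor,
        boost::bind(counter(), _1, boost::ref(c), boost::ref(w), boost::ref(l)));
    if (r)
        std::cout << "lines: " << l << ", words: " << w << ", characters: " << c << "\n";
    return 0;
}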