source_end = empty.end();
}
-void Tokenizer::begin(const string &name, const string &src)
+void Tokenizer::begin(const string &src, const string &name)
{
iter = src.begin();
source_end = src.end();
{
while(next_tokens.size()<=index)
next_tokens.push_back(parse_token_());
- return (last_token = next_tokens[index]);
+ return next_tokens[index];
}
const string &Tokenizer::parse_token()
{
+ progress_mark = true;
+
if(!next_tokens.empty())
{
last_token = next_tokens.front();
void Tokenizer::set_location(const Location &loc)
// Override the tokenizer's current source location with a caller-supplied one
// (e.g. to honor a #line-style directive).
{
location = loc;
// NOTE(review): line added by this patch — presumably prevents the next
// newline from advancing the line counter so the manually set location is
// not immediately clobbered; confirm against parse_token_()'s line handling,
// which is not visible in this hunk.
+ suppress_line_advance = true;
}
string Tokenizer::parse_token_()