Split tokenizer and preprocessor out of the GLSL parser
diff --git a/source/glsl/tokenizer.cpp b/source/glsl/tokenizer.cpp
new file mode 100644
index 0000000..be43214
--- /dev/null
@@ -0,0 +1,206 @@
+#include <msp/core/raii.h>
+#include <msp/strings/format.h>
+#include "glsl_error.h"
+#include "preprocessor.h"
+#include "syntax.h"
+#include "tokenizer.h"
+
+using namespace std;
+
+namespace Msp {
+namespace GL {
+namespace SL {
+
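+// Until begin() is called, the tokenizer points at a static empty string.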
+Tokenizer::Tokenizer():
+       allow_preprocess(true)
+{
+       static string empty;
+       iter = empty.begin();
+       source_end = empty.end();
+       location.line = 0;
+}
+
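+// Begins tokenizing a new source string; the name and line number are kept
+// up to date for error locations.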
+void Tokenizer::begin(const string &name, const string &src)
+{
+       iter = src.begin();
+       source_end = src.end();
+       location.name = name;
+       location.line = 1;
+       allow_preprocess = true;
+       last_token.clear();
+       next_tokens.clear();
+}
+
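+// Returns the token at the given lookahead offset without consuming it,
+// parsing and buffering further tokens as needed.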
+const string &Tokenizer::peek_token(unsigned index)
+{
+       while(next_tokens.size()<=index)
+               next_tokens.push_back(parse_token_());
+       return (last_token = next_tokens[index]);
+}
+
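+// Consumes and returns the next token, taking buffered lookahead first.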
+const string &Tokenizer::parse_token()
+{
+       if(!next_tokens.empty())
+       {
+               last_token = next_tokens.front();
+               next_tokens.pop_front();
+               return last_token;
+       }
+
+       return (last_token = parse_token_());
+}
+
+void Tokenizer::expect(const string &token)
+{
+       string parsed = parse_token();
+       if(parsed!=token)
+               throw parse_error(location, parsed, format("'%s'", token));
+}
+
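+// Extracts the next token from the source, dispatching to the preprocessor
+// when a # is seen and a directive is still permitted at this position.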
+string Tokenizer::parse_token_()
+{
+       while(1)
+       {
+               skip_comment_and_whitespace();
+               if(iter==source_end)
+                       return string();
+               else if(allow_preprocess && *iter=='#')
+               {
+                       allow_preprocess = false;
+                       preprocess();
+               }
+               else if(isalpha(*iter) || *iter=='_')
+                       return parse_identifier();
+               else if(isdigit(*iter))
+                       return parse_number();
+               else
+                       return parse_other();
+       }
+}
+
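+// Hands the current line over to the preprocessor.  Lookahead is cleared and
+// the source end is clamped to the end of the line for the duration, so
+// directive parsing cannot run past it.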
+void Tokenizer::preprocess()
+{
+       SetForScope<deque<string> > clear_tokens(next_tokens, deque<string>());
+
+       string::const_iterator line_end = iter;
+       for(; (line_end!=source_end && *line_end!='\n'); ++line_end) ;
+       SetForScope<string::const_iterator> stop_at_line_end(source_end, line_end);
+
+       signal_preprocess.emit();
+
+       iter = line_end;
+}
+
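+// Reads an identifier made of letters, digits and underscores.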
+string Tokenizer::parse_identifier()
+{
+       string ident;
+       while(iter!=source_end)
+       {
+               if(isalnum(*iter) || *iter=='_')
+                       ident += *iter++;
+               else
+                       break;
+       }
+
+       return ident;
+}
+
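+// Reads a numeric literal.  A sign is only accepted after an exponent
+// marker (e or E).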
+string Tokenizer::parse_number()
+{
+       bool accept_sign = false;
+       string number;
+       while(iter!=source_end)
+       {
+               if(isdigit(*iter) || *iter=='.')
+                       number += *iter++;
+               else if(*iter=='e' || *iter=='E')
+               {
+                       number += *iter++;
+                       accept_sign = true;
+               }
+               else if(accept_sign && (*iter=='+' || *iter=='-'))
+                       number += *iter++;
+               else
+                       break;
+       }
+
+       return number;
+}
+
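+// Reads an operator or other punctuation, extending the token greedily as
+// long as it still prefixes an entry in the operator table.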
+string Tokenizer::parse_other()
+{
+       if(iter==source_end)
+               return string();
+
+       string token(1, *iter++);
+       for(unsigned i=1; (i<3 && iter!=source_end); ++i)
+       {
+               bool matched = false;
+               for(const Operator *j=Operator::operators; (!matched && j->type); ++j)
+               {
+                       matched = (j->token[i]==*iter);
+                       for(unsigned k=0; (matched && k<i && j->token[k]); ++k)
+                               matched = (j->token[k]==token[k]);
+               }
+
+               if(!matched)
+                       break;
+
+               token += *iter++;
+       }
+
+       return token;
+}
+
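+// Advances past whitespace and comments, tracking line numbers.  The comment
+// variable is a small state machine: 0 = no comment, 1 = seen '/',
+// 2 = line comment, 3 = block comment, 4 = block comment after '*'.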
+void Tokenizer::skip_comment_and_whitespace()
+{
+       unsigned comment = 0;
+       while(iter!=source_end)
+       {
+               if(comment==0)
+               {
+                       if(*iter=='/')
+                               comment = 1;
+                       else if(!isspace(*iter))
+                               break;
+               }
+               else if(comment==1)
+               {
+                       if(*iter=='/')
+                               comment = 2;
+                       else if(*iter=='*')
+                               comment = 3;
+                       else
+                       {
+                               comment = 0;
+                               --iter;
+                               break;
+                       }
+               }
+               else if(comment==2)
+               {
+                       if(*iter=='\n')
+                               comment = 0;
+               }
+               else if(comment==3 && *iter=='*')
+                       comment = 4;
+               else if(comment==4)
+               {
+                       if(*iter=='/')
+                               comment = 0;
+                       else if(*iter!='*')
+                               comment = 3;
+               }
+
+               if(*iter=='\n')
+               {
+                       ++location.line;
+                       allow_preprocess = (comment<3);
+               }
+
+               ++iter;
+       }
+}
+
+} // namespace SL
+} // namespace GL
+} // namespace Msp