X-Git-Url: http://git.tdb.fi/?p=libs%2Fgl.git;a=blobdiff_plain;f=source%2Fglsl%2Fparser.cpp;h=9e1f1913491cf98800352d25619557a3e120bfdc;hp=c6f3f5be3cdd186818cbee6322ad966dff46181d;hb=d8bdf61007978e2c3670a22a58e2f105e8347537;hpb=19a24f859cd7fcf581442319499ae24b3e7385a4

diff --git a/source/glsl/parser.cpp b/source/glsl/parser.cpp
index c6f3f5be..9e1f1913 100644
--- a/source/glsl/parser.cpp
+++ b/source/glsl/parser.cpp
@@ -2,6 +2,7 @@
 #include
 #include
 #include
+#include "builtin.h"
 #include "glsl_error.h"
 #include "parser.h"
@@ -53,12 +54,28 @@ void Parser::parse_source(const string &name, int index)
 {
 	delete module;
 	module = new Module;
+	cur_stage = &module->shared;
 	base_index = index;
 	source_index = index;
 	if(index>=0)
 		source_reference(1, name);
-	tokenizer.begin(name, source);
+
+	// TODO Need to somehow get type names from imports
+	if(const Stage *builtin = get_builtins(Stage::SHARED))
+	{
+		for(map<string, TypeDeclaration *>::const_iterator i=builtin->types.begin(); i!=builtin->types.end(); ++i)
+			declared_types.insert(i->first);
+	}
+	else
+	{
+		declared_types.insert("void");
+		declared_types.insert("bool");
+		declared_types.insert("int");
+		declared_types.insert("float");
+	}
+
+	tokenizer.begin(source, name);
 	allow_stage_change = true;
 	while(!tokenizer.peek_token().empty())
 		if(RefPtr<Statement> statement = parse_with_recovery(&Parser::parse_global_declaration))
@@ -171,15 +188,9 @@ bool Parser::is_qualifier(const string &token)
 		is_precision_qualifier(token));
 }
 
-bool Parser::is_builtin_type(const string &token)
-{
-	static Regex re("^(void|float|int|bool|[ib]?vec[234]|mat[234](x[234])?|sampler((1D|2D|Cube)(Array)?(Shadow)?|3D))$");
-	return re.match(token);
-}
-
 bool Parser::is_type(const string &token)
 {
-	return is_builtin_type(token) || declared_types.count(token);
+	return declared_types.count(token);
 }
 
 bool Parser::is_identifier(const string &token)
@@ -188,6 +199,15 @@ bool Parser::is_identifier(const string &token)
 	return re.match(token);
 }
 
+template<typename T>
+RefPtr<T> Parser::create_node()
+{
+	RefPtr<T> node = new T;
+	node->source = source_index;
+	node->line = tokenizer.get_location().line;
+	return node;
+}
+
 template<typename T>
 RefPtr<T> Parser::parse_with_recovery(RefPtr<T> (Parser::*parse_func)())
 {
@@ -247,9 +267,7 @@ RefPtr<Statement> Parser::parse_global_declaration()
 		token = tokenizer.peek_token();
 		if(is_interface_qualifier(token) && tokenizer.peek_token(1)==";")
 		{
-			RefPtr<InterfaceLayout> iface_lo = new InterfaceLayout;
-			iface_lo->source = source_index;
-			iface_lo->line = tokenizer.get_location().line;
+			RefPtr<InterfaceLayout> iface_lo = create_node<InterfaceLayout>();
 			iface_lo->layout.qualifiers = layout->qualifiers;
 			iface_lo->interface = tokenizer.parse_token();
 			tokenizer.expect(";");
@@ -262,6 +280,8 @@ RefPtr<Statement> Parser::parse_global_declaration()
 			return var;
 		}
 	}
+	else if(token=="typedef")
+		return parse_type_declaration();
 	else if(token=="struct")
 		return parse_struct_declaration();
 	else if(is_interface_qualifier(token))
@@ -274,6 +294,8 @@ RefPtr<Statement> Parser::parse_global_declaration()
 	}
 	else if(is_qualifier(token))
 		return parse_variable_declaration();
+	else if(token=="virtual")
+		return parse_function_declaration();
 	else if(is_type(token))
 	{
 		if(tokenizer.peek_token(2)=="(")
@@ -302,9 +324,7 @@ RefPtr<Statement> Parser::parse_statement()
 		return parse_return();
 	else if(token=="break" || token=="continue" || token=="discard")
 	{
-		RefPtr<Jump> jump = new Jump;
-		jump->source = source_index;
-		jump->line = tokenizer.get_location().line;
+		RefPtr<Jump> jump = create_node<Jump>();
 		jump->keyword = tokenizer.parse_token();
 		tokenizer.expect(";");
@@ -319,9 +339,7 @@ RefPtr<Statement> Parser::parse_statement()
 	}
 	else if(!token.empty())
 	{
-		RefPtr<ExpressionStatement> expr = new ExpressionStatement;
-		expr->source = source_index;
-		expr->line = tokenizer.get_location().line;
+		RefPtr<ExpressionStatement> expr = create_node<ExpressionStatement>();
 		expr->expression = parse_expression();
 		tokenizer.expect(";");
@@ -337,9 +355,7 @@ RefPtr<Import> Parser::parse_import()
 		throw invalid_shader_source(tokenizer.get_location(), "Imports are only allowed in the shared section");
 
 	tokenizer.expect("import");
-	RefPtr<Import> import = new Import;
-	import->source = source_index;
-	import->line = tokenizer.get_location().line;
+	RefPtr<Import> import = create_node<Import>();
 	import->module = expect_identifier();
 	tokenizer.expect(";");
 	return import;
@@ -348,18 +364,14 @@ RefPtr<Precision> Parser::parse_precision()
 {
 	tokenizer.expect("precision");
-	RefPtr<Precision> precision = new Precision;
-	precision->source = source_index;
-	precision->line = tokenizer.get_location().line;
+	RefPtr<Precision> precision = create_node<Precision>();
 
 	precision->precision = tokenizer.parse_token();
 	if(!is_precision_qualifier(precision->precision))
 		throw parse_error(tokenizer.get_location(), precision->precision, "a precision qualifier");
 
-	precision->type = tokenizer.parse_token();
-	// Not entirely accurate; only float, int and sampler types are allowed
-	if(!is_builtin_type(precision->type))
-		throw parse_error(tokenizer.get_location(), precision->type, "a builtin type");
+	// TODO Add validation for this
+	precision->type = expect_type();
 
 	tokenizer.expect(";");
@@ -370,7 +382,7 @@ RefPtr<Layout> Parser::parse_layout()
 {
 	tokenizer.expect("layout");
 	tokenizer.expect("(");
-	RefPtr<Layout> layout = new Layout;
+	RefPtr<Layout> layout = create_node<Layout>();
 	while(1)
 	{
 		string token = tokenizer.parse_token();
@@ -424,8 +436,9 @@ void Parser::parse_block(Block &block, bool require_braces, RefPtr<T> (Parser::*
 	tokenizer.expect("}");
 }
 
-RefPtr<Expression> Parser::parse_expression(unsigned precedence)
+RefPtr<Expression> Parser::parse_expression(const Operator *outer_oper)
 {
+	unsigned outer_precedence = (outer_oper ? outer_oper->precedence+(outer_oper->assoc==Operator::RIGHT_TO_LEFT) : 20);
 	RefPtr<Expression> left;
 	VariableReference *left_var = 0;
 	while(1)
@@ -437,7 +450,8 @@ RefPtr<Expression> Parser::parse_expression(unsigned precedence)
 			if(token==i->token && (!left || i->type!=Operator::PREFIX) && (left || i->type!=Operator::POSTFIX))
 				oper = i;
 
-		if(token==";" || token==")" || token=="]" || token=="," || (oper && precedence && oper->precedence>=precedence))
+		bool lower_precedence = (oper && oper->type!=Operator::PREFIX && oper->precedence>=outer_precedence);
+		if(token==";" || token==")" || token=="]" || token=="," || token==":" || lower_precedence)
 		{
 			if(left)
 				return left;
@@ -454,7 +468,7 @@ RefPtr<Expression> Parser::parse_expression(unsigned precedence)
 			}
 			else if(token==".")
 			{
-				RefPtr<MemberAccess> memacc = new MemberAccess;
+				RefPtr<MemberAccess> memacc = create_node<MemberAccess>();
 				memacc->left = left;
 				memacc->oper = oper;
 				tokenizer.parse_token();
@@ -463,7 +477,7 @@ RefPtr<Expression> Parser::parse_expression(unsigned precedence)
 			}
 			else if(oper && oper->type==Operator::POSTFIX)
 			{
-				RefPtr<UnaryExpression> unary = new UnaryExpression;
+				RefPtr<UnaryExpression> unary = create_node<UnaryExpression>();
 				unary->oper = oper;
 				tokenizer.parse_token();
 				unary->expression = left;
@@ -471,6 +485,8 @@ RefPtr<Expression> Parser::parse_expression(unsigned precedence)
 			}
 			else if(oper && oper->type==Operator::BINARY)
 				left = parse_binary(left, *oper);
+			else if(oper && oper->type==Operator::TERNARY)
+				left = parse_ternary(left, *oper);
 			else
 				throw parse_error(tokenizer.get_location(), token, "an operator");
 			left_var = 0;
@@ -480,30 +496,24 @@ RefPtr<Expression> Parser::parse_expression(unsigned precedence)
 			if(token=="(")
 			{
 				tokenizer.parse_token();
-				RefPtr<ParenthesizedExpression> parexpr = new ParenthesizedExpression;
-				parexpr->expression = parse_expression();
+				left = parse_expression();
 				tokenizer.expect(")");
-				left = parexpr;
 			}
 			else if(isdigit(token[0]) || token=="true" || token=="false")
-			{
-				RefPtr<Literal> literal = new Literal;
-				literal->token = tokenizer.parse_token();
-				left = literal;
-			}
+				left = parse_literal();
 			else if(is_identifier(token))
 			{
-				RefPtr<VariableReference> var = new VariableReference;
+				RefPtr<VariableReference> var = create_node<VariableReference>();
 				var->name = expect_identifier();
 				left = var;
 				left_var = var.get();
 			}
 			else if(oper && oper->type==Operator::PREFIX)
 			{
-				RefPtr<UnaryExpression> unary = new UnaryExpression;
+				RefPtr<UnaryExpression> unary = create_node<UnaryExpression>();
 				unary->oper = oper;
 				tokenizer.parse_token();
-				unary->expression = parse_expression(oper->precedence);
+				unary->expression = parse_expression(oper);
 				left = unary;
 			}
 			else
@@ -512,27 +522,59 @@ RefPtr<Expression> Parser::parse_expression(unsigned precedence)
 	}
 }
 
+RefPtr<Literal> Parser::parse_literal()
+{
+	RefPtr<Literal> literal = create_node<Literal>();
+	literal->token = tokenizer.parse_token();
+	if(isdigit(literal->token[0]))
+	{
+		// TODO have the tokenizer return the type of the token
+		if(isnumrc(literal->token))
+			literal->value = lexical_cast<int>(literal->token);
+		else
+			literal->value = lexical_cast<float>(literal->token);
+	}
+	else if(literal->token=="true" || literal->token=="false")
+		literal->value = (literal->token=="true");
+	else
+		throw parse_error(tokenizer.get_location(), literal->token, "a literal");
+
+	return literal;
+}
+
 RefPtr<BinaryExpression> Parser::parse_binary(const RefPtr<Expression> &left, const Operator &oper)
 {
-	RefPtr<BinaryExpression> binary = (oper.precedence==16 ? new Assignment : new BinaryExpression);
+	RefPtr<BinaryExpression> binary = (oper.precedence==16 ?
+		static_cast<RefPtr<BinaryExpression> >(create_node<Assignment>()) : create_node<BinaryExpression>());
 	binary->left = left;
 	binary->oper = &oper;
 	tokenizer.expect(oper.token);
-	if(oper.token[0]=='[')
+	if(oper.token2[0])
 	{
 		binary->right = parse_expression();
-		tokenizer.expect("]");
+		tokenizer.expect(oper.token2);
 	}
 	else
-		binary->right = parse_expression(oper.precedence+(oper.assoc==Operator::RIGHT_TO_LEFT));
+		binary->right = parse_expression(&oper);
 	return binary;
 }
 
+RefPtr<TernaryExpression> Parser::parse_ternary(const RefPtr<Expression> &cond, const Operator &oper)
+{
+	RefPtr<TernaryExpression> ternary = create_node<TernaryExpression>();
+	ternary->condition = cond;
+	ternary->oper = &oper;
+	tokenizer.expect("?");
+	ternary->true_expr = parse_expression(&oper);
+	tokenizer.expect(":");
+	ternary->false_expr = parse_expression(&oper);
+	return ternary;
+}
+
 RefPtr<FunctionCall> Parser::parse_function_call(const VariableReference &var)
 {
-	RefPtr<FunctionCall> call = new FunctionCall;
+	RefPtr<FunctionCall> call = create_node<FunctionCall>();
 	call->name = var.name;
-	call->constructor = is_type(call->name);
 	call->oper = &Operator::get_operator("(", Operator::POSTFIX);
 	tokenizer.expect("(");
 	while(tokenizer.peek_token()!=")")
@@ -545,12 +587,102 @@ RefPtr<FunctionCall> Parser::parse_function_call(const VariableReference &var)
 	return call;
 }
 
+RefPtr<TypeDeclaration> Parser::parse_type_declaration()
+{
+	tokenizer.expect("typedef");
+
+	RefPtr<TypeDeclaration> type;
+	if(tokenizer.peek_token()=="image")
+		type = parse_image_type_declaration();
+	else
+		type = parse_basic_type_declaration();
+
+	tokenizer.expect(";");
+	declared_types.insert(type->name);
+	return type;
+}
+
+RefPtr<BasicTypeDeclaration> Parser::parse_basic_type_declaration()
+{
+	RefPtr<BasicTypeDeclaration> type = create_node<BasicTypeDeclaration>();
+
+	if(tokenizer.peek_token()=="vector")
+	{
+		type->kind = BasicTypeDeclaration::VECTOR;
+
+		tokenizer.parse_token();
+		tokenizer.expect("(");
+		type->size = expect_integer();
+		tokenizer.expect(")");
+	}
+
+	type->base = expect_type();
+	type->name = expect_identifier();
+
+	if(type->kind==BasicTypeDeclaration::ALIAS && check("["))
+	{
+		type->kind = BasicTypeDeclaration::ARRAY;
+		tokenizer.expect("]");
+	}
+
+	return type;
+}
+
+RefPtr<ImageTypeDeclaration> Parser::parse_image_type_declaration()
+{
+	tokenizer.expect("image");
+	tokenizer.expect("(");
+
+	RefPtr<ImageTypeDeclaration> type = create_node<ImageTypeDeclaration>();
+	while(1)
+	{
+		string token = tokenizer.parse_token();
+		if(token=="dimensions")
+		{
+			tokenizer.expect("=");
+			token = tokenizer.parse_token();
+			if(token=="1")
+				type->dimensions = ImageTypeDeclaration::ONE;
+			else if(token=="2")
+				type->dimensions = ImageTypeDeclaration::TWO;
+			else if(token=="3")
+				type->dimensions = ImageTypeDeclaration::THREE;
+			else if(token=="cube")
+				type->dimensions = ImageTypeDeclaration::CUBE;
+			else
+				throw parse_error(tokenizer.get_location(), token, "dimensions");
+
+			if(check("["))
+			{
+				type->array = true;
+				tokenizer.expect("]");
+			}
+		}
+		else if(token=="sampled")
+			type->sampled = true;
+		else if(token=="shadow")
+			type->shadow = true;
+		else
+			throw parse_error(tokenizer.get_location(), token, "image type attribute");
+
+		token = tokenizer.peek_token();
+		if(token==")")
+			break;
+
+		tokenizer.expect(",");
+	}
+	tokenizer.expect(")");
+
+	type->base = expect_type();
+	type->name = expect_identifier();
+
+	return type;
+}
+
 RefPtr<StructDeclaration> Parser::parse_struct_declaration()
 {
 	tokenizer.expect("struct");
-	RefPtr<StructDeclaration> strct = new StructDeclaration;
-	strct->source = source_index;
-	strct->line = tokenizer.get_location().line;
+	RefPtr<StructDeclaration> strct = create_node<StructDeclaration>();
 	strct->name = expect_identifier();
 	parse_block(strct->members, true, &Parser::parse_variable_declaration);
@@ -562,9 +694,7 @@ RefPtr<VariableDeclaration> Parser::parse_variable_declaration()
 {
-	RefPtr<VariableDeclaration> var = new VariableDeclaration;
-	var->source = source_index;
-	var->line = tokenizer.get_location().line;
+	RefPtr<VariableDeclaration> var = create_node<VariableDeclaration>();
 
 	string token = tokenizer.peek_token();
 	while(is_qualifier(token))
@@ -617,10 +747,9 @@ RefPtr<VariableDeclaration> Parser::parse_variable_declaration_with_layout()
 
 RefPtr<FunctionDeclaration> Parser::parse_function_declaration()
 {
-	RefPtr<FunctionDeclaration> func = new FunctionDeclaration;
-	func->source = source_index;
-	func->line = tokenizer.get_location().line;
+	RefPtr<FunctionDeclaration> func = create_node<FunctionDeclaration>();
 
+	func->virtua = check("virtual");
 	func->return_type = expect_type();
 	func->name = expect_identifier();
 	tokenizer.expect("(");
@@ -629,7 +758,7 @@ RefPtr<FunctionDeclaration> Parser::parse_function_declaration()
 		if(!func->parameters.empty())
 			tokenizer.expect(",");
 
-		RefPtr<VariableDeclaration> var = new VariableDeclaration;
+		RefPtr<VariableDeclaration> var = create_node<VariableDeclaration>();
 		string token = tokenizer.peek_token();
 		if(token=="in" || token=="out" || token=="inout")
 			var->interface = tokenizer.parse_token();
@@ -639,6 +768,8 @@ RefPtr<FunctionDeclaration> Parser::parse_function_declaration()
 	}
 	tokenizer.expect(")");
 
+	func->overrd = check("override");
+
 	string token = tokenizer.peek_token();
 	if(token=="{")
 	{
@@ -655,16 +786,15 @@ RefPtr<FunctionDeclaration> Parser::parse_function_declaration()
 
 RefPtr<InterfaceBlock> Parser::parse_interface_block()
 {
-	RefPtr<InterfaceBlock> iface = new InterfaceBlock;
-	iface->source = source_index;
-	iface->line = tokenizer.get_location().line;
+	RefPtr<InterfaceBlock> iface = create_node<InterfaceBlock>();
 
 	iface->interface = tokenizer.parse_token();
 	if(!is_interface_qualifier(iface->interface))
 		throw parse_error(tokenizer.get_location(), iface->interface, "an interface qualifier");
 
-	iface->name = expect_identifier();
-	parse_block(iface->members, true, &Parser::parse_variable_declaration_with_layout);
+	iface->block_name = expect_identifier();
+	iface->members = new Block;
+	parse_block(*iface->members, true, &Parser::parse_variable_declaration_with_layout);
 	if(!check(";"))
 	{
 		iface->instance_name = expect_identifier();
@@ -682,9 +812,7 @@ RefPtr<Conditional> Parser::parse_conditional()
 {
 	tokenizer.expect("if");
-	RefPtr<Conditional> cond = new Conditional;
-	cond->source = source_index;
-	cond->line = tokenizer.get_location().line;
+	RefPtr<Conditional> cond = create_node<Conditional>();
 	tokenizer.expect("(");
 	cond->condition = parse_expression();
 	tokenizer.expect(")");
@@ -704,9 +832,7 @@ RefPtr<Iteration> Parser::parse_for()
 {
 	tokenizer.expect("for");
-	RefPtr<Iteration> loop = new Iteration;
-	loop->source = source_index;
-	loop->line = tokenizer.get_location().line;
+	RefPtr<Iteration> loop = create_node<Iteration>();
 	tokenizer.expect("(");
 	string token = tokenizer.peek_token();
 	if(is_type(token))
@@ -715,7 +841,7 @@ RefPtr<Iteration> Parser::parse_for()
 	{
 		if(token!=";")
 		{
-			RefPtr<ExpressionStatement> expr = new ExpressionStatement;
+			RefPtr<ExpressionStatement> expr = create_node<ExpressionStatement>();
 			expr->expression = parse_expression();
 			loop->init_statement = expr;
 		}
@@ -736,9 +862,7 @@ RefPtr<Iteration> Parser::parse_while()
 {
 	tokenizer.expect("while");
-	RefPtr<Iteration> loop = new Iteration;
-	loop->source = source_index;
-	loop->line = tokenizer.get_location().line;
+	RefPtr<Iteration> loop = create_node<Iteration>();
 	tokenizer.expect("(");
 	loop->condition = parse_expression();
 	tokenizer.expect(")");
@@ -751,9 +875,7 @@ RefPtr<Passthrough> Parser::parse_passthrough()
 {
 	tokenizer.expect("passthrough");
-	RefPtr<Passthrough> pass = new Passthrough;
-	pass->source = source_index;
-	pass->line = tokenizer.get_location().line;
+	RefPtr<Passthrough> pass = create_node<Passthrough>();
 	if(cur_stage->type==Stage::GEOMETRY)
 	{
 		tokenizer.expect("[");
@@ -767,9 +889,7 @@ RefPtr<Return> Parser::parse_return()
 {
 	tokenizer.expect("return");
-	RefPtr<Return> ret = new Return;
-	ret->source = source_index;
-	ret->line = tokenizer.get_location().line;
+	RefPtr<Return> ret = create_node<Return>();
 	if(tokenizer.peek_token()!=";")
 		ret->expression = parse_expression();
 	tokenizer.expect(";");