package gott

import (
	"fmt"
	"strconv"
)

// Parser parses a token stream into an AST
type Parser struct {
	lexer     *Lexer
	token     Token   // current token
	peekToken Token   // lookahead token
	errors    []error // accumulated parse errors
}

// NewParser creates a new parser for the given input
func NewParser(input string) *Parser {
	p := &Parser{
		lexer: NewLexer(input),
	}
	// Load first two tokens
	p.advance()
	p.advance()
	return p
}

// advance moves to the next token
func (p *Parser) advance() {
	p.token = p.peekToken
	p.peekToken = p.lexer.NextToken()
}

// expect checks that the current token is of the expected type and advances
func (p *Parser) expect(t TokenType) bool {
	if p.token.Type != t {
		p.errorf("expected %s, got %s", t, p.token.Type)
		return false
	}
	p.advance()
	return true
}

// errorf records a parse error at the current token's position
func (p *Parser) errorf(format string, args ...any) {
	err := &ParseError{
		Pos:     p.token.Pos,
		Message: fmt.Sprintf(format, args...),
	}
	p.errors = append(p.errors, err)
}

// Parse parses the input and returns the AST
func (p *Parser) Parse() (*Template, []error) {
	t := &Template{
		Position: Position{Line: 1, Column: 1, Offset: 0},
		Nodes:    []Node{},
	}
	for p.token.Type != TokenEOF && p.token.Type != TokenError {
		node := p.parseNode()
		if node != nil {
			t.Nodes = append(t.Nodes, node)
		}
	}
	if p.token.Type == TokenError {
		p.errors = append(p.errors, &ParseError{
			Pos:     p.token.Pos,
			Message: p.token.Value,
		})
	}
	return t, p.errors
}

// parseNode parses a single node (text or tag)
func (p *Parser) parseNode() Node {
	switch p.token.Type {
	case TokenText:
		return p.parseText()
	case TokenTagOpen:
		return p.parseTag()
	default:
		p.errorf("unexpected token: %s", p.token.Type)
		p.advance()
		return nil
	}
}

// parseText parses a text node
func (p *Parser) parseText() Node {
	node := &TextNode{
		Position: p.token.Pos,
		Text:     p.token.Value,
	}
	p.advance()
	return node
}
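// Illustrative usage sketch (not part of the original file; the Template and
// node types are assumed to be defined elsewhere in this package, as they are
// used above):
//
//	p := NewParser("Hello, [% user.name %]!")
//	tmpl, errs := p.Parse()
//	// For this input, tmpl.Nodes would hold a *TextNode ("Hello, "),
//	// an *OutputStmt for the user.name expression, and a *TextNode ("!").
//	_ = tmpl
//	_ = errs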
// parseTag parses a [% ... %] tag
func (p *Parser) parseTag() Node {
	p.expect(TokenTagOpen)
	switch p.token.Type {
	case TokenIF:
		return p.parseIf()
	case TokenUNLESS:
		return p.parseUnless()
	case TokenFOREACH:
		return p.parseForeach()
	case TokenBLOCK:
		return p.parseBlock()
	case TokenINCLUDE:
		return p.parseInclude()
	case TokenWRAPPER:
		return p.parseWrapper()
	case TokenSET:
		return p.parseSet()
	case TokenTRY:
		return p.parseTry()
	default:
		// Expression output: [% expr %]
		return p.parseOutput()
	}
}

// parseIf parses an IF statement with optional ELSIF and ELSE
func (p *Parser) parseIf() *IfStmt {
	pos := p.token.Pos
	p.expect(TokenIF)
	cond := p.parseExpr()
	if !p.expect(TokenTagClose) {
		return nil
	}
	stmt := &IfStmt{
		Position:  pos,
		Condition: cond,
	}

	// Parse body until ELSIF, ELSE, or END
	stmt.Body = p.parseBody(TokenELSIF, TokenELSE, TokenEND)

	// Parse ELSIF chain
	for p.token.Type == TokenTagOpen && p.peekToken.Type == TokenELSIF {
		p.expect(TokenTagOpen)
		elsifPos := p.token.Pos
		p.expect(TokenELSIF)
		elsifCond := p.parseExpr()
		if !p.expect(TokenTagClose) {
			return nil
		}
		elsifBody := p.parseBody(TokenELSIF, TokenELSE, TokenEND)
		stmt.ElsIf = append(stmt.ElsIf, &ElsIfClause{
			Position:  elsifPos,
			Condition: elsifCond,
			Body:      elsifBody,
		})
	}

	// Parse optional ELSE
	if p.token.Type == TokenTagOpen && p.peekToken.Type == TokenELSE {
		p.expect(TokenTagOpen)
		p.expect(TokenELSE)
		p.expect(TokenTagClose)
		stmt.Else = p.parseBody(TokenEND)
	}

	// Expect END
	p.expectEndTag()
	return stmt
}

// parseUnless parses an UNLESS statement
func (p *Parser) parseUnless() *UnlessStmt {
	pos := p.token.Pos
	p.expect(TokenUNLESS)
	cond := p.parseExpr()
	if !p.expect(TokenTagClose) {
		return nil
	}
	stmt := &UnlessStmt{
		Position:  pos,
		Condition: cond,
	}
	stmt.Body = p.parseBody(TokenELSE, TokenEND)

	// Parse optional ELSE
	if p.token.Type == TokenTagOpen && p.peekToken.Type == TokenELSE {
		p.expect(TokenTagOpen)
		p.expect(TokenELSE)
		p.expect(TokenTagClose)
		stmt.Else = p.parseBody(TokenEND)
	}

	p.expectEndTag()
	return stmt
}

// parseForeach parses a FOREACH loop
func (p *Parser) parseForeach() *ForeachStmt {
	pos := p.token.Pos
	p.expect(TokenFOREACH)
	if p.token.Type != TokenIdent {
		p.errorf("expected identifier, got %s", p.token.Type)
		return nil
	}
	itemVar := p.token.Value
	p.advance()
	if !p.expect(TokenIN) {
		return nil
	}
	listExpr := p.parseExpr()
	if !p.expect(TokenTagClose) {
		return nil
	}
	body := p.parseBody(TokenEND)
	p.expectEndTag()
	return &ForeachStmt{
		Position: pos,
		ItemVar:  itemVar,
		ListExpr: listExpr,
		Body:     body,
	}
}

// parseBlock parses a BLOCK definition
func (p *Parser) parseBlock() *BlockStmt {
	pos := p.token.Pos
	p.expect(TokenBLOCK)
	if p.token.Type != TokenIdent {
		p.errorf("expected block name, got %s", p.token.Type)
		return nil
	}
	name := p.token.Value
	p.advance()
	if !p.expect(TokenTagClose) {
		return nil
	}
	body := p.parseBody(TokenEND)
	p.expectEndTag()
	return &BlockStmt{
		Position: pos,
		Name:     name,
		Body:     body,
	}
}

// parseInclude parses an INCLUDE directive
// Supports both static paths and dynamic paths with $variable interpolation:
//
//	[% INCLUDE templates/header.html %]
//	[% INCLUDE templates/$category/page.html %]
func (p *Parser) parseInclude() *IncludeStmt {
	pos := p.token.Pos
	p.expect(TokenINCLUDE)
	name, pathParts := p.parsePath()
	p.expect(TokenTagClose)
	return &IncludeStmt{
		Position:  pos,
		Name:      name,
		PathParts: pathParts,
	}
}
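// Illustrative sketch (assumption, not from the original source): the two
// result shapes parsePath (below) produces for INCLUDE/WRAPPER paths.
//
//	templates/header.html          -> ("templates/header.html", nil)
//	templates/$category/page.html  -> ("", []PathPart{
//	        {IsVariable: false, Value: "templates/"},
//	        {IsVariable: true, Parts: []string{"category"}},
//	        {IsVariable: false, Value: "/"},
//	        {IsVariable: false, Value: "page"},
//	        {IsVariable: false, Value: "."},
//	        {IsVariable: false, Value: "html"},
//	})
//
// Note that once a $variable is seen, later literal segments are appended as
// individual parts rather than merged into one string.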
// parsePath parses a path that may contain $variable interpolations.
// Returns (staticPath, nil) for static paths, or ("", pathParts) for dynamic paths.
func (p *Parser) parsePath() (string, []PathPart) {
	var pathParts []PathPart
	var staticPath string
	hasDynamic := false

	// Handle quoted string paths (which can still contain variables in our syntax)
	if p.token.Type == TokenString {
		// For now, string literals are static-only (could be extended)
		staticPath = p.token.Value
		p.advance()
		return staticPath, nil
	}

	// Parse path components: identifiers, /, ., and $variables
	for {
		switch p.token.Type {
		case TokenIdent:
			// Literal path segment
			if hasDynamic {
				pathParts = append(pathParts, PathPart{
					IsVariable: false,
					Value:      p.token.Value,
				})
			} else {
				staticPath += p.token.Value
			}
			p.advance()
		case TokenDiv:
			// Path separator /
			if hasDynamic {
				pathParts = append(pathParts, PathPart{
					IsVariable: false,
					Value:      "/",
				})
			} else {
				staticPath += "/"
			}
			p.advance()
		case TokenDot:
			// File extension separator .
			if hasDynamic {
				pathParts = append(pathParts, PathPart{
					IsVariable: false,
					Value:      ".",
				})
			} else {
				staticPath += "."
			}
			p.advance()
		case TokenDollar:
			// Variable interpolation: $varname or $var.name
			hasDynamic = true
			p.advance()

			// Convert any accumulated static path to pathParts
			if staticPath != "" {
				pathParts = append(pathParts, PathPart{
					IsVariable: false,
					Value:      staticPath,
				})
				staticPath = ""
			}

			// Parse variable name with optional dot notation
			if p.token.Type != TokenIdent {
				p.errorf("expected variable name after $, got %s", p.token.Type)
				return "", pathParts
			}
			varParts := []string{p.token.Value}
			p.advance()

			// Check for dot notation: $user.name.value
			for p.token.Type == TokenDot && p.peekToken.Type == TokenIdent {
				p.advance() // consume dot
				varParts = append(varParts, p.token.Value)
				p.advance() // consume ident
			}

			pathParts = append(pathParts, PathPart{
				IsVariable: true,
				Parts:      varParts,
			})
		default:
			// End of path
			if hasDynamic {
				return "", pathParts
			}
			return staticPath, nil
		}
	}
}

// parseWrapper parses a WRAPPER directive
// Supports both static paths and dynamic paths with $variable interpolation:
//
//	[% WRAPPER layouts/main.html %]content[% END %]
//	[% WRAPPER layouts/$theme/main.html %]content[% END %]
func (p *Parser) parseWrapper() *WrapperStmt {
	pos := p.token.Pos
	p.expect(TokenWRAPPER)
	name, pathParts := p.parsePath()
	if !p.expect(TokenTagClose) {
		return nil
	}
	content := p.parseBody(TokenEND)
	p.expectEndTag()
	return &WrapperStmt{
		Position:  pos,
		Name:      name,
		PathParts: pathParts,
		Content:   content,
	}
}

// parseSet parses a SET directive
func (p *Parser) parseSet() *SetStmt {
	pos := p.token.Pos
	p.expect(TokenSET)
	if p.token.Type != TokenIdent {
		p.errorf("expected variable name, got %s", p.token.Type)
		return nil
	}
	varName := p.token.Value
	p.advance()
	if !p.expect(TokenAssign) {
		return nil
	}
	value := p.parseExpr()
	p.expect(TokenTagClose)
	return &SetStmt{
		Position: pos,
		Var:      varName,
		Value:    value,
	}
}

// parseTry parses a TRY/CATCH block
func (p *Parser) parseTry() *TryStmt {
	pos := p.token.Pos
	p.expect(TokenTRY)
	p.expect(TokenTagClose)
	stmt := &TryStmt{
		Position: pos,
	}

	// Parse try body until CATCH or END
	stmt.Try = p.parseBody(TokenCATCH, TokenEND)

	// Parse optional CATCH
	if p.token.Type == TokenTagOpen && p.peekToken.Type == TokenCATCH {
		p.expect(TokenTagOpen)
		p.expect(TokenCATCH)
		p.expect(TokenTagClose)
		stmt.Catch = p.parseBody(TokenEND)
	}

	// Expect END
	p.expectEndTag()
	return stmt
}

// parseOutput parses an expression output: [% expr %]
func (p *Parser) parseOutput() *OutputStmt {
	pos := p.token.Pos
	expr := p.parseExpr()
	p.expect(TokenTagClose)
	return &OutputStmt{
		Position: pos,
		Expr:     expr,
	}
}
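// Illustrative sketch (assumption): how the statement parsers above compose.
// For
//
//	[% IF user %]Hi [% user.name %][% ELSE %]Hi guest[% END %]
//
// parseIf yields an *IfStmt whose Body is []Node{*TextNode, *OutputStmt} and
// whose Else is []Node{*TextNode}; parseBody (below) is what stops each body
// at the [% ELSE %] / [% END %] lookahead.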
// parseBody parses nodes until one of the stop tokens is seen as the next keyword
func (p *Parser) parseBody(stopTokens ...TokenType) []Node {
	var nodes []Node
	for {
		// Check for EOF
		if p.token.Type == TokenEOF {
			break
		}
		// Check if next tag starts with a stop token
		if p.token.Type == TokenTagOpen {
			for _, stop := range stopTokens {
				if p.peekToken.Type == stop {
					return nodes
				}
			}
		}
		node := p.parseNode()
		if node != nil {
			nodes = append(nodes, node)
		}
	}
	return nodes
}

// expectEndTag expects [% END %]
func (p *Parser) expectEndTag() {
	if p.token.Type == TokenTagOpen && p.peekToken.Type == TokenEND {
		p.expect(TokenTagOpen)
		p.expect(TokenEND)
		p.expect(TokenTagClose)
	} else {
		p.errorf("expected [%% END %%], got %s", p.token.Type)
	}
}

// ---- Expression Parsing (with precedence) ----

// parseExpr is the entry point for expression parsing
func (p *Parser) parseExpr() Expr {
	return p.parseOr()
}

// parseOr handles || both as logical OR and as a default-value operator.
// When || is followed by a literal and the left side is an identifier or
// filter expression, it is treated as a default value.
func (p *Parser) parseOr() Expr {
	left := p.parseAnd()
	for p.token.Type == TokenOr {
		pos := p.token.Pos
		p.advance()
		right := p.parseAnd()
		// Check if this looks like a default value expression:
		// left is identifier/filter, right is a literal
		if isDefaultCandidate(left) && isLiteralExpr(right) {
			left = &DefaultExpr{
				Position: pos,
				Expr:     left,
				Default:  right,
			}
		} else {
			left = &BinaryExpr{
				Position: pos,
				Op:       TokenOr,
				Left:     left,
				Right:    right,
			}
		}
	}
	return left
}

// isDefaultCandidate returns true if the expression can have a default value
func isDefaultCandidate(e Expr) bool {
	switch e.(type) {
	case *IdentExpr, *FilterExpr:
		return true
	}
	return false
}

// isLiteralExpr returns true if the expression is a literal
func isLiteralExpr(e Expr) bool {
	_, ok := e.(*LiteralExpr)
	return ok
}

// parseAnd handles && (logical AND)
func (p *Parser) parseAnd() Expr {
	left := p.parseComparison()
	for p.token.Type == TokenAnd {
		op := p.token.Type
		pos := p.token.Pos
		p.advance()
		right := p.parseComparison()
		left = &BinaryExpr{
			Position: pos,
			Op:       op,
			Left:     left,
			Right:    right,
		}
	}
	return left
}

// parseComparison handles ==, !=, <, <=, >, >=
func (p *Parser) parseComparison() Expr {
	left := p.parseAdditive()
	if isComparisonOp(p.token.Type) {
		op := p.token.Type
		pos := p.token.Pos
		p.advance()
		right := p.parseAdditive()
		return &BinaryExpr{
			Position: pos,
			Op:       op,
			Left:     left,
			Right:    right,
		}
	}
	return left
}

// parseAdditive handles + and -
func (p *Parser) parseAdditive() Expr {
	left := p.parseMultiplicative()
	for p.token.Type == TokenPlus || p.token.Type == TokenMinus {
		op := p.token.Type
		pos := p.token.Pos
		p.advance()
		right := p.parseMultiplicative()
		left = &BinaryExpr{
			Position: pos,
			Op:       op,
			Left:     left,
			Right:    right,
		}
	}
	return left
}

// parseMultiplicative handles *, /, %
func (p *Parser) parseMultiplicative() Expr {
	left := p.parseUnary()
	for p.token.Type == TokenMul || p.token.Type == TokenDiv || p.token.Type == TokenMod {
		op := p.token.Type
		pos := p.token.Pos
		p.advance()
		right := p.parseUnary()
		left = &BinaryExpr{
			Position: pos,
			Op:       op,
			Left:     left,
			Right:    right,
		}
	}
	return left
}

// parseUnary handles unary - (negation)
func (p *Parser) parseUnary() Expr {
	if p.token.Type == TokenMinus {
		pos := p.token.Pos
		p.advance()
		return &UnaryExpr{
			Position: pos,
			Op:       TokenMinus,
			X:        p.parseUnary(),
		}
	}
	return p.parsePrimary()
}
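// Illustrative sketch (assumption): the precedence ladder implemented above,
// from lowest to highest binding: || (parseOr), && (parseAnd), comparisons,
// + and -, then * / %, unary -, and finally primaries. So
//
//	a + b * 2 > limit && active
//
// parses as ((a + (b * 2)) > limit) && active, each operator producing a
// *BinaryExpr (or a *DefaultExpr in the || default-value case).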
// parsePrimary handles literals, identifiers, function calls, and parentheses
func (p *Parser) parsePrimary() Expr {
	switch p.token.Type {
	case TokenNumber:
		val, _ := strconv.ParseFloat(p.token.Value, 64)
		expr := &LiteralExpr{
			Position: p.token.Pos,
			Value:    val,
		}
		p.advance()
		return expr
	case TokenString:
		expr := &LiteralExpr{
			Position: p.token.Pos,
			Value:    p.token.Value,
		}
		p.advance()
		return expr
	case TokenIdent:
		return p.parseIdentOrCall()
	case TokenLParen:
		p.advance()
		expr := p.parseExpr()
		p.expect(TokenRParen)
		return expr
	}
	p.errorf("unexpected token in expression: %s", p.token.Type)
	return &LiteralExpr{Position: p.token.Pos, Value: ""}
}

// parseIdentOrCall parses an identifier, possibly with dots, function calls, or filters
func (p *Parser) parseIdentOrCall() Expr {
	pos := p.token.Pos

	// Collect dot-separated parts: foo.bar.baz
	parts := []string{p.token.Value}
	p.advance()
	for p.token.Type == TokenDot {
		p.advance()
		if p.token.Type != TokenIdent {
			p.errorf("expected identifier after '.', got %s", p.token.Type)
			break
		}
		parts = append(parts, p.token.Value)
		p.advance()
	}

	var expr Expr = &IdentExpr{
		Position: pos,
		Parts:    parts,
	}

	// Check for function call: func(args)
	if p.token.Type == TokenLParen && len(parts) == 1 {
		p.advance()
		args := p.parseArgList()
		p.expect(TokenRParen)
		expr = &CallExpr{
			Position: pos,
			Func:     parts[0],
			Args:     args,
		}
	}

	// Check for filter chain: expr | filter | filter(args)
	for p.token.Type == TokenPipe {
		p.advance()
		if p.token.Type != TokenIdent {
			p.errorf("expected filter name after '|', got %s", p.token.Type)
			break
		}
		filterName := p.token.Value
		filterPos := p.token.Pos
		p.advance()
		var filterArgs []Expr
		if p.token.Type == TokenLParen {
			p.advance()
			filterArgs = p.parseArgList()
			p.expect(TokenRParen)
		}
		expr = &FilterExpr{
			Position: filterPos,
			Input:    expr,
			Filter:   filterName,
			Args:     filterArgs,
		}
	}

	return expr
}

// parseArgList parses a comma-separated list of expressions
func (p *Parser) parseArgList() []Expr {
	var args []Expr
	if p.token.Type == TokenRParen {
		return args
	}
	args = append(args, p.parseExpr())
	for p.token.Type == TokenComma {
		p.advance()
		args = append(args, p.parseExpr())
	}
	return args
}

// isComparisonOp returns true if the token is a comparison operator
func isComparisonOp(t TokenType) bool {
	switch t {
	case TokenEq, TokenNe, TokenLt, TokenLe, TokenGt, TokenGe:
		return true
	}
	return false
}
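// Illustrative sketch (assumption; in a real repo this would live in a
// _test.go file): expression forms handled by parseOr, parsePrimary, and
// parseIdentOrCall above.
//
//	user.name | upper | truncate(10)  -> FilterExpr(truncate, FilterExpr(upper, IdentExpr{user, name}))
//	title || "Untitled"               -> DefaultExpr{Expr: title, Default: "Untitled"}
//	count + 1 || 0                    -> BinaryExpr with Op TokenOr, because the
//	                                     left side is not an identifier or filter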