diff options
Diffstat (limited to 'spec')
-rw-r--r-- | spec/grammar.go    |  1 |
-rw-r--r-- | spec/lexer.go      | 13 |
-rw-r--r-- | spec/lexer_test.go |  6 |
-rw-r--r-- | spec/parser.go     | 27 |
4 files changed, 39 insertions, 8 deletions
diff --git a/spec/grammar.go b/spec/grammar.go index 7901957..42e6dc2 100644 --- a/spec/grammar.go +++ b/spec/grammar.go @@ -18,6 +18,7 @@ type Maleeni struct { KindToTerminal []int `json:"kind_to_terminal"` TerminalToKind []int `json:"terminal_to_kind"` Skip []int `json:"skip"` + KindAliases []string `json:"kind_aliases"` } type ParsingTable struct { diff --git a/spec/lexer.go b/spec/lexer.go index 258faae..d513d7d 100644 --- a/spec/lexer.go +++ b/spec/lexer.go @@ -21,11 +21,12 @@ const ( tokenKindKWFragment = tokenKind("fragment") tokenKindID = tokenKind("id") tokenKindTerminalPattern = tokenKind("terminal pattern") + tokenKindStringLiteral = tokenKind("string") tokenKindColon = tokenKind(":") tokenKindOr = tokenKind("|") tokenKindSemicolon = tokenKind(";") tokenKindDirectiveMarker = tokenKind("#") - tokenKindTreeNodeOpen = tokenKind("'(") + tokenKindTreeNodeOpen = tokenKind("#(") tokenKindTreeNodeClose = tokenKind(")") tokenKindPosition = tokenKind("$") tokenKindExpantion = tokenKind("...") @@ -77,6 +78,14 @@ func newTerminalPatternToken(text string, pos Position) *token { } } +func newStringLiteralToken(text string, pos Position) *token { + return &token{ + kind: tokenKindStringLiteral, + text: text, + pos: pos, + } +} + func newPositionToken(num int, pos Position) *token { return &token{ kind: tokenKindPosition, @@ -234,7 +243,7 @@ func (l *lexer) lexAndSkipWSs() (*token, error) { Col: tok.Col + 1, } } - return newTerminalPatternToken(mlspec.EscapePattern(pat), newPosition(tok.Row+1, tok.Col+1)), nil + return newStringLiteralToken(pat, newPosition(tok.Row+1, tok.Col+1)), nil case "colon": return newSymbolToken(tokenKindColon, newPosition(tok.Row+1, tok.Col+1)), nil case "or": diff --git a/spec/lexer_test.go b/spec/lexer_test.go index 51e5f59..c3540f6 100644 --- a/spec/lexer_test.go +++ b/spec/lexer_test.go @@ -16,6 +16,10 @@ func TestLexer_Run(t *testing.T) { return newTerminalPatternToken(text, newPosition(1, 0)) } + strTok := func(text string) *token { + 
return newStringLiteralToken(text, newPosition(1, 0)) + } + symTok := func(kind tokenKind) *token { return newSymbolToken(kind, newPosition(1, 0)) } @@ -40,7 +44,7 @@ func TestLexer_Run(t *testing.T) { tokens: []*token{ idTok("id"), termPatTok("terminal"), - termPatTok(`\.\*\+\?\|\(\)\[\\`), + strTok(`.*+?|()[\`), symTok(tokenKindColon), symTok(tokenKindOr), symTok(tokenKindSemicolon), diff --git a/spec/parser.go b/spec/parser.go index 53bf491..9778521 100644 --- a/spec/parser.go +++ b/spec/parser.go @@ -4,6 +4,7 @@ import ( "fmt" "io" + mlspec "github.com/nihei9/maleeni/spec" verr "github.com/nihei9/vartan/error" ) @@ -47,9 +48,10 @@ type DirectiveNode struct { } type ParameterNode struct { - ID string - Tree *TreeStructNode - Pos Position + ID string + String string + Tree *TreeStructNode + Pos Position } type TreeStructNode struct { @@ -262,10 +264,15 @@ func (p *parser) parseFragment() *FragmentNode { raiseSyntaxError(p.pos.Row, synErrNoColon) } - if !p.consume(tokenKindTerminalPattern) { + var rhs string + switch { + case p.consume(tokenKindTerminalPattern): + rhs = p.lastTok.text + case p.consume(tokenKindStringLiteral): + rhs = mlspec.EscapePattern(p.lastTok.text) + default: raiseSyntaxError(p.pos.Row, synErrFragmentNoPattern) } - rhs := p.lastTok.text p.consume(tokenKindNewline) @@ -398,6 +405,11 @@ func (p *parser) parseElement() *ElementNode { Pattern: p.lastTok.text, Pos: p.lastTok.pos, } + case p.consume(tokenKindStringLiteral): + return &ElementNode{ + Pattern: mlspec.EscapePattern(p.lastTok.text), + Pos: p.lastTok.pos, + } } return nil } @@ -436,6 +448,11 @@ func (p *parser) parseParameter() *ParameterNode { ID: p.lastTok.text, Pos: p.lastTok.pos, } + case p.consume(tokenKindStringLiteral): + return &ParameterNode{ + String: p.lastTok.text, + Pos: p.lastTok.pos, + } case p.consume(tokenKindTreeNodeOpen): if !p.consume(tokenKindID) { raiseSyntaxError(p.pos.Row, synErrTreeInvalidFirstElem) |