From f89d021bbe134e3efa0d015a41e9712960cdd009 Mon Sep 17 00:00:00 2001
From: Ryo Nihei
Date: Sun, 6 Nov 2022 21:31:46 +0900
Subject: Import source code of lexer generator

From: https://github.com/nihei9/maleeni
---
 driver/parser/token_stream.go | 61 +++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 61 insertions(+)
 create mode 100644 driver/parser/token_stream.go

diff --git a/driver/parser/token_stream.go b/driver/parser/token_stream.go
new file mode 100644
index 0000000..0bc9e32
--- /dev/null
+++ b/driver/parser/token_stream.go
@@ -0,0 +1,61 @@
+package parser
+