about summary refs log tree commit diff
path: root/driver
diff options
context:
space:
mode:
authorRyo Nihei <nihei.dev@gmail.com>2021-09-18 21:25:48 +0900
committerRyo Nihei <nihei.dev@gmail.com>2021-09-18 23:48:51 +0900
commit7be1d273429765907af0abad182666d77eb557e4 (patch)
tree1dcb0d68d8d65e4a8f847a6790eed5c7c0854401 /driver
parentGenerate constant values representing mode IDs, mode names, kind IDs, and kin... (diff)
downloadtre-7be1d273429765907af0abad182666d77eb557e4.tar.gz
tre-7be1d273429765907af0abad182666d77eb557e4.tar.xz
Add name field to the lexical specification
Diffstat (limited to 'driver')
-rw-r--r-- driver/lexer_test.go | 27
-rw-r--r-- driver/template.go   | 26
2 files changed, 43 insertions(+), 10 deletions(-)
diff --git a/driver/lexer_test.go b/driver/lexer_test.go
index a742bad..8af3817 100644
--- a/driver/lexer_test.go
+++ b/driver/lexer_test.go
@@ -103,6 +103,7 @@ func TestLexer_Next(t *testing.T) {
}{
{
lspec: &spec.LexSpec{
+ Name: "test",
Entries: []*spec.LexEntry{
newLexEntryDefaultNOP("t1", "(a|b)*abb"),
newLexEntryDefaultNOP("t2", " +"),
@@ -126,6 +127,7 @@ func TestLexer_Next(t *testing.T) {
},
{
lspec: &spec.LexSpec{
+ Name: "test",
Entries: []*spec.LexEntry{
newLexEntryDefaultNOP("t1", "b?a+"),
newLexEntryDefaultNOP("t2", "(ab)?(cd)+"),
@@ -154,6 +156,7 @@ func TestLexer_Next(t *testing.T) {
},
{
lspec: &spec.LexSpec{
+ Name: "test",
Entries: []*spec.LexEntry{
newLexEntryDefaultNOP("t1", "."),
},
@@ -198,6 +201,7 @@ func TestLexer_Next(t *testing.T) {
},
{
lspec: &spec.LexSpec{
+ Name: "test",
Entries: []*spec.LexEntry{
newLexEntryDefaultNOP("t1", "[ab.*+?|()[\\]]"),
},
@@ -220,6 +224,7 @@ func TestLexer_Next(t *testing.T) {
},
{
lspec: &spec.LexSpec{
+ Name: "test",
Entries: []*spec.LexEntry{
// all 1 byte characters except null character (U+0000)
//
@@ -246,6 +251,7 @@ func TestLexer_Next(t *testing.T) {
},
{
lspec: &spec.LexSpec{
+ Name: "test",
Entries: []*spec.LexEntry{
// all 2 byte characters
newLexEntryDefaultNOP("char2Byte", "[\xc2\x80-\xdf\xbf]"),
@@ -267,6 +273,7 @@ func TestLexer_Next(t *testing.T) {
},
{
lspec: &spec.LexSpec{
+ Name: "test",
Entries: []*spec.LexEntry{
// All bytes are the same.
newLexEntryDefaultNOP("char3Byte", "[\xe0\xa0\x80-\xe0\xa0\x80]"),
@@ -282,6 +289,7 @@ func TestLexer_Next(t *testing.T) {
},
{
lspec: &spec.LexSpec{
+ Name: "test",
Entries: []*spec.LexEntry{
// The first two bytes are the same.
newLexEntryDefaultNOP("char3Byte", "[\xe0\xa0\x80-\xe0\xa0\xbf]"),
@@ -303,6 +311,7 @@ func TestLexer_Next(t *testing.T) {
},
{
lspec: &spec.LexSpec{
+ Name: "test",
Entries: []*spec.LexEntry{
// The first byte are the same.
newLexEntryDefaultNOP("char3Byte", "[\xe0\xa0\x80-\xe0\xbf\xbf]"),
@@ -324,6 +333,7 @@ func TestLexer_Next(t *testing.T) {
},
{
lspec: &spec.LexSpec{
+ Name: "test",
Entries: []*spec.LexEntry{
// all 3 byte characters
newLexEntryDefaultNOP("char3Byte", "[\xe0\xa0\x80-\xef\xbf\xbf]"),
@@ -369,6 +379,7 @@ func TestLexer_Next(t *testing.T) {
},
{
lspec: &spec.LexSpec{
+ Name: "test",
Entries: []*spec.LexEntry{
// All bytes are the same.
newLexEntryDefaultNOP("char4Byte", "[\xf0\x90\x80\x80-\xf0\x90\x80\x80]"),
@@ -384,6 +395,7 @@ func TestLexer_Next(t *testing.T) {
},
{
lspec: &spec.LexSpec{
+ Name: "test",
Entries: []*spec.LexEntry{
// The first 3 bytes are the same.
newLexEntryDefaultNOP("char4Byte", "[\xf0\x90\x80\x80-\xf0\x90\x80\xbf]"),
@@ -405,6 +417,7 @@ func TestLexer_Next(t *testing.T) {
},
{
lspec: &spec.LexSpec{
+ Name: "test",
Entries: []*spec.LexEntry{
// The first 2 bytes are the same.
newLexEntryDefaultNOP("char4Byte", "[\xf0\x90\x80\x80-\xf0\x90\xbf\xbf]"),
@@ -426,6 +439,7 @@ func TestLexer_Next(t *testing.T) {
},
{
lspec: &spec.LexSpec{
+ Name: "test",
Entries: []*spec.LexEntry{
// The first byte are the same.
newLexEntryDefaultNOP("char4Byte", "[\xf0\x90\x80\x80-\xf0\xbf\xbf\xbf]"),
@@ -447,6 +461,7 @@ func TestLexer_Next(t *testing.T) {
},
{
lspec: &spec.LexSpec{
+ Name: "test",
Entries: []*spec.LexEntry{
// all 4 byte characters
newLexEntryDefaultNOP("char4Byte", "[\xf0\x90\x80\x80-\xf4\x8f\xbf\xbf]"),
@@ -484,6 +499,7 @@ func TestLexer_Next(t *testing.T) {
},
{
lspec: &spec.LexSpec{
+ Name: "test",
Entries: []*spec.LexEntry{
newLexEntryDefaultNOP("NonNumber", "[^0-9]+[0-9]"),
},
@@ -496,6 +512,7 @@ func TestLexer_Next(t *testing.T) {
},
{
lspec: &spec.LexSpec{
+ Name: "test",
Entries: []*spec.LexEntry{
newLexEntryDefaultNOP("char1Byte", "\\u{006E}"),
newLexEntryDefaultNOP("char2Byte", "\\u{03BD}"),
@@ -514,6 +531,7 @@ func TestLexer_Next(t *testing.T) {
},
{
lspec: &spec.LexSpec{
+ Name: "test",
Entries: []*spec.LexEntry{
newLexEntryDefaultNOP("codePointsAlt", "[\\u{006E}\\u{03BD}\\u{306B}\\u{01F638}]"),
},
@@ -529,6 +547,7 @@ func TestLexer_Next(t *testing.T) {
},
{
lspec: &spec.LexSpec{
+ Name: "test",
Entries: []*spec.LexEntry{
newLexEntryDefaultNOP("t1", "\\f{a2c}\\f{d2f}+"),
newLexEntryFragment("a2c", "abc"),
@@ -544,6 +563,7 @@ func TestLexer_Next(t *testing.T) {
},
{
lspec: &spec.LexSpec{
+ Name: "test",
Entries: []*spec.LexEntry{
newLexEntryDefaultNOP("t1", "(\\f{a2c}|\\f{d2f})+"),
newLexEntryFragment("a2c", "abc"),
@@ -558,6 +578,7 @@ func TestLexer_Next(t *testing.T) {
},
{
lspec: &spec.LexSpec{
+ Name: "test",
Entries: []*spec.LexEntry{
newLexEntryDefaultNOP("t1", "\\f{a2c_or_d2f}+"),
newLexEntryFragment("a2c_or_d2f", "\\f{a2c}|\\f{d2f}"),
@@ -573,6 +594,7 @@ func TestLexer_Next(t *testing.T) {
},
{
lspec: &spec.LexSpec{
+ Name: "test",
Entries: []*spec.LexEntry{
newLexEntryDefaultNOP("white_space", ` *`),
newLexEntry([]string{"default"}, "string_open", `"`, "string", false),
@@ -598,6 +620,7 @@ func TestLexer_Next(t *testing.T) {
},
{
lspec: &spec.LexSpec{
+ Name: "test",
Entries: []*spec.LexEntry{
// `white_space` is enabled in multiple modes.
newLexEntry([]string{"default", "state_a", "state_b"}, "white_space", ` *`, "", false),
@@ -623,6 +646,7 @@ func TestLexer_Next(t *testing.T) {
},
{
lspec: &spec.LexSpec{
+ Name: "test",
Entries: []*spec.LexEntry{
newLexEntry([]string{"default", "mode_1", "mode_2"}, "white_space", ` *`, "", false),
newLexEntry([]string{"default"}, "char", `.`, "", false),
@@ -671,6 +695,7 @@ func TestLexer_Next(t *testing.T) {
},
{
lspec: &spec.LexSpec{
+ Name: "test",
Entries: []*spec.LexEntry{
newLexEntry([]string{"default", "mode_1", "mode_2"}, "white_space", ` *`, "", false),
newLexEntry([]string{"default"}, "char", `.`, "", false),
@@ -710,6 +735,7 @@ func TestLexer_Next(t *testing.T) {
},
{
lspec: &spec.LexSpec{
+ Name: "test",
Entries: []*spec.LexEntry{
newLexEntryDefaultNOP("dot", spec.EscapePattern(`.`)),
newLexEntryDefaultNOP("star", spec.EscapePattern(`*`)),
@@ -778,6 +804,7 @@ func TestLexer_Next(t *testing.T) {
func TestLexer_Next_WithPosition(t *testing.T) {
lspec := &spec.LexSpec{
+ Name: "test",
Entries: []*spec.LexEntry{
newLexEntryDefaultNOP("newline", `\u{000A}+`),
newLexEntryDefaultNOP("any", `.`),
diff --git a/driver/template.go b/driver/template.go
index 2772135..d2772ae 100644
--- a/driver/template.go
+++ b/driver/template.go
@@ -1,13 +1,13 @@
package driver
import (
+ "bytes"
_ "embed"
"fmt"
"go/ast"
"go/format"
"go/parser"
"go/token"
- "os"
"strings"
"text/template"
@@ -17,19 +17,19 @@ import (
//go:embed lexer.go
var lexerCoreSrc string
-func GenLexer(clspec *spec.CompiledLexSpec, pkgName string) error {
+func GenLexer(clspec *spec.CompiledLexSpec, pkgName string) ([]byte, error) {
var lexerSrc string
{
fset := token.NewFileSet()
f, err := parser.ParseFile(fset, "lexer.go", lexerCoreSrc, parser.ParseComments)
if err != nil {
- return err
+ return nil, err
}
var b strings.Builder
err = format.Node(&b, fset, f)
if err != nil {
- return err
+ return nil, err
}
lexerSrc = b.String()
@@ -100,7 +100,7 @@ func GenLexer(clspec *spec.CompiledLexSpec, pkgName string) error {
{
t, err := template.New("").Funcs(genTemplateFuncs(clspec)).Parse(lexSpecTemplate)
if err != nil {
- return err
+ return nil, err
}
var b strings.Builder
@@ -112,7 +112,7 @@ func GenLexer(clspec *spec.CompiledLexSpec, pkgName string) error {
"compressionLevel": clspec.CompressionLevel,
})
if err != nil {
- return err
+ return nil, err
}
specSrc = b.String()
@@ -136,7 +136,7 @@ func GenLexer(clspec *spec.CompiledLexSpec, pkgName string) error {
t, err := template.New("").Parse(tmpl)
if err != nil {
- return err
+ return nil, err
}
var b strings.Builder
@@ -149,7 +149,7 @@ func GenLexer(clspec *spec.CompiledLexSpec, pkgName string) error {
"specSrc": specSrc,
})
if err != nil {
- return err
+ return nil, err
}
src = b.String()
@@ -158,12 +158,18 @@ func GenLexer(clspec *spec.CompiledLexSpec, pkgName string) error {
fset := token.NewFileSet()
f, err := parser.ParseFile(fset, "", src, parser.ParseComments)
if err != nil {
- return err
+ return nil, err
}
f.Name = ast.NewIdent(pkgName)
- return format.Node(os.Stdout, fset, f)
+ var b bytes.Buffer
+ err = format.Node(&b, fset, f)
+ if err != nil {
+ return nil, err
+ }
+
+ return b.Bytes(), nil
}
const lexSpecTemplate = `