about summary refs log tree commit diff
path: root/driver/lexer_test.go
diff options
context:
space:
mode:
author Ryo Nihei <nihei.dev@gmail.com> 2021-05-25 21:55:17 +0900
committer Ryo Nihei <nihei.dev@gmail.com> 2021-05-25 21:57:45 +0900
commit 520bf02582be7ab36b17fd78f8931cfdb702b07f (patch)
tree a1e7ad54915152fce6f96a18312e28f34f256c84 /driver/lexer_test.go
parent Fix the initial state number (diff)
download tre-520bf02582be7ab36b17fd78f8931cfdb702b07f.tar.gz
tre-520bf02582be7ab36b17fd78f8931cfdb702b07f.tar.xz
Add fragment expression
A fragment entry is defined by an entry whose `fragment` field is `true`, and is referenced by a fragment expression (`\f{...}`).
Diffstat (limited to 'driver/lexer_test.go')
-rw-r--r-- driver/lexer_test.go | 52
1 file changed, 52 insertions, 0 deletions
diff --git a/driver/lexer_test.go b/driver/lexer_test.go
index 1d0e887..87a381c 100644
--- a/driver/lexer_test.go
+++ b/driver/lexer_test.go
@@ -34,6 +34,14 @@ func newLexEntryDefaultNOP(kind string, pattern string) *spec.LexEntry {
}
}
+func newLexEntryFragment(kind string, pattern string) *spec.LexEntry {
+ return &spec.LexEntry{
+ Kind: spec.LexKind(kind),
+ Pattern: spec.LexPattern(pattern),
+ Fragment: true,
+ }
+}
+
func newTokenDefault(id int, kind string, match byteSequence) *Token {
return newToken(spec.LexModeNumDefault, spec.LexModeNameDefault, id, kind, match)
}
@@ -477,6 +485,50 @@ func TestLexer_Next(t *testing.T) {
{
lspec: &spec.LexSpec{
Entries: []*spec.LexEntry{
+ newLexEntryDefaultNOP("t1", "\\f{a2c}\\f{d2f}+"),
+ newLexEntryFragment("a2c", "abc"),
+ newLexEntryFragment("d2f", "def"),
+ },
+ },
+ src: "abcdefdefabcdef",
+ tokens: []*Token{
+ newTokenDefault(1, "t1", newByteSequence([]byte("abcdefdef"))),
+ newTokenDefault(1, "t1", newByteSequence([]byte("abcdef"))),
+ newEOFTokenDefault(),
+ },
+ },
+ {
+ lspec: &spec.LexSpec{
+ Entries: []*spec.LexEntry{
+ newLexEntryDefaultNOP("t1", "(\\f{a2c}|\\f{d2f})+"),
+ newLexEntryFragment("a2c", "abc"),
+ newLexEntryFragment("d2f", "def"),
+ },
+ },
+ src: "abcdefdefabc",
+ tokens: []*Token{
+ newTokenDefault(1, "t1", newByteSequence([]byte("abcdefdefabc"))),
+ newEOFTokenDefault(),
+ },
+ },
+ {
+ lspec: &spec.LexSpec{
+ Entries: []*spec.LexEntry{
+ newLexEntryDefaultNOP("t1", "\\f{a2c_or_d2f}+"),
+ newLexEntryFragment("a2c_or_d2f", "\\f{a2c}|\\f{d2f}"),
+ newLexEntryFragment("a2c", "abc"),
+ newLexEntryFragment("d2f", "def"),
+ },
+ },
+ src: "abcdefdefabc",
+ tokens: []*Token{
+ newTokenDefault(1, "t1", newByteSequence([]byte("abcdefdefabc"))),
+ newEOFTokenDefault(),
+ },
+ },
+ {
+ lspec: &spec.LexSpec{
+ Entries: []*spec.LexEntry{
newLexEntryDefaultNOP("white_space", ` *`),
newLexEntry([]string{"default"}, "string_open", `"`, "string", false),
newLexEntry([]string{"string"}, "escape_sequence", `\\[n"\\]`, "", false),