author    Ryo Nihei <nihei.dev@gmail.com>  2021-02-20 17:36:16 +0900
committer Ryo Nihei <nihei.dev@gmail.com>  2021-02-20 17:36:16 +0900
commit    9357758697305753a68b541b42452a8cb13eebe2 (patch)
tree      bc10befd50c9a0fd1c5ccc894e1e18cfbbdacb33 /driver
parent    Fix computation of last positions (diff)
Add + and ? operators
* a+ matches 'a' one or more times. This is equivalent to aa*.
* a? matches 'a' zero or one time.
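For reference, both equivalences can be checked against Go's standard regexp package. This is only an illustrative comparison, not tre's own engine (which the tests below exercise directly):

    package main

    import (
    	"fmt"
    	"regexp"
    )

    func main() {
    	// a+ (one or more) accepts exactly the same strings as aa*.
    	plus := regexp.MustCompile(`^a+$`)
    	star := regexp.MustCompile(`^aa*$`)
    	for _, s := range []string{"", "a", "aa", "aaa"} {
    		fmt.Printf("%q: a+=%v aa*=%v\n", s, plus.MatchString(s), star.MatchString(s))
    	}

    	// a? (zero or one) accepts only "" and "a".
    	opt := regexp.MustCompile(`^a?$`)
    	fmt.Println(opt.MatchString(""), opt.MatchString("a"), opt.MatchString("aa")) // true true false
    }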
Diffstat (limited to 'driver')
-rw-r--r--  driver/lexer_test.go  |  40
1 file changed, 35 insertions(+), 5 deletions(-)
diff --git a/driver/lexer_test.go b/driver/lexer_test.go
index 133b758..283d5fe 100644
--- a/driver/lexer_test.go
+++ b/driver/lexer_test.go
@@ -19,15 +19,15 @@ func TestLexer_Next(t *testing.T) {
lspec: &spec.LexSpec{
Entries: []*spec.LexEntry{
spec.NewLexEntry("t1", "(a|b)*abb"),
- spec.NewLexEntry("t2", " *"),
+ spec.NewLexEntry("t2", " +"),
},
},
- src: "abb aabb aaabb babb bbabb abbbabb",
+ src: "abb aabb  aaabb babb bbabb abbbabb",
tokens: []*Token{
newToken(1, "t1", []byte("abb")),
newToken(2, "t2", []byte(" ")),
newToken(1, "t1", []byte("aabb")),
- newToken(2, "t2", []byte(" ")),
+ newToken(2, "t2", []byte("  ")),
newToken(1, "t1", []byte("aaabb")),
newToken(2, "t2", []byte(" ")),
newToken(1, "t1", []byte("babb")),
@@ -41,6 +41,34 @@ func TestLexer_Next(t *testing.T) {
{
lspec: &spec.LexSpec{
Entries: []*spec.LexEntry{
+ spec.NewLexEntry("t1", "b?a+"),
+ spec.NewLexEntry("t2", "(ab)?(cd)+"),
+ spec.NewLexEntry("t3", " +"),
+ },
+ },
+ src: "ba baaa a aaa abcd abcdcdcd cd cdcdcd",
+ tokens: []*Token{
+ newToken(1, "t1", []byte("ba")),
+ newToken(3, "t3", []byte(" ")),
+ newToken(1, "t1", []byte("baaa")),
+ newToken(3, "t3", []byte(" ")),
+ newToken(1, "t1", []byte("a")),
+ newToken(3, "t3", []byte(" ")),
+ newToken(1, "t1", []byte("aaa")),
+ newToken(3, "t3", []byte(" ")),
+ newToken(2, "t2", []byte("abcd")),
+ newToken(3, "t3", []byte(" ")),
+ newToken(2, "t2", []byte("abcdcdcd")),
+ newToken(3, "t3", []byte(" ")),
+ newToken(2, "t2", []byte("cd")),
+ newToken(3, "t3", []byte(" ")),
+ newToken(2, "t2", []byte("cdcdcd")),
+ newEOFToken(),
+ },
+ },
+ {
+ lspec: &spec.LexSpec{
+ Entries: []*spec.LexEntry{
spec.NewLexEntry("t1", "."),
},
},
@@ -85,15 +113,17 @@ func TestLexer_Next(t *testing.T) {
{
lspec: &spec.LexSpec{
Entries: []*spec.LexEntry{
- spec.NewLexEntry("t1", "[ab.*|()[\\]]"),
+ spec.NewLexEntry("t1", "[ab.*+?|()[\\]]"),
},
},
- src: "ab.*|()[]",
+ src: "ab.*+?|()[]",
tokens: []*Token{
newToken(1, "t1", []byte("a")),
newToken(1, "t1", []byte("b")),
newToken(1, "t1", []byte(".")),
newToken(1, "t1", []byte("*")),
+ newToken(1, "t1", []byte("+")),
+ newToken(1, "t1", []byte("?")),
newToken(1, "t1", []byte("|")),
newToken(1, "t1", []byte("(")),
newToken(1, "t1", []byte(")")),