about summary refs log tree commit diff
path: root/compiler/lexer_test.go
diff options
context:
space:
mode:
author    Ryo Nihei <nihei.dev@gmail.com>  2021-02-24 01:30:08 +0900
committer Ryo Nihei <nihei.dev@gmail.com>  2021-02-24 01:30:08 +0900
commit   e5fb2fe4f4dfc7dff550b934933b88e9392a6e11 (patch)
tree     7ffa58f2106d8b3bbbe931b84f73a9fb5c2b51a1 /compiler/lexer_test.go
parent   Add + and ? operators (diff)
download tre-e5fb2fe4f4dfc7dff550b934933b88e9392a6e11.tar.gz
         tre-e5fb2fe4f4dfc7dff550b934933b88e9392a6e11.tar.xz
Add range expression
[a-z] matches any one character from a to z. The order of the characters depends on Unicode code points.
Diffstat (limited to 'compiler/lexer_test.go')
-rw-r--r--  compiler/lexer_test.go | 11
1 file changed, 8 insertions(+), 3 deletions(-)
diff --git a/compiler/lexer_test.go b/compiler/lexer_test.go
index e2e7d7a..be00317 100644
--- a/compiler/lexer_test.go
+++ b/compiler/lexer_test.go
@@ -31,7 +31,7 @@ func TestLexer(t *testing.T) {
},
{
caption: "lexer can recognize the special characters",
- src: ".*+?|()[]",
+ src: ".*+?|()[-]",
tokens: []*token{
newToken(tokenKindAnyChar, nullChar),
newToken(tokenKindRepeat, nullChar),
@@ -41,13 +41,14 @@ func TestLexer(t *testing.T) {
newToken(tokenKindGroupOpen, nullChar),
newToken(tokenKindGroupClose, nullChar),
newToken(tokenKindBExpOpen, nullChar),
+ newToken(tokenKindCharRange, nullChar),
newToken(tokenKindBExpClose, nullChar),
newToken(tokenKindEOF, nullChar),
},
},
{
caption: "lexer can recognize the escape sequences",
- src: "\\\\\\.\\*\\+\\?\\|\\(\\)\\[\\]",
+ src: "\\\\\\.\\*\\+\\?\\|\\(\\)\\[\\][\\-]",
tokens: []*token{
newToken(tokenKindChar, '\\'),
newToken(tokenKindChar, '.'),
@@ -59,12 +60,15 @@ func TestLexer(t *testing.T) {
newToken(tokenKindChar, ')'),
newToken(tokenKindChar, '['),
newToken(tokenKindChar, ']'),
+ newToken(tokenKindBExpOpen, nullChar),
+ newToken(tokenKindChar, '-'),
+ newToken(tokenKindBExpClose, nullChar),
newToken(tokenKindEOF, nullChar),
},
},
{
caption: "in a bracket expression, the special characters are also handled as normal characters",
- src: "[\\\\.*+?|()[\\]].*|()][",
+ src: "[\\\\.*+?|()[\\]].*|()-][",
tokens: []*token{
newToken(tokenKindBExpOpen, nullChar),
newToken(tokenKindChar, '\\'),
@@ -83,6 +87,7 @@ func TestLexer(t *testing.T) {
newToken(tokenKindAlt, nullChar),
newToken(tokenKindGroupOpen, nullChar),
newToken(tokenKindGroupClose, nullChar),
+ newToken(tokenKindChar, '-'),
newToken(tokenKindBExpClose, nullChar),
newToken(tokenKindBExpOpen, nullChar),
newToken(tokenKindEOF, nullChar),