package cmd

import (
    "encoding/json"
    "fmt"
    "io"
    "io/ioutil"
    "os"
    "time"

    "github.com/nihei9/maleeni/driver"
    "github.com/nihei9/maleeni/spec"
    "github.com/spf13/cobra"
)

func init() {
    cmd := &cobra.Command{
        Use:   "lex clexspec",
        Short: "Tokenize a text stream",
        Long: `lex takes a text stream and tokenizes it according to a compiled lexical specification.
You can generate the specification with ` + "`maleeni compile`" + `.`,
        Example: `  cat src | maleeni lex clexspec.json`,
        Args:    cobra.ExactArgs(1),
        RunE:    runLex,
    }
    rootCmd.AddCommand(cmd)
}

func runLex(cmd *cobra.Command, args []string) (retErr error) {
    // Read and unmarshal the compiled lexical specification passed as the first argument.
    var clspec *spec.CompiledLexSpec
    {
        clspecPath := args[0]
        f, err := os.Open(clspecPath)
        if err != nil {
            return err
        }
        defer f.Close()
        data, err := ioutil.ReadAll(f)
        if err != nil {
            return err
        }
        clspec = &spec.CompiledLexSpec{}
        err = json.Unmarshal(data, clspec)
        if err != nil {
            return err
        }
    }

    // The lexer's log is written to maleeni-lex.log in the current directory.
    var w io.Writer
    {
        f, err := os.OpenFile("maleeni-lex.log", os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0644)
        if err != nil {
            return err
        }
        defer f.Close()
        w = f
    }
    fmt.Fprintf(w, `maleeni lex starts.
Date time: %v
---
`, time.Now().Format(time.RFC3339))
    defer func() {
        fmt.Fprintf(w, "---\n")
        if retErr != nil {
            fmt.Fprintf(w, "maleeni lex failed: %v\n", retErr)
        } else {
            fmt.Fprintf(w, "maleeni lex succeeded.\n")
        }
    }()

    // Tokenize stdin and write each token to stdout as one JSON object per line.
    lex, err := driver.NewLexer(clspec, os.Stdin, driver.EnableLogging(w))
    if err != nil {
        return err
    }
    for {
        tok, err := lex.Next()
        if err != nil {
            return err
        }
        data, err := json.Marshal(tok)
        if err != nil {
            fmt.Fprintf(os.Stderr, "failed to marshal a token; token: %v, error: %v\n", tok, err)
        }
        fmt.Fprintf(os.Stdout, "%v\n", string(data))
        if tok.EOF {
            break
        }
    }

    return nil
}