parser.go
package main

import (
	"errors"
	"io"
	"strings"
)
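
// tokenKind classifies a lexed token.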
type tokenKind int

const (
	tokenSeparator tokenKind = iota
	tokenString
	tokenSubstitute
)
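
// token is a single lexical unit produced by the tokenizer.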
type token struct {
	kind tokenKind
	text string
}
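
// tokenizer lexes a command line rune by rune. quote tracks whether the
// cursor is inside a double-quoted region; it persists across calls so
// parse can detect an unmatched quote once the input is exhausted.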
type tokenizer struct {
	quote bool
}
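
// nextToken returns the next token from r: a separator (an unquoted space
// or tab), a literal string (with quotes and backslash escapes resolved),
// or the name of a ${name} substitution. It returns io.EOF once the input
// is exhausted, first flushing any pending text as a final string token.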
func (tokenizer *tokenizer) nextToken(r *strings.Reader) (t token, err error) {
	text := ""
	for {
		var c rune
		c, _, err = r.ReadRune()
		if err == io.EOF && len(text) > 0 {
			// End of input: flush the pending text as a final string token.
			t = token{
				kind: tokenString,
				text: text,
			}
			err = nil
			return
		}
		if err != nil {
			return
		}
		if c == '"' {
			// Quotes are not part of the token text; they only toggle
			// separator handling.
			tokenizer.quote = !tokenizer.quote
		} else if c == '$' {
			if len(text) > 0 {
				// Flush the literal text first; the '$' is re-read on
				// the next call.
				r.UnreadRune()
				t = token{
					kind: tokenString,
					text: text,
				}
				return
			}
			c, _, err = r.ReadRune()
			if err != nil {
				return
			}
			if c == '{' {
				// Collect everything up to and including the closing
				// brace, then drop the brace to keep only the name.
				for c != '}' {
					c, _, err = r.ReadRune()
					if err != nil {
						return
					}
					text += string(c)
				}
				text = text[:len(text)-1]
				t = token{
					kind: tokenSubstitute,
					text: text,
				}
				return
			}
			// A '$' not followed by '{' stands for the character after it.
			text += string(c)
		} else if c == '\\' {
			// Backslash escapes the next character; \n becomes a newline.
			c, _, err = r.ReadRune()
			if err != nil {
				return
			}
			if c == 'n' {
				c = '\n'
			}
			text += string(c)
		} else if (c == ' ' || c == '\t') && !tokenizer.quote {
			if len(text) > 0 {
				// Flush the pending text; the separator is re-read on
				// the next call.
				r.UnreadRune()
				t = token{
					kind: tokenString,
					text: text,
				}
				return
			}
			t = token{
				kind: tokenSeparator,
				text: string(c),
			}
			return
		} else {
			text += string(c)
		}
	}
}
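
// parse splits text into fields separated by unquoted whitespace, expanding
// each ${name} through subst, which reports whether the name is known. An
// unknown name or an unmatched double quote yields an error. Note that
// consecutive separators produce empty fields.
//
// A minimal usage sketch (the map-backed subst is illustrative, not part
// of this package):
//
//	args, err := parse(func(key string) (string, bool) {
//		env := map[string]string{"user": "alice"}
//		v, ok := env[key]
//		return v, ok
//	}, `say "hello ${user}"`)
//	// args == []string{"say", "hello alice"}, err == nil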
func parse(subst func(string) (string, bool), text string) ([]string, error) {
	reader := strings.NewReader(text)
	var previous []string
	current := ""
	var t tokenizer
	for {
		tok, err := t.nextToken(reader)
		if err == io.EOF {
			break
		}
		if err != nil {
			return nil, err
		}
		switch tok.kind {
		case tokenSeparator:
			previous = append(previous, current)
			current = ""
		case tokenSubstitute:
			val, ok := subst(tok.text)
			if !ok {
				// tl looks up a localized message; it is defined elsewhere
				// in the package.
				return nil, errors.New(tl("invalid.substitute") + ": " + tok.text)
			}
			current += val
		case tokenString:
			current += tok.text
		default:
			panic("unreachable: unhandled token kind")
		}
	}
	if t.quote {
		return nil, errors.New(tl("invalid.unmatched.quote"))
	}
	// Flush the final field.
	previous = append(previous, current)
	return previous, nil
}