-
Notifications
You must be signed in to change notification settings - Fork 1
/
tokenize.go
50 lines (40 loc) · 1005 Bytes
/
tokenize.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
package liquid
import "io"
// Tokenizer allows iteration through a list of tokens
type Tokenizer struct {
tokens []string
index int
}
// Next returns token, if available, and an EOF if the end has been reached
func (t *Tokenizer) Next() (string, error) {
if t.index >= len(t.tokens) {
return "", io.EOF
}
token := t.tokens[t.index]
t.index++
var err error
if t.index >= len(t.tokens) {
err = io.EOF
}
return token, err
}
// NewTokenizer creates a *Tokenizer instance specific to the supplied template.
// The template is split into alternating literal-text tokens and tag/output
// tokens matched by templateParserRegexp; literal runs between (and after)
// matches are preserved as their own tokens.
func NewTokenizer(template string) *Tokenizer {
	indices := templateParserRegexp.FindAllStringIndex(template, -1)
	// Upper bound: one literal before each match, each match itself,
	// plus a trailing literal — pre-size to avoid repeated growth.
	tokens := make([]string, 0, 2*len(indices)+1)
	before := 0
	for _, loc := range indices {
		// Literal text between the previous match and this one.
		if loc[0] > before {
			tokens = append(tokens, template[before:loc[0]])
		}
		tokens = append(tokens, template[loc[0]:loc[1]])
		before = loc[1]
	}
	// Trailing literal text after the final match (or the whole
	// template when there were no matches).
	if before < len(template) {
		tokens = append(tokens, template[before:])
	}
	// index starts at its zero value (0); no need to set it explicitly.
	return &Tokenizer{tokens: tokens}
}