1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
|
package jsonpath
import (
"testing"
"src.userspace.com.au/query/lexer"
)
// TestValidStates feeds a set of well-formed JSONPath expressions to the
// lexer and verifies that the emitted token stream matches the expected
// sequence of (type, value) pairs for each path.
func TestValidStates(t *testing.T) {
	tests := []struct {
		path   string
		tokens []lexer.Token
	}{
		{
			path: "$.test",
			tokens: []lexer.Token{
				{Type: TAbsolute, Value: "$"},
				{Type: TChildDot, Value: "."},
				{Type: TName, Value: "test"},
			},
		},
		{
			path: "$[test]",
			tokens: []lexer.Token{
				{Type: TAbsolute, Value: "$"},
				{Type: TChildStart, Value: "["},
				{Type: TName, Value: "test"},
				{Type: TChildEnd, Value: "]"},
			},
		},
		{
			path: "$[one][two]",
			tokens: []lexer.Token{
				{Type: TAbsolute, Value: "$"},
				{Type: TChildStart, Value: "["},
				{Type: TName, Value: "one"},
				{Type: TChildEnd, Value: "]"},
				{Type: TChildStart, Value: "["},
				{Type: TName, Value: "two"},
				{Type: TChildEnd, Value: "]"},
			},
		},
		{
			path: "$.one.two",
			tokens: []lexer.Token{
				{Type: TAbsolute, Value: "$"},
				{Type: TChildDot, Value: "."},
				{Type: TName, Value: "one"},
				// NOTE(review): an empty-valued TChildEnd between dot children
				// looks inconsistent with the "$.test" case above — confirm the
				// lexer really emits this synthetic closing token here.
				{Type: TChildEnd, Value: ""},
				{Type: TChildDot, Value: "."},
				{Type: TName, Value: "two"},
			},
		},
		{
			path: "$[*]",
			tokens: []lexer.Token{
				{Type: TAbsolute, Value: "$"},
				{Type: TChildStart, Value: "["},
				{Type: TWildcard, Value: "*"},
				{Type: TChildEnd, Value: "]"},
			},
		},
		{
			path: "$[one][*]",
			tokens: []lexer.Token{
				{Type: TAbsolute, Value: "$"},
				{Type: TChildStart, Value: "["},
				{Type: TName, Value: "one"},
				{Type: TChildEnd, Value: "]"},
				{Type: TChildStart, Value: "["},
				{Type: TWildcard, Value: "*"},
				{Type: TChildEnd, Value: "]"},
			},
		},
		{
			path: "$.one[1,2,3]",
			tokens: []lexer.Token{
				{Type: TAbsolute, Value: "$"},
				{Type: TChildDot, Value: "."},
				{Type: TName, Value: "one"},
				{Type: TPredicateStart, Value: "["},
				{Type: TNumber, Value: "1"},
				{Type: TUnion, Value: ","},
				{Type: TNumber, Value: "2"},
				{Type: TUnion, Value: ","},
				{Type: TNumber, Value: "3"},
				{Type: TPredicateEnd, Value: "]"},
			},
		},
		{
			path: "$.one[1:3]",
			tokens: []lexer.Token{
				{Type: TAbsolute, Value: "$"},
				{Type: TChildDot, Value: "."},
				{Type: TName, Value: "one"},
				{Type: TPredicateStart, Value: "["},
				{Type: TNumber, Value: "1"},
				{Type: TRange, Value: ":"},
				{Type: TNumber, Value: "3"},
				{Type: TPredicateEnd, Value: "]"},
			},
		},
		{
			// Omitted range start: lexer emits an empty-valued TNumber.
			path: "$.one[:3]",
			tokens: []lexer.Token{
				{Type: TAbsolute, Value: "$"},
				{Type: TChildDot, Value: "."},
				{Type: TName, Value: "one"},
				{Type: TPredicateStart, Value: "["},
				{Type: TNumber, Value: ""},
				{Type: TRange, Value: ":"},
				{Type: TNumber, Value: "3"},
				{Type: TPredicateEnd, Value: "]"},
			},
		},
		{
			// Omitted range end: lexer emits an empty-valued TNumber.
			path: "$.one[3:]",
			tokens: []lexer.Token{
				{Type: TAbsolute, Value: "$"},
				{Type: TChildDot, Value: "."},
				{Type: TName, Value: "one"},
				{Type: TPredicateStart, Value: "["},
				{Type: TNumber, Value: "3"},
				{Type: TRange, Value: ":"},
				{Type: TNumber, Value: ""},
				{Type: TPredicateEnd, Value: "]"},
			},
		},
		{
			path: "$..one",
			tokens: []lexer.Token{
				{Type: TAbsolute, Value: "$"},
				{Type: TRecursive, Value: "."},
				{Type: TChildDot, Value: "."},
				{Type: TName, Value: "one"},
			},
		},
		{
			path: "$['one']",
			tokens: []lexer.Token{
				{Type: TAbsolute, Value: "$"},
				{Type: TChildStart, Value: "["},
				{Type: TQuotedName, Value: "'one'"},
				{Type: TChildEnd, Value: "]"},
			},
		},
	}
	for _, tt := range tests {
		tt := tt // shadow for the closure (needed pre-Go 1.22)
		// Each path becomes a named subtest, replacing the t.Log call and
		// the anonymous-function wrapper previously used for early exit.
		t.Run(tt.path, func(t *testing.T) {
			l := lexer.New(tt.path, pathState)
			l.Start()
			for i, expected := range tt.tokens {
				actual, done := l.NextToken()
				// Fatalf aborts only this subtest; remaining paths still run.
				if done || actual == nil {
					t.Fatalf("Lexer(%q) finished early, expecting %v", tt.path, expected)
				}
				if actual.Type != expected.Type {
					t.Fatalf("Lexer(%q) token %d => %s, expected %s", tt.path, i, tokenNames[actual.Type], tokenNames[expected.Type])
				}
				if actual.Value != expected.Value {
					t.Fatalf("Lexer(%q) token %d => %v, expected %v", tt.path, i, actual, expected)
				}
			}
		})
	}
}
|