path: root/jsonpath/lexer_test.go
author    Felix Hanley <felix@userspace.com.au>  2018-11-16 06:35:28 +0000
committer Felix Hanley <felix@userspace.com.au>  2018-11-16 06:35:28 +0000
commit    44da796e192961b614e3540c4c1ec52f4bc0a290 (patch)
tree      c0dcc69363c401d123bfdba0662d990d6804df77 /jsonpath/lexer_test.go
parent    d6882bd9403c588415c1906a7015d16e92aa1ad3 (diff)
download  query-44da796e192961b614e3540c4c1ec52f4bc0a290.tar.gz
          query-44da796e192961b614e3540c4c1ec52f4bc0a290.tar.bz2
Add start of jsonpath parser
Diffstat (limited to 'jsonpath/lexer_test.go')
-rw-r--r--  jsonpath/lexer_test.go  172
1 file changed, 172 insertions, 0 deletions
diff --git a/jsonpath/lexer_test.go b/jsonpath/lexer_test.go
new file mode 100644
index 0000000..504d413
--- /dev/null
+++ b/jsonpath/lexer_test.go
@@ -0,0 +1,172 @@
+package jsonpath
+
+import (
+	"testing"
+
+	"src.userspace.com.au/query/lexer"
+)
+
+func TestValidStates(t *testing.T) {
+	tests := []struct {
+		path   string
+		tokens []lexer.Token
+	}{
+		{
+			path: "$.test",
+			tokens: []lexer.Token{
+				{Type: TAbsolute, Value: "$"},
+				{Type: TChildDot, Value: "."},
+				{Type: TName, Value: "test"},
+			},
+		},
+		{
+			path: "$[test]",
+			tokens: []lexer.Token{
+				{Type: TAbsolute, Value: "$"},
+				{Type: TChildStart, Value: "["},
+				{Type: TName, Value: "test"},
+				{Type: TChildEnd, Value: "]"},
+			},
+		},
+		{
+			path: "$[one][two]",
+			tokens: []lexer.Token{
+				{Type: TAbsolute, Value: "$"},
+				{Type: TChildStart, Value: "["},
+				{Type: TName, Value: "one"},
+				{Type: TChildEnd, Value: "]"},
+				{Type: TChildStart, Value: "["},
+				{Type: TName, Value: "two"},
+				{Type: TChildEnd, Value: "]"},
+			},
+		},
+		{
+			path: "$.one.two",
+			tokens: []lexer.Token{
+				{Type: TAbsolute, Value: "$"},
+				{Type: TChildDot, Value: "."},
+				{Type: TName, Value: "one"},
+				{Type: TChildEnd, Value: ""},
+				{Type: TChildDot, Value: "."},
+				{Type: TName, Value: "two"},
+			},
+		},
+		{
+			path: "$[*]",
+			tokens: []lexer.Token{
+				{Type: TAbsolute, Value: "$"},
+				{Type: TChildStart, Value: "["},
+				{Type: TWildcard, Value: "*"},
+				{Type: TChildEnd, Value: "]"},
+			},
+		},
+		{
+			path: "$[one][*]",
+			tokens: []lexer.Token{
+				{Type: TAbsolute, Value: "$"},
+				{Type: TChildStart, Value: "["},
+				{Type: TName, Value: "one"},
+				{Type: TChildEnd, Value: "]"},
+				{Type: TChildStart, Value: "["},
+				{Type: TWildcard, Value: "*"},
+				{Type: TChildEnd, Value: "]"},
+			},
+		},
+		{
+			path: "$.one[1,2,3]",
+			tokens: []lexer.Token{
+				{Type: TAbsolute, Value: "$"},
+				{Type: TChildDot, Value: "."},
+				{Type: TName, Value: "one"},
+				{Type: TPredicateStart, Value: "["},
+				{Type: TNumber, Value: "1"},
+				{Type: TUnion, Value: ","},
+				{Type: TNumber, Value: "2"},
+				{Type: TUnion, Value: ","},
+				{Type: TNumber, Value: "3"},
+				{Type: TPredicateEnd, Value: "]"},
+			},
+		},
+		{
+			path: "$.one[1:3]",
+			tokens: []lexer.Token{
+				{Type: TAbsolute, Value: "$"},
+				{Type: TChildDot, Value: "."},
+				{Type: TName, Value: "one"},
+				{Type: TPredicateStart, Value: "["},
+				{Type: TNumber, Value: "1"},
+				{Type: TRange, Value: ":"},
+				{Type: TNumber, Value: "3"},
+				{Type: TPredicateEnd, Value: "]"},
+			},
+		},
+		{
+			path: "$.one[:3]",
+			tokens: []lexer.Token{
+				{Type: TAbsolute, Value: "$"},
+				{Type: TChildDot, Value: "."},
+				{Type: TName, Value: "one"},
+				{Type: TPredicateStart, Value: "["},
+				{Type: TNumber, Value: ""},
+				{Type: TRange, Value: ":"},
+				{Type: TNumber, Value: "3"},
+				{Type: TPredicateEnd, Value: "]"},
+			},
+		},
+		{
+			path: "$.one[3:]",
+			tokens: []lexer.Token{
+				{Type: TAbsolute, Value: "$"},
+				{Type: TChildDot, Value: "."},
+				{Type: TName, Value: "one"},
+				{Type: TPredicateStart, Value: "["},
+				{Type: TNumber, Value: "3"},
+				{Type: TRange, Value: ":"},
+				{Type: TNumber, Value: ""},
+				{Type: TPredicateEnd, Value: "]"},
+			},
+		},
+		{
+			path: "$..one",
+			tokens: []lexer.Token{
+				{Type: TAbsolute, Value: "$"},
+				{Type: TRecursive, Value: "."},
+				{Type: TChildDot, Value: "."},
+				{Type: TName, Value: "one"},
+			},
+		},
+		{
+			path: "$['one']",
+			tokens: []lexer.Token{
+				{Type: TAbsolute, Value: "$"},
+				{Type: TChildStart, Value: "["},
+				{Type: TQuotedName, Value: "'one'"},
+				{Type: TChildEnd, Value: "]"},
+			},
+		},
+	}
+
+	for _, tt := range tests {
+		t.Log("testing path: ", tt.path)
+		l := lexer.New(tt.path, pathState)
+		l.Start()
+
+		func() {
+			for i, expected := range tt.tokens {
+				actual, done := l.NextToken()
+				if done || actual == nil {
+					t.Errorf("Lexer(%q) finished early, expecting %v", tt.path, expected)
+					return
+				}
+				if actual.Type != expected.Type {
+					t.Errorf("Lexer(%q) token %d => %s, expected %s", tt.path, i, tokenNames[actual.Type], tokenNames[expected.Type])
+					return
+				}
+				if actual.Value != expected.Value {
+					t.Errorf("Lexer(%q) token %d => %v, expected %v", tt.path, i, actual, expected)
+					return
+				}
+			}
+		}()
+	}
+}
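
For readers without the rest of the tree checked out: the test compiles against a small surface of the shared lexer package, namely lexer.Token, lexer.New(input, startState), Start(), and NextToken() returning a token pointer plus a done flag, with pathState and the T* token types defined elsewhere in this jsonpath package. The sketch below is one plausible shape for that surface, in the style of the well-known channel-based lexer pattern from Rob Pike's "Lexical Scanning in Go" talk. It is an assumption for illustration, not the actual src.userspace.com.au/query/lexer implementation; every name in it beyond the calls made by the test above is hypothetical.

package main

import "fmt"

// Token is the unit the lexer hands to the parser. Type would be one of
// the jsonpath package's T* constants; a plain int stands in for it here.
type Token struct {
	Type  int
	Value string
}

// StateFn is a single lexing state that returns the next state, or nil
// when lexing is complete (the test's pathState would be the entry state).
type StateFn func(*Lexer) StateFn

// Lexer drives the state functions and emits tokens on a channel.
type Lexer struct {
	input  string
	state  StateFn
	tokens chan Token
}

// New prepares a lexer but does not run it.
func New(input string, start StateFn) *Lexer {
	return &Lexer{input: input, state: start, tokens: make(chan Token)}
}

// Start runs the state machine in a goroutine so NextToken can block on
// the channel until the next token is ready.
func (l *Lexer) Start() {
	go func() {
		for s := l.state; s != nil; {
			s = s(l)
		}
		close(l.tokens)
	}()
}

// NextToken returns the next token, or done=true once the channel closes,
// matching the (actual, done) pair the test consumes.
func (l *Lexer) NextToken() (*Token, bool) {
	tok, ok := <-l.tokens
	if !ok {
		return nil, true
	}
	return &tok, false
}

// wholeInput is a toy state for demonstration only: it emits the entire
// input as one token and then stops.
func wholeInput(l *Lexer) StateFn {
	l.tokens <- Token{Type: 0, Value: l.input}
	return nil
}

func main() {
	l := New("$.one[1:3]", wholeInput)
	l.Start()
	for {
		tok, done := l.NextToken()
		if done {
			break
		}
		fmt.Printf("%d %q\n", tok.Type, tok.Value)
	}
}

The goroutine-plus-channel split lets the consumer pull tokens with NextToken while the state machine runs ahead, which is consistent with the test calling Start before entering its read loop.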