Checking in vendor folder for ease of using go get.

Renan DelValle 2018-10-23 23:32:59 -07:00
parent 7a1251853b
commit cdb4b5a1d0
GPG key ID: C240AD6D6F443EC9
3554 changed files with 1270116 additions and 0 deletions

175
vendor/github.com/pelletier/go-toml/query/doc.go generated vendored Normal file

@@ -0,0 +1,175 @@
// Package query performs JSONPath-like queries on a TOML document.
//
// The query path implementation is based loosely on the JSONPath specification:
// http://goessner.net/articles/JsonPath/.
//
// The idea behind a query path is to allow quick access to any element, or set
// of elements within a TOML document, with a single expression.
//
// result, err := query.CompileAndExecute("$.foo.bar.baz", tree)
//
// This is roughly equivalent to:
//
// next := tree.Get("foo")
// if next != nil {
// next = next.Get("bar")
// if next != nil {
// next = next.Get("baz")
// }
// }
// result := next
//
// err is non-nil if a parsing error occurs.
//
// If no node in the tree matches the query, result will simply contain an empty list of
// items.
//
// As illustrated above, the query path is much more concise and flexible, especially since
// the structure of the TOML file can vary. Rather than making assumptions about
// a document's structure, a query allows the programmer to make structured
// requests into the document, and get zero or more values as a result.
//
// Query Syntax
//
// The syntax of a query begins with a root token, followed by any number
// of sub-expressions (a combined example follows the list):
//
// $
// Root of the TOML tree. This must always come first.
// .name
// Selects child of this node, where 'name' is a TOML key
// name.
// ['name']
// Selects child of this node, where 'name' is a string
// containing a TOML key name.
// [index]
// Selects the child array element at 'index'.
// ..expr
// Recursively selects all children, filtered by a union,
// index, or slice expression.
// ..*
// Recursive selection of all nodes at this point in the
// tree.
// .*
// Selects all children of the current node.
// [expr,expr]
// Union operator - a logical 'or' grouping of two or more
// sub-expressions: index, key name, or filter.
// [start:end:step]
// Slice operator - selects array elements from start to
// end-1, at the given step. All three arguments are
// optional.
// [?(filter)]
// Named filter expression - the function 'filter' is
// used to filter children at this node.
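//
// Several of these tokens can be combined into a single path. As an
// illustrative sketch (assuming the document stores 'ip' keys somewhere in
// its tables):
//
// // recursively select every value stored under a key named 'ip'
// query.CompileAndExecute("$..ip", tree)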
//
// Query Indexes And Slices
//
// Index expressions perform no bounds checking, and will contribute no
// values to the result set if the provided index or index range is invalid.
// Negative indexes represent values from the end of the array, counting backwards.
//
// // select the last index of the array named 'foo'
// query.CompileAndExecute("$.foo[-1]", tree)
//
// Slice expressions are supported by using ':' to separate a start/end index pair.
//
// // select up to the first five elements in the array
// query.CompileAndExecute("$.foo[0:5]", tree)
//
// Slice expressions also allow negative indexes for the start and stop
// arguments.
//
// // select all array elements.
// query.CompileAndExecute("$.foo[0:-1]", tree)
//
// Slice expressions may have an optional stride/step parameter:
//
// // select every other element
// query.CompileAndExecute("$.foo[0:-1:2]", tree)
//
// Slice start and end parameters are also optional:
//
// // these are all equivalent and select all the values in the array
// query.CompileAndExecute("$.foo[:]", tree)
// query.CompileAndExecute("$.foo[0:]", tree)
// query.CompileAndExecute("$.foo[:-1]", tree)
// query.CompileAndExecute("$.foo[0:-1:]", tree)
// query.CompileAndExecute("$.foo[::1]", tree)
// query.CompileAndExecute("$.foo[0::1]", tree)
// query.CompileAndExecute("$.foo[:-1:1]", tree)
// query.CompileAndExecute("$.foo[0:-1:1]", tree)
//
// Query Filters
//
// Query filters are used within a Union [,] or single Filter [] expression.
// A filter only allows qualifying nodes through to the next expression,
// and/or into the result set.
//
// // returns children of foo that are permitted by the 'bar' filter.
// query.CompileAndExecute("$.foo[?(bar)]", tree)
//
// There are several filters provided with the library (a combined example follows the list):
//
// tree
// Allows nodes of type Tree.
// int
// Allows nodes of type int64.
// float
// Allows nodes of type float64.
// string
// Allows nodes of type string.
// time
// Allows nodes of type time.Time.
// bool
// Allows nodes of type bool.
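//
// Filters can be combined with the union operator. A brief sketch, assuming
// 'foo' is a table holding a mix of value types:
//
// // select the children of foo that are either strings or integers
// query.CompileAndExecute("$.foo[?(string),?(int)]", tree)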
//
// Query Results
//
// An executed query returns a Result object. This contains the nodes
// in the TOML tree that satisfy the query expression. Position information
// is also available for each value in the set.
//
// // display the results of a query
// results, _ := query.CompileAndExecute("$.foo.bar.baz", tree)
// for idx, value := range results.Values() {
// fmt.Printf("%v: %v\n", results.Positions()[idx], value)
// }
//
// Compiled Queries
//
// Queries may be executed directly on a Tree object, or compiled ahead
// of time and executed discretely. The former is more convenient, but has the
// penalty of having to recompile the query expression each time.
//
// // basic query
// results, _ := query.CompileAndExecute("$.foo.bar.baz", tree)
//
// // compiled query
// query, err := query.Compile("$.foo.bar.baz")
// results := query.Execute(tree)
//
// // run the compiled query again on a different tree
// moreResults := query.Execute(anotherTree)
//
// User Defined Query Filters
//
// Filter expressions may also be user defined by using the SetFilter()
// function on the Query object. The function must return true/false, which
// signifies if the passed node is kept or discarded, respectively.
//
// // create a query that references a user-defined filter
// query, _ := query.Compile("$[?(bazOnly)]")
//
// // define the filter, and assign it to the query
// query.SetFilter("bazOnly", func(node interface{}) bool {
// if tree, ok := node.(*Tree); ok {
// return tree.Has("baz")
// }
// return false // reject all other node types
// })
//
// // run the query
// query.Execute(tree)
//
package query

357
vendor/github.com/pelletier/go-toml/query/lexer.go generated vendored Normal file

@@ -0,0 +1,357 @@
// TOML JSONPath lexer.
//
// Written using the principles developed by Rob Pike in
// http://www.youtube.com/watch?v=HxaD_trXwRE
package query
import (
"fmt"
"github.com/pelletier/go-toml"
"strconv"
"strings"
"unicode/utf8"
)
// Lexer state function
type queryLexStateFn func() queryLexStateFn
// Lexer definition
type queryLexer struct {
input string
start int
pos int
width int
tokens chan token
depth int
line int
col int
stringTerm string
}
func (l *queryLexer) run() {
for state := l.lexVoid; state != nil; {
state = state()
}
close(l.tokens)
}
func (l *queryLexer) nextStart() {
// iterate by runes (utf8 characters)
// search for newlines and advance line/col counts
for i := l.start; i < l.pos; {
r, width := utf8.DecodeRuneInString(l.input[i:])
if r == '\n' {
l.line++
l.col = 1
} else {
l.col++
}
i += width
}
// advance start position to next token
l.start = l.pos
}
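// emit sends the token accumulated between start and pos on the tokens
// channel, then advances the start position past it.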
func (l *queryLexer) emit(t tokenType) {
l.tokens <- token{
Position: toml.Position{Line: l.line, Col: l.col},
typ: t,
val: l.input[l.start:l.pos],
}
l.nextStart()
}
func (l *queryLexer) emitWithValue(t tokenType, value string) {
l.tokens <- token{
Position: toml.Position{Line: l.line, Col: l.col},
typ: t,
val: value,
}
l.nextStart()
}
func (l *queryLexer) next() rune {
if l.pos >= len(l.input) {
l.width = 0
return eof
}
var r rune
r, l.width = utf8.DecodeRuneInString(l.input[l.pos:])
l.pos += l.width
return r
}
func (l *queryLexer) ignore() {
l.nextStart()
}
func (l *queryLexer) backup() {
l.pos -= l.width
}
func (l *queryLexer) errorf(format string, args ...interface{}) queryLexStateFn {
l.tokens <- token{
Position: toml.Position{Line: l.line, Col: l.col},
typ: tokenError,
val: fmt.Sprintf(format, args...),
}
return nil
}
func (l *queryLexer) peek() rune {
r := l.next()
l.backup()
return r
}
func (l *queryLexer) accept(valid string) bool {
if strings.ContainsRune(valid, l.next()) {
return true
}
l.backup()
return false
}
func (l *queryLexer) follow(next string) bool {
return strings.HasPrefix(l.input[l.pos:], next)
}
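// lexVoid is the top-level lexer state: it emits punctuation tokens directly
// and dispatches to the string, key, and number states.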
func (l *queryLexer) lexVoid() queryLexStateFn {
for {
next := l.peek()
switch next {
case '$':
l.pos++
l.emit(tokenDollar)
continue
case '.':
if l.follow("..") {
l.pos += 2
l.emit(tokenDotDot)
} else {
l.pos++
l.emit(tokenDot)
}
continue
case '[':
l.pos++
l.emit(tokenLeftBracket)
continue
case ']':
l.pos++
l.emit(tokenRightBracket)
continue
case ',':
l.pos++
l.emit(tokenComma)
continue
case '*':
l.pos++
l.emit(tokenStar)
continue
case '(':
l.pos++
l.emit(tokenLeftParen)
continue
case ')':
l.pos++
l.emit(tokenRightParen)
continue
case '?':
l.pos++
l.emit(tokenQuestion)
continue
case ':':
l.pos++
l.emit(tokenColon)
continue
case '\'':
l.ignore()
l.stringTerm = string(next)
return l.lexString
case '"':
l.ignore()
l.stringTerm = string(next)
return l.lexString
}
if isSpace(next) {
l.next()
l.ignore()
continue
}
if isAlphanumeric(next) {
return l.lexKey
}
if next == '+' || next == '-' || isDigit(next) {
return l.lexNumber
}
if l.next() == eof {
break
}
return l.errorf("unexpected char: '%v'", next)
}
l.emit(tokenEOF)
return nil
}
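// lexKey consumes a run of letter or underscore runes and emits them as a
// single key token.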
func (l *queryLexer) lexKey() queryLexStateFn {
for {
next := l.peek()
if !isAlphanumeric(next) {
l.emit(tokenKey)
return l.lexVoid
}
if l.next() == eof {
break
}
}
l.emit(tokenEOF)
return nil
}
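// lexString consumes characters up to the terminating quote, decoding
// backslash escapes (including \u and \U unicode escapes), and emits the
// decoded text as a string token.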
func (l *queryLexer) lexString() queryLexStateFn {
l.pos++
l.ignore()
growingString := ""
for {
if l.follow(l.stringTerm) {
l.emitWithValue(tokenString, growingString)
l.pos++
l.ignore()
return l.lexVoid
}
if l.follow("\\\"") {
l.pos++
growingString += "\""
} else if l.follow("\\'") {
l.pos++
growingString += "'"
} else if l.follow("\\n") {
l.pos++
growingString += "\n"
} else if l.follow("\\b") {
l.pos++
growingString += "\b"
} else if l.follow("\\f") {
l.pos++
growingString += "\f"
} else if l.follow("\\/") {
l.pos++
growingString += "/"
} else if l.follow("\\t") {
l.pos++
growingString += "\t"
} else if l.follow("\\r") {
l.pos++
growingString += "\r"
} else if l.follow("\\\\") {
l.pos++
growingString += "\\"
} else if l.follow("\\u") {
l.pos += 2
code := ""
for i := 0; i < 4; i++ {
c := l.peek()
l.pos++
if !isHexDigit(c) {
return l.errorf("unfinished unicode escape")
}
code = code + string(c)
}
l.pos--
intcode, err := strconv.ParseInt(code, 16, 32)
if err != nil {
return l.errorf("invalid unicode escape: \\u" + code)
}
growingString += string(rune(intcode))
} else if l.follow("\\U") {
l.pos += 2
code := ""
for i := 0; i < 8; i++ {
c := l.peek()
l.pos++
if !isHexDigit(c) {
return l.errorf("unfinished unicode escape")
}
code = code + string(c)
}
l.pos--
intcode, err := strconv.ParseInt(code, 16, 32)
if err != nil {
return l.errorf("invalid unicode escape: \\U" + code)
}
growingString += string(rune(intcode))
} else if l.follow("\\") {
l.pos++
return l.errorf("invalid escape sequence: \\" + string(l.peek()))
} else {
growingString += string(l.peek())
}
if l.next() == eof {
break
}
}
return l.errorf("unclosed string")
}
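// lexNumber consumes an optional sign followed by digits, with at most one
// decimal point, and emits either an integer or a float token.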
func (l *queryLexer) lexNumber() queryLexStateFn {
l.ignore()
if !l.accept("+") {
l.accept("-")
}
pointSeen := false
digitSeen := false
for {
next := l.next()
if next == '.' {
if pointSeen {
return l.errorf("cannot have two dots in one float")
}
if !isDigit(l.peek()) {
return l.errorf("float cannot end with a dot")
}
pointSeen = true
} else if isDigit(next) {
digitSeen = true
} else {
l.backup()
break
}
if pointSeen && !digitSeen {
return l.errorf("cannot start float with a dot")
}
}
if !digitSeen {
return l.errorf("no digit in that number")
}
if pointSeen {
l.emit(tokenFloat)
} else {
l.emit(tokenInteger)
}
return l.lexVoid
}
// Entry point
func lexQuery(input string) chan token {
l := &queryLexer{
input: input,
tokens: make(chan token),
line: 1,
col: 1,
}
go l.run()
return l.tokens
}

179
vendor/github.com/pelletier/go-toml/query/lexer_test.go generated vendored Normal file

@@ -0,0 +1,179 @@
package query
import (
"github.com/pelletier/go-toml"
"testing"
)
func testQLFlow(t *testing.T, input string, expectedFlow []token) {
ch := lexQuery(input)
for idx, expected := range expectedFlow {
token := <-ch
if token != expected {
t.Log("While testing #", idx, ":", input)
t.Log("compared (got)", token, "to (expected)", expected)
t.Log("\tvalue:", token.val, "<->", expected.val)
t.Log("\tvalue as bytes:", []byte(token.val), "<->", []byte(expected.val))
t.Log("\ttype:", token.typ.String(), "<->", expected.typ.String())
t.Log("\tline:", token.Line, "<->", expected.Line)
t.Log("\tcolumn:", token.Col, "<->", expected.Col)
t.Log("compared", token, "to", expected)
t.FailNow()
}
}
tok, ok := <-ch
if ok {
t.Log("channel is not closed!")
t.Log(len(ch)+1, "tokens remaining:")
t.Log("token ->", tok)
for token := range ch {
t.Log("token ->", token)
}
t.FailNow()
}
}
func TestLexSpecialChars(t *testing.T) {
testQLFlow(t, " .$[]..()?*", []token{
{toml.Position{1, 2}, tokenDot, "."},
{toml.Position{1, 3}, tokenDollar, "$"},
{toml.Position{1, 4}, tokenLeftBracket, "["},
{toml.Position{1, 5}, tokenRightBracket, "]"},
{toml.Position{1, 6}, tokenDotDot, ".."},
{toml.Position{1, 8}, tokenLeftParen, "("},
{toml.Position{1, 9}, tokenRightParen, ")"},
{toml.Position{1, 10}, tokenQuestion, "?"},
{toml.Position{1, 11}, tokenStar, "*"},
{toml.Position{1, 12}, tokenEOF, ""},
})
}
func TestLexString(t *testing.T) {
testQLFlow(t, "'foo\n'", []token{
{toml.Position{1, 2}, tokenString, "foo\n"},
{toml.Position{2, 2}, tokenEOF, ""},
})
}
func TestLexDoubleString(t *testing.T) {
testQLFlow(t, `"bar"`, []token{
{toml.Position{1, 2}, tokenString, "bar"},
{toml.Position{1, 6}, tokenEOF, ""},
})
}
func TestLexStringEscapes(t *testing.T) {
testQLFlow(t, `"foo \" \' \b \f \/ \t \r \\ \u03A9 \U00012345 \n bar"`, []token{
{toml.Position{1, 2}, tokenString, "foo \" ' \b \f / \t \r \\ \u03A9 \U00012345 \n bar"},
{toml.Position{1, 55}, tokenEOF, ""},
})
}
func TestLexStringUnfinishedUnicode4(t *testing.T) {
testQLFlow(t, `"\u000"`, []token{
{toml.Position{1, 2}, tokenError, "unfinished unicode escape"},
})
}
func TestLexStringUnfinishedUnicode8(t *testing.T) {
testQLFlow(t, `"\U0000"`, []token{
{toml.Position{1, 2}, tokenError, "unfinished unicode escape"},
})
}
func TestLexStringInvalidEscape(t *testing.T) {
testQLFlow(t, `"\x"`, []token{
{toml.Position{1, 2}, tokenError, "invalid escape sequence: \\x"},
})
}
func TestLexStringUnfinished(t *testing.T) {
testQLFlow(t, `"bar`, []token{
{toml.Position{1, 2}, tokenError, "unclosed string"},
})
}
func TestLexKey(t *testing.T) {
testQLFlow(t, "foo", []token{
{toml.Position{1, 1}, tokenKey, "foo"},
{toml.Position{1, 4}, tokenEOF, ""},
})
}
func TestLexRecurse(t *testing.T) {
testQLFlow(t, "$..*", []token{
{toml.Position{1, 1}, tokenDollar, "$"},
{toml.Position{1, 2}, tokenDotDot, ".."},
{toml.Position{1, 4}, tokenStar, "*"},
{toml.Position{1, 5}, tokenEOF, ""},
})
}
func TestLexBracketKey(t *testing.T) {
testQLFlow(t, "$[foo]", []token{
{toml.Position{1, 1}, tokenDollar, "$"},
{toml.Position{1, 2}, tokenLeftBracket, "["},
{toml.Position{1, 3}, tokenKey, "foo"},
{toml.Position{1, 6}, tokenRightBracket, "]"},
{toml.Position{1, 7}, tokenEOF, ""},
})
}
func TestLexSpace(t *testing.T) {
testQLFlow(t, "foo bar baz", []token{
{toml.Position{1, 1}, tokenKey, "foo"},
{toml.Position{1, 5}, tokenKey, "bar"},
{toml.Position{1, 9}, tokenKey, "baz"},
{toml.Position{1, 12}, tokenEOF, ""},
})
}
func TestLexInteger(t *testing.T) {
testQLFlow(t, "100 +200 -300", []token{
{toml.Position{1, 1}, tokenInteger, "100"},
{toml.Position{1, 5}, tokenInteger, "+200"},
{toml.Position{1, 10}, tokenInteger, "-300"},
{toml.Position{1, 14}, tokenEOF, ""},
})
}
func TestLexFloat(t *testing.T) {
testQLFlow(t, "100.0 +200.0 -300.0", []token{
{toml.Position{1, 1}, tokenFloat, "100.0"},
{toml.Position{1, 7}, tokenFloat, "+200.0"},
{toml.Position{1, 14}, tokenFloat, "-300.0"},
{toml.Position{1, 20}, tokenEOF, ""},
})
}
func TestLexFloatWithMultipleDots(t *testing.T) {
testQLFlow(t, "4.2.", []token{
{toml.Position{1, 1}, tokenError, "cannot have two dots in one float"},
})
}
func TestLexFloatLeadingDot(t *testing.T) {
testQLFlow(t, "+.1", []token{
{toml.Position{1, 1}, tokenError, "cannot start float with a dot"},
})
}
func TestLexFloatWithTrailingDot(t *testing.T) {
testQLFlow(t, "42.", []token{
{toml.Position{1, 1}, tokenError, "float cannot end with a dot"},
})
}
func TestLexNumberWithoutDigit(t *testing.T) {
testQLFlow(t, "+", []token{
{toml.Position{1, 1}, tokenError, "no digit in that number"},
})
}
func TestLexUnknown(t *testing.T) {
testQLFlow(t, "^", []token{
{toml.Position{1, 1}, tokenError, "unexpected char: '94'"},
})
}

232
vendor/github.com/pelletier/go-toml/query/match.go generated vendored Normal file

@@ -0,0 +1,232 @@
package query
import (
"fmt"
"github.com/pelletier/go-toml"
)
// base match
type matchBase struct {
next pathFn
}
func (f *matchBase) setNext(next pathFn) {
f.next = next
}
// terminating functor - gathers results
type terminatingFn struct {
// empty
}
func newTerminatingFn() *terminatingFn {
return &terminatingFn{}
}
func (f *terminatingFn) setNext(next pathFn) {
// do nothing
}
func (f *terminatingFn) call(node interface{}, ctx *queryContext) {
ctx.result.appendResult(node, ctx.lastPosition)
}
// match single key
type matchKeyFn struct {
matchBase
Name string
}
func newMatchKeyFn(name string) *matchKeyFn {
return &matchKeyFn{Name: name}
}
func (f *matchKeyFn) call(node interface{}, ctx *queryContext) {
if array, ok := node.([]*toml.Tree); ok {
for _, tree := range array {
item := tree.Get(f.Name)
if item != nil {
ctx.lastPosition = tree.GetPosition(f.Name)
f.next.call(item, ctx)
}
}
} else if tree, ok := node.(*toml.Tree); ok {
item := tree.Get(f.Name)
if item != nil {
ctx.lastPosition = tree.GetPosition(f.Name)
f.next.call(item, ctx)
}
}
}
// match single index
type matchIndexFn struct {
matchBase
Idx int
}
func newMatchIndexFn(idx int) *matchIndexFn {
return &matchIndexFn{Idx: idx}
}
func (f *matchIndexFn) call(node interface{}, ctx *queryContext) {
if arr, ok := node.([]interface{}); ok {
if f.Idx < len(arr) && f.Idx >= 0 {
if treesArray, ok := node.([]*toml.Tree); ok {
if len(treesArray) > 0 {
ctx.lastPosition = treesArray[0].Position()
}
}
f.next.call(arr[f.Idx], ctx)
}
}
}
// filter by slicing
type matchSliceFn struct {
matchBase
Start, End, Step int
}
func newMatchSliceFn(start, end, step int) *matchSliceFn {
return &matchSliceFn{Start: start, End: end, Step: step}
}
func (f *matchSliceFn) call(node interface{}, ctx *queryContext) {
if arr, ok := node.([]interface{}); ok {
// adjust indexes for negative values, reverse ordering
realStart, realEnd := f.Start, f.End
if realStart < 0 {
realStart = len(arr) + realStart
}
if realEnd < 0 {
realEnd = len(arr) + realEnd
}
if realEnd < realStart {
realEnd, realStart = realStart, realEnd // swap
}
// loop and gather
for idx := realStart; idx < realEnd; idx += f.Step {
if treesArray, ok := node.([]*toml.Tree); ok {
if len(treesArray) > 0 {
ctx.lastPosition = treesArray[0].Position()
}
}
f.next.call(arr[idx], ctx)
}
}
}
// match anything
type matchAnyFn struct {
matchBase
}
func newMatchAnyFn() *matchAnyFn {
return &matchAnyFn{}
}
func (f *matchAnyFn) call(node interface{}, ctx *queryContext) {
if tree, ok := node.(*toml.Tree); ok {
for _, k := range tree.Keys() {
v := tree.Get(k)
ctx.lastPosition = tree.GetPosition(k)
f.next.call(v, ctx)
}
}
}
// filter through union
type matchUnionFn struct {
Union []pathFn
}
func (f *matchUnionFn) setNext(next pathFn) {
for _, fn := range f.Union {
fn.setNext(next)
}
}
func (f *matchUnionFn) call(node interface{}, ctx *queryContext) {
for _, fn := range f.Union {
fn.call(node, ctx)
}
}
// match every single last node in the tree
type matchRecursiveFn struct {
matchBase
}
func newMatchRecursiveFn() *matchRecursiveFn {
return &matchRecursiveFn{}
}
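// call passes the current tree to the next functor, then walks every nested
// tree and array of trees, passing each of their values along as well.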
func (f *matchRecursiveFn) call(node interface{}, ctx *queryContext) {
originalPosition := ctx.lastPosition
if tree, ok := node.(*toml.Tree); ok {
var visit func(tree *toml.Tree)
visit = func(tree *toml.Tree) {
for _, k := range tree.Keys() {
v := tree.Get(k)
ctx.lastPosition = tree.GetPosition(k)
f.next.call(v, ctx)
switch node := v.(type) {
case *toml.Tree:
visit(node)
case []*toml.Tree:
for _, subtree := range node {
visit(subtree)
}
}
}
}
ctx.lastPosition = originalPosition
f.next.call(tree, ctx)
visit(tree)
}
}
// match based on an externally provided functional filter
type matchFilterFn struct {
matchBase
Pos toml.Position
Name string
}
func newMatchFilterFn(name string, pos toml.Position) *matchFilterFn {
return &matchFilterFn{Name: name, Pos: pos}
}
func (f *matchFilterFn) call(node interface{}, ctx *queryContext) {
fn, ok := (*ctx.filters)[f.Name]
if !ok {
panic(fmt.Sprintf("%s: query context does not have filter '%s'",
f.Pos.String(), f.Name))
}
switch castNode := node.(type) {
case *toml.Tree:
for _, k := range castNode.Keys() {
v := castNode.Get(k)
if fn(v) {
ctx.lastPosition = castNode.GetPosition(k)
f.next.call(v, ctx)
}
}
case []*toml.Tree:
for _, v := range castNode {
if fn(v) {
if len(castNode) > 0 {
ctx.lastPosition = castNode[0].Position()
}
f.next.call(v, ctx)
}
}
case []interface{}:
for _, v := range castNode {
if fn(v) {
f.next.call(v, ctx)
}
}
}
}

202
vendor/github.com/pelletier/go-toml/query/match_test.go generated vendored Normal file

@@ -0,0 +1,202 @@
package query
import (
"fmt"
"github.com/pelletier/go-toml"
"testing"
)
// dump path tree to a string
func pathString(root pathFn) string {
result := fmt.Sprintf("%T:", root)
switch fn := root.(type) {
case *terminatingFn:
result += "{}"
case *matchKeyFn:
result += fmt.Sprintf("{%s}", fn.Name)
result += pathString(fn.next)
case *matchIndexFn:
result += fmt.Sprintf("{%d}", fn.Idx)
result += pathString(fn.next)
case *matchSliceFn:
result += fmt.Sprintf("{%d:%d:%d}",
fn.Start, fn.End, fn.Step)
result += pathString(fn.next)
case *matchAnyFn:
result += "{}"
result += pathString(fn.next)
case *matchUnionFn:
result += "{["
for _, v := range fn.Union {
result += pathString(v) + ", "
}
result += "]}"
case *matchRecursiveFn:
result += "{}"
result += pathString(fn.next)
case *matchFilterFn:
result += fmt.Sprintf("{%s}", fn.Name)
result += pathString(fn.next)
}
return result
}
func assertPathMatch(t *testing.T, path, ref *Query) bool {
pathStr := pathString(path.root)
refStr := pathString(ref.root)
if pathStr != refStr {
t.Errorf("paths do not match")
t.Log("test:", pathStr)
t.Log("ref: ", refStr)
return false
}
return true
}
func assertPath(t *testing.T, query string, ref *Query) {
path, _ := parseQuery(lexQuery(query))
assertPathMatch(t, path, ref)
}
func buildPath(parts ...pathFn) *Query {
query := newQuery()
for _, v := range parts {
query.appendPath(v)
}
return query
}
func TestPathRoot(t *testing.T) {
assertPath(t,
"$",
buildPath(
// empty
))
}
func TestPathKey(t *testing.T) {
assertPath(t,
"$.foo",
buildPath(
newMatchKeyFn("foo"),
))
}
func TestPathBracketKey(t *testing.T) {
assertPath(t,
"$[foo]",
buildPath(
newMatchKeyFn("foo"),
))
}
func TestPathBracketStringKey(t *testing.T) {
assertPath(t,
"$['foo']",
buildPath(
newMatchKeyFn("foo"),
))
}
func TestPathIndex(t *testing.T) {
assertPath(t,
"$[123]",
buildPath(
newMatchIndexFn(123),
))
}
func TestPathSliceStart(t *testing.T) {
assertPath(t,
"$[123:]",
buildPath(
newMatchSliceFn(123, maxInt, 1),
))
}
func TestPathSliceStartEnd(t *testing.T) {
assertPath(t,
"$[123:456]",
buildPath(
newMatchSliceFn(123, 456, 1),
))
}
func TestPathSliceStartEndColon(t *testing.T) {
assertPath(t,
"$[123:456:]",
buildPath(
newMatchSliceFn(123, 456, 1),
))
}
func TestPathSliceStartStep(t *testing.T) {
assertPath(t,
"$[123::7]",
buildPath(
newMatchSliceFn(123, maxInt, 7),
))
}
func TestPathSliceEndStep(t *testing.T) {
assertPath(t,
"$[:456:7]",
buildPath(
newMatchSliceFn(0, 456, 7),
))
}
func TestPathSliceStep(t *testing.T) {
assertPath(t,
"$[::7]",
buildPath(
newMatchSliceFn(0, maxInt, 7),
))
}
func TestPathSliceAll(t *testing.T) {
assertPath(t,
"$[123:456:7]",
buildPath(
newMatchSliceFn(123, 456, 7),
))
}
func TestPathAny(t *testing.T) {
assertPath(t,
"$.*",
buildPath(
newMatchAnyFn(),
))
}
func TestPathUnion(t *testing.T) {
assertPath(t,
"$[foo, bar, baz]",
buildPath(
&matchUnionFn{[]pathFn{
newMatchKeyFn("foo"),
newMatchKeyFn("bar"),
newMatchKeyFn("baz"),
}},
))
}
func TestPathRecurse(t *testing.T) {
assertPath(t,
"$..*",
buildPath(
newMatchRecursiveFn(),
))
}
func TestPathFilterExpr(t *testing.T) {
assertPath(t,
"$[?('foo'),?(bar)]",
buildPath(
&matchUnionFn{[]pathFn{
newMatchFilterFn("foo", toml.Position{}),
newMatchFilterFn("bar", toml.Position{}),
}},
))
}

275
vendor/github.com/pelletier/go-toml/query/parser.go generated vendored Normal file

@@ -0,0 +1,275 @@
/*
Based on the "jsonpath" spec/concept.
http://goessner.net/articles/JsonPath/
https://code.google.com/p/json-path/
*/
package query
import (
"fmt"
)
const maxInt = int(^uint(0) >> 1)
type queryParser struct {
flow chan token
tokensBuffer []token
query *Query
union []pathFn
err error
}
type queryParserStateFn func() queryParserStateFn
// Formats an error message based on a token and stops the parse
func (p *queryParser) parseError(tok *token, msg string, args ...interface{}) queryParserStateFn {
p.err = fmt.Errorf(tok.Position.String()+": "+msg, args...)
return nil // trigger parse to end
}
func (p *queryParser) run() {
for state := p.parseStart; state != nil; {
state = state()
}
}
func (p *queryParser) backup(tok *token) {
p.tokensBuffer = append(p.tokensBuffer, *tok)
}
func (p *queryParser) peek() *token {
if len(p.tokensBuffer) != 0 {
return &(p.tokensBuffer[0])
}
tok, ok := <-p.flow
if !ok {
return nil
}
p.backup(&tok)
return &tok
}
func (p *queryParser) lookahead(types ...tokenType) bool {
result := true
buffer := []token{}
for _, typ := range types {
tok := p.getToken()
if tok == nil {
result = false
break
}
buffer = append(buffer, *tok)
if tok.typ != typ {
result = false
break
}
}
// add the tokens back to the buffer, and return
p.tokensBuffer = append(p.tokensBuffer, buffer...)
return result
}
func (p *queryParser) getToken() *token {
if len(p.tokensBuffer) != 0 {
tok := p.tokensBuffer[0]
p.tokensBuffer = p.tokensBuffer[1:]
return &tok
}
tok, ok := <-p.flow
if !ok {
return nil
}
return &tok
}
func (p *queryParser) parseStart() queryParserStateFn {
tok := p.getToken()
if tok == nil || tok.typ == tokenEOF {
return nil
}
if tok.typ != tokenDollar {
return p.parseError(tok, "Expected '$' at start of expression")
}
return p.parseMatchExpr
}
// handle '.' prefix, '[]', and '..'
func (p *queryParser) parseMatchExpr() queryParserStateFn {
tok := p.getToken()
switch tok.typ {
case tokenDotDot:
p.query.appendPath(&matchRecursiveFn{})
// nested parse for '..'
tok := p.getToken()
switch tok.typ {
case tokenKey:
p.query.appendPath(newMatchKeyFn(tok.val))
return p.parseMatchExpr
case tokenLeftBracket:
return p.parseBracketExpr
case tokenStar:
// do nothing - the recursive predicate is enough
return p.parseMatchExpr
}
case tokenDot:
// nested parse for '.'
tok := p.getToken()
switch tok.typ {
case tokenKey:
p.query.appendPath(newMatchKeyFn(tok.val))
return p.parseMatchExpr
case tokenStar:
p.query.appendPath(&matchAnyFn{})
return p.parseMatchExpr
}
case tokenLeftBracket:
return p.parseBracketExpr
case tokenEOF:
return nil // allow EOF at this stage
}
return p.parseError(tok, "expected match expression")
}
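// parseBracketExpr decides whether a bracketed expression is a slice (an
// optional integer followed by ':') or a union of indexes, keys, and filters.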
func (p *queryParser) parseBracketExpr() queryParserStateFn {
if p.lookahead(tokenInteger, tokenColon) {
return p.parseSliceExpr
}
if p.peek().typ == tokenColon {
return p.parseSliceExpr
}
return p.parseUnionExpr
}
func (p *queryParser) parseUnionExpr() queryParserStateFn {
var tok *token
// this state can be traversed after some sub-expressions
// so be careful when setting up state in the parser
if p.union == nil {
p.union = []pathFn{}
}
loop: // labeled loop for easy breaking
for {
if len(p.union) > 0 {
// parse delimiter or terminator
tok = p.getToken()
switch tok.typ {
case tokenComma:
// do nothing
case tokenRightBracket:
break loop
default:
return p.parseError(tok, "expected ',' or ']', not '%s'", tok.val)
}
}
// parse sub expression
tok = p.getToken()
switch tok.typ {
case tokenInteger:
p.union = append(p.union, newMatchIndexFn(tok.Int()))
case tokenKey:
p.union = append(p.union, newMatchKeyFn(tok.val))
case tokenString:
p.union = append(p.union, newMatchKeyFn(tok.val))
case tokenQuestion:
return p.parseFilterExpr
default:
return p.parseError(tok, "expected union sub expression, not '%s', %d", tok.val, len(p.union))
}
}
// if there is only one sub-expression, use that instead
if len(p.union) == 1 {
p.query.appendPath(p.union[0])
} else {
p.query.appendPath(&matchUnionFn{p.union})
}
p.union = nil // clear out state
return p.parseMatchExpr
}
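// parseSliceExpr parses '[start:end:step]', where each part is optional and
// defaults to 0, maxInt, and 1 respectively.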
func (p *queryParser) parseSliceExpr() queryParserStateFn {
// init slice to grab all elements
start, end, step := 0, maxInt, 1
// parse optional start
tok := p.getToken()
if tok.typ == tokenInteger {
start = tok.Int()
tok = p.getToken()
}
if tok.typ != tokenColon {
return p.parseError(tok, "expected ':'")
}
// parse optional end
tok = p.getToken()
if tok.typ == tokenInteger {
end = tok.Int()
tok = p.getToken()
}
if tok.typ == tokenRightBracket {
p.query.appendPath(newMatchSliceFn(start, end, step))
return p.parseMatchExpr
}
if tok.typ != tokenColon {
return p.parseError(tok, "expected ']' or ':'")
}
// parse optional step
tok = p.getToken()
if tok.typ == tokenInteger {
step = tok.Int()
if step < 0 {
return p.parseError(tok, "step must be a positive value")
}
tok = p.getToken()
}
if tok.typ != tokenRightBracket {
return p.parseError(tok, "expected ']'")
}
p.query.appendPath(newMatchSliceFn(start, end, step))
return p.parseMatchExpr
}
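// parseFilterExpr parses '?(name)' and appends the named filter to the
// current union.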
func (p *queryParser) parseFilterExpr() queryParserStateFn {
tok := p.getToken()
if tok.typ != tokenLeftParen {
return p.parseError(tok, "expected left-parenthesis for filter expression")
}
tok = p.getToken()
if tok.typ != tokenKey && tok.typ != tokenString {
return p.parseError(tok, "expected key or string for filter function name")
}
name := tok.val
tok = p.getToken()
if tok.typ != tokenRightParen {
return p.parseError(tok, "expected right-parenthesis for filter expression")
}
p.union = append(p.union, newMatchFilterFn(name, tok.Position))
return p.parseUnionExpr
}
func parseQuery(flow chan token) (*Query, error) {
parser := &queryParser{
flow: flow,
tokensBuffer: []token{},
query: newQuery(),
}
parser.run()
return parser.query, parser.err
}

482
vendor/github.com/pelletier/go-toml/query/parser_test.go generated vendored Normal file

@@ -0,0 +1,482 @@
package query
import (
"fmt"
"io/ioutil"
"sort"
"strings"
"testing"
"time"
"github.com/pelletier/go-toml"
)
type queryTestNode struct {
value interface{}
position toml.Position
}
func valueString(root interface{}) string {
result := "" //fmt.Sprintf("%T:", root)
switch node := root.(type) {
case *Result:
items := []string{}
for i, v := range node.Values() {
items = append(items, fmt.Sprintf("%s:%s",
node.Positions()[i].String(), valueString(v)))
}
sort.Strings(items)
result = "[" + strings.Join(items, ", ") + "]"
case queryTestNode:
result = fmt.Sprintf("%s:%s",
node.position.String(), valueString(node.value))
case []interface{}:
items := []string{}
for _, v := range node {
items = append(items, valueString(v))
}
sort.Strings(items)
result = "[" + strings.Join(items, ", ") + "]"
case *toml.Tree:
// workaround for unreliable map key ordering
items := []string{}
for _, k := range node.Keys() {
v := node.GetPath([]string{k})
items = append(items, k+":"+valueString(v))
}
sort.Strings(items)
result = "{" + strings.Join(items, ", ") + "}"
case map[string]interface{}:
// workaround for unreliable map key ordering
items := []string{}
for k, v := range node {
items = append(items, k+":"+valueString(v))
}
sort.Strings(items)
result = "{" + strings.Join(items, ", ") + "}"
case int64:
result += fmt.Sprintf("%d", node)
case string:
result += "'" + node + "'"
case float64:
result += fmt.Sprintf("%f", node)
case bool:
result += fmt.Sprintf("%t", node)
case time.Time:
result += fmt.Sprintf("'%v'", node)
}
return result
}
func assertValue(t *testing.T, result, ref interface{}) {
pathStr := valueString(result)
refStr := valueString(ref)
if pathStr != refStr {
t.Errorf("values do not match")
t.Log("test:", pathStr)
t.Log("ref: ", refStr)
}
}
func assertQueryPositions(t *testing.T, tomlDoc string, query string, ref []interface{}) {
tree, err := toml.Load(tomlDoc)
if err != nil {
t.Errorf("Non-nil toml parse error: %v", err)
return
}
q, err := Compile(query)
if err != nil {
t.Error(err)
return
}
results := q.Execute(tree)
assertValue(t, results, ref)
}
func TestQueryRoot(t *testing.T) {
assertQueryPositions(t,
"a = 42",
"$",
[]interface{}{
queryTestNode{
map[string]interface{}{
"a": int64(42),
}, toml.Position{1, 1},
},
})
}
func TestQueryKey(t *testing.T) {
assertQueryPositions(t,
"[foo]\na = 42",
"$.foo.a",
[]interface{}{
queryTestNode{
int64(42), toml.Position{2, 1},
},
})
}
func TestQueryKeyString(t *testing.T) {
assertQueryPositions(t,
"[foo]\na = 42",
"$.foo['a']",
[]interface{}{
queryTestNode{
int64(42), toml.Position{2, 1},
},
})
}
func TestQueryIndex(t *testing.T) {
assertQueryPositions(t,
"[foo]\na = [1,2,3,4,5,6,7,8,9,0]",
"$.foo.a[5]",
[]interface{}{
queryTestNode{
int64(6), toml.Position{2, 1},
},
})
}
func TestQuerySliceRange(t *testing.T) {
assertQueryPositions(t,
"[foo]\na = [1,2,3,4,5,6,7,8,9,0]",
"$.foo.a[0:5]",
[]interface{}{
queryTestNode{
int64(1), toml.Position{2, 1},
},
queryTestNode{
int64(2), toml.Position{2, 1},
},
queryTestNode{
int64(3), toml.Position{2, 1},
},
queryTestNode{
int64(4), toml.Position{2, 1},
},
queryTestNode{
int64(5), toml.Position{2, 1},
},
})
}
func TestQuerySliceStep(t *testing.T) {
assertQueryPositions(t,
"[foo]\na = [1,2,3,4,5,6,7,8,9,0]",
"$.foo.a[0:5:2]",
[]interface{}{
queryTestNode{
int64(1), toml.Position{2, 1},
},
queryTestNode{
int64(3), toml.Position{2, 1},
},
queryTestNode{
int64(5), toml.Position{2, 1},
},
})
}
func TestQueryAny(t *testing.T) {
assertQueryPositions(t,
"[foo.bar]\na=1\nb=2\n[foo.baz]\na=3\nb=4",
"$.foo.*",
[]interface{}{
queryTestNode{
map[string]interface{}{
"a": int64(1),
"b": int64(2),
}, toml.Position{1, 1},
},
queryTestNode{
map[string]interface{}{
"a": int64(3),
"b": int64(4),
}, toml.Position{4, 1},
},
})
}
func TestQueryUnionSimple(t *testing.T) {
assertQueryPositions(t,
"[foo.bar]\na=1\nb=2\n[baz.foo]\na=3\nb=4\n[gorf.foo]\na=5\nb=6",
"$.*[bar,foo]",
[]interface{}{
queryTestNode{
map[string]interface{}{
"a": int64(1),
"b": int64(2),
}, toml.Position{1, 1},
},
queryTestNode{
map[string]interface{}{
"a": int64(3),
"b": int64(4),
}, toml.Position{4, 1},
},
queryTestNode{
map[string]interface{}{
"a": int64(5),
"b": int64(6),
}, toml.Position{7, 1},
},
})
}
func TestQueryRecursionAll(t *testing.T) {
assertQueryPositions(t,
"[foo.bar]\na=1\nb=2\n[baz.foo]\na=3\nb=4\n[gorf.foo]\na=5\nb=6",
"$..*",
[]interface{}{
queryTestNode{
map[string]interface{}{
"foo": map[string]interface{}{
"bar": map[string]interface{}{
"a": int64(1),
"b": int64(2),
},
},
"baz": map[string]interface{}{
"foo": map[string]interface{}{
"a": int64(3),
"b": int64(4),
},
},
"gorf": map[string]interface{}{
"foo": map[string]interface{}{
"a": int64(5),
"b": int64(6),
},
},
}, toml.Position{1, 1},
},
queryTestNode{
map[string]interface{}{
"bar": map[string]interface{}{
"a": int64(1),
"b": int64(2),
},
}, toml.Position{1, 1},
},
queryTestNode{
map[string]interface{}{
"a": int64(1),
"b": int64(2),
}, toml.Position{1, 1},
},
queryTestNode{
int64(1), toml.Position{2, 1},
},
queryTestNode{
int64(2), toml.Position{3, 1},
},
queryTestNode{
map[string]interface{}{
"foo": map[string]interface{}{
"a": int64(3),
"b": int64(4),
},
}, toml.Position{4, 1},
},
queryTestNode{
map[string]interface{}{
"a": int64(3),
"b": int64(4),
}, toml.Position{4, 1},
},
queryTestNode{
int64(3), toml.Position{5, 1},
},
queryTestNode{
int64(4), toml.Position{6, 1},
},
queryTestNode{
map[string]interface{}{
"foo": map[string]interface{}{
"a": int64(5),
"b": int64(6),
},
}, toml.Position{7, 1},
},
queryTestNode{
map[string]interface{}{
"a": int64(5),
"b": int64(6),
}, toml.Position{7, 1},
},
queryTestNode{
int64(5), toml.Position{8, 1},
},
queryTestNode{
int64(6), toml.Position{9, 1},
},
})
}
func TestQueryRecursionUnionSimple(t *testing.T) {
assertQueryPositions(t,
"[foo.bar]\na=1\nb=2\n[baz.foo]\na=3\nb=4\n[gorf.foo]\na=5\nb=6",
"$..['foo','bar']",
[]interface{}{
queryTestNode{
map[string]interface{}{
"bar": map[string]interface{}{
"a": int64(1),
"b": int64(2),
},
}, toml.Position{1, 1},
},
queryTestNode{
map[string]interface{}{
"a": int64(3),
"b": int64(4),
}, toml.Position{4, 1},
},
queryTestNode{
map[string]interface{}{
"a": int64(1),
"b": int64(2),
}, toml.Position{1, 1},
},
queryTestNode{
map[string]interface{}{
"a": int64(5),
"b": int64(6),
}, toml.Position{7, 1},
},
})
}
func TestQueryFilterFn(t *testing.T) {
buff, err := ioutil.ReadFile("../example.toml")
if err != nil {
t.Error(err)
return
}
assertQueryPositions(t, string(buff),
"$..[?(int)]",
[]interface{}{
queryTestNode{
int64(8001), toml.Position{13, 1},
},
queryTestNode{
int64(8001), toml.Position{13, 1},
},
queryTestNode{
int64(8002), toml.Position{13, 1},
},
queryTestNode{
int64(5000), toml.Position{14, 1},
},
})
assertQueryPositions(t, string(buff),
"$..[?(string)]",
[]interface{}{
queryTestNode{
"TOML Example", toml.Position{3, 1},
},
queryTestNode{
"Tom Preston-Werner", toml.Position{6, 1},
},
queryTestNode{
"GitHub", toml.Position{7, 1},
},
queryTestNode{
"GitHub Cofounder & CEO\nLikes tater tots and beer.",
toml.Position{8, 1},
},
queryTestNode{
"192.168.1.1", toml.Position{12, 1},
},
queryTestNode{
"10.0.0.1", toml.Position{21, 3},
},
queryTestNode{
"eqdc10", toml.Position{22, 3},
},
queryTestNode{
"10.0.0.2", toml.Position{25, 3},
},
queryTestNode{
"eqdc10", toml.Position{26, 3},
},
})
assertQueryPositions(t, string(buff),
"$..[?(float)]",
[]interface{}{ // no float values in document
})
tv, _ := time.Parse(time.RFC3339, "1979-05-27T07:32:00Z")
assertQueryPositions(t, string(buff),
"$..[?(tree)]",
[]interface{}{
queryTestNode{
map[string]interface{}{
"name": "Tom Preston-Werner",
"organization": "GitHub",
"bio": "GitHub Cofounder & CEO\nLikes tater tots and beer.",
"dob": tv,
}, toml.Position{5, 1},
},
queryTestNode{
map[string]interface{}{
"server": "192.168.1.1",
"ports": []interface{}{int64(8001), int64(8001), int64(8002)},
"connection_max": int64(5000),
"enabled": true,
}, toml.Position{11, 1},
},
queryTestNode{
map[string]interface{}{
"alpha": map[string]interface{}{
"ip": "10.0.0.1",
"dc": "eqdc10",
},
"beta": map[string]interface{}{
"ip": "10.0.0.2",
"dc": "eqdc10",
},
}, toml.Position{17, 1},
},
queryTestNode{
map[string]interface{}{
"ip": "10.0.0.1",
"dc": "eqdc10",
}, toml.Position{20, 3},
},
queryTestNode{
map[string]interface{}{
"ip": "10.0.0.2",
"dc": "eqdc10",
}, toml.Position{24, 3},
},
queryTestNode{
map[string]interface{}{
"data": []interface{}{
[]interface{}{"gamma", "delta"},
[]interface{}{int64(1), int64(2)},
},
}, toml.Position{28, 1},
},
})
assertQueryPositions(t, string(buff),
"$..[?(time)]",
[]interface{}{
queryTestNode{
tv, toml.Position{9, 1},
},
})
assertQueryPositions(t, string(buff),
"$..[?(bool)]",
[]interface{}{
queryTestNode{
true, toml.Position{15, 1},
},
})
}

158
vendor/github.com/pelletier/go-toml/query/query.go generated vendored Normal file

@@ -0,0 +1,158 @@
package query
import (
"time"
"github.com/pelletier/go-toml"
)
// NodeFilterFn represents a user-defined filter function, for use with
// Query.SetFilter().
//
// The return value of the function must indicate if 'node' is to be included
// at this stage of the TOML path. Returning true will include the node, and
// returning false will exclude it.
//
// NOTE: Care should be taken to write script callbacks such that they are safe
// to use from multiple goroutines.
type NodeFilterFn func(node interface{}) bool
// Result is the result of Executing a Query.
type Result struct {
items []interface{}
positions []toml.Position
}
// appends a value/position pair to the result set.
func (r *Result) appendResult(node interface{}, pos toml.Position) {
r.items = append(r.items, node)
r.positions = append(r.positions, pos)
}
// Values is the set of values within a Result. The values are not guaranteed
// to be in document order, and their order may differ each time a query is
// executed.
func (r Result) Values() []interface{} {
return r.items
}
// Positions is a set of positions for values within a Result. Each index
// in Positions() corresponds to the entry in Values() of the same index.
func (r Result) Positions() []toml.Position {
return r.positions
}
// runtime context for executing query paths
type queryContext struct {
result *Result
filters *map[string]NodeFilterFn
lastPosition toml.Position
}
// generic path functor interface
type pathFn interface {
setNext(next pathFn)
// it is the caller's responsibility to set the ctx.lastPosition before invoking call()
// node can be one of: *toml.Tree, []*toml.Tree, or a scalar
call(node interface{}, ctx *queryContext)
}
// A Query is the representation of a compiled TOML path. A Query is safe
// for concurrent use by multiple goroutines.
type Query struct {
root pathFn
tail pathFn
filters *map[string]NodeFilterFn
}
func newQuery() *Query {
return &Query{
root: nil,
tail: nil,
filters: &defaultFilterFunctions,
}
}
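// appendPath adds a functor to the end of the query chain and caps the chain
// with a result-gathering terminating functor.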
func (q *Query) appendPath(next pathFn) {
if q.root == nil {
q.root = next
} else {
q.tail.setNext(next)
}
q.tail = next
next.setNext(newTerminatingFn()) // init the next functor
}
// Compile compiles a TOML path expression. The returned Query can be used
// to match elements within a Tree and its descendants. See Execute.
func Compile(path string) (*Query, error) {
return parseQuery(lexQuery(path))
}
// Execute executes a query against a Tree, and returns the result of the query.
func (q *Query) Execute(tree *toml.Tree) *Result {
result := &Result{
items: []interface{}{},
positions: []toml.Position{},
}
if q.root == nil {
result.appendResult(tree, tree.GetPosition(""))
} else {
ctx := &queryContext{
result: result,
filters: q.filters,
}
ctx.lastPosition = tree.Position()
q.root.call(tree, ctx)
}
return result
}
// CompileAndExecute is a shorthand for Compile(path) followed by Execute(tree).
func CompileAndExecute(path string, tree *toml.Tree) (*Result, error) {
query, err := Compile(path)
if err != nil {
return nil, err
}
return query.Execute(tree), nil
}
// SetFilter sets a user-defined filter function. These may be used inside
// "?(..)" query expressions to filter TOML document elements within a query.
func (q *Query) SetFilter(name string, fn NodeFilterFn) {
if q.filters == &defaultFilterFunctions {
// clone the static table
q.filters = &map[string]NodeFilterFn{}
for k, v := range defaultFilterFunctions {
(*q.filters)[k] = v
}
}
(*q.filters)[name] = fn
}
var defaultFilterFunctions = map[string]NodeFilterFn{
"tree": func(node interface{}) bool {
_, ok := node.(*toml.Tree)
return ok
},
"int": func(node interface{}) bool {
_, ok := node.(int64)
return ok
},
"float": func(node interface{}) bool {
_, ok := node.(float64)
return ok
},
"string": func(node interface{}) bool {
_, ok := node.(string)
return ok
},
"time": func(node interface{}) bool {
_, ok := node.(time.Time)
return ok
},
"bool": func(node interface{}) bool {
_, ok := node.(bool)
return ok
},
}

157
vendor/github.com/pelletier/go-toml/query/query_test.go generated vendored Normal file

@@ -0,0 +1,157 @@
package query
import (
"fmt"
"testing"
"github.com/pelletier/go-toml"
)
func assertArrayContainsInAnyOrder(t *testing.T, array []interface{}, objects ...interface{}) {
if len(array) != len(objects) {
t.Fatalf("array contains %d objects but %d are expected", len(array), len(objects))
}
for _, o := range objects {
found := false
for _, a := range array {
if a == o {
found = true
break
}
}
if !found {
t.Fatal(o, "not found in array", array)
}
}
}
func TestQueryExample(t *testing.T) {
config, _ := toml.Load(`
[[book]]
title = "The Stand"
author = "Stephen King"
[[book]]
title = "For Whom the Bell Tolls"
author = "Ernest Hemmingway"
[[book]]
title = "Neuromancer"
author = "William Gibson"
`)
authors, err := CompileAndExecute("$.book.author", config)
if err != nil {
t.Fatal("unexpected error:", err)
}
names := authors.Values()
if len(names) != 3 {
t.Fatalf("query should return 3 names but returned %d", len(names))
}
assertArrayContainsInAnyOrder(t, names, "Stephen King", "Ernest Hemmingway", "William Gibson")
}
func TestQueryReadmeExample(t *testing.T) {
config, _ := toml.Load(`
[postgres]
user = "pelletier"
password = "mypassword"
`)
query, err := Compile("$..[user,password]")
if err != nil {
t.Fatal("unexpected error:", err)
}
results := query.Execute(config)
values := results.Values()
if len(values) != 2 {
t.Fatalf("query should return 2 values but returned %d", len(values))
}
assertArrayContainsInAnyOrder(t, values, "pelletier", "mypassword")
}
func TestQueryPathNotPresent(t *testing.T) {
config, _ := toml.Load(`a = "hello"`)
query, err := Compile("$.foo.bar")
if err != nil {
t.Fatal("unexpected error:", err)
}
results := query.Execute(config)
if err != nil {
t.Fatalf("err should be nil. got %s instead", err)
}
if len(results.items) != 0 {
t.Fatalf("no items should be matched. %d matched instead", len(results.items))
}
}
func ExampleNodeFilterFn_filterExample() {
tree, _ := toml.Load(`
[struct_one]
foo = "foo"
bar = "bar"
[struct_two]
baz = "baz"
gorf = "gorf"
`)
// create a query that references a user-defined-filter
query, _ := Compile("$[?(bazOnly)]")
// define the filter, and assign it to the query
query.SetFilter("bazOnly", func(node interface{}) bool {
if tree, ok := node.(*toml.Tree); ok {
return tree.Has("baz")
}
return false // reject all other node types
})
// results contain only the 'struct_two' Tree
query.Execute(tree)
}
func ExampleQuery_queryExample() {
config, _ := toml.Load(`
[[book]]
title = "The Stand"
author = "Stephen King"
[[book]]
title = "For Whom the Bell Tolls"
author = "Ernest Hemmingway"
[[book]]
title = "Neuromancer"
author = "William Gibson"
`)
// find and print all the authors in the document
query, _ := Compile("$.book.author")
authors := query.Execute(config)
for _, name := range authors.Values() {
fmt.Println(name)
}
}
func TestTomlQuery(t *testing.T) {
tree, err := toml.Load("[foo.bar]\na=1\nb=2\n[baz.foo]\na=3\nb=4\n[gorf.foo]\na=5\nb=6")
if err != nil {
t.Error(err)
return
}
query, err := Compile("$.foo.bar")
if err != nil {
t.Error(err)
return
}
result := query.Execute(tree)
values := result.Values()
if len(values) != 1 {
t.Errorf("Expected resultset of 1, got %d instead: %v", len(values), values)
}
if tt, ok := values[0].(*toml.Tree); !ok {
t.Errorf("Expected type of Tree: %T", values[0])
} else if tt.Get("a") != int64(1) {
t.Errorf("Expected 'a' with a value 1: %v", tt.Get("a"))
} else if tt.Get("b") != int64(2) {
t.Errorf("Expected 'b' with a value 2: %v", tt.Get("b"))
}
}

106
vendor/github.com/pelletier/go-toml/query/tokens.go generated vendored Normal file

@@ -0,0 +1,106 @@
package query
import (
"fmt"
"github.com/pelletier/go-toml"
"strconv"
"unicode"
)
// Define tokens
type tokenType int
const (
eof = -(iota + 1)
)
const (
tokenError tokenType = iota
tokenEOF
tokenKey
tokenString
tokenInteger
tokenFloat
tokenLeftBracket
tokenRightBracket
tokenLeftParen
tokenRightParen
tokenComma
tokenColon
tokenDollar
tokenStar
tokenQuestion
tokenDot
tokenDotDot
)
var tokenTypeNames = []string{
"Error",
"EOF",
"Key",
"String",
"Integer",
"Float",
"[",
"]",
"(",
")",
",",
":",
"$",
"*",
"?",
".",
"..",
}
type token struct {
toml.Position
typ tokenType
val string
}
func (tt tokenType) String() string {
idx := int(tt)
if idx < len(tokenTypeNames) {
return tokenTypeNames[idx]
}
return "Unknown"
}
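// Int returns the token's value as an integer, panicking if it cannot be
// parsed.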
func (t token) Int() int {
if result, err := strconv.Atoi(t.val); err != nil {
panic(err)
} else {
return result
}
}
func (t token) String() string {
switch t.typ {
case tokenEOF:
return "EOF"
case tokenError:
return t.val
}
return fmt.Sprintf("%q", t.val)
}
func isSpace(r rune) bool {
return r == ' ' || r == '\t'
}
func isAlphanumeric(r rune) bool {
return unicode.IsLetter(r) || r == '_'
}
func isDigit(r rune) bool {
return unicode.IsNumber(r)
}
func isHexDigit(r rune) bool {
return isDigit(r) ||
(r >= 'a' && r <= 'f') ||
(r >= 'A' && r <= 'F')
}