1
0
mirror of https://github.com/taigrr/yq synced 2025-01-18 04:53:17 -08:00
This commit is contained in:
Mike Farah
2020-10-09 10:59:03 +11:00
parent f7d4695837
commit f479a7e8e3
14 changed files with 411 additions and 81 deletions

View File

@@ -0,0 +1,75 @@
package treeops
// dataTreeNavigator walks a tree of candidate YAML nodes according to a
// parsed path expression, delegating single-step navigation to a Traverser.
type dataTreeNavigator struct {
	traverser Traverser
}

// NavigationPrefs configures how navigation behaves.
type NavigationPrefs struct {
	// FollowAlias controls whether alias nodes are resolved during traversal.
	FollowAlias bool
}

// DataTreeNavigator finds all candidate nodes matching a path expression tree.
type DataTreeNavigator interface {
	// GetMatchingNodes returns the candidates reachable from matchingNodes
	// via the path expression rooted at pathNode.
	GetMatchingNodes(matchingNodes []*CandidateNode, pathNode *PathTreeNode) ([]*CandidateNode, error)
}
// NewDataTreeNavigator creates a DataTreeNavigator whose traversal behaviour
// is configured by the supplied preferences.
func NewDataTreeNavigator(navigationPrefs NavigationPrefs) DataTreeNavigator {
	return &dataTreeNavigator{traverser: NewTraverser(navigationPrefs)}
}
// traverse applies a single path element to every node in matchingNodes,
// collecting the children produced by the traverser into one flat slice.
func (d *dataTreeNavigator) traverse(matchingNodes []*CandidateNode, pathNode *PathElement) ([]*CandidateNode, error) {
	log.Debugf("-- Traversing")
	result := make([]*CandidateNode, 0)
	for _, candidate := range matchingNodes {
		children, err := d.traverser.Traverse(candidate, pathNode)
		if err != nil {
			return nil, err
		}
		result = append(result, children...)
	}
	return result, nil
}
// GetMatchingNodes evaluates the path expression tree rooted at pathNode
// against the given candidate nodes, returning every node that matches.
// Leaf elements (path keys / array indexes) are traversed directly; operation
// nodes are evaluated recursively. Only Traverse is implemented so far; other
// operations return no matches.
func (d *dataTreeNavigator) GetMatchingNodes(matchingNodes []*CandidateNode, pathNode *PathTreeNode) ([]*CandidateNode, error) {
	log.Debugf("Processing Path: %v", pathNode.PathElement.toString())
	if pathNode.PathElement.PathElementType == PathKey || pathNode.PathElement.PathElementType == ArrayIndex {
		return d.traverse(matchingNodes, pathNode.PathElement)
	}
	// the branch above returns, so no else is needed (idiomatic early return)
	var lhs []*CandidateNode //, rhs
	var err error
	switch pathNode.PathElement.OperationType {
	case Traverse:
		// evaluate the left side first, then apply the right side to its results
		lhs, err = d.GetMatchingNodes(matchingNodes, pathNode.Lhs)
		if err != nil {
			return nil, err
		}
		return d.GetMatchingNodes(lhs, pathNode.Rhs)
	// case Or, And:
	// 	lhs, err = d.GetMatchingNodes(matchingNodes, pathNode.Lhs)
	// 	if err != nil {
	// 		return nil, err
	// 	}
	// 	rhs, err = d.GetMatchingNodes(matchingNodes, pathNode.Rhs)
	// 	if err != nil {
	// 		return nil, err
	// 	}
	// 	return d.setFunction(pathNode.PathElement, lhs, rhs), nil
	// case Equals:
	// 	lhs, err = d.GetMatchingNodes(matchingNodes, pathNode.Lhs)
	// 	if err != nil {
	// 		return nil, err
	// 	}
	// 	return d.findMatchingValues(lhs, pathNode.Rhs)
	// case EqualsSelf:
	// 	return d.findMatchingValues(matchingNodes, pathNode.Rhs)
	default:
		return nil, nil
	}
}

View File

@@ -0,0 +1,125 @@
package treeops
import (
"strings"
"testing"
"github.com/mikefarah/yq/v3/test"
yaml "gopkg.in/yaml.v3"
)
// Shared fixtures for these tests: a navigator with default preferences and
// a path expression parser.
var treeNavigator = NewDataTreeNavigator(NavigationPrefs{})
var treeCreator = NewPathTreeCreator()
// readDoc decodes a single YAML document from content and wraps it as a
// one-element CandidateNode slice (document index 0). The test is aborted on
// decode errors.
func readDoc(t *testing.T, content string) []*CandidateNode {
	decoder := yaml.NewDecoder(strings.NewReader(content))
	var dataBucket yaml.Node
	if err := decoder.Decode(&dataBucket); err != nil {
		// Fatal rather than Error: continuing with a half-decoded node would
		// only produce confusing downstream assertion failures.
		t.Fatal(err)
	}
	return []*CandidateNode{{Node: &dataBucket, Document: 0}}
}
// resultsToString renders each candidate node via NodeToString, each rendering
// preceded by a newline (matching the expected-output literals in the tests).
// Uses strings.Builder instead of repeated string concatenation.
func resultsToString(results []*CandidateNode) string {
	var pretty strings.Builder
	for _, node := range results {
		pretty.WriteString("\n")
		pretty.WriteString(NodeToString(node))
	}
	return pretty.String()
}
// TestDataTreeNavigatorSimple checks that a single-key path returns the map
// value stored under that key. Note the YAML indentation is significant:
// "b" must be nested under "a".
func TestDataTreeNavigatorSimple(t *testing.T) {
	nodes := readDoc(t, `a:
  b: apple`)

	path, errPath := treeCreator.ParsePath("a")
	if errPath != nil {
		// Fatal: continuing with a nil path would panic inside GetMatchingNodes.
		t.Fatal(errPath)
	}
	results, errNav := treeNavigator.GetMatchingNodes(nodes, path)
	if errNav != nil {
		t.Fatal(errNav)
	}

	expected := `
-- Node --
Document 0, path: [a]
Tag: !!map, Kind: MappingNode, Anchor:
b: apple
`
	test.AssertResult(t, expected, resultsToString(results))
}
// TestDataTreeNavigatorSimpleDeep checks that a two-level path resolves to
// the nested scalar value.
func TestDataTreeNavigatorSimpleDeep(t *testing.T) {
	nodes := readDoc(t, `a:
  b: apple`)

	path, errPath := treeCreator.ParsePath("a.b")
	if errPath != nil {
		// Fatal: continuing with a nil path would panic inside GetMatchingNodes.
		t.Fatal(errPath)
	}
	results, errNav := treeNavigator.GetMatchingNodes(nodes, path)
	if errNav != nil {
		t.Fatal(errNav)
	}

	expected := `
-- Node --
Document 0, path: [a b]
Tag: !!str, Kind: ScalarNode, Anchor:
apple
`
	test.AssertResult(t, expected, resultsToString(results))
}
// TestDataTreeNavigatorSimpleMismatch checks that a path that matches no key
// yields no results at all.
func TestDataTreeNavigatorSimpleMismatch(t *testing.T) {
	nodes := readDoc(t, `a:
  c: apple`)

	path, errPath := treeCreator.ParsePath("a.b")
	if errPath != nil {
		// Fatal: continuing with a nil path would panic inside GetMatchingNodes.
		t.Fatal(errPath)
	}
	results, errNav := treeNavigator.GetMatchingNodes(nodes, path)
	if errNav != nil {
		t.Fatal(errNav)
	}

	expected := ``
	test.AssertResult(t, expected, resultsToString(results))
}
// TestDataTreeNavigatorWild checks that a '*' wildcard in a path element
// matches keys by pattern ("*a*" matches "cat").
func TestDataTreeNavigatorWild(t *testing.T) {
	nodes := readDoc(t, `a:
  cat: apple`)

	path, errPath := treeCreator.ParsePath("a.*a*")
	if errPath != nil {
		// Fatal: continuing with a nil path would panic inside GetMatchingNodes.
		t.Fatal(errPath)
	}
	results, errNav := treeNavigator.GetMatchingNodes(nodes, path)
	if errNav != nil {
		t.Fatal(errNav)
	}

	expected := `
-- Node --
Document 0, path: [a cat]
Tag: !!str, Kind: ScalarNode, Anchor:
apple
`
	test.AssertResult(t, expected, resultsToString(results))
}

59
pkg/yqlib/treeops/lib.go Normal file
View File

@@ -0,0 +1,59 @@
package treeops
import (
"bytes"
"fmt"
"gopkg.in/op/go-logging.v1"
"gopkg.in/yaml.v3"
)
// CandidateNode associates a yaml.Node with the location it was found at
// while evaluating a path expression.
type CandidateNode struct {
	Node     *yaml.Node    // the actual node
	Path     []interface{} // the path we took to get to this node
	Document uint          // the document index of this node

	// middle nodes are nodes that match along the original path, but not a
	// target match of the path. This is only relevant when ShouldOnlyDeeplyVisitLeaves is false.
	IsMiddleNode bool
}

// log is the package-wide logger for treeops.
var log = logging.MustGetLogger("yq-treeops")
// NodeToString renders a candidate node for debug output: a header with its
// document index, path, tag, kind and anchor, followed by the node encoded as
// YAML. Returns the empty string when debug logging is disabled, so the
// encoding work is skipped entirely (note: callers comparing this output,
// e.g. tests, therefore depend on debug logging being enabled).
func NodeToString(node *CandidateNode) string {
	if !log.IsEnabledFor(logging.DEBUG) {
		return ""
	}
	value := node.Node
	if value == nil {
		return "-- node is nil --"
	}
	buf := new(bytes.Buffer)
	encoder := yaml.NewEncoder(buf)
	if errorEncoding := encoder.Encode(value); errorEncoding != nil {
		// Errorf, not Error: go-logging's Error does not interpret format verbs.
		log.Errorf("Error debugging node, %v", errorEncoding.Error())
	}
	encoder.Close()
	return fmt.Sprintf(`-- Node --
Document %v, path: %v
Tag: %v, Kind: %v, Anchor: %v
%v`, node.Document, node.Path, value.Tag, KindString(value.Kind), value.Anchor, buf.String())
}
// KindString returns a human-readable name for a yaml node kind, or
// "unknown!" for kinds it does not recognise.
func KindString(kind yaml.Kind) string {
	names := map[yaml.Kind]string{
		yaml.ScalarNode:   "ScalarNode",
		yaml.SequenceNode: "SequenceNode",
		yaml.MappingNode:  "MappingNode",
		yaml.DocumentNode: "DocumentNode",
		yaml.AliasNode:    "AliasNode",
	}
	if name, ok := names[kind]; ok {
		return name
	}
	return "unknown!"
}

View File

@@ -0,0 +1,34 @@
package treeops
// Match reports whether name matches pattern, where '*' matches any run of
// characters (including none) and '?' matches exactly one character.
// An empty pattern matches only an empty name.
func Match(name string, pattern string) (matched bool) {
	if pattern == "" {
		return name == pattern
	}
	// Debugf, not Debug: go-logging's Debug does not interpret format verbs.
	log.Debugf("pattern: %v", pattern)
	if pattern == "*" {
		// a lone star matches everything; skip the recursive matcher
		log.Debug("wild!")
		return true
	}
	return deepMatch([]rune(name), []rune(pattern))
}
// deepMatch reports whether str matches pattern rune-by-rune, where '*'
// matches any (possibly empty) run of runes and '?' matches exactly one rune.
func deepMatch(str, pattern []rune) bool {
	for len(pattern) > 0 {
		if pattern[0] == '*' {
			// Either the star consumes nothing (advance the pattern) or it
			// consumes one more rune (advance the string and retry).
			return deepMatch(str, pattern[1:]) ||
				(len(str) > 0 && deepMatch(str[1:], pattern))
		}
		if len(str) == 0 {
			// pattern still expects a rune but the string is exhausted
			return false
		}
		if pattern[0] != '?' && pattern[0] != str[0] {
			return false
		}
		str, pattern = str[1:], pattern[1:]
	}
	// both must be fully consumed for a match
	return len(str) == 0 && len(pattern) == 0
}

View File

@@ -0,0 +1,144 @@
package treeops
import (
"errors"
"fmt"
lex "github.com/timtadh/lexmachine"
)
// precedenceMap gives each operator token its binding strength; higher values
// bind tighter and are popped first during postfix conversion.
var precedenceMap map[int]int

// PathElementType distinguishes leaf path elements from operators.
type PathElementType uint32

const (
	PathKey PathElementType = 1 << iota // a map key or literal such as "**"
	ArrayIndex                          // a numeric array index
	Operation                           // an operator joining sub-expressions
)

// OperationType identifies the operator carried by an Operation element.
type OperationType uint32

const (
	None OperationType = 1 << iota
	Traverse
	Or
	And
	Equals
	EqualsSelf
)

// PathElement is one unit of a parsed path expression.
type PathElement struct {
	PathElementType PathElementType
	OperationType   OperationType // meaningful only when PathElementType is Operation
	Value           interface{}   // the key/index/literal for leaf elements
}
// debugging purposes only
// toString renders the element for debugging purposes only.
func (p *PathElement) toString() string {
	switch p.PathElementType {
	case PathKey:
		return fmt.Sprintf("PathKey - '%v'\n", p.Value)
	case ArrayIndex:
		return fmt.Sprintf("ArrayIndex - '%v'\n", p.Value)
	case Operation:
		// unknown operation types render as bare "Operation - ", matching
		// the zero value of the lookup below
		operationNames := map[OperationType]string{
			Or:         "OR\n",
			And:        "AND\n",
			Equals:     "EQUALS\n",
			EqualsSelf: "EQUALS SELF\n",
			Traverse:   "TRAVERSE\n",
		}
		return "Operation - " + operationNames[p.OperationType]
	default:
		return ""
	}
}
// operationTypeMapper translates lexer token ids to operation types.
var operationTypeMapper map[int]OperationType
// initMaps builds the operator precedence and token-to-operation tables.
// It must run after initTokens has populated TokenIds.
func initMaps() {
	precedenceMap = map[int]int{
		TokenIds["("]:                    0,
		TokenIds["OR_OPERATOR"]:          10,
		TokenIds["AND_OPERATOR"]:         20,
		TokenIds["EQUALS_OPERATOR"]:      30,
		TokenIds["EQUALS_SELF_OPERATOR"]: 30,
		TokenIds["TRAVERSE_OPERATOR"]:    40,
	}
	operationTypeMapper = map[int]OperationType{
		TokenIds["OR_OPERATOR"]:          Or,
		TokenIds["AND_OPERATOR"]:         And,
		TokenIds["EQUALS_OPERATOR"]:      Equals,
		TokenIds["EQUALS_SELF_OPERATOR"]: EqualsSelf,
		TokenIds["TRAVERSE_OPERATOR"]:    Traverse,
	}
}
// createOperationPathElement builds an Operation path element for the given
// operator token.
func createOperationPathElement(opToken *lex.Token) PathElement {
	return PathElement{
		PathElementType: Operation,
		OperationType:   operationTypeMapper[opToken.Type],
	}
}
// PathPostFixer converts an infix token stream into postfix path elements.
type PathPostFixer interface {
	ConvertToPostfix([]*lex.Token) ([]*PathElement, error)
}

type pathPostFixer struct {
}

// NewPathPostFixer returns the standard PathPostFixer implementation.
func NewPathPostFixer() PathPostFixer {
	return &pathPostFixer{}
}
// popOpToResult moves the top operator of opStack onto result as an
// Operation path element, returning the updated stack and result.
func popOpToResult(opStack []*lex.Token, result []*PathElement) ([]*lex.Token, []*PathElement) {
	top := opStack[len(opStack)-1]
	pathElement := createOperationPathElement(top)
	return opStack[:len(opStack)-1], append(result, &pathElement)
}
// ConvertToPostfix applies the shunting-yard algorithm to turn the infix
// token stream into postfix order, honouring operator precedence and
// brackets. NOTE(review): if the input contains an unmatched "(", any
// operators left on the stack are silently dropped — confirm whether that
// should be an error.
func (p *pathPostFixer) ConvertToPostfix(infixTokens []*lex.Token) ([]*PathElement, error) {
	var result []*PathElement
	// surround the whole expression with a virtual pair of brackets: a "("
	// seeded on the stack here and a ")" appended to the tokens below
	var opStack = []*lex.Token{&lex.Token{Type: TokenIds["("]}}
	var tokens = append(infixTokens, &lex.Token{Type: TokenIds[")"]})

	for _, token := range tokens {
		switch token.Type {
		case TokenIds["PATH_KEY"], TokenIds["ARRAY_INDEX"], TokenIds["[+]"], TokenIds["[*]"], TokenIds["**"]:
			// operands go straight to the output
			var pathElement = PathElement{PathElementType: PathKey, Value: token.Value}
			result = append(result, &pathElement)
		case TokenIds["("]:
			opStack = append(opStack, token)
		case TokenIds["OR_OPERATOR"], TokenIds["AND_OPERATOR"], TokenIds["EQUALS_OPERATOR"], TokenIds["EQUALS_SELF_OPERATOR"], TokenIds["TRAVERSE_OPERATOR"]:
			var currentPrecedence = precedenceMap[token.Type]
			// pop off higher precedent operators onto the result
			for len(opStack) > 0 && precedenceMap[opStack[len(opStack)-1].Type] >= currentPrecedence {
				opStack, result = popOpToResult(opStack, result)
			}
			// add this operator to the opStack
			opStack = append(opStack, token)
		case TokenIds[")"]:
			for len(opStack) > 0 && opStack[len(opStack)-1].Type != TokenIds["("] {
				opStack, result = popOpToResult(opStack, result)
			}
			if len(opStack) == 0 {
				return nil, errors.New("Bad path expression, got close brackets without matching opening bracket")
			}
			// now we should have ( as the last element on the opStack, get rid of it
			opStack = opStack[0 : len(opStack)-1]
		}
	}
	return result, nil
}

View File

@@ -0,0 +1,228 @@
package treeops
import (
"testing"
"github.com/mikefarah/yq/v3/test"
)
// var tokeniser = NewPathTokeniser()
// postFixer converts tokenised paths into postfix form for these tests.
var postFixer = NewPathPostFixer()
// testExpression tokenises and postfixes expression, rendering each resulting
// path element via toString with a "--------" separator line after each.
func testExpression(expression string) (string, error) {
	tokens, err := tokeniser.Tokenise(expression)
	if err != nil {
		return "", err
	}
	elements, err := postFixer.ConvertToPostfix(tokens)
	if err != nil {
		return "", err
	}
	var output string
	for _, element := range elements {
		output += element.toString() + "--------\n"
	}
	return output, nil
}
// TestPostFixSimpleExample verifies a single key postfixes to one PathKey element.
func TestPostFixSimpleExample(t *testing.T) {
	actual, err := testExpression("a")
	if err != nil {
		t.Error(err)
	}
	test.AssertResultComplex(t, `PathKey - 'a'
--------
`, actual)
}
// A dotted path postfixes to operands followed by TRAVERSE operators.
func TestPostFixSimplePathExample(t *testing.T) {
	var infix = "apples.bananas*.cat"
	var expectedOutput = `PathKey - 'apples'
--------
PathKey - 'bananas*'
--------
Operation - TRAVERSE
--------
PathKey - 'cat'
--------
Operation - TRAVERSE
--------
`
	actual, err := testExpression(infix)
	if err != nil {
		t.Error(err)
	}
	test.AssertResultComplex(t, expectedOutput, actual)
}

// Array indexes become PathKey operands joined by TRAVERSE.
func TestPostFixSimplePathNumbersExample(t *testing.T) {
	var infix = "apples[0].cat"
	var expectedOutput = `PathKey - 'apples'
--------
PathKey - '0'
--------
Operation - TRAVERSE
--------
PathKey - 'cat'
--------
Operation - TRAVERSE
--------
`
	actual, err := testExpression(infix)
	if err != nil {
		t.Error(err)
	}
	test.AssertResultComplex(t, expectedOutput, actual)
}

// The append-to-array literal "[+]" is treated as a path key operand.
func TestPostFixSimplePathAppendArrayExample(t *testing.T) {
	var infix = "apples[+].cat"
	var expectedOutput = `PathKey - 'apples'
--------
PathKey - '[+]'
--------
Operation - TRAVERSE
--------
PathKey - 'cat'
--------
Operation - TRAVERSE
--------
`
	actual, err := testExpression(infix)
	if err != nil {
		t.Error(err)
	}
	test.AssertResultComplex(t, expectedOutput, actual)
}

// The splat literal "[*]" is treated as a path key operand; note the
// tokeniser inserts the implicit traverse between "[*]" and "cat".
func TestPostFixSimplePathSplatArrayExample(t *testing.T) {
	var infix = "apples.[*]cat"
	var expectedOutput = `PathKey - 'apples'
--------
PathKey - '[*]'
--------
Operation - TRAVERSE
--------
PathKey - 'cat'
--------
Operation - TRAVERSE
--------
`
	actual, err := testExpression(infix)
	if err != nil {
		t.Error(err)
	}
	test.AssertResultComplex(t, expectedOutput, actual)
}

// The deep-match literal "**" is treated as a path key operand.
func TestPostFixDeepMatchExample(t *testing.T) {
	var infix = "apples.**.cat"
	var expectedOutput = `PathKey - 'apples'
--------
PathKey - '**'
--------
Operation - TRAVERSE
--------
PathKey - 'cat'
--------
Operation - TRAVERSE
--------
`
	actual, err := testExpression(infix)
	if err != nil {
		t.Error(err)
	}
	test.AssertResultComplex(t, expectedOutput, actual)
}

// OR produces its two operands followed by the OR operation.
func TestPostFixOrExample(t *testing.T) {
	var infix = "a OR b"
	var expectedOutput = `PathKey - 'a'
--------
PathKey - 'b'
--------
Operation - OR
--------
`
	actual, err := testExpression(infix)
	if err != nil {
		t.Error(err)
	}
	test.AssertResultComplex(t, expectedOutput, actual)
}

// EQUALS binds tighter than OR, so each equality is emitted before the OR.
func TestPostFixOrWithEqualsExample(t *testing.T) {
	var infix = "a==thing OR b==thongs"
	var expectedOutput = `PathKey - 'a'
--------
PathKey - 'thing'
--------
Operation - EQUALS
--------
PathKey - 'b'
--------
PathKey - 'thongs'
--------
Operation - EQUALS
--------
Operation - OR
--------
`
	actual, err := testExpression(infix)
	if err != nil {
		t.Error(err)
	}
	test.AssertResultComplex(t, expectedOutput, actual)
}

// TRAVERSE binds tighter than EQUALS, which binds tighter than OR.
func TestPostFixOrWithEqualsPathExample(t *testing.T) {
	var infix = "apples.monkeys==thing OR bogs.bobos==thongs"
	var expectedOutput = `PathKey - 'apples'
--------
PathKey - 'monkeys'
--------
Operation - TRAVERSE
--------
PathKey - 'thing'
--------
Operation - EQUALS
--------
PathKey - 'bogs'
--------
PathKey - 'bobos'
--------
Operation - TRAVERSE
--------
PathKey - 'thongs'
--------
Operation - EQUALS
--------
Operation - OR
--------
`
	actual, err := testExpression(infix)
	if err != nil {
		t.Error(err)
	}
	test.AssertResultComplex(t, expectedOutput, actual)
}

View File

@@ -0,0 +1,159 @@
package treeops
import (
"strconv"
"strings"
lex "github.com/timtadh/lexmachine"
"github.com/timtadh/lexmachine/machines"
)
var Literals []string       // The tokens representing literal strings
var Keywords []string       // The keyword tokens (currently unpopulated)
var Tokens []string         // All of the tokens (including literals and keywords)
var TokenIds map[string]int // A map from the token names to their int ids

// bracketLiterals are the grouping tokens; unlike Literals they only get an
// implicit traverse operator on one side (see Tokenise's post-processing).
var bracketLiterals []string
// initTokens defines the token vocabulary, assigns each token a stable id
// (its index in the Tokens slice) and then builds the operator tables via
// initMaps. The append order below therefore determines token ids.
func initTokens() {
	bracketLiterals = []string{"(", ")"}
	Literals = []string{ // these need a traverse operator infront
		"[+]",
		"[*]",
		"**",
	}
	Tokens = []string{
		"OR_OPERATOR",
		"AND_OPERATOR",
		"EQUALS_OPERATOR",
		"EQUALS_SELF_OPERATOR",
		"TRAVERSE_OPERATOR",
		"PATH_KEY",    // apples
		"ARRAY_INDEX", // 123
	}
	Tokens = append(Tokens, bracketLiterals...)
	Tokens = append(Tokens, Literals...)
	TokenIds = make(map[string]int)
	for i, tok := range Tokens {
		TokenIds[tok] = i
	}
	initMaps()
}
// skip is a lexer action that discards the matched text (used for whitespace).
func skip(*lex.Scanner, *machines.Match) (interface{}, error) {
	return nil, nil
}
// token returns a lexer action emitting the named token with the raw matched
// text as its value.
func token(name string) lex.Action {
	return func(scanner *lex.Scanner, match *machines.Match) (interface{}, error) {
		return scanner.Token(TokenIds[name], string(match.Bytes), match), nil
	}
}
// unwrap strips the first and last character from value (e.g. surrounding
// quotes or brackets). Values shorter than two characters are returned
// unchanged rather than panicking on an out-of-range slice.
func unwrap(value string) string {
	if len(value) < 2 {
		return value
	}
	return value[1 : len(value)-1]
}
// wrappedToken returns a lexer action emitting the named token with the
// matched text minus its first and last character (its wrapping quotes or
// brackets).
func wrappedToken(name string) lex.Action {
	return func(scanner *lex.Scanner, match *machines.Match) (interface{}, error) {
		return scanner.Token(TokenIds[name], unwrap(string(match.Bytes)), match), nil
	}
}
// numberToken returns a lexer action that parses the matched text (first
// unwrapped of surrounding brackets when wrapped is true) as a base-10 int64
// and emits it as the named token's value.
func numberToken(name string, wrapped bool) lex.Action {
	return func(scanner *lex.Scanner, match *machines.Match) (interface{}, error) {
		text := string(match.Bytes)
		if wrapped {
			text = unwrap(text)
		}
		number, parseErr := strconv.ParseInt(text, 10, 64) // nolint
		if parseErr != nil {
			return nil, parseErr
		}
		return scanner.Token(TokenIds[name], number, match), nil
	}
}
// Creates the lexer object and compiles the NFA.
func initLexer() (*lex.Lexer, error) {
	lexer := lex.NewLexer()
	// escape every character of each literal so it matches verbatim,
	// e.g. "[+]" becomes the regex \[\+\]
	for _, lit := range bracketLiterals {
		r := "\\" + strings.Join(strings.Split(lit, ""), "\\")
		lexer.Add([]byte(r), token(lit))
	}
	for _, lit := range Literals {
		r := "\\" + strings.Join(strings.Split(lit, ""), "\\")
		lexer.Add([]byte(r), token(lit))
	}
	lexer.Add([]byte(`([Oo][Rr])`), token("OR_OPERATOR"))
	lexer.Add([]byte(`([Aa][Nn][Dd])`), token("AND_OPERATOR"))
	// ".==" (equals-self) is registered before the plain "==" operator;
	// NOTE(review): assumes the lexer prefers the longer/earlier match — confirm
	lexer.Add([]byte(`\.\s*==\s*`), token("EQUALS_SELF_OPERATOR"))
	lexer.Add([]byte(`\s*==\s*`), token("EQUALS_OPERATOR"))
	lexer.Add([]byte(`\[-?[0-9]+\]`), numberToken("ARRAY_INDEX", true))
	lexer.Add([]byte(`-?[0-9]+`), numberToken("ARRAY_INDEX", false))
	lexer.Add([]byte("( |\t|\n|\r)+"), skip) // whitespace is discarded
	lexer.Add([]byte(`"[^ "]+"`), wrappedToken("PATH_KEY")) // quoted keys lose their quotes
	lexer.Add([]byte(`[^ \.\[\(\)=]+`), token("PATH_KEY"))
	lexer.Add([]byte(`\.`), token("TRAVERSE_OPERATOR"))
	err := lexer.Compile()
	if err != nil {
		return nil, err
	}
	return lexer, nil
}
// PathTokeniser turns a path expression string into a stream of lexer tokens.
type PathTokeniser interface {
	Tokenise(path string) ([]*lex.Token, error)
}

type pathTokeniser struct {
	lexer *lex.Lexer
}

// NewPathTokeniser initialises the token tables and compiles the lexer.
// It panics on a lexer compile failure, which would indicate a programming
// error in the token regexes rather than bad user input.
func NewPathTokeniser() PathTokeniser {
	initTokens()
	var lexer, err = initLexer()
	if err != nil {
		panic(err)
	}
	return &pathTokeniser{lexer}
}
// Tokenise lexes path and then post-processes the token stream, inserting
// implicit TRAVERSE_OPERATOR ('.') tokens around array literals, array
// indexes and brackets so that e.g. "a[0]c" behaves like "a.[0].c".
func (p *pathTokeniser) Tokenise(path string) ([]*lex.Token, error) {
	scanner, err := p.lexer.Scanner([]byte(path))
	if err != nil {
		return nil, err
	}
	var tokens []*lex.Token
	for tok, err, eof := scanner.Next(); !eof; tok, err, eof = scanner.Next() {
		if tok != nil {
			token := tok.(*lex.Token)
			log.Debugf("Processing %v - %v", token.Value, Tokens[token.Type])
			tokens = append(tokens, token)
		}
		if err != nil {
			return nil, err
		}
	}
	var postProcessedTokens []*lex.Token = make([]*lex.Token, 0)
	for index, token := range tokens {
		// insert a '.' before a literal / array index / '(' unless the
		// previous token is already a traverse operator
		for _, literalTokenDef := range append(Literals, "ARRAY_INDEX", "(") {
			if index > 0 && token.Type == TokenIds[literalTokenDef] && tokens[index-1].Type != TokenIds["TRAVERSE_OPERATOR"] {
				postProcessedTokens = append(postProcessedTokens, &lex.Token{Type: TokenIds["TRAVERSE_OPERATOR"], Value: "."})
			}
		}
		postProcessedTokens = append(postProcessedTokens, token)
		// insert a '.' after a literal / array index / ')' unless the next
		// token is already a traverse operator
		for _, literalTokenDef := range append(Literals, "ARRAY_INDEX", ")") {
			if index != len(tokens)-1 && token.Type == TokenIds[literalTokenDef] && tokens[index+1].Type != TokenIds["TRAVERSE_OPERATOR"] {
				postProcessedTokens = append(postProcessedTokens, &lex.Token{Type: TokenIds["TRAVERSE_OPERATOR"], Value: "."})
			}
		}
	}
	return postProcessedTokens, nil
}

View File

@@ -0,0 +1,67 @@
package treeops
import (
"testing"
"github.com/mikefarah/yq/v3/test"
)
// tokeniserTests maps path expressions to the expected sequence of token
// values (strings for keys/operators, int64 for array indexes). NOTE(review):
// the `append(make([]interface{}, 0), ...)` pattern could be the simpler
// `[]interface{}{...}` literal throughout.
var tokeniserTests = []struct {
	path           string
	expectedTokens []interface{}
}{ // TODO: Ensure ALL documented examples have tests! sheesh
	{"apples.BANANAS", append(make([]interface{}, 0), "apples", ".", "BANANAS")},
	{"appl*.BANA*", append(make([]interface{}, 0), "appl*", ".", "BANA*")},
	{"a.b.**", append(make([]interface{}, 0), "a", ".", "b", ".", "**")},
	{"a.\"=\".frog", append(make([]interface{}, 0), "a", ".", "=", ".", "frog")},
	{"a.b.*", append(make([]interface{}, 0), "a", ".", "b", ".", "*")},
	{"a.b.thin*", append(make([]interface{}, 0), "a", ".", "b", ".", "thin*")},
	{"a.b[0]", append(make([]interface{}, 0), "a", ".", "b", ".", int64(0))},
	{"a.b.[0]", append(make([]interface{}, 0), "a", ".", "b", ".", int64(0))},
	{"a.b[*]", append(make([]interface{}, 0), "a", ".", "b", ".", "[*]")},
	{"a.b.[*]", append(make([]interface{}, 0), "a", ".", "b", ".", "[*]")},
	{"a.b[+]", append(make([]interface{}, 0), "a", ".", "b", ".", "[+]")},
	{"a.b.[+]", append(make([]interface{}, 0), "a", ".", "b", ".", "[+]")},
	{"a.b[-12]", append(make([]interface{}, 0), "a", ".", "b", ".", int64(-12))},
	{"a.b.0", append(make([]interface{}, 0), "a", ".", "b", ".", int64(0))},
	{"a.b.-12", append(make([]interface{}, 0), "a", ".", "b", ".", int64(-12))},
	{"a", append(make([]interface{}, 0), "a")},
	{"\"a.b\".c", append(make([]interface{}, 0), "a.b", ".", "c")},
	{`b."foo.bar"`, append(make([]interface{}, 0), "b", ".", "foo.bar")},
	{"animals(.==cat)", append(make([]interface{}, 0), "animals", ".", "(", ".==", "cat", ")")},
	{"animals.(.==cat)", append(make([]interface{}, 0), "animals", ".", "(", ".==", "cat", ")")},
	{"animals(. == cat)", append(make([]interface{}, 0), "animals", ".", "(", ". == ", "cat", ")")},
	{"animals(.==c*)", append(make([]interface{}, 0), "animals", ".", "(", ".==", "c*", ")")},
	{"animals(a.b==c*)", append(make([]interface{}, 0), "animals", ".", "(", "a", ".", "b", "==", "c*", ")")},
	{"animals.(a.b==c*)", append(make([]interface{}, 0), "animals", ".", "(", "a", ".", "b", "==", "c*", ")")},
	{"(a.b==c*).animals", append(make([]interface{}, 0), "(", "a", ".", "b", "==", "c*", ")", ".", "animals")},
	{"(a.b==c*)animals", append(make([]interface{}, 0), "(", "a", ".", "b", "==", "c*", ")", ".", "animals")},
	{"[1].a.d", append(make([]interface{}, 0), int64(1), ".", "a", ".", "d")},
	{"[1]a.d", append(make([]interface{}, 0), int64(1), ".", "a", ".", "d")},
	{"a[0]c", append(make([]interface{}, 0), "a", ".", int64(0), ".", "c")},
	{"a.[0].c", append(make([]interface{}, 0), "a", ".", int64(0), ".", "c")},
	{"[0]", append(make([]interface{}, 0), int64(0))},
	{"0", append(make([]interface{}, 0), int64(0))},
	{"a.b[+]c", append(make([]interface{}, 0), "a", ".", "b", ".", "[+]", ".", "c")},
	{"a.cool(s.d.f == cool)", append(make([]interface{}, 0), "a", ".", "cool", ".", "(", "s", ".", "d", ".", "f", " == ", "cool", ")")},
	{"a.cool.(s.d.f==cool OR t.b.h==frog).caterpillar", append(make([]interface{}, 0), "a", ".", "cool", ".", "(", "s", ".", "d", ".", "f", "==", "cool", "OR", "t", ".", "b", ".", "h", "==", "frog", ")", ".", "caterpillar")},
	{"a.cool(s.d.f==cool and t.b.h==frog)*", append(make([]interface{}, 0), "a", ".", "cool", ".", "(", "s", ".", "d", ".", "f", "==", "cool", "and", "t", ".", "b", ".", "h", "==", "frog", ")", ".", "*")},
	{"a.cool(s.d.f==cool and t.b.h==frog).th*", append(make([]interface{}, 0), "a", ".", "cool", ".", "(", "s", ".", "d", ".", "f", "==", "cool", "and", "t", ".", "b", ".", "h", "==", "frog", ")", ".", "th*")},
}

// tokeniser is the shared fixture under test.
var tokeniser = NewPathTokeniser()
// TestTokeniser runs every table case, comparing the token values produced by
// the shared tokeniser against the expected sequence.
func TestTokeniser(t *testing.T) {
	for _, testCase := range tokeniserTests {
		tokens, err := tokeniser.Tokenise(testCase.path)
		if err != nil {
			t.Error(testCase.path, err)
		}
		var actualValues []interface{}
		for _, parsedToken := range tokens {
			actualValues = append(actualValues, parsedToken.Value)
		}
		test.AssertResultComplex(t, testCase.expectedTokens, actualValues)
	}
}

View File

@@ -0,0 +1,56 @@
package treeops
import "fmt"
// package-level parsing pipeline shared by ParsePath
var myPathTokeniser = NewPathTokeniser()
var myPathPostfixer = NewPathPostFixer()

// PathTreeNode is a node in the binary expression tree built from a postfix
// path: operation nodes hold their operands in Lhs/Rhs, leaves hold only a
// PathElement.
type PathTreeNode struct {
	PathElement *PathElement
	Lhs         *PathTreeNode
	Rhs         *PathTreeNode
}

// PathTreeCreator parses path strings (or postfix element lists) into
// expression trees.
type PathTreeCreator interface {
	ParsePath(path string) (*PathTreeNode, error)
	CreatePathTree(postFixPath []*PathElement) (*PathTreeNode, error)
}

type pathTreeCreator struct {
}

// NewPathTreeCreator returns the standard PathTreeCreator implementation.
func NewPathTreeCreator() PathTreeCreator {
	return &pathTreeCreator{}
}
// ParsePath tokenises path, converts the tokens to postfix order and builds
// the resulting expression tree.
func (p *pathTreeCreator) ParsePath(path string) (*PathTreeNode, error) {
	tokens, err := myPathTokeniser.Tokenise(path)
	if err != nil {
		return nil, err
	}
	pathElements, err := myPathPostfixer.ConvertToPostfix(tokens)
	if err != nil {
		return nil, err
	}
	return p.CreatePathTree(pathElements)
}
// CreatePathTree builds a binary expression tree from postfix path elements.
// Operands are pushed on a stack; each operation pops two operands to become
// its Lhs and Rhs. Malformed postfix input yields an error rather than a
// slice-index panic.
func (p *pathTreeCreator) CreatePathTree(postFixPath []*PathElement) (*PathTreeNode, error) {
	var stack = make([]*PathTreeNode, 0)

	for _, pathElement := range postFixPath {
		var newNode = PathTreeNode{PathElement: pathElement}
		if pathElement.PathElementType == Operation {
			// guard against a stack-underflow panic when an operator arrives
			// without two operands (malformed postfix input)
			if len(stack) < 2 {
				return nil, fmt.Errorf("invalid path expression: operator is missing an operand")
			}
			remaining, lhs, rhs := stack[:len(stack)-2], stack[len(stack)-2], stack[len(stack)-1]
			newNode.Lhs = lhs
			newNode.Rhs = rhs
			stack = remaining
		}
		stack = append(stack, &newNode)
	}
	if len(stack) != 1 {
		return nil, fmt.Errorf("expected stack to have 1 thing but its %v", stack)
	}
	return stack[0], nil
}

View File

@@ -0,0 +1 @@
package treeops

View File

@@ -0,0 +1,94 @@
package treeops
import (
"fmt"
"gopkg.in/yaml.v3"
)
// traverser performs single-step navigation from one candidate node to its
// matching children.
type traverser struct {
	prefs NavigationPrefs
}

// Traverser navigates one path element deep from a candidate node.
type Traverser interface {
	Traverse(matchingNode *CandidateNode, pathNode *PathElement) ([]*CandidateNode, error)
}

// NewTraverser returns a Traverser honouring the given preferences.
func NewTraverser(navigationPrefs NavigationPrefs) Traverser {
	return &traverser{navigationPrefs}
}

// keyMatches reports whether a map key matches the path element's value,
// using wildcard matching ('*' and '?').
func (t *traverser) keyMatches(key *yaml.Node, pathNode *PathElement) bool {
	return Match(key.Value, fmt.Sprintf("%v", pathNode.Value))
}
// traverseMap returns a candidate for every map entry whose key matches the
// path element.
func (t *traverser) traverseMap(candidate *CandidateNode, pathNode *PathElement) ([]*CandidateNode, error) {
	// value.Content is a concatenated array of key, value,
	// so keys are in the even indexes, values in odd.
	// merge aliases are defined first, but we only want to traverse them
	// if we don't find a match directly on this node first.
	//TODO ALIASES, auto creation?
	var newMatches = make([]*CandidateNode, 0)
	node := candidate.Node
	var contents = node.Content
	for index := 0; index < len(contents); index = index + 2 {
		key := contents[index]
		value := contents[index+1]
		// Debugf, not Debug: go-logging's Debug does not interpret format verbs.
		log.Debugf("checking %v (%v)", key.Value, key.Tag)
		if t.keyMatches(key, pathNode) {
			log.Debug("MATCHED")
			// copy the parent path before extending it: a bare append could
			// make sibling matches share (and clobber) one backing array
			newPath := make([]interface{}, 0, len(candidate.Path)+1)
			newPath = append(newPath, candidate.Path...)
			newPath = append(newPath, key.Value)
			newMatches = append(newMatches, &CandidateNode{
				Node:     value,
				Path:     newPath,
				Document: candidate.Document,
			})
		}
	}
	return newMatches, nil
}
// Traverse moves one path element deep from matchingNode. Mapping nodes are
// searched for matching keys; document nodes are unwrapped and traversed
// again; all other kinds currently yield no matches.
func (t *traverser) Traverse(matchingNode *CandidateNode, pathNode *PathElement) ([]*CandidateNode, error) {
	log.Debug(NodeToString(matchingNode))
	value := matchingNode.Node
	switch value.Kind {
	case yaml.MappingNode:
		// Debugf, not Debug: go-logging's Debug does not interpret format verbs.
		log.Debugf("its a map with %v entries", len(value.Content)/2)
		return t.traverseMap(matchingNode, pathNode)
	// case yaml.SequenceNode:
	// 	log.Debug("its a sequence of %v things!", len(value.Content))
	// 	switch head := head.(type) {
	// 	case int64:
	// 		return n.recurseArray(value, head, head, tail, pathStack)
	// 	default:
	// 		if head == "+" {
	// 			return n.appendArray(value, head, tail, pathStack)
	// 		} else if len(value.Content) == 0 && head == "**" {
	// 			return n.navigationStrategy.Visit(nodeContext)
	// 		}
	// 		return n.splatArray(value, head, tail, pathStack)
	// 	}
	// case yaml.AliasNode:
	// 	log.Debug("its an alias!")
	// 	DebugNode(value.Alias)
	// 	if n.navigationStrategy.FollowAlias(nodeContext) {
	// 		log.Debug("following the alias")
	// 		return n.recurse(value.Alias, head, tail, pathStack)
	// 	}
	// 	return nil
	case yaml.DocumentNode:
		log.Debug("digging into doc node")
		return t.Traverse(&CandidateNode{
			Node:     matchingNode.Node.Content[0],
			Document: matchingNode.Document}, pathNode)
	default:
		return nil, nil
	}
}