
removed docs, added recursive descent

Mike Farah
2020-10-18 11:31:36 +11:00
parent b026ebf2c3
commit 391ab8d70c
51 changed files with 93 additions and 926 deletions


@@ -45,6 +45,7 @@ var Pipe = &OperationType{Type: "PIPE", NumArgs: 2, Precedence: 45, Handler: Pip
var Length = &OperationType{Type: "LENGTH", NumArgs: 0, Precedence: 50, Handler: LengthOperator}
var Collect = &OperationType{Type: "COLLECT", NumArgs: 0, Precedence: 50, Handler: CollectOperator}
var RecursiveDescent = &OperationType{Type: "RECURSIVE_DESCENT", NumArgs: 0, Precedence: 50, Handler: RecursiveDescentOperator}
// not sure yet
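
The Handler field ties each operation type to the function that executes it. Judging from RecursiveDescentOperator further down in this commit, handlers in the treeops package take the data-tree navigator, the currently matched nodes, and the path-tree node, and return a new set of matches. A minimal pass-through handler under that assumed signature (the name is hypothetical, for illustration only):

// Hypothetical sketch, not part of this commit: a do-nothing handler
// with the same shape as the handlers referenced above. It returns the
// currently matched nodes unchanged.
func IdentityOperator(d *dataTreeNavigator, matchMap *orderedmap.OrderedMap, pathNode *PathTreeNode) (*orderedmap.OrderedMap, error) {
	return matchMap, nil
}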


@@ -47,29 +47,6 @@ var assignOperatorScenarios = []expressionScenario{
		expected: []string{
			"D0, P[], (!!seq)::[bogs, apple, bogs]\n",
		},
		// document: `{}`,
		// expression: `["cat", "dog"]`,
		// expected: []string{
		// "D0, P[], (!!seq)::- cat\n- dog\n",
		// },
		// }, {
		// document: `{}`,
		// expression: `1 | collect`,
		// expected: []string{
		// "D0, P[], (!!seq)::- 1\n",
		// },
		// }, {
		// document: `[1,2,3]`,
		// expression: `[.[]]`,
		// expected: []string{
		// "D0, P[], (!!seq)::- 1\n- 2\n- 3\n",
		// },
		// }, {
		// document: `a: {b: [1,2,3]}`,
		// expression: `[.a.b[]]`,
		// expected: []string{
		// "D0, P[a b], (!!seq)::- 1\n- 2\n- 3\n",
		// },
	},
}


@@ -0,0 +1,34 @@
package treeops

import (
	"github.com/elliotchance/orderedmap"
)

func RecursiveDescentOperator(d *dataTreeNavigator, matchMap *orderedmap.OrderedMap, pathNode *PathTreeNode) (*orderedmap.OrderedMap, error) {
	var results = orderedmap.NewOrderedMap()

	err := recursiveDecent(d, results, matchMap)
	if err != nil {
		return nil, err
	}

	return results, nil
}

func recursiveDecent(d *dataTreeNavigator, results *orderedmap.OrderedMap, matchMap *orderedmap.OrderedMap) error {
	for el := matchMap.Front(); el != nil; el = el.Next() {
		candidate := el.Value.(*CandidateNode)
		results.Set(candidate.GetKey(), candidate)

		children, err := d.traverse(nodeToMap(candidate), &PathElement{PathElementType: PathKey, Value: "[]"})
		if err != nil {
			return err
		}

		err = recursiveDecent(d, results, children)
		if err != nil {
			return err
		}
	}
	return nil
}
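
RecursiveDescentOperator seeds the result set with each matched node, then reuses the existing traverse step with the splat path element "[]" to fetch that node's children and recurses into them. For a self-contained picture of the same idea outside the repo's CandidateNode machinery, here is a rough sketch that walks every node of a document with gopkg.in/yaml.v3 directly; the names and output format are illustrative, not the repo's:

package main

import (
	"fmt"

	"gopkg.in/yaml.v3"
)

// visit reports a node and then recurses into its children, mimicking
// what the `..` operator does: match the current node and every
// descendant beneath it.
func visit(path string, node *yaml.Node) {
	fmt.Printf("%s (%s): %s\n", path, node.Tag, node.Value)
	switch node.Kind {
	case yaml.MappingNode:
		// Mapping content alternates key, value, key, value, ...
		for i := 0; i+1 < len(node.Content); i += 2 {
			visit(path+"."+node.Content[i].Value, node.Content[i+1])
		}
	case yaml.SequenceNode, yaml.DocumentNode:
		for i, child := range node.Content {
			visit(fmt.Sprintf("%s[%d]", path, i), child)
		}
	}
}

func main() {
	var doc yaml.Node
	if err := yaml.Unmarshal([]byte(`{a: {b: apple}}`), &doc); err != nil {
		panic(err)
	}
	visit("$", doc.Content[0]) // doc is the document node; Content[0] is the root mapping
}

Running it prints the root map, then the map under a, then the scalar apple, which lines up with the `{a: {b: apple}}` scenario in the test file below.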


@@ -0,0 +1,55 @@
package treeops

import (
	"testing"
)

var recursiveDescentOperatorScenarios = []expressionScenario{
	{
		document:   `cat`,
		expression: `..`,
		expected: []string{
			"D0, P[], (!!str)::cat\n",
		},
	}, {
		document:   `{a: frog}`,
		expression: `..`,
		expected: []string{
			"D0, P[], (!!map)::{a: frog}\n",
			"D0, P[a], (!!str)::frog\n",
		},
	}, {
		document:   `{a: {b: apple}}`,
		expression: `..`,
		expected: []string{
			"D0, P[], (!!map)::{a: {b: apple}}\n",
			"D0, P[a], (!!map)::{b: apple}\n",
			"D0, P[a b], (!!str)::apple\n",
		},
	}, {
		document:   `[1,2,3]`,
		expression: `..`,
		expected: []string{
			"D0, P[], (!!seq)::[1, 2, 3]\n",
			"D0, P[0], (!!int)::1\n",
			"D0, P[1], (!!int)::2\n",
			"D0, P[2], (!!int)::3\n",
		},
	}, {
		document:   `[{a: cat},2,true]`,
		expression: `..`,
		expected: []string{
			"D0, P[], (!!seq)::[{a: cat}, 2, true]\n",
			"D0, P[0], (!!map)::{a: cat}\n",
			"D0, P[0 a], (!!str)::cat\n",
			"D0, P[1], (!!int)::2\n",
			"D0, P[2], (!!bool)::true\n",
		},
	},
}

func TestRecursiveDescentOperatorScenarios(t *testing.T) {
	for _, tt := range recursiveDescentOperatorScenarios {
		testScenario(t, &tt)
	}
}
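
Each expected string in these scenarios appears to pack four things together: the document index (D0), the path to the node (P[a b]), the node's YAML tag, and its rendered value. Assuming that reading of the format (the repo's actual resultsToString helper is not shown in this commit), a tiny formatter producing strings of the same shape would be:

package treeops

import "fmt"

// formatResult is a hypothetical helper, for illustration only: it shows
// how the expected strings above appear to be assembled from a document
// index, a node path, a YAML tag and the rendered value.
func formatResult(docIndex int, path []string, tag string, value string) string {
	return fmt.Sprintf("D%v, P%v, (%v)::%v\n", docIndex, path, tag, value)
}

For example, formatResult(0, []string{"a", "b"}, "!!str", "apple") yields "D0, P[a b], (!!str)::apple\n", matching the third scenario above.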


@@ -1,6 +1,7 @@
package treeops

import (
	"fmt"
	"testing"

	"github.com/mikefarah/yq/v3/test"

@@ -26,5 +27,5 @@ func testScenario(t *testing.T, s *expressionScenario) {
		t.Error(errNav)
		return
	}
	test.AssertResultComplexWithContext(t, s.expected, resultsToString(results), s.expression)
	test.AssertResultComplexWithContext(t, s.expected, resultsToString(results), fmt.Sprintf("exp: %v\ndoc: %v", s.expression, s.document))
}


@@ -116,7 +116,7 @@ func initLexer() (*lex.Lexer, error) {
	lexer.Add([]byte(`\)`), literalToken(CloseBracket, ")", true))
	lexer.Add([]byte(`\.?\[\]`), literalToken(PathKey, "[]", true))
	lexer.Add([]byte(`\.\.`), literalToken(PathKey, "..", true))
	lexer.Add([]byte(`\.\.`), opToken(RecursiveDescent))
	lexer.Add([]byte(`,`), opToken(Union))
	lexer.Add([]byte(`length`), opToken(Length))
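
This hunk replaces the rule that tokenised `..` as a literal path key with one that emits it as a RecursiveDescent operation token, so the expression parser can dispatch it to RecursiveDescentOperator. The two patterns involved are plain enough to check with Go's standard regexp package (the lexer library's regex dialect may differ slightly, so treat this as an illustration rather than the lexer's actual behaviour):

package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Same patterns as the lexer rules above, anchored for a whole-token check:
	// "[]" or ".[]" stay literal path keys, while ".." becomes the
	// recursive-descent operation.
	splat := regexp.MustCompile(`^\.?\[\]$`)
	recurse := regexp.MustCompile(`^\.\.$`)

	for _, token := range []string{"[]", ".[]", ".."} {
		fmt.Printf("%-4s splat=%v recurse=%v\n", token, splat.MatchString(token), recurse.MatchString(token))
	}
}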