Mirror of https://github.com/taigrr/yq, synced 2025-01-18 04:53:17 -08:00
Compare commits
169 Commits
Commit SHAs:
fae2b2643c dd86b5e7f2 f1f75683c1 38b9856f50 48eeb2a9df af283315f2 d18a6963f6 77edbb9f5c
179c44aacc bc70c1fb16 0b71a40797 3f51a44596 afebf0e621 dc464a5b10 5340ed0ad3 7b52c5fe0e
f4392f8658 8e14b3b393 8627441705 aa95ecd012 a2bd463a91 6c3965dca3 bb3ffd40b5 cc08afc435
941a453163 77630ca179 ae4b606707 37f3e21970 25d0787011 b5b8da0a1d fa21510194 f541194250
38666f4db6 8b327d0414 c8630fe4f3 87df9b1ae6 2483c38eeb b2a538bdfc 6c26344449 daf0bfe1b9
750a00ec35 25e0a824c5 095f921f62 12d3425b4a 0249f00bd5 42b1bf9678 c632fd3641 0470e5cd69
35c6d07248 3f0be107d4 21a9e506cb 3722367fbb f7b50e9853 4f3fe256aa 2595a929c9 b2186d5404
e93c43f7a0 b1a5387cdd 5911ab2929 2ed5b2ff59 111c6e0be1 81136ad57e a6cd250987 ee1f55630f
9072e8d3b3 99b08fd612 b2317a14ef 3e5f7b147f c4faa70143 4d6d07ec43 bd0818c481 26742b2597
64c618c041 c4c8e5e7b0 e02ad4d7e8 dd17f072cf 429c3ca65b cfcac6d1dc a5ddbca97f 30027a8cf4
f92a42e4f8 3c466dc66e 0816e16d30 802d54e14e 10600dd29a 3a464272d4 691efadfac 6efe4c4797
9e56b364c2 85ec32e3db 5c73132c8e c6efd5519b 3bae44be68 48a7c59c4b 851fbd8cf5 820a3320be
61f569aebb 9ff51cd066 9dd6d11362 83139e21d9 c77001f969 1be3b31bbc 6c14a80991 76bd1896e9
c63801a8a5 f7cfdc29e1 07c6549a58 29f40dad59 fe33e7fcfe 0707525b29 62acee54c3 d21c94cf4f
626e9cacaf 02ef99560d c59209f041 947ffb6986 1a03031297 2c7db0071a 0484d0232b 91c72d2d9e
09ec740d45 532dbd81a5 e86f83fb69 7d5b6b5442 b749973fe0 ba223df4ac e6336bcb85 9ae03e0a1c
55712afea6 7518dac99c 49ac2bac13 e28df367eb 90ec05be54 8f5270cc63 286590b01e c1cf8b4e34
461661112c 578f2c27f9 6ed037a9f6 69386316f3 a0e1f65b20 8027f4c568 b13eb7083e b505240d09
7a184bef78 34bc33d5c5 4d8b64d05c 2d9cc3c107 018a6e616d 7fa2b20b48 316e0d4d5a a0d4c51e27
ceff2cc18d db62a16007 2a6e423d2d 5a1b81cbfc 8c1f7dfbd7 2e81384eed fbf36037c9 2957210e65
bde419aaee 9b185a4409 0c777a4967 e9591e0cd5 04491e13c3 5aff50a345 90d55fb52a e6f97518f3
f4a44e7313
@@ -1 +1 @@
-bin
+bin/*

.github/FUNDING.yml (vendored, new file) | 1

@@ -0,0 +1 @@
+github: mikefarah

.github/workflows/release.yml (vendored) | 12

@@ -40,17 +40,7 @@ jobs:
     IMAGE_NAME: mikefarah/yq
     runs-on: ubuntu-latest
     steps:
-      - name: Get latest release tag
-        uses: oprypin/find-latest-tag@v1
-        with:
-          repository: mikefarah/yq # The repository to scan.
-          releases-only: true # We know that all relevant tags have a GitHub release for them.
-        id: yq
-
-      - name: Clone source code
-        uses: actions/checkout@v2
-        with:
-          ref: ${{ steps.yq.outputs.tag }}
+      - uses: actions/checkout@v2

       - name: Set up QEMU
         uses: docker/setup-qemu-action@v1

@@ -6,26 +6,5 @@ RUN set -e -x \
     && /opt/devtools.sh
 ENV PATH=/go/bin:$PATH

-# install mkdocs
-RUN set -ex \
-    && buildDeps=' \
-        build-essential \
-        python3-dev \
-    ' \
-    && apt-get update && apt-get install -y --no-install-recommends \
-        $buildDeps \
-        python3 \
-        python3-setuptools \
-        python3-wheel \
-        python3-pip \
-    && pip3 install --upgrade \
-        pip \
-        'Markdown>=2.6.9' \
-        'mkdocs>=0.16.3' \
-        'mkdocs-material>=1.10.1' \
-        'markdown-include>=0.5.1' \
-    && apt-get purge -y --auto-remove $buildDeps \
-    && rm -rf /var/lib/apt/lists/*
-
 ENV CGO_ENABLED 0
 ENV GOPATH /go:/yq

Makefile | 10

@@ -17,6 +17,7 @@ help:
     @echo ' make vendor Install dependencies to vendor directory.'
     @echo ' make format Run code formatter.'
     @echo ' make check Run static code analysis (lint).'
+    @echo ' make secure Run gosec.'
     @echo ' make test Run tests on project.'
     @echo ' make cover Run tests and capture code coverage metrics on project.'
     @echo ' make clean Clean the directory tree of produced artifacts.'

@@ -84,6 +85,10 @@ format: vendor
 check: format
     ${DOCKRUN} bash ./scripts/check.sh

+.PHONY: secure
+secure:
+    ${DOCKRUN} bash ./scripts/secure.sh
+
 .PHONY: test
 test: check
     ${DOCKRUN} bash ./scripts/test.sh

@@ -96,11 +101,6 @@ cover: check
     @find cover -type d -exec chmod 755 {} \; || :
     @find cover -type f -exec chmod 644 {} \; || :

-.PHONY: build-docs
-build-docs: prepare mkdocs.yml mkdocs/*
-    ${DOCKRUN} mkdocs build
-    @find docs -type d -exec chmod 755 {} \; || :
-    @find docs -type f -exec chmod 644 {} \; || :
-
 .PHONY: release
 release: xcompile
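The new `secure` target follows the same containerised pattern as `check` and `test` (each recipe runs under `${DOCKRUN}`). Assuming Docker is available locally, it is invoked like any other target; a minimal sketch:

```bash
# Runs gosec via ./scripts/secure.sh inside the build container
make secure
```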
README.md | 16

@@ -90,7 +90,7 @@ docker run --rm -v "${PWD}":/workdir mikefarah/yq <command> [flags] [expression ]FILE...
 #### Run commands interactively:

 ```bash
-docker run --rm -it -v "${PWD}":/workdir mikefarah/yq sh
+docker run --rm -it -v "${PWD}":/workdir --entrypoint sh mikefarah/yq
 ```

 It can be useful to have a bash function to avoid typing the whole docker command:

@@ -103,7 +103,7 @@ yq() {

 ### Go Get:
 ```
-GO111MODULE=on go get github.com/mikefarah/yq
+GO111MODULE=on go get github.com/mikefarah/yq/v4
 ```

 ## Community Supported Installation methods

@@ -119,6 +119,8 @@ See [webi](https://webinstall.dev/)
 Supported by @adithyasunil26 (https://github.com/webinstall/webi-installers/tree/master/yq)

 ### Windows:
+[](https://chocolatey.org/packages/yq)
+[](https://chocolatey.org/packages/yq)
 ```
 choco install yq
 ```

@@ -151,19 +153,21 @@ sudo apt install yq -y
 Supported by @rmescandon (https://launchpad.net/~rmescandon/+archive/ubuntu/yq)

 ## Features
+- [Detailed documentation with many examples](https://mikefarah.gitbook.io/yq/)
 - Written in portable go, so you can download a lovely dependency free binary
 - Uses similar syntax as `jq` but works with YAML and JSON files
 - Fully supports multi document yaml files
 - Colorized yaml output
-- [Deeply traverse yaml](https://mikefarah.gitbook.io/yq/v/v4.x/traverse)
-- [Sort yaml by keys](https://mikefarah.gitbook.io/yq/v/v4.x/sort-keys)
-- Manipulate yaml [comments](https://mikefarah.gitbook.io/yq/comment-operators), [styling](https://mikefarah.gitbook.io/yq/style), [tags](https://mikefarah.gitbook.io/yq/tag) and [anchors and aliases](https://mikefarah.gitbook.io/yq/anchor-and-alias-operators).
+- [Deeply traverse yaml](https://mikefarah.gitbook.io/yq/operators/traverse-read)
+- [Sort yaml by keys](https://mikefarah.gitbook.io/yq/operators/sort-keys)
+- Manipulate yaml [comments](https://mikefarah.gitbook.io/yq/operators/comment-operators), [styling](https://mikefarah.gitbook.io/yq/operators/style), [tags](https://mikefarah.gitbook.io/yq/operators/tag) and [anchors and aliases](https://mikefarah.gitbook.io/yq/operators/anchor-and-alias-operators).
 - [Update yaml inplace](https://mikefarah.gitbook.io/yq/v/v4.x/commands/evaluate#flags)
-- [Complex expressions to select and update](https://mikefarah.gitbook.io/yq/v/v4.x/select#select-and-update-matching-values-in-map)
+- [Complex expressions to select and update](https://mikefarah.gitbook.io/yq/operators/select#select-and-update-matching-values-in-map)
 - Keeps yaml formatting and comments when updating (though there are issues with whitespace)
 - [Convert to/from json to yaml](https://mikefarah.gitbook.io/yq/v/v4.x/usage/convert)
 - [Pipe data in by using '-'](https://mikefarah.gitbook.io/yq/v/v4.x/commands/evaluate)
 - [General shell completion scripts (bash/zsh/fish/powershell)](https://mikefarah.gitbook.io/yq/v/v4.x/commands/shell-completion)
+- [Reduce](https://mikefarah.gitbook.io/yq/operators/reduce) to merge multiple files or sum an array or other fancy things.

 ## [Usage](https://mikefarah.gitbook.io/yq/)
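The README refers to a `yq()` shell function that wraps the docker command, but only the `yq() {` context line appears in the captured hunk. A minimal sketch of such a wrapper, assuming the same image and working-directory mount used in the README's other docker examples:

```bash
# Hypothetical wrapper; the README's actual function body is not shown in this diff.
yq() {
  docker run --rm -i -v "${PWD}":/workdir mikefarah/yq "$@"
}
```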
@@ -17,6 +17,9 @@ func createEvaluateAllCommand() *cobra.Command {
     Example: `
 # merges f2.yml into f1.yml (inplace)
 yq eval-all --inplace 'select(fileIndex == 0) * select(fileIndex == 1)' f1.yml f2.yml
+
+# use '-' as a filename to read from STDIN
+cat file2.yml | yq ea '.a.b' file1.yml - file3.yml
 `,
     Long: "Evaluate All:\nUseful when you need to run an expression across several yaml documents or files. Consumes more memory than eval",
     RunE: evaluateAll,

@@ -42,14 +45,21 @@ func evaluateAll(cmd *cobra.Command, args []string) error {
         colorsEnabled = true
     }

-    if writeInplace && len(args) < 2 {
+    firstFileIndex := -1
+    if !nullInput && len(args) == 1 {
+        firstFileIndex = 0
+    } else if len(args) > 1 {
+        firstFileIndex = 1
+    }
+
+    if writeInplace && (firstFileIndex == -1) {
         return fmt.Errorf("Write inplace flag only applicable when giving an expression and at least one file")
     }

     if writeInplace {
         // only use colors if its forced
         colorsEnabled = forceColor
-        writeInPlaceHandler := yqlib.NewWriteInPlaceHandler(args[1])
+        writeInPlaceHandler := yqlib.NewWriteInPlaceHandler(args[firstFileIndex])
         out, err = writeInPlaceHandler.CreateTempFile()
         if err != nil {
             return err
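The `firstFileIndex` logic above appears to let `--inplace` work when the only argument is a file (previously an expression plus a file were required). A minimal sketch of the invocation this seems to enable, assuming a local sample.yml:

```bash
# No expression given: yq applies its default behaviour and rewrites sample.yml in place.
yq eval-all --inplace sample.yml
```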
@@ -21,6 +21,9 @@ yq e '.a.b | length' f1.yml f2.yml
 # prints out the file
 yq e sample.yaml

+# use '-' as a filename to read from STDIN
+cat file2.yml | yq e '.a.b' file1.yml - file3.yml
+
 # prints a new yaml document
 yq e -n '.a.b.c = "cat"'

@@ -62,14 +65,21 @@ func evaluateSequence(cmd *cobra.Command, args []string) error {
         colorsEnabled = true
     }

-    if writeInplace && len(args) < 2 {
+    firstFileIndex := -1
+    if !nullInput && len(args) == 1 {
+        firstFileIndex = 0
+    } else if len(args) > 1 {
+        firstFileIndex = 1
+    }
+
+    if writeInplace && (firstFileIndex == -1) {
         return fmt.Errorf("Write inplace flag only applicable when giving an expression and at least one file")
     }

     if writeInplace {
         // only use colors if its forced
         colorsEnabled = forceColor
-        writeInPlaceHandler := yqlib.NewWriteInPlaceHandler(args[1])
+        writeInPlaceHandler := yqlib.NewWriteInPlaceHandler(args[firstFileIndex])
         out, err = writeInPlaceHandler.CreateTempFile()
         if err != nil {
             return err

@@ -102,7 +112,7 @@ func evaluateSequence(cmd *cobra.Command, args []string) error {
             err = streamEvaluator.EvaluateFiles(processExpression(""), []string{args[0]}, printer)
         }
     default:
-        err = streamEvaluator.EvaluateFiles(args[0], args[1:], printer)
+        err = streamEvaluator.EvaluateFiles(processExpression(args[0]), args[1:], printer)
     }
     completedSuccessfully = err == nil
@@ -11,7 +11,7 @@ var (
     GitDescribe string

     // Version is main version number that is being run at the moment.
-    Version = "4.3.2"
+    Version = "4.9.3"

     // VersionPrerelease is a pre-release marker for the version. If this is "" (empty string)
     // then it means that it is a final release. Otherwise, this is a pre-release

@@ -1,4 +1,6 @@
-a: simple # just the best
-b: [1, 2]
-c:
-  test: 1
+a:
+  key1: "value1"
+  key2: 2.6
+ab:
+  key1: 6
+  key2: "h"

@@ -1,4 +1,4 @@
-FROM mikefarah/yq:4.3.2
+FROM mikefarah/yq:4.9.3

 COPY entrypoint.sh /entrypoint.sh

go.mod | 10

@@ -1,14 +1,14 @@
 module github.com/mikefarah/yq/v4

 require (
-    github.com/elliotchance/orderedmap v1.3.0
+    github.com/elliotchance/orderedmap v1.4.0
     github.com/fatih/color v1.10.0
-    github.com/goccy/go-yaml v1.8.4
-    github.com/jinzhu/copier v0.1.0
-    github.com/spf13/cobra v1.1.1
+    github.com/goccy/go-yaml v1.8.9
+    github.com/jinzhu/copier v0.2.8
+    github.com/spf13/cobra v1.1.3
     github.com/timtadh/data-structures v0.5.3 // indirect
     github.com/timtadh/lexmachine v0.2.2
-    golang.org/x/sys v0.0.0-20210105210732-16f7687f5001 // indirect
+    golang.org/x/sys v0.0.0-20210317225723-c4fcb01b228e // indirect
     gopkg.in/op/go-logging.v1 v1.0.0-20160211212156-b2cb9fa56473
     gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b
 )

go.sum | 33
@@ -36,8 +36,8 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c
 github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ=
 github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no=
-github.com/elliotchance/orderedmap v1.3.0 h1:k6m77/d0zCXTjsk12nX40TkEBkSICq8T4s6R6bpCqU0=
-github.com/elliotchance/orderedmap v1.3.0/go.mod h1:8hdSl6jmveQw8ScByd3AaNHNk51RhbTazdqtTty+NFw=
+github.com/elliotchance/orderedmap v1.4.0 h1:wZtfeEONCbx6in1CZyE6bELEt/vFayMvsxqI5SgsR+A=
+github.com/elliotchance/orderedmap v1.4.0/go.mod h1:wsDwEaX5jEoyhbs7x93zk2H/qv0zwuhg4inXhDkYqys=
 github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4=
 github.com/fatih/color v1.10.0 h1:s36xzo75JdqLaaWoiEHk767eHiwo0598uUxyfiPkDsg=
 github.com/fatih/color v1.10.0/go.mod h1:ELkj/draVOlAH/xkhN6mQ50Qd0MPOk5AAr3maGEBuJM=

@@ -52,8 +52,8 @@ github.com/go-playground/locales v0.13.0/go.mod h1:taPMhCMXrRLJO55olJkUXHZBHCxTM
 github.com/go-playground/universal-translator v0.17.0/go.mod h1:UkSxE5sNxxRwHyU+Scu5vgOQjsIJAF8j9muTVoKLVtA=
 github.com/go-playground/validator/v10 v10.4.1/go.mod h1:nlOn6nFhuKACm19sB/8EGNn9GlaMV7XkbRSipzJ0Ii4=
 github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
-github.com/goccy/go-yaml v1.8.4 h1:AOEdR7aQgbgwHznGe3BLkDQVujxCPUpHOZZcQcp8Y3M=
-github.com/goccy/go-yaml v1.8.4/go.mod h1:U/jl18uSupI5rdI2jmuCswEA2htH9eXfferR3KfscvA=
+github.com/goccy/go-yaml v1.8.9 h1:4AEXg2qx+/w29jXnXpMY6mTckmYu1TMoHteKuMf0HFg=
+github.com/goccy/go-yaml v1.8.9/go.mod h1:U/jl18uSupI5rdI2jmuCswEA2htH9eXfferR3KfscvA=
 github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
 github.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4=
 github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=

@@ -101,8 +101,8 @@ github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2p
 github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc=
 github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM=
 github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
-github.com/jinzhu/copier v0.1.0 h1:Vh8xALtH3rrKGB/XIRe5d0yCTHPZFauWPLvdpDAbi88=
-github.com/jinzhu/copier v0.1.0/go.mod h1:24xnZezI2Yqac9J61UC6/dG/k76ttpq0DdJI3QmUvro=
+github.com/jinzhu/copier v0.2.8 h1:N8MbL5niMwE3P4dOwurJixz5rMkKfujmMRFmAanSzWE=
+github.com/jinzhu/copier v0.2.8/go.mod h1:24xnZezI2Yqac9J61UC6/dG/k76ttpq0DdJI3QmUvro=
 github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo=
 github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
 github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=

@@ -168,10 +168,9 @@ github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4k
 github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA=
 github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ=
 github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
-github.com/spf13/cobra v1.1.1 h1:KfztREH0tPxJJ+geloSLaAkaPkr4ki2Er5quFV1TDo4=
-github.com/spf13/cobra v1.1.1/go.mod h1:WnodtKOvamDL/PwE2M4iKs8aMDBZ5Q5klgD3qfVJQMI=
+github.com/spf13/cobra v1.1.3 h1:xghbfqPkxzxP3C/f3n5DdpAbdKLj4ZE4BWQI362l53M=
+github.com/spf13/cobra v1.1.3/go.mod h1:pGADOWyqRD/YMrPZigI/zbliZ2wVD/23d+is3pSWzOo=
 github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo=
-github.com/spf13/pflag v1.0.3 h1:zPAT6CGy6wXeQ7NtTnaTerfKOsV6V6F8agHXFiazDkg=
 github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
 github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
 github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=

@@ -180,8 +179,9 @@ github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+
 github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
 github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
 github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
-github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk=
 github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
+github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=
+github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
 github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw=
 github.com/timtadh/data-structures v0.5.3 h1:F2tEjoG9qWIyUjbvXVgJqEOGJPMIiYn7U5W5mE+i/vQ=
 github.com/timtadh/data-structures v0.5.3/go.mod h1:9R4XODhJ8JdWFEI8P/HJKqxuJctfBQw6fDibMQny2oU=

@@ -255,10 +255,9 @@ golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7w
 golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae h1:/WDfKMnPU+m5M4xB+6x4kaepxRw6jWvR5iDRdvjHgy8=
 golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20210105210732-16f7687f5001 h1:/dSxr6gT0FNI1MO5WLJo8mTmItROeOKTkDn+7OwWBos=
-golang.org/x/sys v0.0.0-20210105210732-16f7687f5001/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210317225723-c4fcb01b228e h1:XNp2Flc/1eWQGk5BLzqTAN7fQIwIbfyVTuVxXxZh73M=
+golang.org/x/sys v0.0.0-20210317225723-c4fcb01b228e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
 golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
 golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=

@@ -268,7 +267,6 @@ golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGm
 golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
 golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
 golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
-golang.org/x/tools v0.0.0-20190311212946-11955173bddd h1:/e+gpKk9r3dJobndpTytxS2gOy6m5uvpg+ISQoEcusQ=
 golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
 golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
 golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=

@@ -281,7 +279,6 @@ golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgw
 golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
 golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
 golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
-golang.org/x/tools v0.0.0-20191112195655-aa38f8e97acc h1:NCy3Ohtk6Iny5V/reW2Ktypo4zIpWBdRJ1uFMjBxdg8=
 golang.org/x/tools v0.0.0-20191112195655-aa38f8e97acc/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
 golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE=

@@ -308,7 +305,6 @@ google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZi
 google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
 google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
 gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
-gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
 gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
 gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY=
 gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=

@@ -319,11 +315,10 @@ gopkg.in/op/go-logging.v1 v1.0.0-20160211212156-b2cb9fa56473/go.mod h1:N1eN2tsCx
 gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo=
 gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74=
 gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
-gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw=
 gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
 gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
-gopkg.in/yaml.v2 v2.2.8 h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10=
-gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
+gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
 gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b h1:h8qDotaEPuJATrMmW04NCwg7v22aHH28wwpauUhK9Oo=
 gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
 honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
@@ -39,7 +39,11 @@ func (e *allAtOnceEvaluator) EvaluateCandidateNodes(expression string, inputCand
     if err != nil {
         return nil, err
     }
-    return e.treeNavigator.GetMatchingNodes(inputCandidates, node)
+    context, err := e.treeNavigator.GetMatchingNodes(Context{MatchingNodes: inputCandidates}, node)
+    if err != nil {
+        return nil, err
+    }
+    return context.MatchingNodes, nil
 }

 func (e *allAtOnceEvaluator) EvaluateFiles(expression string, filenames []string, printer Printer) error {
@@ -2,29 +2,37 @@ package yqlib

 import (
     "fmt"
-    "strconv"
-    "strings"

     "github.com/jinzhu/copier"
     yaml "gopkg.in/yaml.v3"
 )

 type CandidateNode struct {
     Node *yaml.Node // the actual node
-    Path []interface{} /// the path we took to get to this node
-    Document uint // the document index of this node
+    Parent *CandidateNode // parent node
+    Path []interface{} /// the path we took to get to this node
+    Document uint // the document index of this node
     Filename string
     FileIndex int
+    // when performing op against all nodes given, this will treat all the nodes as one
+    // (e.g. top level cross document merge). This property does not propegate to child nodes.
+    EvaluateTogether bool
+    IsMapKey bool
 }

 func (n *CandidateNode) GetKey() string {
-    return fmt.Sprintf("%v - %v", n.Document, n.Path)
+    keyPrefix := ""
+    if n.IsMapKey {
+        keyPrefix = "key-"
+    }
+    return fmt.Sprintf("%v%v - %v", keyPrefix, n.Document, n.Path)
 }

 func (n *CandidateNode) CreateChild(path interface{}, node *yaml.Node) *CandidateNode {
     return &CandidateNode{
         Node: node,
         Path: n.createChildPath(path),
+        Parent: n,
         Document: n.Document,
         Filename: n.Filename,
         FileIndex: n.FileIndex,

@@ -60,11 +68,10 @@ func (n *CandidateNode) UpdateFrom(other *CandidateNode) {
     n.UpdateAttributesFrom(other)
     n.Node.Content = other.Node.Content
     n.Node.Value = other.Node.Value
-    n.Node.Alias = other.Node.Alias
-    n.Node.Anchor = other.Node.Anchor
 }

 func (n *CandidateNode) UpdateAttributesFrom(other *CandidateNode) {
+    log.Debug("UpdateAttributesFrom: n: %v other: %v", n.GetKey(), other.GetKey())
     if n.Node.Kind != other.Node.Kind {
         // clear out the contents when switching to a different type
         // e.g. map to array

@@ -73,56 +80,22 @@ func (n *CandidateNode) UpdateAttributesFrom(other *CandidateNode) {
     }
     n.Node.Kind = other.Node.Kind
     n.Node.Tag = other.Node.Tag
+    n.Node.Alias = other.Node.Alias
+    n.Node.Anchor = other.Node.Anchor

     // merge will pickup the style of the new thing
     // when autocreating nodes
     if n.Node.Style == 0 {
         n.Node.Style = other.Node.Style
     }
-    n.Node.FootComment = n.Node.FootComment + other.Node.FootComment
-    n.Node.HeadComment = n.Node.HeadComment + other.Node.HeadComment
-    n.Node.LineComment = n.Node.LineComment + other.Node.LineComment
-}
-
-func (n *CandidateNode) PathStackToString() string {
-    return mergePathStackToString(n.Path)
-}
-
-func mergePathStackToString(pathStack []interface{}) string {
-    var sb strings.Builder
-    for index, path := range pathStack {
-        switch path.(type) {
-        case int, int64:
-            // if arrayMergeStrategy == AppendArrayMergeStrategy {
-            //     sb.WriteString("[+]")
-            // } else {
-            sb.WriteString(fmt.Sprintf("[%v]", path))
-            // }
-
-        default:
-            s := fmt.Sprintf("%v", path)
-            var _, errParsingInt = strconv.ParseInt(s, 10, 64) // nolint
-
-            hasSpecial := strings.Contains(s, ".") || strings.Contains(s, "[") || strings.Contains(s, "]") || strings.Contains(s, "\"")
-            hasDoubleQuotes := strings.Contains(s, "\"")
-            wrappingCharacterStart := "\""
-            wrappingCharacterEnd := "\""
-            if hasDoubleQuotes {
-                wrappingCharacterStart = "("
-                wrappingCharacterEnd = ")"
-            }
-            if hasSpecial || errParsingInt == nil {
-                sb.WriteString(wrappingCharacterStart)
-            }
-            sb.WriteString(s)
-            if hasSpecial || errParsingInt == nil {
-                sb.WriteString(wrappingCharacterEnd)
-            }
-        }
-
-        if index < len(pathStack)-1 {
-            sb.WriteString(".")
-        }
+    if other.Node.FootComment != "" {
+        n.Node.FootComment = other.Node.FootComment
+    }
+    if other.Node.HeadComment != "" {
+        n.Node.HeadComment = other.Node.HeadComment
+    }
+    if other.Node.LineComment != "" {
+        n.Node.LineComment = other.Node.LineComment
     }
-    return sb.String()
 }

pkg/yqlib/context.go (new file) | 68

@@ -0,0 +1,68 @@
+package yqlib
+
+import (
+    "container/list"
+
+    "github.com/jinzhu/copier"
+)
+
+type Context struct {
+    MatchingNodes *list.List
+    Variables map[string]*list.List
+    DontAutoCreate bool
+}
+
+func (n *Context) SingleReadonlyChildContext(candidate *CandidateNode) Context {
+    list := list.New()
+    list.PushBack(candidate)
+    newContext := n.ChildContext(list)
+    newContext.DontAutoCreate = true
+    return newContext
+}
+
+func (n *Context) SingleChildContext(candidate *CandidateNode) Context {
+    list := list.New()
+    list.PushBack(candidate)
+    return n.ChildContext(list)
+}
+
+func (n *Context) GetVariable(name string) *list.List {
+    if n.Variables == nil {
+        return nil
+    }
+    return n.Variables[name]
+}
+
+func (n *Context) SetVariable(name string, value *list.List) {
+    if n.Variables == nil {
+        n.Variables = make(map[string]*list.List)
+    }
+    n.Variables[name] = value
+}
+
+func (n *Context) ChildContext(results *list.List) Context {
+    clone := Context{}
+    err := copier.Copy(&clone, n)
+    if err != nil {
+        log.Error("Error cloning context :(")
+        panic(err)
+    }
+    clone.MatchingNodes = results
+    return clone
+}
+
+func (n *Context) Clone() Context {
+    clone := Context{}
+    err := copier.Copy(&clone, n)
+    if err != nil {
+        log.Error("Error cloning context :(")
+        panic(err)
+    }
+    return clone
+}
+
+func (n *Context) ReadOnlyClone() Context {
+    clone := n.Clone()
+    clone.DontAutoCreate = true
+    return clone
+}
@@ -3,16 +3,14 @@ package yqlib
 import (
     "fmt"

-    "container/list"
-
     logging "gopkg.in/op/go-logging.v1"
 )

 type DataTreeNavigator interface {
-    // given a list of CandidateEntities and a expressionNode,
-    // this will process the list against the given expressionNode and return
-    // a new list of matching candidates
-    GetMatchingNodes(matchingNodes *list.List, expressionNode *ExpressionNode) (*list.List, error)
+    // given the context and a expressionNode,
+    // this will process the against the given expressionNode and return
+    // a new context of matching candidates
+    GetMatchingNodes(context Context, expressionNode *ExpressionNode) (Context, error)
 }

 type dataTreeNavigator struct {

@@ -22,22 +20,22 @@ func NewDataTreeNavigator() DataTreeNavigator {
     return &dataTreeNavigator{}
 }

-func (d *dataTreeNavigator) GetMatchingNodes(matchingNodes *list.List, expressionNode *ExpressionNode) (*list.List, error) {
+func (d *dataTreeNavigator) GetMatchingNodes(context Context, expressionNode *ExpressionNode) (Context, error) {
     if expressionNode == nil {
         log.Debugf("getMatchingNodes - nothing to do")
-        return matchingNodes, nil
+        return context, nil
     }
     log.Debugf("Processing Op: %v", expressionNode.Operation.toString())
     if log.IsEnabledFor(logging.DEBUG) {
-        for el := matchingNodes.Front(); el != nil; el = el.Next() {
+        for el := context.MatchingNodes.Front(); el != nil; el = el.Next() {
             log.Debug(NodeToString(el.Value.(*CandidateNode)))
         }
     }
     log.Debug(">>")
     handler := expressionNode.Operation.OperationType.Handler
     if handler != nil {
-        return handler(d, matchingNodes, expressionNode)
+        return handler(d, context, expressionNode)
     }
-    return nil, fmt.Errorf("Unknown operator %v", expressionNode.Operation.OperationType)
+    return Context{}, fmt.Errorf("Unknown operator %v", expressionNode.Operation.OperationType)

 }
@@ -101,7 +101,7 @@ will output
 - hello
 ```

-## Update array (append)
+## Append to array
 Given a sample.yml file of:
 ```yaml
 a:

@@ -127,6 +127,36 @@ b:
 - 4
 ```

+## Relative append
+Given a sample.yml file of:
+```yaml
+a:
+  a1:
+    b:
+      - cat
+  a2:
+    b:
+      - dog
+  a3: {}
+```
+then
+```bash
+yq eval '.a[].b += ["mouse"]' sample.yml
+```
+will output
+```yaml
+a:
+  a1:
+    b:
+      - cat
+      - mouse
+  a2:
+    b:
+      - dog
+      - mouse
+  a3: {b: [mouse]}
+```
+
 ## String concatenation
 Given a sample.yml file of:
 ```yaml

@@ -143,22 +173,6 @@ a: catmeow
 b: meow
 ```

-## Relative string concatenation
-Given a sample.yml file of:
-```yaml
-a: cat
-b: meow
-```
-then
-```bash
-yq eval '.a += .b' sample.yml
-```
-will output
-```yaml
-a: catmeow
-b: meow
-```
-
 ## Number addition - float
 If the lhs or rhs are floats then the expression will be calculated with floats.

@@ -195,17 +209,31 @@ a: 7
 b: 4
 ```

-## Increment number
+## Increment numbers
 Given a sample.yml file of:
 ```yaml
 a: 3
+b: 5
 ```
 then
 ```bash
-yq eval '.a += 1' sample.yml
+yq eval '.[] += 1' sample.yml
 ```
 will output
 ```yaml
 a: 4
+b: 6
+```
+
+## Add to null
+Adding to null simply returns the rhs
+
+Running
+```bash
+yq eval --null-input 'null + "cat"'
+```
+will output
+```yaml
+cat
 ```
@@ -1,8 +1,99 @@
-Use the `alias` and `anchor` operators to read and write yaml aliases and anchors. The `explode` operator normalises a yaml file (dereference aliases and remove anchor names).
+Use the `alias` and `anchor` operators to read and write yaml aliases and anchors. The `explode` operator normalises a yaml file (dereference (or expands) aliases and remove anchor names).

 `yq` supports merge aliases (like `<<: *blah`) however this is no longer in the standard yaml spec (1.2) and so `yq` will automatically add the `!!merge` tag to these nodes as it is effectively a custom tag.

+
+## Merge one map
+see https://yaml.org/type/merge.html
+
+Given a sample.yml file of:
+```yaml
+- &CENTER
+  x: 1
+  y: 2
+- &LEFT
+  x: 0
+  y: 2
+- &BIG
+  r: 10
+- &SMALL
+  r: 1
+- !!merge <<: *CENTER
+  r: 10
+```
+then
+```bash
+yq eval '.[4] | explode(.)' sample.yml
+```
+will output
+```yaml
+x: 1
+y: 2
+r: 10
+```
+
+## Merge multiple maps
+see https://yaml.org/type/merge.html
+
+Given a sample.yml file of:
+```yaml
+- &CENTER
+  x: 1
+  y: 2
+- &LEFT
+  x: 0
+  y: 2
+- &BIG
+  r: 10
+- &SMALL
+  r: 1
+- !!merge <<:
+    - *CENTER
+    - *BIG
+```
+then
+```bash
+yq eval '.[4] | explode(.)' sample.yml
+```
+will output
+```yaml
+r: 10
+x: 1
+y: 2
+```
+
+## Override
+see https://yaml.org/type/merge.html
+
+Given a sample.yml file of:
+```yaml
+- &CENTER
+  x: 1
+  y: 2
+- &LEFT
+  x: 0
+  y: 2
+- &BIG
+  r: 10
+- &SMALL
+  r: 1
+- !!merge <<:
+    - *BIG
+    - *LEFT
+    - *SMALL
+  x: 1
+```
+then
+```bash
+yq eval '.[4] | explode(.)' sample.yml
+```
+will output
+```yaml
+r: 10
+x: 1
+y: 2
+```
+
 ## Get anchor
 Given a sample.yml file of:
 ```yaml

@@ -78,6 +169,22 @@ b: &meow purr
 a: *meow
 ```

+## Set alias to blank does nothing
+Given a sample.yml file of:
+```yaml
+b: &meow purr
+a: cat
+```
+then
+```bash
+yq eval '.a alias = ""' sample.yml
+```
+will output
+```yaml
+b: &meow purr
+a: cat
+```
+
 ## Set alias relatively using assign-update
 Given a sample.yml file of:
 ```yaml

@@ -183,9 +290,9 @@ bar:
   c: bar_c
 foobarList:
   b: bar_b
-  a: foo_a
-  thing: bar_thing
+  thing: foo_thing
   c: foobarList_c
+  a: foo_a
 foobar:
   c: foo_c
   a: foo_a
@@ -1,10 +1,4 @@
-This operator is used to update node values. It can be used in either the:
-
-### plain form: `=`
-Which will assign the LHS node values to the RHS node values. The RHS expression is run against the matching nodes in the pipeline.
-
-### relative form: `|=`
-This will do a similar thing to the plain form, however, the RHS expression is run against _the LHS nodes_. This is useful for updating values based on old values, e.g. increment.
 ## Create yaml file
 Running
 ```bash
@@ -1,5 +1,14 @@
-The `or` and `and` operators take two parameters and return a boolean result. `not` flips a boolean from true to false, or vice versa. These are most commonly used with the `select` operator to filter particular nodes.
-## OR example
+The `or` and `and` operators take two parameters and return a boolean result.
+
+`not` flips a boolean from true to false, or vice versa.
+
+`any` will return `true` if there are any `true` values in a array sequence, and `all` will return true if _all_ elements in an array are true.
+
+`any_c(condition)` and `all_c(condition)` are like `any` and `all` but they take a condition expression that is used against each element to determine if it's `true`. Note: in `jq` you can simply pass a condition to `any` or `all` and it simply works - `yq` isn't that clever..yet
+
+These are most commonly used with the `select` operator to filter particular nodes.
+
+## `or` example
 Running
 ```bash
 yq eval --null-input 'true or false'

@@ -9,7 +18,7 @@ will output
 true
 ```

-## AND example
+## `and` example
 Running
 ```bash
 yq eval --null-input 'true and false'

@@ -41,6 +50,104 @@ will output
 b: fly
 ```

+## `any` returns true if any boolean in a given array is true
+Given a sample.yml file of:
+```yaml
+- false
+- true
+```
+then
+```bash
+yq eval 'any' sample.yml
+```
+will output
+```yaml
+true
+```
+
+## `any` returns false for an empty array
+Given a sample.yml file of:
+```yaml
+[]
+```
+then
+```bash
+yq eval 'any' sample.yml
+```
+will output
+```yaml
+false
+```
+
+## `any_c` returns true if any element in the array is true for the given condition.
+Given a sample.yml file of:
+```yaml
+a:
+  - rad
+  - awesome
+b:
+  - meh
+  - whatever
+```
+then
+```bash
+yq eval '.[] |= any_c(. == "awesome")' sample.yml
+```
+will output
+```yaml
+a: true
+b: false
+```
+
+## `all` returns true if all booleans in a given array are true
+Given a sample.yml file of:
+```yaml
+- true
+- true
+```
+then
+```bash
+yq eval 'all' sample.yml
+```
+will output
+```yaml
+true
+```
+
+## `all` returns true for an empty array
+Given a sample.yml file of:
+```yaml
+[]
+```
+then
+```bash
+yq eval 'all' sample.yml
+```
+will output
+```yaml
+true
+```
+
+## `all_c` returns true if all elements in the array are true for the given condition.
+Given a sample.yml file of:
+```yaml
+a:
+  - rad
+  - awesome
+b:
+  - meh
+  - 12
+```
+then
+```bash
+yq eval '.[] |= all_c(tag == "!!str")' sample.yml
+```
+will output
+```yaml
+a: true
+b: false
+```
+
 ## Not true is false
 Running
 ```bash
@ -1,4 +1,15 @@
|
|||||||
Use these comment operators to set or retrieve comments.
|
Use these comment operators to set or retrieve comments.
|
||||||
|
|
||||||
|
Like the `=` and `|=` assign operators, the same syntax applies when updating comments:
|
||||||
|
|
||||||
|
|
||||||
|
### plain form: `=`
|
||||||
|
This will set the comments of the LHS nodes to the value of the RHS expression. The RHS is run against the matching nodes in the pipeline.
|
||||||
|
|
||||||
|
### relative form: `|=`
|
||||||
|
Similar to the plain form; however, the RHS evaluates against each matching LHS node. This is useful if you want to set the comments based on a relative expression of the node, for instance its value or path.
|
||||||
|
|
||||||
|
|
||||||
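As a rough sketch of the relative form in action (the `sample.yml` file here is an assumption, and exact comment placement can vary between versions), setting a value's line comment to the value itself:

```bash
# sample.yml (illustrative):
#   a: cat
# Relative form: the RHS runs against the matching LHS node, so "." is that node's value.
yq eval '.a lineComment |= .' sample.yml
# expected output (roughly):
# a: cat # cat
```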
## Set line comment
|
## Set line comment
|
||||||
Given a sample.yml file of:
|
Given a sample.yml file of:
|
||||||
```yaml
|
```yaml
|
||||||
@ -77,18 +88,23 @@ a: cat
|
|||||||
b: dog # leave this
|
b: dog # leave this
|
||||||
```
|
```
|
||||||
|
|
||||||
## Remove all comments
|
## Remove (strip) all comments
|
||||||
|
Note the use of `...` to ensure key nodes are included.
|
||||||
|
|
||||||
Given a sample.yml file of:
|
Given a sample.yml file of:
|
||||||
```yaml
|
```yaml
|
||||||
a: cat # comment
|
a: cat # comment
|
||||||
|
# great
|
||||||
|
b: # key comment
|
||||||
```
|
```
|
||||||
then
|
then
|
||||||
```bash
|
```bash
|
||||||
yq eval '.. comments=""' sample.yml
|
yq eval '... comments=""' sample.yml
|
||||||
```
|
```
|
||||||
will output
|
will output
|
||||||
```yaml
|
```yaml
|
||||||
a: cat
|
a: cat
|
||||||
|
b:
|
||||||
```
|
```
|
||||||
|
|
||||||
## Get line comment
|
## Get line comment
|
||||||
|
100
pkg/yqlib/doc/Entries.md
Normal file
@ -0,0 +1,100 @@
|
|||||||
|
Similar to the same-named functions in `jq`, these functions convert between an object and an array of key-value pairs. This is most useful for performing operations on the keys of maps.
|
||||||
|
## to_entries Map
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
a: 1
|
||||||
|
b: 2
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval 'to_entries' sample.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
- key: a
|
||||||
|
value: 1
|
||||||
|
- key: b
|
||||||
|
value: 2
|
||||||
|
```
|
||||||
|
|
||||||
|
## to_entries Array
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
- a
|
||||||
|
- b
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval 'to_entries' sample.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
- key: 0
|
||||||
|
value: a
|
||||||
|
- key: 1
|
||||||
|
value: b
|
||||||
|
```
|
||||||
|
|
||||||
|
## to_entries null
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
null
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval 'to_entries' sample.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
```
|
||||||
|
|
||||||
|
## from_entries map
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
a: 1
|
||||||
|
b: 2
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval 'to_entries | from_entries' sample.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
a: 1
|
||||||
|
b: 2
|
||||||
|
```
|
||||||
|
|
||||||
|
## from_entries with numeric key indexes
|
||||||
|
from_entries always creates a map, even for numeric keys
|
||||||
|
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
- a
|
||||||
|
- b
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval 'to_entries | from_entries' sample.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
0: a
|
||||||
|
1: b
|
||||||
|
```
|
||||||
|
|
||||||
|
## Use with_entries to update keys
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
a: 1
|
||||||
|
b: 2
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval 'with_entries(.key |= "KEY_" + .)' sample.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
KEY_a: 1
|
||||||
|
KEY_b: 2
|
||||||
|
```
|
||||||
|
|
@ -29,6 +29,24 @@ true
|
|||||||
false
|
false
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## Don't match string
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
- cat
|
||||||
|
- goat
|
||||||
|
- dog
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval '.[] | (. != "*at")' sample.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
false
|
||||||
|
false
|
||||||
|
true
|
||||||
|
```
|
||||||
|
|
||||||
## Match number
|
## Match number
|
||||||
Given a sample.yml file of:
|
Given a sample.yml file of:
|
||||||
```yaml
|
```yaml
|
||||||
@ -47,6 +65,24 @@ true
|
|||||||
false
|
false
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## Don't match number
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
- 3
|
||||||
|
- 4
|
||||||
|
- 5
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval '.[] | (. != 4)' sample.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
true
|
||||||
|
false
|
||||||
|
true
|
||||||
|
```
|
||||||
|
|
||||||
## Match nulls
|
## Match nulls
|
||||||
Running
|
Running
|
||||||
```bash
|
```bash
|
||||||
@ -57,3 +93,31 @@ will output
|
|||||||
true
|
true
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## Non-existent key doesn't equal a value
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
a: frog
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval 'select(.b != "thing")' sample.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
a: frog
|
||||||
|
```
|
||||||
|
|
||||||
|
## Two non-existent keys are equal
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
a: frog
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval 'select(.b == .c)' sample.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
a: frog
|
||||||
|
```
|
||||||
|
|
||||||
|
@ -35,6 +35,26 @@ will output
|
|||||||
0
|
0
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## Get file indices of multiple documents
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
a: cat
|
||||||
|
```
|
||||||
|
And another sample another.yml file of:
|
||||||
|
```yaml
|
||||||
|
a: cat
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval-all 'fileIndex' sample.yml another.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
0
|
||||||
|
---
|
||||||
|
1
|
||||||
|
```
|
||||||
|
|
||||||
## Get file index alias
|
## Get file index alias
|
||||||
Given a sample.yml file of:
|
Given a sample.yml file of:
|
||||||
```yaml
|
```yaml
|
||||||
|
@ -19,6 +19,29 @@ true
|
|||||||
false
|
false
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## Select, checking for existence of deep paths
|
||||||
|
Simply pipe parent expressions into `has`
|
||||||
|
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
- a:
|
||||||
|
b:
|
||||||
|
c: cat
|
||||||
|
- a:
|
||||||
|
b:
|
||||||
|
d: dog
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval '.[] | select(.a.b | has("c"))' sample.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
a:
|
||||||
|
b:
|
||||||
|
c: cat
|
||||||
|
```
|
||||||
|
|
||||||
## Has array index
|
## Has array index
|
||||||
Given a sample.yml file of:
|
Given a sample.yml file of:
|
||||||
```yaml
|
```yaml
|
||||||
|
35
pkg/yqlib/doc/Keys.md
Normal file
@ -0,0 +1,35 @@
|
|||||||
|
# Keys
|
||||||
|
|
||||||
|
Use the `keys` operator to return map keys or array indices.
|
||||||
|
## Map keys
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
dog: woof
|
||||||
|
cat: meow
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval 'keys' sample.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
- dog
|
||||||
|
- cat
|
||||||
|
```
|
||||||
|
|
||||||
|
## Array keys
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
- apple
|
||||||
|
- banana
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval 'keys' sample.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
- 0
|
||||||
|
- 1
|
||||||
|
```
|
||||||
|
|
@ -16,6 +16,20 @@ will output
|
|||||||
3
|
3
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## null length
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
a: null
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval '.a | length' sample.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
0
|
||||||
|
```
|
||||||
|
|
||||||
## Map length
|
## Map length
|
||||||
returns number of entries
|
returns number of entries
|
||||||
|
|
||||||
|
366
pkg/yqlib/doc/Multiply (Merge).md
Normal file
@ -0,0 +1,366 @@
|
|||||||
|
Like the multiply operator in `jq`, this multiply operator will do different things depending on the operands. Currently numbers, arrays and objects are supported.
|
||||||
|
|
||||||
|
## Objects and arrays - merging
|
||||||
|
Objects are merged deeply, matching on keys. By default, array values override and are not deeply merged.
|
||||||
|
|
||||||
|
Note that when merging objects, this operator returns the merged object (not the parent). This will be clearer in the examples below.
|
||||||
|
|
||||||
|
### Merge Flags
|
||||||
|
You can control how objects are merged by using one or more of the following flags. Multiple flags can be used together, e.g. `.a *+? .b`. See examples below
|
||||||
|
|
||||||
|
- `+` to append arrays
|
||||||
|
- `?` to only merge existing fields
|
||||||
|
- `d` to deeply merge arrays
|
||||||
|
|
||||||
|
### Merging files
|
||||||
|
Note the use of `eval-all` to ensure all documents are loaded into memory.
|
||||||
|
|
||||||
|
```bash
|
||||||
|
yq eval-all 'select(fileIndex == 0) * select(fileIndex == 1)' file1.yaml file2.yaml
|
||||||
|
```
|
||||||
|
|
||||||
|
## Multiply integers
|
||||||
|
Running
|
||||||
|
```bash
|
||||||
|
yq eval --null-input '3 * 4'
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
12
|
||||||
|
```
|
||||||
|
|
||||||
|
## Merge objects together, returning merged result only
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
a:
|
||||||
|
field: me
|
||||||
|
fieldA: cat
|
||||||
|
b:
|
||||||
|
field:
|
||||||
|
g: wizz
|
||||||
|
fieldB: dog
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval '.a * .b' sample.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
field:
|
||||||
|
g: wizz
|
||||||
|
fieldA: cat
|
||||||
|
fieldB: dog
|
||||||
|
```
|
||||||
|
|
||||||
|
## Merge objects together, returning parent object
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
a:
|
||||||
|
field: me
|
||||||
|
fieldA: cat
|
||||||
|
b:
|
||||||
|
field:
|
||||||
|
g: wizz
|
||||||
|
fieldB: dog
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval '. * {"a":.b}' sample.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
a:
|
||||||
|
field:
|
||||||
|
g: wizz
|
||||||
|
fieldA: cat
|
||||||
|
fieldB: dog
|
||||||
|
b:
|
||||||
|
field:
|
||||||
|
g: wizz
|
||||||
|
fieldB: dog
|
||||||
|
```
|
||||||
|
|
||||||
|
## Merge keeps style of LHS
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
a: {things: great}
|
||||||
|
b:
|
||||||
|
also: "me"
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval '. * {"a":.b}' sample.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
a: {things: great, also: "me"}
|
||||||
|
b:
|
||||||
|
also: "me"
|
||||||
|
```
|
||||||
|
|
||||||
|
## Merge arrays
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
a:
|
||||||
|
- 1
|
||||||
|
- 2
|
||||||
|
- 3
|
||||||
|
b:
|
||||||
|
- 3
|
||||||
|
- 4
|
||||||
|
- 5
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval '. * {"a":.b}' sample.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
a:
|
||||||
|
- 3
|
||||||
|
- 4
|
||||||
|
- 5
|
||||||
|
b:
|
||||||
|
- 3
|
||||||
|
- 4
|
||||||
|
- 5
|
||||||
|
```
|
||||||
|
|
||||||
|
## Merge, only existing fields
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
a:
|
||||||
|
thing: one
|
||||||
|
cat: frog
|
||||||
|
b:
|
||||||
|
missing: two
|
||||||
|
thing: two
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval '.a *? .b' sample.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
thing: two
|
||||||
|
cat: frog
|
||||||
|
```
|
||||||
|
|
||||||
|
## Merge, appending arrays
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
a:
|
||||||
|
array:
|
||||||
|
- 1
|
||||||
|
- 2
|
||||||
|
- animal: dog
|
||||||
|
value: coconut
|
||||||
|
b:
|
||||||
|
array:
|
||||||
|
- 3
|
||||||
|
- 4
|
||||||
|
- animal: cat
|
||||||
|
value: banana
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval '.a *+ .b' sample.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
array:
|
||||||
|
- 1
|
||||||
|
- 2
|
||||||
|
- animal: dog
|
||||||
|
- 3
|
||||||
|
- 4
|
||||||
|
- animal: cat
|
||||||
|
value: banana
|
||||||
|
```
|
||||||
|
|
||||||
|
## Merge, only existing fields, appending arrays
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
a:
|
||||||
|
thing:
|
||||||
|
- 1
|
||||||
|
- 2
|
||||||
|
b:
|
||||||
|
thing:
|
||||||
|
- 3
|
||||||
|
- 4
|
||||||
|
another:
|
||||||
|
- 1
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval '.a *?+ .b' sample.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
thing:
|
||||||
|
- 1
|
||||||
|
- 2
|
||||||
|
- 3
|
||||||
|
- 4
|
||||||
|
```
|
||||||
|
|
||||||
|
## Merge, deeply merging arrays
|
||||||
|
Merging arrays deeply means arrays are merged like objects, with indices as their keys. In this case, we merge the first item in the array and do nothing with the second.
|
||||||
|
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
a:
|
||||||
|
- name: fred
|
||||||
|
age: 12
|
||||||
|
- name: bob
|
||||||
|
age: 32
|
||||||
|
b:
|
||||||
|
- name: fred
|
||||||
|
age: 34
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval '.a *d .b' sample.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
- name: fred
|
||||||
|
age: 34
|
||||||
|
- name: bob
|
||||||
|
age: 32
|
||||||
|
```
|
||||||
|
|
||||||
|
## Merge arrays of objects together, matching on a key
|
||||||
|
It's a complex command; the trickiness comes from needing the right context in the expressions.
|
||||||
|
First we save the second array into a variable '$two' which lets us reference it later.
|
||||||
|
We then need to update the first array. We will use the relative update (|=) because the RHS expression needs to run relative to the current element of the LHS array.
|
||||||
|
We set the current element of the first array as $cur. Now we multiply (merge) $cur with the matching entry in $two, by passing $two through a select filter.
|
||||||
|
|
||||||
|
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
- a: apple
|
||||||
|
b: appleB
|
||||||
|
- a: kiwi
|
||||||
|
b: kiwiB
|
||||||
|
- a: banana
|
||||||
|
b: bananaB
|
||||||
|
```
|
||||||
|
And another sample another.yml file of:
|
||||||
|
```yaml
|
||||||
|
- a: banana
|
||||||
|
c: bananaC
|
||||||
|
- a: apple
|
||||||
|
b: appleB2
|
||||||
|
- a: dingo
|
||||||
|
c: dingoC
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval-all '(select(fi==1) | .[]) as $two | select(fi==0) | .[] |= (. as $cur | $cur * ($two | select(.a == $cur.a)))' sample.yml another.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
- a: apple
|
||||||
|
b: appleB2
|
||||||
|
- a: kiwi
|
||||||
|
b: kiwiB
|
||||||
|
- a: banana
|
||||||
|
b: bananaB
|
||||||
|
c: bananaC
|
||||||
|
```
|
||||||
|
|
||||||
|
## Merge to prefix an element
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
a: cat
|
||||||
|
b: dog
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval '. * {"a": {"c": .a}}' sample.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
a:
|
||||||
|
c: cat
|
||||||
|
b: dog
|
||||||
|
```
|
||||||
|
|
||||||
|
## Merge with simple aliases
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
a: &cat
|
||||||
|
c: frog
|
||||||
|
b:
|
||||||
|
f: *cat
|
||||||
|
c:
|
||||||
|
g: thongs
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval '.c * .b' sample.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
g: thongs
|
||||||
|
f: *cat
|
||||||
|
```
|
||||||
|
|
||||||
|
## Merge copies anchor names
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
a:
|
||||||
|
c: &cat frog
|
||||||
|
b:
|
||||||
|
f: *cat
|
||||||
|
c:
|
||||||
|
g: thongs
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval '.c * .a' sample.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
g: thongs
|
||||||
|
c: &cat frog
|
||||||
|
```
|
||||||
|
|
||||||
|
## Merge with merge anchors
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
foo: &foo
|
||||||
|
a: foo_a
|
||||||
|
thing: foo_thing
|
||||||
|
c: foo_c
|
||||||
|
bar: &bar
|
||||||
|
b: bar_b
|
||||||
|
thing: bar_thing
|
||||||
|
c: bar_c
|
||||||
|
foobarList:
|
||||||
|
b: foobarList_b
|
||||||
|
!!merge <<:
|
||||||
|
- *foo
|
||||||
|
- *bar
|
||||||
|
c: foobarList_c
|
||||||
|
foobar:
|
||||||
|
c: foobar_c
|
||||||
|
!!merge <<: *foo
|
||||||
|
thing: foobar_thing
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval '.foobar * .foobarList' sample.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
c: foobarList_c
|
||||||
|
<<:
|
||||||
|
- *foo
|
||||||
|
- *bar
|
||||||
|
thing: foobar_thing
|
||||||
|
b: foobarList_b
|
||||||
|
```
|
||||||
|
|
@ -1,238 +0,0 @@
|
|||||||
Like the multiple operator in `jq`, depending on the operands, this multiply operator will do different things. Currently only objects are supported, which have the effect of merging the RHS into the LHS.
|
|
||||||
|
|
||||||
Upcoming versions of `yq` will add support for other types of multiplication (numbers, strings).
|
|
||||||
|
|
||||||
To concatenate when merging objects, use the `*+` form (see examples below). This will recursively merge objects, appending arrays when it encounters them.
|
|
||||||
|
|
||||||
Note that when merging objects, this operator returns the merged object (not the parent). This will be clearer in the examples below.
|
|
||||||
|
|
||||||
## Merging files
|
|
||||||
Note the use of eval-all to ensure all documents are loaded into memory.
|
|
||||||
|
|
||||||
```bash
|
|
||||||
yq eval-all 'select(fileIndex == 0) * select(fileIndex == 1)' file1.yaml file2.yaml
|
|
||||||
```
|
|
||||||
|
|
||||||
## Merge objects together, returning merged result only
|
|
||||||
Given a sample.yml file of:
|
|
||||||
```yaml
|
|
||||||
a:
|
|
||||||
field: me
|
|
||||||
fieldA: cat
|
|
||||||
b:
|
|
||||||
field:
|
|
||||||
g: wizz
|
|
||||||
fieldB: dog
|
|
||||||
```
|
|
||||||
then
|
|
||||||
```bash
|
|
||||||
yq eval '.a * .b' sample.yml
|
|
||||||
```
|
|
||||||
will output
|
|
||||||
```yaml
|
|
||||||
field:
|
|
||||||
g: wizz
|
|
||||||
fieldA: cat
|
|
||||||
fieldB: dog
|
|
||||||
```
|
|
||||||
|
|
||||||
## Merge objects together, returning parent object
|
|
||||||
Given a sample.yml file of:
|
|
||||||
```yaml
|
|
||||||
a:
|
|
||||||
field: me
|
|
||||||
fieldA: cat
|
|
||||||
b:
|
|
||||||
field:
|
|
||||||
g: wizz
|
|
||||||
fieldB: dog
|
|
||||||
```
|
|
||||||
then
|
|
||||||
```bash
|
|
||||||
yq eval '. * {"a":.b}' sample.yml
|
|
||||||
```
|
|
||||||
will output
|
|
||||||
```yaml
|
|
||||||
a:
|
|
||||||
field:
|
|
||||||
g: wizz
|
|
||||||
fieldA: cat
|
|
||||||
fieldB: dog
|
|
||||||
b:
|
|
||||||
field:
|
|
||||||
g: wizz
|
|
||||||
fieldB: dog
|
|
||||||
```
|
|
||||||
|
|
||||||
## Merge keeps style of LHS
|
|
||||||
Given a sample.yml file of:
|
|
||||||
```yaml
|
|
||||||
a: {things: great}
|
|
||||||
b:
|
|
||||||
also: "me"
|
|
||||||
|
|
||||||
```
|
|
||||||
then
|
|
||||||
```bash
|
|
||||||
yq eval '. * {"a":.b}' sample.yml
|
|
||||||
```
|
|
||||||
will output
|
|
||||||
```yaml
|
|
||||||
a: {things: great, also: "me"}
|
|
||||||
b:
|
|
||||||
also: "me"
|
|
||||||
```
|
|
||||||
|
|
||||||
## Merge arrays
|
|
||||||
Given a sample.yml file of:
|
|
||||||
```yaml
|
|
||||||
a:
|
|
||||||
- 1
|
|
||||||
- 2
|
|
||||||
- 3
|
|
||||||
b:
|
|
||||||
- 3
|
|
||||||
- 4
|
|
||||||
- 5
|
|
||||||
```
|
|
||||||
then
|
|
||||||
```bash
|
|
||||||
yq eval '. * {"a":.b}' sample.yml
|
|
||||||
```
|
|
||||||
will output
|
|
||||||
```yaml
|
|
||||||
a:
|
|
||||||
- 3
|
|
||||||
- 4
|
|
||||||
- 5
|
|
||||||
b:
|
|
||||||
- 3
|
|
||||||
- 4
|
|
||||||
- 5
|
|
||||||
```
|
|
||||||
|
|
||||||
## Merge, appending arrays
|
|
||||||
Given a sample.yml file of:
|
|
||||||
```yaml
|
|
||||||
a:
|
|
||||||
array:
|
|
||||||
- 1
|
|
||||||
- 2
|
|
||||||
- animal: dog
|
|
||||||
value: coconut
|
|
||||||
b:
|
|
||||||
array:
|
|
||||||
- 3
|
|
||||||
- 4
|
|
||||||
- animal: cat
|
|
||||||
value: banana
|
|
||||||
```
|
|
||||||
then
|
|
||||||
```bash
|
|
||||||
yq eval '.a *+ .b' sample.yml
|
|
||||||
```
|
|
||||||
will output
|
|
||||||
```yaml
|
|
||||||
array:
|
|
||||||
- 1
|
|
||||||
- 2
|
|
||||||
- animal: dog
|
|
||||||
- 3
|
|
||||||
- 4
|
|
||||||
- animal: cat
|
|
||||||
value: banana
|
|
||||||
```
|
|
||||||
|
|
||||||
## Merge to prefix an element
|
|
||||||
Given a sample.yml file of:
|
|
||||||
```yaml
|
|
||||||
a: cat
|
|
||||||
b: dog
|
|
||||||
```
|
|
||||||
then
|
|
||||||
```bash
|
|
||||||
yq eval '. * {"a": {"c": .a}}' sample.yml
|
|
||||||
```
|
|
||||||
will output
|
|
||||||
```yaml
|
|
||||||
a:
|
|
||||||
c: cat
|
|
||||||
b: dog
|
|
||||||
```
|
|
||||||
|
|
||||||
## Merge with simple aliases
|
|
||||||
Given a sample.yml file of:
|
|
||||||
```yaml
|
|
||||||
a: &cat
|
|
||||||
c: frog
|
|
||||||
b:
|
|
||||||
f: *cat
|
|
||||||
c:
|
|
||||||
g: thongs
|
|
||||||
```
|
|
||||||
then
|
|
||||||
```bash
|
|
||||||
yq eval '.c * .b' sample.yml
|
|
||||||
```
|
|
||||||
will output
|
|
||||||
```yaml
|
|
||||||
g: thongs
|
|
||||||
f: *cat
|
|
||||||
```
|
|
||||||
|
|
||||||
## Merge copies anchor names
|
|
||||||
Given a sample.yml file of:
|
|
||||||
```yaml
|
|
||||||
a:
|
|
||||||
c: &cat frog
|
|
||||||
b:
|
|
||||||
f: *cat
|
|
||||||
c:
|
|
||||||
g: thongs
|
|
||||||
```
|
|
||||||
then
|
|
||||||
```bash
|
|
||||||
yq eval '.c * .a' sample.yml
|
|
||||||
```
|
|
||||||
will output
|
|
||||||
```yaml
|
|
||||||
g: thongs
|
|
||||||
c: &cat frog
|
|
||||||
```
|
|
||||||
|
|
||||||
## Merge with merge anchors
|
|
||||||
Given a sample.yml file of:
|
|
||||||
```yaml
|
|
||||||
foo: &foo
|
|
||||||
a: foo_a
|
|
||||||
thing: foo_thing
|
|
||||||
c: foo_c
|
|
||||||
bar: &bar
|
|
||||||
b: bar_b
|
|
||||||
thing: bar_thing
|
|
||||||
c: bar_c
|
|
||||||
foobarList:
|
|
||||||
b: foobarList_b
|
|
||||||
!!merge <<:
|
|
||||||
- *foo
|
|
||||||
- *bar
|
|
||||||
c: foobarList_c
|
|
||||||
foobar:
|
|
||||||
c: foobar_c
|
|
||||||
!!merge <<: *foo
|
|
||||||
thing: foobar_thing
|
|
||||||
```
|
|
||||||
then
|
|
||||||
```bash
|
|
||||||
yq eval '.foobar * .foobarList' sample.yml
|
|
||||||
```
|
|
||||||
will output
|
|
||||||
```yaml
|
|
||||||
c: foobarList_c
|
|
||||||
<<:
|
|
||||||
- *foo
|
|
||||||
- *bar
|
|
||||||
thing: foobar_thing
|
|
||||||
b: foobarList_b
|
|
||||||
```
|
|
||||||
|
|
76
pkg/yqlib/doc/Reduce.md
Normal file
@ -0,0 +1,76 @@
|
|||||||
|
Reduce is a powerful way to process a collection of data into a new form.
|
||||||
|
|
||||||
|
```
|
||||||
|
<exp> as $<name> ireduce (<init>; <block>)
|
||||||
|
```
|
||||||
|
|
||||||
|
e.g.
|
||||||
|
|
||||||
|
```
|
||||||
|
.[] as $item ireduce (0; . + $item)
|
||||||
|
```
|
||||||
|
|
||||||
|
On the LHS we configure the collection of items to be reduced (`<exp>`) as well as what each element will be called (`$<name>`). Note that the array has been splatted into its individual elements.
|
||||||
|
|
||||||
|
On the RHS there is `<init>`, the starting value of the accumulator and `<block>`, the expression that will update the accumulator for each element in the collection. Note that within the block expression, `.` will evaluate to the current value of the accumulator.
|
||||||
|
|
||||||
|
## yq vs jq syntax
|
||||||
|
Reduce syntax in `yq` is a little different from `jq`: `yq` (currently) isn't as sophisticated as `jq` and only supports infix notation (e.g. `a + b`, where the operator sits between the two parameters), whereas `jq` uses a mix of infix notation and _prefix_ notation (e.g. `reduce a b` is like writing `+ a b`).
|
||||||
|
|
||||||
|
To that end, the reduce operator is called `ireduce`, for backwards compatibility if a `jq`-like prefix version of `reduce` is ever added.
|
||||||
|
|
||||||
|
|
||||||
|
## Sum numbers
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
- 10
|
||||||
|
- 2
|
||||||
|
- 5
|
||||||
|
- 3
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval '.[] as $item ireduce (0; . + $item)' sample.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
20
|
||||||
|
```
|
||||||
|
|
||||||
|
## Merge all yaml files together
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
a: cat
|
||||||
|
```
|
||||||
|
And another sample another.yml file of:
|
||||||
|
```yaml
|
||||||
|
b: dog
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval-all '. as $item ireduce ({}; . * $item )' sample.yml another.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
a: cat
|
||||||
|
b: dog
|
||||||
|
```
|
||||||
|
|
||||||
|
## Convert an array to an object
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
- name: Cathy
|
||||||
|
has: apples
|
||||||
|
- name: Bob
|
||||||
|
has: bananas
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval '.[] as $item ireduce ({}; .[$item | .name] = ($item | .has) )' sample.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
Cathy: apples
|
||||||
|
Bob: bananas
|
||||||
|
```
|
||||||
|
|
31
pkg/yqlib/doc/Split into Documents.md
Normal file
@ -0,0 +1,31 @@
|
|||||||
|
# Split into Documents
|
||||||
|
|
||||||
|
This operator splits all matches into separate documents
|
||||||
|
|
||||||
|
## Split empty
|
||||||
|
Running
|
||||||
|
```bash
|
||||||
|
yq eval --null-input 'splitDoc'
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
|
||||||
|
```
|
||||||
|
|
||||||
|
## Split array
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
- a: cat
|
||||||
|
- b: dog
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval '.[] | splitDoc' sample.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
a: cat
|
||||||
|
---
|
||||||
|
b: dog
|
||||||
|
```
|
||||||
|
|
88
pkg/yqlib/doc/String Operators.md
Normal file
@ -0,0 +1,88 @@
|
|||||||
|
# String Operators
|
||||||
|
|
||||||
|
## Join strings
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
- cat
|
||||||
|
- meow
|
||||||
|
- 1
|
||||||
|
- null
|
||||||
|
- true
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval 'join("; ")' sample.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
cat; meow; 1; ; true
|
||||||
|
```
|
||||||
|
|
||||||
|
## Substitute / Replace string
|
||||||
|
This uses golang regex, described [here](https://github.com/google/re2/wiki/Syntax)
|
||||||
|
Note the use of `|=` to run in the context of the current string value.
|
||||||
|
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
a: dogs are great
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval '.a |= sub("dogs", "cats")' sample.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
a: cats are great
|
||||||
|
```
|
||||||
|
|
||||||
|
## Substitute / Replace string with regex
|
||||||
|
This uses golang regex, described [here](https://github.com/google/re2/wiki/Syntax)
|
||||||
|
Note the use of `|=` to run in the context of the current string value.
|
||||||
|
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
a: cat
|
||||||
|
b: heat
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval '.[] |= sub("(a)", "${1}r")' sample.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
a: cart
|
||||||
|
b: heart
|
||||||
|
```
|
||||||
|
|
||||||
|
## Split strings
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
cat; meow; 1; ; true
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval 'split("; ")' sample.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
- cat
|
||||||
|
- meow
|
||||||
|
- "1"
|
||||||
|
- ""
|
||||||
|
- "true"
|
||||||
|
```
|
||||||
|
|
||||||
|
## Split strings one match
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
word
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval 'split("; ")' sample.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
- word
|
||||||
|
```
|
||||||
|
|
@ -1,4 +1,42 @@
|
|||||||
The style operator can be used to get or set the style of nodes (e.g. string style, yaml style)
|
The style operator can be used to get or set the style of nodes (e.g. string style, yaml style)
|
||||||
|
## Update and set style of a particular node (simple)
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
a:
|
||||||
|
b: thing
|
||||||
|
c: something
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval '.a.b = "new" | .a.b style="double"' sample.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
a:
|
||||||
|
b: "new"
|
||||||
|
c: something
|
||||||
|
```
|
||||||
|
|
||||||
|
## Update and set style of a particular node using path variables
|
||||||
|
You can use a variable to re-use a path
|
||||||
|
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
a:
|
||||||
|
b: thing
|
||||||
|
c: something
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval '.a.b as $x | $x = "new" | $x style="double"' sample.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
a:
|
||||||
|
b: "new"
|
||||||
|
c: something
|
||||||
|
```
|
||||||
|
|
||||||
## Set tagged style
|
## Set tagged style
|
||||||
Given a sample.yml file of:
|
Given a sample.yml file of:
|
||||||
```yaml
|
```yaml
|
||||||
|
71
pkg/yqlib/doc/Subtract.md
Normal file
@ -0,0 +1,71 @@
|
|||||||
|
|
||||||
|
## Number subtraction - float
|
||||||
|
If the lhs or rhs are floats then the expression will be calculated with floats.
|
||||||
|
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
a: 3
|
||||||
|
b: 4.5
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval '.a = .a - .b' sample.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
a: -1.5
|
||||||
|
b: 4.5
|
||||||
|
```
|
||||||
|
|
||||||
|
## Number subtraction - float
|
||||||
|
If the lhs or rhs are floats then the expression will be calculated with floats.
|
||||||
|
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
a: 3
|
||||||
|
b: 4.5
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval '.a = .a - .b' sample.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
a: -1.5
|
||||||
|
b: 4.5
|
||||||
|
```
|
||||||
|
|
||||||
|
## Number subtraction - int
|
||||||
|
If both the lhs and rhs are ints then the expression will be calculated with ints.
|
||||||
|
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
a: 3
|
||||||
|
b: 4
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval '.a = .a - .b' sample.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
a: -1
|
||||||
|
b: 4
|
||||||
|
```
|
||||||
|
|
||||||
|
## Decrement numbers
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
a: 3
|
||||||
|
b: 5
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval '.[] -= 1' sample.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
a: 2
|
||||||
|
b: 4
|
||||||
|
```
|
||||||
|
|
@ -32,8 +32,23 @@ b: apple
|
|||||||
c: banana
|
c: banana
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## Optional Splat
|
||||||
|
Just like splat, but won't error if you run it against scalars
|
||||||
|
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
cat
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval '.[]' sample.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
```
|
||||||
|
|
||||||
## Special characters
|
## Special characters
|
||||||
Use quotes around path elements with special characters
|
Use quotes with brackets around path elements with special characters
|
||||||
|
|
||||||
Given a sample.yml file of:
|
Given a sample.yml file of:
|
||||||
```yaml
|
```yaml
|
||||||
@ -41,7 +56,23 @@ Given a sample.yml file of:
|
|||||||
```
|
```
|
||||||
then
|
then
|
||||||
```bash
|
```bash
|
||||||
yq eval '."{}"' sample.yml
|
yq eval '.["{}"]' sample.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
frog
|
||||||
|
```
|
||||||
|
|
||||||
|
## Keys with spaces
|
||||||
|
Use quotes with brackets around path elements with special characters
|
||||||
|
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
"red rabbit": frog
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval '.["red rabbit"]' sample.yml
|
||||||
```
|
```
|
||||||
will output
|
will output
|
||||||
```yaml
|
```yaml
|
||||||
@ -82,6 +113,23 @@ will output
|
|||||||
null
|
null
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## Optional identifier
|
||||||
|
Like `jq`, this does not output an error when the yaml is not an array or object as expected
|
||||||
|
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
- 1
|
||||||
|
- 2
|
||||||
|
- 3
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval '.a?' sample.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
```
|
||||||
|
|
||||||
## Wildcard matching
|
## Wildcard matching
|
||||||
Given a sample.yml file of:
|
Given a sample.yml file of:
|
||||||
```yaml
|
```yaml
|
82
pkg/yqlib/doc/Unique.md
Normal file
@ -0,0 +1,82 @@
|
|||||||
|
This is used to filter out duplicated items in an array.
|
||||||
|
|
||||||
|
## Unique array of scalars (string/numbers)
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
- 1
|
||||||
|
- 2
|
||||||
|
- 3
|
||||||
|
- 2
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval 'unique' sample.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
- 1
|
||||||
|
- 2
|
||||||
|
- 3
|
||||||
|
```
|
||||||
|
|
||||||
|
## Unique nulls
|
||||||
|
Unique works on the node value, so it considers different representations of nulls to be different
|
||||||
|
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
- ~
|
||||||
|
- null
|
||||||
|
- ~
|
||||||
|
- null
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval 'unique' sample.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
- ~
|
||||||
|
- null
|
||||||
|
```
|
||||||
|
|
||||||
|
## Unique all nulls
|
||||||
|
Run against the node tag to unique all the nulls
|
||||||
|
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
- ~
|
||||||
|
- null
|
||||||
|
- ~
|
||||||
|
- null
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval 'unique_by(tag)' sample.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
- ~
|
||||||
|
```
|
||||||
|
|
||||||
|
## Unique array object fields
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
- name: harry
|
||||||
|
pet: cat
|
||||||
|
- name: billy
|
||||||
|
pet: dog
|
||||||
|
- name: harry
|
||||||
|
pet: dog
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval 'unique_by(.name)' sample.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
- name: harry
|
||||||
|
pet: cat
|
||||||
|
- name: billy
|
||||||
|
pet: dog
|
||||||
|
```
|
||||||
|
|
58
pkg/yqlib/doc/Variable Operators.md
Normal file
@ -0,0 +1,58 @@
|
|||||||
|
For more complex scenarios, variables can be used to hold the values of expressions so they can be used in other expressions.
|
||||||
|
|
||||||
|
## Single value variable
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
a: cat
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval '.a as $foo | $foo' sample.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
cat
|
||||||
|
```
|
||||||
|
|
||||||
|
## Multi value variable
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
- cat
|
||||||
|
- dog
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval '.[] as $foo | $foo' sample.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
cat
|
||||||
|
dog
|
||||||
|
```
|
||||||
|
|
||||||
|
## Using variables as a lookup
|
||||||
|
Example taken from [jq](https://stedolan.github.io/jq/manual/#Variable/SymbolicBindingOperator:...as$identifier|...)
|
||||||
|
|
||||||
|
Given a sample.yml file of:
|
||||||
|
```yaml
|
||||||
|
"posts":
|
||||||
|
- "title": Frist psot
|
||||||
|
"author": anon
|
||||||
|
- "title": A well-written article
|
||||||
|
"author": person1
|
||||||
|
"realnames":
|
||||||
|
"anon": Anonymous Coward
|
||||||
|
"person1": Person McPherson
|
||||||
|
```
|
||||||
|
then
|
||||||
|
```bash
|
||||||
|
yq eval '.realnames as $names | .posts[] | {"title":.title, "author": $names[.author]}' sample.yml
|
||||||
|
```
|
||||||
|
will output
|
||||||
|
```yaml
|
||||||
|
title: Frist psot
|
||||||
|
author: Anonymous Coward
|
||||||
|
title: A well-written article
|
||||||
|
author: Person McPherson
|
||||||
|
```
|
||||||
|
|
@ -0,0 +1,116 @@
|
|||||||
|
# Operators
|
||||||
|
|
||||||
|
In `yq`, expressions are made up of operators and pipes. A context of nodes is passed through the expression and each operation takes the context as input and returns a new context as output. That output is piped in as input for the next operation in the expression. To begin with, the context is set to the first yaml document of the first yaml file (if processing in sequence using eval).
|
||||||
|
|
||||||
|
Let's look at a couple of examples.
|
||||||
|
|
||||||
|
## Example with a simple operator
|
||||||
|
|
||||||
|
Given a document like:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
- [a]
|
||||||
|
- "cat"
|
||||||
|
```
|
||||||
|
|
||||||
|
with an expression:
|
||||||
|
|
||||||
|
```
|
||||||
|
.[] | length
|
||||||
|
```
|
||||||
|
|
||||||
|
`yq` will initially set the context to a single node, the entire yaml document, which is an array of two elements.
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
- [a]
|
||||||
|
- "cat"
|
||||||
|
```
|
||||||
|
|
||||||
|
This gets piped into the splat operator `.[]` which will split out the context into a collection of two nodes `[a]` and `"cat"`. Note that this is _not_ a yaml array.
|
||||||
|
|
||||||
|
The `length` operator takes no arguments, and will simply return the length of _each_ matching node in the context. So for the context of `[a]` and `"cat"`, it will return a new context of `1` and `3`.
|
||||||
|
|
||||||
|
This being the last operation in the expression, the results will be printed out:
|
||||||
|
|
||||||
|
```
|
||||||
|
1
|
||||||
|
3
|
||||||
|
```
|
||||||
|
|
||||||
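Putting that walk-through together as a runnable sketch (the file name `sample.yml` is just a stand-in for the document above):

```bash
# sample.yml holds the two element document from above:
#   - [a]
#   - "cat"
yq eval '.[] | length' sample.yml
# expected output:
# 1
# 3
```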
|
# Example with an operator that takes arguments.
|
||||||
|
|
||||||
|
Given a document like:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
a: cat
|
||||||
|
b: dog
|
||||||
|
```
|
||||||
|
|
||||||
|
with an expression:
|
||||||
|
|
||||||
|
```
|
||||||
|
.a = .b
|
||||||
|
```
|
||||||
|
|
||||||
|
The `=` operator takes two arguments: a `lhs` expression, which in this case is `.a`, and a `rhs` expression, which is `.b`.
|
||||||
|
|
||||||
|
It pipes the current context (let's call it the 'root' context) through the `lhs` expression of `.a` to return the node
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
cat
|
||||||
|
```
|
||||||
|
|
||||||
|
Note that this node holds not only its value 'cat', but comments and metadata too, including path and parent information.
|
||||||
|
|
||||||
|
The `=` operator then pipes the 'root' context through the `rhs` expression of `.b` to return the node
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
dog
|
||||||
|
```
|
||||||
|
|
||||||
|
Both sides have now been evaluated, so the operator copies the value from the RHS across to the LHS node, and it returns the now updated context:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
a: dog
|
||||||
|
b: dog
|
||||||
|
```
|
||||||
|
|
||||||
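The same walk-through as a runnable sketch (again, `sample.yml` is just a stand-in name for the document above):

```bash
# sample.yml:
#   a: cat
#   b: dog
yq eval '.a = .b' sample.yml
# expected output:
# a: dog
# b: dog
```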
|
# Relative update (e.g. `|=`)
|
||||||
|
There is another form of the `=` operator which we call the relative form. It's very similar to `=` but with one key difference when evaluating the RHS expression.
|
||||||
|
|
||||||
|
In the plain form, we pass in the 'root' level context to the RHS expression. In relative form, we pass in _each result of the LHS_ to the RHS expression. Let's go through an example.
|
||||||
|
|
||||||
|
Given a document like:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
a: 1
|
||||||
|
b: thing
|
||||||
|
```
|
||||||
|
|
||||||
|
with an expression:
|
||||||
|
|
||||||
|
```
|
||||||
|
.a |= . + 1
|
||||||
|
```
|
||||||
|
|
||||||
|
Similar to the `=` operator, `|=` takes two operands, the LHS and RHS.
|
||||||
|
|
||||||
|
It pipes the current context (the whole document) through the LHS expression of `.a` to get the node value:
|
||||||
|
|
||||||
|
```
|
||||||
|
1
|
||||||
|
```
|
||||||
|
|
||||||
|
Now it pipes _that LHS context_ into the RHS expression `. + 1` (whereas in the `=` plain form it piped the original document context into the RHS) to yield:
|
||||||
|
|
||||||
|
|
||||||
|
```
|
||||||
|
2
|
||||||
|
```
|
||||||
|
|
||||||
|
The assignment operator then copies the value from the RHS across to the LHS node, and it returns the now updated 'root' context:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
a: 2
|
||||||
|
b: thing
|
||||||
|
```
|
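And the relative update walk-through as a runnable sketch (`sample.yml` standing in for the document above):

```bash
# sample.yml:
#   a: 1
#   b: thing
yq eval '.a |= . + 1' sample.yml
# expected output:
# a: 2
# b: thing
```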
@ -1,4 +1,4 @@
|
|||||||
Use the `alias` and `anchor` operators to read and write yaml aliases and anchors. The `explode` operator normalises a yaml file (dereference aliases and remove anchor names).
|
Use the `alias` and `anchor` operators to read and write yaml aliases and anchors. The `explode` operator normalises a yaml file (dereferences, or expands, aliases and removes anchor names).
|
||||||
|
|
||||||
`yq` supports merge aliases (like `<<: *blah`) however this is no longer in the standard yaml spec (1.2) and so `yq` will automatically add the `!!merge` tag to these nodes as it is effectively a custom tag.
|
`yq` supports merge aliases (like `<<: *blah`) however this is no longer in the standard yaml spec (1.2) and so `yq` will automatically add the `!!merge` tag to these nodes as it is effectively a custom tag.
|
||||||
|
|
||||||
|
@ -1 +1,9 @@
|
|||||||
The `or` and `and` operators take two parameters and return a boolean result. `not` flips a boolean from true to false, or vice versa. These are most commonly used with the `select` operator to filter particular nodes.
|
The `or` and `and` operators take two parameters and return a boolean result.
|
||||||
|
|
||||||
|
`not` flips a boolean from true to false, or vice versa.
|
||||||
|
|
||||||
|
`any` will return `true` if there are any `true` values in an array sequence, and `all` will return `true` if _all_ elements in an array are true.
|
||||||
|
|
||||||
|
`any_c(condition)` and `all_c(condition)` are like `any` and `all`, but they take a condition expression that is run against each element to determine whether it's `true`. Note: in `jq` you can simply pass a condition to `any` or `all` and it just works - `yq` isn't that clever yet.
|
||||||
|
|
||||||
|
These are most commonly used with the `select` operator to filter particular nodes.
|
||||||
|
@ -1 +1,11 @@
|
|||||||
Use these comment operators to set or retrieve comments.
|
Use these comment operators to set or retrieve comments.
|
||||||
|
|
||||||
|
Like the `=` and `|=` assign operators, the same syntax applies when updating comments:
|
||||||
|
|
||||||
|
|
||||||
|
### plain form: `=`
|
||||||
|
This will set the comments of the LHS nodes to the value of the RHS expression. The RHS is run against the matching nodes in the pipeline.
|
||||||
|
|
||||||
|
### relative form: `|=`
|
||||||
|
Similar to the plain form; however, the RHS evaluates against each matching LHS node. This is useful if you want to set the comments based on a relative expression of the node, for instance its value or path.
|
||||||
|
|
||||||
|
1
pkg/yqlib/doc/headers/Entries.md
Normal file
1
pkg/yqlib/doc/headers/Entries.md
Normal file
@ -0,0 +1 @@
|
|||||||
|
Similar to the same-named functions in `jq`, these functions convert between an object and an array of key-value pairs. This is most useful for performing operations on the keys of maps.
|
3
pkg/yqlib/doc/headers/Keys.md
Normal file
3
pkg/yqlib/doc/headers/Keys.md
Normal file
@ -0,0 +1,3 @@
|
|||||||
|
# Keys
|
||||||
|
|
||||||
|
Use the `keys` operator to return map keys or array indices.
|
20
pkg/yqlib/doc/headers/Multiply (Merge).md
Normal file
@ -0,0 +1,20 @@
|
|||||||
|
Like the multiply operator in `jq`, this multiply operator will do different things depending on the operands. Currently numbers, arrays and objects are supported.
|
||||||
|
|
||||||
|
## Objects and arrays - merging
|
||||||
|
Objects are merged deeply, matching on keys. By default, array values override and are not deeply merged.
|
||||||
|
|
||||||
|
Note that when merging objects, this operator returns the merged object (not the parent). This will be clearer in the examples below.
|
||||||
|
|
||||||
|
### Merge Flags
|
||||||
|
You can control how objects are merged by using one or more of the following flags. Multiple flags can be used together, e.g. `.a *+? .b`. See examples below
|
||||||
|
|
||||||
|
- `+` to append arrays
|
||||||
|
- `?` to only merge existing fields
|
||||||
|
- `d` to deeply merge arrays
|
||||||
|
|
||||||
|
### Merging files
|
||||||
|
Note the use of `eval-all` to ensure all documents are loaded into memory.
|
||||||
|
|
||||||
|
```bash
|
||||||
|
yq eval-all 'select(fileIndex == 0) * select(fileIndex == 1)' file1.yaml file2.yaml
|
||||||
|
```
|
@ -1,14 +0,0 @@
|
|||||||
Like the multiple operator in `jq`, depending on the operands, this multiply operator will do different things. Currently only objects are supported, which have the effect of merging the RHS into the LHS.
|
|
||||||
|
|
||||||
Upcoming versions of `yq` will add support for other types of multiplication (numbers, strings).
|
|
||||||
|
|
||||||
To concatenate when merging objects, use the `*+` form (see examples below). This will recursively merge objects, appending arrays when it encounters them.
|
|
||||||
|
|
||||||
Note that when merging objects, this operator returns the merged object (not the parent). This will be clearer in the examples below.
|
|
||||||
|
|
||||||
## Merging files
|
|
||||||
Note the use of eval-all to ensure all documents are loaded into memory.
|
|
||||||
|
|
||||||
```bash
|
|
||||||
yq eval-all 'select(fileIndex == 0) * select(fileIndex == 1)' file1.yaml file2.yaml
|
|
||||||
```
|
|
21
pkg/yqlib/doc/headers/Reduce.md
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
Reduce is a powerful way to process a collection of data into a new form.
|
||||||
|
|
||||||
|
```
|
||||||
|
<exp> as $<name> ireduce (<init>; <block>)
|
||||||
|
```
|
||||||
|
|
||||||
|
e.g.
|
||||||
|
|
||||||
|
```
|
||||||
|
.[] as $item ireduce (0; . + $item)
|
||||||
|
```
|
||||||
|
|
||||||
|
On the LHS we configure the collection of items to be reduced (`<exp>`) as well as what each element will be called (`$<name>`). Note that the array has been splatted into its individual elements.
|
||||||
|
|
||||||
|
On the RHS there is `<init>`, the starting value of the accumulator and `<block>`, the expression that will update the accumulator for each element in the collection. Note that within the block expression, `.` will evaluate to the current value of the accumulator.
|
||||||
|
|
||||||
|
## yq vs jq syntax
|
||||||
|
Reduce syntax in `yq` is a little different from `jq`: `yq` (currently) isn't as sophisticated as `jq` and only supports infix notation (e.g. `a + b`, where the operator sits between the two parameters), whereas `jq` uses a mix of infix notation and _prefix_ notation (e.g. `reduce a b` is like writing `+ a b`).
|
||||||
|
|
||||||
|
To that end, the reduce operator is called `ireduce`, for backwards compatibility if a `jq`-like prefix version of `reduce` is ever added.
|
||||||
|
|
3
pkg/yqlib/doc/headers/Split into Documents.md
Normal file
3
pkg/yqlib/doc/headers/Split into Documents.md
Normal file
@ -0,0 +1,3 @@
|
|||||||
|
# Split into Documents
|
||||||
|
|
||||||
|
This operator splits all matches into separate documents
|
1
pkg/yqlib/doc/headers/String Operators.md
Normal file
1
pkg/yqlib/doc/headers/String Operators.md
Normal file
@ -0,0 +1 @@
|
|||||||
|
# String Operators
|
1
pkg/yqlib/doc/headers/Unique.md
Normal file
1
pkg/yqlib/doc/headers/Unique.md
Normal file
@ -0,0 +1 @@
|
|||||||
|
This is used to filter out duplicated items in an array.
|
1
pkg/yqlib/doc/headers/Variable Operators.md
Normal file
1
pkg/yqlib/doc/headers/Variable Operators.md
Normal file
@ -0,0 +1 @@
|
|||||||
|
For more complex scenarios, variables can be used to hold the values of expressions so they can be used in other expressions.
|
@ -76,6 +76,8 @@ func mapKeysToStrings(node *yaml.Node) {
|
|||||||
|
|
||||||
func NewJsonEncoder(destination io.Writer, indent int) Encoder {
|
func NewJsonEncoder(destination io.Writer, indent int) Encoder {
|
||||||
var encoder = json.NewEncoder(destination)
|
var encoder = json.NewEncoder(destination)
|
||||||
|
encoder.SetEscapeHTML(false) // do not escape html chars e.g. &, <, >
|
||||||
|
|
||||||
var indentString = ""
|
var indentString = ""
|
||||||
|
|
||||||
for index := 0; index < indent; index++ {
|
for index := 0; index < indent; index++ {
|
||||||
@ -153,11 +155,15 @@ func (o *orderedMap) UnmarshalJSON(data []byte) error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (o orderedMap) MarshalJSON() ([]byte, error) {
|
func (o orderedMap) MarshalJSON() ([]byte, error) {
|
||||||
if o.kv == nil {
|
|
||||||
return json.Marshal(o.altVal)
|
|
||||||
}
|
|
||||||
buf := new(bytes.Buffer)
|
buf := new(bytes.Buffer)
|
||||||
enc := json.NewEncoder(buf)
|
enc := json.NewEncoder(buf)
|
||||||
|
enc.SetEscapeHTML(false) // do not escape html chars e.g. &, <, >
|
||||||
|
if o.kv == nil {
|
||||||
|
if err := enc.Encode(o.altVal); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return buf.Bytes(), nil
|
||||||
|
}
|
||||||
buf.WriteByte('{')
|
buf.WriteByte('{')
|
||||||
for idx, el := range o.kv {
|
for idx, el := range o.kv {
|
||||||
if err := enc.Encode(el.K); err != nil {
|
if err := enc.Encode(el.K); err != nil {
|
||||||
|
@ -9,29 +9,11 @@ import (
|
|||||||
"github.com/mikefarah/yq/v4/test"
|
"github.com/mikefarah/yq/v4/test"
|
||||||
)
|
)
|
||||||
|
|
||||||
var sampleYaml = `zabbix: winner
|
func yamlToJson(sampleYaml string, indent int) string {
|
||||||
apple: great
|
|
||||||
banana:
|
|
||||||
- {cobra: kai, angus: bob}
|
|
||||||
`
|
|
||||||
|
|
||||||
var expectedJson = `{
|
|
||||||
"zabbix": "winner",
|
|
||||||
"apple": "great",
|
|
||||||
"banana": [
|
|
||||||
{
|
|
||||||
"cobra": "kai",
|
|
||||||
"angus": "bob"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
`
|
|
||||||
|
|
||||||
func TestJsonEncoderPreservesObjectOrder(t *testing.T) {
|
|
||||||
var output bytes.Buffer
|
var output bytes.Buffer
|
||||||
writer := bufio.NewWriter(&output)
|
writer := bufio.NewWriter(&output)
|
||||||
|
|
||||||
var jsonEncoder = NewJsonEncoder(writer, 2)
|
var jsonEncoder = NewJsonEncoder(writer, indent)
|
||||||
inputs, err := readDocuments(strings.NewReader(sampleYaml), "sample.yml", 0)
|
inputs, err := readDocuments(strings.NewReader(sampleYaml), "sample.yml", 0)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
panic(err)
|
panic(err)
|
||||||
@ -42,6 +24,33 @@ func TestJsonEncoderPreservesObjectOrder(t *testing.T) {
|
|||||||
panic(err)
|
panic(err)
|
||||||
}
|
}
|
||||||
writer.Flush()
|
writer.Flush()
|
||||||
test.AssertResult(t, expectedJson, output.String())
|
|
||||||
|
|
||||||
|
return strings.TrimSuffix(output.String(), "\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestJsonEncoderPreservesObjectOrder(t *testing.T) {
|
||||||
|
var sampleYaml = `zabbix: winner
|
||||||
|
apple: great
|
||||||
|
banana:
|
||||||
|
- {cobra: kai, angus: bob}
|
||||||
|
`
|
||||||
|
var expectedJson = `{
|
||||||
|
"zabbix": "winner",
|
||||||
|
"apple": "great",
|
||||||
|
"banana": [
|
||||||
|
{
|
||||||
|
"cobra": "kai",
|
||||||
|
"angus": "bob"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}`
|
||||||
|
var actualJson = yamlToJson(sampleYaml, 2)
|
||||||
|
test.AssertResult(t, expectedJson, actualJson)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestJsonEncoderDoesNotEscapeHTMLChars(t *testing.T) {
|
||||||
|
var sampleYaml = `build: "( ./lint && ./format && ./compile ) < src.code"`
|
||||||
|
var expectedJson = `{"build":"( ./lint && ./format && ./compile ) < src.code"}`
|
||||||
|
var actualJson = yamlToJson(sampleYaml, 0)
|
||||||
|
test.AssertResult(t, expectedJson, actualJson)
|
||||||
}
|
}
|
||||||
|
@@ -5,9 +5,6 @@ import (
 "strings"
 )

-var myPathTokeniser = newExpressionTokeniser()
-var myPathPostfixer = newExpressionPostFixer()

 type ExpressionNode struct {
 Operation *Operation
 Lhs *ExpressionNode
@@ -19,19 +16,21 @@ type ExpressionParser interface {
 }

 type expressionParserImpl struct {
+pathTokeniser expressionTokeniser
+pathPostFixer expressionPostFixer
 }

 func NewExpressionParser() ExpressionParser {
-return &expressionParserImpl{}
+return &expressionParserImpl{newExpressionTokeniser(), newExpressionPostFixer()}
 }

 func (p *expressionParserImpl) ParseExpression(expression string) (*ExpressionNode, error) {
-tokens, err := myPathTokeniser.Tokenise(expression)
+tokens, err := p.pathTokeniser.Tokenise(expression)
 if err != nil {
 return nil, err
 }
 var Operations []*Operation
-Operations, err = myPathPostfixer.ConvertToPostfix(tokens)
+Operations, err = p.pathPostFixer.ConvertToPostfix(tokens)
 if err != nil {
 return nil, err
 }
@@ -70,7 +69,7 @@ func (p *expressionParserImpl) createExpressionTree(postFixPath []*Operation) (*
 stack = append(stack, &newNode)
 }
 if len(stack) != 1 {
-return nil, fmt.Errorf("expected end of expression but found '%v', please check expression syntax", strings.TrimSpace(stack[1].Operation.StringValue))
+return nil, fmt.Errorf("Bad expression, please check expression syntax")
 }
 return stack[0], nil
 }
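The parser here moves from package-level tokeniser and postfixer variables to struct fields wired in the constructor. A minimal sketch of that injection pattern, with illustrative interface names rather than yq's real ones:

```go
package main

import "fmt"

// Tokeniser and PostFixer stand in for the parser's collaborators; the names
// and signatures are illustrative, not yq's actual interfaces.
type Tokeniser interface{ Tokenise(expr string) ([]string, error) }
type PostFixer interface{ ConvertToPostfix(tokens []string) ([]string, error) }

type parser struct {
	tokeniser Tokeniser
	postFixer PostFixer
}

// NewParser injects the collaborators instead of reaching for package-level
// variables, which makes the parser easier to exercise with fakes in tests.
func NewParser(t Tokeniser, p PostFixer) *parser {
	return &parser{tokeniser: t, postFixer: p}
}

func (p *parser) Parse(expr string) ([]string, error) {
	tokens, err := p.tokeniser.Tokenise(expr)
	if err != nil {
		return nil, err
	}
	return p.postFixer.ConvertToPostfix(tokens)
}

func main() { fmt.Println("wire real implementations in here") }
```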
@@ -38,5 +38,5 @@ func TestPathTreeOneArgForOneArgOp(t *testing.T) {

 func TestPathTreeExtraArgs(t *testing.T) {
 _, err := NewExpressionParser().ParseExpression("sortKeys(.) explode(.)")
-test.AssertResultComplex(t, "expected end of expression but found 'explode', please check expression syntax", err.Error())
+test.AssertResultComplex(t, "Bad expression, please check expression syntax", err.Error())
 }
@@ -20,20 +20,22 @@ func newExpressionPostFixer() expressionPostFixer {
 func popOpToResult(opStack []*token, result []*Operation) ([]*token, []*Operation) {
 var newOp *token
 opStack, newOp = opStack[0:len(opStack)-1], opStack[len(opStack)-1]
+log.Debugf("popped %v from opstack to results", newOp.toString(true))
 return opStack, append(result, newOp.Operation)
 }

 func (p *expressionPostFixerImpl) ConvertToPostfix(infixTokens []*token) ([]*Operation, error) {
 var result []*Operation
-// surround the whole thing with quotes
+// surround the whole thing with brackets
-var opStack = []*token{&token{TokenType: openBracket}}
+var opStack = []*token{{TokenType: openBracket}}
 var tokens = append(infixTokens, &token{TokenType: closeBracket})

 for _, currentToken := range tokens {
-log.Debugf("postfix processing currentToken %v, %v", currentToken.toString(), currentToken.Operation)
+log.Debugf("postfix processing currentToken %v", currentToken.toString(true))
 switch currentToken.TokenType {
 case openBracket, openCollect, openCollectObject:
 opStack = append(opStack, currentToken)
+log.Debugf("put %v onto the opstack", currentToken.toString(true))
 case closeCollect, closeCollectObject:
 var opener tokenType = openCollect
 var collectOperator *operationType = collectOpType
@@ -41,23 +43,37 @@ func (p *expressionPostFixerImpl) ConvertToPostfix(infixTokens []*token) ([]*Ope
 opener = openCollectObject
 collectOperator = collectObjectOpType
 }
-itemsInMiddle := false
 for len(opStack) > 0 && opStack[len(opStack)-1].TokenType != opener {
 opStack, result = popOpToResult(opStack, result)
-itemsInMiddle = true
-}
-if !itemsInMiddle {
-// must be an empty collection, add the empty object as a LHS parameter
-result = append(result, &Operation{OperationType: emptyOpType})
 }
 if len(opStack) == 0 {
 return nil, errors.New("Bad path expression, got close collect brackets without matching opening bracket")
 }
-// now we should have [] as the last element on the opStack, get rid of it
+// now we should have [ as the last element on the opStack, get rid of it
 opStack = opStack[0 : len(opStack)-1]
-//and append a collect to the opStack
+log.Debugf("deleteing open bracket from opstack")
-opStack = append(opStack, &token{TokenType: operationToken, Operation: &Operation{OperationType: shortPipeOpType}})
-opStack = append(opStack, &token{TokenType: operationToken, Operation: &Operation{OperationType: collectOperator}})
+//and append a collect to the result
+// hack - see if there's the optional traverse flag
+// on the close op - move it to the collect op.
+// allows for .["cat"]?
+prefs := traversePreferences{}
+closeTokenMatch := string(currentToken.Match.Bytes)
+if closeTokenMatch[len(closeTokenMatch)-1:] == "?" {
+prefs.OptionalTraverse = true
+}
+result = append(result, &Operation{OperationType: collectOperator, Preferences: prefs})
+log.Debugf("put collect onto the result")
+result = append(result, &Operation{OperationType: shortPipeOpType})
+log.Debugf("put shortpipe onto the result")

+//traverseArrayCollect is a sneaky op that needs to be included too
+//when closing a []
+if len(opStack) > 0 && opStack[len(opStack)-1].Operation != nil && opStack[len(opStack)-1].Operation.OperationType == traverseArrayOpType {
+opStack, result = popOpToResult(opStack, result)
+}

 case closeBracket:
 for len(opStack) > 0 && opStack[len(opStack)-1].TokenType != openBracket {
 opStack, result = popOpToResult(opStack, result)
@@ -73,11 +89,12 @@ func (p *expressionPostFixerImpl) ConvertToPostfix(infixTokens []*token) ([]*Ope
 // pop off higher precedent operators onto the result
 for len(opStack) > 0 &&
 opStack[len(opStack)-1].TokenType == operationToken &&
-opStack[len(opStack)-1].Operation.OperationType.Precedence >= currentPrecedence {
+opStack[len(opStack)-1].Operation.OperationType.Precedence > currentPrecedence {
 opStack, result = popOpToResult(opStack, result)
 }
 // add this operator to the opStack
 opStack = append(opStack, currentToken)
+log.Debugf("put %v onto the opstack", currentToken.toString(true))
 }
 }
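ConvertToPostfix is a shunting-yard style conversion: operators wait on a stack and are popped to the output according to precedence, and the hunk above tightens the pop condition from `>=` to `>`. A generic sketch with toy tokens and precedence values (not yq's types) showing how that comparison decides grouping:

```go
package main

import "fmt"

// prec is a toy precedence table; the numbers are illustrative only.
var prec = map[string]int{"|": 30, "+": 42, "*": 42}

// toPostfix is a minimal shunting-yard sketch. With a strict ">" comparison,
// equal-precedence operators stay on the stack, so "a + b * c" groups as
// a + (b * c); with ">=" they would be popped first, grouping as (a + b) * c.
func toPostfix(tokens []string) []string {
	var out, stack []string
	for _, tok := range tokens {
		p, isOp := prec[tok]
		if !isOp {
			out = append(out, tok)
			continue
		}
		for len(stack) > 0 && prec[stack[len(stack)-1]] > p { // strict ">", as in the new code
			out = append(out, stack[len(stack)-1])
			stack = stack[:len(stack)-1]
		}
		stack = append(stack, tok)
	}
	for len(stack) > 0 {
		out = append(out, stack[len(stack)-1])
		stack = stack[:len(stack)-1]
	}
	return out
}

func main() {
	fmt.Println(toPostfix([]string{"a", "|", "b", "+", "c"})) // [a b c + |]
}
```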
@@ -12,40 +12,115 @@ var pathTests = []struct {
 expectedTokens []interface{}
 expectedPostFix []interface{}
 }{
+{
+`.[0]`,
+append(make([]interface{}, 0), "SELF", "TRAVERSE_ARRAY", "[", "0 (int64)", "]"),
+append(make([]interface{}, 0), "SELF", "0 (int64)", "COLLECT", "SHORT_PIPE", "TRAVERSE_ARRAY"),
+},
+{
+`.[0][1]`,
+append(make([]interface{}, 0), "SELF", "TRAVERSE_ARRAY", "[", "0 (int64)", "]", "TRAVERSE_ARRAY", "[", "1 (int64)", "]"),
+append(make([]interface{}, 0), "SELF", "0 (int64)", "COLLECT", "SHORT_PIPE", "TRAVERSE_ARRAY", "1 (int64)", "COLLECT", "SHORT_PIPE", "TRAVERSE_ARRAY"),
+},
+{
+`"\""`,
+append(make([]interface{}, 0), "\" (string)"),
+append(make([]interface{}, 0), "\" (string)"),
+},
+{
+`[]|join(".")`,
+append(make([]interface{}, 0), "[", "EMPTY", "]", "PIPE", "JOIN", "(", ". (string)", ")"),
+append(make([]interface{}, 0), "EMPTY", "COLLECT", "SHORT_PIPE", ". (string)", "JOIN", "PIPE"),
+},
+{
+`{"cool": .b or .c}`,
+append(make([]interface{}, 0), "{", "cool (string)", "CREATE_MAP", "b", "OR", "c", "}"),
+append(make([]interface{}, 0), "cool (string)", "b", "c", "OR", "CREATE_MAP", "COLLECT_OBJECT", "SHORT_PIPE"),
+},
+{
+`{"cool": []|join(".")}`,
+append(make([]interface{}, 0), "{", "cool (string)", "CREATE_MAP", "[", "EMPTY", "]", "PIPE", "JOIN", "(", ". (string)", ")", "}"),
+append(make([]interface{}, 0), "cool (string)", "EMPTY", "COLLECT", "SHORT_PIPE", ". (string)", "JOIN", "PIPE", "CREATE_MAP", "COLLECT_OBJECT", "SHORT_PIPE"),
+},
+{
+`.a as $item ireduce (0; . + $item)`, // note - add code to shuffle reduce to this position for postfix
+append(make([]interface{}, 0), "a", "ASSIGN_VARIABLE", "GET_VARIABLE", "REDUCE", "(", "0 (int64)", "BLOCK", "SELF", "ADD", "GET_VARIABLE", ")"),
+append(make([]interface{}, 0), "a", "GET_VARIABLE", "ASSIGN_VARIABLE", "0 (int64)", "SELF", "GET_VARIABLE", "ADD", "BLOCK", "REDUCE"),
+},
+{
+`.a | .b | .c`,
+append(make([]interface{}, 0), "a", "PIPE", "b", "PIPE", "c"),
+append(make([]interface{}, 0), "a", "b", "c", "PIPE", "PIPE"),
+},
 {
 `[]`,
-append(make([]interface{}, 0), "[", "]"),
+append(make([]interface{}, 0), "[", "EMPTY", "]"),
 append(make([]interface{}, 0), "EMPTY", "COLLECT", "SHORT_PIPE"),
 },
+{
+`{}`,
+append(make([]interface{}, 0), "{", "EMPTY", "}"),
+append(make([]interface{}, 0), "EMPTY", "COLLECT_OBJECT", "SHORT_PIPE"),
+},
+{
+`[{}]`,
+append(make([]interface{}, 0), "[", "{", "EMPTY", "}", "]"),
+append(make([]interface{}, 0), "EMPTY", "COLLECT_OBJECT", "SHORT_PIPE", "COLLECT", "SHORT_PIPE"),
+},
+{
+`.realnames as $names | $names["anon"]`,
+append(make([]interface{}, 0), "realnames", "ASSIGN_VARIABLE", "GET_VARIABLE", "PIPE", "GET_VARIABLE", "TRAVERSE_ARRAY", "[", "anon (string)", "]"),
+append(make([]interface{}, 0), "realnames", "GET_VARIABLE", "ASSIGN_VARIABLE", "GET_VARIABLE", "anon (string)", "COLLECT", "SHORT_PIPE", "TRAVERSE_ARRAY", "PIPE"),
+},
+{
+`.b[.a]`,
+append(make([]interface{}, 0), "b", "TRAVERSE_ARRAY", "[", "a", "]"),
+append(make([]interface{}, 0), "b", "a", "COLLECT", "SHORT_PIPE", "TRAVERSE_ARRAY"),
+},
+{
+`.b[.a]?`,
+append(make([]interface{}, 0), "b", "TRAVERSE_ARRAY", "[", "a", "]"),
+append(make([]interface{}, 0), "b", "a", "COLLECT", "SHORT_PIPE", "TRAVERSE_ARRAY"),
+},
 {
 `.[]`,
-append(make([]interface{}, 0), "TRAVERSE_ARRAY", "[", "]"),
+append(make([]interface{}, 0), "SELF", "TRAVERSE_ARRAY", "[", "EMPTY", "]"),
-append(make([]interface{}, 0), "EMPTY", "COLLECT", "SHORT_PIPE", "TRAVERSE_ARRAY"),
+append(make([]interface{}, 0), "SELF", "EMPTY", "COLLECT", "SHORT_PIPE", "TRAVERSE_ARRAY"),
 },
 {
 `.a[]`,
-append(make([]interface{}, 0), "a", "SHORT_PIPE", "TRAVERSE_ARRAY", "[", "]"),
+append(make([]interface{}, 0), "a", "TRAVERSE_ARRAY", "[", "EMPTY", "]"),
-append(make([]interface{}, 0), "a", "EMPTY", "COLLECT", "SHORT_PIPE", "TRAVERSE_ARRAY", "SHORT_PIPE"),
+append(make([]interface{}, 0), "a", "EMPTY", "COLLECT", "SHORT_PIPE", "TRAVERSE_ARRAY"),
+},
+{
+`.a[]?`,
+append(make([]interface{}, 0), "a", "TRAVERSE_ARRAY", "[", "EMPTY", "]"),
+append(make([]interface{}, 0), "a", "EMPTY", "COLLECT", "SHORT_PIPE", "TRAVERSE_ARRAY"),
 },
 {
 `.a.[]`,
-append(make([]interface{}, 0), "a", "SHORT_PIPE", "TRAVERSE_ARRAY", "[", "]"),
+append(make([]interface{}, 0), "a", "TRAVERSE_ARRAY", "[", "EMPTY", "]"),
-append(make([]interface{}, 0), "a", "EMPTY", "COLLECT", "SHORT_PIPE", "TRAVERSE_ARRAY", "SHORT_PIPE"),
+append(make([]interface{}, 0), "a", "EMPTY", "COLLECT", "SHORT_PIPE", "TRAVERSE_ARRAY"),
 },
 {
 `.a[0]`,
-append(make([]interface{}, 0), "a", "SHORT_PIPE", "TRAVERSE_ARRAY", "[", "0 (int64)", "]"),
+append(make([]interface{}, 0), "a", "TRAVERSE_ARRAY", "[", "0 (int64)", "]"),
-append(make([]interface{}, 0), "a", "0 (int64)", "COLLECT", "SHORT_PIPE", "TRAVERSE_ARRAY", "SHORT_PIPE"),
+append(make([]interface{}, 0), "a", "0 (int64)", "COLLECT", "SHORT_PIPE", "TRAVERSE_ARRAY"),
+},
+{
+`.a[0]?`,
+append(make([]interface{}, 0), "a", "TRAVERSE_ARRAY", "[", "0 (int64)", "]"),
+append(make([]interface{}, 0), "a", "0 (int64)", "COLLECT", "SHORT_PIPE", "TRAVERSE_ARRAY"),
 },
 {
 `.a.[0]`,
-append(make([]interface{}, 0), "a", "SHORT_PIPE", "TRAVERSE_ARRAY", "[", "0 (int64)", "]"),
+append(make([]interface{}, 0), "a", "TRAVERSE_ARRAY", "[", "0 (int64)", "]"),
-append(make([]interface{}, 0), "a", "0 (int64)", "COLLECT", "SHORT_PIPE", "TRAVERSE_ARRAY", "SHORT_PIPE"),
+append(make([]interface{}, 0), "a", "0 (int64)", "COLLECT", "SHORT_PIPE", "TRAVERSE_ARRAY"),
 },
 {
 `.a[].c`,
-append(make([]interface{}, 0), "a", "SHORT_PIPE", "TRAVERSE_ARRAY", "[", "]", "SHORT_PIPE", "c"),
+append(make([]interface{}, 0), "a", "TRAVERSE_ARRAY", "[", "EMPTY", "]", "SHORT_PIPE", "c"),
-append(make([]interface{}, 0), "a", "EMPTY", "COLLECT", "SHORT_PIPE", "TRAVERSE_ARRAY", "SHORT_PIPE", "c", "SHORT_PIPE"),
+append(make([]interface{}, 0), "a", "EMPTY", "COLLECT", "SHORT_PIPE", "TRAVERSE_ARRAY", "c", "SHORT_PIPE"),
 },
 {
 `[3]`,
@@ -53,24 +128,34 @@ var pathTests = []struct {
 append(make([]interface{}, 0), "3 (int64)", "COLLECT", "SHORT_PIPE"),
 },
 {
-`d0.a`,
+`.key.array + .key.array2`,
-append(make([]interface{}, 0), "d0", "SHORT_PIPE", "a"),
+append(make([]interface{}, 0), "key", "SHORT_PIPE", "array", "ADD", "key", "SHORT_PIPE", "array2"),
-append(make([]interface{}, 0), "d0", "a", "SHORT_PIPE"),
+append(make([]interface{}, 0), "key", "array", "SHORT_PIPE", "key", "array2", "SHORT_PIPE", "ADD"),
+},
+{
+`.key.array * .key.array2`,
+append(make([]interface{}, 0), "key", "SHORT_PIPE", "array", "MULTIPLY", "key", "SHORT_PIPE", "array2"),
+append(make([]interface{}, 0), "key", "array", "SHORT_PIPE", "key", "array2", "SHORT_PIPE", "MULTIPLY"),
+},
+{
+`.key.array // .key.array2`,
+append(make([]interface{}, 0), "key", "SHORT_PIPE", "array", "ALTERNATIVE", "key", "SHORT_PIPE", "array2"),
+append(make([]interface{}, 0), "key", "array", "SHORT_PIPE", "key", "array2", "SHORT_PIPE", "ALTERNATIVE"),
 },
 {
 `.a | .[].b == "apple"`,
-append(make([]interface{}, 0), "a", "PIPE", "TRAVERSE_ARRAY", "[", "]", "SHORT_PIPE", "b", "EQUALS", "apple (string)"),
+append(make([]interface{}, 0), "a", "PIPE", "SELF", "TRAVERSE_ARRAY", "[", "EMPTY", "]", "SHORT_PIPE", "b", "EQUALS", "apple (string)"),
-append(make([]interface{}, 0), "a", "EMPTY", "COLLECT", "SHORT_PIPE", "TRAVERSE_ARRAY", "b", "SHORT_PIPE", "apple (string)", "EQUALS", "PIPE"),
+append(make([]interface{}, 0), "a", "SELF", "EMPTY", "COLLECT", "SHORT_PIPE", "TRAVERSE_ARRAY", "b", "SHORT_PIPE", "apple (string)", "EQUALS", "PIPE"),
 },
 {
 `(.a | .[].b) == "apple"`,
-append(make([]interface{}, 0), "(", "a", "PIPE", "TRAVERSE_ARRAY", "[", "]", "SHORT_PIPE", "b", ")", "EQUALS", "apple (string)"),
+append(make([]interface{}, 0), "(", "a", "PIPE", "SELF", "TRAVERSE_ARRAY", "[", "EMPTY", "]", "SHORT_PIPE", "b", ")", "EQUALS", "apple (string)"),
-append(make([]interface{}, 0), "a", "EMPTY", "COLLECT", "SHORT_PIPE", "TRAVERSE_ARRAY", "b", "SHORT_PIPE", "PIPE", "apple (string)", "EQUALS"),
+append(make([]interface{}, 0), "a", "SELF", "EMPTY", "COLLECT", "SHORT_PIPE", "TRAVERSE_ARRAY", "b", "SHORT_PIPE", "PIPE", "apple (string)", "EQUALS"),
 },
 {
 `.[] | select(. == "*at")`,
-append(make([]interface{}, 0), "TRAVERSE_ARRAY", "[", "]", "PIPE", "SELECT", "(", "SELF", "EQUALS", "*at (string)", ")"),
+append(make([]interface{}, 0), "SELF", "TRAVERSE_ARRAY", "[", "EMPTY", "]", "PIPE", "SELECT", "(", "SELF", "EQUALS", "*at (string)", ")"),
-append(make([]interface{}, 0), "EMPTY", "COLLECT", "SHORT_PIPE", "TRAVERSE_ARRAY", "SELF", "*at (string)", "EQUALS", "SELECT", "PIPE"),
+append(make([]interface{}, 0), "SELF", "EMPTY", "COLLECT", "SHORT_PIPE", "TRAVERSE_ARRAY", "SELF", "*at (string)", "EQUALS", "SELECT", "PIPE"),
 },
 {
 `[true]`,
@@ -103,9 +188,9 @@ var pathTests = []struct {
 append(make([]interface{}, 0), "a", "mike (string)", "CREATE_MAP", "COLLECT_OBJECT", "SHORT_PIPE"),
 },
 {
-`{.a: .c, .b.[]: .f.g.[]}`,
+`{.a: .c, .b.[]: .f.g[]}`,
-append(make([]interface{}, 0), "{", "a", "CREATE_MAP", "c", "UNION", "b", "SHORT_PIPE", "TRAVERSE_ARRAY", "[", "]", "CREATE_MAP", "f", "SHORT_PIPE", "g", "SHORT_PIPE", "TRAVERSE_ARRAY", "[", "]", "}"),
+append(make([]interface{}, 0), "{", "a", "CREATE_MAP", "c", "UNION", "b", "TRAVERSE_ARRAY", "[", "EMPTY", "]", "CREATE_MAP", "f", "SHORT_PIPE", "g", "TRAVERSE_ARRAY", "[", "EMPTY", "]", "}"),
-append(make([]interface{}, 0), "a", "c", "CREATE_MAP", "b", "EMPTY", "COLLECT", "SHORT_PIPE", "TRAVERSE_ARRAY", "SHORT_PIPE", "f", "g", "SHORT_PIPE", "EMPTY", "COLLECT", "SHORT_PIPE", "TRAVERSE_ARRAY", "SHORT_PIPE", "CREATE_MAP", "UNION", "COLLECT_OBJECT", "SHORT_PIPE"),
+append(make([]interface{}, 0), "a", "c", "CREATE_MAP", "b", "EMPTY", "COLLECT", "SHORT_PIPE", "TRAVERSE_ARRAY", "f", "g", "EMPTY", "COLLECT", "SHORT_PIPE", "TRAVERSE_ARRAY", "SHORT_PIPE", "CREATE_MAP", "UNION", "COLLECT_OBJECT", "SHORT_PIPE"),
 },
 {
 `explode(.a.b)`,
@@ -157,11 +242,6 @@ var pathTests = []struct {
 append(make([]interface{}, 0), "foo*", "PIPE", "(", "SELF", "ASSIGN_STYLE", "flow (string)", ")"),
 append(make([]interface{}, 0), "foo*", "SELF", "flow (string)", "ASSIGN_STYLE", "PIPE"),
 },
-{
-`{}`,
-append(make([]interface{}, 0), "{", "}"),
-append(make([]interface{}, 0), "EMPTY", "COLLECT_OBJECT", "SHORT_PIPE"),
-},
 }

 var tokeniser = newExpressionTokeniser()
@@ -175,7 +255,7 @@ func TestPathParsing(t *testing.T) {
 }
 var tokenValues []interface{}
 for _, token := range tokens {
-tokenValues = append(tokenValues, token.toString())
+tokenValues = append(tokenValues, token.toString(false))
 }
 test.AssertResultComplexWithContext(t, tt.expectedTokens, tokenValues, fmt.Sprintf("tokenise: %v", tt.path))
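The expectedPostFix lists above read as operands followed by the operator that consumes them. A small sketch that walks such a list with a stack and prints the nesting; the argument counts in the map are illustrative only, not yq's real NumArgs values:

```go
package main

import "fmt"

// numArgs mirrors the idea of the operators' argument counts; the values are
// assumptions chosen just so the example below resolves cleanly.
var numArgs = map[string]int{"SHORT_PIPE": 2, "PIPE": 2, "TRAVERSE_ARRAY": 2, "COLLECT": 0}

// describe walks a postfix token list (like the expectedPostFix entries above)
// and prints it as a nested expression, to show how the flat list is read.
func describe(postfix []string) string {
	var stack []string
	for _, tok := range postfix {
		n, isOp := numArgs[tok]
		if !isOp {
			stack = append(stack, tok)
			continue
		}
		args := stack[len(stack)-n:]
		stack = stack[:len(stack)-n]
		stack = append(stack, fmt.Sprintf("%s(%v)", tok, args))
	}
	return stack[len(stack)-1]
}

func main() {
	// corresponds to the `.a[0]` case: a, 0, COLLECT, SHORT_PIPE, TRAVERSE_ARRAY
	fmt.Println(describe([]string{"a", "0 (int64)", "COLLECT", "SHORT_PIPE", "TRAVERSE_ARRAY"}))
}
```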
@@ -3,6 +3,7 @@ package yqlib
 import (
 "fmt"
 "strconv"
+"strings"

 lex "github.com/timtadh/lexmachine"
 "github.com/timtadh/lexmachine/machines"
@@ -28,14 +29,17 @@ const (
 type token struct {
 TokenType tokenType
 Operation *Operation
 AssignOperation *Operation // e.g. tag (GetTag) op becomes AssignTag if '=' follows it
 CheckForPostTraverse bool // e.g. [1]cat should really be [1].cat
+Match *machines.Match // match that created this token

 }

-func (t *token) toString() string {
+func (t *token) toString(detail bool) string {
 if t.TokenType == operationToken {
-log.Debug("toString, its an op")
+if detail {
+return fmt.Sprintf("%v (%v)", t.Operation.toString(), t.Operation.OperationType.Precedence)
+}
 return t.Operation.toString()
 } else if t.TokenType == openBracket {
 return "("
@@ -60,26 +64,19 @@ func (t *token) toString() string {
 func pathToken(wrapped bool) lex.Action {
 return func(s *lex.Scanner, m *machines.Match) (interface{}, error) {
 value := string(m.Bytes)
+prefs := traversePreferences{}

+if value[len(value)-1:] == "?" {
+prefs.OptionalTraverse = true
+value = value[:len(value)-1]
+}

 value = value[1:]
 if wrapped {
 value = unwrap(value)
 }
 log.Debug("PathToken %v", value)
-op := &Operation{OperationType: traversePathOpType, Value: value, StringValue: value}
+op := &Operation{OperationType: traversePathOpType, Value: value, StringValue: value, Preferences: prefs}
-return &token{TokenType: operationToken, Operation: op, CheckForPostTraverse: true}, nil
-}
-}

-func documentToken() lex.Action {
-return func(s *lex.Scanner, m *machines.Match) (interface{}, error) {
-var numberString = string(m.Bytes)
-numberString = numberString[1:]
-var number, errParsingInt = strconv.ParseInt(numberString, 10, 64) // nolint
-if errParsingInt != nil {
-return nil, errParsingInt
-}
-log.Debug("documentToken %v", string(m.Bytes))
-op := &Operation{OperationType: documentFilterOpType, Value: number, StringValue: numberString}
 return &token{TokenType: operationToken, Operation: op, CheckForPostTraverse: true}, nil
 }
 }
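The reworked pathToken treats a trailing `?` as an optional-traverse flag and strips it before storing the path. A tiny sketch of that suffix handling; the function name is made up for the example:

```go
package main

import (
	"fmt"
	"strings"
)

// splitOptional mirrors the idea in the new pathToken action: a trailing "?"
// marks the traversal as optional and is removed from the matched text.
func splitOptional(match string) (path string, optional bool) {
	if strings.HasSuffix(match, "?") {
		return strings.TrimSuffix(match, "?"), true
	}
	return match, false
}

func main() {
	fmt.Println(splitOptional(`.a[0]?`)) // .a[0] true
	fmt.Println(splitOptional(`.name`))  // .name false
}
```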
@@ -101,6 +98,24 @@ func assignOpToken(updateAssign bool) lex.Action {
 }
 }

+func multiplyWithPrefs() lex.Action {
+return func(s *lex.Scanner, m *machines.Match) (interface{}, error) {
+prefs := multiplyPreferences{}
+options := string(m.Bytes)
+if strings.Contains(options, "+") {
+prefs.AppendArrays = true
+}
+if strings.Contains(options, "?") {
+prefs.TraversePrefs = traversePreferences{DontAutoCreate: true}
+}
+if strings.Contains(options, "d") {
+prefs.DeepMergeArrays = true
+}
+op := &Operation{OperationType: multiplyOpType, Value: multiplyOpType.Type, StringValue: options, Preferences: prefs}
+return &token{TokenType: operationToken, Operation: op}, nil
+}
+}

 func opTokenWithPrefs(op *operationType, assignOpType *operationType, preferences interface{}) lex.Action {
 return func(s *lex.Scanner, m *machines.Match) (interface{}, error) {
 log.Debug("opTokenWithPrefs %v", string(m.Bytes))
@@ -123,7 +138,7 @@ func assignAllCommentsOp(updateAssign bool) lex.Action {
 Value: assignCommentOpType.Type,
 StringValue: value,
 UpdateAssign: updateAssign,
-Preferences: &commentOpPreferences{LineComment: true, HeadComment: true, FootComment: true},
+Preferences: commentOpPreferences{LineComment: true, HeadComment: true, FootComment: true},
 }
 return &token{TokenType: operationToken, Operation: op}, nil
 }
@@ -131,7 +146,7 @@ func assignAllCommentsOp(updateAssign bool) lex.Action {

 func literalToken(pType tokenType, checkForPost bool) lex.Action {
 return func(s *lex.Scanner, m *machines.Match) (interface{}, error) {
-return &token{TokenType: pType, CheckForPostTraverse: checkForPost}, nil
+return &token{TokenType: pType, CheckForPostTraverse: checkForPost, Match: m}, nil
 }
 }

@@ -174,14 +189,28 @@ func stringValue(wrapped bool) lex.Action {
 if wrapped {
 value = unwrap(value)
 }
+value = strings.ReplaceAll(value, "\\\"", "\"")
 return &token{TokenType: operationToken, Operation: createValueOperation(value, value)}, nil
 }
 }

+func getVariableOpToken() lex.Action {
+return func(s *lex.Scanner, m *machines.Match) (interface{}, error) {
+value := string(m.Bytes)

+value = value[1:]

+getVarOperation := createValueOperation(value, value)
+getVarOperation.OperationType = getVariableOpType

+return &token{TokenType: operationToken, Operation: getVarOperation, CheckForPostTraverse: true}, nil
+}
+}

 func envOp(strenv bool) lex.Action {
 return func(s *lex.Scanner, m *machines.Match) (interface{}, error) {
 value := string(m.Bytes)
-preferences := &envOpPreferences{}
+preferences := envOpPreferences{}

 if strenv {
 // strenv( )
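multiplyWithPrefs inspects the matched text for the option characters `+`, `?` and `d`. The lexer rule that feeds it, added further down in this diff, is `\*[\+|\?d]*`; a quick check of what that pattern accepts (anchored here so whole strings can be tested):

```go
package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Same character class as the new multiply rule; note it also accepts a
	// literal '|' inside the brackets, a quirk carried over from the pattern.
	multiply := regexp.MustCompile(`^\*[\+|\?d]*$`)

	for _, candidate := range []string{"*", "*+", "*?", "*d", "*+d", "*x"} {
		fmt.Printf("%-4s matches: %v\n", candidate, multiply.MatchString(candidate))
	}
}
```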
@@ -219,11 +248,11 @@ func initLexer() (*lex.Lexer, error) {
 lexer.Add([]byte(`\)`), literalToken(closeBracket, true))

 lexer.Add([]byte(`\.\[`), literalToken(traverseArrayCollect, false))
-lexer.Add([]byte(`\.\.`), opTokenWithPrefs(recursiveDescentOpType, nil, &recursiveDescentPreferences{RecurseArray: true,
+lexer.Add([]byte(`\.\.`), opTokenWithPrefs(recursiveDescentOpType, nil, recursiveDescentPreferences{RecurseArray: true,
-TraversePreferences: &traversePreferences{FollowAlias: false, IncludeMapKeys: false}}))
+TraversePreferences: traversePreferences{DontFollowAlias: true, IncludeMapKeys: false}}))

-lexer.Add([]byte(`\.\.\.`), opTokenWithPrefs(recursiveDescentOpType, nil, &recursiveDescentPreferences{RecurseArray: true,
+lexer.Add([]byte(`\.\.\.`), opTokenWithPrefs(recursiveDescentOpType, nil, recursiveDescentPreferences{RecurseArray: true,
-TraversePreferences: &traversePreferences{FollowAlias: false, IncludeMapKeys: true}}))
+TraversePreferences: traversePreferences{DontFollowAlias: true, IncludeMapKeys: true}}))

 lexer.Add([]byte(`,`), opToken(unionOpType))
 lexer.Add([]byte(`:\s*`), opToken(createMapOpType))
@@ -231,14 +260,30 @@ func initLexer() (*lex.Lexer, error) {
 lexer.Add([]byte(`sortKeys`), opToken(sortKeysOpType))
 lexer.Add([]byte(`select`), opToken(selectOpType))
 lexer.Add([]byte(`has`), opToken(hasOpType))
+lexer.Add([]byte(`unique`), opToken(uniqueOpType))
+lexer.Add([]byte(`unique_by`), opToken(uniqueByOpType))
 lexer.Add([]byte(`explode`), opToken(explodeOpType))
 lexer.Add([]byte(`or`), opToken(orOpType))
 lexer.Add([]byte(`and`), opToken(andOpType))
 lexer.Add([]byte(`not`), opToken(notOpType))
+lexer.Add([]byte(`ireduce`), opToken(reduceOpType))
+lexer.Add([]byte(`;`), opToken(blockOpType))
 lexer.Add([]byte(`\/\/`), opToken(alternativeOpType))

 lexer.Add([]byte(`documentIndex`), opToken(getDocumentIndexOpType))
 lexer.Add([]byte(`di`), opToken(getDocumentIndexOpType))
+lexer.Add([]byte(`splitDoc`), opToken(splitDocumentOpType))

+lexer.Add([]byte(`join`), opToken(joinStringOpType))
+lexer.Add([]byte(`sub`), opToken(subStringOpType))

+lexer.Add([]byte(`any`), opToken(anyOpType))
+lexer.Add([]byte(`any_c`), opToken(anyConditionOpType))
+lexer.Add([]byte(`all`), opToken(allOpType))
+lexer.Add([]byte(`all_c`), opToken(allConditionOpType))

+lexer.Add([]byte(`split`), opToken(splitStringOpType))
+lexer.Add([]byte(`keys`), opToken(keysOpType))

 lexer.Add([]byte(`style`), opAssignableToken(getStyleOpType, assignStyleOpType))

@@ -249,12 +294,15 @@ func initLexer() (*lex.Lexer, error) {
 lexer.Add([]byte(`fileIndex`), opToken(getFileIndexOpType))
 lexer.Add([]byte(`fi`), opToken(getFileIndexOpType))
 lexer.Add([]byte(`path`), opToken(getPathOpType))
+lexer.Add([]byte(`to_entries`), opToken(toEntriesOpType))
+lexer.Add([]byte(`from_entries`), opToken(fromEntriesOpType))
+lexer.Add([]byte(`with_entries`), opToken(withEntriesOpType))

-lexer.Add([]byte(`lineComment`), opTokenWithPrefs(getCommentOpType, assignCommentOpType, &commentOpPreferences{LineComment: true}))
+lexer.Add([]byte(`lineComment`), opTokenWithPrefs(getCommentOpType, assignCommentOpType, commentOpPreferences{LineComment: true}))

-lexer.Add([]byte(`headComment`), opTokenWithPrefs(getCommentOpType, assignCommentOpType, &commentOpPreferences{HeadComment: true}))
+lexer.Add([]byte(`headComment`), opTokenWithPrefs(getCommentOpType, assignCommentOpType, commentOpPreferences{HeadComment: true}))

-lexer.Add([]byte(`footComment`), opTokenWithPrefs(getCommentOpType, assignCommentOpType, &commentOpPreferences{FootComment: true}))
+lexer.Add([]byte(`footComment`), opTokenWithPrefs(getCommentOpType, assignCommentOpType, commentOpPreferences{FootComment: true}))

 lexer.Add([]byte(`comments\s*=`), assignAllCommentsOp(false))
 lexer.Add([]byte(`comments\s*\|=`), assignAllCommentsOp(true))
@@ -262,6 +310,7 @@ func initLexer() (*lex.Lexer, error) {
 lexer.Add([]byte(`collect`), opToken(collectOpType))

 lexer.Add([]byte(`\s*==\s*`), opToken(equalsOpType))
+lexer.Add([]byte(`\s*!=\s*`), opToken(notEqualsOpType))
 lexer.Add([]byte(`\s*=\s*`), assignOpToken(false))

 lexer.Add([]byte(`del`), opToken(deleteChildOpType))
@@ -270,9 +319,8 @@ func initLexer() (*lex.Lexer, error) {

 lexer.Add([]byte("( |\t|\n|\r)+"), skip)

-lexer.Add([]byte(`d[0-9]+`), documentToken())
+lexer.Add([]byte(`\."[^ "]+"\??`), pathToken(true))
-lexer.Add([]byte(`\."[^ "]+"`), pathToken(true))
+lexer.Add([]byte(`\.[^ \}\{\:\[\],\|\.\[\(\)=]+\??`), pathToken(false))
-lexer.Add([]byte(`\.[^ \}\{\:\[\],\|\.\[\(\)=]+`), pathToken(false))
 lexer.Add([]byte(`\.`), selfToken())

 lexer.Add([]byte(`\|`), opToken(pipeOpType))
@@ -287,20 +335,23 @@ func initLexer() (*lex.Lexer, error) {
 lexer.Add([]byte(`[Nn][Uu][Ll][Ll]`), nullValue())
 lexer.Add([]byte(`~`), nullValue())

-lexer.Add([]byte(`"[^"]*"`), stringValue(true))
+lexer.Add([]byte(`"([^"\\]*(\\.[^"\\]*)*)"`), stringValue(true))
 lexer.Add([]byte(`strenv\([^\)]+\)`), envOp(true))
 lexer.Add([]byte(`env\([^\)]+\)`), envOp(false))

 lexer.Add([]byte(`\[`), literalToken(openCollect, false))
-lexer.Add([]byte(`\]`), literalToken(closeCollect, true))
+lexer.Add([]byte(`\]\??`), literalToken(closeCollect, true))
 lexer.Add([]byte(`\{`), literalToken(openCollectObject, false))
 lexer.Add([]byte(`\}`), literalToken(closeCollectObject, true))
-lexer.Add([]byte(`\*`), opTokenWithPrefs(multiplyOpType, nil, &multiplyPreferences{AppendArrays: false}))
+lexer.Add([]byte(`\*[\+|\?d]*`), multiplyWithPrefs())
-lexer.Add([]byte(`\*\+`), opTokenWithPrefs(multiplyOpType, nil, &multiplyPreferences{AppendArrays: true}))
 lexer.Add([]byte(`\+`), opToken(addOpType))
 lexer.Add([]byte(`\+=`), opToken(addAssignOpType))
+lexer.Add([]byte(`\-`), opToken(subtractOpType))
+lexer.Add([]byte(`\-=`), opToken(subtractAssignOpType))
+lexer.Add([]byte(`\$[a-zA-Z_-0-9]+`), getVariableOpToken())
+lexer.Add([]byte(`as`), opToken(assignVariableOpType))

-err := lexer.Compile()
+err := lexer.CompileNFA()
 if err != nil {
 return nil, err
 }
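The string-literal rule above changes so that backslash escapes stay inside the match, and stringValue then unescapes `\"` afterwards. A short standalone comparison of the old and new patterns on an input containing escaped quotes:

```go
package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Old pattern: stops at the first '"', so an escaped quote ends the match early.
	oldRe := regexp.MustCompile(`"[^"]*"`)
	// New pattern: permits backslash escapes (\" and others) inside the string.
	newRe := regexp.MustCompile(`"([^"\\]*(\\.[^"\\]*)*)"`)

	input := `.a == "say \"hello\"" | not`
	fmt.Printf("old match: %s\n", oldRe.FindString(input)) // "say \"
	fmt.Printf("new match: %s\n", newRe.FindString(input)) // "say \"hello\""
}
```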
@@ -334,7 +385,7 @@ func (p *expressionTokeniserImpl) Tokenise(expression string) ([]*token, error)

 if tok != nil {
 currentToken := tok.(*token)
-log.Debugf("Tokenising %v", currentToken.toString())
+log.Debugf("Tokenising %v", currentToken.toString(true))
 tokens = append(tokens, currentToken)
 }
 if err != nil {
@@ -360,10 +411,19 @@ func (p *expressionTokeniserImpl) handleToken(tokens []*token, index int, postPr
 skipNextToken = false
 currentToken := tokens[index]

+log.Debug("processing %v", currentToken.toString(true))

 if currentToken.TokenType == traverseArrayCollect {
 //need to put a traverse array then a collect currentToken
 // do this by adding traverse then converting currentToken to collect

+if index == 0 || tokens[index-1].TokenType != operationToken ||
+tokens[index-1].Operation.OperationType != traversePathOpType {
+log.Debug("  adding self")
+op := &Operation{OperationType: selfReferenceOpType, StringValue: "SELF"}
+postProcessedTokens = append(postProcessedTokens, &token{TokenType: operationToken, Operation: op})
+}
+log.Debug("  adding traverse array")
 op := &Operation{OperationType: traverseArrayOpType, StringValue: "TRAVERSE_ARRAY"}
 postProcessedTokens = append(postProcessedTokens, &token{TokenType: operationToken, Operation: op})

@@ -374,34 +434,41 @@ func (p *expressionTokeniserImpl) handleToken(tokens []*token, index int, postPr
 if index != len(tokens)-1 && currentToken.AssignOperation != nil &&
 tokens[index+1].TokenType == operationToken &&
 tokens[index+1].Operation.OperationType == assignOpType {
+log.Debug("  its an update assign")
 currentToken.Operation = currentToken.AssignOperation
 currentToken.Operation.UpdateAssign = tokens[index+1].Operation.UpdateAssign
 skipNextToken = true
 }

+log.Debug("  adding token to the fixed list")
 postProcessedTokens = append(postProcessedTokens, currentToken)

+if index != len(tokens)-1 &&
+((currentToken.TokenType == openCollect && tokens[index+1].TokenType == closeCollect) ||
+(currentToken.TokenType == openCollectObject && tokens[index+1].TokenType == closeCollectObject)) {
+log.Debug("  adding empty")
+op := &Operation{OperationType: emptyOpType, StringValue: "EMPTY"}
+postProcessedTokens = append(postProcessedTokens, &token{TokenType: operationToken, Operation: op})
+}

 if index != len(tokens)-1 && currentToken.CheckForPostTraverse &&
 tokens[index+1].TokenType == operationToken &&
 tokens[index+1].Operation.OperationType == traversePathOpType {
+log.Debug("  adding pipe because the next thing is traverse")
 op := &Operation{OperationType: shortPipeOpType, Value: "PIPE"}
 postProcessedTokens = append(postProcessedTokens, &token{TokenType: operationToken, Operation: op})
 }
 if index != len(tokens)-1 && currentToken.CheckForPostTraverse &&
 tokens[index+1].TokenType == openCollect {

-op := &Operation{OperationType: shortPipeOpType, Value: "PIPE"}
+// if tokens[index].TokenType == closeCollect {
+// log.Debug("  adding pipe because next is opencollect")
+// op := &Operation{OperationType: shortPipeOpType, Value: "PIPE"}
+// postProcessedTokens = append(postProcessedTokens, &token{TokenType: operationToken, Operation: op})
+// }
+log.Debug("  adding traverArray because next is opencollect")
+op := &Operation{OperationType: traverseArrayOpType}
 postProcessedTokens = append(postProcessedTokens, &token{TokenType: operationToken, Operation: op})

-op = &Operation{OperationType: traverseArrayOpType}
-postProcessedTokens = append(postProcessedTokens, &token{TokenType: operationToken, Operation: op})
-}
-if index != len(tokens)-1 && currentToken.CheckForPostTraverse &&
-tokens[index+1].TokenType == traverseArrayCollect {

-op := &Operation{OperationType: shortPipeOpType, Value: "PIPE"}
-postProcessedTokens = append(postProcessedTokens, &token{TokenType: operationToken, Operation: op})

 }
 return postProcessedTokens, skipNextToken
 }
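handleToken now inserts an EMPTY operation whenever an opener is immediately followed by its matching closer, which is how `[]` and `{}` end up with an EMPTY operand in the path tests earlier in this diff. A small sketch of that pairwise scan, using plain strings in place of yq's token type:

```go
package main

import "fmt"

// insertEmpty walks a token stream and inserts "EMPTY" between an opener and
// its immediately following closer, mirroring the idea of the post-processing
// step above. Token names are simplified for illustration.
func insertEmpty(tokens []string) []string {
	pairs := map[string]string{"[": "]", "{": "}"}
	var out []string
	for i, tok := range tokens {
		out = append(out, tok)
		if closer, ok := pairs[tok]; ok && i+1 < len(tokens) && tokens[i+1] == closer {
			out = append(out, "EMPTY")
		}
	}
	return out
}

func main() {
	fmt.Println(insertEmpty([]string{"[", "]", "|", "join", "(", ".", ")"}))
	// [[ EMPTY ] | join ( . )]
}
```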
@@ -20,11 +20,11 @@ type operationType struct {
 Handler operatorHandler
 }

-// operators TODO:
-// - mergeEmpty (sets only if the document is empty, do I do that now?)

 var orOpType = &operationType{Type: "OR", NumArgs: 2, Precedence: 20, Handler: orOperator}
 var andOpType = &operationType{Type: "AND", NumArgs: 2, Precedence: 20, Handler: andOperator}
+var reduceOpType = &operationType{Type: "REDUCE", NumArgs: 2, Precedence: 35, Handler: reduceOperator}

+var blockOpType = &operationType{Type: "BLOCK", Precedence: 10, NumArgs: 2, Handler: emptyOperator}

 var unionOpType = &operationType{Type: "UNION", NumArgs: 2, Precedence: 10, Handler: unionOperator}

@@ -32,25 +32,43 @@ var pipeOpType = &operationType{Type: "PIPE", NumArgs: 2, Precedence: 30, Handle

 var assignOpType = &operationType{Type: "ASSIGN", NumArgs: 2, Precedence: 40, Handler: assignUpdateOperator}
 var addAssignOpType = &operationType{Type: "ADD_ASSIGN", NumArgs: 2, Precedence: 40, Handler: addAssignOperator}
+var subtractAssignOpType = &operationType{Type: "SUBTRACT_ASSIGN", NumArgs: 2, Precedence: 40, Handler: subtractAssignOperator}

 var assignAttributesOpType = &operationType{Type: "ASSIGN_ATTRIBUTES", NumArgs: 2, Precedence: 40, Handler: assignAttributesOperator}
 var assignStyleOpType = &operationType{Type: "ASSIGN_STYLE", NumArgs: 2, Precedence: 40, Handler: assignStyleOperator}
+var assignVariableOpType = &operationType{Type: "ASSIGN_VARIABLE", NumArgs: 2, Precedence: 40, Handler: assignVariableOperator}
 var assignTagOpType = &operationType{Type: "ASSIGN_TAG", NumArgs: 2, Precedence: 40, Handler: assignTagOperator}
 var assignCommentOpType = &operationType{Type: "ASSIGN_COMMENT", NumArgs: 2, Precedence: 40, Handler: assignCommentsOperator}
 var assignAnchorOpType = &operationType{Type: "ASSIGN_ANCHOR", NumArgs: 2, Precedence: 40, Handler: assignAnchorOperator}
 var assignAliasOpType = &operationType{Type: "ASSIGN_ALIAS", NumArgs: 2, Precedence: 40, Handler: assignAliasOperator}

-var multiplyOpType = &operationType{Type: "MULTIPLY", NumArgs: 2, Precedence: 45, Handler: multiplyOperator}
+var multiplyOpType = &operationType{Type: "MULTIPLY", NumArgs: 2, Precedence: 42, Handler: multiplyOperator}
-var addOpType = &operationType{Type: "ADD", NumArgs: 2, Precedence: 45, Handler: addOperator}
+var addOpType = &operationType{Type: "ADD", NumArgs: 2, Precedence: 42, Handler: addOperator}
-var alternativeOpType = &operationType{Type: "ALTERNATIVE", NumArgs: 2, Precedence: 45, Handler: alternativeOperator}
+var subtractOpType = &operationType{Type: "SUBTRACT", NumArgs: 2, Precedence: 42, Handler: subtractOperator}
+var alternativeOpType = &operationType{Type: "ALTERNATIVE", NumArgs: 2, Precedence: 42, Handler: alternativeOperator}

 var equalsOpType = &operationType{Type: "EQUALS", NumArgs: 2, Precedence: 40, Handler: equalsOperator}
-var createMapOpType = &operationType{Type: "CREATE_MAP", NumArgs: 2, Precedence: 40, Handler: createMapOperator}
+var notEqualsOpType = &operationType{Type: "EQUALS", NumArgs: 2, Precedence: 40, Handler: notEqualsOperator}

+//createmap needs to be above union, as we use union to build the components of the objects
+var createMapOpType = &operationType{Type: "CREATE_MAP", NumArgs: 2, Precedence: 15, Handler: createMapOperator}

 var shortPipeOpType = &operationType{Type: "SHORT_PIPE", NumArgs: 2, Precedence: 45, Handler: pipeOperator}

 var lengthOpType = &operationType{Type: "LENGTH", NumArgs: 0, Precedence: 50, Handler: lengthOperator}
 var collectOpType = &operationType{Type: "COLLECT", NumArgs: 0, Precedence: 50, Handler: collectOperator}

+var anyOpType = &operationType{Type: "ANY", NumArgs: 0, Precedence: 50, Handler: anyOperator}
+var allOpType = &operationType{Type: "ALL", NumArgs: 0, Precedence: 50, Handler: allOperator}
+var anyConditionOpType = &operationType{Type: "ANY_CONDITION", NumArgs: 1, Precedence: 50, Handler: anyOperator}
+var allConditionOpType = &operationType{Type: "ALL_CONDITION", NumArgs: 1, Precedence: 50, Handler: allOperator}

+var toEntriesOpType = &operationType{Type: "TO_ENTRIES", NumArgs: 0, Precedence: 50, Handler: toEntriesOperator}
+var fromEntriesOpType = &operationType{Type: "FROM_ENTRIES", NumArgs: 0, Precedence: 50, Handler: fromEntriesOperator}
+var withEntriesOpType = &operationType{Type: "WITH_ENTRIES", NumArgs: 1, Precedence: 50, Handler: withEntriesOperator}

+var splitDocumentOpType = &operationType{Type: "SPLIT_DOC", NumArgs: 0, Precedence: 50, Handler: splitDocumentOperator}
+var getVariableOpType = &operationType{Type: "GET_VARIABLE", NumArgs: 0, Precedence: 55, Handler: getVariableOperator}
 var getStyleOpType = &operationType{Type: "GET_STYLE", NumArgs: 0, Precedence: 50, Handler: getStyleOperator}
 var getTagOpType = &operationType{Type: "GET_TAG", NumArgs: 0, Precedence: 50, Handler: getTagOperator}
 var getCommentOpType = &operationType{Type: "GET_COMMENT", NumArgs: 0, Precedence: 50, Handler: getCommentsOperator}
@@ -63,24 +81,29 @@ var getPathOpType = &operationType{Type: "GET_PATH", NumArgs: 0, Precedence: 50,

 var explodeOpType = &operationType{Type: "EXPLODE", NumArgs: 1, Precedence: 50, Handler: explodeOperator}
 var sortKeysOpType = &operationType{Type: "SORT_KEYS", NumArgs: 1, Precedence: 50, Handler: sortKeysOperator}
+var joinStringOpType = &operationType{Type: "JOIN", NumArgs: 1, Precedence: 50, Handler: joinStringOperator}
+var subStringOpType = &operationType{Type: "SUBSTR", NumArgs: 1, Precedence: 50, Handler: substituteStringOperator}
+var splitStringOpType = &operationType{Type: "SPLIT", NumArgs: 1, Precedence: 50, Handler: splitStringOperator}

+var keysOpType = &operationType{Type: "KEYS", NumArgs: 0, Precedence: 50, Handler: keysOperator}

 var collectObjectOpType = &operationType{Type: "COLLECT_OBJECT", NumArgs: 0, Precedence: 50, Handler: collectObjectOperator}
-var traversePathOpType = &operationType{Type: "TRAVERSE_PATH", NumArgs: 0, Precedence: 50, Handler: traversePathOperator}
+var traversePathOpType = &operationType{Type: "TRAVERSE_PATH", NumArgs: 0, Precedence: 55, Handler: traversePathOperator}
-var traverseArrayOpType = &operationType{Type: "TRAVERSE_ARRAY", NumArgs: 1, Precedence: 50, Handler: traverseArrayOperator}
+var traverseArrayOpType = &operationType{Type: "TRAVERSE_ARRAY", NumArgs: 2, Precedence: 50, Handler: traverseArrayOperator}

-var documentFilterOpType = &operationType{Type: "DOCUMENT_FILTER", NumArgs: 0, Precedence: 50, Handler: traversePathOperator}
+var selfReferenceOpType = &operationType{Type: "SELF", NumArgs: 0, Precedence: 55, Handler: selfOperator}
-var selfReferenceOpType = &operationType{Type: "SELF", NumArgs: 0, Precedence: 50, Handler: selfOperator}
 var valueOpType = &operationType{Type: "VALUE", NumArgs: 0, Precedence: 50, Handler: valueOperator}
 var envOpType = &operationType{Type: "ENV", NumArgs: 0, Precedence: 50, Handler: envOperator}
 var notOpType = &operationType{Type: "NOT", NumArgs: 0, Precedence: 50, Handler: notOperator}
-var emptyOpType = &operationType{Type: "EMPTY", NumArgs: 50, Handler: emptyOperator}
+var emptyOpType = &operationType{Type: "EMPTY", Precedence: 50, Handler: emptyOperator}

 var recursiveDescentOpType = &operationType{Type: "RECURSIVE_DESCENT", NumArgs: 0, Precedence: 50, Handler: recursiveDescentOperator}

 var selectOpType = &operationType{Type: "SELECT", NumArgs: 1, Precedence: 50, Handler: selectOperator}
 var hasOpType = &operationType{Type: "HAS", NumArgs: 1, Precedence: 50, Handler: hasOperator}
+var uniqueOpType = &operationType{Type: "UNIQUE", NumArgs: 0, Precedence: 50, Handler: unique}
+var uniqueByOpType = &operationType{Type: "UNIQUE_BY", NumArgs: 1, Precedence: 50, Handler: uniqueBy}
 var deleteChildOpType = &operationType{Type: "DELETE", NumArgs: 1, Precedence: 40, Handler: deleteChildOperator}
-var deleteImmediateChildOpType = &operationType{Type: "DELETE_IMMEDIATE_CHILD", NumArgs: 1, Precedence: 40, Handler: deleteImmediateChildOperator}

 type Operation struct {
 OperationType *operationType
@@ -120,8 +143,6 @@ func createValueOperation(value interface{}, stringValue string) *Operation {
 func (p *Operation) toString() string {
 if p.OperationType == traversePathOpType {
 return fmt.Sprintf("%v", p.Value)
-} else if p.OperationType == documentFilterOpType {
-return fmt.Sprintf("d%v", p.Value)
 } else if p.OperationType == selfReferenceOpType {
 return "SELF"
 } else if p.OperationType == valueOpType {
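The operator table above mostly shifts precedences around: CREATE_MAP drops to 15, just above UNION, arithmetic sits at 42 between assignment (40) and SHORT_PIPE (45), and traversal binds tightest at 55. A quick way to read the relative binding order is to sort the numbers, as in this sketch (values copied from the hunk above, higher binds tighter):

```go
package main

import (
	"fmt"
	"sort"
)

func main() {
	// Precedence values copied from the operator declarations above.
	precedence := map[string]int{
		"UNION": 10, "CREATE_MAP": 15, "OR": 20, "AND": 20, "PIPE": 30,
		"REDUCE": 35, "ASSIGN": 40, "EQUALS": 40, "ADD": 42, "MULTIPLY": 42,
		"SHORT_PIPE": 45, "COLLECT": 50, "TRAVERSE_PATH": 55,
	}
	names := make([]string, 0, len(precedence))
	for name := range precedence {
		names = append(names, name)
	}
	sort.Slice(names, func(i, j int) bool { return precedence[names[i]] < precedence[names[j]] })
	for _, name := range names {
		fmt.Printf("%2d %s\n", precedence[name], name)
	}
}
```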
@@ -3,7 +3,6 @@ package yqlib
 import (
 "fmt"

-"container/list"
 "strconv"

 yaml "gopkg.in/yaml.v3"
@@ -15,12 +14,12 @@ func createAddOp(lhs *ExpressionNode, rhs *ExpressionNode) *ExpressionNode {
 Rhs: rhs}
 }

-func addAssignOperator(d *dataTreeNavigator, matchingNodes *list.List, expressionNode *ExpressionNode) (*list.List, error) {
+func addAssignOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
 assignmentOp := &Operation{OperationType: assignOpType}
-assignmentOp.UpdateAssign = false
+assignmentOp.UpdateAssign = true
+selfExpression := &ExpressionNode{Operation: &Operation{OperationType: selfReferenceOpType}}
-assignmentOpNode := &ExpressionNode{Operation: assignmentOp, Lhs: expressionNode.Lhs, Rhs: createAddOp(expressionNode.Lhs, expressionNode.Rhs)}
+assignmentOpNode := &ExpressionNode{Operation: assignmentOp, Lhs: expressionNode.Lhs, Rhs: createAddOp(selfExpression, expressionNode.Rhs)}
-return d.GetMatchingNodes(matchingNodes, assignmentOpNode)
+return d.GetMatchingNodes(context, assignmentOpNode)
 }

 func toNodes(candidate *CandidateNode) []*yaml.Node {
@@ -37,19 +36,24 @@ func toNodes(candidate *CandidateNode) []*yaml.Node {

 }

-func addOperator(d *dataTreeNavigator, matchingNodes *list.List, expressionNode *ExpressionNode) (*list.List, error) {
+func addOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
 log.Debugf("Add operator")

-return crossFunction(d, matchingNodes, expressionNode, add)
+return crossFunction(d, context.ReadOnlyClone(), expressionNode, add, false)
 }

-func add(d *dataTreeNavigator, lhs *CandidateNode, rhs *CandidateNode) (*CandidateNode, error) {
+func add(d *dataTreeNavigator, context Context, lhs *CandidateNode, rhs *CandidateNode) (*CandidateNode, error) {
 lhs.Node = unwrapDoc(lhs.Node)
 rhs.Node = unwrapDoc(rhs.Node)

-target := lhs.CreateChild(nil, &yaml.Node{})
 lhsNode := lhs.Node

+if lhsNode.Tag == "!!null" {
+return lhs.CreateChild(nil, rhs.Node), nil
+}

+target := lhs.CreateChild(nil, &yaml.Node{})

 switch lhsNode.Kind {
 case yaml.MappingNode:
 return nil, fmt.Errorf("Maps not yet supported for addition")
||||||
|
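The change above rewrites `lhs += rhs` as an update-assign whose right-hand side is a self-reference plus `rhs`, so the addition is evaluated relative to each matched left-hand node instead of re-evaluating the original LHS expression. A minimal sketch of the resulting behaviour, taken from the "Relative append" scenario further below; the `yq eval '<expression>' file.yaml` invocation form and the file name are assumptions, not part of the diff:

    # relative.yaml contains: a: { a1: {b: [cat]}, a2: {b: [dog]}, a3: {} }
    yq eval '.a[].b += ["mouse"]' relative.yaml
    # expected, per the scenario: a: {a1: {b: [cat, mouse]}, a2: {b: [dog, mouse]}, a3: {b: [mouse]}}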
@ -5,6 +5,31 @@ import (
)

var addOperatorScenarios = []expressionScenario{
+    {
+        skipDoc: true,
+        document: `[{a: foo, b: bar}, {a: 1, b: 2}]`,
+        expression: ".[] | .a + .b",
+        expected: []string{
+            "D0, P[0 a], (!!str)::foobar\n",
+            "D0, P[1 a], (!!int)::3\n",
+        },
+    },
+    {
+        skipDoc: true,
+        document: `{}`,
+        expression: "(.a + .b) as $x",
+        expected: []string{
+            "D0, P[], (doc)::{}\n",
+        },
+    },
+    {
+        skipDoc: true,
+        document: `a: 0`,
+        expression: ".a += .b.c",
+        expected: []string{
+            "D0, P[], (doc)::a: 0\n",
+        },
+    },
    {
        description: "Concatenate and assign arrays",
        document: `{a: {val: thing, b: [cat,dog]}}`,
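The first new scenario above exercises `+` across strings and numbers in a single pass. A hedged command-line sketch of the same thing, assuming the usual `yq eval '<expression>' file.yaml` invocation (file name hypothetical):

    # pairs.yaml contains: [{a: foo, b: bar}, {a: 1, b: 2}]
    yq eval '.[] | .a + .b' pairs.yaml
    # expected, per the scenario: foobar, then 3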
@ -54,7 +79,7 @@ var addOperatorScenarios = []expressionScenario{
        },
    },
    {
-        description: "Update array (append)",
+        description: "Append to array",
        document: `{a: [1,2], b: [3,4]}`,
        expression: `.a = .a + .b`,
        expected: []string{
@ -62,17 +87,17 @@ var addOperatorScenarios = []expressionScenario{
        },
    },
    {
-        description: "String concatenation",
+        description: "Relative append",
-        document: `{a: cat, b: meow}`,
+        document: `a: { a1: {b: [cat]}, a2: {b: [dog]}, a3: {} }`,
-        expression: `.a = .a + .b`,
+        expression: `.a[].b += ["mouse"]`,
        expected: []string{
-            "D0, P[], (doc)::{a: catmeow, b: meow}\n",
+            "D0, P[], (doc)::a: {a1: {b: [cat, mouse]}, a2: {b: [dog, mouse]}, a3: {b: [mouse]}}\n",
        },
    },
    {
-        description: "Relative string concatenation",
+        description: "String concatenation",
        document: `{a: cat, b: meow}`,
-        expression: `.a += .b`,
+        expression: `.a = .a + .b`,
        expected: []string{
            "D0, P[], (doc)::{a: catmeow, b: meow}\n",
        },
    },
@ -96,11 +121,19 @@ var addOperatorScenarios = []expressionScenario{
        },
    },
    {
-        description: "Increment number",
+        description: "Increment numbers",
-        document: `{a: 3}`,
+        document: `{a: 3, b: 5}`,
-        expression: `.a += 1`,
+        expression: `.[] += 1`,
        expected: []string{
-            "D0, P[], (doc)::{a: 4}\n",
+            "D0, P[], (doc)::{a: 4, b: 6}\n",
+        },
+    },
+    {
+        description: "Add to null",
+        subdescription: "Adding to null simply returns the rhs",
+        expression: `null + "cat"`,
+        expected: []string{
+            "D0, P[], (!!str)::cat\n",
        },
    },
}
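Two of the documented behaviours above, shown as hedged command-line sketches; the invocation form, the `--null-input` flag, and the file name are assumptions rather than part of the diff:

    # nums.yaml contains: {a: 3, b: 5}
    yq eval '.[] += 1' nums.yaml          # expected, per the scenario: {a: 4, b: 6}
    yq eval --null-input 'null + "cat"'   # expected, per the scenario: cat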
@ -1,18 +1,14 @@
package yqlib

-import (
-    "container/list"
-)
-
-// corssFunction no matches
-// can boolean use crossfunction
-
-func alternativeOperator(d *dataTreeNavigator, matchingNodes *list.List, expressionNode *ExpressionNode) (*list.List, error) {
+func alternativeOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
    log.Debugf("-- alternative")
-    return crossFunction(d, matchingNodes, expressionNode, alternativeFunc)
+    return crossFunction(d, context.ReadOnlyClone(), expressionNode, alternativeFunc, true)
}

-func alternativeFunc(d *dataTreeNavigator, lhs *CandidateNode, rhs *CandidateNode) (*CandidateNode, error) {
+func alternativeFunc(d *dataTreeNavigator, context Context, lhs *CandidateNode, rhs *CandidateNode) (*CandidateNode, error) {
+    if lhs == nil {
+        return rhs, nil
+    }
    lhs.Node = unwrapDoc(lhs.Node)
    rhs.Node = unwrapDoc(rhs.Node)
    log.Debugf("Alternative LHS: %v", lhs.Node.Tag)
@ -5,6 +5,20 @@ import (
)

var alternativeOperatorScenarios = []expressionScenario{
+    {
+        skipDoc: true,
+        expression: `.b // .c`,
+        document: `a: bridge`,
+        expected: []string{},
+    },
+    {
+        skipDoc: true,
+        expression: `(.b // "hello") as $x`,
+        document: `a: bridge`,
+        expected: []string{
+            "D0, P[], (doc)::a: bridge\n",
+        },
+    },
    {
        description: "LHS is defined",
        expression: `.a // "hello"`,
@ -13,6 +27,14 @@ var alternativeOperatorScenarios = []expressionScenario{
            "D0, P[a], (!!str)::bridge\n",
        },
    },
+    {
+        expression: `select(tag == "seq") // "cat"`,
+        skipDoc: true,
+        document: `a: frog`,
+        expected: []string{
+            "D0, P[], (!!str)::cat\n",
+        },
+    },
    {
        description: "LHS is not defined",
        expression: `.a // "hello"`,
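The `//` alternative scenarios above can be reproduced from the command line. This is only a sketch: the `yq eval` invocation form and the file name are assumed, while the expressions and expected values come from the scenarios themselves:

    # sample.yaml contains: a: bridge
    yq eval '.a // "hello"' sample.yaml                  # expected: bridge (LHS is defined)
    yq eval 'select(tag == "seq") // "cat"' sample.yaml  # expected: cat (LHS yields no nodes)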
@ -6,146 +6,148 @@ import (
|
|||||||
yaml "gopkg.in/yaml.v3"
|
yaml "gopkg.in/yaml.v3"
|
||||||
)
|
)
|
||||||
|
|
||||||
func assignAliasOperator(d *dataTreeNavigator, matchingNodes *list.List, expressionNode *ExpressionNode) (*list.List, error) {
|
func assignAliasOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
|
||||||
|
|
||||||
log.Debugf("AssignAlias operator!")
|
log.Debugf("AssignAlias operator!")
|
||||||
|
|
||||||
aliasName := ""
|
aliasName := ""
|
||||||
if !expressionNode.Operation.UpdateAssign {
|
if !expressionNode.Operation.UpdateAssign {
|
||||||
rhs, err := d.GetMatchingNodes(matchingNodes, expressionNode.Rhs)
|
rhs, err := d.GetMatchingNodes(context.ReadOnlyClone(), expressionNode.Rhs)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return Context{}, err
|
||||||
}
|
}
|
||||||
if rhs.Front() != nil {
|
if rhs.MatchingNodes.Front() != nil {
|
||||||
aliasName = rhs.Front().Value.(*CandidateNode).Node.Value
|
aliasName = rhs.MatchingNodes.Front().Value.(*CandidateNode).Node.Value
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
lhs, err := d.GetMatchingNodes(matchingNodes, expressionNode.Lhs)
|
lhs, err := d.GetMatchingNodes(context, expressionNode.Lhs)
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return Context{}, err
|
||||||
}
|
}
|
||||||
|
|
||||||
for el := lhs.Front(); el != nil; el = el.Next() {
|
for el := lhs.MatchingNodes.Front(); el != nil; el = el.Next() {
|
||||||
candidate := el.Value.(*CandidateNode)
|
candidate := el.Value.(*CandidateNode)
|
||||||
log.Debugf("Setting aliasName : %v", candidate.GetKey())
|
log.Debugf("Setting aliasName : %v", candidate.GetKey())
|
||||||
|
|
||||||
if expressionNode.Operation.UpdateAssign {
|
if expressionNode.Operation.UpdateAssign {
|
||||||
rhs, err := d.GetMatchingNodes(nodeToMap(candidate), expressionNode.Rhs)
|
rhs, err := d.GetMatchingNodes(context.SingleReadonlyChildContext(candidate), expressionNode.Rhs)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return Context{}, err
|
||||||
}
|
}
|
||||||
if rhs.Front() != nil {
|
if rhs.MatchingNodes.Front() != nil {
|
||||||
aliasName = rhs.Front().Value.(*CandidateNode).Node.Value
|
aliasName = rhs.MatchingNodes.Front().Value.(*CandidateNode).Node.Value
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
candidate.Node.Kind = yaml.AliasNode
|
if aliasName != "" {
|
||||||
candidate.Node.Value = aliasName
|
candidate.Node.Kind = yaml.AliasNode
|
||||||
|
candidate.Node.Value = aliasName
|
||||||
|
}
|
||||||
}
|
}
|
||||||
return matchingNodes, nil
|
return context, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func getAliasOperator(d *dataTreeNavigator, matchingNodes *list.List, expressionNode *ExpressionNode) (*list.List, error) {
|
func getAliasOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
|
||||||
log.Debugf("GetAlias operator!")
|
log.Debugf("GetAlias operator!")
|
||||||
var results = list.New()
|
var results = list.New()
|
||||||
|
|
||||||
for el := matchingNodes.Front(); el != nil; el = el.Next() {
|
for el := context.MatchingNodes.Front(); el != nil; el = el.Next() {
|
||||||
candidate := el.Value.(*CandidateNode)
|
candidate := el.Value.(*CandidateNode)
|
||||||
node := &yaml.Node{Kind: yaml.ScalarNode, Value: candidate.Node.Value, Tag: "!!str"}
|
node := &yaml.Node{Kind: yaml.ScalarNode, Value: candidate.Node.Value, Tag: "!!str"}
|
||||||
result := candidate.CreateChild(nil, node)
|
result := candidate.CreateChild(nil, node)
|
||||||
results.PushBack(result)
|
results.PushBack(result)
|
||||||
}
|
}
|
||||||
return results, nil
|
return context.ChildContext(results), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func assignAnchorOperator(d *dataTreeNavigator, matchingNodes *list.List, expressionNode *ExpressionNode) (*list.List, error) {
|
func assignAnchorOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
|
||||||
|
|
||||||
log.Debugf("AssignAnchor operator!")
|
log.Debugf("AssignAnchor operator!")
|
||||||
|
|
||||||
anchorName := ""
|
anchorName := ""
|
||||||
if !expressionNode.Operation.UpdateAssign {
|
if !expressionNode.Operation.UpdateAssign {
|
||||||
rhs, err := d.GetMatchingNodes(matchingNodes, expressionNode.Rhs)
|
rhs, err := d.GetMatchingNodes(context.ReadOnlyClone(), expressionNode.Rhs)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return Context{}, err
|
||||||
}
|
}
|
||||||
|
|
||||||
if rhs.Front() != nil {
|
if rhs.MatchingNodes.Front() != nil {
|
||||||
anchorName = rhs.Front().Value.(*CandidateNode).Node.Value
|
anchorName = rhs.MatchingNodes.Front().Value.(*CandidateNode).Node.Value
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
lhs, err := d.GetMatchingNodes(matchingNodes, expressionNode.Lhs)
|
lhs, err := d.GetMatchingNodes(context, expressionNode.Lhs)
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return Context{}, err
|
||||||
}
|
}
|
||||||
|
|
||||||
for el := lhs.Front(); el != nil; el = el.Next() {
|
for el := lhs.MatchingNodes.Front(); el != nil; el = el.Next() {
|
||||||
candidate := el.Value.(*CandidateNode)
|
candidate := el.Value.(*CandidateNode)
|
||||||
log.Debugf("Setting anchorName of : %v", candidate.GetKey())
|
log.Debugf("Setting anchorName of : %v", candidate.GetKey())
|
||||||
|
|
||||||
if expressionNode.Operation.UpdateAssign {
|
if expressionNode.Operation.UpdateAssign {
|
||||||
rhs, err := d.GetMatchingNodes(nodeToMap(candidate), expressionNode.Rhs)
|
rhs, err := d.GetMatchingNodes(context.SingleReadonlyChildContext(candidate), expressionNode.Rhs)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return Context{}, err
|
||||||
}
|
}
|
||||||
|
|
||||||
if rhs.Front() != nil {
|
if rhs.MatchingNodes.Front() != nil {
|
||||||
anchorName = rhs.Front().Value.(*CandidateNode).Node.Value
|
anchorName = rhs.MatchingNodes.Front().Value.(*CandidateNode).Node.Value
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
candidate.Node.Anchor = anchorName
|
candidate.Node.Anchor = anchorName
|
||||||
}
|
}
|
||||||
return matchingNodes, nil
|
return context, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func getAnchorOperator(d *dataTreeNavigator, matchingNodes *list.List, expressionNode *ExpressionNode) (*list.List, error) {
|
func getAnchorOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
|
||||||
log.Debugf("GetAnchor operator!")
|
log.Debugf("GetAnchor operator!")
|
||||||
var results = list.New()
|
var results = list.New()
|
||||||
|
|
||||||
for el := matchingNodes.Front(); el != nil; el = el.Next() {
|
for el := context.MatchingNodes.Front(); el != nil; el = el.Next() {
|
||||||
candidate := el.Value.(*CandidateNode)
|
candidate := el.Value.(*CandidateNode)
|
||||||
anchor := candidate.Node.Anchor
|
anchor := candidate.Node.Anchor
|
||||||
node := &yaml.Node{Kind: yaml.ScalarNode, Value: anchor, Tag: "!!str"}
|
node := &yaml.Node{Kind: yaml.ScalarNode, Value: anchor, Tag: "!!str"}
|
||||||
result := candidate.CreateChild(nil, node)
|
result := candidate.CreateChild(nil, node)
|
||||||
results.PushBack(result)
|
results.PushBack(result)
|
||||||
}
|
}
|
||||||
return results, nil
|
return context.ChildContext(results), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func explodeOperator(d *dataTreeNavigator, matchMap *list.List, expressionNode *ExpressionNode) (*list.List, error) {
|
func explodeOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
|
||||||
log.Debugf("-- ExplodeOperation")
|
log.Debugf("-- ExplodeOperation")
|
||||||
|
|
||||||
for el := matchMap.Front(); el != nil; el = el.Next() {
|
for el := context.MatchingNodes.Front(); el != nil; el = el.Next() {
|
||||||
candidate := el.Value.(*CandidateNode)
|
candidate := el.Value.(*CandidateNode)
|
||||||
|
|
||||||
rhs, err := d.GetMatchingNodes(nodeToMap(candidate), expressionNode.Rhs)
|
rhs, err := d.GetMatchingNodes(context.SingleChildContext(candidate), expressionNode.Rhs)
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return Context{}, err
|
||||||
}
|
}
|
||||||
for childEl := rhs.Front(); childEl != nil; childEl = childEl.Next() {
|
for childEl := rhs.MatchingNodes.Front(); childEl != nil; childEl = childEl.Next() {
|
||||||
err = explodeNode(childEl.Value.(*CandidateNode).Node)
|
err = explodeNode(childEl.Value.(*CandidateNode).Node, context)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return Context{}, err
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return matchMap, nil
|
return context, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func explodeNode(node *yaml.Node) error {
|
func explodeNode(node *yaml.Node, context Context) error {
|
||||||
node.Anchor = ""
|
node.Anchor = ""
|
||||||
switch node.Kind {
|
switch node.Kind {
|
||||||
case yaml.SequenceNode, yaml.DocumentNode:
|
case yaml.SequenceNode, yaml.DocumentNode:
|
||||||
for index, contentNode := range node.Content {
|
for index, contentNode := range node.Content {
|
||||||
log.Debugf("exploding index %v", index)
|
log.Debugf("exploding index %v", index)
|
||||||
errorInContent := explodeNode(contentNode)
|
errorInContent := explodeNode(contentNode, context)
|
||||||
if errorInContent != nil {
|
if errorInContent != nil {
|
||||||
return errorInContent
|
return errorInContent
|
||||||
}
|
}
|
||||||
@ -169,23 +171,23 @@ func explodeNode(node *yaml.Node) error {
|
|||||||
valueNode := node.Content[index+1]
|
valueNode := node.Content[index+1]
|
||||||
log.Debugf("traversing %v", keyNode.Value)
|
log.Debugf("traversing %v", keyNode.Value)
|
||||||
if keyNode.Value != "<<" {
|
if keyNode.Value != "<<" {
|
||||||
err := overrideEntry(node, keyNode, valueNode, index, newContent)
|
err := overrideEntry(node, keyNode, valueNode, index, context.ChildContext(newContent))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
if valueNode.Kind == yaml.SequenceNode {
|
if valueNode.Kind == yaml.SequenceNode {
|
||||||
log.Debugf("an alias merge list!")
|
log.Debugf("an alias merge list!")
|
||||||
for index := 0; index < len(valueNode.Content); index = index + 1 {
|
for index := len(valueNode.Content) - 1; index >= 0; index = index - 1 {
|
||||||
aliasNode := valueNode.Content[index]
|
aliasNode := valueNode.Content[index]
|
||||||
err := applyAlias(node, aliasNode.Alias, index, newContent)
|
err := applyAlias(node, aliasNode.Alias, index, context.ChildContext(newContent))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
log.Debugf("an alias merge!")
|
log.Debugf("an alias merge!")
|
||||||
err := applyAlias(node, valueNode.Alias, index, newContent)
|
err := applyAlias(node, valueNode.Alias, index, context.ChildContext(newContent))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
@ -205,7 +207,7 @@ func explodeNode(node *yaml.Node) error {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func applyAlias(node *yaml.Node, alias *yaml.Node, aliasIndex int, newContent *list.List) error {
|
func applyAlias(node *yaml.Node, alias *yaml.Node, aliasIndex int, newContent Context) error {
|
||||||
if alias == nil {
|
if alias == nil {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
@ -221,15 +223,15 @@ func applyAlias(node *yaml.Node, alias *yaml.Node, aliasIndex int, newContent *l
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func overrideEntry(node *yaml.Node, key *yaml.Node, value *yaml.Node, startIndex int, newContent *list.List) error {
|
func overrideEntry(node *yaml.Node, key *yaml.Node, value *yaml.Node, startIndex int, newContent Context) error {
|
||||||
|
|
||||||
err := explodeNode(value)
|
err := explodeNode(value, newContent)
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
for newEl := newContent.Front(); newEl != nil; newEl = newEl.Next() {
|
for newEl := newContent.MatchingNodes.Front(); newEl != nil; newEl = newEl.Next() {
|
||||||
valueEl := newEl.Next() // move forward twice
|
valueEl := newEl.Next() // move forward twice
|
||||||
keyNode := newEl.Value.(*yaml.Node)
|
keyNode := newEl.Value.(*yaml.Node)
|
||||||
log.Debugf("checking new content %v:%v", keyNode.Value, valueEl.Value.(*yaml.Node).Value)
|
log.Debugf("checking new content %v:%v", keyNode.Value, valueEl.Value.(*yaml.Node).Value)
|
||||||
@ -250,12 +252,12 @@ func overrideEntry(node *yaml.Node, key *yaml.Node, value *yaml.Node, startIndex
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
err = explodeNode(key)
|
err = explodeNode(key, newContent)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
log.Debugf("adding %v:%v", key.Value, value.Value)
|
log.Debugf("adding %v:%v", key.Value, value.Value)
|
||||||
newContent.PushBack(key)
|
newContent.MatchingNodes.PushBack(key)
|
||||||
newContent.PushBack(value)
|
newContent.MatchingNodes.PushBack(value)
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
@ -4,7 +4,36 @@ import (
|
|||||||
"testing"
|
"testing"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
var specDocument = `- &CENTER { x: 1, y: 2 }
|
||||||
|
- &LEFT { x: 0, y: 2 }
|
||||||
|
- &BIG { r: 10 }
|
||||||
|
- &SMALL { r: 1 }
|
||||||
|
`
|
||||||
|
|
||||||
|
var expectedSpecResult = "D0, P[4], (!!map)::x: 1\ny: 2\nr: 10\n"
|
||||||
|
|
||||||
var anchorOperatorScenarios = []expressionScenario{
|
var anchorOperatorScenarios = []expressionScenario{
|
||||||
|
{
|
||||||
|
description: "Merge one map",
|
||||||
|
subdescription: "see https://yaml.org/type/merge.html",
|
||||||
|
document: specDocument + "- << : *CENTER\n r: 10\n",
|
||||||
|
expression: ".[4] | explode(.)",
|
||||||
|
expected: []string{expectedSpecResult},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
description: "Merge multiple maps",
|
||||||
|
subdescription: "see https://yaml.org/type/merge.html",
|
||||||
|
document: specDocument + "- << : [ *CENTER, *BIG ]\n",
|
||||||
|
expression: ".[4] | explode(.)",
|
||||||
|
expected: []string{"D0, P[4], (!!map)::r: 10\nx: 1\ny: 2\n"},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
description: "Override",
|
||||||
|
subdescription: "see https://yaml.org/type/merge.html",
|
||||||
|
document: specDocument + "- << : [ *BIG, *LEFT, *SMALL ]\n x: 1\n",
|
||||||
|
expression: ".[4] | explode(.)",
|
||||||
|
expected: []string{"D0, P[4], (!!map)::r: 10\nx: 1\ny: 2\n"},
|
||||||
|
},
|
||||||
{
|
{
|
||||||
description: "Get anchor",
|
description: "Get anchor",
|
||||||
document: `a: &billyBob cat`,
|
document: `a: &billyBob cat`,
|
||||||
@ -29,6 +58,22 @@ var anchorOperatorScenarios = []expressionScenario{
|
|||||||
"D0, P[], (doc)::a: &cat {b: cat}\n",
|
"D0, P[], (doc)::a: &cat {b: cat}\n",
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
skipDoc: true,
|
||||||
|
document: `a: {c: cat}`,
|
||||||
|
expression: `.a anchor |= .b`,
|
||||||
|
expected: []string{
|
||||||
|
"D0, P[], (doc)::a: {c: cat}\n",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
skipDoc: true,
|
||||||
|
document: `a: {c: cat}`,
|
||||||
|
expression: `.a anchor = .b`,
|
||||||
|
expected: []string{
|
||||||
|
"D0, P[], (doc)::a: {c: cat}\n",
|
||||||
|
},
|
||||||
|
},
|
||||||
{
|
{
|
||||||
description: "Get alias",
|
description: "Get alias",
|
||||||
document: `{b: &billyBob meow, a: *billyBob}`,
|
document: `{b: &billyBob meow, a: *billyBob}`,
|
||||||
@ -45,6 +90,30 @@ var anchorOperatorScenarios = []expressionScenario{
|
|||||||
"D0, P[], (doc)::{b: &meow purr, a: *meow}\n",
|
"D0, P[], (doc)::{b: &meow purr, a: *meow}\n",
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
description: "Set alias to blank does nothing",
|
||||||
|
document: `{b: &meow purr, a: cat}`,
|
||||||
|
expression: `.a alias = ""`,
|
||||||
|
expected: []string{
|
||||||
|
"D0, P[], (doc)::{b: &meow purr, a: cat}\n",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
skipDoc: true,
|
||||||
|
document: `{b: &meow purr, a: cat}`,
|
||||||
|
expression: `.a alias = .c`,
|
||||||
|
expected: []string{
|
||||||
|
"D0, P[], (doc)::{b: &meow purr, a: cat}\n",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
skipDoc: true,
|
||||||
|
document: `{b: &meow purr, a: cat}`,
|
||||||
|
expression: `.a alias |= .c`,
|
||||||
|
expected: []string{
|
||||||
|
"D0, P[], (doc)::{b: &meow purr, a: cat}\n",
|
||||||
|
},
|
||||||
|
},
|
||||||
{
|
{
|
||||||
description: "Set alias relatively using assign-update",
|
description: "Set alias relatively using assign-update",
|
||||||
document: `{b: &meow purr, a: {f: meow}}`,
|
document: `{b: &meow purr, a: {f: meow}}`,
|
||||||
@ -91,9 +160,9 @@ bar:
|
|||||||
c: bar_c
|
c: bar_c
|
||||||
foobarList:
|
foobarList:
|
||||||
b: bar_b
|
b: bar_b
|
||||||
a: foo_a
|
thing: foo_thing
|
||||||
thing: bar_thing
|
|
||||||
c: foobarList_c
|
c: foobarList_c
|
||||||
|
a: foo_a
|
||||||
foobar:
|
foobar:
|
||||||
c: foo_c
|
c: foo_c
|
||||||
a: foo_a
|
a: foo_a
|
||||||
@ -106,7 +175,7 @@ foobar:
|
|||||||
expression: `.foo* | explode(.) | (. style="flow")`,
|
expression: `.foo* | explode(.) | (. style="flow")`,
|
||||||
expected: []string{
|
expected: []string{
|
||||||
"D0, P[foo], (!!map)::{a: foo_a, thing: foo_thing, c: foo_c}\n",
|
"D0, P[foo], (!!map)::{a: foo_a, thing: foo_thing, c: foo_c}\n",
|
||||||
"D0, P[foobarList], (!!map)::{b: bar_b, a: foo_a, thing: bar_thing, c: foobarList_c}\n",
|
"D0, P[foobarList], (!!map)::{b: bar_b, thing: foo_thing, c: foobarList_c, a: foo_a}\n",
|
||||||
"D0, P[foobar], (!!map)::{c: foo_c, a: foo_a, thing: foobar_thing}\n",
|
"D0, P[foobar], (!!map)::{c: foo_c, a: foo_a, thing: foobar_thing}\n",
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
@ -116,7 +185,7 @@ foobar:
|
|||||||
expression: `.foo* | explode(explode(.)) | (. style="flow")`,
|
expression: `.foo* | explode(explode(.)) | (. style="flow")`,
|
||||||
expected: []string{
|
expected: []string{
|
||||||
"D0, P[foo], (!!map)::{a: foo_a, thing: foo_thing, c: foo_c}\n",
|
"D0, P[foo], (!!map)::{a: foo_a, thing: foo_thing, c: foo_c}\n",
|
||||||
"D0, P[foobarList], (!!map)::{b: bar_b, a: foo_a, thing: bar_thing, c: foobarList_c}\n",
|
"D0, P[foobarList], (!!map)::{b: bar_b, thing: foo_thing, c: foobarList_c, a: foo_a}\n",
|
||||||
"D0, P[foobar], (!!map)::{c: foo_c, a: foo_a, thing: foobar_thing}\n",
|
"D0, P[foobar], (!!map)::{c: foo_c, a: foo_a, thing: foobar_thing}\n",
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
@ -130,7 +199,7 @@ foobar:
|
|||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestAnchorAliaseOperatorScenarios(t *testing.T) {
|
func TestAnchorAliasOperatorScenarios(t *testing.T) {
|
||||||
for _, tt := range anchorOperatorScenarios {
|
for _, tt := range anchorOperatorScenarios {
|
||||||
testScenario(t, &tt)
|
testScenario(t, &tt)
|
||||||
}
|
}
|
||||||
|
@ -1,30 +1,28 @@
package yqlib

-import "container/list"
-
-func assignUpdateOperator(d *dataTreeNavigator, matchingNodes *list.List, expressionNode *ExpressionNode) (*list.List, error) {
-    lhs, err := d.GetMatchingNodes(matchingNodes, expressionNode.Lhs)
+func assignUpdateOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
+    lhs, err := d.GetMatchingNodes(context, expressionNode.Lhs)
    if err != nil {
-        return nil, err
+        return Context{}, err
    }
-    var rhs *list.List
+    var rhs Context
    if !expressionNode.Operation.UpdateAssign {
-        rhs, err = d.GetMatchingNodes(matchingNodes, expressionNode.Rhs)
+        rhs, err = d.GetMatchingNodes(context.ReadOnlyClone(), expressionNode.Rhs)
    }

-    for el := lhs.Front(); el != nil; el = el.Next() {
+    for el := lhs.MatchingNodes.Front(); el != nil; el = el.Next() {
        candidate := el.Value.(*CandidateNode)

        if expressionNode.Operation.UpdateAssign {
-            rhs, err = d.GetMatchingNodes(nodeToMap(candidate), expressionNode.Rhs)
+            rhs, err = d.GetMatchingNodes(context.SingleChildContext(candidate), expressionNode.Rhs)
        }

        if err != nil {
-            return nil, err
+            return Context{}, err
        }

        // grab the first value
-        first := rhs.Front()
+        first := rhs.MatchingNodes.Front()

        if first != nil {
            rhsCandidate := first.Value.(*CandidateNode)
@ -33,30 +31,31 @@ func assignUpdateOperator(d *dataTreeNavigator, matchingNodes *list.List, expres
        }
    }

-    return matchingNodes, nil
+    return context, nil
}

// does not update content or values
-func assignAttributesOperator(d *dataTreeNavigator, matchingNodes *list.List, expressionNode *ExpressionNode) (*list.List, error) {
+func assignAttributesOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
-    lhs, err := d.GetMatchingNodes(matchingNodes, expressionNode.Lhs)
+    log.Debug("getting lhs matching nodes for update")
+    lhs, err := d.GetMatchingNodes(context, expressionNode.Lhs)
    if err != nil {
-        return nil, err
+        return Context{}, err
    }
-    for el := lhs.Front(); el != nil; el = el.Next() {
+    for el := lhs.MatchingNodes.Front(); el != nil; el = el.Next() {
        candidate := el.Value.(*CandidateNode)

-        rhs, err := d.GetMatchingNodes(nodeToMap(candidate), expressionNode.Rhs)
+        rhs, err := d.GetMatchingNodes(context.SingleReadonlyChildContext(candidate), expressionNode.Rhs)

        if err != nil {
-            return nil, err
+            return Context{}, err
        }

        // grab the first value
-        first := rhs.Front()
+        first := rhs.MatchingNodes.Front()

        if first != nil {
            candidate.UpdateAttributesFrom(first.Value.(*CandidateNode))
        }
    }
-    return matchingNodes, nil
+    return context, nil
}
@ -12,6 +12,22 @@ var assignOperatorScenarios = []expressionScenario{
            "D0, P[], ()::a:\n b: cat\nx: frog\n",
        },
    },
+    {
+        skipDoc: true,
+        document: "{}",
+        expression: `.a |= .b`,
+        expected: []string{
+            "D0, P[], (doc)::{a: null}\n",
+        },
+    },
+    {
+        skipDoc: true,
+        document: "{}",
+        expression: `.a = .b`,
+        expected: []string{
+            "D0, P[], (doc)::{a: null}\n",
+        },
+    },
    {
        description: "Update node to be the child value",
        document: `{a: {b: {g: foof}}}`,
|
|||||||
for _, tt := range assignOperatorScenarios {
|
for _, tt := range assignOperatorScenarios {
|
||||||
testScenario(t, &tt)
|
testScenario(t, &tt)
|
||||||
}
|
}
|
||||||
documentScenarios(t, "Assign", assignOperatorScenarios)
|
documentScenarios(t, "Assign (Update)", assignOperatorScenarios)
|
||||||
}
|
}
|
||||||
|
@ -2,14 +2,13 @@ package yqlib

import (
    "container/list"
+    "fmt"

    yaml "gopkg.in/yaml.v3"
)

-func isTruthy(c *CandidateNode) (bool, error) {
-    node := unwrapDoc(c.Node)
+func isTruthyNode(node *yaml.Node) (bool, error) {
    value := true

    if node.Tag == "!!null" {
        return false, nil
    }
@ -23,56 +22,146 @@ func isTruthy(c *CandidateNode) (bool, error) {
    return value, nil
}

+func isTruthy(c *CandidateNode) (bool, error) {
+    node := unwrapDoc(c.Node)
+    return isTruthyNode(node)
+}

type boolOp func(bool, bool) bool

-func performBoolOp(op boolOp) func(d *dataTreeNavigator, lhs *CandidateNode, rhs *CandidateNode) (*CandidateNode, error) {
-    return func(d *dataTreeNavigator, lhs *CandidateNode, rhs *CandidateNode) (*CandidateNode, error) {
-        lhs.Node = unwrapDoc(lhs.Node)
-        rhs.Node = unwrapDoc(rhs.Node)
-
-        lhsTrue, errDecoding := isTruthy(lhs)
-        if errDecoding != nil {
-            return nil, errDecoding
-        }
-
-        rhsTrue, errDecoding := isTruthy(rhs)
-        if errDecoding != nil {
-            return nil, errDecoding
-        }
-
-        return createBooleanCandidate(lhs, op(lhsTrue, rhsTrue)), nil
+func performBoolOp(op boolOp) func(d *dataTreeNavigator, context Context, lhs *CandidateNode, rhs *CandidateNode) (*CandidateNode, error) {
+    return func(d *dataTreeNavigator, context Context, lhs *CandidateNode, rhs *CandidateNode) (*CandidateNode, error) {
+        owner := lhs
+
+        if lhs == nil && rhs == nil {
+            owner = &CandidateNode{}
+        } else if lhs == nil {
+            owner = rhs
+        }
+
+        var errDecoding error
+        lhsTrue := false
+        if lhs != nil {
+            lhs.Node = unwrapDoc(lhs.Node)
+            lhsTrue, errDecoding = isTruthy(lhs)
+            if errDecoding != nil {
+                return nil, errDecoding
+            }
+        }
+        log.Debugf("-- lhsTrue", lhsTrue)
+
+        rhsTrue := false
+        if rhs != nil {
+            rhs.Node = unwrapDoc(rhs.Node)
+            rhsTrue, errDecoding = isTruthy(rhs)
+            if errDecoding != nil {
+                return nil, errDecoding
+            }
+        }
+        log.Debugf("-- rhsTrue", rhsTrue)
+
+        return createBooleanCandidate(owner, op(lhsTrue, rhsTrue)), nil
    }
}

-func orOperator(d *dataTreeNavigator, matchingNodes *list.List, expressionNode *ExpressionNode) (*list.List, error) {
-    log.Debugf("-- orOp")
-    return crossFunction(d, matchingNodes, expressionNode, performBoolOp(
-        func(b1 bool, b2 bool) bool {
-            return b1 || b2
-        }))
+func findBoolean(wantBool bool, d *dataTreeNavigator, context Context, expressionNode *ExpressionNode, sequenceNode *yaml.Node) (bool, error) {
+    for _, node := range sequenceNode.Content {
+
+        if expressionNode != nil {
+            //need to evaluate the expression against the node
+            candidate := &CandidateNode{Node: node}
+            rhs, err := d.GetMatchingNodes(context.SingleReadonlyChildContext(candidate), expressionNode)
+            if err != nil {
+                return false, err
+            }
+            if rhs.MatchingNodes.Len() > 0 {
+                node = rhs.MatchingNodes.Front().Value.(*CandidateNode).Node
+            } else {
+                // no results found, ignore this entry
+                continue
+            }
+        }
+
+        truthy, err := isTruthyNode(node)
+        if err != nil {
+            return false, err
+        }
+        if truthy == wantBool {
+            return true, nil
+        }
+    }
+    return false, nil
}

-func andOperator(d *dataTreeNavigator, matchingNodes *list.List, expressionNode *ExpressionNode) (*list.List, error) {
+func allOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
+    var results = list.New()
+
+    for el := context.MatchingNodes.Front(); el != nil; el = el.Next() {
+        candidate := el.Value.(*CandidateNode)
+        candidateNode := unwrapDoc(candidate.Node)
+        if candidateNode.Kind != yaml.SequenceNode {
+            return Context{}, fmt.Errorf("any only supports arrays, was %v", candidateNode.Tag)
+        }
+        booleanResult, err := findBoolean(false, d, context, expressionNode.Rhs, candidateNode)
+        if err != nil {
+            return Context{}, err
+        }
+        result := createBooleanCandidate(candidate, !booleanResult)
+        results.PushBack(result)
+    }
+    return context.ChildContext(results), nil
+}
+
+func anyOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
+    var results = list.New()
+
+    for el := context.MatchingNodes.Front(); el != nil; el = el.Next() {
+        candidate := el.Value.(*CandidateNode)
+        candidateNode := unwrapDoc(candidate.Node)
+        if candidateNode.Kind != yaml.SequenceNode {
+            return Context{}, fmt.Errorf("any only supports arrays, was %v", candidateNode.Tag)
+        }
+        booleanResult, err := findBoolean(true, d, context, expressionNode.Rhs, candidateNode)
+        if err != nil {
+            return Context{}, err
+        }
+        result := createBooleanCandidate(candidate, booleanResult)
+        results.PushBack(result)
+    }
+    return context.ChildContext(results), nil
+}
+
+func orOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
+    log.Debugf("-- orOp")
+    return crossFunction(d, context.ReadOnlyClone(), expressionNode, performBoolOp(
+        func(b1 bool, b2 bool) bool {
+            log.Debugf("-- peformingOrOp with %v and %v", b1, b2)
+            return b1 || b2
+        }), true)
+}
+
+func andOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
    log.Debugf("-- AndOp")
-    return crossFunction(d, matchingNodes, expressionNode, performBoolOp(
+    return crossFunction(d, context.ReadOnlyClone(), expressionNode, performBoolOp(
        func(b1 bool, b2 bool) bool {
            return b1 && b2
-        }))
+        }), true)
}

-func notOperator(d *dataTreeNavigator, matchMap *list.List, expressionNode *ExpressionNode) (*list.List, error) {
+func notOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
    log.Debugf("-- notOperation")
    var results = list.New()

-    for el := matchMap.Front(); el != nil; el = el.Next() {
+    for el := context.MatchingNodes.Front(); el != nil; el = el.Next() {
        candidate := el.Value.(*CandidateNode)
        log.Debug("notOperation checking %v", candidate)
        truthy, errDecoding := isTruthy(candidate)
        if errDecoding != nil {
-            return nil, errDecoding
+            return Context{}, errDecoding
        }
        result := createBooleanCandidate(candidate, !truthy)
        results.PushBack(result)
    }
-    return results, nil
+    return context.ChildContext(results), nil
}
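The new `anyOperator` and `allOperator` above back the `any`, `all`, `any_c(...)` and `all_c(...)` expressions exercised by the scenarios that follow. A hedged command-line sketch of that behaviour; the invocation form and file names are assumed, while the expressions and expected outputs come from those scenarios:

    # bools.yaml contains: [false, true]
    yq eval 'any' bools.yaml   # expected: true
    yq eval 'all' bools.yaml   # expected: false

    # words.yaml contains two keys: a: [rad, awesome] and b: [meh, whatever]
    yq eval '.[] |= any_c(. == "awesome")' words.yaml   # expected: a: true, b: false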
@ -6,14 +6,38 @@ import (
|
|||||||
|
|
||||||
var booleanOperatorScenarios = []expressionScenario{
|
var booleanOperatorScenarios = []expressionScenario{
|
||||||
{
|
{
|
||||||
description: "OR example",
|
description: "`or` example",
|
||||||
expression: `true or false`,
|
expression: `true or false`,
|
||||||
expected: []string{
|
expected: []string{
|
||||||
"D0, P[], (!!bool)::true\n",
|
"D0, P[], (!!bool)::true\n",
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
description: "AND example",
|
skipDoc: true,
|
||||||
|
document: "b: hi",
|
||||||
|
expression: `.a or .c`,
|
||||||
|
expected: []string{
|
||||||
|
"D0, P[], (!!bool)::false\n",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
skipDoc: true,
|
||||||
|
document: "b: hi",
|
||||||
|
expression: `select(.a or .b)`,
|
||||||
|
expected: []string{
|
||||||
|
"D0, P[], (doc)::b: hi\n",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
skipDoc: true,
|
||||||
|
document: "b: hi",
|
||||||
|
expression: `select((.a and .b) | not)`,
|
||||||
|
expected: []string{
|
||||||
|
"D0, P[], (doc)::b: hi\n",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
description: "`and` example",
|
||||||
expression: `true and false`,
|
expression: `true and false`,
|
||||||
expected: []string{
|
expected: []string{
|
||||||
"D0, P[], (!!bool)::false\n",
|
"D0, P[], (!!bool)::false\n",
|
||||||
@ -27,6 +51,86 @@ var booleanOperatorScenarios = []expressionScenario{
|
|||||||
"D0, P[], (!!seq)::- {a: bird, b: dog}\n- {a: cat, b: fly}\n",
|
"D0, P[], (!!seq)::- {a: bird, b: dog}\n- {a: cat, b: fly}\n",
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
description: "`any` returns true if any boolean in a given array is true",
|
||||||
|
document: `[false, true]`,
|
||||||
|
expression: "any",
|
||||||
|
expected: []string{
|
||||||
|
"D0, P[], (!!bool)::true\n",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
description: "`any` returns false for an empty array",
|
||||||
|
document: `[]`,
|
||||||
|
expression: "any",
|
||||||
|
expected: []string{
|
||||||
|
"D0, P[], (!!bool)::false\n",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
description: "`any_c` returns true if any element in the array is true for the given condition.",
|
||||||
|
document: "a: [rad, awesome]\nb: [meh, whatever]",
|
||||||
|
expression: `.[] |= any_c(. == "awesome")`,
|
||||||
|
expected: []string{
|
||||||
|
"D0, P[], (doc)::a: true\nb: false\n",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
skipDoc: true,
|
||||||
|
document: `[{pet: cat}]`,
|
||||||
|
expression: `any_c(.name == "harry") as $c`,
|
||||||
|
expected: []string{
|
||||||
|
"D0, P[], (doc)::[{pet: cat}]\n",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
skipDoc: true,
|
||||||
|
document: `[{pet: cat}]`,
|
||||||
|
expression: `all_c(.name == "harry") as $c`,
|
||||||
|
expected: []string{
|
||||||
|
"D0, P[], (doc)::[{pet: cat}]\n",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
skipDoc: true,
|
||||||
|
document: `[false, false]`,
|
||||||
|
expression: "any",
|
||||||
|
expected: []string{
|
||||||
|
"D0, P[], (!!bool)::false\n",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
description: "`all` returns true if all booleans in a given array are true",
|
||||||
|
document: `[true, true]`,
|
||||||
|
expression: "all",
|
||||||
|
expected: []string{
|
||||||
|
"D0, P[], (!!bool)::true\n",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
skipDoc: true,
|
||||||
|
document: `[false, true]`,
|
||||||
|
expression: "all",
|
||||||
|
expected: []string{
|
||||||
|
"D0, P[], (!!bool)::false\n",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
description: "`all` returns true for an empty array",
|
||||||
|
document: `[]`,
|
||||||
|
expression: "all",
|
||||||
|
expected: []string{
|
||||||
|
"D0, P[], (!!bool)::true\n",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
description: "`all_c` returns true if all elements in the array are true for the given condition.",
|
||||||
|
document: "a: [rad, awesome]\nb: [meh, 12]",
|
||||||
|
expression: `.[] |= all_c(tag == "!!str")`,
|
||||||
|
expected: []string{
|
||||||
|
"D0, P[], (doc)::a: true\nb: false\n",
|
||||||
|
},
|
||||||
|
},
|
||||||
{
|
{
|
||||||
skipDoc: true,
|
skipDoc: true,
|
||||||
expression: `false or false`,
|
expression: `false or false`,
|
||||||
@ -45,6 +149,22 @@ var booleanOperatorScenarios = []expressionScenario{
|
|||||||
"D0, P[b], (!!bool)::true\n",
|
"D0, P[b], (!!bool)::true\n",
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
skipDoc: true,
|
||||||
|
document: `{}`,
|
||||||
|
expression: `(.a.b or .c) as $x`,
|
||||||
|
expected: []string{
|
||||||
|
"D0, P[], (doc)::{}\n",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
skipDoc: true,
|
||||||
|
document: `{}`,
|
||||||
|
expression: `(.a.b and .c) as $x`,
|
||||||
|
expected: []string{
|
||||||
|
"D0, P[], (doc)::{}\n",
|
||||||
|
},
|
||||||
|
},
|
||||||
{
|
{
|
||||||
description: "Not true is false",
|
description: "Not true is false",
|
||||||
expression: `true | not`,
|
expression: `true | not`,
|
||||||
|
@ -6,21 +6,21 @@ import (
|
|||||||
yaml "gopkg.in/yaml.v3"
|
yaml "gopkg.in/yaml.v3"
|
||||||
)
|
)
|
||||||
|
|
||||||
-func collectOperator(d *dataTreeNavigator, matchMap *list.List, expressionNode *ExpressionNode) (*list.List, error) {
+func collectOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
    log.Debugf("-- collectOperation")

-    if matchMap.Len() == 0 {
+    if context.MatchingNodes.Len() == 0 {
        node := &yaml.Node{Kind: yaml.SequenceNode, Tag: "!!seq", Value: "[]"}
        candidate := &CandidateNode{Node: node}
-        return nodeToMap(candidate), nil
+        return context.SingleChildContext(candidate), nil
    }

    var results = list.New()

    node := &yaml.Node{Kind: yaml.SequenceNode, Tag: "!!seq"}
    var collectC *CandidateNode
-    if matchMap.Front() != nil {
+    if context.MatchingNodes.Front() != nil {
-        collectC = matchMap.Front().Value.(*CandidateNode).CreateChild(nil, node)
+        collectC = context.MatchingNodes.Front().Value.(*CandidateNode).CreateChild(nil, node)
        if len(collectC.Path) > 0 {
            collectC.Path = collectC.Path[:len(collectC.Path)-1]
        }
@ -28,7 +28,7 @@ func collectOperator(d *dataTreeNavigator, matchMap *list.List, expressionNode *
        collectC = &CandidateNode{Node: node}
    }

-    for el := matchMap.Front(); el != nil; el = el.Next() {
+    for el := context.MatchingNodes.Front(); el != nil; el = el.Next() {
        candidate := el.Value.(*CandidateNode)
        log.Debugf("Collecting %v", NodeToString(candidate))
        node.Content = append(node.Content, unwrapDoc(candidate.Node))
@ -36,5 +36,5 @@ func collectOperator(d *dataTreeNavigator, matchMap *list.List, expressionNode *

    results.PushBack(collectC)

-    return results, nil
+    return context.ChildContext(results), nil
}
|
@ -17,22 +17,25 @@ import (
|
|||||||
...
|
...
|
||||||
*/
|
*/
|
||||||
|
|
||||||
func collectObjectOperator(d *dataTreeNavigator, matchMap *list.List, expressionNode *ExpressionNode) (*list.List, error) {
|
func collectObjectOperator(d *dataTreeNavigator, originalContext Context, expressionNode *ExpressionNode) (Context, error) {
|
||||||
log.Debugf("-- collectObjectOperation")
|
log.Debugf("-- collectObjectOperation")
|
||||||
|
|
||||||
if matchMap.Len() == 0 {
|
context := originalContext.Clone()
|
||||||
|
context.DontAutoCreate = false
|
||||||
|
|
||||||
|
if context.MatchingNodes.Len() == 0 {
|
||||||
node := &yaml.Node{Kind: yaml.MappingNode, Tag: "!!map", Value: "{}"}
|
node := &yaml.Node{Kind: yaml.MappingNode, Tag: "!!map", Value: "{}"}
|
||||||
candidate := &CandidateNode{Node: node}
|
candidate := &CandidateNode{Node: node}
|
||||||
return nodeToMap(candidate), nil
|
return context.SingleChildContext(candidate), nil
|
||||||
}
|
}
|
||||||
first := matchMap.Front().Value.(*CandidateNode)
|
first := context.MatchingNodes.Front().Value.(*CandidateNode)
|
||||||
var rotated []*list.List = make([]*list.List, len(first.Node.Content))
|
var rotated []*list.List = make([]*list.List, len(first.Node.Content))
|
||||||
|
|
||||||
for i := 0; i < len(first.Node.Content); i++ {
|
for i := 0; i < len(first.Node.Content); i++ {
|
||||||
rotated[i] = list.New()
|
rotated[i] = list.New()
|
||||||
}
|
}
|
||||||
|
|
||||||
for el := matchMap.Front(); el != nil; el = el.Next() {
|
for el := context.MatchingNodes.Front(); el != nil; el = el.Next() {
|
||||||
candidateNode := el.Value.(*CandidateNode)
|
candidateNode := el.Value.(*CandidateNode)
|
||||||
for i := 0; i < len(first.Node.Content); i++ {
|
for i := 0; i < len(first.Node.Content); i++ {
|
||||||
rotated[i].PushBack(candidateNode.CreateChild(i, candidateNode.Node.Content[i]))
|
rotated[i].PushBack(candidateNode.CreateChild(i, candidateNode.Node.Content[i]))
|
||||||
@ -41,59 +44,59 @@ func collectObjectOperator(d *dataTreeNavigator, matchMap *list.List, expression
|
|||||||
|
|
||||||
newObject := list.New()
|
newObject := list.New()
|
||||||
for i := 0; i < len(first.Node.Content); i++ {
|
for i := 0; i < len(first.Node.Content); i++ {
|
||||||
additions, err := collect(d, list.New(), rotated[i])
|
additions, err := collect(d, context.ChildContext(list.New()), rotated[i])
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return Context{}, err
|
||||||
}
|
}
|
||||||
newObject.PushBackList(additions)
|
newObject.PushBackList(additions.MatchingNodes)
|
||||||
}
|
}
|
||||||
|
|
||||||
return newObject, nil
|
return context.ChildContext(newObject), nil
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func collect(d *dataTreeNavigator, aggregate *list.List, remainingMatches *list.List) (*list.List, error) {
|
func collect(d *dataTreeNavigator, context Context, remainingMatches *list.List) (Context, error) {
|
||||||
if remainingMatches.Len() == 0 {
|
if remainingMatches.Len() == 0 {
|
||||||
return aggregate, nil
|
return context, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
candidate := remainingMatches.Remove(remainingMatches.Front()).(*CandidateNode)
|
candidate := remainingMatches.Remove(remainingMatches.Front()).(*CandidateNode)
|
||||||
|
|
||||||
splatted, err := splat(d, nodeToMap(candidate),
|
splatted, err := splat(d, context.SingleChildContext(candidate),
|
||||||
&traversePreferences{FollowAlias: false, IncludeMapKeys: false})
|
traversePreferences{DontFollowAlias: true, IncludeMapKeys: false})
|
||||||
|
|
||||||
for splatEl := splatted.Front(); splatEl != nil; splatEl = splatEl.Next() {
|
for splatEl := splatted.MatchingNodes.Front(); splatEl != nil; splatEl = splatEl.Next() {
|
||||||
splatEl.Value.(*CandidateNode).Path = nil
|
splatEl.Value.(*CandidateNode).Path = nil
|
||||||
}
|
}
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return Context{}, err
|
||||||
}
|
}
|
||||||
|
|
||||||
if aggregate.Len() == 0 {
|
if context.MatchingNodes.Len() == 0 {
|
||||||
return collect(d, splatted, remainingMatches)
|
return collect(d, splatted, remainingMatches)
|
||||||
}
|
}
|
||||||
|
|
||||||
newAgg := list.New()
|
newAgg := list.New()
|
||||||
|
|
||||||
for el := aggregate.Front(); el != nil; el = el.Next() {
|
for el := context.MatchingNodes.Front(); el != nil; el = el.Next() {
|
||||||
aggCandidate := el.Value.(*CandidateNode)
|
aggCandidate := el.Value.(*CandidateNode)
|
||||||
for splatEl := splatted.Front(); splatEl != nil; splatEl = splatEl.Next() {
|
for splatEl := splatted.MatchingNodes.Front(); splatEl != nil; splatEl = splatEl.Next() {
|
||||||
splatCandidate := splatEl.Value.(*CandidateNode)
|
splatCandidate := splatEl.Value.(*CandidateNode)
|
||||||
newCandidate, err := aggCandidate.Copy()
|
newCandidate, err := aggCandidate.Copy()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return Context{}, err
|
||||||
}
|
}
|
||||||
|
|
||||||
newCandidate.Path = nil
|
newCandidate.Path = nil
|
||||||
|
|
||||||
newCandidate, err = multiply(&multiplyPreferences{AppendArrays: false})(d, newCandidate, splatCandidate)
|
newCandidate, err = multiply(multiplyPreferences{AppendArrays: false})(d, context, newCandidate, splatCandidate)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return Context{}, err
|
||||||
}
|
}
|
||||||
newAgg.PushBack(newCandidate)
|
newAgg.PushBack(newCandidate)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return collect(d, newAgg, remainingMatches)
|
return collect(d, context.ChildContext(newAgg), remainingMatches)
|
||||||
|
|
||||||
}
|
}
|
||||||
|
@ -5,6 +5,30 @@ import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
var collectObjectOperatorScenarios = []expressionScenario{
|
var collectObjectOperatorScenarios = []expressionScenario{
|
||||||
|
{
|
||||||
|
skipDoc: true,
|
||||||
|
document: "a: []",
|
||||||
|
expression: `.a += [{"key": "att2", "value": "val2"}]`,
|
||||||
|
expected: []string{
|
||||||
|
"D0, P[], (doc)::a: [{key: att2, value: val2}]\n",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
skipDoc: true,
|
||||||
|
document: "",
|
||||||
|
expression: `.a += {"key": "att2", "value": "val2"}`,
|
||||||
|
expected: []string{
|
||||||
|
"D0, P[], ()::a:\n key: att2\n value: val2\n",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
skipDoc: true,
|
||||||
|
document: "",
|
||||||
|
expression: `.a += [0]`,
|
||||||
|
expected: []string{
|
||||||
|
"D0, P[], ()::a:\n - 0\n",
|
||||||
|
},
|
||||||
|
},
|
||||||
{
|
{
|
||||||
description: `Collect empty object`,
|
description: `Collect empty object`,
|
||||||
document: ``,
|
document: ``,
|
||||||
|
@ -5,6 +5,14 @@ import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
var collectOperatorScenarios = []expressionScenario{
|
var collectOperatorScenarios = []expressionScenario{
|
||||||
|
{
|
||||||
|
skipDoc: true,
|
||||||
|
document: ``,
|
||||||
|
expression: `.a += [0]`,
|
||||||
|
expected: []string{
|
||||||
|
"D0, P[], ()::a:\n - 0\n",
|
||||||
|
},
|
||||||
|
},
|
||||||
{
|
{
|
||||||
description: "Collect empty",
|
description: "Collect empty",
|
||||||
document: ``,
|
document: ``,
|
||||||
|
@@ -13,41 +13,41 @@ type commentOpPreferences struct {
 FootComment bool
 }

-func assignCommentsOperator(d *dataTreeNavigator, matchingNodes *list.List, expressionNode *ExpressionNode) (*list.List, error) {
+func assignCommentsOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {

 log.Debugf("AssignComments operator!")

-lhs, err := d.GetMatchingNodes(matchingNodes, expressionNode.Lhs)
+lhs, err := d.GetMatchingNodes(context, expressionNode.Lhs)

 if err != nil {
-return nil, err
+return Context{}, err
 }

-preferences := expressionNode.Operation.Preferences.(*commentOpPreferences)
+preferences := expressionNode.Operation.Preferences.(commentOpPreferences)

 comment := ""
 if !expressionNode.Operation.UpdateAssign {
-rhs, err := d.GetMatchingNodes(matchingNodes, expressionNode.Rhs)
+rhs, err := d.GetMatchingNodes(context.ReadOnlyClone(), expressionNode.Rhs)
 if err != nil {
-return nil, err
+return Context{}, err
 }

-if rhs.Front() != nil {
+if rhs.MatchingNodes.Front() != nil {
-comment = rhs.Front().Value.(*CandidateNode).Node.Value
+comment = rhs.MatchingNodes.Front().Value.(*CandidateNode).Node.Value
 }
 }

-for el := lhs.Front(); el != nil; el = el.Next() {
+for el := lhs.MatchingNodes.Front(); el != nil; el = el.Next() {
 candidate := el.Value.(*CandidateNode)

 if expressionNode.Operation.UpdateAssign {
-rhs, err := d.GetMatchingNodes(nodeToMap(candidate), expressionNode.Rhs)
+rhs, err := d.GetMatchingNodes(context.SingleReadonlyChildContext(candidate), expressionNode.Rhs)
 if err != nil {
-return nil, err
+return Context{}, err
 }

-if rhs.Front() != nil {
+if rhs.MatchingNodes.Front() != nil {
-comment = rhs.Front().Value.(*CandidateNode).Node.Value
+comment = rhs.MatchingNodes.Front().Value.(*CandidateNode).Node.Value
 }
 }

@@ -63,15 +63,15 @@ func assignCommentsOperator(d *dataTreeNavigator, matchingNodes *list.List, expr
 }

 }
-return matchingNodes, nil
+return context, nil
 }

-func getCommentsOperator(d *dataTreeNavigator, matchingNodes *list.List, expressionNode *ExpressionNode) (*list.List, error) {
+func getCommentsOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
-preferences := expressionNode.Operation.Preferences.(*commentOpPreferences)
+preferences := expressionNode.Operation.Preferences.(commentOpPreferences)
 log.Debugf("GetComments operator!")
 var results = list.New()

-for el := matchingNodes.Front(); el != nil; el = el.Next() {
+for el := context.MatchingNodes.Front(); el != nil; el = el.Next() {
 candidate := el.Value.(*CandidateNode)
 comment := ""
 if preferences.LineComment {
@@ -87,5 +87,5 @@ func getCommentsOperator(d *dataTreeNavigator, matchingNodes *list.List, express
 result := candidate.CreateChild(nil, node)
 results.PushBack(result)
 }
-return results, nil
+return context.ChildContext(results), nil
 }
@@ -62,6 +62,22 @@ var commentOperatorScenarios = []expressionScenario{
 "D0, P[], (doc)::a: cat\n\n# cat\n",
 },
 },
+{
+skipDoc: true,
+document: `a: cat`,
+expression: `. footComment=.b.d`,
+expected: []string{
+"D0, P[], (doc)::a: cat\n",
+},
+},
+{
+skipDoc: true,
+document: `a: cat`,
+expression: `. footComment|=.b.d`,
+expected: []string{
+"D0, P[], (doc)::a: cat\n",
+},
+},
 {
 description: "Remove comment",
 document: "a: cat # comment\nb: dog # leave this",
@@ -71,11 +87,12 @@ var commentOperatorScenarios = []expressionScenario{
 },
 },
 {
-description: "Remove all comments",
+description: "Remove (strip) all comments",
-document: "# hi\n\na: cat # comment\n\n# great\n",
-expression: `.. comments=""`,
+subdescription: "Note the use of `...` to ensure key nodes are included.",
+document: "# hi\n\na: cat # comment\n\n# great\n\nb: # key comment",
+expression: `... comments=""`,
 expected: []string{
-"D0, P[], (!!map)::a: cat\n",
+"D0, P[], (!!map)::a: cat\nb:\n",
 },
 },
 {
@@ -6,7 +6,7 @@ import (
 "gopkg.in/yaml.v3"
 )

-func createMapOperator(d *dataTreeNavigator, matchingNodes *list.List, expressionNode *ExpressionNode) (*list.List, error) {
+func createMapOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
 log.Debugf("-- createMapOperation")

 //each matchingNodes entry should turn into a sequence of keys to create.
@@ -18,29 +18,29 @@ func createMapOperator(d *dataTreeNavigator, matchingNodes *list.List, expressio

 sequences := list.New()

-if matchingNodes.Len() > 0 {
+if context.MatchingNodes.Len() > 0 {

-for matchingNodeEl := matchingNodes.Front(); matchingNodeEl != nil; matchingNodeEl = matchingNodeEl.Next() {
+for matchingNodeEl := context.MatchingNodes.Front(); matchingNodeEl != nil; matchingNodeEl = matchingNodeEl.Next() {
 matchingNode := matchingNodeEl.Value.(*CandidateNode)
-sequenceNode, err := sequenceFor(d, matchingNode, expressionNode)
+sequenceNode, err := sequenceFor(d, context, matchingNode, expressionNode)
 if err != nil {
-return nil, err
+return Context{}, err
 }
 sequences.PushBack(sequenceNode)
 }
 } else {
-sequenceNode, err := sequenceFor(d, nil, expressionNode)
+sequenceNode, err := sequenceFor(d, context, nil, expressionNode)
 if err != nil {
-return nil, err
+return Context{}, err
 }
 sequences.PushBack(sequenceNode)
 }

-return nodeToMap(&CandidateNode{Node: listToNodeSeq(sequences), Document: document, Path: path}), nil
+return context.SingleChildContext(&CandidateNode{Node: listToNodeSeq(sequences), Document: document, Path: path}), nil

 }

-func sequenceFor(d *dataTreeNavigator, matchingNode *CandidateNode, expressionNode *ExpressionNode) (*CandidateNode, error) {
+func sequenceFor(d *dataTreeNavigator, context Context, matchingNode *CandidateNode, expressionNode *ExpressionNode) (*CandidateNode, error) {
 var path []interface{}
 var document uint = 0
 var matches = list.New()
@@ -48,11 +48,11 @@ func sequenceFor(d *dataTreeNavigator, matchingNode *CandidateNode, expressionNo
 if matchingNode != nil {
 path = matchingNode.Path
 document = matchingNode.Document
-matches = nodeToMap(matchingNode)
+matches.PushBack(matchingNode)
 }

-mapPairs, err := crossFunction(d, matches, expressionNode,
+mapPairs, err := crossFunction(d, context.ChildContext(matches), expressionNode,
-func(d *dataTreeNavigator, lhs *CandidateNode, rhs *CandidateNode) (*CandidateNode, error) {
+func(d *dataTreeNavigator, context Context, lhs *CandidateNode, rhs *CandidateNode) (*CandidateNode, error) {
 node := yaml.Node{Kind: yaml.MappingNode, Tag: "!!map"}
 log.Debugf("LHS:", NodeToString(lhs))
 log.Debugf("RHS:", NodeToString(rhs))
@@ -62,12 +62,12 @@ func sequenceFor(d *dataTreeNavigator, matchingNode *CandidateNode, expressionNo
 }

 return &CandidateNode{Node: &node, Document: document, Path: path}, nil
-})
+}, false)

 if err != nil {
 return nil, err
 }
-innerList := listToNodeSeq(mapPairs)
+innerList := listToNodeSeq(mapPairs.MatchingNodes)
 innerList.Style = yaml.FlowStyle
 return &CandidateNode{Node: innerList, Document: document, Path: path}, nil
 }
@@ -1,65 +1,40 @@
 package yqlib

 import (
-"container/list"
 "fmt"

 yaml "gopkg.in/yaml.v3"
 )

-func deleteChildOperator(d *dataTreeNavigator, matchingNodes *list.List, expressionNode *ExpressionNode) (*list.List, error) {
+func deleteChildOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
-nodesToDelete, err := d.GetMatchingNodes(matchingNodes, expressionNode.Rhs)
+nodesToDelete, err := d.GetMatchingNodes(context.ReadOnlyClone(), expressionNode.Rhs)

 if err != nil {
-return nil, err
+return Context{}, err
 }
+//need to iterate backwards to ensure correct indices when deleting multiple
-for el := nodesToDelete.Front(); el != nil; el = el.Next() {
+for el := nodesToDelete.MatchingNodes.Back(); el != nil; el = el.Prev() {
 candidate := el.Value.(*CandidateNode)

-deleteImmediateChildOp := &Operation{
-OperationType: deleteImmediateChildOpType,
-Value: candidate.Path[len(candidate.Path)-1],
+//problem: context may already be '.a' and then I pass in '.a.a2'.
+// should pass in .a2.
+if candidate.Parent == nil {
+log.Info("Could not find parent of %v", candidate.GetKey())
+return context, nil
 }

-deleteImmediateChildOpNode := &ExpressionNode{
-Operation: deleteImmediateChildOp,
-Rhs: createTraversalTree(candidate.Path[0 : len(candidate.Path)-1]),
-}
-
-_, err := d.GetMatchingNodes(matchingNodes, deleteImmediateChildOpNode)
-if err != nil {
-return nil, err
-}
-}
-return matchingNodes, nil
-}
-
-func deleteImmediateChildOperator(d *dataTreeNavigator, matchingNodes *list.List, expressionNode *ExpressionNode) (*list.List, error) {
-parents, err := d.GetMatchingNodes(matchingNodes, expressionNode.Rhs)
-
-if err != nil {
-return nil, err
-}
-
-childPath := expressionNode.Operation.Value
-
-log.Debug("childPath to remove %v", childPath)
-
-for el := parents.Front(); el != nil; el = el.Next() {
-parent := el.Value.(*CandidateNode)
-parentNode := unwrapDoc(parent.Node)
+parentNode := candidate.Parent.Node
+childPath := candidate.Path[len(candidate.Path)-1]

 if parentNode.Kind == yaml.MappingNode {
-deleteFromMap(parent, childPath)
+deleteFromMap(candidate.Parent, childPath)
 } else if parentNode.Kind == yaml.SequenceNode {
-deleteFromArray(parent, childPath)
+deleteFromArray(candidate.Parent, childPath)
 } else {
-return nil, fmt.Errorf("Cannot delete nodes from parent of tag %v", parentNode.Tag)
+return Context{}, fmt.Errorf("Cannot delete nodes from parent of tag %v", parentNode.Tag)
 }

 }
-return matchingNodes, nil
+return context, nil
 }

 func deleteFromMap(candidate *CandidateNode, childPath interface{}) {
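A brief aside on the `//need to iterate backwards` comment above: when several matches are removed from the same sequence, deleting an earlier element shifts every later index, so walking the matches back-to-front keeps the remaining indices valid. A minimal, self-contained sketch of the idea in plain Go (plain slices rather than yq's CandidateNode types; the helper name is made up for illustration), mirroring the `del(.a[] | select(. == 10))` scenario in the test file below:

```go
package main

import "fmt"

// deleteIndices removes the given positions from a slice. toDelete is assumed
// to be sorted ascending; walking it in reverse means each removal only
// shifts elements that have already been handled.
func deleteIndices(values []string, toDelete []int) []string {
	for i := len(toDelete) - 1; i >= 0; i-- {
		idx := toDelete[i]
		values = append(values[:idx], values[idx+1:]...)
	}
	return values
}

func main() {
	fmt.Println(deleteIndices([]string{"10", "x", "10", "x"}, []int{0, 2}))
	// Output: [x x]
}
```

Deleting index 0 first and then the original index 2 while walking forwards would remove the wrong element instead.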
@@ -21,6 +21,70 @@ var deleteOperatorScenarios = []expressionScenario{
 "D0, P[], (doc)::{a: {a2: frood}}\n",
 },
 },
+{
+skipDoc: true,
+document: `{a: {a1: fred, a2: frood}}`,
+expression: `.a | del(.a1)`,
+expected: []string{
+"D0, P[a], (!!map)::{a2: frood}\n",
+},
+},
+{
+skipDoc: true,
+document: `a: [1,2,3]`,
+expression: `.a | del(.[1])`,
+expected: []string{
+"D0, P[a], (!!seq)::[1, 3]\n",
+},
+},
+{
+skipDoc: true,
+document: `[0, {a: cat, b: dog}]`,
+expression: `.[1] | del(.a)`,
+expected: []string{
+"D0, P[1], (!!map)::{b: dog}\n",
+},
+},
+{
+skipDoc: true,
+document: `[{a: cat, b: dog}]`,
+expression: `.[0] | del(.a)`,
+expected: []string{
+"D0, P[0], (!!map)::{b: dog}\n",
+},
+},
+{
+skipDoc: true,
+document: `[{a: {b: thing, c: frog}}]`,
+expression: `.[0].a | del(.b)`,
+expected: []string{
+"D0, P[0 a], (!!map)::{c: frog}\n",
+},
+},
+{
+skipDoc: true,
+document: `[{a: {b: thing, c: frog}}]`,
+expression: `.[0] | del(.a.b)`,
+expected: []string{
+"D0, P[0], (!!map)::{a: {c: frog}}\n",
+},
+},
+{
+skipDoc: true,
+document: `{a: [0, {b: thing, c: frog}]}`,
+expression: `.a[1] | del(.b)`,
+expected: []string{
+"D0, P[a 1], (!!map)::{c: frog}\n",
+},
+},
+{
+skipDoc: true,
+document: `{a: [0, {b: thing, c: frog}]}`,
+expression: `.a | del(.[1].b)`,
+expected: []string{
+"D0, P[a], (!!seq)::[0, {c: frog}]\n",
+},
+},
 {
 skipDoc: true,
 document: `{a: {a1: fred, a2: frood}}`,
@@ -37,6 +101,38 @@ var deleteOperatorScenarios = []expressionScenario{
 "D0, P[], (doc)::[1, 3]\n",
 },
 },
+{
+skipDoc: true,
+document: `a: [1,2,3]`,
+expression: `del(.a[])`,
+expected: []string{
+"D0, P[], (doc)::a: []\n",
+},
+},
+{
+skipDoc: true,
+document: `a: [10,x,10, 10, x, 10]`,
+expression: `del(.a[] | select(. == 10))`,
+expected: []string{
+"D0, P[], (doc)::a: [x, x]\n",
+},
+},
+{
+skipDoc: true,
+document: `a: {thing1: yep, thing2: cool, thing3: hi, b: {thing1: cool, great: huh}}`,
+expression: `del(..)`,
+expected: []string{
+"D0, P[], (!!map)::{}\n",
+},
+},
+{
+skipDoc: true,
+document: `a: {thing1: yep, thing2: cool, thing3: hi, b: {thing1: cool, great: huh}}`,
+expression: `del(.. | select(tag == "!!map") | (.b.thing1,.thing2))`,
+expected: []string{
+"D0, P[], (!!map)::a: {thing1: yep, thing3: hi, b: {great: huh}}\n",
+},
+},
 {
 description: "Delete nested entry in array",
 document: `[{a: cat, b: dog}]`,
@@ -7,14 +7,14 @@ import (
 "gopkg.in/yaml.v3"
 )

-func getDocumentIndexOperator(d *dataTreeNavigator, matchingNodes *list.List, expressionNode *ExpressionNode) (*list.List, error) {
+func getDocumentIndexOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
 var results = list.New()

-for el := matchingNodes.Front(); el != nil; el = el.Next() {
+for el := context.MatchingNodes.Front(); el != nil; el = el.Next() {
 candidate := el.Value.(*CandidateNode)
 node := &yaml.Node{Kind: yaml.ScalarNode, Value: fmt.Sprintf("%v", candidate.Document), Tag: "!!int"}
 scalar := candidate.CreateChild(nil, node)
 results.PushBack(scalar)
 }
-return results, nil
+return context.ChildContext(results), nil
 }
pkg/yqlib/operator_entries.go (new file, 162 lines)
@@ -0,0 +1,162 @@
+package yqlib
+
+import (
+"container/list"
+"fmt"
+
+yaml "gopkg.in/yaml.v3"
+)
+
+func entrySeqFor(key *yaml.Node, value *yaml.Node) *yaml.Node {
+var keyKey = &yaml.Node{Kind: yaml.ScalarNode, Tag: "!!str", Value: "key"}
+var valueKey = &yaml.Node{Kind: yaml.ScalarNode, Tag: "!!str", Value: "value"}
+
+return &yaml.Node{
+Kind: yaml.MappingNode,
+Tag: "!!map",
+Content: []*yaml.Node{keyKey, key, valueKey, value},
+}
+}
+
+func toEntriesFromMap(candidateNode *CandidateNode) *CandidateNode {
+var sequence = &yaml.Node{Kind: yaml.SequenceNode, Tag: "!!seq"}
+var entriesNode = candidateNode.CreateChild(nil, sequence)
+
+var contents = unwrapDoc(candidateNode.Node).Content
+for index := 0; index < len(contents); index = index + 2 {
+key := contents[index]
+value := contents[index+1]
+
+sequence.Content = append(sequence.Content, entrySeqFor(key, value))
+}
+return entriesNode
+}
+
+func toEntriesfromSeq(candidateNode *CandidateNode) *CandidateNode {
+var sequence = &yaml.Node{Kind: yaml.SequenceNode, Tag: "!!seq"}
+var entriesNode = candidateNode.CreateChild(nil, sequence)
+
+var contents = unwrapDoc(candidateNode.Node).Content
+for index := 0; index < len(contents); index = index + 1 {
+key := &yaml.Node{Kind: yaml.ScalarNode, Tag: "!!int", Value: fmt.Sprintf("%v", index)}
+value := contents[index]
+
+sequence.Content = append(sequence.Content, entrySeqFor(key, value))
+}
+return entriesNode
+}
+
+func toEntriesOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
+var results = list.New()
+for el := context.MatchingNodes.Front(); el != nil; el = el.Next() {
+candidate := el.Value.(*CandidateNode)
+candidateNode := unwrapDoc(candidate.Node)
+
+switch candidateNode.Kind {
+case yaml.MappingNode:
+results.PushBack(toEntriesFromMap(candidate))
+
+case yaml.SequenceNode:
+results.PushBack(toEntriesfromSeq(candidate))
+default:
+if candidateNode.Tag != "!!null" {
+return Context{}, fmt.Errorf("%v has no keys", candidate.Node.Tag)
+}
+}
+}
+
+return context.ChildContext(results), nil
+}
+
+func parseEntry(d *dataTreeNavigator, entry *yaml.Node, position int) (*yaml.Node, *yaml.Node, error) {
+prefs := traversePreferences{DontAutoCreate: true}
+candidateNode := &CandidateNode{Node: entry}
+
+keyResults, err := traverseMap(Context{}, candidateNode, "key", prefs, false)
+
+if err != nil {
+return nil, nil, err
+} else if keyResults.Len() != 1 {
+return nil, nil, fmt.Errorf("Expected to find one 'key' entry but found %v in position %v", keyResults.Len(), position)
+}
+
+valueResults, err := traverseMap(Context{}, candidateNode, "value", prefs, false)
+
+if err != nil {
+return nil, nil, err
+} else if valueResults.Len() != 1 {
+return nil, nil, fmt.Errorf("Expected to find one 'value' entry but found %v in position %v", valueResults.Len(), position)
+}
+
+return keyResults.Front().Value.(*CandidateNode).Node, valueResults.Front().Value.(*CandidateNode).Node, nil
+
+}
+
+func fromEntries(d *dataTreeNavigator, candidateNode *CandidateNode) (*CandidateNode, error) {
+var node = &yaml.Node{Kind: yaml.MappingNode, Tag: "!!map"}
+var mapCandidateNode = candidateNode.CreateChild(nil, node)
+
+var contents = unwrapDoc(candidateNode.Node).Content
+
+for index := 0; index < len(contents); index = index + 1 {
+key, value, err := parseEntry(d, contents[index], index)
+if err != nil {
+return nil, err
+}
+
+node.Content = append(node.Content, key, value)
+}
+return mapCandidateNode, nil
+}
+
+func fromEntriesOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
+var results = list.New()
+for el := context.MatchingNodes.Front(); el != nil; el = el.Next() {
+candidate := el.Value.(*CandidateNode)
+candidateNode := unwrapDoc(candidate.Node)
+
+switch candidateNode.Kind {
+case yaml.SequenceNode:
+mapResult, err := fromEntries(d, candidate)
+if err != nil {
+return Context{}, err
+}
+results.PushBack(mapResult)
+default:
+return Context{}, fmt.Errorf("from entries only runs against arrays")
+}
+}
+
+return context.ChildContext(results), nil
+}
+
+func withEntriesOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
+
+//to_entries on the context
+toEntries, err := toEntriesOperator(d, context, expressionNode)
+if err != nil {
+return Context{}, err
+}
+
+//run expression against entries
+// splat toEntries and pipe it into Rhs
+splatted, err := splat(d, toEntries, traversePreferences{})
+if err != nil {
+return Context{}, err
+}
+
+result, err := d.GetMatchingNodes(splatted, expressionNode.Rhs)
+log.Debug("expressionNode.Rhs %v", expressionNode.Rhs.Operation.OperationType)
+log.Debug("result %v", result)
+if err != nil {
+return Context{}, err
+}
+
+collected, err := collectOperator(d, result, expressionNode)
+if err != nil {
+return Context{}, err
+}
+
+//from_entries on the result
+return fromEntriesOperator(d, collected, expressionNode)
+}
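The step-by-two loop in `toEntriesFromMap` relies on how gopkg.in/yaml.v3 lays out mapping nodes: keys and values sit interleaved in `Content`. A small standalone sketch of that layout, using only the yaml.v3 dependency already imported above (illustrative only, not yqlib code):

```go
package main

import (
	"fmt"

	yaml "gopkg.in/yaml.v3"
)

func main() {
	var doc yaml.Node
	if err := yaml.Unmarshal([]byte("a: 1\nb: 2\n"), &doc); err != nil {
		panic(err)
	}
	// Decoding into a yaml.Node yields a document node; unwrap it to reach
	// the mapping, the same job unwrapDoc does inside yqlib.
	mapping := &doc
	if mapping.Kind == yaml.DocumentNode {
		mapping = mapping.Content[0]
	}
	// Content holds [key0, value0, key1, value1, ...], hence index += 2.
	for i := 0; i < len(mapping.Content); i += 2 {
		fmt.Printf("key=%s value=%s\n", mapping.Content[i].Value, mapping.Content[i+1].Value)
	}
	// key=a value=1
	// key=b value=2
}
```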
pkg/yqlib/operator_entries_test.go (new file, 62 lines)
@@ -0,0 +1,62 @@
+package yqlib
+
+import (
+"testing"
+)
+
+var entriesOperatorScenarios = []expressionScenario{
+{
+description: "to_entries Map",
+document: `{a: 1, b: 2}`,
+expression: `to_entries`,
+expected: []string{
+"D0, P[], (!!seq)::- key: a\n value: 1\n- key: b\n value: 2\n",
+},
+},
+{
+description: "to_entries Array",
+document: `[a, b]`,
+expression: `to_entries`,
+expected: []string{
+"D0, P[], (!!seq)::- key: 0\n value: a\n- key: 1\n value: b\n",
+},
+},
+{
+description: "to_entries null",
+document: `null`,
+expression: `to_entries`,
+expected: []string{},
+},
+{
+description: "from_entries map",
+document: `{a: 1, b: 2}`,
+expression: `to_entries | from_entries`,
+expected: []string{
+"D0, P[], (!!map)::a: 1\nb: 2\n",
+},
+},
+{
+description: "from_entries with numeric key indexes",
+subdescription: "from_entries always creates a map, even for numeric keys",
+document: `[a,b]`,
+expression: `to_entries | from_entries`,
+expected: []string{
+"D0, P[], (!!map)::0: a\n1: b\n",
+},
+},
+{
+description: "Use with_entries to update keys",
+document: `{a: 1, b: 2}`,
+expression: `with_entries(.key |= "KEY_" + .)`,
+expected: []string{
+"D0, P[], (!!map)::KEY_a: 1\nKEY_b: 2\n",
+},
+},
+}
+
+func TestEntriesOperatorScenarios(t *testing.T) {
+for _, tt := range entriesOperatorScenarios {
+testScenario(t, &tt)
+}
+documentScenarios(t, "Entries", entriesOperatorScenarios)
+}
@@ -1,7 +1,6 @@
 package yqlib

 import (
-"container/list"
 "fmt"
 "os"
 "strings"
@@ -13,13 +12,13 @@ type envOpPreferences struct {
 StringValue bool
 }

-func envOperator(d *dataTreeNavigator, matchMap *list.List, expressionNode *ExpressionNode) (*list.List, error) {
+func envOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
 envName := expressionNode.Operation.CandidateNode.Node.Value
 log.Debug("EnvOperator, env name:", envName)

 rawValue := os.Getenv(envName)

-preferences := expressionNode.Operation.Preferences.(*envOpPreferences)
+preferences := expressionNode.Operation.Preferences.(envOpPreferences)

 var node *yaml.Node
 if preferences.StringValue {
@@ -29,13 +28,13 @@ func envOperator(d *dataTreeNavigator, matchMap *list.List, expressionNode *Expr
 Value: rawValue,
 }
 } else if rawValue == "" {
-return nil, fmt.Errorf("Value for env variable '%v' not provided in env()", envName)
+return Context{}, fmt.Errorf("Value for env variable '%v' not provided in env()", envName)
 } else {
 var dataBucket yaml.Node
 decoder := yaml.NewDecoder(strings.NewReader(rawValue))
 errorReading := decoder.Decode(&dataBucket)
 if errorReading != nil {
-return nil, errorReading
+return Context{}, errorReading
 }
 //first node is a doc
 node = unwrapDoc(&dataBucket)
@@ -46,5 +45,5 @@ func envOperator(d *dataTreeNavigator, matchMap *list.List, expressionNode *Expr

 target := &CandidateNode{Node: node}

-return nodeToMap(target), nil
+return context.SingleChildContext(target), nil
 }
@@ -1,25 +1,54 @@
 package yqlib

-import (
+import "gopkg.in/yaml.v3"
-"container/list"
-)

-func equalsOperator(d *dataTreeNavigator, matchingNodes *list.List, expressionNode *ExpressionNode) (*list.List, error) {
+func equalsOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
 log.Debugf("-- equalsOperation")
-return crossFunction(d, matchingNodes, expressionNode, isEquals)
+return crossFunction(d, context.ReadOnlyClone(), expressionNode, isEquals(false), true)
 }

-func isEquals(d *dataTreeNavigator, lhs *CandidateNode, rhs *CandidateNode) (*CandidateNode, error) {
+func isEquals(flip bool) func(d *dataTreeNavigator, context Context, lhs *CandidateNode, rhs *CandidateNode) (*CandidateNode, error) {
-value := false
+return func(d *dataTreeNavigator, context Context, lhs *CandidateNode, rhs *CandidateNode) (*CandidateNode, error) {
+value := false
+log.Debugf("-- isEquals cross function")
+if lhs == nil && rhs == nil {
+owner := &CandidateNode{}
+return createBooleanCandidate(owner, !flip), nil
+} else if lhs == nil {
+log.Debugf("lhs nil, but rhs is not")
+rhsNode := unwrapDoc(rhs.Node)
+value := rhsNode.Tag == "!!null"
+if flip {
+value = !value
+}
+return createBooleanCandidate(rhs, value), nil
+} else if rhs == nil {
+log.Debugf("lhs not nil, but rhs is")
+lhsNode := unwrapDoc(lhs.Node)
+value := lhsNode.Tag == "!!null"
+if flip {
+value = !value
+}
+return createBooleanCandidate(lhs, value), nil
+}

 lhsNode := unwrapDoc(lhs.Node)
 rhsNode := unwrapDoc(rhs.Node)

 if lhsNode.Tag == "!!null" {
 value = (rhsNode.Tag == "!!null")
-} else {
+} else if lhsNode.Kind == yaml.ScalarNode && rhsNode.Kind == yaml.ScalarNode {
 value = matchKey(lhsNode.Value, rhsNode.Value)
+}
+log.Debugf("%v == %v ? %v", NodeToString(lhs), NodeToString(rhs), value)
+if flip {
+value = !value
+}
+return createBooleanCandidate(lhs, value), nil
 }
-log.Debugf("%v == %v ? %v", NodeToString(lhs), NodeToString(rhs), value)
+}
-return createBooleanCandidate(lhs, value), nil
+
+func notEqualsOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
+log.Debugf("-- notEqualsOperator")
+return crossFunction(d, context.ReadOnlyClone(), expressionNode, isEquals(true), true)
 }
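For context on the `isEquals(flip)` refactor above: a single comparison now backs both `==` and `!=` (the `flip` argument negates the result), and a missing operand is treated as matching null rather than causing a nil dereference. A rough, hedged sketch of that decision rule in plain Go, simplified down to booleans and not the yqlib signatures themselves:

```go
package main

import "fmt"

// compare sketches the rule only: null (or missing) sides are equal to each
// other, non-null scalars fall back to a key match, and flip turns the
// equality check into its negation for the != operator.
func compare(lhsIsNull, rhsIsNull, scalarsMatch, flip bool) bool {
	value := scalarsMatch
	if lhsIsNull || rhsIsNull {
		value = lhsIsNull && rhsIsNull
	}
	if flip {
		return !value
	}
	return value
}

func main() {
	fmt.Println(compare(true, true, false, false)) // .a == .b on an empty doc -> true
	fmt.Println(compare(false, true, false, true)) // value != missing key -> true
}
```

This matches the new test scenarios below where two non-existent keys compare as equal.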
@@ -5,6 +5,13 @@ import (
 )

 var equalsOperatorScenarios = []expressionScenario{
+{
+skipDoc: true,
+expression: ".a == .b",
+expected: []string{
+"D0, P[], (!!bool)::true\n",
+},
+},
 {
 skipDoc: true,
 document: "cat",
@@ -14,6 +21,68 @@ var equalsOperatorScenarios = []expressionScenario{
 "D0, P[], (!!bool)::false\n",
 },
 },
+{
+skipDoc: true,
+document: "{}",
+expression: "(.a == .b) as $x",
+expected: []string{
+"D0, P[], (doc)::{}\n",
+},
+},
+{
+skipDoc: true,
+document: "{}",
+expression: ".a == .b",
+expected: []string{
+"D0, P[], (!!bool)::true\n",
+},
+},
+{
+skipDoc: true,
+document: "{}",
+expression: "(.a != .b) as $x",
+expected: []string{
+"D0, P[], (doc)::{}\n",
+},
+},
+{
+skipDoc: true,
+document: "{}",
+expression: ".a != .b",
+expected: []string{
+"D0, P[], (!!bool)::false\n",
+},
+},
+{
+skipDoc: true,
+document: "{a: {b: 10}}",
+expression: "select(.c != null)",
+expected: []string{},
+},
+{
+skipDoc: true,
+document: "{a: {b: 10}}",
+expression: "select(.d == .c)",
+expected: []string{
+"D0, P[], (doc)::{a: {b: 10}}\n",
+},
+},
+{
+skipDoc: true,
+document: "{a: {b: 10}}",
+expression: "select(null == .c)",
+expected: []string{
+"D0, P[], (doc)::{a: {b: 10}}\n",
+},
+},
+{
+skipDoc: true,
+document: "{a: { b: {things: \"\"}, f: [1], g: [] }}",
+expression: ".. | select(. == \"\")",
+expected: []string{
+"D0, P[a b things], (!!str)::\"\"\n",
+},
+},
 {
 description: "Match string",
 document: `[cat,goat,dog]`,
@@ -23,7 +92,18 @@ var equalsOperatorScenarios = []expressionScenario{
 "D0, P[1], (!!bool)::true\n",
 "D0, P[2], (!!bool)::false\n",
 },
-}, {
+},
+{
+description: "Don't match string",
+document: `[cat,goat,dog]`,
+expression: `.[] | (. != "*at")`,
+expected: []string{
+"D0, P[0], (!!bool)::false\n",
+"D0, P[1], (!!bool)::false\n",
+"D0, P[2], (!!bool)::true\n",
+},
+},
+{
 description: "Match number",
 document: `[3, 4, 5]`,
 expression: `.[] | (. == 4)`,
@@ -32,7 +112,18 @@ var equalsOperatorScenarios = []expressionScenario{
 "D0, P[1], (!!bool)::true\n",
 "D0, P[2], (!!bool)::false\n",
 },
-}, {
+},
+{
+description: "Dont match number",
+document: `[3, 4, 5]`,
+expression: `.[] | (. != 4)`,
+expected: []string{
+"D0, P[0], (!!bool)::true\n",
+"D0, P[1], (!!bool)::false\n",
+"D0, P[2], (!!bool)::true\n",
+},
+},
+{
 skipDoc: true,
 document: `a: { cat: {b: apple, c: whatever}, pat: {b: banana} }`,
 expression: `.a | (.[].b == "apple")`,
@@ -57,6 +148,22 @@ var equalsOperatorScenarios = []expressionScenario{
 "D0, P[], (!!bool)::true\n",
 },
 },
+{
+description: "Non exisitant key doesn't equal a value",
+document: "a: frog",
+expression: `select(.b != "thing")`,
+expected: []string{
+"D0, P[], (doc)::a: frog\n",
+},
+},
+{
+description: "Two non existant keys are equal",
+document: "a: frog",
+expression: `select(.b == .c)`,
+expected: []string{
+"D0, P[], (doc)::a: frog\n",
+},
+},
 }

 func TestEqualOperatorScenarios(t *testing.T) {
@@ -7,32 +7,32 @@ import (
 yaml "gopkg.in/yaml.v3"
 )

-func getFilenameOperator(d *dataTreeNavigator, matchingNodes *list.List, expressionNode *ExpressionNode) (*list.List, error) {
+func getFilenameOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
 log.Debugf("GetFilename")

 var results = list.New()

-for el := matchingNodes.Front(); el != nil; el = el.Next() {
+for el := context.MatchingNodes.Front(); el != nil; el = el.Next() {
 candidate := el.Value.(*CandidateNode)
 node := &yaml.Node{Kind: yaml.ScalarNode, Value: candidate.Filename, Tag: "!!str"}
 result := candidate.CreateChild(nil, node)
 results.PushBack(result)
 }

-return results, nil
+return context.ChildContext(results), nil
 }

-func getFileIndexOperator(d *dataTreeNavigator, matchingNodes *list.List, expressionNode *ExpressionNode) (*list.List, error) {
+func getFileIndexOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
 log.Debugf("GetFileIndex")

 var results = list.New()

-for el := matchingNodes.Front(); el != nil; el = el.Next() {
+for el := context.MatchingNodes.Front(); el != nil; el = el.Next() {
 candidate := el.Value.(*CandidateNode)
 node := &yaml.Node{Kind: yaml.ScalarNode, Value: fmt.Sprintf("%v", candidate.FileIndex), Tag: "!!int"}
 result := candidate.CreateChild(nil, node)
 results.PushBack(result)
 }

-return results, nil
+return context.ChildContext(results), nil
 }
@@ -21,6 +21,16 @@ var fileOperatorScenarios = []expressionScenario{
 "D0, P[], (!!int)::0\n",
 },
 },
+{
+description: "Get file indices of multiple documents",
+document: `{a: cat}`,
+document2: `{a: cat}`,
+expression: `fileIndex`,
+expected: []string{
+"D0, P[], (!!int)::0\n",
+"D0, P[], (!!int)::1\n",
+},
+},
 {
 description: "Get file index alias",
 document: `{a: cat}`,
@@ -7,20 +7,25 @@ import (
 yaml "gopkg.in/yaml.v3"
 )

-func hasOperator(d *dataTreeNavigator, matchingNodes *list.List, expressionNode *ExpressionNode) (*list.List, error) {
+func hasOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {

 log.Debugf("-- hasOperation")
 var results = list.New()

-rhs, err := d.GetMatchingNodes(matchingNodes, expressionNode.Rhs)
+rhs, err := d.GetMatchingNodes(context.ReadOnlyClone(), expressionNode.Rhs)
-wanted := rhs.Front().Value.(*CandidateNode).Node
-wantedKey := wanted.Value

 if err != nil {
-return nil, err
+return Context{}, err
 }

-for el := matchingNodes.Front(); el != nil; el = el.Next() {
+wantedKey := "null"
+wanted := &yaml.Node{Tag: "!!null"}
+if rhs.MatchingNodes.Len() != 0 {
+wanted = rhs.MatchingNodes.Front().Value.(*CandidateNode).Node
+wantedKey = wanted.Value
+}
+
+for el := context.MatchingNodes.Front(); el != nil; el = el.Next() {
 candidate := el.Value.(*CandidateNode)

 // grab the first value
@@ -41,7 +46,7 @@ func hasOperator(d *dataTreeNavigator, matchingNodes *list.List, expressionNode
 if wanted.Tag == "!!int" {
 var number, errParsingInt = strconv.ParseInt(wantedKey, 10, 64) // nolint
 if errParsingInt != nil {
-return nil, errParsingInt
+return Context{}, errParsingInt
 }
 candidateHasKey = int64(len(contents)) > number
 }
@@ -50,5 +55,5 @@ func hasOperator(d *dataTreeNavigator, matchingNodes *list.List, expressionNode
 results.PushBack(createBooleanCandidate(candidate, false))
 }
 }
-return results, nil
+return context.ChildContext(results), nil
 }
@@ -13,6 +13,22 @@ var hasOperatorScenarios = []expressionScenario{
 "D0, P[], (!!bool)::true\n",
 },
 },
+{
+skipDoc: true,
+document: `a: hello`,
+expression: `has(.b) as $c`,
+expected: []string{
+"D0, P[], (doc)::a: hello\n",
+},
+},
+{
+skipDoc: true,
+document: `a: hello`,
+expression: `has(.b)`,
+expected: []string{
+"D0, P[], (!!bool)::false\n",
+},
+},
 {
 description: "Has map key",
 document: `- a: "yes"
@@ -28,6 +44,15 @@ var hasOperatorScenarios = []expressionScenario{
 "D0, P[3], (!!bool)::false\n",
 },
 },
+{
+description: "Select, checking for existence of deep paths",
+subdescription: "Simply pipe in parent expressions into `has`",
+document: "- {a: {b: {c: cat}}}\n- {a: {b: {d: dog}}}",
+expression: `.[] | select(.a.b | has("c"))`,
+expected: []string{
+"D0, P[0], (!!map)::{a: {b: {c: cat}}}\n",
+},
+},
 {
 dontFormatInputForDoc: true,
 description: "Has array index",
pkg/yqlib/operator_keys.go (new file, 54 lines)
@@ -0,0 +1,54 @@
+package yqlib
+
+import (
+"container/list"
+"fmt"
+
+"gopkg.in/yaml.v3"
+)
+
+func keysOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
+log.Debugf("-- keysOperator")
+
+var results = list.New()
+
+for el := context.MatchingNodes.Front(); el != nil; el = el.Next() {
+candidate := el.Value.(*CandidateNode)
+node := unwrapDoc(candidate.Node)
+var targetNode *yaml.Node
+if node.Kind == yaml.MappingNode {
+targetNode = getMapKeys(node)
+} else if node.Kind == yaml.SequenceNode {
+targetNode = getIndicies(node)
+} else {
+return Context{}, fmt.Errorf("Cannot get keys of %v, keys only works for maps and arrays", node.Tag)
+}
+
+result := candidate.CreateChild(nil, targetNode)
+results.PushBack(result)
+}
+
+return context.ChildContext(results), nil
+}
+
+func getMapKeys(node *yaml.Node) *yaml.Node {
+contents := make([]*yaml.Node, 0)
+for index := 0; index < len(node.Content); index = index + 2 {
+contents = append(contents, node.Content[index])
+}
+return &yaml.Node{Kind: yaml.SequenceNode, Tag: "!!seq", Content: contents}
+}
+
+func getIndicies(node *yaml.Node) *yaml.Node {
+var contents = make([]*yaml.Node, len(node.Content))
+
+for index := range node.Content {
+contents[index] = &yaml.Node{
+Kind: yaml.ScalarNode,
+Tag: "!!int",
+Value: fmt.Sprintf("%v", index),
+}
+}
+
+return &yaml.Node{Kind: yaml.SequenceNode, Tag: "!!seq", Content: contents}
+}

pkg/yqlib/operator_keys_test.go (new file, 47 lines)
@@ -0,0 +1,47 @@
+package yqlib
+
+import (
+"testing"
+)
+
+var keysOperatorScenarios = []expressionScenario{
+{
+description: "Map keys",
+document: `{dog: woof, cat: meow}`,
+expression: `keys`,
+expected: []string{
+"D0, P[], (!!seq)::- dog\n- cat\n",
+},
+},
+{
+skipDoc: true,
+document: `{}`,
+expression: `keys`,
+expected: []string{
+"D0, P[], (!!seq)::[]\n",
+},
+},
+{
+description: "Array keys",
+document: `[apple, banana]`,
+expression: `keys`,
+expected: []string{
+"D0, P[], (!!seq)::- 0\n- 1\n",
+},
+},
+{
+skipDoc: true,
+document: `[]`,
+expression: `keys`,
+expected: []string{
+"D0, P[], (!!seq)::[]\n",
+},
+},
+}
+
+func TestKeysOperatorScenarios(t *testing.T) {
+for _, tt := range keysOperatorScenarios {
+testScenario(t, &tt)
+}
+documentScenarios(t, "Keys", keysOperatorScenarios)
+}
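Since `getMapKeys` and `getIndicies` are new, here is a tiny standalone illustration of the two cases they distinguish, written against yaml.v3 directly rather than the yqlib API: a map's keys are every second `Content` node, while a sequence's "keys" are simply its positions.

```go
package main

import (
	"fmt"

	yaml "gopkg.in/yaml.v3"
)

// keysOf mirrors the shape of keysOperator's branching for illustration only.
func keysOf(node *yaml.Node) []string {
	var keys []string
	switch node.Kind {
	case yaml.MappingNode:
		for i := 0; i < len(node.Content); i += 2 {
			keys = append(keys, node.Content[i].Value) // keys sit at even offsets
		}
	case yaml.SequenceNode:
		for i := range node.Content {
			keys = append(keys, fmt.Sprintf("%v", i)) // array "keys" are indices
		}
	}
	return keys
}

func main() {
	var doc yaml.Node
	if err := yaml.Unmarshal([]byte("dog: woof\ncat: meow\n"), &doc); err != nil {
		panic(err)
	}
	root := &doc
	if root.Kind == yaml.DocumentNode {
		root = root.Content[0]
	}
	fmt.Println(keysOf(root)) // [dog cat]
}
```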
@@ -7,17 +7,21 @@ import (
 yaml "gopkg.in/yaml.v3"
 )

-func lengthOperator(d *dataTreeNavigator, matchMap *list.List, expressionNode *ExpressionNode) (*list.List, error) {
+func lengthOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
 log.Debugf("-- lengthOperation")
 var results = list.New()

-for el := matchMap.Front(); el != nil; el = el.Next() {
+for el := context.MatchingNodes.Front(); el != nil; el = el.Next() {
 candidate := el.Value.(*CandidateNode)
 targetNode := unwrapDoc(candidate.Node)
 var length int
 switch targetNode.Kind {
 case yaml.ScalarNode:
-length = len(targetNode.Value)
+if targetNode.Tag == "!!null" {
+length = 0
+} else {
+length = len(targetNode.Value)
+}
 case yaml.MappingNode:
 length = len(targetNode.Content) / 2
 case yaml.SequenceNode:
@@ -31,5 +35,5 @@ func lengthOperator(d *dataTreeNavigator, matchMap *list.List, expressionNode *E
 results.PushBack(result)
 }

-return results, nil
+return context.ChildContext(results), nil
 }
@@ -14,6 +14,30 @@ var lengthOperatorScenarios = []expressionScenario{
 "D0, P[a], (!!int)::3\n",
 },
 },
+{
+description: "null length",
+document: `{a: null}`,
+expression: `.a | length`,
+expected: []string{
+"D0, P[a], (!!int)::0\n",
+},
+},
+{
+skipDoc: true,
+document: `{a: ~}`,
+expression: `.a | length`,
+expected: []string{
+"D0, P[a], (!!int)::0\n",
+},
+},
+{
+skipDoc: true,
+document: `{a: key no exist}`,
+expression: `.b | length`,
+expected: []string{
+"D0, P[b], (!!int)::0\n",
+},
+},
 {
 description: "Map length",
 subdescription: "returns number of entries",
@ -2,87 +2,94 @@ package yqlib
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"strconv"
|
||||||
|
|
||||||
"container/list"
|
"container/list"
|
||||||
|
|
||||||
yaml "gopkg.in/yaml.v3"
|
yaml "gopkg.in/yaml.v3"
|
||||||
)
|
)
|
||||||
|
|
||||||
type crossFunctionCalculation func(d *dataTreeNavigator, lhs *CandidateNode, rhs *CandidateNode) (*CandidateNode, error)
|
|
||||||
|
|
||||||
func crossFunction(d *dataTreeNavigator, matchingNodes *list.List, expressionNode *ExpressionNode, calculation crossFunctionCalculation) (*list.List, error) {
|
|
||||||
lhs, err := d.GetMatchingNodes(matchingNodes, expressionNode.Lhs)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
log.Debugf("crossFunction LHS len: %v", lhs.Len())
|
|
||||||
|
|
||||||
rhs, err := d.GetMatchingNodes(matchingNodes, expressionNode.Rhs)
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
log.Debugf("crossFunction RHS len: %v", rhs.Len())
|
|
||||||
|
|
||||||
var results = list.New()
|
|
||||||
|
|
||||||
for el := lhs.Front(); el != nil; el = el.Next() {
|
|
||||||
lhsCandidate := el.Value.(*CandidateNode)
|
|
||||||
|
|
||||||
for rightEl := rhs.Front(); rightEl != nil; rightEl = rightEl.Next() {
|
|
||||||
log.Debugf("Applying calc")
|
|
||||||
rhsCandidate := rightEl.Value.(*CandidateNode)
|
|
||||||
resultCandidate, err := calculation(d, lhsCandidate, rhsCandidate)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
results.PushBack(resultCandidate)
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
return results, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
type multiplyPreferences struct {
|
type multiplyPreferences struct {
|
||||||
AppendArrays bool
|
AppendArrays bool
|
||||||
|
DeepMergeArrays bool
|
||||||
|
TraversePrefs traversePreferences
|
||||||
}
|
}
|
||||||
|
|
||||||
func multiplyOperator(d *dataTreeNavigator, matchingNodes *list.List, expressionNode *ExpressionNode) (*list.List, error) {
|
func multiplyOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
|
||||||
log.Debugf("-- MultiplyOperator")
|
log.Debugf("-- MultiplyOperator")
|
||||||
return crossFunction(d, matchingNodes, expressionNode, multiply(expressionNode.Operation.Preferences.(*multiplyPreferences)))
|
return crossFunction(d, context, expressionNode, multiply(expressionNode.Operation.Preferences.(multiplyPreferences)), false)
|
||||||
}
|
}
|
||||||
|
|
||||||
func multiply(preferences *multiplyPreferences) func(d *dataTreeNavigator, lhs *CandidateNode, rhs *CandidateNode) (*CandidateNode, error) {
|
func multiply(preferences multiplyPreferences) func(d *dataTreeNavigator, context Context, lhs *CandidateNode, rhs *CandidateNode) (*CandidateNode, error) {
|
||||||
return func(d *dataTreeNavigator, lhs *CandidateNode, rhs *CandidateNode) (*CandidateNode, error) {
|
return func(d *dataTreeNavigator, context Context, lhs *CandidateNode, rhs *CandidateNode) (*CandidateNode, error) {
|
||||||
lhs.Node = unwrapDoc(lhs.Node)
|
lhs.Node = unwrapDoc(lhs.Node)
|
||||||
rhs.Node = unwrapDoc(rhs.Node)
|
rhs.Node = unwrapDoc(rhs.Node)
|
||||||
log.Debugf("Multipling LHS: %v", lhs.Node.Tag)
|
log.Debugf("Multipling LHS: %v", lhs.Node.Tag)
|
||||||
log.Debugf("- RHS: %v", rhs.Node.Tag)
|
log.Debugf("- RHS: %v", rhs.Node.Tag)
|
||||||
|
|
||||||
shouldAppendArrays := preferences.AppendArrays
|
|
||||||
|
|
||||||
if lhs.Node.Kind == yaml.MappingNode && rhs.Node.Kind == yaml.MappingNode ||
|
if lhs.Node.Kind == yaml.MappingNode && rhs.Node.Kind == yaml.MappingNode ||
|
||||||
(lhs.Node.Kind == yaml.SequenceNode && rhs.Node.Kind == yaml.SequenceNode) {
|
(lhs.Node.Kind == yaml.SequenceNode && rhs.Node.Kind == yaml.SequenceNode) {
|
||||||
|
|
||||||
var newBlank = lhs.CreateChild(nil, &yaml.Node{})
|
var newBlank = lhs.CreateChild(nil, &yaml.Node{})
|
||||||
-            var newThing, err = mergeObjects(d, newBlank, lhs, false)
+            var newThing, err = mergeObjects(d, context, newBlank, lhs, multiplyPreferences{})
             if err != nil {
                 return nil, err
             }
-            return mergeObjects(d, newThing, rhs, shouldAppendArrays)
+            return mergeObjects(d, context, newThing, rhs, preferences)
+        } else if lhs.Node.Tag == "!!int" && rhs.Node.Tag == "!!int" {
+            return multiplyIntegers(lhs, rhs)
+        } else if (lhs.Node.Tag == "!!int" || lhs.Node.Tag == "!!float") && (rhs.Node.Tag == "!!int" || rhs.Node.Tag == "!!float") {
+            return multiplyFloats(lhs, rhs)
         }
         return nil, fmt.Errorf("Cannot multiply %v with %v", lhs.Node.Tag, rhs.Node.Tag)
     }
 }
 
-func mergeObjects(d *dataTreeNavigator, lhs *CandidateNode, rhs *CandidateNode, shouldAppendArrays bool) (*CandidateNode, error) {
+func multiplyFloats(lhs *CandidateNode, rhs *CandidateNode) (*CandidateNode, error) {
+    target := lhs.CreateChild(nil, &yaml.Node{})
+    target.Node.Kind = yaml.ScalarNode
+    target.Node.Style = lhs.Node.Style
+    target.Node.Tag = "!!float"
+
+    lhsNum, err := strconv.ParseFloat(lhs.Node.Value, 64)
+    if err != nil {
+        return nil, err
+    }
+    rhsNum, err := strconv.ParseFloat(rhs.Node.Value, 64)
+    if err != nil {
+        return nil, err
+    }
+    target.Node.Value = fmt.Sprintf("%v", lhsNum*rhsNum)
+    return target, nil
+}
+
+func multiplyIntegers(lhs *CandidateNode, rhs *CandidateNode) (*CandidateNode, error) {
+    target := lhs.CreateChild(nil, &yaml.Node{})
+    target.Node.Kind = yaml.ScalarNode
+    target.Node.Style = lhs.Node.Style
+    target.Node.Tag = "!!int"
+
+    lhsNum, err := strconv.Atoi(lhs.Node.Value)
+    if err != nil {
+        return nil, err
+    }
+    rhsNum, err := strconv.Atoi(rhs.Node.Value)
+    if err != nil {
+        return nil, err
+    }
+    target.Node.Value = fmt.Sprintf("%v", lhsNum*rhsNum)
+    return target, nil
+}
+
+func mergeObjects(d *dataTreeNavigator, context Context, lhs *CandidateNode, rhs *CandidateNode, preferences multiplyPreferences) (*CandidateNode, error) {
+    shouldAppendArrays := preferences.AppendArrays
     var results = list.New()
 
     // shouldn't recurse arrays if appending
-    prefs := &recursiveDescentPreferences{RecurseArray: !shouldAppendArrays,
-        TraversePreferences: &traversePreferences{FollowAlias: false}}
-    err := recursiveDecent(d, results, nodeToMap(rhs), prefs)
+    prefs := recursiveDescentPreferences{RecurseArray: !shouldAppendArrays,
+        TraversePreferences: traversePreferences{DontFollowAlias: true, IncludeMapKeys: true}}
+    err := recursiveDecent(d, results, context.SingleChildContext(rhs), prefs)
     if err != nil {
         return nil, err
     }
@@ -93,7 +100,12 @@ func mergeObjects(d *dataTreeNavigator, lhs *CandidateNode, rhs *CandidateNode,
     }
 
     for el := results.Front(); el != nil; el = el.Next() {
-        err := applyAssignment(d, pathIndexToStartFrom, lhs, el.Value.(*CandidateNode), shouldAppendArrays)
+        candidate := el.Value.(*CandidateNode)
+        if candidate.Node.Tag == "!!merge" {
+            continue
+        }
+
+        err := applyAssignment(d, context, pathIndexToStartFrom, lhs, candidate, preferences)
         if err != nil {
             return nil, err
         }
@@ -101,24 +113,25 @@ func mergeObjects(d *dataTreeNavigator, lhs *CandidateNode, rhs *CandidateNode,
     return lhs, nil
 }
 
-func applyAssignment(d *dataTreeNavigator, pathIndexToStartFrom int, lhs *CandidateNode, rhs *CandidateNode, shouldAppendArrays bool) error {
-    log.Debugf("merge - applyAssignment lhs %v, rhs: %v", NodeToString(lhs), NodeToString(rhs))
+func applyAssignment(d *dataTreeNavigator, context Context, pathIndexToStartFrom int, lhs *CandidateNode, rhs *CandidateNode, preferences multiplyPreferences) error {
+    shouldAppendArrays := preferences.AppendArrays
+    log.Debugf("merge - applyAssignment lhs %v, rhs: %v", lhs.GetKey(), rhs.GetKey())
 
     lhsPath := rhs.Path[pathIndexToStartFrom:]
 
     assignmentOp := &Operation{OperationType: assignAttributesOpType}
-    if rhs.Node.Kind == yaml.ScalarNode || rhs.Node.Kind == yaml.AliasNode {
+    if shouldAppendArrays && rhs.Node.Kind == yaml.SequenceNode {
+        assignmentOp.OperationType = addAssignOpType
+    } else if !preferences.DeepMergeArrays && rhs.Node.Kind == yaml.SequenceNode ||
+        (rhs.Node.Kind == yaml.ScalarNode || rhs.Node.Kind == yaml.AliasNode) {
         assignmentOp.OperationType = assignOpType
         assignmentOp.UpdateAssign = false
-    } else if shouldAppendArrays && rhs.Node.Kind == yaml.SequenceNode {
-        assignmentOp.OperationType = addAssignOpType
     }
     rhsOp := &Operation{OperationType: valueOpType, CandidateNode: rhs}
 
-    assignmentOpNode := &ExpressionNode{Operation: assignmentOp, Lhs: createTraversalTree(lhsPath), Rhs: &ExpressionNode{Operation: rhsOp}}
+    assignmentOpNode := &ExpressionNode{Operation: assignmentOp, Lhs: createTraversalTree(lhsPath, preferences.TraversePrefs, rhs.IsMapKey), Rhs: &ExpressionNode{Operation: rhsOp}}
 
-    _, err := d.GetMatchingNodes(nodeToMap(lhs), assignmentOpNode)
+    _, err := d.GetMatchingNodes(context.SingleChildContext(lhs), assignmentOpNode)
 
     return err
 }
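As a reading aid for the change above: the new multiplyIntegers/multiplyFloats helpers implement a simple promotion rule (int * int stays !!int, anything involving a float becomes !!float). Below is a minimal standalone sketch of that rule only; the helper name and signature are illustrative and not part of yq's API.

// Illustrative sketch only: mirrors the int/float promotion logic added above,
// using plain strings as stand-ins for scalar YAML node values and tags.
package main

import (
    "fmt"
    "strconv"
)

// multiplyScalars returns the product and the resulting YAML tag:
// int * int stays "!!int"; any float operand promotes the result to "!!float".
func multiplyScalars(lhsTag, lhsVal, rhsTag, rhsVal string) (string, string, error) {
    if lhsTag == "!!int" && rhsTag == "!!int" {
        l, err := strconv.Atoi(lhsVal)
        if err != nil {
            return "", "", err
        }
        r, err := strconv.Atoi(rhsVal)
        if err != nil {
            return "", "", err
        }
        return fmt.Sprintf("%v", l*r), "!!int", nil
    }
    l, err := strconv.ParseFloat(lhsVal, 64)
    if err != nil {
        return "", "", err
    }
    r, err := strconv.ParseFloat(rhsVal, 64)
    if err != nil {
        return "", "", err
    }
    return fmt.Sprintf("%v", l*r), "!!float", nil
}

func main() {
    v, tag, _ := multiplyScalars("!!int", "3", "!!float", "4.5")
    fmt.Println(v, tag) // 13.5 !!float
}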
@@ -4,7 +4,63 @@ import (
     "testing"
 )
 
+var doc1 = `list:
+# Hi this is a comment.
+# Hello this is another comment.
+- "abc"`
+
+var doc2 = `list2:
+# This is yet another comment.
+# Indeed this is yet another comment.
+- "123"`
+
+var docExpected = `D0, P[], (!!map)::list:
+# Hi this is a comment.
+# Hello this is another comment.
+- "abc"
+list2:
+# This is yet another comment.
+# Indeed this is yet another comment.
+- "123"
+`
+
+var mergeArraysObjectKeysText = `It's a complex command, the trickyness comes from needing to have the right context in the expressions.
+First we save the second array into a variable '$two' which lets us reference it later.
+We then need to update the first array. We will use the relative update (|=) because we need to update relative to the current element of the array in the LHS in the RHS expression.
+We set the current element of the first array as $cur. Now we multiply (merge) $cur with the matching entry in $two, by passing $two through a select filter.
+`
+
 var multiplyOperatorScenarios = []expressionScenario{
+    {
+        description: "Multiply integers",
+        expression:  `3 * 4`,
+        expected: []string{
+            "D0, P[], (!!int)::12\n",
+        },
+    },
+    {
+        skipDoc:    true,
+        document:   doc1,
+        document2:  doc2,
+        expression: `select(fi == 0) * select(fi == 1)`,
+        expected: []string{
+            docExpected,
+        },
+    },
+    {
+        skipDoc:    true,
+        expression: `3 * 4.5`,
+        expected: []string{
+            "D0, P[], (!!float)::13.5\n",
+        },
+    },
+    {
+        skipDoc:    true,
+        expression: `4.5 * 3`,
+        expected: []string{
+            "D0, P[], (!!float)::13.5\n",
+        },
+    },
     {
         skipDoc:  true,
         document: `{a: {also: [1]}, b: {also: me}}`,
@@ -13,6 +69,51 @@ var multiplyOperatorScenarios = []expressionScenario{
             "D0, P[], (!!map)::{a: {also: me}, b: {also: me}}\n",
         },
     },
+    {
+        skipDoc:    true,
+        document:   "# b\nb:\n # a\n a: cat",
+        expression: "{} * .",
+        expected: []string{
+            "D0, P[], (!!map)::# b\nb:\n # a\n a: cat\n",
+        },
+    },
+    {
+        skipDoc:    true,
+        document:   "# b\nb:\n # a\n a: cat",
+        expression: ". * {}",
+        expected: []string{
+            "D0, P[], (!!map)::# b\nb:\n # a\n a: cat\n",
+        },
+    },
+    {
+        skipDoc:    true,
+        document:   `{a: &a { b: &b { c: &c cat } } }`,
+        expression: `{} * .`,
+        expected: []string{
+            "D0, P[], (!!map)::{a: &a {b: &b {c: &c cat}}}\n",
+        },
+    },
+    {
+        skipDoc:    true,
+        document:   `{a: 2, b: 5}`,
+        document2:  `{a: 3, b: 10}`,
+        expression: `.a * .b`,
+        expected: []string{
+            "D0, P[a], (!!int)::10\n",
+            "D0, P[a], (!!int)::20\n",
+            "D0, P[a], (!!int)::15\n",
+            "D0, P[a], (!!int)::30\n",
+        },
+    },
+    {
+        skipDoc:    true,
+        document:   `{a: 2}`,
+        document2:  `{b: 10}`,
+        expression: `select(fi ==0) * select(fi==1)`,
+        expected: []string{
+            "D0, P[], (!!map)::{a: 2, b: 10}\n",
+        },
+    },
     {
         skipDoc:    true,
         expression: `{} * {"cat":"dog"}`,
@@ -71,7 +172,7 @@ var multiplyOperatorScenarios = []expressionScenario{
     {
         skipDoc:    true,
         document:   `{a: {things: great}, b: {also: me}}`,
-        expression: `. * {"a":.b}`,
+        expression: `. * {"a": .b}`,
         expected: []string{
             "D0, P[], (!!map)::{a: {things: great, also: me}, b: {also: me}}\n",
         },
@@ -79,16 +180,10 @@ var multiplyOperatorScenarios = []expressionScenario{
     {
         description:           "Merge keeps style of LHS",
         dontFormatInputForDoc: true,
-        document: `a: {things: great}
-b:
- also: "me"
-`,
-        expression: `. * {"a":.b}`,
+        document:   "a: {things: great}\nb:\n also: \"me\"",
+        expression: `. * {"a":.b}`,
         expected: []string{
-            `D0, P[], (!!map)::a: {things: great, also: "me"}
-b:
- also: "me"
-`,
+            "D0, P[], (!!map)::a: {things: great, also: \"me\"}\nb:\n also: \"me\"\n",
         },
     },
     {
@@ -107,6 +202,30 @@ b:
             "D0, P[a], (!!seq)::[1, 2]\n",
         },
     },
+    {
+        description: "Merge, only existing fields",
+        document:    `{a: {thing: one, cat: frog}, b: {missing: two, thing: two}}`,
+        expression:  `.a *? .b`,
+        expected: []string{
+            "D0, P[a], (!!map)::{thing: two, cat: frog}\n",
+        },
+    },
+    {
+        skipDoc:    true,
+        document:   `{a: [{thing: one}], b: [{missing: two, thing: two}]}`,
+        expression: `.a *?d .b`,
+        expected: []string{
+            "D0, P[a], (!!seq)::[{thing: two}]\n",
+        },
+    },
+    {
+        skipDoc:    true,
+        document:   `{a: {array: [1]}, b: {}}`,
+        expression: `.b *+ .a`,
+        expected: []string{
+            "D0, P[b], (!!map)::{array: [1]}\n",
+        },
+    },
     {
         description: "Merge, appending arrays",
         document:    `{a: {array: [1, 2, animal: dog], value: coconut}, b: {array: [3, 4, animal: cat], value: banana}}`,
@@ -115,6 +234,33 @@ b:
             "D0, P[a], (!!map)::{array: [1, 2, {animal: dog}, 3, 4, {animal: cat}], value: banana}\n",
         },
     },
+    {
+        description: "Merge, only existing fields, appending arrays",
+        document:    `{a: {thing: [1,2]}, b: {thing: [3,4], another: [1]}}`,
+        expression:  `.a *?+ .b`,
+        expected: []string{
+            "D0, P[a], (!!map)::{thing: [1, 2, 3, 4]}\n",
+        },
+    },
+    {
+        description:    "Merge, deeply merging arrays",
+        subdescription: "Merging arrays deeply means arrays are merge like objects, with indexes as their key. In this case, we merge the first item in the array, and do nothing with the second.",
+        document:       `{a: [{name: fred, age: 12}, {name: bob, age: 32}], b: [{name: fred, age: 34}]}`,
+        expression:     `.a *d .b`,
+        expected: []string{
+            "D0, P[a], (!!seq)::[{name: fred, age: 34}, {name: bob, age: 32}]\n",
+        },
+    },
+    {
+        description:    "Merge arrays of objects together, matching on a key",
+        subdescription: mergeArraysObjectKeysText,
+        document:       `[{a: apple, b: appleB}, {a: kiwi, b: kiwiB}, {a: banana, b: bananaB}]`,
+        document2:      `[{a: banana, c: bananaC}, {a: apple, b: appleB2}, {a: dingo, c: dingoC}]`,
+        expression:     `(select(fi==1) | .[]) as $two | select(fi==0) | .[] |= (. as $cur | $cur * ($two | select(.a == $cur.a)))`,
+        expected: []string{
+            "D0, P[], (doc)::[{a: apple, b: appleB2}, {a: kiwi, b: kiwiB}, {a: banana, b: bananaB, c: bananaC}]\n",
+        },
+    },
     {
         description: "Merge to prefix an element",
         document:    `{a: cat, b: dog}`,
@@ -153,5 +299,5 @@ func TestMultiplyOperatorScenarios(t *testing.T) {
     for _, tt := range multiplyOperatorScenarios {
         testScenario(t, &tt)
     }
-    documentScenarios(t, "Multiply", multiplyOperatorScenarios)
+    documentScenarios(t, "Multiply (Merge)", multiplyOperatorScenarios)
 }
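The new `*d` scenarios above describe deep array merging as treating arrays "like objects, with indexes as their key". A small illustrative sketch of that index-wise semantics follows; it uses plain Go maps as stand-ins for YAML objects and is not yq's implementation.

// Illustrative sketch only (not yq's code): element i of the RHS array is
// merged into element i of the LHS array, matching the `.a *d .b` scenario.
package main

import "fmt"

func deepMergeArrays(lhs, rhs []map[string]interface{}) []map[string]interface{} {
    for i, rhsItem := range rhs {
        if i >= len(lhs) {
            // assumption for this sketch: extra RHS elements are appended
            lhs = append(lhs, rhsItem)
            continue
        }
        for k, v := range rhsItem {
            lhs[i][k] = v // RHS values win; other LHS keys are kept
        }
    }
    return lhs
}

func main() {
    a := []map[string]interface{}{{"name": "fred", "age": 12}, {"name": "bob", "age": 32}}
    b := []map[string]interface{}{{"name": "fred", "age": 34}}
    fmt.Println(deepMergeArrays(a, b)) // [map[age:34 name:fred] map[age:32 name:bob]]
}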
@@ -16,12 +16,12 @@ func createPathNodeFor(pathElement interface{}) *yaml.Node {
     }
 }
 
-func getPathOperator(d *dataTreeNavigator, matchingNodes *list.List, expressionNode *ExpressionNode) (*list.List, error) {
+func getPathOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
     log.Debugf("GetPath")
 
     var results = list.New()
 
-    for el := matchingNodes.Front(); el != nil; el = el.Next() {
+    for el := context.MatchingNodes.Front(); el != nil; el = el.Next() {
         candidate := el.Value.(*CandidateNode)
         node := &yaml.Node{Kind: yaml.SequenceNode, Tag: "!!seq"}
 
@@ -35,5 +35,5 @@ func getPathOperator(d *dataTreeNavigator, matchingNodes *list.List, expressionN
         results.PushBack(result)
     }
 
-    return results, nil
+    return context.ChildContext(results), nil
 }
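The getPathOperator change above still builds a !!seq node out of each candidate's path; here is a rough sketch of that idea using gopkg.in/yaml.v3 directly. The helper name and the path value are made up for illustration and are not yq internals.

// Rough sketch: turn a path (mix of map keys and array indexes) into a !!seq node.
package main

import (
    "fmt"

    yaml "gopkg.in/yaml.v3"
)

func pathToSeqNode(path []interface{}) *yaml.Node {
    seq := &yaml.Node{Kind: yaml.SequenceNode, Tag: "!!seq"}
    for _, el := range path {
        switch v := el.(type) {
        case int:
            seq.Content = append(seq.Content, &yaml.Node{Kind: yaml.ScalarNode, Tag: "!!int", Value: fmt.Sprintf("%v", v)})
        default:
            seq.Content = append(seq.Content, &yaml.Node{Kind: yaml.ScalarNode, Tag: "!!str", Value: fmt.Sprintf("%v", v)})
        }
    }
    return seq
}

func main() {
    out, _ := yaml.Marshal(pathToSeqNode([]interface{}{"a", 0, "b"}))
    fmt.Print(string(out)) // prints: "- a\n- 0\n- b\n"
}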
@@ -1,11 +1,18 @@
 package yqlib
 
-import "container/list"
-
-func pipeOperator(d *dataTreeNavigator, matchingNodes *list.List, expressionNode *ExpressionNode) (*list.List, error) {
-    lhs, err := d.GetMatchingNodes(matchingNodes, expressionNode.Lhs)
+func pipeOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
+    //lhs may update the variable context, we should pass that into the RHS
+    // BUT we still return the original context back (see jq)
+    // https://stedolan.github.io/jq/manual/#Variable/SymbolicBindingOperator:...as$identifier|...
+
+    lhs, err := d.GetMatchingNodes(context, expressionNode.Lhs)
     if err != nil {
-        return nil, err
+        return Context{}, err
     }
-    return d.GetMatchingNodes(lhs, expressionNode.Rhs)
+    rhs, err := d.GetMatchingNodes(lhs, expressionNode.Rhs)
+    if err != nil {
+        return Context{}, err
+    }
+    return context.ChildContext(rhs.MatchingNodes), nil
 }
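The comments added to pipeOperator describe jq-style variable scoping: variables bound while evaluating the LHS are visible to the RHS, but the caller keeps its original scope. An abstract sketch of that rule follows, using toy types that are not yq's real Context.

// Abstract sketch of the scoping rule: LHS variables flow into the RHS,
// while the caller's original variable scope is returned unchanged.
package main

import "fmt"

type context struct {
    vars  map[string]string
    items []string
}

func (c context) withVars(extra map[string]string) context {
    merged := map[string]string{}
    for k, v := range c.vars {
        merged[k] = v
    }
    for k, v := range extra {
        merged[k] = v
    }
    return context{vars: merged, items: c.items}
}

// pipe evaluates lhs, feeds its result (including any new variables) into rhs,
// and returns rhs's items wrapped back in the caller's original variable scope.
func pipe(ctx context, lhs, rhs func(context) context) context {
    lhsResult := lhs(ctx)
    rhsResult := rhs(lhsResult)
    return context{vars: ctx.vars, items: rhsResult.items}
}

func main() {
    ctx := context{vars: map[string]string{}, items: []string{"doc"}}
    out := pipe(ctx,
        func(c context) context { return c.withVars(map[string]string{"$x": "cat"}) },
        func(c context) context { return context{vars: c.vars, items: []string{c.vars["$x"]}} },
    )
    fmt.Println(out.items, out.vars) // [cat] map[]
}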
@@ -7,24 +7,24 @@ import (
 )
 
 type recursiveDescentPreferences struct {
-    TraversePreferences *traversePreferences
+    TraversePreferences traversePreferences
     RecurseArray        bool
 }
 
-func recursiveDescentOperator(d *dataTreeNavigator, matchMap *list.List, expressionNode *ExpressionNode) (*list.List, error) {
+func recursiveDescentOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
     var results = list.New()
 
-    preferences := expressionNode.Operation.Preferences.(*recursiveDescentPreferences)
-    err := recursiveDecent(d, results, matchMap, preferences)
+    preferences := expressionNode.Operation.Preferences.(recursiveDescentPreferences)
+    err := recursiveDecent(d, results, context, preferences)
     if err != nil {
-        return nil, err
+        return Context{}, err
     }
 
-    return results, nil
+    return context.ChildContext(results), nil
 }
 
-func recursiveDecent(d *dataTreeNavigator, results *list.List, matchMap *list.List, preferences *recursiveDescentPreferences) error {
-    for el := matchMap.Front(); el != nil; el = el.Next() {
+func recursiveDecent(d *dataTreeNavigator, results *list.List, context Context, preferences recursiveDescentPreferences) error {
+    for el := context.MatchingNodes.Front(); el != nil; el = el.Next() {
         candidate := el.Value.(*CandidateNode)
 
         candidate.Node = unwrapDoc(candidate.Node)
@@ -35,7 +35,7 @@ func recursiveDecent(d *dataTreeNavigator, results *list.List, matchMap *list.Li
         if candidate.Node.Kind != yaml.AliasNode && len(candidate.Node.Content) > 0 &&
             (preferences.RecurseArray || candidate.Node.Kind != yaml.SequenceNode) {
 
-            children, err := splat(d, nodeToMap(candidate), preferences.TraversePreferences)
+            children, err := splat(d, context.SingleChildContext(candidate), preferences.TraversePreferences)
 
             if err != nil {
                 return err
pkg/yqlib/operator_reduce.go
Normal file
59
pkg/yqlib/operator_reduce.go
Normal file
@ -0,0 +1,59 @@
|
|||||||
|
package yqlib
|
||||||
|
|
||||||
|
import (
|
||||||
|
"container/list"
|
||||||
|
"fmt"
|
||||||
|
)
|
||||||
|
|
||||||
|
func reduceOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
|
||||||
|
log.Debugf("-- reduceOp")
|
||||||
|
//.a as $var reduce (0; . + $var)
|
||||||
|
//lhs is the assignment operator
|
||||||
|
//rhs is the reduce block
|
||||||
|
// '.' refers to the current accumulator, initialised to 0
|
||||||
|
// $var references a single element from the .a
|
||||||
|
|
||||||
|
//ensure lhs is actually an assignment
|
||||||
|
//and rhs is a block (empty)
|
||||||
|
if expressionNode.Lhs.Operation.OperationType != assignVariableOpType {
|
||||||
|
return Context{}, fmt.Errorf("reduce must be given a variables assignment, got %v instead", expressionNode.Lhs.Operation.OperationType.Type)
|
||||||
|
} else if expressionNode.Rhs.Operation.OperationType != blockOpType {
|
||||||
|
return Context{}, fmt.Errorf("reduce must be given a block, got %v instead", expressionNode.Rhs.Operation.OperationType.Type)
|
||||||
|
}
|
||||||
|
|
||||||
|
arrayExpNode := expressionNode.Lhs.Lhs
|
||||||
|
array, err := d.GetMatchingNodes(context, arrayExpNode)
|
||||||
|
|
||||||
|
log.Debugf("array of %v things", array.MatchingNodes.Len())
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return Context{}, err
|
||||||
|
}
|
||||||
|
|
||||||
|
variableName := expressionNode.Lhs.Rhs.Operation.StringValue
|
||||||
|
|
||||||
|
initExp := expressionNode.Rhs.Lhs
|
||||||
|
|
||||||
|
accum, err := d.GetMatchingNodes(context, initExp)
|
||||||
|
if err != nil {
|
||||||
|
return Context{}, err
|
||||||
|
}
|
||||||
|
|
||||||
|
log.Debugf("with variable %v", variableName)
|
||||||
|
|
||||||
|
blockExp := expressionNode.Rhs.Rhs
|
||||||
|
for el := array.MatchingNodes.Front(); el != nil; el = el.Next() {
|
||||||
|
candidate := el.Value.(*CandidateNode)
|
||||||
|
log.Debugf("REDUCING WITH %v", NodeToString(candidate))
|
||||||
|
l := list.New()
|
||||||
|
l.PushBack(candidate)
|
||||||
|
accum.SetVariable(variableName, l)
|
||||||
|
|
||||||
|
accum, err = d.GetMatchingNodes(accum, blockExp)
|
||||||
|
if err != nil {
|
||||||
|
return Context{}, err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return accum, nil
|
||||||
|
}
|
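The new reduceOperator evaluates an expression like `.a as $item reduce (0; . + $item)` by seeding an accumulator from the init expression and re-running the block once per element with the variable bound to that element. A minimal sketch of that evaluation order, with plain ints standing in for YAML nodes:

// Minimal sketch (not yq's engine) of the reduce evaluation order above:
// accumulator starts from init, then the block runs once per element.
package main

import "fmt"

func reduce(items []int, init int, block func(accum, item int) int) int {
    accum := init
    for _, item := range items {
        accum = block(accum, item) // '.' is the accumulator, $item is the element
    }
    return accum
}

func main() {
    total := reduce([]int{10, 2, 5, 3}, 0, func(accum, item int) int { return accum + item })
    fmt.Println(total) // 20
}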
Some files were not shown because too many files have changed in this diff.