pax_global_header00006660000000000000000000000064144771632430014525gustar00rootroot0000000000000052 comment=ee87412fa89add226d263ec7885193e07243aa4c yq-3.2.3/000077500000000000000000000000001447716324300121635ustar00rootroot00000000000000yq-3.2.3/.github/000077500000000000000000000000001447716324300135235ustar00rootroot00000000000000yq-3.2.3/.github/FUNDING.yml000066400000000000000000000000221447716324300153320ustar00rootroot00000000000000github: [kislyuk] yq-3.2.3/.github/workflows/000077500000000000000000000000001447716324300155605ustar00rootroot00000000000000yq-3.2.3/.github/workflows/ci.yml000066400000000000000000000015051447716324300166770ustar00rootroot00000000000000name: Test suite on: [push, pull_request] jobs: unit_tests: runs-on: ${{matrix.os}} strategy: max-parallel: 8 matrix: os: [ubuntu-20.04, ubuntu-22.04, macos-12] python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"] steps: - uses: actions/checkout@v3 - uses: actions/setup-python@v4 with: python-version: ${{matrix.python-version}} - name: Install jq run: | if [[ $(uname) == Linux ]]; then sudo apt-get install --yes jq; fi if [[ $(uname) == Darwin ]]; then brew install jq; fi - run: make install - run: make lint - run: make test - uses: codecov/codecov-action@v3 black: runs-on: ubuntu-22.04 steps: - uses: psf/black@stable isort: runs-on: ubuntu-22.04 steps: - uses: isort/isort-action@v1.1.0 yq-3.2.3/.gitignore000066400000000000000000000007351447716324300141600ustar00rootroot00000000000000# Reminder: # - A leading slash means the pattern is anchored at the root. # - No leading slash means the pattern matches at any depth. # Python files *.pyc __pycache__/ .tox/ *.egg-info/ /build/ /dist/ /.eggs/ /yq/version.py # IDE project files /.pydevproject # vim python-mode plugin /.ropeproject # IntelliJ IDEA / PyCharm project files /.idea /*.iml # JS/node/npm/web dev files node_modules npm-debug.log # OS X metadata files .DS_Store # Python coverage .coverage yq-3.2.3/Changes.rst000066400000000000000000000171541447716324300142750ustar00rootroot00000000000000Changes for v3.2.3 (2023-09-09) =============================== - test.py: use valid values for jsonargs. Fixes #172 - Allow editing toml in place (#171) - Documentation improvements Changes for v3.2.2 (2023-04-22) =============================== - Allow XML documents to be streamed with –xml-item-depth Changes for v3.2.1 (2023-04-04) =============================== - Relax tomlkit dependency version range to preserve Python 3.6 compatibility (#165) Changes for v3.2.0 (2023-04-03) =============================== - Use tomlkit instead of toml - Add security policy - Documentation and release infrastructure improvements Changes for v3.1.1 (2023-02-21) =============================== - Handle TOML datetimes. Fixes #160 - Test and documentation improvements Changes for v3.1.0 (2022-07-24) =============================== - Use configurable output grammar, compatible with YAML 1.1 Changes for v3.0.2 (2022-07-05) =============================== - Expand merge keys (https://yaml.org/type/merge.html). This is a YAML 1.1 specific behavior, but will be accommodated in yq due to its widespread use and low likelihood of accidental invocation. Changes for v3.0.1 (2022-07-04) =============================== - yq now uses the YAML 1.2 grammar when parsing and rendering YAML documents. This means bare literals like “on”, “no”, and “12:34:56” are interpreted as strings. 
(Fix interrupted release) Changes for v3.0.0 (2022-07-04) =============================== - yq now uses the YAML 1.2 grammar when parsing and rendering YAML documents. This means bare literals like “on”, “no”, and “12:34:56” are interpreted as strings. Changes for v2.14.0 (2022-02-20) ================================ - Support binary and set tags. Fixes #140 Changes for v2.13.0 (2021-12-02) ================================ - Armor against entity expansion attacks - Improve YAML loading performance by using CSafeLoader where available - Require PyYAML 5.3.1 or newer - Test and release infrastructure improvements Changes for v2.12.2 (2021-06-13) ================================ - Handle -n correctly Changes for v2.12.1 (2021-06-13) ================================ - Do not close fds on child process Changes for v2.12.0 (2021-02-05) ================================ - Add TOML support and the tomlq CLI utility to access it - Drop Python 2.7 support Changes for v2.11.1 (2020-09-26) ================================ Parse correctly when empty string is passed as jq_filter Changes for v2.11.0 (2020-09-03) ================================ - Better handling of jq_filter and files arguments (#102) - Create **main**.py (#82) Changes for v2.10.1 (2020-05-11) ================================ - Add support for xmltodict force_list definition for xq CLI (#95) - Support explicit doc markers (#93) - Ensure proper ordering of help messages (#90) Changes for v2.10.0 (2019-12-23) ================================ - Add support for in-place editing (yq -yi) - Add argcomplete integration - Docs: Migrate from RTD to gh-pages Changes for v2.9.2 (2019-11-04) =============================== - Fix interrupted release Changes for v2.9.1 (2019-11-04) =============================== - Fix documentation build Changes for v2.9.0 (2019-11-04) =============================== - Add -Y/–yaml-roundtrip for preserving YAML styles and tags Changes for v2.8.1 (2019-10-28) =============================== - Filter out -C and separate commingled yq and jq short options Changes for v2.8.0 (2019-10-25) =============================== - Set default block sequence indentation to 2 spaces, –indentless for 0 - Make main body of yq callable as a library function - Test and release infrastructure updates Changes for v2.7.2 (2019-01-09) =============================== - Support options introduced in jq 1.6. Fixes #46 - xq: Re-raise if exception is unrecognized Changes for v2.7.1 (2018-11-05) =============================== - xq: Introduce –xml-dtd and –xml-root. Fixes #37. - TOML support is optional and experimental Changes for v2.7.0 (2018-08-04) =============================== - TOML support with the tq executable entry point. - Disallow argparse abbreviated options. Fixes #38 on Python 3.5+. - Now available in Homebrew as python-yq. Changes for v2.6.0 (2018-04-28) =============================== - Packaging: Replace scripts with entry-points - Packaging: Package the license file Changes for v2.5.0 (2018-04-02) =============================== - Parse unrecognized tags instead of dropping them. 
Fixes #23 Changes for v2.4.1 (2018-02-13) =============================== - Ignore unrecognized YAML tags instead of crashing - Explicitly disable XML entity expansion and mention in docs - xq -x: Raise understandable error on non-dict conversion failure Changes for v2.4.0 (2018-02-08) =============================== - Support XML parsing with xmltodict Changes for v2.3.7 (2018-02-07) =============================== - Fix for the –from-file/-f argument: Re-route jq_filter to files when using –from-file. Fixes #19. Changes for v2.3.6 (2018-01-29) =============================== - Parse and pass multiple positional-consuming jq args Changes for v2.3.5 (2018-01-29) =============================== - Support jq arguments that consume subsequent positionals (such as –arg k v) (#16). Changes for v2.3.4 (2017-12-26) =============================== - Support bare YAML dates and times. Fixes #10 Changes for v2.3.3 (2017-09-30) =============================== - Avoid buffering all input docs in memory with no -y - End all json.dump output with newlines. Close all input streams. Fixes #8. Thanks to @bubbleattic for reporting. Changes for v2.3.2 (2017-09-25) =============================== - Fix test suite on Python 3 Changes for v2.3.1 (2017-09-25) =============================== - Add support for multiple yaml files in arguments. Thanks to @bubbleattic (PR #7) Changes for v2.3.0 (2017-08-27) =============================== - Handle multi-document streams. Fixes #6 - Report version via yq --version Changes for v2.2.0 (2017-07-07) =============================== - Stringify datetimes loaded from YAML. Fixes #5 Changes for v2.1.2 (2017-06-27) =============================== - Fix ResourceWarning: unclosed file - Internal: Make usage of loader argument consistent - Documentation improvements Changes for v2.1.1 (2017-05-02) =============================== - Fix release script. Release is identical to v2.1.0. Changes for v2.1.0 (2017-05-02) =============================== - yq now supports emitting YAML (round-trip YAML support) using "yq -y". Fixes #2. - Key order is now preserved in mappings/objects/dictionaries. - Passing input files by filename as an argument is now supported (in addition to providing data on standard input). Changes for v2.0.2 (2017-01-16) =============================== - Test and documentation improvements Changes for v2.0.1 (2017-01-14) =============================== - Fix description in setup.py Changes for v2.0.0 (2017-01-14) =============================== - Begin 2.0.0 release series. This package's release series available on PyPI begins with version 2.0.0. Versions of ``yq`` prior to 2.0.0 are distributed by https://github.com/abesto/yq and are not related to this package. No guarantees of compatibility are made between abesto/yq and kislyuk/yq. This package follows the ``Semantic Versioning 2.0.0 ``\ \_ standard. To ensure proper operation, declare dependency version ranges according to SemVer. See the Readme for more information. yq-3.2.3/LICENSE000066400000000000000000000240411447716324300131710ustar00rootroot00000000000000Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. 
"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: You must give any other recipients of the Work or Derivative Works a copy of this License; and You must cause any modified files to carry prominent notices stating that You changed the files; and You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. 
This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
yq-3.2.3/MANIFEST.in000066400000000000000000000000511447716324300137150ustar00rootroot00000000000000include LICENSE recursive-include test * yq-3.2.3/Makefile000066400000000000000000000007361447716324300136310ustar00rootroot00000000000000SHELL=/bin/bash lint: ruff $$(dirname */__init__.py) mypy --install-types --non-interactive --check-untyped-defs $$(dirname */__init__.py) test: python ./test/test.py -v init_docs: cd docs; sphinx-quickstart docs: python -m pip install furo sphinx-copybutton sphinx-build docs docs/html install: -rm -rf dist python -m pip install build python -m build python -m pip install --upgrade $$(echo dist/*.whl)[tests] .PHONY: test lint release docs include common.mk yq-3.2.3/README.rst000066400000000000000000000164521447716324300136620ustar00rootroot00000000000000yq: Command-line YAML/XML/TOML processor - jq wrapper for YAML, XML, TOML documents =================================================================================== Installation ------------ :: pip install yq Before using ``yq``, you also have to install its dependency, ``jq``. See the `jq installation instructions `_ for details and directions specific to your platform. On macOS, ``yq`` is also available on `Homebrew `_: use ``brew install python-yq``. Synopsis -------- ``yq`` takes YAML input, converts it to JSON, and pipes it to `jq `_:: cat input.yml | yq .foo.bar Like in ``jq``, you can also specify input filename(s) as arguments:: yq .foo.bar input.yml By default, no conversion of ``jq`` output is done. Use the ``--yaml-output``/``-y`` option to convert it back into YAML:: cat input.yml | yq -y .foo.bar Mapping key order is preserved. By default, custom `YAML tags `_ and `styles `_ in the input are ignored. Use the ``--yaml-roundtrip``/``-Y`` option to preserve YAML tags and styles by representing them as extra items in their enclosing mappings and sequences while in JSON:: yq -Y .foo.bar input.yml yq can be called as a module if needed. With ``-y/-Y``, files can be edited in place like with ``sed -i``:: python -m yq -Y --indentless --in-place '.["current-context"] = "staging-cluster"' ~/.kube/config Use the ``--width``/``-w`` option to pass the line wrap width for string literals. Use ``--explicit-start``/``--explicit-end`` to emit YAML start/end markers even when processing a single document. All other command line arguments are forwarded to ``jq``. ``yq`` forwards the exit code ``jq`` produced, unless there was an error in YAML parsing, in which case the exit code is 1. See the `jq manual `_ for more details on ``jq`` features and options. Because YAML treats JSON as a dialect of YAML, you can use yq to convert JSON to YAML: ``yq -y . < in.json > out.yml``. Preserving tags and styles using the ``-Y`` (``--yaml-roundtrip``) option ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ The ``-Y`` option helps preserve custom `string styles `_ and `tags `_ in your document. For example, consider the following document (an `AWS CloudFormation `_ template fragment):: Resources: ElasticLoadBalancer: Type: 'AWS::ElasticLoadBalancing::LoadBalancer' Properties: AvailabilityZones: !GetAZs '' Instances: - !Ref Ec2Instance1 - !Ref Ec2Instance2 Description: >- Load balancer for Big Important Service. Good thing it's managed by this template. 
Passing this document through ``yq -y .Resources.ElasticLoadBalancer`` will drop custom tags, such as ``!Ref``, and styles, such as the `folded `_ style of the ``Description`` field:: Type: AWS::ElasticLoadBalancing::LoadBalancer Properties: AvailabilityZones: '' Instances: - Ec2Instance1 - Ec2Instance2 Description: 'Load balancer for Big Important Service. Good thing it''s managed by this template.' By contrast, passing it through ``yq -Y .Resources.ElasticLoadBalancer`` will preserve tags and styles:: Type: 'AWS::ElasticLoadBalancing::LoadBalancer' Properties: AvailabilityZones: !GetAZs '' Instances: - !Ref 'Ec2Instance1' - !Ref 'Ec2Instance2' Description: >- Load balancer for Big Important Service. Good thing it's managed by this template. To accomplish this in ``-Y`` mode, yq carries extra metadata (mapping pairs and sequence values) in the JSON representation of your document for any custom tags or styles that it finds. When converting the JSON back into YAML, it parses this metadata, re-applies the tags and styles, and discards the extra pairs and values. .. warning :: The ``-Y`` option is incompatible with jq filters that do not expect the extra information injected into the document to preserve the YAML formatting. For example, a jq filter that counts entries in the Instances array will come up with 4 entries instead of 2. A filter that expects all array entries to be mappings may break due to the presence of string metadata keys. Check your jq filter for compatibility/semantic validity when using the ``-Y`` option. XML support ----------- ``yq`` also supports XML. The ``yq`` package installs an executable, ``xq``, which `transcodes XML to JSON `_ using `xmltodict `_ and pipes it to ``jq``. Roundtrip transcoding is available with the ``xq --xml-output``/``xq -x`` option. Multiple XML documents can be passed in separate files/streams as ``xq a.xml b.xml``. Use ``--xml-item-depth`` to descend into large documents, streaming their contents without loading the full doc into memory (for example, stream a `Wikipedia database dump `_ with ``cat enwiki-*.xml.bz2 | bunzip2 | xq . --xml-item-depth=2``). Entity expansion and DTD resolution is disabled to avoid XML parsing vulnerabilities. TOML support ------------ ``yq`` supports `TOML `_ as well. The ``yq`` package installs an executable, ``tomlq``, which uses the `tomlkit library `_ to transcode TOML to JSON, then pipes it to ``jq``. Roundtrip transcoding is available with the ``tomlq --toml-output``/``tomlq -t`` option. .. admonition:: Compatibility note This package's release series available on PyPI begins with version 2.0.0. Versions of ``yq`` prior to 2.0.0 are distributed by https://github.com/abesto/yq and are not related to this package. No guarantees of compatibility are made between abesto/yq and kislyuk/yq. This package follows the `Semantic Versioning 2.0.0 `_ standard. To ensure proper operation, declare dependency version ranges according to SemVer. Authors ------- * Andrey Kislyuk Links ----- * `Project home page (GitHub) `_ * `Documentation `_ * `Package distribution (PyPI) `_ * `Change log `_ * `jq `_ - the command-line JSON processor utility powering ``yq`` Bugs ~~~~ Please report bugs, issues, feature requests, etc. on `GitHub `_. License ------- Licensed under the terms of the `Apache License, Version 2.0 `_. .. image:: https://github.com/kislyuk/yq/workflows/Python%20package/badge.svg :target: https://github.com/kislyuk/yq/actions .. 
image:: https://codecov.io/github/kislyuk/yq/coverage.svg?branch=master :target: https://codecov.io/github/kislyuk/yq?branch=master .. image:: https://img.shields.io/pypi/v/yq.svg :target: https://pypi.python.org/pypi/yq .. image:: https://img.shields.io/pypi/l/yq.svg :target: https://pypi.python.org/pypi/yq yq-3.2.3/SECURITY.md000066400000000000000000000012701447716324300137540ustar00rootroot00000000000000# Security Policy ## Reporting a Vulnerability If you believe you have found a security vulnerability in this project, please report it to us by submitting a security advisory at https://github.com/kislyuk/yq/security/advisories. You can expect an initial response within 14 days. ## Supported Versions In general, the maintainers of this project provide security updates only for the most recent published release. If you need support for prior versions, please open an issue and describe your situation. Requests for updates to prior releases will be considered on a case-by-case basis, and will generally be accommodated only for the latest releases in prior major version release series. yq-3.2.3/common.mk000066400000000000000000000047741447716324300140200ustar00rootroot00000000000000SHELL=/bin/bash -eo pipefail release-major: $(eval export TAG=$(shell git describe --tags --match 'v*.*.*' | perl -ne '/^v(\d+)\.(\d+)\.(\d+)/; print "v@{[$$1+1]}.0.0"')) $(MAKE) release release-minor: $(eval export TAG=$(shell git describe --tags --match 'v*.*.*' | perl -ne '/^v(\d+)\.(\d+)\.(\d+)/; print "v$$1.@{[$$2+1]}.0"')) $(MAKE) release release-patch: $(eval export TAG=$(shell git describe --tags --match 'v*.*.*' | perl -ne '/^v(\d+)\.(\d+)\.(\d+)/; print "v$$1.$$2.@{[$$3+1]}"')) $(MAKE) release release: @if ! git diff --cached --exit-code; then echo "Commit staged files before proceeding"; exit 1; fi @if [[ -z $$TAG ]]; then echo "Use release-{major,minor,patch}"; exit 1; fi @if ! type -P pandoc; then echo "Please install pandoc"; exit 1; fi @if ! type -P sponge; then echo "Please install moreutils"; exit 1; fi @if ! type -P gh; then echo "Please install gh"; exit 1; fi @if ! type -P twine; then echo "Please install twine"; exit 1; fi git pull git clean -x --force $$(python setup.py --name) sed -i -e "s/version=\([\'\"]\)[0-9]*\.[0-9]*\.[0-9]*/version=\1$${TAG:1}/" setup.py git add setup.py TAG_MSG=$$(mktemp); \ echo "# Changes for ${TAG} ($$(date +%Y-%m-%d))" > $$TAG_MSG; \ git log --pretty=format:%s $$(git describe --abbrev=0)..HEAD >> $$TAG_MSG; \ $${EDITOR:-emacs} $$TAG_MSG; \ if [[ -f Changes.md ]]; then cat $$TAG_MSG <(echo) Changes.md | sponge Changes.md; git add Changes.md; fi; \ if [[ -f Changes.rst ]]; then cat <(pandoc --from markdown --to rst $$TAG_MSG) <(echo) Changes.rst | sponge Changes.rst; git add Changes.rst; fi; \ yq --help > docs/cli-doc.txt; git add docs/cli-doc.txt; \ xq --help > docs/cli-doc-xq.txt; git add docs/cli-doc-xq.txt; \ tomlq --help > docs/cli-doc-tomlq.txt; git add docs/cli-doc-tomlq.txt; \ git commit -m ${TAG}; \ git tag --sign --annotate --file $$TAG_MSG ${TAG} git push --follow-tags $(MAKE) install gh release create ${TAG} dist/*.whl --notes="$$(git tag --list ${TAG} -n99 | perl -pe 's/^\S+\s*// if $$. 
== 1' | sed 's/^\s\s\s\s//')" $(MAKE) release-pypi $(MAKE) release-docs release-pypi: python -m build twine upload dist/*.tar.gz dist/*.whl --sign --verbose release-docs: $(MAKE) docs -git branch -D gh-pages git checkout -B gh-pages-stage touch docs/html/.nojekyll git add --force docs/html git commit -m "Docs for ${TAG}" git push --force origin $$(git subtree split --prefix docs/html --branch gh-pages):refs/heads/gh-pages git checkout - .PHONY: release yq-3.2.3/docs/000077500000000000000000000000001447716324300131135ustar00rootroot00000000000000yq-3.2.3/docs/changelog.rst000066400000000000000000000000701447716324300155710ustar00rootroot00000000000000Release Notes ============= .. include:: ../Changes.rst yq-3.2.3/docs/cli-doc-tomlq.txt000066400000000000000000000045771447716324300163350ustar00rootroot00000000000000usage: tomlq [options] [input file...] tomlq: Command-line TOML processor - jq wrapper for TOML documents tomlq transcodes TOML documents to JSON and passes them to jq. See https://github.com/kislyuk/tomlq for more information. positional arguments: jq_filter files options: -h, --help show this help message and exit --toml-output, -t Transcode jq JSON output back into TOML and emit it --in-place, -i Edit files in place (no backup - use caution) --version show program's version number and exit jq - commandline JSON processor [version 1.6] Usage: jq [options] [file...] jq [options] --args [strings...] jq [options] --jsonargs [JSON_TEXTS...] jq is a tool for processing JSON inputs, applying the given filter to its JSON text inputs and producing the filter's results as JSON on standard output. The simplest filter is ., which copies jq's input to its output unmodified (except for formatting, but note that IEEE754 is used for number representation internally, with all that that implies). For more advanced filters see the jq(1) manpage ("man jq") and/or https://stedolan.github.io/jq Example: $ echo '{"foo": 0}' | jq . { "foo": 0 } Some of the options include: -c compact instead of pretty-printed output; -n use `null` as the single input value; -e set the exit status code based on the output; -s read (slurp) all inputs into an array; apply filter to it; -r output raw strings, not JSON texts; -R read raw strings, not JSON texts; -C colorize JSON; -M monochrome (don't colorize JSON); -S sort keys of objects on output; --tab use tabs for indentation; --arg a v set variable $a to value ; --argjson a v set variable $a to JSON value ; --slurpfile a f set variable $a to an array of JSON texts read from ; --rawfile a f set variable $a to a string consisting of the contents of ; --args remaining arguments are string arguments, not files; --jsonargs remaining arguments are JSON arguments, not files; -- terminates argument processing; Named arguments are also available as $ARGS.named[], while positional arguments are available as $ARGS.positional[]. See the manpage for more options. yq-3.2.3/docs/cli-doc-xq.txt000066400000000000000000000055371447716324300156260ustar00rootroot00000000000000usage: xq [options] [input file...] [--version] [jq_filter] [files ...] xq: Command-line XML processor - jq wrapper for XML documents xq transcodes XML documents to JSON and passes them to jq. See https://github.com/kislyuk/xq for more information. 
positional arguments: jq_filter files options: -h, --help show this help message and exit --xml-output, -x Transcode jq JSON output back into XML and emit it --xml-item-depth 123 Specify depth of items to emit (default 0; use a positive integer to stream large docs) --xml-dtd Preserve XML Document Type Definition (disables streaming of multiple docs) --xml-root XML_ROOT When transcoding back to XML, envelope the output in an element with this name --xml-force-list ELT Emit a list for elements with this name even if they occur only once (option can repeat) --in-place, -i Edit files in place (no backup - use caution) --version show program's version number and exit jq - commandline JSON processor [version 1.6] Usage: jq [options] [file...] jq [options] --args [strings...] jq [options] --jsonargs [JSON_TEXTS...] jq is a tool for processing JSON inputs, applying the given filter to its JSON text inputs and producing the filter's results as JSON on standard output. The simplest filter is ., which copies jq's input to its output unmodified (except for formatting, but note that IEEE754 is used for number representation internally, with all that that implies). For more advanced filters see the jq(1) manpage ("man jq") and/or https://stedolan.github.io/jq Example: $ echo '{"foo": 0}' | jq . { "foo": 0 } Some of the options include: -c compact instead of pretty-printed output; -n use `null` as the single input value; -e set the exit status code based on the output; -s read (slurp) all inputs into an array; apply filter to it; -r output raw strings, not JSON texts; -R read raw strings, not JSON texts; -C colorize JSON; -M monochrome (don't colorize JSON); -S sort keys of objects on output; --tab use tabs for indentation; --arg a v set variable $a to value ; --argjson a v set variable $a to JSON value ; --slurpfile a f set variable $a to an array of JSON texts read from ; --rawfile a f set variable $a to a string consisting of the contents of ; --args remaining arguments are string arguments, not files; --jsonargs remaining arguments are JSON arguments, not files; -- terminates argument processing; Named arguments are also available as $ARGS.named[], while positional arguments are available as $ARGS.positional[]. See the manpage for more options. yq-3.2.3/docs/cli-doc.txt000066400000000000000000000071031447716324300151670ustar00rootroot00000000000000usage: yq [options] [input file...] [--indentless-lists] [--explicit-start] [--explicit-end] [--in-place] [--version] [jq_filter] [files ...] yq: Command-line YAML processor - jq wrapper for YAML documents yq transcodes YAML documents to JSON and passes them to jq. See https://github.com/kislyuk/yq for more information. positional arguments: jq_filter files options: -h, --help show this help message and exit --yaml-output, --yml-output, -y Transcode jq JSON output back into YAML and emit it --yaml-roundtrip, --yml-roundtrip, -Y Transcode jq JSON output back into YAML and emit it. Preserve YAML tags and styles by representing them as extra items in their enclosing mappings and sequences while in JSON. This option is incompatible with jq filters that do not expect these extra items. --yaml-output-grammar-version {1.1,1.2}, --yml-out-ver {1.1,1.2} When using --yaml-output, specify output grammar (the default is 1.1 and will be changed to 1.2 in a future version). Setting this to 1.2 will cause strings like 'on' and 'no' to be emitted unquoted. 
--width WIDTH, -w WIDTH When using --yaml-output, specify string wrap width --indentless-lists, --indentless When using --yaml-output, indent block style lists (sequences) with 0 spaces instead of 2 --explicit-start When using --yaml-output, always emit explicit document start ("---") --explicit-end When using --yaml-output, always emit explicit document end ("...") --in-place, -i Edit files in place (no backup - use caution) --version show program's version number and exit jq - commandline JSON processor [version 1.6] Usage: jq [options] [file...] jq [options] --args [strings...] jq [options] --jsonargs [JSON_TEXTS...] jq is a tool for processing JSON inputs, applying the given filter to its JSON text inputs and producing the filter's results as JSON on standard output. The simplest filter is ., which copies jq's input to its output unmodified (except for formatting, but note that IEEE754 is used for number representation internally, with all that that implies). For more advanced filters see the jq(1) manpage ("man jq") and/or https://stedolan.github.io/jq Example: $ echo '{"foo": 0}' | jq . { "foo": 0 } Some of the options include: -c compact instead of pretty-printed output; -n use `null` as the single input value; -e set the exit status code based on the output; -s read (slurp) all inputs into an array; apply filter to it; -r output raw strings, not JSON texts; -R read raw strings, not JSON texts; -C colorize JSON; -M monochrome (don't colorize JSON); -S sort keys of objects on output; --tab use tabs for indentation; --arg a v set variable $a to value ; --argjson a v set variable $a to JSON value ; --slurpfile a f set variable $a to an array of JSON texts read from ; --rawfile a f set variable $a to a string consisting of the contents of ; --args remaining arguments are string arguments, not files; --jsonargs remaining arguments are JSON arguments, not files; -- terminates argument processing; Named arguments are also available as $ARGS.named[], while positional arguments are available as $ARGS.positional[]. See the manpage for more options. yq-3.2.3/docs/conf.py000066400000000000000000000014461447716324300144170ustar00rootroot00000000000000import os project = "yq" copyright = "Andrey Kislyuk" author = "Andrey Kislyuk" version = "" release = "" language = "en" master_doc = "index" extensions = ["sphinx.ext.autodoc", "sphinx.ext.viewcode", "sphinx_copybutton"] source_suffix = [".rst", ".md"] exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] pygments_style = "sphinx" templates_path = [""] if "readthedocs.org" in os.getcwd().split("/"): with open("index.rst", "w") as fh: fh.write("Documentation for this project has moved to https://kislyuk.github.io/" + project) else: html_theme = "furo" html_sidebars = { "**": [ "sidebar/brand.html", "sidebar/search.html", "sidebar/scroll-start.html", "toc.html", "sidebar/scroll-end.html", ] } yq-3.2.3/docs/index.rst000066400000000000000000000003701447716324300147540ustar00rootroot00000000000000.. include:: ../README.rst CLI usage ========= yq -- .. literalinclude:: cli-doc.txt xq -- .. literalinclude:: cli-doc-xq.txt tomlq ----- .. literalinclude:: cli-doc-tomlq.txt Change log ========== .. 
toctree:: :maxdepth: 5 changelog yq-3.2.3/docs/toc.html000066400000000000000000000000101447716324300145550ustar00rootroot00000000000000{{toc}} yq-3.2.3/pyproject.toml000066400000000000000000000004071447716324300151000ustar00rootroot00000000000000[tool.black] line-length = 120 [tool.isort] profile = "black" line_length = 120 skip_gitignore = true [tool.ruff] line-length = 120 per-file-ignores = {"yq/__init__.py" = ["F401"]} [[tool.mypy.overrides]] module = "argcomplete.*" ignore_missing_imports = true yq-3.2.3/setup.py000077500000000000000000000032141447716324300137000ustar00rootroot00000000000000#!/usr/bin/env python from setuptools import find_packages, setup setup( name="yq", version="3.2.3", url="https://github.com/kislyuk/yq", license="Apache Software License", author="Andrey Kislyuk", author_email="kislyuk@gmail.com", description="Command-line YAML/XML processor - jq wrapper for YAML/XML documents", long_description=open("README.rst").read(), python_requires=">=3.6", use_scm_version={ "write_to": "yq/version.py", }, setup_requires=["setuptools_scm >= 3.4.3"], install_requires=[ "PyYAML >= 5.3.1", "xmltodict >= 0.11.0", "tomlkit >= 0.11.6", "argcomplete >= 1.8.1", ], extras_require={ "tests": [ "coverage", "wheel", "build", "ruff", "mypy", ] }, packages=find_packages(exclude=["test"]), include_package_data=True, entry_points={ "console_scripts": ["yq=yq:cli", "xq=yq:xq_cli", "tomlq=yq:tq_cli"], }, test_suite="test", classifiers=[ "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Operating System :: MacOS :: MacOS X", "Operating System :: POSIX", "Programming Language :: Python", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Topic :: Software Development :: Libraries :: Python Modules", ], ) yq-3.2.3/test/000077500000000000000000000000001447716324300131425ustar00rootroot00000000000000yq-3.2.3/test/cfn.yml000066400000000000000000000066141447716324300144420ustar00rootroot00000000000000AWSTemplateFormatVersion: '2010-09-09' Metadata: License: Apache-2.0 Description: 'AWS CloudFormation Sample Template S3_Website_With_CloudFront_Distribution: Sample template showing how to create a website with a custom DNS name, hosted on Amazon S3 and served via Amazone CloudFront. It assumes you already have a Hosted Zone registered with Amazon Route 53. **WARNING** This template creates an Amazon Route 53 DNS record, an S3 bucket and a CloudFront distribution. You will be billed for the AWS resources used if you create a stack from this template.' Parameters: HostedZone: Type: String Description: The DNS name of an existing Amazon Route 53 hosted zone AllowedPattern: (?!-)[a-zA-Z0-9-.]{1,63}(?", line 1, column 3.' ) err2 = ( "yq: Error running jq: ScannerError: while scanning for the next token\nfound character that " 'cannot start any token\n in "", line 1, column 3.' 
) self.run_yq("- %", ["."], expect_exit_codes={err, err2, 2}) def test_yq_arg_handling(self): from unittest import mock test_doc = os.path.join(os.path.dirname(__file__), "doc.yml") test_filter = os.path.join(os.path.dirname(__file__), "filter.jq") unusable_non_tty_input = mock.Mock() unusable_non_tty_input.isatty = mock.Mock(return_value=False) unusable_tty_input = mock.Mock() unusable_tty_input.isatty = mock.Mock(return_value=True) self.run_yq("{}", ["."]) self.run_yq(unusable_non_tty_input, [".", test_doc]) self.run_yq(unusable_non_tty_input, [".", test_doc, test_doc]) self.run_yq("{}", ["-f", test_filter]) self.run_yq(unusable_non_tty_input, ["-f", test_filter, test_doc]) self.run_yq(unusable_non_tty_input, ["-f", test_filter, test_doc, test_doc]) self.run_yq(unusable_tty_input, [], expect_exit_codes={2}) self.run_yq(unusable_tty_input, ["."], expect_exit_codes={2}) self.run_yq(unusable_tty_input, ["-f", test_filter], expect_exit_codes={2}) def test_yq_arg_passthrough(self): self.assertEqual(self.run_yq("{}", ["--arg", "foo", "bar", "--arg", "x", "y", "--indent", "4", "."]), "") self.assertEqual( self.run_yq("{}", ["--arg", "foo", "bar", "--arg", "x", "y", "-y", "--indent", "4", ".x=$x"]), "x: y\n" ) err = "yq: Error running jq: BrokenPipeError: [Errno 32] Broken pipe" + (": ''." if USING_PYPY else ".") self.run_yq("{}", ["--indent", "9", "."], expect_exit_codes={err, 2}) self.assertEqual(self.run_yq("", ["true", "-y", "-rn"]), "true\n...\n") with tempfile.NamedTemporaryFile() as tf, tempfile.TemporaryFile() as tf2: tf.write(b".a") tf.seek(0) tf2.write(b'{"a": 1}') for arg in "--from-file", "-f": tf2.seek(0) self.assertEqual(self.run_yq("", ["-y", arg, tf.name, self.fd_path(tf2)]), "1\n...\n") @unittest.skipIf(subprocess.check_output(["jq", "--version"]) < b"jq-1.6", "Test options introduced in jq 1.6") def test_jq16_arg_passthrough(self): self.assertEqual( self.run_yq("{}", ["--indentless", "-y", ".a=$ARGS.positional", "--args", "a", "b"]), "a:\n- a\n- b\n" ) self.assertEqual(self.run_yq("{}", ["-y", ".a=$ARGS.positional", "--args", "a", "b"]), "a:\n - a\n - b\n") self.assertEqual(self.run_yq("{}", [".", "--jsonargs", "{}", "{}"]), "") def test_short_option_separation(self): # self.assertEqual(self.run_yq('{"a": 1}', ["-yCcC", "."]), "a: 1\n") - Fails on 2.7 and 3.8 self.assertEqual(self.run_yq('{"a": 1}', ["-CcCy", "."]), "a: 1\n") self.assertEqual(self.run_yq('{"a": 1}', ["-y", "-CS", "."]), "a: 1\n") self.assertEqual(self.run_yq('{"a": 1}', ["-y", "-CC", "."]), "a: 1\n") self.assertEqual(self.run_yq('{"a": 1}', ["-y", "-cC", "."]), "a: 1\n") self.assertEqual(self.run_yq('{"a": 1}', ["-x", "-cC", "."]), "1\n") self.assertEqual(self.run_yq('{"a": 1}', ["-C", "."]), "") self.assertEqual(self.run_yq('{"a": 1}', ["-Cc", "."]), "") def fd_path(self, fh): return "/dev/fd/{}".format(fh.fileno()) def test_multidocs(self): self.assertEqual(self.run_yq("---\na: b\n---\nc: d", ["-y", "."]), "a: b\n---\nc: d\n") with tempfile.TemporaryFile() as tf, tempfile.TemporaryFile() as tf2: tf.write(b'{"a": "b"}') tf.seek(0) tf2.write(b'{"a": 1}') tf2.seek(0) self.assertEqual(self.run_yq("", ["-y", ".a", self.fd_path(tf), self.fd_path(tf2)]), "b\n--- 1\n...\n") def test_datetimes(self): self.assertEqual(self.run_yq("- 2016-12-20T22:07:36Z\n", ["."]), "") self.assertEqual(self.run_yq("- 2016-12-20T22:07:36Z\n", ["-y", "."]), "- '2016-12-20T22:07:36Z'\n") self.assertEqual( self.run_yq("- 2016-12-20T22:07:36Z\n", ["-y", "--yml-out-ver=1.2", "."]), "- 2016-12-20T22:07:36Z\n" ) 
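        # Bare dates get the same treatment as the timestamps above: quoted under the default
        # YAML 1.1 output grammar, emitted as plain scalars with --yml-out-ver=1.2.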
self.assertEqual(self.run_yq("2016-12-20", ["."]), "") self.assertEqual(self.run_yq("2016-12-20", ["-y", "."]), "'2016-12-20'\n") self.assertEqual(self.run_yq("2016-12-20", ["-y", "--yml-out-ver=1.2", "."]), "2016-12-20\n...\n") def test_unrecognized_tags(self): self.assertEqual(self.run_yq("!!foo bar\n", ["."]), "") self.assertEqual(self.run_yq("!!foo bar\n", ["-y", "."]), "bar\n...\n") self.assertEqual(self.run_yq("x: !foo bar\n", ["-y", "."]), "x: bar\n") self.assertEqual(self.run_yq("x: !!foo bar\n", ["-y", "."]), "x: bar\n") with tempfile.TemporaryFile() as tf: tf.write(yaml_with_tags.encode()) tf.seek(0) self.assertEqual(self.run_yq("", ["-y", ".xyz.foo", self.fd_path(tf)]), "bar\n...\n") def test_roundtrip_yaml(self): cfn_filename = os.path.join(os.path.dirname(__file__), "cfn.yml") with io.open(cfn_filename) as fh: self.assertEqual(self.run_yq("", ["-Y", ".", cfn_filename]), fh.read()) def test_in_place_yaml(self): with tempfile.NamedTemporaryFile() as tf, tempfile.NamedTemporaryFile() as tf2: tf.write(b"- foo\n- bar\n") tf.seek(0) tf2.write(b"- foo\n- bar\n") tf2.seek(0) self.run_yq("", ["-i", "-y", ".[0]", tf.name, tf2.name]) self.assertEqual(tf.read(), b"foo\n...\n") self.assertEqual(tf2.read(), b"foo\n...\n") # Files do not get overwritten on error (DeferredOutputStream logic) self.run_yq("", ["-i", "-y", tf.name, tf2.name], expect_exit_codes=[3]) tf.seek(0) tf2.seek(0) self.assertEqual(tf.read(), b"foo\n...\n") self.assertEqual(tf2.read(), b"foo\n...\n") def test_in_place_toml(self): with tempfile.NamedTemporaryFile() as tf: tf.write(b'[GLOBAL]\nversion="1.0.0"\n') tf.seek(0) self.run_yq("", ["-i", "-t", '.GLOBAL.version="1.0.1"', tf.name], input_format="toml") self.assertEqual(tf.read(), b'[GLOBAL]\nversion = "1.0.1"\n') def test_explicit_doc_markers(self): test_doc = os.path.join(os.path.dirname(__file__), "doc.yml") self.assertTrue(self.run_yq("", ["-y", ".", test_doc]).startswith("yaml_struct")) self.assertTrue(self.run_yq("", ["-y", "--explicit-start", ".", test_doc]).startswith("---")) self.assertTrue(self.run_yq("", ["-y", "--explicit-end", ".", test_doc]).endswith("...\n")) def test_xq(self): self.assertEqual(self.run_yq("", ["."], input_format="xml"), "") self.assertEqual(self.run_yq("", ["--xml-item-depth=2", "."], input_format="xml"), "") self.assertEqual(self.run_yq("", ["--xml-dtd", "."], input_format="xml"), "") self.assertEqual(self.run_yq("", ["-x", ".foo.x=1"], input_format="xml"), "\n 1\n\n") self.assertTrue(self.run_yq("", ["-x", "--xml-dtd", "."], input_format="xml").startswith("", ["-x", "--xml-root=R", "."], input_format="xml").startswith("")) self.assertEqual(self.run_yq("", ["--xml-force-list=foo", "."], input_format="xml"), "") self.assertEqual(self.run_yq("", ["-y", "."], input_format="xml"), "a:\n b: null\n") self.assertEqual( self.run_yq("", ["-y", "--xml-force-list", "b", "."], input_format="xml"), "a:\n b:\n - null\n", ) with tempfile.TemporaryFile() as tf, tempfile.TemporaryFile() as tf2: tf.write(b"") tf.seek(0) tf2.write(b"") tf2.seek(0) self.assertEqual( self.run_yq("", ["-x", ".a", self.fd_path(tf), self.fd_path(tf2)], input_format="xml"), "\n\n", ) err = ( "yq: Error converting JSON to XML: cannot represent non-object types at top level. " "Use --xml-root=name to envelope your output with a root element." 
) self.run_yq("[1]", ["-x", "."], expect_exit_codes=[err]) def test_xq_dtd(self): with tempfile.TemporaryFile() as tf: tf.write(b'ef') tf.seek(0) self.assertEqual( self.run_yq("", ["-x", ".a", self.fd_path(tf)], input_format="xml"), 'ef\n' ) tf.seek(0) self.assertEqual( self.run_yq("", ["-x", "--xml-dtd", ".", self.fd_path(tf)], input_format="xml"), '\n\n e\n f\n\n', ) tf.seek(0) self.assertEqual( self.run_yq("", ["-x", "--xml-dtd", "--xml-root=g", ".a", self.fd_path(tf)], input_format="xml"), '\n\n e\n f\n\n', ) def test_tomlq(self): self.assertEqual(self.run_yq("[foo]\nbar = 1", ["."], input_format="toml"), "") self.assertEqual(self.run_yq("[foo]\nbar = 1", ["-t", ".foo"], input_format="toml"), "bar = 1\n") self.assertEqual(self.run_yq("[foo]\nbar = 2020-02-20", ["."], input_format="toml"), "") def test_abbrev_opt_collisions(self): with tempfile.TemporaryFile() as tf, tempfile.TemporaryFile() as tf2: self.assertEqual( self.run_yq("", ["-y", "-e", "--slurp", ".[0] == .[1]", "-", self.fd_path(tf), self.fd_path(tf2)]), "true\n...\n", ) def test_entity_expansion_defense(self): self.run_yq(bomb_yaml, ["."], expect_exit_codes=["yq: Error: detected unsafe YAML entity expansion"]) def test_yaml_type_tags(self): bin_yaml = "example: !!binary Zm9vYmFyCg==" self.assertEqual(self.run_yq(bin_yaml, ["."]), "") self.assertEqual(self.run_yq(bin_yaml, ["-y", "."]), "example: Zm9vYmFyCg==\n") set_yaml = "example: !!set { Boston Red Sox, Detroit Tigers, New York Yankees }" self.assertEqual(self.run_yq(set_yaml, ["."]), "") self.assertEqual( self.run_yq(set_yaml, ["-y", "."]), "example:\n Boston Red Sox: null\n Detroit Tigers: null\n New York Yankees: null\n", ) def test_yaml_merge(self): self.assertEqual( self.run_yq("a: &b\n c: d\ne:\n <<: *b\n g: h", ["-y", "."]), "a:\n c: d\ne:\n c: d\n g: h\n" ) def test_yaml_1_2(self): self.assertEqual(self.run_yq("11:12:13", ["."]), "") self.assertEqual(self.run_yq("11:12:13", ["-y", "."]), "'11:12:13'\n") self.assertEqual(self.run_yq("on: 12:34:56", ["-y", "."]), "'on': '12:34:56'\n") self.assertEqual(self.run_yq("on: 12:34:56", ["-y", "--yml-out-ver=1.2", "."]), "on: 12:34:56\n") self.assertEqual(self.run_yq("2022-02-22", ["-y", "."]), "'2022-02-22'\n") self.assertEqual(self.run_yq("2022-02-22", ["-y", "--yml-out-ver=1.2", "."]), "2022-02-22\n...\n") self.assertEqual(self.run_yq("0b1010_0111", ["-y", "."]), "'0b1010_0111'\n") self.assertEqual(self.run_yq("0b1010_0111", ["-y", "--yml-out-ver=1.2", "."]), "0b1010_0111\n...\n") self.assertEqual(self.run_yq("0x_0A_74_AE", ["-y", "."]), "'0x_0A_74_AE'\n") self.assertEqual(self.run_yq("0x_0A_74_AE", ["-y", "--yml-out-ver=1.2", "."]), "0x_0A_74_AE\n...\n") self.assertEqual(self.run_yq("+685_230", ["-y", "."]), "'+685_230'\n") self.assertEqual(self.run_yq("+685_230", ["-y", "--yml-out-ver=1.2", "."]), "+685_230\n...\n") self.assertEqual(self.run_yq("+12345", ["-y", "."]), "12345\n...\n") def test_yaml_1_1_octals(self): self.assertEqual(self.run_yq("on: -012345", ["-y", "."]), "'on': -5349\n") @unittest.expectedFailure def test_yaml_1_2_octals(self): """YAML 1.2 octals not yet implemented""" self.assertEqual(self.run_yq("on: -012345", ["-y", "--yml-out-ver=1.2", "."]), "on: -12345\n") if __name__ == "__main__": unittest.main() yq-3.2.3/yq/000077500000000000000000000000001447716324300126145ustar00rootroot00000000000000yq-3.2.3/yq/__init__.py000066400000000000000000000327061447716324300147350ustar00rootroot00000000000000""" yq: Command-line YAML processor - jq wrapper for YAML documents yq transcodes YAML documents to JSON 
and passes them to jq. See https://github.com/kislyuk/yq for more information. """ # PYTHON_ARGCOMPLETE_OK import argparse import io import json import os import subprocess import sys from datetime import date, datetime, time import argcomplete import yaml from .dumper import get_dumper from .loader import get_loader from .parser import get_parser, jq_arg_spec try: from .version import version as __version__ except ImportError: __version__ = "0.0.0" class JSONDateTimeEncoder(json.JSONEncoder): def default(self, o): if isinstance(o, (datetime, date, time)): return o.isoformat() return json.JSONEncoder.default(self, o) def decode_docs(jq_output, json_decoder): while jq_output: doc, pos = json_decoder.raw_decode(jq_output) jq_output = jq_output[pos + 1 :] yield doc def xq_cli(): cli(input_format="xml", program_name="xq") def tq_cli(): cli(input_format="toml", program_name="tomlq") class DeferredOutputStream: def __init__(self, name, mode="w"): self.name = name self.mode = mode self._fh = None @property def fh(self): if self._fh is None: self._fh = open(self.name, self.mode) return self._fh def flush(self): if self._fh is not None: return self.fh.flush() def close(self): if self._fh is not None: return self.fh.close() def __getattr__(self, a): return getattr(self.fh, a) def cli(args=None, input_format="yaml", program_name="yq"): parser = get_parser(program_name, __doc__) argcomplete.autocomplete(parser) args, jq_args = parser.parse_known_args(args=args) null_input = False for i, arg in enumerate(jq_args): if arg.startswith("-") and not arg.startswith("--"): if "n" in arg: null_input = True if "i" in arg: args.in_place = True if "y" in arg: args.output_format = "yaml" elif "Y" in arg: args.output_format = "annotated_yaml" elif "x" in arg: args.output_format = "xml" jq_args[i] = arg.replace("i", "").replace("x", "").replace("y", "").replace("Y", "") if args.output_format != "json": jq_args[i] = jq_args[i].replace("C", "") if jq_args[i] == "-": jq_args[i] = None jq_args = [arg for arg in jq_args if arg is not None] for arg in jq_arg_spec: values = getattr(args, arg, None) delattr(args, arg) if values is not None: for value_group in values: jq_args.append(arg) jq_args.extend(value_group) if args.jq_filter is not None: if "--from-file" in jq_args or "-f" in jq_args: args.input_streams.insert(0, argparse.FileType()(args.jq_filter)) else: jq_filter_arg_loc = len(jq_args) if "--args" in jq_args: jq_filter_arg_loc = jq_args.index("--args") + 1 elif "--jsonargs" in jq_args: jq_filter_arg_loc = jq_args.index("--jsonargs") + 1 jq_args.insert(jq_filter_arg_loc, args.jq_filter) if null_input: args.input_streams.insert(0, open(os.devnull)) delattr(args, "jq_filter") in_place = args.in_place delattr(args, "in_place") if sys.stdin.isatty() and not args.input_streams: parser.print_help() sys.exit(2) elif not args.input_streams: args.input_streams = [sys.stdin] yq_args = dict(input_format=input_format, program_name=program_name, jq_args=jq_args, **vars(args)) if in_place: if args.output_format not in {"yaml", "annotated_yaml", "toml"}: sys.exit("{}: -i/--in-place can only be used with -y/-Y/-t".format(program_name)) input_streams = yq_args.pop("input_streams") if len(input_streams) == 1 and input_streams[0].name == "": msg = "{}: -i/--in-place can only be used with filename arguments, not on standard input" sys.exit(msg.format(program_name)) for i, input_stream in enumerate(input_streams): def exit_handler(arg=None): if arg: sys.exit(arg) if i < len(input_streams): yq_args["exit_func"] = exit_handler 
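        # In-place mode: each input file gets its own pass, and output is routed through
        # DeferredOutputStream (defined above), which only opens the target file for writing
        # on the first write. If jq fails before emitting any output, the original file is
        # left untouched.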
yq(input_streams=[input_stream], output_stream=DeferredOutputStream(input_stream.name), **yq_args) else: yq(**yq_args) def load_yaml_docs(in_stream, out_stream, jq, loader_class, max_expansion_factor, exit_func, prog): loader = loader_class(in_stream) last_loader_pos = 0 try: while loader.check_node(): node = loader.get_node() doc = loader.construct_document(node) loader_pos = node.end_mark.index doc_len = loader_pos - last_loader_pos doc_bytes_written = 0 for chunk in JSONDateTimeEncoder().iterencode(doc): doc_bytes_written += len(chunk) if doc_bytes_written > doc_len * max_expansion_factor: if jq: jq.kill() exit_func("{}: Error: detected unsafe YAML entity expansion".format(prog)) out_stream.write(chunk) out_stream.write("\n") last_loader_pos = loader_pos finally: loader.dispose() def yq( input_streams=None, output_stream=None, input_format="yaml", output_format="json", program_name="yq", width=None, indentless_lists=False, xml_root=None, xml_item_depth=0, xml_dtd=False, xml_force_list=frozenset(), explicit_start=False, explicit_end=False, expand_merge_keys=True, expand_aliases=True, max_expansion_factor=1024, yaml_output_grammar_version="1.1", jq_args=frozenset(), exit_func=None, ): if not input_streams: input_streams = [sys.stdin] if not output_stream: output_stream = sys.stdout if not exit_func: exit_func = sys.exit converting_output = True if output_format != "json" else False try: # Notes: universal_newlines is just a way to induce subprocess to make stdin a text buffer and encode it for us; # close_fds must be false for command substitution to work (yq . t.yml --slurpfile t <(yq . t.yml)) jq = subprocess.Popen( ["jq"] + list(jq_args), stdin=subprocess.PIPE, stdout=subprocess.PIPE if converting_output else None, close_fds=False, universal_newlines=True, ) except OSError as e: msg = "{}: Error starting jq: {}: {}. Is jq installed and available on PATH?" exit_func(msg.format(program_name, type(e).__name__, e)) try: if converting_output: # TODO: enable true streaming in this branch (with asyncio, asyncproc, a multi-shot variant of # subprocess.Popen._communicate, etc.) 
# See https://stackoverflow.com/questions/375427/non-blocking-read-on-a-subprocess-pipe-in-python use_annotations = True if output_format == "annotated_yaml" else False json_buffer = io.StringIO() for input_stream in input_streams: if input_format == "yaml": loader_class = get_loader( use_annotations=use_annotations, expand_aliases=expand_aliases, expand_merge_keys=expand_merge_keys, ) load_yaml_docs( in_stream=input_stream, out_stream=json_buffer, jq=None, loader_class=loader_class, max_expansion_factor=max_expansion_factor, exit_func=exit_func, prog=program_name, ) elif input_format == "xml": import xmltodict if xml_item_depth != 0: raise Exception("xml_item_depth is not supported with xq -x") doc = xmltodict.parse( input_stream.buffer if isinstance(input_stream, io.TextIOWrapper) else input_stream.read(), disable_entities=True, force_list=xml_force_list, ) json.dump(doc, json_buffer, cls=JSONDateTimeEncoder) json_buffer.write("\n") elif input_format == "toml": import tomlkit doc = tomlkit.load(input_stream) # type: ignore json.dump(doc, json_buffer, cls=JSONDateTimeEncoder) json_buffer.write("\n") else: raise Exception("Unknown input format") jq_out, jq_err = jq.communicate(json_buffer.getvalue()) json_decoder = json.JSONDecoder() if output_format == "yaml" or output_format == "annotated_yaml": dumper_class = get_dumper( use_annotations=use_annotations, indentless=indentless_lists, grammar_version=yaml_output_grammar_version, ) yaml.dump_all( decode_docs(jq_out, json_decoder), stream=output_stream, Dumper=dumper_class, width=width, allow_unicode=True, default_flow_style=False, explicit_start=explicit_start, explicit_end=explicit_end, ) elif output_format == "xml": import xmltodict for doc in decode_docs(jq_out, json_decoder): if xml_root: doc = {xml_root: doc} # type: ignore elif not isinstance(doc, dict): msg = ( "{}: Error converting JSON to XML: cannot represent non-object types at top level. " "Use --xml-root=name to envelope your output with a root element." ) exit_func(msg.format(program_name)) full_document = True if xml_dtd else False try: xmltodict.unparse( doc, output=output_stream, full_document=full_document, pretty=True, indent=" " ) except ValueError as e: if "Document must have exactly one root" in str(e): raise Exception(str(e) + " Use --xml-root=name to envelope your output with a root element") else: raise output_stream.write(b"\n" if sys.version_info < (3, 0) else "\n") elif output_format == "toml": import tomlkit for doc in decode_docs(jq_out, json_decoder): if not isinstance(doc, dict): msg = "{}: Error converting JSON to TOML: cannot represent non-object types at top level." 
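                        # TOML can only serialize a table (mapping) at the document root, so scalar or
                        # array results from the jq filter are reported as an error instead of emitted.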
                        exit_func(msg.format(program_name))

                    tomlkit.dump(doc, output_stream)
        else:
            if input_format == "yaml":
                loader_class = get_loader(
                    use_annotations=False, expand_aliases=expand_aliases, expand_merge_keys=expand_merge_keys
                )
                for input_stream in input_streams:
                    load_yaml_docs(
                        in_stream=input_stream,
                        out_stream=jq.stdin,
                        jq=jq,
                        loader_class=loader_class,
                        max_expansion_factor=max_expansion_factor,
                        exit_func=exit_func,
                        prog=program_name,
                    )
            elif input_format == "xml":
                import xmltodict

                def emit_entry(path, entry):
                    json.dump(entry, jq.stdin)  # type: ignore
                    jq.stdin.write("\n")  # type: ignore
                    return True

                for input_stream in input_streams:
                    doc = xmltodict.parse(
                        input_stream.buffer if isinstance(input_stream, io.TextIOWrapper) else input_stream.read(),
                        disable_entities=True,
                        force_list=xml_force_list,
                        item_depth=xml_item_depth,
                        item_callback=emit_entry,
                    )
                    if doc:
                        emit_entry(None, doc)
            elif input_format == "toml":
                import tomlkit

                for input_stream in input_streams:
                    json.dump(tomlkit.load(input_stream), jq.stdin, cls=JSONDateTimeEncoder)  # type: ignore
                    jq.stdin.write("\n")  # type: ignore
            else:
                raise Exception("Unknown input format")

            try:
                jq.stdin.close()  # type: ignore
            except Exception:
                pass
            jq.wait()
            for input_stream in input_streams:
                input_stream.close()
            exit_func(jq.returncode)
    except Exception as e:
        exit_func("{}: Error running jq: {}: {}.".format(program_name, type(e).__name__, e))
yq-3.2.3/yq/__main__.py000066400000000000000000000000701447716324300147030ustar00rootroot00000000000000from . import cli

if __name__ == "__main__":
    cli()
yq-3.2.3/yq/dumper.py000066400000000000000000000067131447716324300144690ustar00rootroot00000000000000import re

import yaml

from .loader import hash_key, set_yaml_grammar

# try:
#     from yaml import CSafeDumper as default_dumper
# except ImportError:
#     from yaml import SafeDumper as default_dumper


class OrderedIndentlessDumper(yaml.SafeDumper):
    pass


class OrderedDumper(yaml.SafeDumper):
    def increase_indent(self, flow=False, indentless=False):
        return super(OrderedDumper, self).increase_indent(flow, False)

    def ignore_aliases(self, data):
        return True


# Annotation sentinels are produced by the loader when annotations are enabled (-Y/--yaml-roundtrip);
# they record the original YAML tag or scalar/collection style of a value so it can be restored on output.
yaml_value_annotation_re = re.compile(r"^__yq_(?P<type>tag|style)_(?P<key>.+)__$")
yaml_item_annotation_re = re.compile(r"^__yq_(?P<type>tag|style)_(?P<key>\d+)_(?P<value>.+)__$")


def get_dumper(use_annotations=False, indentless=False, grammar_version="1.1"):
    # if not (use_annotations or indentless):
    #     return default_dumper

    def represent_dict(dumper, data):
        pairs, custom_styles, custom_tags = [], {}, {}
        for k, v in data.items():
            if use_annotations and isinstance(k, str):
                if k == "__yq_alias__":
                    continue
                value_annotation = yaml_value_annotation_re.match(k)
                if value_annotation and value_annotation.group("type") == "style":
                    custom_styles[value_annotation.group("key")] = v
                    continue
                elif value_annotation and value_annotation.group("type") == "tag":
                    custom_tags[value_annotation.group("key")] = v
                    continue
            pairs.append((k, v))
        mapping = dumper.represent_mapping("tag:yaml.org,2002:map", pairs)
        if use_annotations:
            for k, v in mapping.value:
                hashed_key = hash_key(k.value)
                if hashed_key in custom_styles:
                    if isinstance(v, yaml.nodes.ScalarNode):
                        v.style = custom_styles[hashed_key]
                    elif custom_styles[hashed_key] == "flow":
                        v.flow_style = True
                if hashed_key in custom_tags:
                    v.tag = custom_tags[hashed_key]
        return mapping

    def represent_list(dumper, data):
        raw_list, custom_styles, custom_tags = [], {}, {}
        for v in data:
            if use_annotations and isinstance(v, str):
                annotation = yaml_item_annotation_re.match(v)
                if annotation and annotation.group("type") == "style":
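                    # A style annotation refers to the list item at the recorded index; remember the
                    # requested scalar/flow style here and re-apply it to the dumped node further down.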
custom_styles[annotation.group("key")] = annotation.group("value") continue elif annotation and annotation.group("type") == "tag": custom_tags[annotation.group("key")] = annotation.group("value") continue raw_list.append(v) sequence = dumper.represent_list(raw_list) if use_annotations: for i, v in enumerate(sequence.value): if str(i) in custom_styles: if isinstance(v, yaml.nodes.ScalarNode): v.style = custom_styles[str(i)] elif custom_styles[str(i)] == "flow": v.flow_style = True if str(i) in custom_tags: v.tag = custom_tags[str(i)] return sequence dumper = OrderedIndentlessDumper if indentless else OrderedDumper dumper.add_representer(dict, represent_dict) dumper.add_representer(list, represent_list) set_yaml_grammar(dumper, grammar_version=grammar_version) return dumper yq-3.2.3/yq/loader.py000066400000000000000000000177541447716324300144520ustar00rootroot00000000000000import re from base64 import b64encode from hashlib import sha224 import yaml from yaml.tokens import ( AliasToken, AnchorToken, FlowMappingEndToken, FlowMappingStartToken, KeyToken, ScalarToken, ValueToken, ) try: from yaml import CSafeLoader as default_loader except ImportError: from yaml import SafeLoader as default_loader # type: ignore core_resolvers = { "1.1": [ { "tag": "tag:yaml.org,2002:bool", "regexp": re.compile( r"""^(?:yes|Yes|YES|no|No|NO |true|True|TRUE|false|False|FALSE |on|On|ON|off|Off|OFF)$""", re.X, ), "start_chars": list("yYnNtTfFoO"), }, { "tag": "tag:yaml.org,2002:float", "regexp": re.compile( r"""^(?:[-+]?(?:[0-9][0-9_]*)\.[0-9_]*(?:[eE][-+][0-9]+)? |\.[0-9_]+(?:[eE][-+][0-9]+)? |[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\.[0-9_]* |[-+]?\.(?:inf|Inf|INF) |\.(?:nan|NaN|NAN))$""", re.X, ), "start_chars": list("-+0123456789."), }, { "tag": "tag:yaml.org,2002:int", "regexp": re.compile( r"""^(?:[-+]?0b[0-1_]+ |[-+]?0[0-7_]+ |[-+]?(?:0|[1-9][0-9_]*) |[-+]?0x[0-9a-fA-F_]+ |[-+]?[1-9][0-9_]*(?::[0-5]?[0-9])+)$""", re.X, ), "start_chars": list("-+0123456789"), }, { "tag": "tag:yaml.org,2002:null", "regexp": re.compile( r"""^(?: ~ |null|Null|NULL | )$""", re.X, ), "start_chars": ["~", "n", "N", ""], }, { "tag": "tag:yaml.org,2002:timestamp", "regexp": re.compile( r"""^(?:[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9] |[0-9][0-9][0-9][0-9] -[0-9][0-9]? -[0-9][0-9]? (?:[Tt]|[ \t]+)[0-9][0-9]? :[0-9][0-9] :[0-9][0-9] (?:\.[0-9]*)? 
(?:[ \t]*(?:Z|[-+][0-9][0-9]?(?::[0-9][0-9])?))?)$""", re.X, ), "start_chars": list("0123456789"), }, {"tag": "tag:yaml.org,2002:value", "regexp": re.compile(r"^(?:=)$"), "start_chars": ["="]}, ], "1.2": [ { "tag": "tag:yaml.org,2002:bool", "regexp": re.compile(r"^(?:|true|True|TRUE|false|False|FALSE)$", re.X), "start_chars": list("tTfF"), }, { "tag": "tag:yaml.org,2002:int", "regexp": re.compile(r"^(?:|0o[0-7]+|[-+]?(?:[0-9]+)|0x[0-9a-fA-F]+)$", re.X), "start_chars": list("-+0123456789"), }, { "tag": "tag:yaml.org,2002:float", "regexp": re.compile( r"^(?:[-+]?(?:\.[0-9]+|[0-9]+(\.[0-9]*)?)(?:[eE][-+]?[0-9]+)?|[-+]?\.(?:inf|Inf|INF)|\.(?:nan|NaN|NAN))$", # noqa re.X, ), "start_chars": list("-+0123456789."), }, { "tag": "tag:yaml.org,2002:null", "regexp": re.compile(r"^(?:~||null|Null|NULL)$", re.X), "start_chars": ["~", "n", "N", ""], }, ], } merge_resolver = {"tag": "tag:yaml.org,2002:merge", "regexp": re.compile(r"^(?:<<)$"), "start_chars": ["<"]} def set_yaml_grammar(resolver, grammar_version="1.2", expand_merge_keys=True): if grammar_version not in core_resolvers: raise Exception(f"Unknown grammar version {grammar_version}") resolvers = list(core_resolvers[grammar_version]) if expand_merge_keys: resolvers.append(merge_resolver) resolver.yaml_implicit_resolvers = {} for r in resolvers: for start_char in r["start_chars"]: # type: ignore resolver.yaml_implicit_resolvers.setdefault(start_char, []) resolver.yaml_implicit_resolvers[start_char].append((r["tag"], r["regexp"])) def hash_key(key): return b64encode(sha224(key.encode() if isinstance(key, str) else key).digest()).decode() class CustomLoader(yaml.SafeLoader): expand_aliases = False def emit_yq_kv(self, key, value, original_token): marks = dict(start_mark=original_token.start_mark, end_mark=original_token.end_mark) self.tokens.append(FlowMappingStartToken(**marks)) self.tokens.append(KeyToken(**marks)) self.tokens.append(ScalarToken(value=key, plain=True, **marks)) self.tokens.append(ValueToken(**marks)) self.tokens.append(ScalarToken(value=value, plain=True, **marks)) self.tokens.append(FlowMappingEndToken(**marks)) def fetch_alias(self): if self.expand_aliases: return super().fetch_alias() self.save_possible_simple_key() self.allow_simple_key = False alias_token = self.scan_anchor(AliasToken) self.emit_yq_kv("__yq_alias__", alias_token.value, original_token=alias_token) def fetch_anchor(self): if self.expand_aliases: return super().fetch_anchor() self.save_possible_simple_key() self.allow_simple_key = False anchor_token = self.scan_anchor(AnchorToken) # noqa: F841 # self.emit_yq_kv("__yq_anchor__", anchor_token.value, original_token=anchor_token) def get_loader(use_annotations=False, expand_aliases=True, expand_merge_keys=True): def construct_sequence(loader, node): annotations = [] for i, v_node in enumerate(node.value): if not use_annotations: break if v_node.tag and v_node.tag.startswith("!") and not v_node.tag.startswith("!!") and len(v_node.tag) > 1: annotations.append("__yq_tag_{}_{}__".format(i, v_node.tag)) if isinstance(v_node, yaml.nodes.ScalarNode) and v_node.style: annotations.append("__yq_style_{}_{}__".format(i, v_node.style)) elif isinstance(v_node, (yaml.nodes.SequenceNode, yaml.nodes.MappingNode)) and v_node.flow_style is True: annotations.append("__yq_style_{}_{}__".format(i, "flow")) return [loader.construct_object(i) for i in node.value] + annotations def construct_mapping(loader, node): loader.flatten_mapping(node) # TODO: is this needed? 
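        # flatten_mapping() expands YAML merge keys ("<<") into the mapping's own key/value nodes.
        # Merge keys are a YAML 1.1 behavior that yq keeps because of its widespread use; whether the
        # merge tag is resolved at all is controlled by set_yaml_grammar(expand_merge_keys=...) above.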
        pairs = []
        for k_node, v_node in node.value:
            key = loader.construct_object(k_node)
            value = loader.construct_object(v_node)
            pairs.append((key, value))
            if not (use_annotations and isinstance(key, (str, bytes))):
                continue
            if v_node.tag and v_node.tag.startswith("!") and not v_node.tag.startswith("!!") and len(v_node.tag) > 1:
                pairs.append(("__yq_tag_{}__".format(hash_key(key)), v_node.tag))
            if isinstance(v_node, yaml.nodes.ScalarNode) and v_node.style:
                pairs.append(("__yq_style_{}__".format(hash_key(key)), v_node.style))
            elif isinstance(v_node, (yaml.nodes.SequenceNode, yaml.nodes.MappingNode)) and v_node.flow_style is True:
                pairs.append(("__yq_style_{}__".format(hash_key(key)), "flow"))
        return dict(pairs)

    def parse_unknown_tags(loader, tag_suffix, node):
        if isinstance(node, yaml.nodes.ScalarNode):
            return loader.construct_scalar(node)
        elif isinstance(node, yaml.nodes.SequenceNode):
            return construct_sequence(loader, node)
        elif isinstance(node, yaml.nodes.MappingNode):
            return construct_mapping(loader, node)

    loader_class = default_loader if expand_aliases else CustomLoader
    loader_class.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, construct_mapping)
    loader_class.add_constructor(yaml.resolver.BaseResolver.DEFAULT_SEQUENCE_TAG, construct_sequence)
    loader_class.add_multi_constructor("", parse_unknown_tags)
    loader_class.yaml_constructors.pop("tag:yaml.org,2002:binary", None)
    loader_class.yaml_constructors.pop("tag:yaml.org,2002:set", None)
    set_yaml_grammar(loader_class, expand_merge_keys=expand_merge_keys)
    return loader_class
yq-3.2.3/yq/parser.py000066400000000000000000000141041447716324300144620ustar00rootroot00000000000000import argparse
import subprocess
import sys
from typing import Union

try:
    from .version import version as __version__
except ImportError:
    __version__ = "0.0.0"

# jq arguments that consume positionals must be listed here to avoid our parser mistaking them for our positionals
jq_arg_spec = {
    "--indent": 1,
    "-f": 1,
    "--from-file": 1,
    "-L": 1,
    "--arg": 2,
    "--argjson": 2,
    "--slurpfile": 2,
    "--argfile": 2,
    "--rawfile": 2,
    "--args": argparse.REMAINDER,
    "--jsonargs": argparse.REMAINDER,
}


class Parser(argparse.ArgumentParser):
    def print_help(self):
        yq_help = argparse.ArgumentParser.format_help(self).splitlines()
        print("\n".join(["usage: {} [options] <jq filter> [input file...]".format(self.prog)] + yq_help[2:] + [""]))
        sys.stdout.flush()
        try:
            subprocess.check_call(["jq", "--help"])
        except Exception:
            pass


def get_parser(program_name, description):
    # By default suppress these help strings and only enable them in the specific programs.
    yaml_output_help, yaml_roundtrip_help, width_help, indentless_help, grammar_help = [argparse.SUPPRESS] * 5
    explicit_start_help, explicit_end_help = [argparse.SUPPRESS] * 2
    xml_output_help, xml_item_depth_help, xml_dtd_help, xml_root_help, xml_force_list_help = [argparse.SUPPRESS] * 5
    toml_output_help = argparse.SUPPRESS

    if program_name == "yq":
        current_language = "YAML"
        yaml_output_help = "Transcode jq JSON output back into YAML and emit it"
        yaml_roundtrip_help = (
            "Transcode jq JSON output back into YAML and emit it. "
            "Preserve YAML tags and styles by representing them as extra items "
            "in their enclosing mappings and sequences while in JSON. This option "
            "is incompatible with jq filters that do not expect these extra items."
) width_help = "When using --yaml-output, specify string wrap width" indentless_help = "When using --yaml-output, indent block style lists (sequences) with 0 spaces instead of 2" grammar_help = ( "When using --yaml-output, specify output grammar (the default is 1.1 and will be changed " "to 1.2 in a future version). Setting this to 1.2 will cause strings like 'on' and 'no' to be " "emitted unquoted." ) explicit_start_help = 'When using --yaml-output, always emit explicit document start ("---")' explicit_end_help = 'When using --yaml-output, always emit explicit document end ("...")' elif program_name == "xq": current_language = "XML" xml_output_help = "Transcode jq JSON output back into XML and emit it" xml_item_depth_help = "Specify depth of items to emit (default 0; use a positive integer to stream large docs)" xml_dtd_help = "Preserve XML Document Type Definition (disables streaming of multiple docs)" xml_root_help = "When transcoding back to XML, envelope the output in an element with this name" xml_force_list_help = "Emit a list for elements with this name even if they occur only once (option can repeat)" elif program_name == "tomlq": current_language = "TOML" toml_output_help = "Transcode jq JSON output back into TOML and emit it" else: raise Exception("Unknown program name") description = description.replace("yq", program_name).replace("YAML", current_language) parser_args = dict(prog=program_name, description=description, formatter_class=argparse.RawTextHelpFormatter) if sys.version_info >= (3, 5): parser_args.update(allow_abbrev=False) # required to disambiguate options listed in jq_arg_spec parser = Parser(**parser_args) parser.add_argument("--output-format", default="json", help=argparse.SUPPRESS) parser.add_argument( "--yaml-output", "--yml-output", "-y", dest="output_format", action="store_const", const="yaml", help=yaml_output_help, ) parser.add_argument( "--yaml-roundtrip", "--yml-roundtrip", "-Y", dest="output_format", action="store_const", const="annotated_yaml", help=yaml_roundtrip_help, ) parser.add_argument( "--yaml-output-grammar-version", "--yml-out-ver", choices=["1.1", "1.2"], default="1.1", help=grammar_help ) parser.add_argument("--width", "-w", type=int, help=width_help) parser.add_argument("--indentless-lists", "--indentless", action="store_true", help=indentless_help) parser.add_argument("--explicit-start", action="store_true", help=explicit_start_help) parser.add_argument("--explicit-end", action="store_true", help=explicit_end_help) parser.add_argument("--no-expand-aliases", action="store_false", dest="expand_aliases", help=argparse.SUPPRESS) parser.add_argument("--max-expansion-factor", type=int, default=1024, help=argparse.SUPPRESS) parser.add_argument( "--xml-output", "-x", dest="output_format", action="store_const", const="xml", help=xml_output_help ) parser.add_argument("--xml-item-depth", type=int, default=0, help=xml_item_depth_help, metavar="123") parser.add_argument("--xml-dtd", action="store_true", help=xml_dtd_help) parser.add_argument("--xml-root", help=xml_root_help) parser.add_argument("--xml-force-list", action="append", help=xml_force_list_help, metavar="ELT") parser.add_argument( "--toml-output", "-t", dest="output_format", action="store_const", const="toml", help=toml_output_help ) parser.add_argument("--in-place", "-i", action="store_true", help="Edit files in place (no backup - use caution)") parser.add_argument("--version", action="version", version="%(prog)s {version}".format(version=__version__)) for arg in jq_arg_spec: nargs: 
Union[int, str] = jq_arg_spec[arg] # type: ignore parser.add_argument(arg, nargs=nargs, dest=arg, action="append", help=argparse.SUPPRESS) parser.add_argument("jq_filter", nargs="?") parser.add_argument("input_streams", nargs="*", type=argparse.FileType(), metavar="files", default=[]) return parser
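# Illustrative usage sketch (not part of the original module; the argument values are hypothetical):
#
#     parser = get_parser("yq", "yq: Command-line YAML processor - jq wrapper for YAML documents")
#     args, jq_args = parser.parse_known_args(["--indent", "4", ".foo", "input.yml"])
#
# parse_known_args() consumes the options declared above (here --indent, which jq_arg_spec marks as
# taking one value), binds ".foo" to jq_filter, and opens "input.yml" as an input stream, while any
# options it does not recognize are returned in the second element for pass-through to jq.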