python-ruyaml-0.92.1/.github/CODEOWNERS
.github/ @ssbarnea
* @ssbarnea @smurfix @gdubicki
python-ruyaml-0.92.1/.github/release-drafter.yml
# see https://github.com/ansible-community/devtools
_extends: ansible-community/devtools
python-ruyaml-0.92.1/.github/workflows/ack.yml
# See https://github.com/ansible-community/devtools/blob/main/.github/workflows/ack.yml
name: ack
on:
pull_request_target:
types: [opened, labeled, unlabeled, synchronize]
jobs:
ack:
uses: ansible-community/devtools/.github/workflows/ack.yml@main
python-ruyaml-0.92.1/.github/workflows/push.yml
# See https://github.com/ansible-community/devtools/blob/main/.github/workflows/push.yml
name: push
on:
push:
branches:
- main
- 'releases/**'
- 'stable/**'
jobs:
ack:
uses: ansible-community/devtools/.github/workflows/push.yml@main
python-ruyaml-0.92.1/.github/workflows/release.yml
name: release
on:
release:
types: [published]
jobs:
pypi:
name: Publish to PyPI registry
environment: release
runs-on: ubuntu-20.04
env:
FORCE_COLOR: 1
PY_COLORS: 1
TOXENV: packaging
TOX_PARALLEL_NO_SPINNER: 1
steps:
- name: Switch to using Python 3.8 by default
uses: actions/setup-python@v2
with:
python-version: 3.8
- name: Install tox
run: >-
python3 -m
pip install
--user
tox
- name: Check out src from Git
uses: actions/checkout@v2
with:
fetch-depth: 0 # needed by setuptools-scm
- name: Build dists
run: python -m tox
- name: Publish to test.pypi.org
if: >- # "create" workflows run separately from "push" & "pull_request"
github.event_name == 'release'
uses: pypa/gh-action-pypi-publish@master
with:
password: ${{ secrets.testpypi_password }}
repository_url: https://test.pypi.org/legacy/
- name: Publish to pypi.org
if: >- # "create" workflows run separately from "push" & "pull_request"
github.event_name == 'release'
uses: pypa/gh-action-pypi-publish@master
with:
password: ${{ secrets.pypi_password }}
python-ruyaml-0.92.1/.github/workflows/tox.yml
name: gh
on:
pull_request:
jobs:
gh:
name: ${{ matrix.name }}
runs-on: ubuntu-20.04
strategy:
fail-fast: false
matrix:
include:
- name: linters
python-version: 3.8
# - name: docs
# python-version: 3.8
# continue-on-error: true
- name: packaging
python-version: 3.13
- name: py37
python-version: 3.7
- name: py38
python-version: 3.8
- name: py39
python-version: 3.9
- name: py310
python-version: "3.10"
- name: py311
python-version: "3.11"
- name: py312
python-version: "3.12"
- name: py313
python-version: "3.13"
steps:
- uses: actions/checkout@v2
with:
fetch-depth: 0 # needed by setuptools-scm
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: >-
Log the currently selected Python
version info (${{ matrix.python-version }})
run: |
python --version --version
which python
- name: Pip cache
uses: actions/cache@v2
with:
path: ~/.cache/pip
key: ${{ runner.os }}-pip-${{ env.PY_SHA256 }}-${{ hashFiles('setup.cfg', 'tox.ini', 'pyproject.toml', '.pre-commit-config.yaml', 'pytest.ini') }}
restore-keys: |
${{ runner.os }}-pip-
${{ runner.os }}-
- name: Install tox
run: |
python3 -m pip install --upgrade tox
- name: Log installed dists
run: >-
python3 -m pip freeze --all
- name: "Test with tox"
run: |
python3 -m tox
env:
TOXENV: ${{ matrix.name }}
- name: Archive logs
uses: actions/upload-artifact@v2
with:
name: logs.zip
path: .tox/**/log/
check:
needs:
- gh
runs-on: ubuntu-latest
steps:
- name: Report success of the test matrix
run: >-
print("All's good")
shell: python
python-ruyaml-0.92.1/.gitignore
/.tox/
/build/
/dist/
/.eggs/
/.pybuild/
*.egg-info/
__pycache__
/_doc/_build/
python-ruyaml-0.92.1/.hgignore
# this should only include project specific files. Ignores that are valid for other
# ruamel. projects like e.g. the directory .tox should go in the file pointed to by
# the ui->ignore entry in ~/.hgrc (mercurial doesn't conform to the XDG Base Directory
# Specification):
# [ui]
# ignore = ~/.hgext/hgignore
syntax: glob
# _yaml.so
venv
TODO.rst
try_*
_doc/*.pdf
_doc/*.html
_doc/*.rst
_doc/*.md
*.py_alt
ziglib
python-ruyaml-0.92.1/.pre-commit-config.yaml
---
exclude: |
(?x)(
^docs/conf.py$|
^_test/data/.*$
)
repos:
- repo: https://github.com/PyCQA/isort
rev: 5.12.0
hooks:
- id: isort
- repo: https://github.com/psf/black
rev: 23.1.0
hooks:
- id: black
language_version: python3
- repo: https://github.com/pre-commit/pre-commit-hooks.git
rev: v4.4.0
hooks:
- id: end-of-file-fixer
- id: trailing-whitespace
- id: mixed-line-ending
- id: check-byte-order-marker
- id: check-executables-have-shebangs
- id: check-merge-conflict
- id: debug-statements
language_version: python3
- repo: https://github.com/PyCQA/flake8
rev: 6.0.0
hooks:
- id: flake8
additional_dependencies:
- pydocstyle>=5.1.1
# - flake8-black>=0.1.1
- flake8-bugbear>=20.11.1
language_version: python3
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.1.1
hooks:
- id: mypy
# empty args needed in order to match mypy cli behavior
args: ['--allow-redefinition']
entry: mypy lib/
pass_filenames: false
additional_dependencies:
- packaging
- rich
- subprocess-tee>=0.1.4
python-ruyaml-0.92.1/.readthedocs.yaml
version: 2
build:
os: ubuntu-22.04
tools:
python: "3.11"
jobs:
pre_build:
- pip install ryd>=0.9.2
- ryd --version -v
- ryd convert --generate-mkdocs-config mkdocs.yaml _doc
python:
install:
- method: pip
path: .
extra_requirements: [docs]
mkdocs:
configuration: mkdocs.yaml
python-ruyaml-0.92.1/CHANGES
[0.18.15, 2025-08-19]:
- duplicate merge keys are never allowed (not even with .allow_duplicate_keys =
True)
- merge keys now keep their position if a key before the merge key gets deleted
(previously a key after the merge key would move before it)
[0.18.14, 2025-06-09]:
- Fix issue with constructing dataclasses that have a default factory
attribute, but were missing a mapping value for that attribute. Reported by
[Victor Prieto](https://sourceforge.net/u/vsprieto/profile/)
- the tagged release tar files can now also be downloaded from
https://yaml.dev/ruamel-dl-tagged-releases/ please adjust if you use
https://sourceforge.net/projects/ruamel-dl-tagged-releases/files/ as that
repository in sourceforge will no longer be updated from some later date.
[0.18.13, 2025-06-06]:
- Fix line wrapping on plain scalars not observing width correctly. Issue 529,
reported by [Sebastien
Vermeille](https://sourceforge.net/u/svermeille/profile/)
- Fix sha256 and length in RECORD files. Reported by
[Evan](https://sourceforge.net/u/bempelise/profile/)
[0.18.12, 2025-05-30]:
- fix additional issue with extra space in double quoted string. Reported by
[Saugat Pachhai](https://sourceforge.net/u/skshetry/profile/)
- fix duplicate key url, now pointing to yaml.dev. Reported by
[Hugo](https://sourceforge.net/u/hugovk/profile/)
- fix broken RECORD file, which was a problem for uv, not pip. Reported by
[konstin](https://sourceforge.net/u/konstin/profile/)
[0.18.11, 2025-05-19]:
- function `load_yaml_guess_indent` now takes an optional `yaml` argument so you
can provide an already created/configured `YAML` instance (see the sketch after
this entry)
- Sequence item indicator with both comment/empty line before indicator **and**
comment before sequence item, could not move comment and raise
`NotImplementedError`. Reported by [Karsten
Tessarzik](https://sourceforge.net/u/kars10/profile/).
- missing f for f-string (reported by π, via email)
- fixed issue with extra space in double quoted dump (reported by [Jan
Möller](https://sourceforge.net/u/redfiredragon/profile/))
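A minimal sketch of the optional `yaml` argument described in the 0.18.11 entry above. It assumes the upstream `ruamel.yaml` import name (this fork publishes the module as `ruyaml`); the sample document and the instance configuration are illustrative.
```
# Hedged sketch: load_yaml_guess_indent() lives in ruamel.yaml.util and
# returns (data, mapping_indent, block_seq_indent); yaml= is the optional,
# already configured instance mentioned in the entry above.
from ruamel.yaml import YAML
from ruamel.yaml.util import load_yaml_guess_indent

document = "root:\n    - a\n    - b\n"
yaml = YAML()                 # already created/configured instance
yaml.preserve_quotes = True
data, indent, block_seq_indent = load_yaml_guess_indent(document, yaml=yaml)
print(data, indent, block_seq_indent)
```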
[0.18.10, 2025-01-06]:
- implemented changes to the setup.py for Python 3.14 as suggested by [Miro
Hrončok](https://sourceforge.net/u/hroncok/profile/) in merge requests (MR not
merged as those files are copied in from `develop` config)
[0.18.9, 2025-01-05]:
- fix issue with roundtripping 0 in YAML 1.1 reported by [Peter
Law](https://sourceforge.net/u/peterjclaw/profile/)
[0.18.8, 2025-01-02]:
- added warning to README.md that PyPI might block updates due to breaking
changes
[0.18.7, 2024-12-30]:
- fixes for README (reported by [Kees
Bakker](https://sourceforge.net/u/keesb/profile/))
- fixes preserving anchor on scalar integer `0` (issue reported by [Mor
Peled](https://sourceforge.net/u/morp/profile/) and also in a question by
[Ravi](https://stackoverflow.com/users/6550398/ravi) on
[Stackoverflow](https://stackoverflow.com/a/79306830/1307905))
- fix for formatting of README suggested by [Michael R.
Crusoe](https://sourceforge.net/u/crusoe/profile/)
[0.18.6, 2024-02-07]:
- fixed an issue with dataclass loading when the fields were collections (bug
found as a result of a question by
[FibroMyAlgebra](https://stackoverflow.com/users/6855070/fibromyalgebra) on
[StackOverflow](https://stackoverflow.com/a/77485786/1307905))
- fixed an issue loading dataclasses with `InitVar` fields when `from __future__
import annotations` was used to delay evaluation of typing.
[0.18.5, 2023-11-03]:
- there is some indication that dependent packages have been pinned to use
specific (tested) versions and just install the latest even in Python versions that
have reached end-of-life
[0.18.4, 2023-11-01]:
- YAML() instance has a `doc_infos` attribute which is a cumulative list of
DocInfo instances (one for `load()`, one per document for `load_all()`).
DocInfo instances contain version information (requested, directive) and tag
directive information
- fix issue that the YAML instance tags attribute was not reset between
documents, resulting in mixing of tag directives of multiple documents. Now
only provides tag directive information on latest document after loading. This
means tags for dumping must be set **again** after a document is loaded with
the same instance. (because of this tags will be removed in favour of a
different mechanism in the future)
- fix issue with multiple document intermixing YAML 1.2 and YAML 1.1, the
VersionedResolver now resets
- fix issue with disappearing comment when next token was Tag (still can't have
both a comment before a tag and after a tag, before node)
[0.18.3, 2023-10-29]:
- fix issue with spurious newline on first item after comment + nested block
sequence
- additional links in the metadata on PyPI (Reported, with pointers how to fix,
by [Sorin](https://sourceforge.net/u/ssbarnea/profile/)).
[0.18.2, 2023-10-24]:
- calling the deprecated functions now raises an `AttributeError` with the,
somewhat more informative, original warning message, instead of calling
`sys.exit(1)`
[0.18.1, 2023-10-24]:
- calling the deprecated functions now always displays the warning message.
(reported by [Trend Lloyd](https://sourceforge.net/u/lathiat2/profile/))
[0.18.0, 2023-10-23]:
- the **functions** `scan`, `parse`, `compose`, `load`, `emit`, `serialize`,
`dump` and their variants (`_all`, `safe_`, `round_trip_`, etc) have been
deprecated (the same named **methods** on `YAML()` instances are, of course,
still there); see the sketch after this entry.
- |-
`YAML(typ='unsafe')` now issues a `PendingDeprecationWarning`. This will become deprecated in the 0.18 series
(probably before the end of 2023).
You can use `YAML(typ='full')` to dump unregistered Python classes/functions.
For loading you'll have to register your classes/functions
if you want the old, unsafe, functionality. You can still load any tag, like `!!python/name:posix.system`, **safely**
with the (default) round-trip parser.
- fix for `bytes-like object is required not 'str' while dumping binary
streams`. This was reported, analysed and a fix provided by [Vit
Zikmund](https://sourceforge.net/u/tlwhitec/profile/)
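A minimal sketch of the migration implied by the 0.18.0 entry above: the deprecated module-level functions are replaced by methods on a `YAML()` instance, and `typ='full'` covers dumping unregistered Python objects. The import name assumed here is the upstream `ruamel.yaml` (this fork publishes `ruyaml`); the sample data is illustrative.
```
import sys
from ruamel.yaml import YAML  # in this fork: from ruyaml import YAML

# instead of the deprecated module-level safe_load()/safe_dump()
yaml = YAML(typ='safe')
data = yaml.load("a: 1\nb: [2, 3]\n")
yaml.dump(data, sys.stdout)

# for dumping unregistered Python classes/functions, use typ='full'
# (loading such tags still requires registering your classes/functions)
full_dumper = YAML(typ='full')
```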
[0.17.40, 2023-10-20]:
- flow style sets are now preserved (`!!set {a, b, c}`). Any values specified
when loading are dropped, including `!!null ""`.
- |-
potential workaround for issue 484: the long_description_content_type including the variant specification `CommonMark`
can result in problems on Azure. If you can install from `.tar.gz` using
`RUAMEL_NO_LONG_DESCRIPTION=1 pip install ruamel.yaml --no-binary :all:` then the long description, and its
offending type, are not included (in the METADATA).
(Reported by [Coury Ditch](https://sourceforge.net/u/cmditch/profile/))
- links in documentation update (reported by [David
Hoese](https://sourceforge.net/u/daveydave400/profile/))
- Added some `__repr__` for internally used classes
[0.17.39, 2023-10-19]:
- update README generation, no code changes
[0.17.36, 2023-10-19]:
- fixed issue 480, dumping of a loaded empty flow-style mapping with comment
failed (Reported by [Stéphane
Brunner](https://sourceforge.net/u/stbrunner/profile/))
- fixed issue 482, caused by DEFAULT_MAPPING_TAG having changed to being a
`Tag()` instance, not a string (reported by
[yan12125](https://sourceforge.net/u/yan12125/profile/))
- updated documentation to use mkdocs
[0.17.35, 2023-10-04]:
- support for loading dataclasses with `InitVar` variables (some special coding
was necessary to get the, unexpected, default value in the corresponding
instance attribute ( example of usage in [this
question](https://stackoverflow.com/q/77228378/1307905))
[0.17.34, 2023-10-03]:
- Python 3.12 also loads C version when using `typ='safe'`
- |-
initial support for invoking
`__post_init__()` on dataclasses that have that
method after loading a registered dataclass.
(Originally
[asked](https://stackoverflow.com/q/51529458/1307905) on
Stackoverflow by
[nyanpasu64](https://stackoverflow.com/users/2683842/nyanpasu64)
and as
[ticket](https://sourceforge.net/p/ruamel-yaml/tickets/355/) by
[Patrick Lehmann](https://sourceforge.net/u/paebbels/profile/))
```
@yaml.register_class
@dataclass
class ...
```
[0.17.33, 2023-09-28]:
- added `flow_seq_start`, `flow_seq_end`, `flow_seq_separator`,
`flow_map_start`, `flow_map_end`, `flow_map_separator` **class** attributes to
the `Emitter` class so flow style output can more easily be influenced (based
on [this answer](https://stackoverflow.com/a/76547814/1307905) on a
StackOverflow question by [Huw
Walters](https://stackoverflow.com/users/291033/huw-walters)).
[0.17.32, 2023-06-17]:
- fix issue with scanner getting stuck in infinite loop
[0.17.31, 2023-05-31]:
- added tag.setter on `ScalarEvent` and on `Node`, that takes either a `Tag`
instance, or a str (reported by [Sorin
Sbarnea](https://sourceforge.net/u/ssbarnea/profile/))
[0.17.30, 2023-05-30]:
- fix issue 467, caused by Tag instances not being hashable (reported by
[Douglas
Raillard](https://bitbucket.org/%7Bcf052d92-a278-4339-9aa8-de41923bb556%7D/))
[0.17.29, 2023-05-30]:
- changed the internals of the tag property from a string to a class which
allows for preservation of the original handle and suffix. This should result
in better results using documents with %TAG directives, as well as preserving
URI escapes in tag suffixes.
[0.17.28, 2023-05-26]:
- |-
fix for issue 464: documents ending with document end marker
without final newline fail to load (reported by [Mariusz
Rusiniak](https://sourceforge.net/u/r2dan/profile/))
[0.17.27, 2023-05-25]:
- fix issue with inline mappings as value for merge keys (reported by Sirish on
[StackOverflow](https://stackoverflow.com/q/76331049/1307905))
- fix for 468, error inserting after accessing merge attribute on `CommentedMap`
(reported by [Bastien gerard](https://sourceforge.net/u/bagerard/))
- fix for issue 461 pop + insert on same `CommentedMap` key throwing error
(reported by [John Thorvald Wodder
II](https://sourceforge.net/u/jwodder/profile/))
[0.17.26, 2023-05-09]:
- fix for error on edge case for issue 459
[0.17.25, 2023-05-09]:
- fix for regression while dumping wrapped strings with too many backslashes
removed (issue 459, reported by [Lele
Gaifax](https://sourceforge.net/u/lele/profile/))
[0.17.24, 2023-05-06]:
- rewrite of `CommentedMap.insert()`. If you have a merge key in the YAML
document for the mapping you insert to, the position value should be the one
as you look at the YAML input. This fixes issue 453 where other keys of a
merged in mapping would show up after an insert (reported by [Alex
Miller](https://sourceforge.net/u/millerdevel/profile/)). It also fixes a call
to `.insert()` resulting in the merge key moving to become the first key if it
wasn't already, and it is also now possible to insert a key before a merge key
(even if it is the first key in the mapping).
- fix (in the pure Python implementation including default) for issue 447.
(reported by [Jack Cherng](https://sourceforge.net/u/jfcherng/profile/), also
brought up by brent on
[StackOverflow](https://stackoverflow.com/q/40072485/1307905))
[0.17.23, 2023-05-05]:
- fix 458, error on plain scalars starting with word longer than width.
(reported by [Kyle Larose](https://sourceforge.net/u/klarose/profile/))
- fix for `.update()` no longer correctly handling keyword arguments (reported
by John Lin on [StackOverflow]( https://stackoverflow.com/q/76089100/1307905))
- |-
fix issue 454: high Unicode (emojis) in quoted strings always
escaped (reported by [Michal
Čihař](https://sourceforge.net/u/nijel/profile/) based on a
question on StackOverflow).
- fix issue with emitter conservatively inserting extra backslashes in wrapped
quoted strings (reported by thebenman on
[StackOverflow](https://stackoverflow.com/q/75631454/1307905))
[0.17.22, 2023-05-02]:
- fix issue 449 where the second exclamation mark got URL encoded (reported and
fixing PR provided by [John Stark](https://sourceforge.net/u/jods/profile/))
- fix issue with indent != 2 and literal scalars with empty first line (reported
by wrdis on [StackOverflow](https://stackoverflow.com/q/75584262/1307905))
- updated `__repr__` of CommentedMap, now that Python's dict is ordered -> no
more `ordereddict(list-of-tuples)`
- merge MR 4, handling OctalInt in YAML 1.1 (provided by [Jacob
Floyd](https://sourceforge.net/u/cognifloyd/profile/))
- fix loading of `!!float 42` (reported by Eric on [Stack
overflow](https://stackoverflow.com/a/71555107/1307905))
- line numbers are now set on `CommentedKeySeq` and `CommentedKeyMap` (which are
created if you have a sequence resp. mapping as the key in a mapping)
- |-
plain scalars: put single words longer than width on a line of
their own, instead of after the previous line (issue 427, reported
by [Antoine
Cotten](https://sourceforge.net/u/antoineco/profile/)). Caveat:
this currently results in a space ending the previous line.
- |-
fix for folded scalar part of 421: comments after ">" on first
line of folded scalars are now preserved (as were those in the
same position on literal scalars). Issue reported by Jacob Floyd.
- added stacklevel to warnings
- typing changed from Py2 compatible comments to Py3, removed various Py2-isms
[0.17.21, 2022-02-12]:
- fix bug in calling `.compose()` method with `pathlib.Path` instance.
[0.17.20, 2022-01-03]:
- fix error in microseconds while rounding datetime fractions >= 9999995
(reported by [Luis Ferreira](https://sourceforge.net/u/ljmf00/))
[0.17.19, 2021-12-26]:
- fix mypy problems (reported by
[Arun](https://sourceforge.net/u/arunppsg/profile/))
[0.17.18, 2021-12-24]:
- copy-paste error in folded scalar comment attachment (reported by [Stephan
Geulette](https://sourceforge.net/u/sgeulette/profile/))
- fix 411, indent error comment between key empty seq value (reported by
[Guillermo Julián](https://sourceforge.net/u/gjulianm/profile/))
[0.17.17, 2021-10-31]:
- extract timestamp matching/creation to util
[0.17.16, 2021-08-28]:
- 398 also handle issue 397 when comment is newline
[0.17.15, 2021-08-28]:
- fix issue 397, insert comment before key when a comment between key and value
exists (reported by [Bastien gerard](https://sourceforge.net/u/bagerard/))
[0.17.14, 2021-08-25]:
- fix issue 396, inserting key/val in merged-in dictionary (reported by [Bastien
gerard](https://sourceforge.net/u/bagerard/))
[0.17.13, 2021-08-21]:
- minor fix in attr handling
[0.17.12, 2021-08-21]:
- fix issue with anchor on registered class not preserved and those classes
using package attrs with `@attr.s()` (both reported by
[ssph](https://sourceforge.net/u/sph/))
[0.17.11, 2021-08-19]:
- fix error baseclass for `DuplicateKeyError` (reported by [Łukasz
Rogalski](https://sourceforge.net/u/lrogalski/))
- fix typo in reader error message, causing `KeyError` during reader error
(reported by [MTU](https://sourceforge.net/u/mtu/))
[0.17.10, 2021-06-24]:
- fix issue 388, token with old comment structure != two elements (reported by
[Dimitrios Bariamis](https://sourceforge.net/u/dbdbc/))
[0.17.9, 2021-06-10]:
- fix issue with updating CommentedMap (reported by sri on
[StackOverflow](https://stackoverflow.com/q/67911659/1307905))
[0.17.8, 2021-06-09]:
- fix for issue 387 where templated anchors on tagged object did get set
resulting in potential id reuse. (reported by [Artem
Ploujnikov](https://sourceforge.net/u/flexthink/))
[0.17.7, 2021-05-31]:
- issue 385 also affected other deprecated loaders (reported via email by Oren
Watson)
[0.17.6, 2021-05-31]:
- merged type annotations update provided by [Jochen
Sprickerhof](https://sourceforge.net/u/jspricke/)
- |-
fix for issue 385: deprecated round_trip_loader function not
working (reported by [Mike
Gouline](https://sourceforge.net/u/gouline/))
- wasted a few hours getting rid of mypy warnings/errors
[0.17.5, 2021-05-30]:
- fix for issue 384 `!!set` with aliased entry resulting in broken YAML on rt
reported by [William Kimball](https://sourceforge.net/u/william303/))
[0.17.4, 2021-04-07]:
- prevent (empty) comments from throwing assertion error (issue 351 reported by
[William Kimball](https://sourceforge.net/u/william303/)) comments (or empty
line) will be dropped
[0.17.3, 2021-04-07]:
- fix for issue 382 caused by an error in a format string (reported by [William
Kimball](https://sourceforge.net/u/william303/))
- |-
allow expansion of aliases by setting `yaml.composer.return_alias = lambda s: copy.deepcopy(s)`
(as per [Stackoverflow answer](https://stackoverflow.com/a/66983530/1307905))
[0.17.2, 2021-03-29]:
- change -py2.py3-none-any.whl to -py3-none-any.whl, and remove 0.17.1
[0.17.1, 2021-03-29]:
- |-
added 'Programming Language :: Python :: 3 :: Only', and
removing 0.17.0 from PyPI (reported by [Alasdair
Nicol](https://sourceforge.net/u/alasdairnicol/))
[0.17.0, 2021-03-26]:
- removed because of incomplete classifiers
- this release no longer supports Python 2.7, most if not all Python 2 specific
code is removed. The 0.17.x series is the last to support Python 3.5 (this
also allowed for removal of the dependency on `ruamel.std.pathlib`)
- remove Python2 specific code branches and adaptations (u-strings)
- prepare % code for f-strings using `_F`
- allow PyOxidisation ([issue
324](https://sourceforge.net/p/ruamel-yaml/tickets/324/) resp. [issue
171](https://github.com/indygreg/PyOxidizer/issues/171))
- replaced Python 2 compatible enforcement of keyword arguments with '*'
- the old top level *functions* `load`, `safe_load`, `round_trip_load`, `dump`,
`safe_dump`, `round_trip_dump`, `scan`, `parse`, `compose`, `emit`,
`serialize` as well as their `_all` variants for multi-document streams, now
issue a `PendingDeprecationWarning` (e.g. when run from pytest, but also when Python
is started with `-Wd`). Use the methods on `YAML()`, which have been extended.
- |-
fix for issue 376: indentation changes could put literal/folded
scalar to start before the `#` column of a following comment.
Effectively making the comment part of the scalar in the output.
(reported by [Bence Nagy](https://sourceforge.net/u/underyx/))
[0.16.13, 2021-03-05]:
- |-
fix for issue 359: could not update() CommentedMap with keyword
arguments (reported by [Steve
Franchak](https://sourceforge.net/u/binaryadder/))
- |-
fix for issue 365: unable to dump mutated TimeStamp objects
(reported by [Anton Akmerov](https://sourceforge.net/u/akhmerov))
- |-
fix for issue 371: unable to add comment without starting space
(reported by [Mark Grandi](https://sourceforge.net/u/mgrandi))
- |-
fix for issue 373: recursive call to walk_tree not preserving
all params (reported by [eulores](https://sourceforge.net/u/eulores/))
- a None value in a flow-style sequence is now dumped as `null` instead of
`!!null ''` (reported by mcarans on
[StackOverflow](https://stackoverflow.com/a/66489600/1307905))
[0.16.12, 2020-09-04]:
- update links in doc
[0.16.11, 2020-09-03]:
- workaround issue with setuptools 0.50 and importing pip (fix by
[jaraco](https://github.com/pypa/setuptools/issues/2355#issuecomment-685159580)
[0.16.10, 2020-02-12]:
- (auto) updated image references in README to sourceforge
[0.16.9, 2020-02-11]:
- update CHANGES
[0.16.8, 2020-02-11]:
- update requirements so that ruamel.yaml.clib is installed for 3.8, as it has
become available (via manylinux builds)
[0.16.7, 2020-01-30]:
- fix typechecking issue on TaggedScalar (reported by Jens Nielsen)
- fix error in dumping literal scalar in sequence with comments before element
(reported by [EJ Etherington](https://sourceforge.net/u/ejether/))
[0.16.6, 2020-01-20]:
- fix empty string mapping key roundtripping with preservation of quotes as `?
''` (reported via email by Tomer Aharoni).
- fix incorrect state setting in class constructor (reported by [Douglas
Raillard](https://bitbucket.org/%7Bcf052d92-a278-4339-9aa8-de41923bb556%7D/))
- adjust deprecation warning test for Hashable, as that no longer warns
(reported by [Jason
Montleon](https://bitbucket.org/%7B8f377d12-8d5b-4069-a662-00a2674fee4e%7D/))
[0.16.5, 2019-08-18]:
- allow for `YAML(typ=['unsafe', 'pytypes'])`
[0.16.4, 2019-08-16]:
- fix output of TAG directives with `#` (reported by [Thomas
Smith](https://bitbucket.org/%7Bd4c57a72-f041-4843-8217-b4d48b6ece2f%7D/))
[0.16.3, 2019-08-15]:
- split construct_object
- change stuff back to keep mypy happy
- move setting of version based on YAML directive to scanner, allowing to check
for file version during TAG directive scanning
[0.16.2, 2019-08-15]:
- preserve YAML and TAG directives on roundtrip, correctly output `#` in URL for
YAML 1.2 (both reported by [Thomas
Smith](https://bitbucket.org/%7Bd4c57a72-f041-4843-8217-b4d48b6ece2f%7D/))
[0.16.1, 2019-08-08]:
- Force the use of new version of ruamel.yaml.clib (reported by [Alex
Joz](https://bitbucket.org/%7B9af55900-2534-4212-976c-61339b6ffe14%7D/))
- Allow `#` in tag URI as these are allowed in YAML 1.2 (reported by [Thomas
Smith](https://bitbucket.org/%7Bd4c57a72-f041-4843-8217-b4d48b6ece2f%7D/))
[0.16.0, 2019-07-25]:
- split of C source that generates `.so` file to [ruamel.yaml.clib](
https://pypi.org/project/ruamel.yaml.clib/)
- duplicate keys are now an error when working with the old API as well
[0.15.100, 2019-07-17]:
- fixing issue with dumping deep-copied data from commented YAML, by providing
both the memo parameter to __deepcopy__, and by allowing startmarks to be
compared on their content (reported by `Theofilos Petsios
`__)
[0.15.99, 2019-07-12]:
- add `py.typed` to distribution, based on a PR submitted by `Michael Crusoe
`__
- merge PR 40 (also by Michael Crusoe) to more accurately specify repository in
the README (also reported in a misunderstood issue some time ago)
[0.15.98, 2019-07-09]:
- regenerate ext/_ruamel_yaml.c with Cython version 0.29.12, needed for Python
3.8.0b2 (reported by `John Vandenberg
`__)
[0.15.97, 2019-06-06]:
- regenerate ext/_ruamel_yaml.c with Cython version 0.29.10, needed for Python
3.8.0b1
- regenerate ext/_ruamel_yaml.c with Cython version 0.29.9, needed for Python
3.8.0a4 (reported by `Anthony Sottile
`__)
[0.15.96, 2019-05-16]:
- fix failure to indent comments on round-trip anchored block style scalars in
block sequence (reported by `William Kimball
`__)
[0.15.95, 2019-05-16]:
- fix failure to round-trip anchored scalars in block sequence (reported by
`William Kimball
`__)
- wheel files for Python 3.4 no longer provided (`Python 3.4 EOL 2019-03-18
`__)
[0.15.94, 2019-04-23]:
- fix missing line-break after end-of-file comments not ending in line-break
(reported by `Philip Thompson
`__)
[0.15.93, 2019-04-21]:
- fix failure to parse empty implicit flow mapping key
- in YAML 1.1 plain scalars `y`, `n`, `Y`, and `N` are now correctly recognised
as booleans and such strings dumped quoted (reported by `Marcel Bollmann
`__)
[0.15.92, 2019-04-16]:
- fix failure to parse empty implicit block mapping key (reported by `Nolan W
`__)
[0.15.91, 2019-04-05]:
- allowing duplicate keys would not work for merge keys (reported by mamacdon on
`StackOverflow `__
[0.15.90, 2019-04-04]:
- fix issue with updating `CommentedMap` from list of tuples (reported by `Peter
Henry `__)
[0.15.89, 2019-02-27]:
- fix for items with flow-mapping in block sequence output on single line
(reported by `Zahari Dim `__)
- fix for safe dumping erroring in creation of RepresenterError when dumping
namedtuple (reported and solution by `Jaakko Kantojärvi
`__)
[0.15.88, 2019-02-12]:
- fix inclusion of python code from the subpackage data (containing extra tests,
reported by `Florian Apolloner `__)
[0.15.87, 2019-01-22]:
- fix problem with empty lists and the code to reinsert merge keys (reported via
email by Zaloo)
[0.15.86, 2019-01-16]:
- reinsert merge key in its old position (reported by grumbler on `__)
- fix for issue with non-ASCII anchor names (reported and fix provided by
Dandaleon Flux via email)
- fix for issue when parsing flow mapping value starting with colon (in pure
Python only) (reported by `FichteFoll `__)
[0.15.85, 2019-01-08]:
- the types used by `SafeConstructor` for mappings and sequences can now be set
by assigning to `XXXConstructor.yaml_base_dict_type` (and `..._list_type`),
preventing the need to copy two methods with 50+ lines that had `var = {}`
hardcoded. (Implemented to help solve a feature request by `Anthony Sottile
`__ in an easier way)
[0.15.84, 2019-01-07]:
- fix for `CommentedMap.copy()` not returning `CommentedMap`, let alone copying
comments etc. (reported by `Anthony Sottile
`__)
[0.15.83, 2019-01-02]:
- fix for bug in roundtripping aliases used as key (reported via email by Zaloo)
[0.15.82, 2018-12-28]:
- anchors and aliases on scalar int, float, string and bool are now preserved.
Anchors do not need a referring alias for these (reported by `Alex Harvey
`__)
- anchors no longer lost on tagged objects when roundtripping (reported by
`Zaloo `__)
[0.15.81, 2018-12-06]:
- fix issue saving methods of metaclass derived classes (reported and fix
provided by `Douglas Raillard `__)
[0.15.80, 2018-11-26]:
- fix issue emitting BEL character when round-tripping invalid folded input
(reported by Isaac on `StackOverflow
`__)
[0.15.79, 2018-11-21]:
- fix issue with anchors nested deeper than alias (reported by gaFF on
`StackOverflow `__)
[0.15.78, 2018-11-15]:
- fix setup issue for 3.8 (reported by `Sidney Kuyateh
`__)
[0.15.77, 2018-11-09]:
- setting `yaml.sort_base_mapping_type_on_output = False`, will prevent explicit
sorting by keys in the base representer of mappings. Roundtrip already did not
do this. Usage only makes real sense for Python 3.6+ (feature request by
`Sebastian Gerber `__).
- implement Python version check in YAML metadata in `_test/test_z_data.py`
[0.15.76, 2018-11-01]:
- fix issue with empty mapping and sequence loaded as flow-style (mapping
reported by `Min RK `__, sequence by `Maged
Ahmed `__)
[0.15.75, 2018-10-27]:
- fix issue with single '?' scalar (reported by `Terrance
`__)
- fix issue with duplicate merge keys (prompted by `answering
`__ a `StackOverflow question
`__ by `math
`__)
[0.15.74, 2018-10-17]:
- fix dropping of comment on rt before sequence item that is sequence item
(reported by `Thorsten Kampe `__)
[0.15.73, 2018-10-16]:
- fix irregular output on pre-comment in sequence within sequence (reported by
`Thorsten Kampe `__)
- allow non-compact (i.e. next line) dumping sequence/mapping within sequence.
[0.15.72, 2018-10-06]:
- fix regression on explicit 1.1 loading with the C based scanner/parser
(reported by `Tomas Vavra `__)
[0.15.71, 2018-09-26]:
- fix regression where handcrafted CommentedMaps could not be initiated
(reported by `Dan Helfman `__)
- fix regression with non-root literal scalars that needed indent indicator
(reported by `Clark Breyman `__)
- tag:yaml.org,2002:python/object/apply now also uses __qualname__ on PY3
(reported by `Douglas RAILLARD `__)
[0.15.70, 2018-09-21]:
- reverted CommentedMap and CommentedSeq to subclass ordereddict resp. list,
reimplemented merge maps so that both `dict(**commented_map_instance)` and
JSON dumping works. This also allows checking with `isinstance()` on `dict`
resp. `list`. (Proposed by `Stuart Berg
`__, with feedback from `blhsing
`__ on `StackOverflow
`__)
[0.15.69, 2018-09-20]:
- fix issue with dump_all gobbling end-of-document comments on parsing (reported
by `Pierre B. `__)
[0.15.68, 2018-09-20]:
- fix issue with parsable, but incorrect output with nested flow-style sequences
(reported by `Dougal Seeley `__)
- fix issue with loading Python objects that have __setstate__ and recursion in
parameters (reported by `Douglas RAILLARD
`__)
[0.15.67, 2018-09-19]:
- fix issue with extra space inserted with non-root literal strings (Issue
reported and PR with fix provided by `Naomi Seyfer
`__.)
[0.15.66, 2018-09-07]:
- fix issue with fold indicating characters inserted in safe_load-ed folded
strings (reported by `Maximilian Hils `__).
[0.15.65, 2018-09-07]:
- |-
fix issue #232 revert to throw ParserError for unexpected `]`
and `}` instead of IndexError. (Issue reported and PR with fix
provided by `Naomi Seyfer `__.)
- added `key` and `reverse` parameter (suggested by Jannik Klemm via email)
- indent root level literal scalars that have directive or document end markers
at the beginning of a line
[0.15.64, 2018-08-30]:
- |-
support round-trip of tagged sequences: `!Arg [a, {b: 1}]`
- |-
single entry mappings in flow sequences now written by default without quotes
set `yaml.brace_single_entry_mapping_in_flow_sequence=True` to force
getting `[a, {b: 1}, {c: {d: 2}}]` instead of the default `[a, b: 1, c: {d: 2}]`
- fix issue when roundtripping floats starting with a dot such as `.5` (reported
by `Harrison Gregg `__)
[0.15.63, 2018-08-29]:
- small fix only necessary for Windows users that don't use wheels.
[0.15.62, 2018-08-29]:
- C based reader/scanner & emitter now allow setting of 1.2 as YAML version.
**The loading/dumping is still YAML 1.1 code**, so use the common subset of YAML
1.2 and 1.1 (reported by `Ge Yang `__)
[0.15.61, 2018-08-23]:
- support for round-tripping folded style scalars (initially requested by
`Johnathan Viduchinsky `__)
- update of C code
- speed up of scanning (~30% depending on the input)
[0.15.60, 2018-08-18]:
- cleanup for mypy
- spurious print in library (reported by `Lele Gaifax
`__), now automatically checked
[0.15.59, 2018-08-17]:
- issue with C based loader and leading zeros (reported by `Tom Hamilton Stubber
`__)
[0.15.58, 2018-08-17]:
- |-
simple mappings can now be used as keys when round-tripping::
{a: 1, b: 2}: hello world
although using the obvious operations (del, popitem) on the key will
fail, you can mutilate it by going through its attributes. If you load the
above YAML in `d`, then changing the value is cumbersome:
d = {CommentedKeyMap([('a', 1), ('b', 2)]): "goodbye"}
and changing the key even more so:
d[CommentedKeyMap([('b', 1), ('a', 2)])] = d.pop(
CommentedKeyMap([('a', 1), ('b', 2)]))
(you can use a `dict` instead of a list of tuples (or ordereddict), but that might result
in a different order, of the keys of the key, in the output)
- check integers to dump with 1.2 patterns instead of 1.1 (reported by `Lele
Gaifax `__)
[0.15.57, 2018-08-15]:
- Fix that CommentedSeq could no longer be used in adding or do a copy (reported
by `Christopher Wright `__)
[0.15.56, 2018-08-15]:
- fix issue with `python -O` optimizing away code (reported, and detailed cause
pinpointed, by `Alex Grönholm `__
[0.15.55, 2018-08-14]:
- unmade `CommentedSeq` a subclass of `list`. It is now indirectly a subclass of
the standard `collections.abc.MutableSequence` (without .abc if you are still
on Python2.7). If you do `isinstance(yaml.load('[1, 2]'), list)`) anywhere in
your code replace `list` with `MutableSequence`. Directly, `CommentedSeq` is
a subclass of the abstract baseclass
`ruamel.yaml.compat.MutableSliceableSequence`, with the result that
*(extended) slicing is supported on `CommentedSeq`*. (reported by `Stuart Berg
`__)
- duplicate keys (or their values) with non-ascii now correctly report in
Python2, instead of raising a Unicode error. (Reported by `Jonathan Pyle
`__)
[0.15.54, 2018-08-13]:
- fix issue where a comment could pop-up twice in the output (reported by `Mike
Kazantsev `__ and by `Nate Peterson
`__)
- fix issue where JSON object (mapping) without spaces was not parsed properly
(reported by `Marc Schmidt `__)
- fix issue where comments after empty flow-style mappings were not emitted
(reported by `Qinfench Chen `__)
[0.15.53, 2018-08-12]:
- fix issue with flow style mapping with comments gobbled newline (reported by
`Christopher Lambert `__)
- fix issue where single '+' under YAML 1.2 was interpreted as integer, erroring
out (reported by `Jethro Yu `__)
[0.15.52, 2018-08-09]:
- added `.copy()` mapping representation for round-tripping (`CommentedMap`) to
fix incomplete copies of merged mappings (reported by `Will Richards
`__)
- Also unmade that class a subclass of ordereddict to solve incorrect behaviour
for `{**merged-mapping}` and `dict(**merged-mapping)` (reported by `Filip
Matzner `__)
[0.15.51, 2018-08-08]:
- Fix method name dumps (were not dotted) and loads (reported by `Douglas
Raillard `__)
- Fix spurious trailing white-space caused when the comment start column was no
longer reached and there was no actual EOL comment (e.g. following empty line)
and doing substitutions, or when quotes around scalars got dropped. (reported
by `Thomas Guillet `__)
[0.15.50, 2018-08-05]:
- Allow `YAML()` as a context manager for output, thereby making it much easier
to generate multi-documents in a stream (see the sketch after this entry).
- Fix issue with incorrect type information for `load()` and `dump()` (reported
by `Jimbo Jim `__)
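A minimal sketch of using `YAML()` as a context manager for output, as described in the 0.15.50 entry above; the import name is assumed to be the upstream `ruamel.yaml` (`ruyaml` in this fork), and the two documents are illustrative.
```
import sys
from ruamel.yaml import YAML  # in this fork: from ruyaml import YAML

# each dump() inside the context appends one document to the output stream
with YAML(output=sys.stdout) as yaml:
    yaml.explicit_start = True       # emit '---' before each document
    yaml.dump({'stage': 'build'})
    yaml.dump({'stage': 'test'})
```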
[0.15.49, 2018-08-05]:
- |-
fix preservation of leading newlines in root level literal style scalar,
and preserve comment after literal style indicator (`| # some comment`)
Both needed for round-tripping multi-doc streams in
`ryd `__.
[0.15.48, 2018-08-03]:
- |-
housekeeping: `oitnb` for formatting, mypy 0.620 upgrade and conformity
[0.15.47, 2018-07-31]:
- fix broken 3.6 manylinux1 (result of an unclean `build` (reported by `Roman
Sichnyi `__)
[0.15.46, 2018-07-29]:
- fixed DeprecationWarning for importing from `collections` on 3.7 (issue 210,
reported by `Reinoud Elhorst `__). It was
`difficult to find why tox/pytest did not report
`__ and as time consuming to
actually `fix `__ the tests.
[0.15.45, 2018-07-26]:
- After adding failing test for `YAML.load_all(Path())`, remove StopIteration
(PR provided by `Zachary Buhman `__, also
reported by `Steven Hiscocks `__.
[0.15.44, 2018-07-14]:
- Correct loading plain scalars consisting of numerals only and starting with
`0`, when not explicitly specifying YAML version 1.1. This also fixes the
issue about dumping string `'019'` as plain scalars as reported by `Min RK
`__, that prompted this change.
[0.15.43, 2018-07-12]:
- |-
merge PR33: Python2.7 on Windows is narrow, but has no
`sysconfig.get_config_var('Py_UNICODE_SIZE')`. (merge provided by
`Marcel Bargull `__)
- `register_class()` now returns class (proposed by
`Mike Nerone `__)
[0.15.42, 2018-07-01]:
- fix regression showing only on narrow Python 2.7 (py27mu) builds (with help
from `Marcel Bargull `__ and `Colm O'Connor
<>`__).
- run pre-commit `tox` on Python 2.7 wide and narrow, as well as
3.4/3.5/3.6/3.7/pypy
[0.15.41, 2018-06-27]:
- add detection of C-compile failure (investigation prompted by `StackOverflow
`__ by `Emmanuel Blot
`__), which was removed
while no longer dependent on `libyaml`, C-extensions compilation still needs a
compiler though.
[0.15.40, 2018-06-18]:
- added links to landing places as suggested in issue 190 by `KostisA
`__
- |-
fixes issue #201: decoding unicode escaped tags on Python2, reported
by `Dan Abolafia `__
[0.15.39, 2018-06-16]:
- merge PR27 improving package startup time (and loading when regexp not
actually used), provided by `Marcel Bargull
`__
[0.15.38, 2018-06-13]:
- fix for losing precision when roundtripping floats by `Rolf Wojtech
`__
- fix for hardcoded dir separator not working for Windows by `Nuno André
`__
- typo fix by `Andrey Somov `__
[0.15.37, 2018-03-21]:
- again trying to create installable files for 187
[0.15.36, 2018-02-07]:
- fix issue 187, incompatibility of C extension with 3.7 (reported by Daniel
Blanchard)
[0.15.35, 2017-12-03]:
- allow `None` as stream when specifying `transform` parameters to
`YAML.dump()`. This is useful if the transforming function doesn't return a
meaningful value (inspired by `StackOverflow
`__ by `rsaw
`__).
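A minimal sketch of the `transform` / `stream=None` combination described in the 0.15.35 entry above; the import name is assumed to be the upstream `ruamel.yaml` (`ruyaml` in this fork), and the transforming function is illustrative.
```
from ruamel.yaml import YAML  # in this fork: from ruyaml import YAML

def report_width(s: str) -> None:
    # side effect only; no meaningful return value, hence stream=None
    print(max(len(line) for line in s.splitlines()))

yaml = YAML()
yaml.dump({'a': 1, 'b': [2, 3]}, None, transform=report_width)
```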
[0.15.34, 2017-09-17]:
- fix for issue 157: CDumper not dumping floats (reported by Jan Smitka)
[0.15.33, 2017-08-31]:
- support for "undefined" round-tripping tagged scalar objects (in addition to
tagged mapping object). Inspired by a use case presented by Matthew Patton on
`StackOverflow `__.
- |-
fix issue 148: replace cryptic error message when using `!!timestamp` with an
incorrectly formatted or non-scalar. Reported by FichteFoll.
[0.15.32, 2017-08-21]:
- |-
allow setting `yaml.default_flow_style = None` (default: `False`) for `typ='rt'`.
- fix for issue 149: multiplications on `ScalarFloat` now return `float`
[0.15.31, 2017-08-15]:
- fix Comment dumping
[0.15.30, 2017-08-14]:
- |-
fix for issue with "compact JSON" not parsing: `{"in":{},"out":{}}`
(reported on `StackOverflow `_ by
`mjalkio `_
[0.15.29, 2017-08-14]:
- |-
fix issue #51: different indents for mappings and sequences (reported by Alex Harvey)
- fix for flow sequence/mapping as element/value of block sequence with
sequence-indent minus dash-offset not equal two.
[0.15.28, 2017-08-13]:
- |-
fix issue #61: merge of merge cannot be __repr__-ed (reported by Tal Liron)
[0.15.27, 2017-08-13]:
- fix issue 62, YAML 1.2 allows `?` and `:` in plain scalars if non-ambiguous
(reported by nowox)
- fix lists within lists which would make comments disappear
[0.15.26, 2017-08-10]:
- fix for disappearing comment after empty flow sequence (reported by
oit-tzhimmash)
[0.15.25, 2017-08-09]:
- fix for problem with dumping (unloaded) floats (reported by eyenseo)
[0.15.24, 2017-08-09]:
- added ScalarFloat which supports roundtripping of 23.1, 23.100, 42.00E+56,
0.0, -0.0 etc. while keeping the format. Underscores in mantissas are not
preserved/supported (yet, is anybody using that?).
- (finally) fixed longstanding issue 23 (reported by `Anthony Sottile
`_), now handling comment between block
mapping key and value correctly
- warn on YAML 1.1 float input that is incorrect (triggered by invalid YAML
provided by Cecil Curry)
- |-
allow setting of boolean representation (`false`, `true`) by using:
`yaml.boolean_representation = [u'False', u'True']`
[0.15.23, 2017-08-01]:
- fix for round_tripping integers on 2.7.X > sys.maxint (reported by ccatterina)
[0.15.22, 2017-07-28]:
- fix for round_tripping single excl. mark tags doubling (reported and fix by Jan
Brezina)
[0.15.21, 2017-07-25]:
- fix for writing unicode in new API,
https://stackoverflow.com/a/45281922/1307905
[0.15.20, 2017-07-23]:
- wheels for windows including C extensions
[0.15.19, 2017-07-13]:
- added object constructor for rt, decorator `yaml_object` to replace
YAMLObject (see the sketch after this entry).
- fix for problem using load_all with Path() instance
- fix for load_all in combination with zero indent block style literal
(`pure=True` only!)
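A minimal sketch of the `yaml_object` decorator mentioned in the first item of the 0.15.19 entry above; the import name is assumed to be the upstream `ruamel.yaml` (`ruyaml` in this fork), and the class, tag, and attribute are illustrative.
```
import sys
from ruamel.yaml import YAML, yaml_object  # in this fork: from ruyaml import ...

yml = YAML()

@yaml_object(yml)
class Color:
    yaml_tag = '!Color'            # optional; defaults to '!' + class name
    def __init__(self, name: str) -> None:
        self.name = name

# round-trips Color instances under the !Color tag
yml.dump({'paint': Color('red')}, sys.stdout)
```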
[0.15.18, 2017-07-04]:
- missing `pure` attribute on `YAML` useful for implementing `!include` tag
constructor for `including YAML files in a YAML file
`_
- some documentation improvements
- trigger of doc build on new revision
[0.15.17, 2017-07-03]:
- support for Unicode supplementary Plane **output** with allow_unicode (input
was already supported, triggered by `this
`_ Stack Overflow Q&A)
[0.15.16, 2017-07-01]:
- minor typing issues (reported and fix provided by `Manvendra Singh
`_)
- small doc improvements
[0.15.15, 2017-06-27]:
- fix for issue 135, typ='safe' not dumping in Python 2.7 (reported by Andrzej
Ostrowski `_)
[0.15.14, 2017-06-25]:
- setup.py: change ModuleNotFoundError to ImportError (reported and fix by Asley
Drake)
[0.15.13, 2017-06-24]:
- suppress duplicate key warning on mappings with merge keys (reported by
Cameron Sweeney)
[0.15.12, 2017-06-24]:
- remove fatal dependency of setup.py on wheel package (reported by Cameron
Sweeney)
[0.15.11, 2017-06-24]:
- fix for issue 130, regression in nested merge keys (reported by `David Fee
`_)
[0.15.10, 2017-06-23]:
- top level PreservedScalarString not indented if not explicitly asked to
- remove Makefile (not very useful anyway)
- some mypy additions
[0.15.9, 2017-06-16]:
- |-
fix for issue 127: tagged scalars were always quoted and separated
by a newline when in a block sequence (reported and largely fixed by
`Tommy Wang `_)
[0.15.8, 2017-06-15]:
- allow plug-in install via `pip install ruamel.yaml[jinja2]`
[0.15.7, 2017-06-14]:
- add plug-in mechanism for load/dump pre resp. post-processing
[0.15.6, 2017-06-10]:
- a set() with duplicate elements now throws error in rt loading
- support for toplevel column zero literal/folded scalar in explicit documents
[0.15.5, 2017-06-08]:
- repeat `load()` on a single `YAML()` instance would fail.
[0.15.4, 2017-06-08]:
- |-
`transform` parameter on dump that expects a function taking a
string and returning a string. This allows transformation of the output
before it is written to stream.
- some updates to the docs
[0.15.3, 2017-06-07]:
- No longer try to compile C extensions on Windows. Compilation can be forced by
setting the environment variable `RUAMEL_FORCE_EXT_BUILD` to some value before
starting the `pip install`.
[0.15.2, 2017-06-07]:
- update to conform to mypy 0.511:mypy --strict
[0.15.1, 2017-06-07]:
- Any `duplicate keys
`_ in mappings
generate an error (in the old API this change generates a warning until 0.16); see the sketch after this entry
- dependency on ruamel.ordereddict for 2.7 now via extras_require
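A hedged sketch of the duplicate-key behaviour referenced in the 0.15.1 entry above: with the new API a duplicate key raises `DuplicateKeyError`, and `allow_duplicate_keys` restores the lenient behaviour. The import name is assumed to be the upstream `ruamel.yaml` (`ruyaml` in this fork); the document is illustrative.
```
from ruamel.yaml import YAML
from ruamel.yaml.constructor import DuplicateKeyError

doc = "a: 1\na: 2\n"
yaml = YAML()
try:
    yaml.load(doc)
except DuplicateKeyError as exc:
    print('duplicate key rejected:', exc)

yaml.allow_duplicate_keys = True   # opt back in to the old, lenient behaviour
data = yaml.load(doc)              # loads without raising
```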
[0.15.0, 2017-06-04]:
- it is now allowed to pass in a `pathlib.Path` as "stream" parameter to all
load/dump functions
- passing in a non-supported object (e.g. a string) as "stream" will result in a
much more meaningful YAMLStreamError.
- assigning a normal string value to an existing CommentedMap key or
CommentedSeq element will result in a value cast to the previous value's type
if possible.
[0.14.12, 2017-05-14]:
- fix for issue 119, deepcopy not returning subclasses (reported and PR by
Constantine Evans )
[0.14.11, 2017-05-01]:
- fix for issue 103 allowing implicit documents after document end marker line
(`...`) in YAML 1.2
[0.14.10, 2017-04-26]:
- fix problem with emitting using cyaml
[0.14.9, 2017-04-22]:
- remove dependency on `typing` while still supporting `mypy`
(http://stackoverflow.com/a/43516781/1307905)
- fix unclarity in doc that stated 2.6 is supported (reported by feetdust)
[0.14.8, 2017-04-19]:
- fix Text not available on 3.5.0 and 3.5.1, now proactively setting version
guards on all files (reported by `João Paulo Magalhães
`_)
[0.14.7, 2017-04-18]:
- round trip of integers (decimal, octal, hex, binary) now preserve leading
zero(s) padding and underscores. Underscores are presumed to be at regular
distances (i.e. `0o12_345_67` dumps back as `0o1_23_45_67` as the space from
the last digit to the underscore before that is the determining factor).
[0.14.6, 2017-04-14]:
- binary, octal and hex integers are now preserved by default. This was a known
deficiency. Working on this was prompted by the issue report (112) from
devnoname120, as well as the additional experience with `.replace()` on
`scalarstring` classes.
- fix issues 114 cannot install on Buildozer (reported by mixmastamyk). Setting
env. var `RUAMEL_NO_PIP_INSTALL_CHECK` will suppress `pip`-check.
[0.14.5, 2017-04-04]:
- fix issue 109 None not dumping correctly at top level (reported by Andrea
Censi)
- fix issue 110 .replace on Preserved/DoubleQuoted/SingleQuoted ScalarString
would give back "normal" string (reported by sandres23)
[0.14.4, 2017-03-31]:
- fix readme
[0.14.3, 2017-03-31]:
- fix for 0o52 not being a string in YAML 1.1 (reported on `StackOverflow Q&A
43138503>`_ by `Frank D
`_
[0.14.2, 2017-03-23]:
- fix for old default pip on Ubuntu 14.04 (reported by Sébastien
Maccagnoni-Munch)
[0.14.1, 2017-03-22]:
- fix Text not available on 3.5.0 and 3.5.1 (reported by Charles
Bouchard-Légaré)
[0.14.0, 2017-03-21]:
- updates for mypy --strict
- preparation for moving away from inheritance in Loader and Dumper, calls from
e.g. the Representer to the Serializer.serialize() are now done via the
attribute .serializer.serialize(). Usage of .serialize() outside of Serializer
will be deprecated soon
- some extra tests on main.py functions
[0.13.14, 2017-02-12]:
- fix for issue 97, clipped block scalar followed by empty lines and comment
would result in two CommentTokens of which the first was dropped. (reported by
Colm O'Connor)
[0.13.13, 2017-01-28]:
- fix for issue 96, prevent insertion of extra empty line if indented mapping
entries are separated by an empty line (reported by Derrick Sawyer)
[0.13.11, 2017-01-23]:
- allow ':' in flow style scalars if not followed by space. Also don't quote
such scalar as this is no longer necessary.
- add python 3.6 manylinux wheel to PyPI
[0.13.10, 2017-01-22]:
- fix for issue 93, insert spurious blank line before single line comment
between indented sequence elements (reported by Alex)
[0.13.9, 2017-01-18]:
- fix for issue 92, wrong import name reported by the-corinthian
[0.13.8, 2017-01-18]:
- fix for issue 91, when a compiler is unavailable reported by Maximilian Hils
- fix for deepcopy issue with TimeStamps not preserving 'T', reported on
`StackOverflow Q&A `_ by
`Quuxplusone `_
[0.13.7, 2016-12-27]:
- fix for issue 85, constructor.py importing unicode_literals caused mypy to
fail on 2.7 (reported by Peter Amstutz)
[0.13.6, 2016-12-27]:
- fix for issue 83, collections.OrderedDict not representable by SafeRepresenter
(reported by Frazer McLean)
[0.13.5, 2016-12-25]:
- fix for issue 84, deepcopy not properly working (reported by Peter Amstutz)
[0.13.4, 2016-12-05]:
- another fix for issue 82, change to non-global resolver data broke implicit
type specification
[0.13.3, 2016-12-05]:
- fix for issue 82, deepcopy not working (reported by code monk)
[0.13.2, 2016-11-28]:
- fix for comments after empty (null) values (reported by dsw2127 and cokelaer)
[0.13.1, 2016-11-22]:
- optimisations on memory usage when loading YAML from large files (py3 -50%,
py2 -85%)
[0.13.0, 2016-11-20]:
- if `load()` or `load_all()` is called with only a single argument (stream or
string) a UnsafeLoaderWarning will be issued once. If appropriate you can
suppress this warning by filtering it. Explicitly supplying the
`Loader=ruamel.yaml.Loader` argument, will also prevent it from being issued.
You should however consider using `safe_load()`, `safe_load_all()` if your
YAML input does not use tags.
- allow adding comments before and after keys (based on `StackOverflow Q&A
`_ by `msinn
`_)
[0.12.18, 2016-11-16]:
- another fix for numpy (re-reported independently by PaulG & Nathanial Burdic)
[0.12.17, 2016-11-15]:
- only the RoundTripLoader included the Resolver that supports YAML 1.2; now all
loaders do (reported by mixmastamyk)
[0.12.16, 2016-11-13]:
- allow dot char (and many others) in anchor name. Fix issue 72 (reported by
Shalon Wood)
- |-
Slightly smarter behaviour dumping strings when no style is
specified. Single string scalars that start with single quotes
or have newlines now are dumped double quoted "'abc\nklm'" instead of
'''abc
klm'''
[0.12.14, 2016-09-21]:
- preserve round-trip sequences that are mapping keys (prompted by stackoverflow
question 39595807 from Nowox)
[0.12.13, 2016-09-15]:
- |-
Fix for issue #60 representation of CommentedMap with merge
keys incorrect (reported by Tal Liron)
[0.12.11, 2016-09-06]:
- Fix issue 58 endless loop in scanning tokens (reported by Christopher Lambert)
[0.12.10, 2016-09-05]:
- Make previous fix depend on unicode char width (32 bit unicode support is a
problem on MacOS reported by David Tagatac)
[0.12.8, 2016-09-05]:
- To be ignored Unicode characters were not properly regex matched (no specific
tests, PR by Haraguroicha Hsu)
[0.12.7, 2016-09-03]:
- fixing issue 54 empty lines with spaces (reported by Alex Harvey)
[0.12.6, 2016-09-03]:
- fixing issue 46 empty lines between top-level keys were gobbled (but not
between sequence elements, nor between keys in nested mappings (reported by
Alex Harvey)
[0.12.5, 2016-08-20]:
- |-
fixing issue 45 preserving datetime formatting (submitted by altuin)
Several formatting parameters are preserved with some normalisation:
- preserve 'T', 't' is replaced by 'T', multiple spaces between date and time
reduced to one.
- optional space before timezone is removed
- still using microseconds, but now rounded (.1234567 -> .123457)
- Z/-5/+01:00 preserved
[0.12.4, 2016-08-19]:
- |-
Fix for issue 44: missing preserve_quotes keyword argument (reported by M. Crusoe)
[0.12.3, 2016-08-17]:
- correct 'in' operation for merged CommentedMaps in round-trip mode
(implementation inspired by J.Ngo, but original not working for merges)
- iteration over round-trip loaded mappings, that contain merges. Also keys(),
items(), values() (Py3/Py2) and iterkeys(), iteritems(), itervalues(),
viewkeys(), viewitems(), viewvalues() (Py2)
- |-
reuse of anchor name now generates warning, not an error. Round-tripping such
anchors works correctly. This inherited PyYAML issue was brought to attention
by G. Coddut (and was long standing https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=515634)
suppressing the warning:
```
import warnings
from ruamel.yaml.error import ReusedAnchorWarning
warnings.simplefilter("ignore", ReusedAnchorWarning)
```
[0.12.2, 2016-08-16]:
- minor improvements based on feedback from M. Crusoe
https://bitbucket.org/ruamel/yaml/issues/42/
[0.12.0, 2016-08-16]:
- drop support for Python 2.6
- include initial Type information (inspired by M. Crusoe)
[0.11.15, 2016-08-07]:
- Change to prevent FutureWarning in NumPy, as reported by tgehring ("comparison
to None will result in an elementwise object comparison in the future")
[0.11.14, 2016-07-06]:
- fix preserve_quotes missing on original Loaders (as reported by Leynos,
bitbucket issue 38)
[0.11.13, 2016-07-06]:
- documentation only, automated linux wheels
[0.11.12, 2016-07-06]:
- |-
added support for roundtrip of single/double quoted scalars using:
ruamel.yaml.round_trip_load(stream, preserve_quotes=True)
[0.11.10, 2016-05-02]:
- added `.insert(pos, key, value, comment=None)` to CommentedMap
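A minimal sketch of the `CommentedMap.insert()` signature added in the 0.11.10 entry above; the import name is assumed to be the upstream `ruamel.yaml` (`ruyaml` in this fork), and the keys, values, and comment text are illustrative.
```
import sys
from ruamel.yaml import YAML
from ruamel.yaml.comments import CommentedMap

cm = CommentedMap([('a', 1), ('c', 3)])
cm.insert(1, 'b', 2, comment='inserted between a and c')
YAML().dump(cm, sys.stdout)
# expected output (roughly):
#   a: 1
#   b: 2  # inserted between a and c
#   c: 3
```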
[0.11.10, 2016-04-19]:
- indent=2, block_seq_indent=2 works as expected
[0.11.0, 2016-02-18]:
- RoundTripLoader loads 1.2 by default (no sexagesimals, 012 octals nor
yes/no/on/off booleans)
[0.10.11, 2015-09-17]:
- Fix issue 13: dependency on libyaml to be installed for yaml.h
[0.10.10, 2015-09-15]:
- Python 3.5 tested with tox
- pypy full test (old PyYAML tests failed on too many open file handles)
[0.10.6-0.10.9, 2015-09-14]:
- Fix for issue 9
- Fix for issue 11: double dump losing comments
- Include libyaml code
- move code from 'py' subdir for proper namespace packaging.
[0.10.5, 2015-08-25]:
- preservation of newlines after block scalars. Contributed by Sam Thursfield.
[0.10, 2015-06-22]:
- preservation of hand crafted anchor names ( not of the form "idNNN")
- preservation of map merges ( `<<` )
[0.9, 2015-04-18]:
- collections read in by the RoundTripLoader now have a `lc` property that can
be queried for line and column (`lc.line` resp. `lc.col`)
[0.8, 2015-04-15]:
- bug fix for non-roundtrip save of ordereddict
- adding/replacing end of line comments on block style mappings/sequences
[0.7.2, 2015-03-29]:
- support for end-of-line comments on flow style sequences and mappings
[0.7.1, 2015-03-27]:
- RoundTrip capability of flow style sequences ( 'a: b, c, d' )
[0.7, 2015-03-26]:
- tests (currently failing) for inline sequence and non-standard spacing between
block sequence dash and scalar (Anthony Sottile)
- initial possibility (on list, i.e. CommentedSeq) to set the flow format
explicitly
- RoundTrip capability of flow style sequences ( 'a: b, c, d' )
[0.6.1, 2015-03-15]:
- setup.py changed so ruamel.ordereddict no longer is a dependency if not on
CPython 2.x (used to test only for 2.x, which breaks pypy 2.5.0 reported by
Anthony Sottile)
[0.6, 2015-03-11]:
- basic support for scalars with preserved newlines
- html option for yaml command
- check if yaml C library is available before trying to compile C extension
- include unreleased change in PyYAML dd 20141128
[0.5, 2015-01-14]:
- move configobj -> YAML generator to own module
- added dependency on ruamel.base (based on feedback from Sess)
[0.4, 2014-11-25]:
- move comment classes in own module comments
- fix omap pre comment
- make !!omap and !!set take parameters. There are still some restrictions:
- no comments before the !!tag
- extra tests
[0.3, 2014-11-24]:
- fix value comment occurring as on previous line (looking like eol comment)
- INI conversion in yaml + tests
- (hidden) test in yaml for debugging with auto command
- fix for missing comment in middle of simple map + test
[0.2, 2014-11-23]:
- add ext/_yaml.c etc to the source tree
- tests for yaml to work on 2.6/3.3/3.4
- change install so that you can include ruamel.yaml instead of ruamel.yaml.py
- add "yaml" utility with initial subcommands (test rt, from json)
[0.1, 2014-11-22]:
- merge py2 and py3 code bases
- remove support for 2.5/3.0/3.1/3.2 (this merge relies on u"" as available in
3.3 and . imports not available in 2.5)
- tox.ini for 2.7/3.4/2.6/3.3
- remove lib3/ and tests/lib3 directories and content
- commit
- correct --verbose for test application
- DATA=changed to be relative to __file__ of code
- DATA using os.sep
- remove os.path from imports as os is already imported
- have test_yaml.py exit with value 0 on success, 1 on failures, 2 on error
- added support for octal integers starting with '0o' keep support for 01234 as
well as 0o1234
- commit
- |-
added test_roundtrip_data:
requires a .data file and .roundtrip (empty), yaml_load .data
and compare dump against original.
- |-
fix grammar as per David Pursehouse:
https://bitbucket.org/xi/pyyaml/pull-request/5/fix-grammar-in-error-messages/diff
- http://www.json.org/ extra escaped char `\/` add .skip-ext as libyaml is not
updated
- |-
David Fraser: Extract a method to represent keys in mappings, so that
a subclass can choose not to quote them, used in represent_mapping
https://bitbucket.org/davidfraser/pyyaml/
- add CommentToken and percolate through parser and composer and constructor
- add Comments to wrapped mapping and sequence constructs (not to scalars)
- generate YAML with comments
- initial README
python-ruyaml-0.92.1/LICENSE 0000664 0000000 0000000 00000002141 15056754172 0015452 0 ustar 00root root 0000000 0000000 The MIT License (MIT)
Copyright (c) 2014-2025 Anthon van der Neut, Ruamel bvba
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
python-ruyaml-0.92.1/MANIFEST.in 0000664 0000000 0000000 00000000027 15056754172 0016204 0 ustar 00root root 0000000 0000000 prune ext*
prune clib*
python-ruyaml-0.92.1/README.md 0000664 0000000 0000000 00000057125 15056754172 0015740 0 ustar 00root root 0000000 0000000 # ruyaml
The ``ruyaml`` package is a fork of ``ruamel.yaml``, created in order to
secure the future of the library, mainly by having a pool of maintainers.
## Notes
- Several references to ``ruamel.yaml``, as well as its changelog and
some ancient compatibility warnings, have been retained because
that makes merging future upstream changes substantially easier.
- The current version has the same API as the "ruamel.yaml" package.
However, it will install the `ruyaml` python module. Thus, simply
replace ``from ruamel import yaml`` with ``import ruyaml as yaml``
(or equivalent) and you're all set.
- Python 3.7 is the minimum supported version of Python
# ruamel.yaml
`ruamel.yaml` is a YAML 1.2 loader/dumper package for Python.
| | |
| - | - |
| version |0.18.14 |
| updated |2025-06-09 |
| documentation |https://yaml.dev/doc/ruamel.yaml |
| repository |https://sourceforge.net/projects/ruamel-yaml |
| pypi |https://pypi.org/project/ruamel.yaml |
## breaking changes, that may make future uploads to PyPI impossible
*If you are interested in future upgrades of `ruamel.yaml`
please check the [documentation on installing](https://yaml.dev/doc/ruamel.yaml/install/),
since at some point I might not be able to upload a new version to PyPI with updated information.*
`ruamel.yaml` was intentionally named as `yaml` in a namespace `ruamel`. The namespace allows the installation
name to correspond unchanged to how the package is imported, reduces the number of links I have to create
in site-packages of a Python install during development, as well as providing a recognisable set of packages
my company releases to the public.
However, after uploading version 0.18.7, I got an email from PyPI, about having to change the project name
to `ruamel_yaml` to comply with PEP 625, sometime in the future. The email doesn't say if namespace packages are
no longer allowed, or how to deal with the very real clash with the pre-existing package `ruamel_yaml`.
I might not be able to adapt `ruamel.yaml`, in
a way that does not negatively affect the 0.5 million daily downloads (and my own usage of the package) in time.
My experience with other such service downgrades (Bitbucket, Readthedocs), has not been entirely positive.
-----
As announced, in 0.18.0, the old PyYAML functions have been deprecated.
(`scan`, `parse`, `compose`, `load`, `emit`, `serialize`, `dump` and their variants
(`_all`, `safe_`, `round_trip_`, etc)). If you only read this after your program has
stopped working: I am sorry to hear that, but that also means you, or the person
developing your program, has not tested with warnings on (which is the recommendation
in PEP 565, and e.g. defaulting when using `pytest`). If you have troubles, explicitly use
```
pip install "ruamel.yaml<0.18.0"
```
or put something to that effect in your requirements, to give yourself
some time to solve the issue.
There will be at least one more potentially breaking change in the 0.18 series: `YAML(typ='unsafe')`
now has a pending deprecation warning and is going to be deprecated, probably before the end of 2023.
If you only use it to dump, please use the new `YAML(typ='full')`, the result of that can be *safely*
loaded with a default instance `YAML()`, as that will get you inspectable, tagged, scalars, instead of
executed Python functions/classes. (You should probably add constructors for what you actually need,
but I do consider adding a `ruamel.yaml.unsafe` package that will re-add the `typ='unsafe'` option.)
*Please adjust/pin your dependencies accordingly if necessary.*
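A minimal sketch of that workflow (the `Point` class here is made up purely for illustration; the loading behaviour is as described above):
```
import io
from ruamel.yaml import YAML

class Point:  # an unregistered class, used only for illustration
    def __init__(self, x, y):
        self.x, self.y = x, y

buf = io.StringIO()
YAML(typ='full').dump(Point(1, 2), buf)  # dumped as a tagged node, no code is executed
print(buf.getvalue())                    # e.g. a !!python/object:... tagged mapping
data = YAML().load(buf.getvalue())       # the default round-trip loader keeps the tag inspectable
```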
There seems to be a CVE on `ruamel.yaml`, stating that the `load()` function could be abused
because of unchecked input. `load()` was never the default function (that was `round_trip_load()`
before the new API came into existence). So the creator of that CVE was ill informed and
probably lazily assumed that since `ruamel.yaml` is a derivative of PyYAML (for which
a similar CVE exists), the same problem would still exist, without checking.
So the CVE was always inappropriate, now just more so, as the call
to the function `load()` with any input will terminate your program with an error message. If you
(have to) care about such things as this CVE, my recommendation is to stop using Python
completely, as `pickle.load()` can be abused in the same way as `load()` (and, unlike `load()`,
is only documented to be unsafe, without a development-time warning).
Version 0.18.9 was the last one tested to be working with Python 3.7
Version 0.17.21 was the last one tested to be working on Python 3.5 and 3.6
The 0.16.13 release was the last that was tested to be working on Python 2.7.
There are two extra plug-in packages
(`ruamel.yaml.bytes` and `ruamel.yaml.string`)
for those not wanting to do the streaming to a
`io.BytesIO/StringIO` buffer themselves.
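For reference, the buffering those plug-ins save you from doing yourself looks roughly like this (a sketch, not the plug-in API itself):
```
import io
from ruamel.yaml import YAML

yaml = YAML()
buf = io.StringIO()
yaml.dump({'a': [1, 2]}, buf)  # stream to an in-memory text buffer yourself
text = buf.getvalue()          # the YAML document as a str
```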
If your package uses `ruamel.yaml` and is not listed on PyPI, drop me an
email, preferably with some information on how you use the package (or a
link to the repository) and I'll keep you informed when the status of
the API is stable enough to make the transition.
For packaging purposes you can use a download of the [tar balls of tagged source](https://yaml.dev/ruamel-dl-tagged-releases)
NEXT:
- duplicate merge keys are never allowed (not even with .allow_duplicate_keys = True)
- merge keys now keep their position if a key before the merge key gets deleted (previously a key after the merge key would move before it)
0.18.14 (2025-06-09):
- Fix issue with constructing dataclasses that have a default factory attribute, but were missing a mapping value for that attribute. Reported by [Victor Prieto](https://sourceforge.net/u/vsprieto/profile/)
- the tagged release tar files can now also be downloaded from https://yaml.dev/ruamel-dl-tagged-releases/ please adjust if you use https://sourceforge.net/projects/ruamel-dl-tagged-releases/files/ as that repository in sourceforge will no longer be updated from some later date.
0.18.13 (2025-06-06):
- Fix line wrapping on plain scalars not observing width correctly. Issue 529, reported by [Sebastien Vermeille](https://sourceforge.net/u/svermeille/profile/)
- Fix sha256 and length in RECORD files. Reported by [Evan](https://sourceforge.net/u/bempelise/profile/)
0.18.12 (2025-05-30):
- fix additional issue with extra space in double quoted string. Reported by [Saugat Pachhai](https://sourceforge.net/u/skshetry/profile/)
- fix duplicate key url, now pointing to yaml.dev. Reported by [Hugo](https://sourceforge.net/u/hugovk/profile/)
- fix broken RECORD file, which was a problem for uv, not pip. Reported by [konstin](https://sourceforge.net/u/konstin/profile/)
0.18.11 (2025-05-19):
- function `load_yaml_guess_indent` now takes an option `yaml` argument so you can provide an already created/configured `YAML` instance
- Sequence item indicator with both comment/empty line before indicator **and** comment before sequence item, could not move comment and raise `NotImplementedError`. Reported by [Karsten Tessarzik](https://sourceforge.net/u/kars10/profile/).
- missing f for f-string (reported by π, via email)
- fixed issue with extra space in double quoted dump (reported by [Jan Möller](https://sourceforge.net/u/redfiredragon/profile/))
0.18.10 (2025-01-06):
- implemented changes to the setup.py for Python 3.14 as suggested by [Miro Hrončok](https://sourceforge.net/u/hroncok/profile/) in merge requests (MR not merged as those files are copied in from `develop` config)
0.18.9 (2025-01-05):
- fix issue with roundtripping 0 in YAML 1.1 reported by [Peter Law](https://sourceforge.net/u/peterjclaw/profile/)
0.18.8 (2025-01-02):
- added warning to README.md that PyPI might block updates due to breaking changes
0.18.7 (2024-12-30):
- fixes for README (reported by [Kees Bakker](https://sourceforge.net/u/keesb/profile/))
- fixes preserving anchor on scalar integer `0` (issue reported by [Mor Peled](https://sourceforge.net/u/morp/profile/) and also in a question by [Ravi](https://stackoverflow.com/users/6550398/ravi) on [Stackoverflow](https://stackoverflow.com/a/79306830/1307905))
- fix for formatting of README suggested by [Michael R. Crusoe](https://sourceforge.net/u/crusoe/profile/)
0.18.6 (2024-02-07):
- fixed an issue with dataclass loading when the fields were collections (bug found as a result of a question by [FibroMyAlgebra](https://stackoverflow.com/users/6855070/fibromyalgebra) on [StackOverflow](https://stackoverflow.com/a/77485786/1307905))
- fixed an issue loading dataclasses with `InitVar` fields when `from __future__ import annotations` was used to delay evaluation of typing.
0.18.5 (2023-11-03):
- there is some indication that dependent packages have been pinned to use specific (tested) versions and just install the latest, even in Python versions that have reached end-of-life
0.18.4 (2023-11-01):
- YAML() instance has a `doc_infos` attribute which is a cumulative list of DocInfo instances (one for `load()`, one per document for `load_all()`). DocInfo instances contain version information (requested, directive) and tag directive information
- fix issue that the YAML instance tags attribute was not reset between documents, resulting in mixing of tag directives of multiple documents. Now only provides tag directive information on latest document after loading. This means tags for dumping must be set **again** after a document is loaded with the same instance. (because of this, tags will be removed in favour of a different mechanism in the future)
- fix issue with multiple documents intermixing YAML 1.2 and YAML 1.1; the VersionedResolver now resets
- fix issue with disappearing comment when next token was Tag (still can't have both a comment before a tag and after a tag, before node)
0.18.3 (2023-10-29):
- fix issue with spurious newline on first item after comment + nested block sequence
- additional links in the metadata on PyPI (Reported, with pointers how to fix, by [Sorin](https://sourceforge.net/u/ssbarnea/profile/)).
0.18.2 (2023-10-24):
- calling the deprecated functions now raises an `AttributeError` with the, somewhat more informative, original warning message. Instead of calling `sys.exit(1)`
0.18.1 (2023-10-24):
- calling the deprecated functions now always displays the warning message. (reported by [Trend Lloyd](https://sourceforge.net/u/lathiat2/profile/))
0.18.0 (2023-10-23):
- the **functions** `scan`, `parse`, `compose`, `load`, `emit`, `serialize`, `dump` and their variants (`_all`, `safe_`, `round_trip_`, etc) have been deprecated (the same named **methods** on `YAML()` instances are, of course, still there).
- `YAML(typ='unsafe')` now issues a `PendingDeprecationWarning`. This will become deprecated in the 0.18 series
(probably before the end of 2023).
You can use `YAML(typ='full')` to dump unregistered Python classes/functions.
For loading you'll have to register your classes/functions
if you want the old, unsafe, functionality. You can still load any tag, like `!!python/name:posix.system`, **safely**
with the (default) round-trip parser.
- fix for `bytes-like object is required not 'str' while dumping binary streams`. This was reported, analysed and a fix provided by [Vit Zikmund](https://sourceforge.net/u/tlwhitec/profile/)
0.17.40 (2023-10-20):
- flow style sets are now preserved ( `!!set {a, b, c}` ). Any values specified when loading are dropped, including `!!null ""`.
- potential workaround for issue 484: the long_description_content_type including the variant specification `CommonMark`
can result in problems on Azure. If you can install from `.tar.gz` using
`RUAMEL_NO_LONG_DESCRIPTION=1 pip install ruamel.yaml --no-binary :all:` then the long description, and its
offending type, are not included (in the METADATA).
(Reported by [Coury Ditch](https://sourceforge.net/u/cmditch/profile/))
- links in documentation update (reported by [David Hoese](https://sourceforge.net/u/daveydave400/profile/))
- Added some `__repr__` for internally used classes
0.17.39 (2023-10-19):
- update README generation, no code changes
0.17.36 (2023-10-19):
- fixed issue 480, dumping of a loaded empty flow-style mapping with comment failed (Reported by [Stéphane Brunner](https://sourceforge.net/u/stbrunner/profile/))
- fixed issue 482, caused by DEFAULT_MAPPING_TAG having changed to being a `Tag()` instance, not a string (reported by [yan12125](https://sourceforge.net/u/yan12125/profile/))
- updated documentation to use mkdocs
0.17.35 (2023-10-04):
- support for loading dataclasses with `InitVar` variables (some special coding was necessary to get the, unexpected, default value in the corresponding instance attribute (example of usage in [this question](https://stackoverflow.com/q/77228378/1307905)))
0.17.34 (2023-10-03):
- Python 3.12 also loads C version when using `typ='safe'`
- initial support for invoking
`__post_init__()` on dataclasses that have that
method after loading a registered dataclass.
(Originally
[asked](https://stackoverflow.com/q/51529458/1307905) on
Stackoverflow by
[nyanpasu64](https://stackoverflow.com/users/2683842/nyanpasu64)
and as
[ticket](https://sourceforge.net/p/ruamel-yaml/tickets/355/) by
[Patrick Lehmann](https://sourceforge.net/u/paebbels/profile/))
```
@yaml.register_class
@dataclass
class ...
```
0.17.33 (2023-09-28):
- added `flow_seq_start`, `flow_seq_end`, `flow_seq_separator`, `flow_map_start`, `flow_map_end`, `flow_map_separator` **class** attributes to the `Emitter` class so flow style output can more easily be influenced (based on [this answer](https://stackoverflow.com/a/76547814/1307905) on a StackOverflow question by [Huw Walters](https://stackoverflow.com/users/291033/huw-walters)).
0.17.32 (2023-06-17):
- fix issue with scanner getting stuck in infinite loop
0.17.31 (2023-05-31):
- added tag.setter on `ScalarEvent` and on `Node`, that takes either a `Tag` instance, or a str (reported by [Sorin Sbarnea](https://sourceforge.net/u/ssbarnea/profile/))
0.17.30 (2023-05-30):
- fix issue 467, caused by Tag instances not being hashable (reported by [Douglas Raillard](https://bitbucket.org/%7Bcf052d92-a278-4339-9aa8-de41923bb556%7D/))
0.17.29 (2023-05-30):
- changed the internals of the tag property from a string to a class which allows for preservation of the original handle and suffix. This should result in better results using documents with %TAG directives, as well as preserving URI escapes in tag suffixes.
0.17.28 (2023-05-26):
- fix for issue 464: documents ending with document end marker
without final newline fail to load (reported by [Mariusz
Rusiniak](https://sourceforge.net/u/r2dan/profile/))
0.17.27 (2023-05-25):
- fix issue with inline mappings as value for merge keys (reported by Sirish on [StackOverflow](https://stackoverflow.com/q/76331049/1307905))
- fix for 468, error inserting after accessing merge attribute on `CommentedMap` (reported by [Bastien gerard](https://sourceforge.net/u/bagerard/))
- fix for issue 461 pop + insert on same `CommentedMap` key throwing error (reported by [John Thorvald Wodder II](https://sourceforge.net/u/jwodder/profile/))
0.17.26 (2023-05-09):
- fix for error on edge case for issue 459
0.17.25 (2023-05-09):
- fix for regression while dumping wrapped strings with too many backslashes removed (issue 459, reported by [Lele Gaifax](https://sourceforge.net/u/lele/profile/))
0.17.24 (2023-05-06):
- rewrite of `CommentedMap.insert()`. If you have a merge key in the YAML document for the mapping you insert to, the position value should be the one as you look at the YAML input. This fixes issue 453 where other keys of a merged in mapping would show up after an insert (reported by [Alex Miller](https://sourceforge.net/u/millerdevel/profile/)). It also fixes a call to `.insert()` resulting in the merge key moving to become the first key if it wasn't already, and it is also now possible to insert a key before a merge key (even if it is the first key in the mapping).
- fix (in the pure Python implementation including default) for issue 447. (reported by [Jack Cherng](https://sourceforge.net/u/jfcherng/profile/), also brought up by brent on [StackOverflow](https://stackoverflow.com/q/40072485/1307905))
0.17.23 (2023-05-05):
- fix 458, error on plain scalars starting with word longer than width. (reported by [Kyle Larose](https://sourceforge.net/u/klarose/profile/))
- fix for `.update()` no longer correctly handling keyword arguments (reported by John Lin on [StackOverflow]( https://stackoverflow.com/q/76089100/1307905))
- fix issue 454: high Unicode (emojis) in quoted strings always
escaped (reported by [Michal
Čihař](https://sourceforge.net/u/nijel/profile/) based on a
question on StackOverflow).
- fix issue with emitter conservatively inserting extra backslashes in wrapped quoted strings (reported by thebenman on [StackOverflow](https://stackoverflow.com/q/75631454/1307905))
0.17.22 (2023-05-02):
- fix issue 449 where the second exclamation marks got URL encoded (reported and fixing PR provided by [John Stark](https://sourceforge.net/u/jods/profile/))
- fix issue with indent != 2 and literal scalars with empty first line (reported by wrdis on [StackOverflow](https://stackoverflow.com/q/75584262/1307905))
- updated `__repr__` of CommentedMap, now that Python's dict is ordered -> no more `ordereddict(list-of-tuples)`
- merge MR 4, handling OctalInt in YAML 1.1 (provided by [Jacob Floyd](https://sourceforge.net/u/cognifloyd/profile/))
- fix loading of `!!float 42` (reported by Eric on [Stack overflow](https://stackoverflow.com/a/71555107/1307905))
- line numbers are now set on `CommentedKeySeq` and `CommentedKeyMap` (which are created if you have a sequence resp. mapping as the key in a mapping)
- plain scalars: put single words longer than width on a line of
their own, instead of after the previous line (issue 427, reported
by [Antoine
Cotten](https://sourceforge.net/u/antoineco/profile/)). Caveat:
this currently results in a space ending the previous line.
- fix for folded scalar part of 421: comments after ">" on first
line of folded scalars are now preserved (as were those in the
same position on literal scalars). Issue reported by Jacob Floyd.
- added stacklevel to warnings
- typing changed from Py2 compatible comments to Py3, removed various Py2-isms
0.17.21 (2022-02-12):
- fix bug in calling `.compose()` method with `pathlib.Path` instance.
0.17.20 (2022-01-03):
- fix error in microseconds while rounding datetime fractions >= 9999995 (reported by [Luis Ferreira](https://sourceforge.net/u/ljmf00/))
0.17.19 (2021-12-26):
- fix mypy problems (reported by [Arun](https://sourceforge.net/u/arunppsg/profile/))
0.17.18 (2021-12-24):
- copy-paste error in folded scalar comment attachment (reported by [Stephan Geulette](https://sourceforge.net/u/sgeulette/profile/))
- fix 411, indent error comment between key empty seq value (reported by [Guillermo Julián](https://sourceforge.net/u/gjulianm/profile/))
0.17.17 (2021-10-31):
- extract timestamp matching/creation to util
0.17.16 (2021-08-28):
- 398 also handle issue 397 when comment is newline
0.17.15 (2021-08-28):
- fix issue 397, insert comment before key when a comment between key and value exists (reported by [Bastien gerard](https://sourceforge.net/u/bagerard/))
0.17.14 (2021-08-25):
- fix issue 396, inserting key/val in merged-in dictionary (reported by [Bastien gerard](https://sourceforge.net/u/bagerard/))
0.17.13 (2021-08-21):
- minor fix in attr handling
0.17.12 (2021-08-21):
- fix issue with anchor on registered class not preserved and those classes using package attrs with `@attr.s()` (both reported by [ssph](https://sourceforge.net/u/sph/))
0.17.11 (2021-08-19):
- fix error baseclass for `DuplicateKeyError` (reported by [Łukasz Rogalski](https://sourceforge.net/u/lrogalski/))
- fix typo in reader error message, causing `KeyError` during reader error (reported by [MTU](https://sourceforge.net/u/mtu/))
0.17.10 (2021-06-24):
- fix issue 388, token with old comment structure != two elements (reported by [Dimitrios Bariamis](https://sourceforge.net/u/dbdbc/))
0.17.9 (2021-06-10):
- fix issue with updating CommentedMap (reported by sri on [StackOverflow](https://stackoverflow.com/q/67911659/1307905))
0.17.8 (2021-06-09):
- fix for issue 387 where templated anchors on tagged object did get set resulting in potential id reuse. (reported by [Artem Ploujnikov](https://sourceforge.net/u/flexthink/))
0.17.7 (2021-05-31):
- issue 385 also affected other deprecated loaders (reported via email by Oren Watson)
0.17.6 (2021-05-31):
- merged type annotations update provided by [Jochen Sprickerhof](https://sourceforge.net/u/jspricke/)
- fix for issue 385: deprecated round_trip_loader function not
working (reported by [Mike
Gouline](https://sourceforge.net/u/gouline/))
- wasted a few hours getting rid of mypy warnings/errors
0.17.5 (2021-05-30):
- fix for issue 384 `!!set` with aliased entry resulting in broken YAML on rt (reported by [William Kimball](https://sourceforge.net/u/william303/))
0.17.4 (2021-04-07):
- prevent (empty) comments from throwing assertion error (issue 351 reported by [William Kimball](https://sourceforge.net/u/william303/)); comments (or empty lines) will be dropped
0.17.3 (2021-04-07):
- fix for issue 382 caused by an error in a format string (reported by [William Kimball](https://sourceforge.net/u/william303/))
- allow expansion of aliases by setting `yaml.composer.return_alias = lambda s: copy.deepcopy(s)`
(as per [Stackoverflow answer](https://stackoverflow.com/a/66983530/1307905))
0.17.2 (2021-03-29):
- change -py2.py3-none-any.whl to -py3-none-any.whl, and remove 0.17.1
0.17.1 (2021-03-29):
- added 'Programming Language :: Python :: 3 :: Only', and
removing 0.17.0 from PyPI (reported by [Alasdair
Nicol](https://sourceforge.net/u/alasdairnicol/))
0.17.0 (2021-03-26):
- removed because of incomplete classifiers
- this release no longer supports Python 2.7, most if not all Python 2 specific code is removed. The 0.17.x series is the last to support Python 3.5 (this also allowed for removal of the dependency on `ruamel.std.pathlib`)
- remove Python2 specific code branches and adaptations (u-strings)
- prepare % code for f-strings using `_F`
- allow PyOxidisation ([issue 324](https://sourceforge.net/p/ruamel-yaml/tickets/324/) resp. [issue 171](https://github.com/indygreg/PyOxidizer/issues/171))
- replaced Python 2 compatible enforcement of keyword arguments with '*'
- the old top level *functions* `load`, `safe_load`, `round_trip_load`, `dump`, `safe_dump`, `round_trip_dump`, `scan`, `parse`, `compose`, `emit`, `serialize` as well as their `_all` variants for multi-document streams, now issue a `PendingDeprecationWarning` (e.g. when run from pytest, but also when Python is started with `-Wd`). Use the methods on `YAML()`, which have been extended.
- fix for issue 376: indentation changes could put literal/folded
scalar to start before the `#` column of a following comment.
Effectively making the comment part of the scalar in the output.
(reported by [Bence Nagy](https://sourceforge.net/u/underyx/))
------------------------------------------------------------------------
For older changes see the file
[CHANGES](https://sourceforge.net/p/ruamel-yaml/code/ci/default/tree/CHANGES)
python-ruyaml-0.92.1/_doc/ 0000775 0000000 0000000 00000000000 15056754172 0015353 5 ustar 00root root 0000000 0000000 python-ruyaml-0.92.1/_doc/Makefile 0000664 0000000 0000000 00000016743 15056754172 0017026 0 ustar 00root root 0000000 0000000 # Makefile for Sphinx documentation
#
# You can set these variables from the command line.
SPHINXOPTS =
SPHINXBUILD = sphinx-build
PAPER = a4
BUILDDIR = _build
# User-friendly check for sphinx-build
ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
endif
# Internal variables.
PAPEROPT_a4 = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
# the i18n builder cannot share the environment and doctrees with the others
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
.PHONY: help
help:
@echo "Please use \`make ' where is one of"
@echo " html to make standalone HTML files"
@echo " dirhtml to make HTML files named index.html in directories"
@echo " singlehtml to make a single large HTML file"
@echo " pickle to make pickle files"
@echo " json to make JSON files"
@echo " htmlhelp to make HTML files and a HTML help project"
@echo " qthelp to make HTML files and a qthelp project"
@echo " applehelp to make an Apple Help Book"
@echo " devhelp to make HTML files and a Devhelp project"
@echo " epub to make an epub"
@echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
@echo " latexpdf to make LaTeX files and run them through pdflatex"
@echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
@echo " text to make text files"
@echo " man to make manual pages"
@echo " texinfo to make Texinfo files"
@echo " info to make Texinfo files and run them through makeinfo"
@echo " gettext to make PO message catalogs"
@echo " changes to make an overview of all changed/added/deprecated items"
@echo " xml to make Docutils-native XML files"
@echo " pseudoxml to make pseudoxml-XML files for display purposes"
@echo " linkcheck to check all external links for integrity"
@echo " doctest to run all doctests embedded in the documentation (if enabled)"
@echo " coverage to run coverage check of the documentation (if enabled)"
.PHONY: clean
clean:
rm -rf $(BUILDDIR)/*
.PHONY: html
html:
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
.PHONY: dirhtml
dirhtml:
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
.PHONY: singlehtml
singlehtml:
$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
@echo
@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
.PHONY: pickle
pickle:
$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
@echo
@echo "Build finished; now you can process the pickle files."
.PHONY: json
json:
$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
@echo
@echo "Build finished; now you can process the JSON files."
.PHONY: htmlhelp
htmlhelp:
$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
@echo
@echo "Build finished; now you can run HTML Help Workshop with the" \
".hhp project file in $(BUILDDIR)/htmlhelp."
.PHONY: qthelp
qthelp:
$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
@echo
@echo "Build finished; now you can run "qcollectiongenerator" with the" \
".qhcp project file in $(BUILDDIR)/qthelp, like this:"
@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/yaml.qhcp"
@echo "To view the help file:"
@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/yaml.qhc"
.PHONY: applehelp
applehelp:
$(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
@echo
@echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
@echo "N.B. You won't be able to view it unless you put it in" \
"~/Library/Documentation/Help or install it in your application" \
"bundle."
.PHONY: devhelp
devhelp:
$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
@echo
@echo "Build finished."
@echo "To view the help file:"
@echo "# mkdir -p $$HOME/.local/share/devhelp/yaml"
@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/yaml"
@echo "# devhelp"
.PHONY: epub
epub:
$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
@echo
@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
.PHONY: latex
latex:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo
@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
@echo "Run \`make' in that directory to run these through (pdf)latex" \
"(use \`make latexpdf' here to do that automatically)."
.PHONY: latexpdf
latexpdf:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through pdflatex..."
$(MAKE) -C $(BUILDDIR)/latex all-pdf
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
.PHONY: latexpdfja
latexpdfja:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through platex and dvipdfmx..."
$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
.PHONY: text
text:
$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
@echo
@echo "Build finished. The text files are in $(BUILDDIR)/text."
.PHONY: man
man:
$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
@echo
@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
.PHONY: texinfo
texinfo:
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo
@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
@echo "Run \`make' in that directory to run these through makeinfo" \
"(use \`make info' here to do that automatically)."
.PHONY: info
info:
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo "Running Texinfo files through makeinfo..."
make -C $(BUILDDIR)/texinfo info
@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
.PHONY: gettext
gettext:
$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
@echo
@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
.PHONY: changes
changes:
$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
@echo
@echo "The overview file is in $(BUILDDIR)/changes."
.PHONY: linkcheck
linkcheck:
$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
@echo
@echo "Link check complete; look for any errors in the above output " \
"or in $(BUILDDIR)/linkcheck/output.txt."
.PHONY: doctest
doctest:
$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
@echo "Testing of doctests in the sources finished, look at the " \
"results in $(BUILDDIR)/doctest/output.txt."
.PHONY: coverage
coverage:
$(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
@echo "Testing of coverage in the sources finished, look at the " \
"results in $(BUILDDIR)/coverage/python.txt."
.PHONY: xml
xml:
$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
@echo
@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
.PHONY: pseudoxml
pseudoxml:
$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
@echo
@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
python-ruyaml-0.92.1/_doc/README.ryd 0000664 0000000 0000000 00000017440 15056754172 0017036 0 ustar 00root root 0000000 0000000 version: 0.2
text: smd
pdf: false
order:
- overview.ryd
- install.ryd
- basicuse.ryd
- dumpcls.ryd
- detail.ryd
- example.ryd
- api.ryd
- pyyaml.ryd
- contributing.ryd
toc: False # don't index this file or put in mkdocs.nav
mkdocs:
site_name: yaml
docs_dir: _doc
site_author: Anthon van der Neut
nav:
- overview.md
- install.md
- basicuse.md
- dumpcls.md
- detail.md
- example.md
- api.md
- pyyaml.md
- contributing.md
theme:
name: readthedocs
exclude_docs: |
*.ryd
*.rst
markdown_extensions:
- toc:
permalink: true
--- |
# ruamel.yaml
`ruamel.yaml` is a YAML 1.2 loader/dumper package for Python.
--- !table
version: !Env version
updated: !Env date
documentation: https://yaml.dev/doc/ruamel.yaml
repository: https://sourceforge.net/projects/ruamel-yaml
pypi: https://pypi.org/project/ruamel.yaml
--- !python-hidden |
import os
from textwrap import dedent
if os.environ.get('GEN_README_MD'):
print(dedent("""\
## breaking changes, that may make future uploads to PyPI impossible
*If you are interested in future upgrades of `ruamel.yaml`
please check the [documentation on installing](https://yaml.dev/doc/ruamel.yaml/install/),
since at some point I might not be able to upload a new version to PyPI with updated information.*
`ruamel.yaml` was intentionally named as `yaml` in a namespace `ruamel`. The namespace allows the installation
name to correspond unchanged to how the package is imported, reduces the number of links I have to create
in site-packages of a Python install during development, as well as providing a recognisable set of packages
my company releases to the public.
However, after uploading version 0.18.7, I got an email from PyPI, about having to change the project name
to `ruamel_yaml` to comply with PEP 625, sometime in the future. The email doesn't say if namespace packages are
no longer allowed, or how to deal with the very real clash with the pre-existing package `ruamel_yaml`.
I might not be able to adapt `ruamel.yaml`, in
a way that does not negatively affect the 0.5 million daily downloads (and my own usage of the package) in time.
My experience with other such service downgrades (Bitbucket, Readthedocs), has not been entirely positive.
-----
"""))
--- !stdout-raw |
--- |
As announced, in 0.18.0, the old PyYAML functions have been deprecated.
(`scan`, `parse`, `compose`, `load`, `emit`, `serialize`, `dump` and their variants
(`_all`, `safe_`, `round_trip_`, etc)). If you only read this after your program has
stopped working: I am sorry to hear that, but that also means you, or the person
developing your program, has not tested with warnings on (which is the recommendation
in PEP 565, and e.g. defaulting when using `pytest`). If you have troubles, explicitly use
```
pip install "ruamel.yaml<0.18.0"
```
or put something to that effect in your requirements, to give yourself
some time to solve the issue.
There will be at least one more potentially breaking change in the 0.18 series: `YAML(typ='unsafe')`
now has a pending deprecation warning and is going to be deprecated, probably before the end of 2023.
If you only use it to dump, please use the new `YAML(typ='full')`, the result of that can be *safely*
loaded with a default instance `YAML()`, as that will get you inspectable, tagged, scalars, instead of
executed Python functions/classes. (You should probably add constructors for what you actually need,
but I do consider adding a `ruamel.yaml.unsafe` package that will re-add the `typ='unsafe'` option.)
*Please adjust/pin your dependencies accordingly if necessary.*
There seems to be a CVE on `ruamel.yaml`, stating that the `load()` function could be abused
because of unchecked input. `load()` was never the default function (that was `round_trip_load()`
before the new API came into existence). So the creator of that CVE was ill informed and
probably lazily assumed that since `ruamel.yaml` is a derivative of PyYAML (for which
a similar CVE exists), the same problem would still exist, without checking.
So the CVE was always inappropriate, now just more so, as the call
to the function `load()` with any input will terminate your program with an error message. If you
(have to) care about such things as this CVE, my recommendation is to stop using Python
completely, as `pickle.load()` can be abused in the same way as `load()` (and, unlike `load()`,
is only documented to be unsafe, without a development-time warning).
Version 0.18.9 was the last one tested to be working with Python 3.7
Version 0.17.21 was the last one tested to be working on Python 3.5 and 3.6
The 0.16.13 release was the last that was tested to be working on Python 2.7.
There are two extra plug-in packages
(`ruamel.yaml.bytes` and `ruamel.yaml.string`)
for those not wanting to do the streaming to a
`io.BytesIO/StringIO` buffer themselves.
If your package uses `ruamel.yaml` and is not listed on PyPI, drop me an
email, preferably with some information on how you use the package (or a
link to the repository) and I'll keep you informed when the status of
the API is stable enough to make the transition.
For packaging purposes you can use a download of the [tar balls of tagged source](https://yaml.dev/ruamel-dl-tagged-releases)
--- !comment |
# # ToDo 2024-12
# --- !toc
# level: 3
# # prefix: http://yaml.readthedocs.io/en/latest/
--- |
```=html
```
--- !comment |
[]($image.url('https://sourceforge.net/p/oitnb/code/ci/default/tree/_doc/_static/oitnb.svg?format=raw'))
[bla]($link.url("https://pypi.org/project/oitnb/"))
[[]($image.url('https://sourceforge.net/p/oitnb/code/ci/default/tree/_doc/_static/oitnb.svg?format=raw'))]($link.url("https://pypi.org/project/oitnb/"))
[($image.url('https://sourceforge.net/p/oitnb/code/ci/default/tree/_doc/_static/oitnb.svg?format=raw').linked(true))]
[($image.url('https://sourceforge.net/p/oitnb/code/ci/default/tree/_doc/_static/oitnb.svg?format=raw'))](https://pypi.org/project/oitnb/)
# [](https://yaml.readthedocs.org/en/latest?badge=latest)
# [](https://bestpractices.coreinfrastructure.org/projects/1128)
# [](https://opensource.org/licenses/MIT)
# [](https://pypi.org/project/ruamel.yaml/)
# [](https://pypi.org/project/oitnb/)
# [](http://mypy-lang.org/)
# [](https://www.pepy.tech/projects/ruamel.yaml)
# ChangeLog
--- !changelog
CHANGES
--- |
------------------------------------------------------------------------
For older changes see the file
[CHANGES](https://sourceforge.net/p/ruamel-yaml/code/ci/default/tree/CHANGES)
python-ruyaml-0.92.1/_doc/_static/ 0000775 0000000 0000000 00000000000 15056754172 0017001 5 ustar 00root root 0000000 0000000 python-ruyaml-0.92.1/_doc/_static/license.svg 0000664 0000000 0000000 00000001666 15056754172 0021155 0 ustar 00root root 0000000 0000000
python-ruyaml-0.92.1/_doc/_static/pypi.svg 0000664 0000000 0000000 00000001671 15056754172 0020510 0 ustar 00root root 0000000 0000000
python-ruyaml-0.92.1/_doc/api.rst 0000664 0000000 0000000 00000025463 15056754172 0016670 0 ustar 00root root 0000000 0000000 # Departure from previous API
With version 0.15.0 `ruyaml` starts to depart from the previous
(PyYAML) way of loading and dumping. During a transition period the
original `load()` and `dump()` in its various formats will still be
supported, but this is not guaranteed to be so with the transition to
1.0.
At the latest with 1.0, but possibly earlier, transition error and
warning messages will be issued, so any packages depending on
ruyaml should pin the version with which they are testing.
Up to 0.15.0, the loaders (`load()`, `safe_load()`, `round_trip_load()`,
`load_all`, etc.) took, apart from the input stream, a `version`
argument to allow downgrading to YAML 1.1, sometimes needed for
documents without directive. When round-tripping, there was an option to
preserve quotes.
Up to 0.15.0, the dumpers (`dump()`, `safe_dump`, `round_trip_dump()`,
`dump_all()`, etc.) had a plethora of arguments, some inherited from
`PyYAML`, some added in `ruyaml`. The only required argument is the
`data` to be dumped. If the stream argument is not provided to the
dumper, then a string representation is build up in memory and returned
to the caller.
Starting with 0.15.0 `load()` and `dump()` are methods on a `YAML`
instance and only take the stream, resp. the data and stream argument.
All other parameters are set on the instance of `YAML` before calling
`load()` or `dump()`.
Before 0.15.0 you could do:
``` python
from pathlib import Path
from ruamel import yaml
data = yaml.safe_load("abc: 1")
out = Path('/tmp/out.yaml')
with out.open('w') as fp:
yaml.safe_dump(data, fp, default_flow_style=False)
```
after:
--- !python |
from pathlib import Path
from ruyaml import YAML
yaml = YAML(typ='safe')
yaml.default_flow_style = False
data = yaml.load("abc: 1")
out = Path('/tmp/out.yaml')
yaml.dump(data, out)
--- |
If you previously used a keyword argument `explicit_start=True` you now
do `yaml.explicit_start = True` before calling `dump()`. The `Loader`
and `Dumper` keyword arguments are not supported that way. You can
provide the `typ` keyword with a value of `rt` (default), `safe`, `unsafe` or `base`
(for round-trip load/dump, safe_load/dump, load/dump resp. using the
BaseLoader / BaseDumper). More fine-grained control is possible by setting the
attributes `.Parser`, `.Constructor`, `.Emitter`, etc., to the class of
the type to create for that stage (typically a subclass of an existing
class implementing that).
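For example (a small sketch of the attribute-based configuration described above):
``` python
import sys
from ruyaml import YAML

yaml = YAML(typ='rt')        # 'rt' is the default
yaml.explicit_start = True   # replaces the old explicit_start=True keyword argument
yaml.dump({'abc': 1}, sys.stdout)
```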
The default loader (`typ='rt'`) is a direct derivative of the safe
loader, without the methods to construct arbitrary Python objects that
make the `unsafe` loader unsafe, but with the changes needed for
round-trip preservation of comments, etc. For trusted Python classes a
constructor can of course be added to the round-trip or safe-loader, but
this has to be done explicitly (`add_constructor`).
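A sketch of explicitly adding such a constructor (the `!color` tag and `Color` class are made up for illustration):
``` python
from ruyaml import YAML

class Color:  # a trusted application class, illustrative only
    def __init__(self, name):
        self.name = name

def construct_color(constructor, node):
    return Color(constructor.construct_scalar(node))

yaml = YAML()  # round-trip loader/dumper
yaml.constructor.add_constructor('!color', construct_color)
data = yaml.load('favourite: !color blue\n')  # data['favourite'] is a Color instance
```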
All data is dumped (not just for round-trip-mode) with
`.allow_unicode = True`
You can of course have multiple YAML instances active at the same time,
with different load and/or dump behaviour.
Initially only the typical operations are supported, but in principle
all functionality of the old interface will be available via `YAML`
instances (if you are using something that isn't, let me know).
If a parse or dump fails and throws an exception, the state of the
`YAML()` instance is not guaranteed to be able to handle further
processing. You should, at that point, recreate the YAML instance
before proceeding.
## Loading
### Duplicate keys
In JSON, mapping keys should be unique; in YAML they must be unique.
PyYAML never enforced this, although the YAML 1.1 specification already
required it.
In the new API (starting 0.15.1) duplicate keys in mappings are no
longer allowed by default. To allow duplicate keys in mappings:
--- !python |
yaml = ruyaml.YAML()
yaml.allow_duplicate_keys = True
yaml.load(stream)
--- |
In the old API this is a warning starting with 0.15.2 and an error in
0.16.0.
When a duplicate key is found it and its value are discarded, as should
be done according to the [YAML 1.1
specification](http://yaml.org/spec/1.1/#id932806).
## Dumping a multi-document YAML stream
The "normal" `dump_all` expected as its first argument a list of documents,
or something else the internals of the method can iterate over. To read
and write a multi-document stream you would either make a `list`:
--- !code |
yaml = YAML()
data = list(yaml.load_all(in_path))
# do something on data[0], data[1], etc.
yaml.dump_all(data, out_path)
--- |
or create some function/object that would yield the `data` values.
What you now can do is create `YAML()` as a context manager. This works
for output (dumping) only, requires you to specify the output (file,
buffer, `Path`) at creation time, and doesn't support `transform`
(yet).
--- !code |
with YAML(output=sys.stdout) as yaml:
yaml.explicit_start = True
for data in yaml.load_all(Path(multi_document_filename)):
# do something on data
yaml.dump(data)
--- |
Within the context manager, you cannot use the `dump()` with a second
(stream) argument, nor can you use `dump_all()`. The `dump()` within the
context of the `YAML()` automatically creates a multi-document stream if called
more than once.
To combine multiple YAML documents from multiple files:
--- !code |
list_of_filenames = ['x.yaml', 'y.yaml', ]
with YAML(output=sys.stdout) as yaml:
yaml.explicit_start = True
for path in list_of_filenames:
with open(path) as fp:
yaml.dump(yaml.load(fp))
--- |
The output will be a valid, uniformly indented YAML file. Doing
`cat {x,y}.yaml` might result in a single document if there is no
document start marker at the beginning of `y.yaml`.
## Dumping
### Controls
On your `YAML()` instance you can set attributes e.g with:
yaml = YAML(typ='safe', pure=True)
yaml.allow_unicode = False
available attributes include:
`unicode_supplementary`
: Defaults to `True` if Python's Unicode size is larger than 2 bytes.
Set to `False` to enforce output of the form `\U0001f601` (ignored
if `allow_unicode` is `False`)
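For instance (a sketch; the escaped form shown in the comment is indicative):
``` python
import sys
from ruyaml import YAML

yaml = YAML(typ='safe', pure=True)
yaml.allow_unicode = False                      # non-ASCII characters are escaped on output
yaml.dump({'emoji': '\U0001F601'}, sys.stdout)  # the emoji is emitted in escaped form
```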
## Transparent usage of new and old API
With 0.18 the entry functions for the old API have been removed, so the
following now only makes sense if you use the old API with a pinned
old version of `ruamel.yaml`.
If you have multiple packages depending on `ruyaml`, or install
your utility together with other packages not under your control, then
fixing your `install_requires` might not be so easy.
Depending on your usage you might be able to "version" your usage to
be compatible with both the old and the new. The following are some
examples all assuming `from ruamel import yaml` somewhere at the top of
your file and some `istream` and `ostream` appropriately opened for
reading resp. writing.
Loading and dumping using the `SafeLoader`:
if ruyaml.version_info < (0, 15):
data = yaml.safe_load(istream)
yaml.safe_dump(data, ostream)
else:
yml = ruyaml.YAML(typ='safe', pure=True) # 'safe' load and dump
data = yml.load(istream)
yml.dump(data, ostream)
Loading with the `CSafeLoader`, dumping with `RoundTripLoader`. You need
two `YAML` instances, but each of them can be re-used:
--- !python |
if ruyaml.version_info < (0, 15):
data = yaml.load(istream, Loader=yaml.CSafeLoader)
yaml.round_trip_dump(data, ostream, width=1000, explicit_start=True)
else:
yml = ruyaml.YAML(typ='safe')
data = yml.load(istream)
ymlo = ruyaml.YAML() # or yaml.YAML(typ='rt')
ymlo.width = 1000
ymlo.explicit_start = True
ymlo.dump(data, ostream)
--- |
Loading and dumping from `pathlib.Path` instances using the
round-trip-loader:
--- !code |
# in myyaml.py
if ruyaml.version_info < (0, 15):
class MyYAML(yaml.YAML):
def __init__(self):
yaml.YAML.__init__(self)
self.preserve_quotes = True
self.indent(mapping=4, sequence=4, offset=2)
# in your code
from myyaml import MyYAML
# some pathlib.Path
from pathlib import Path
inf = Path('/tmp/in.yaml')
outf = Path('/tmp/out.yaml')
yml = MyYAML()
# no need for with statement when using pathlib.Path instances
data = yml.load(inf)
yml.dump(data, outf)
--- |
## Reason for API change
`ruyaml` inherited the way of doing things from `PyYAML`. In
particular when calling the function `load()` or `dump()` temporary
instances of `Loader()` resp. `Dumper()` were created that were
discarded on termination of the function.
This way of doing things leads to several problems:
- it is virtually impossible to return information to the caller apart
from the constructed data structure. E.g. if you would get a YAML
document version number from a directive, there is no way to let the
caller know apart from handing back special data structures. The
same problem exists when trying to do on the fly analysis of a
document for indentation width.
- these instances were composites of the various load/dump steps and
if you wanted to enhance one of the steps, you needed to e.g. subclass
the emitter and make a new composite (dumper) as well, providing all
of the parameters (i.e. copy paste)
Alternatives, like making a class that returns a `Dumper` when
called and sets attributes before doing so, are cumbersome for
day-to-day use.
- many routines (like `add_representer()`) have a direct global impact
on all of the following calls to `dump()` and those are difficult if
not impossible to turn back. This forces the need to subclass
`Loaders` and `Dumpers`, a long time problem in PyYAML as some
attributes were not `deep_copied` although a bug-report (and fix)
had been available a long time.
- If you want to set an attribute, e.g. to control whether literal
block style scalars are allowed to have trailing spaces on a line
instead of being dumped as double quoted scalars, you have to change
the `dump()` family of routines, all of the `Dumpers()` as well as
the actual functionality change in `emitter.Emitter()`. The
functionality change takes changing 4 (four!) lines in one file, and
being able to enable that another 50+ line changes (non-contiguous)
in 3 more files resulting in diff that is far over 200 lines long.
- replacing libyaml with something that doesn't support both `0o52`
and `052` for the integer `42` (instead of `52` as per YAML 1.2) is
difficult
With `ruyaml>=0.15.0` the various steps "know" about the `YAML`
instance and can pick up settings, as well as report back information via
that instance. Representers, etc., are added to a reusable instance and
different YAML instances can co-exist.
This change eases development and helps prevent regressions.
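A small illustration of such co-existing instances (a sketch):
``` python
from ruyaml import YAML

rt = YAML()              # round-trip instance, preserves comments and quotes
safe = YAML(typ='safe')  # independent instance with its own settings
rt.preserve_quotes = True
safe.default_flow_style = True
# settings on one instance do not leak into the other
```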
python-ruyaml-0.92.1/_doc/basicuse.rst 0000664 0000000 0000000 00000003534 15056754172 0017710 0 ustar 00root root 0000000 0000000 # Basic Usage
## Load and dump
You load a YAML document using:
--- !python |
from ruyaml import YAML
yaml=YAML(typ='safe') # default, if not specified, is 'rt' (round-trip)
yaml.load(doc)
--- |
in this, `doc` can be a file pointer (i.e. an object that has the
`.read()` method), a string, or a `pathlib.Path()` instance. `typ='safe'`
accomplishes the same as what `safe_load()` did before: loading of a
document without resolving unknown tags. Provide `pure=True` to enforce
using the pure Python implementation, otherwise the faster C libraries
will be used when possible/available, but these behave slightly differently
(and sometimes more like a YAML 1.1 loader).
Dumping works in the same way:
--- !python-code |
from ruyaml import YAML
yaml=YAML()
yaml.default_flow_style = False
yaml.dump({'a': [1, 2]}, s)
--- |
in this `s` can be a file pointer (i.e. an object that has the
`.write()` method) or a `pathlib.Path()`. If you want to display your
output, just stream to `sys.stdout`.
If you need to transform a string representation of the output provide a
function that takes a string as input and returns one:
--- !python |
def tr(s):
return s.replace('\n', '<\n') # such output is not valid YAML!
yaml.dump(data, sys.stdout, transform=tr)
--- |
## More examples
Using the C based SafeLoader (at this time this is inherited from
libyaml/PyYAML and e.g. loads `0o52` as well as `052` as integer
`42`):
--- !python |
from ruyaml import YAML
yaml=YAML(typ="safe")
yaml.load("""a:\n b: 2\n c: 3\n""")
--- |
Using the Python based SafeLoader (YAML 1.2 support, `052` loads as
`52`):
--- !python |
yaml=YAML(typ="safe", pure=True)
yaml.load("""a:\n b: 2\n c: 3\n""")
--- |
Restrictions when using the C based SafeLoader/SafeDumper:
- yaml.indent will set the same value for mappings and sequences. (Issue 471)
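With the pure Python (default, round-trip) implementation, mapping and
sequence indentation can be set independently; a minimal sketch:
--- !python |
import sys
from ruyaml import YAML

yaml = YAML()                                  # pure Python round-trip by default
yaml.indent(mapping=4, sequence=4, offset=2)   # distinct control over mapping/sequence indentation
yaml.dump({'a': {'b': [1, 2]}}, sys.stdout)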
python-ruyaml-0.92.1/_doc/conf.py 0000664 0000000 0000000 00000023054 15056754172 0016656 0 ustar 00root root 0000000 0000000 # -*- coding: utf-8 -*-
#
# yaml documentation build configuration file, created by
# sphinx-quickstart on Mon Feb 29 12:03:00 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import os # NOQA
import sys # NOQA
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [] # type: ignore
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = {".rst": "restructuredtext"}
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = "index"
# General information about the project.
project = u"ruyaml"
copyright = u"2017-2021, Anthon van der Neut and other contributors"
author = u"Anthon van der Neut et al."
# The version info for the project you are documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
try:
    from ruyaml import __version__, version_info  # NOQA
    # The short X.Y version.
    version = '.'.join([str(ch) for ch in version_info[:3]])
    # The full version, including alpha/beta/rc tags.
    release = version  # = __version__
except Exception as e:
    print("exception", e)
    version = release = "dev"
print("ruyaml version", version)
# print('cwd:', os.getcwd())
# current working directory is the one with `conf.py` !
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ["_build"]
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = "default"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# " v documentation".
html_title = "Python YAML package documentation"
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
# html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = "yamldoc"
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
"papersize": "a4paper",
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
# Latex figure (float) alignment
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(
master_doc,
'yaml.tex',
'Python YAML package documentation',
'Anthon van der Neut',
'manual',
),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [(master_doc, 'yaml', 'yaml Documentation', [author], 1)]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(
master_doc,
'yaml',
'yaml Documentation',
author,
'yaml',
'One line description of project.',
'Miscellaneous',
),
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
# texinfo_no_detailmenu = False
python-ruyaml-0.92.1/_doc/contributing.rst 0000664 0000000 0000000 00000005247 15056754172 0020624 0 ustar 00root root 0000000 0000000 ************
Contributing
************
All contributions to ``ruyaml`` are welcome.
Please post an issue or, if possible, a pull request (PR) on github.
Please don't use issues to post support questions.
TODO:: The maintainers of ruyaml don't have an official support channel yet.
Documentation
=============
The documentation for ``ruyaml`` is written in the `ReStructured Text
`_ format and follows the `Sphinx
Document Generator `_'s conventions.
Code
====
Code changes are welcome as well, but anything beyond a minor change should be
tested (``tox``/``pytest``), checked for typing conformance (``mypy``) and pass
pep8 conformance (``flake8``).
In my experience it is best to use two ``virtualenv`` environments, one with the
oldest Python version currently supported, the other with the latest. In the
site-packages directory of each virtualenv make a soft link to the ruyaml
directory of your (cloned and checked out) copy of the repository. Do not under
any circumstances run ``pip install -e .``; it will
not work (at least not until these commands are fixed to support packages with
namespaces).
You can install ``tox``, ``pytest``, ``mypy`` and ``flake8`` in the Python3
``virtualenv``, or in a ``virtualenv`` of their own. If all of these commands
pass without warning/error, you can create your pull-request.
Flake
+++++
The `Flake8 `_ configuration is part of ``setup.cfg``::
[flake8]
show-source = True
max-line-length = 95
ignore = F405
The suppress of F405 is necessary to allow ``from xxx import *``.
Please first make sure your checked-out source passes ``flake8`` before you make changes (it should).
Then make sure your changes pass without any warnings/errors.
Tox/pytest
++++++++++
Whether you add something or fix some bug with your code changes, first add one
or more tests that fail in the unmodified source when running ``tox``. Once that
is in place add your code, which should have as a result that your added test(s)
no longer fail, and neither should any other existing tests.
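For example, a minimal sketch of such a test (the file name, its placement under
``_test/`` and the checked behaviour are only an illustration)::

    import io

    from ruyaml import YAML

    def test_round_trip_keeps_eol_comment():
        src = "a: 1  # keep me\n"
        yaml = YAML()
        buf = io.StringIO()
        yaml.dump(yaml.load(src), buf)
        assert buf.getvalue() == src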
Typing/mypy
+++++++++++
You should run ``mypy`` from ``ruyaml``'s source directory::
mypy --strict --follow-imports silent lib/ruyaml/*.py
This command should give no errors or warnings.
Vulnerabilities
===============
If you find a vulnerability in ``ruyaml`` (e.g. one that would show the ``safe``
and ``rt`` loaders are not safe due to a bug in the software), please contact
the maintainers directly via email.
After the vulnerability is removed, and affected parties notified to allow them
to update versions, the vulnerability will be published, and your role in
finding/resolving this properly attributed.
python-ruyaml-0.92.1/_doc/contributing.ryd 0000664 0000000 0000000 00000012652 15056754172 0020610 0 ustar 00root root 0000000 0000000 version: 0.2
text: smd
pdf: false
--- |
# Contributing
Any contribution to `ruamel.yaml` is welcome, be it in the form of an
email, a question on stackoverflow (I\'ll get notified of that when you
tag it with `ruamel.yaml`), an issue or pull-request (PR) on
sourceforge.
Contributing via stackoverflow is, for most, easy to do. When I answer
your question there and the answer warrants an extension to the
documentation or code, I will include it in a documentation update
and/or future (normally the next) release of `ruamel.yaml`.
Please don\'t post support questions as an issue on sourceforge.
## Documentation
The documentation for `ruamel.yaml` is in YAML, more specifically in
[ryd](https://pypi.python.org/pypi/ryd) ( /rɑɪt/, pronounced like the
verb "write" ). This is Markdown (previously reStructuredText)
mixed with Python, each in
separate YAML documents within a single file. If you know a bit of YAML,
Python and Markdown, it will be clear how that works.
If you want to contribute to the documentation, you can send me a clear
description of the needed changes, e.g. as a unified diff. If the
changes encompass multiple documents in a `.ryd` file, it is best to
install `ryd` (use a virtualenv!), clone the `ruamel.yaml` repository on
sourceforge, edit documentation, run `ryd`:
ryd --pdf '**/*.ryd'
(quoting might not be necessary depending on your shell), and once the
PDF(s) look acceptable, submit a pull-request.
`ryd` will check your file for single backquotes (my most common mistake
going back and forth between reStructuredText and other mark up).
If you contribute example programs, note that `ryd` will automatically
run your program (so it should be correct) and can include the output of
the program in the resulting `.rst` (and PDF) file.
## Code
Code changes are welcome as well, but anything beyond a minor change
should be tested (`tox`/`pytest`), checked for typing conformance
(`mypy`) and pass pep8 conformance (`flake8`).
In my experience it is best to use two `virtualenv` environments, one
with the latest Python version currently supported, the other with
the oldest supported version.
In the site-packages directory of each virtualenv make a soft link to
the ruamel directory of your (cloned and checked out) copy of the
repository. Do not under any circumstances run `pip install -e .` or
`python setup.py -e .` it will not work (at least not until these
commands are fixed to support packages with namespaces).
You can install `tox`, `pytest`, `mypy` and `flake8` in the Python3
`virtualenv`, or in a `virtualenv` of their own. If all of these
commands pass without warning/error, you can create your pull-request.
### Flake
My `~/.config/flake8` file:
[flake8]
show-source = True
max-line-length = 95
ignore = F405
The suppress of F405 is necessary to allow `from xxx import *`, which I
have not removed in all places (yet).
First make sure your checked out source passes `flake8` without test (it
should). Then make your changes pass without any warnings/errors.
### Tox/pytest
Whether you add something or fix some bug with your code changes, first
add one or more tests that fail in the unmodified source when running
`tox`. Once that is in place add your code, which should have as a
result that your added test(s) no longer fail, and neither should any
other existing tests.
### Typing/mypy
If you add methods or functions to `ruamel.yaml`, you will need to add
Python 2.7 compatible typing information in order for `mypy` to pass
without error.
I run `mypy` from the directory where the (link to) ruamel directory is
using:
mypy --py2 --strict --follow-imports silent ruamel/yaml/*.py
This should give no errors or warnings
## Generated files
I use a minimal environment when developing, void of most artifacts
needed for packaging, testing etc. These artifact files are *generated*,
just before committing to sourceforge and pushing to PyPI, with nuances
coming from the `_package_data` information in `__init__.py`. Included
changes in these files will automatically be reverted, even assuming
your PR is accepted as is.
Consider the following files **read-only** (if you think changes need to
be made to these, contact me):
setup.py
tox.ini
LICENSE
_ryd/conf.py
_ryd/Makefile
## Vulnerabilities
If you find a vulnerability in `ruamel.yaml` (e.g. one that would show the
`safe` and `rt` loaders are not safe due to a bug in the software),
please contact me directly via email, or by leaving a comment on
StackOverflow (below any of my posts), without going into the details
about the vulnerability. After contact is established I will work to
eliminate the vulnerability in a timely fashion. After the vulnerability
is removed, and affected parties have been notified to allow them to
update versions, the vulnerability will be published, and your role in
finding/resolving this properly attributed.
Please note that there is a CVE out there against `ruamel.yaml`, that states
that the input of the function `load()` is not checked. As the
use of `ruamel.yaml.load()` was never the default, was documented to potentially
cause problems when specific parameters were provided, and issued a
warning, this was always an inappropriate statement.
(To compare: no such CVE was issued for the Python standard library
function `pickle.load`, which is merely documented as being potentially
dangerous.) The whole CVE is moot
with the removal of the `load()` function in 0.18.
python-ruyaml-0.92.1/_doc/detail.rst 0000664 0000000 0000000 00000023101 15056754172 0017344 0 ustar 00root root 0000000 0000000 # Details
- support for simple lists as mapping keys by transforming these to
tuples
- `!!omap` generates ordereddict (C) on Python 2,
collections.OrderedDict on Python 3, and `!!omap` is generated for
these types.
- Tests whether the C yaml library is installed as well as the header
files. That library doesn\'t generate CommentTokens, so it cannot be
used to do round trip editing on comments. It can be used to speed
up normal processing (so you don\'t need to install `ruyaml`
and `PyYaml`). See the section *Optional requirements*.
- Basic support for multiline strings with preserved newlines and
chomping ( \'`|`\', \'`|+`\', \'`|-`\' ). As this subclasses the
string type the information is lost on reassignment. (This might be
changed in the future so that the preservation/folding/chomping is
part of the parent container, like comments).
- anchor names that are hand-crafted (not of the form `idNNN`) are
preserved
- [merges](http://yaml.org/type/merge.html) in dictionaries are
preserved
- adding/replacing comments on block-style sequences and mappings with
smart column positioning
- collection objects (when read in via RoundTripParser) have an `lc`
property that contains line and column info: `lc.line` and `lc.col`.
Individual positions for mappings and sequences can also be
retrieved (`lc.key('a')`, `lc.value('a')` resp. `lc.item(3)`); see the
sketch right after this list
- preservation of blank lines after block scalars. Contributed by Sam
Thursfield.
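A minimal sketch of reading back the `lc` information mentioned above (the
input document is made up):
```python
import ruyaml

yaml = ruyaml.YAML()  # the round-trip loader keeps position information
data = yaml.load("a: 1\nb:\n- x\n- y\n")
print(data.lc.line, data.lc.col)             # position of the top level mapping
print(data.lc.key('b'), data.lc.value('b'))  # (line, col) of the key resp. value of 'b'
print(data['b'].lc.item(1))                  # (line, col) of the second sequence item
```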
*In the following examples it is assumed you have done something like:*
```python
from ruyaml import YAML
yaml = YAML()
```
*if not explicitly specified.*
## Indentation of block sequences
Although ruyaml doesn\'t preserve individual indentations of block
sequence items, it does dump:
```yaml
x:
- b: 1
- 2
```
back to:
```yaml
x:
- b: 1
- 2
```
if you specify `yaml.indent(sequence=4)` (indentation is counted to the
beginning of the sequence element).
PyYAML (and older versions of ruyaml) gives you non-indented
scalars (when specifying default_flow_style=False):
```yaml
x:
- b: 1
- 2
```
You can use `mapping=4` to also have the mapping values indented. The
dump also observes an additional `offset=2` setting that can be used to
push the dash inwards, *within the space defined by* `sequence`.
The above example with the often seen
`yaml.indent(mapping=2, sequence=4, offset=2)` indentation:
```yaml
x:
  y:
    - b: 1
    - 2
```
The defaults are as if you specified
`yaml.indent(mapping=2, sequence=2, offset=0)`.
If the `offset` equals `sequence`, there is not enough room for the dash
and the space that has to follow it. In that case the element itself
would normally be pushed to the next line (and older versions of
`ruyaml` did so). But this is prevented from happening. However the
`indent` level is what is used for calculating the cumulative indent for
deeper levels and specifying `sequence=3` resp. `offset=2`, might give
correct, but counter-intuitive results.
**It is best to always have** `sequence >= offset + 2` **but this is not
enforced**. Depending on your structure, not following this advice
**might lead to invalid output**.
### Inconsistently indented YAML
If your input is inconsistently indented, such indentation cannot be
preserved. The first round-trip will make it consistent/normalize it.
Here are some inconsistently indented YAML examples.
`b` indented 3, `c` indented 4 positions:
```yaml
a:
   b:
       c: 1
```
Top level sequence is indented 2 without offset, the other sequence 4
(with offset 2):
```yaml
- key:
    - foo
    - bar
```
### Indenting using `typ="safe"`
The C based emitter doesn't offer fine-grained control, such as distinguishing
between block mappings and sequences. Only use the `pure` Python version
of the dumper if you want to have that sort of control.
## Positioning ':' in top level mappings, prefixing ':'
If you want your toplevel mappings to look like:
```yaml
library version: 1
comment        : |
  this is just a first try
```
then set `yaml.top_level_colon_align = True` (and `yaml.indent = 4`).
`True` causes calculation based on the longest key, but you can also
explicitly set a number.
If you want an extra space between a mapping key and the colon specify
`yaml.prefix_colon = ' '`:
```yaml
- https://myurl/abc.tar.xz : 23445
# extra space here --^
- https://myurl/def.tar.xz : 944
```
If you combine `prefix_colon` with `top_level_colon_align`, the top
level mapping doesn\'t get the extra prefix. If you want that anyway,
specify `yaml.top_level_colon_align = 12`, where `12` has to be an
integer that is one more than the length of the widest key.
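A minimal sketch combining the two settings described above (the exact output
columns depend on the longest key, so take the comments as expectations rather
than guarantees):
```python
import sys

from ruyaml import YAML

yaml = YAML()
yaml.top_level_colon_align = True  # align ':' on the longest top level key
yaml.prefix_colon = ' '            # extra space before ':' (not on the top level, see above)
yaml.dump({'library version': 1, 'comment': 'just a first try'}, sys.stdout)
```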
### Document version support
In YAML a document version can be explicitly set by using:
%YAML 1.x
before the document start (at the top or before a `---`). For
`ruyaml` x has to be 1 or 2. If no explicit version is set [version
1.2](http://www.yaml.org/spec/1.2/spec.html) is assumed (which has been
released in 2009).
The 1.2 version does **not** support:
- sexagesimals like `12:34:56`
- octals that start with 0 only: like `012` for number 10 (`0o12`
**is** supported by YAML 1.2)
- Unquoted `Yes` and `On` as alternatives for `True`, resp. `No` and `Off` for
`False`.
If you cannot change your YAML files and you need them to load as 1.1
you can load with `yaml.version = (1, 1)`, or the equivalent (version
can be a tuple, list or string) `yaml.version = "1.1"`
*If you cannot change your code, stick with ruyaml==0.10.23 and let
me know if it would help to be able to set an environment variable.*
This does not affect dumping, as ruyaml never emitted sexagesimals or
octal numbers, and always emitted booleans as true resp. false.
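A minimal sketch of the effect of switching the loader to YAML 1.1 (the
expected values follow from the rules above):
```python
from ruyaml import YAML

yaml = YAML(typ='safe', pure=True)
print(yaml.load('052'))   # YAML 1.2 (default): no leading-zero octals, loads as 52
yaml.version = (1, 1)
print(yaml.load('052'))   # YAML 1.1: octal notation, loads as 42
```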
### Round trip including comments
The major motivation for this fork is the round-trip capability for
comments. The integration of the sources was just an initial step to
make this easier.
#### adding/replacing comments
Starting with version 0.8, you can add/replace comments on block style
collections (mappings/sequences resulting in Python dict/list). The basic
pattern for this is:
--- !python |
import sys
import ruyaml
yaml = ruyaml.YAML() # defaults to round-trip
inp = """\
abc:
- a # comment 1
xyz:
a: 1 # comment 2
b: 2
c: 3
d: 4
e: 5
f: 6 # comment 3
"""
data = yaml.load(inp)
data['abc'].append('b')
data['abc'].yaml_add_eol_comment('comment 4', 1) # takes column of comment 1
data['xyz'].yaml_add_eol_comment('comment 5', 'c') # takes column of comment 2
data['xyz'].yaml_add_eol_comment('comment 6', 'e') # takes column of comment 3
data['xyz'].yaml_add_eol_comment('comment 7\n\n# that\'s all folks', 'd', column=20)
yaml.dump(data, sys.stdout)
--- !stdout-yaml |
Resulting in:
--- !comment |
abc:
- a # comment 1
- b # comment 4
xyz:
a: 1 # comment 2
b: 2
c: 3 # comment 5
d: 4 # comment 7
e: 5 # comment 6
f: 6 # comment 3
--- |
If the comment doesn\'t start with \'#\', this will be added. The key is
the element index for list, the actual key for dictionaries. As can be
seen from the example, the column to choose for a comment is derived
from the previous, next or preceding comment column (picking the first
one found).
Make sure that the added comment is correct, in the sense that when it
contains newlines, the following is either an empty line or a line with
only spaces, or the first non-space is a `#`.
## Config file formats
There are only a few configuration file formats that are easily readable
and editable: JSON, INI/ConfigParser, YAML (XML is too cluttered to be
called easily readable).
Unfortunately [JSON](http://www.json.org/) doesn\'t support comments,
and although there are some solutions with pre-processed filtering of
comments, there are no libraries that support round trip updating of
such commented files.
INI files support comments, and the excellent
[ConfigObj](http://www.voidspace.org.uk/python/configobj.html) library
by Foord and Larosa even supports round trip editing with comment
preservation, nesting of sections and limited lists (within a value).
Retrieval of particular value format is explicit (and extensible).
YAML has basic mapping and sequence structures as well as support for
ordered mappings and sets. It supports scalars of various types, including
dates and datetimes (missing in JSON). YAML has comments, but these are
normally thrown away.
Block structured YAML is a clean and very human readable format. By
extending the Python YAML parser to support round trip preservation of
comments, it makes YAML a very good choice for configuration files that
are human readable and editable while at the same time interpretable and
modifiable by a program.
## Extending
There are normally six files involved when extending the roundtrip
capabilities: the reader, parser, composer and constructor to go from
YAML to Python and the resolver, representer, serializer and emitter to
go the other way.
Extending involves keeping extra data around for the next process step,
eventually resulting in a different Python object (subclass or
alternative), that should behave like the original, but on the way from
Python to YAML generates the original (or at least something much
closer).
## Smartening
When you use round-tripping, then the complex data you get are already
subclasses of the built-in types. So you can patch in extra methods or
override existing ones. Some methods are already included and you can
do:
```python
yaml_str = """\
a:
- b:
c: 42
- d:
f: 196
e:
g: 3.14
"""
data = yaml.load(yaml_str)
assert data.mlget(['a', 1, 'd', 'f'], list_ok=True) == 196
```
python-ruyaml-0.92.1/_doc/dumpcls.rst 0000664 0000000 0000000 00000005030 15056754172 0017552 0 ustar 00root root 0000000 0000000
**********************
Dumping Python classes
**********************
Only ``yaml = YAML(typ='unsafe')`` loads and dumps Python objects out-of-the-box. And
since it loads **any** Python object, this can be unsafe.
If you have instances of some class(es) that you want to dump or load, it is
easy to allow the YAML instance to do that explicitly. You can either register the
class with the ``YAML`` instance or decorate the class.
Registering is done with ``YAML.register_class()``::
import sys
import ruyaml
class User:
def __init__(self, name, age):
self.name = name
self.age = age
yaml = ruyaml.YAML()
yaml.register_class(User)
yaml.dump([User('Anthon', 18)], sys.stdout)
which gives as output::
- !User
name: Anthon
age: 18
The tag ``!User`` originates from the name of the class.
You can specify a different tag by adding the attribute ``yaml_tag``, and
explicitly specify dump and/or load *classmethods*, which have to be called
``to_yaml`` resp. ``from_yaml``::
import sys
import ruyaml
class User:
yaml_tag = u'!user'
def __init__(self, name, age):
self.name = name
self.age = age
@classmethod
def to_yaml(cls, representer, node):
return representer.represent_scalar(cls.yaml_tag,
u'{.name}-{.age}'.format(node, node))
@classmethod
def from_yaml(cls, constructor, node):
return cls(*node.value.split('-'))
yaml = ruyaml.YAML()
yaml.register_class(User)
yaml.dump([User('Anthon', 18)], sys.stdout)
which gives as output::
- !user Anthon-18
When using the decorator, which takes the ``YAML()`` instance as a parameter,
the ``yaml = YAML()`` line needs to be moved up in the file::
import sys
from ruyaml import YAML, yaml_object
yaml = YAML()
@yaml_object(yaml)
class User:
yaml_tag = u'!user'
def __init__(self, name, age):
self.name = name
self.age = age
@classmethod
def to_yaml(cls, representer, node):
return representer.represent_scalar(cls.yaml_tag,
u'{.name}-{.age}'.format(node, node))
@classmethod
def from_yaml(cls, constructor, node):
return cls(*node.value.split('-'))
yaml.dump([User('Anthon', 18)], sys.stdout)
The ``yaml_tag``, ``from_yaml`` and ``to_yaml`` work in the same way as when using
``.register_class()``.
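Loading such output back goes through ``from_yaml``; a minimal sketch, reusing the
``yaml`` instance and ``User`` class registered above::

    loaded = yaml.load('- !user Anthon-18\n')
    user = loaded[0]
    assert user.name == 'Anthon' and user.age == '18'  # age is a string here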
python-ruyaml-0.92.1/_doc/dumpcls.ryd 0000664 0000000 0000000 00000016047 15056754172 0017552 0 ustar 00root root 0000000 0000000 version: 0.2
text: smd
pdf: false
# code_directory: ../_example
--- |
# Working with Python classes
## Dumping Python classes
Only `yaml = YAML(typ='unsafe')` loads and dumps Python objects
out-of-the-box. And since it loads **any** Python object, this can be
unsafe, so don't use it.
If you have instances of some class(es) that you want to dump or load,
it is easy to allow the YAML instance to do that explicitly. You can
either register the class with the `YAML` instance or decorate the
class.
Registering is done with `YAML.register_class()`:
--- !python |
import sys
import ruyaml
class User:
def __init__(self, name, age):
self.name = name
self.age = age
yaml = ruyaml.YAML()
yaml.register_class(User)
yaml.dump([User('Anthon', 18)], sys.stdout)
--- !stdout-yaml |
which gives as output:
--- |
The tag `!User` originates from the name of the class.
You can specify a different tag by adding the attribute `yaml_tag`, and
explicitly specify dump and/or load *classmethods* which have to be
named `to_yaml` resp. `from_yaml`:
--- !python |
import sys
import ruyaml
class User:
yaml_tag = u'!user'
def __init__(self, name, age):
self.name = name
self.age = age
@classmethod
def to_yaml(cls, representer, node):
return representer.represent_scalar(cls.yaml_tag,
u'{.name}-{.age}'.format(node, node))
@classmethod
def from_yaml(cls, constructor, node):
return cls(*node.value.split('-'))
yaml = ruyaml.YAML()
yaml.register_class(User)
yaml.dump([User('Anthon', 18)], sys.stdout)
--- !stdout-yaml |
which gives as output:
--- |
When using the decorator, which takes the `YAML()` instance as a
parameter, the `yaml = YAML()` line needs to be moved up in the file:
--- !python |
import sys
from ruyaml import YAML, yaml_object
yaml = YAML()
@yaml_object(yaml)
class User:
yaml_tag = u'!user'
def __init__(self, name, age):
self.name = name
self.age = age
@classmethod
def to_yaml(cls, representer, node):
return representer.represent_scalar(cls.yaml_tag,
u'{.name}-{.age}'.format(node, node))
@classmethod
def from_yaml(cls, constructor, node):
return cls(*node.value.split('-'))
yaml.dump([User('Anthon', 18)], sys.stdout)
--- |
The `yaml_tag`, `from_yaml` and `to_yaml` work in the same way as when
using `.register_class()`.
Alternatively you can use the `register_class()` method as a decorator.
This also requires that you have the yaml instance available:
--- !python |
import sys
import ruamel.yaml
yaml = ruamel.yaml.YAML()
@yaml.register_class
class User:
yaml_tag = u'!user'
def __init__(self, name, age):
self.name = name
self.age = age
@classmethod
def to_yaml(cls, representer, node):
return representer.represent_scalar(cls.yaml_tag,
u'{.name}-{.age}'.format(node, node))
@classmethod
def from_yaml(cls, constructor, node):
return cls(*node.value.split('-'))
yaml.dump([User('Anthon', 18)], sys.stdout)
--- !stdout-yaml |
This also gives:
--- |
If your class is dumped as a YAML mapping or sequence, there might be an (indirect)
reference to the object itself in one or more of the mapping keys (in YAML these
don't have to be simple scalars), mapping values or sequence entries.
That means that re-creating an object in `from_yaml` cannot generally just create
a `dict`/`list` from the `node` parameter and then create and return a complete
object.
and then fill in the content data afterwards. That way, if there is a self
reference, and the same node is encountered *while creating the content for the
object*, there is an `id` (from the yielded object) created for that node which
can be assigned.
--- !python |
from pathlib import Path
import ruamel.yaml
class Person:
def __init__(self, name, siblings=None):
self.name = name
self.siblings = [] if siblings is None else siblings
arya = Person('Arya')
sansa = Person('Sansa')
arya.siblings.append(sansa) # there are better ways to represent this
sansa.siblings.append(arya)
yaml = ruamel.yaml.YAML()
yaml.register_class(Person)
path = Path('/tmp/arya.yaml')
yaml.dump(arya, path)
print(path.read_text())
--- !stdout-yaml |
dumping as:
--- |
And you can load the output:
--- !python |
from pathlib import Path
import ruamel.yaml
class Person:
def __init__(self, name, siblings=None):
self.name = name
self.siblings = [] if siblings is None else siblings
def __repr__(self):
return f'Person(name: {self.name}, siblings: {self.siblings})'
path = Path('/tmp/arya.yaml')
yaml = ruamel.yaml.YAML()
yaml.register_class(Person)
data = yaml.load(path)
print(data)
--- !stdout |
giving:
--- |
But if you provide a (too) simple loader:
--- !python |
from pathlib import Path
import ruamel.yaml
class Person:
def __init__(self, name, siblings=None):
self.name = name
self.siblings = [] if siblings is None else siblings
def __repr__(self):
return f'Person(name: {self.name}, siblings: {self.siblings})'
@classmethod
def from_yaml(cls, constructor, node):
data = ruamel.yaml.CommentedMap()
constructor.construct_mapping(node, maptyp=data, deep=True)
return cls(**data)
path = Path('/tmp/arya.yaml')
yaml = ruamel.yaml.YAML()
yaml.register_class(Person)
data = yaml.load(path)
print(data)
--- !stdout |
giving:
--- |
As you can see, Sansa has no normal siblings after this load.
What you need to do is yield the empty `Person` instance, and fill it in
afterwards:
--- !python |
from pathlib import Path
import ruamel.yaml
class Person:
def __init__(self, name, siblings=None):
self.name = name
self.siblings = [] if siblings is None else siblings
def __repr__(self):
return f'Person(name: {self.name}, siblings: {self.siblings})'
@classmethod
def from_yaml(cls, constructor, node):
person = Person(name='')
yield person
data = ruamel.yaml.CommentedMap()
constructor.construct_mapping(node, maptyp=data, deep=True)
for k, v in data.items():
setattr(person, k, v)
path = Path('/tmp/arya.yaml')
yaml = ruamel.yaml.YAML()
yaml.register_class(Person)
data = yaml.load(path)
print(data)
--- !stdout |
giving:
--- |
## Dataclass
Although you could always register dataclasses, in 0.17.34 support was added to
call `__post_init__()` on these classes, if available.
--- !python |
from typing import ClassVar
from dataclasses import dataclass
import ruamel.yaml
@dataclass
class DC:
yaml_tag: ClassVar = '!dc_example' # if you don't want !DC as tag
abc: int
klm: int
xyz: int = 0
def __post_init__(self) -> None:
self.xyz = self.abc + self.klm
yaml = ruamel.yaml.YAML()
yaml.register_class(DC)
dc = DC(abc=5, klm=42)
assert dc.xyz == 47
yaml_str = """\
!dc_example
abc: 13
klm: 37
"""
dc2 = yaml.load(yaml_str)
print(f'{dc2.xyz=}')
--- !stdout |
printing:
python-ruyaml-0.92.1/_doc/example.rst 0000664 0000000 0000000 00000020267 15056754172 0017547 0 ustar 00root root 0000000 0000000 ********
Examples
********
Basic round trip of parsing YAML to Python objects, modifying
and generating YAML::
import sys
from ruyaml import YAML
inp = """\
# example
name:
# details
family: Smith # very common
given: Alice # one of the siblings
"""
yaml = YAML()
code = yaml.load(inp)
code['name']['given'] = 'Bob'
yaml.dump(code, sys.stdout)
Resulting in::
# example
name:
# details
family: Smith # very common
given: Bob # one of the siblings
with the old API::
from __future__ import print_function
import sys
import ruyaml
inp = """\
# example
name:
# details
family: Smith # very common
given: Alice # one of the siblings
"""
code = ruyaml.load(inp, ruyaml.RoundTripLoader)
code['name']['given'] = 'Bob'
ruyaml.dump(code, sys.stdout, Dumper=ruyaml.RoundTripDumper)
# the last statement can be done less efficiently in time and memory with:
# print(ruyaml.dump(code, Dumper=ruyaml.RoundTripDumper), end='')
# leaving out the end='' would cause a double newline at the end
Resulting in ::
# example
name:
# details
family: Smith # very common
given: Bob # one of the siblings
----
YAML handcrafted anchors and references as well as key merging
are preserved. The merged keys can transparently be accessed
using ``[]`` and ``.get()``::
from ruyaml import YAML
inp = """\
- &CENTER {x: 1, y: 2}
- &LEFT {x: 0, y: 2}
- &BIG {r: 10}
- &SMALL {r: 1}
# All the following maps are equal:
# Explicit keys
- x: 1
y: 2
r: 10
label: center/big
# Merge one map
- <<: *CENTER
r: 10
label: center/big
# Merge multiple maps
- <<: [*CENTER, *BIG]
label: center/big
# Override
- <<: [*BIG, *LEFT, *SMALL]
x: 1
label: center/big
"""
yaml = YAML()
data = yaml.load(inp)
assert data[7]['y'] == 2
The ``CommentedMap``, which is the ``dict`` like construct one gets when round-trip loading,
supports insertion of a key into a particular position, while optionally adding a comment::
import sys
from ruyaml import YAML
yaml_str = """\
first_name: Art
occupation: Architect # This is an occupation comment
about: Art Vandelay is a fictional character that George invents...
"""
yaml = YAML()
data = yaml.load(yaml_str)
data.insert(1, 'last name', 'Vandelay', comment="new key")
yaml.dump(data, sys.stdout)
gives::
first_name: Art
last name: Vandelay # new key
occupation: Architect # This is an occupation comment
about: Art Vandelay is a fictional character that George invents...
Please note that the comment is aligned with that of its neighbour (if available).
The above was inspired by a `question <http://stackoverflow.com/a/36970608/1307905>`_
posted by *demux* on StackOverflow.
----
By default ``ruyaml`` indents with two positions in block style, for
both mappings and sequences. For sequences the indent is counted to the
beginning of the scalar, with the dash taking the first position of the
indented "space".
You can change this default indentation by e.g. using ``yaml.indent()``::
import sys
from ruyaml import YAML
d = dict(a=dict(b=2),c=[3, 4])
yaml = YAML()
yaml.dump(d, sys.stdout)
print('0123456789')
yaml = YAML()
yaml.indent(mapping=4, sequence=6, offset=3)
yaml.dump(d, sys.stdout)
print('0123456789')
giving::
a:
b: 2
c:
- 3
- 4
0123456789
a:
b: 2
c:
- 3
- 4
0123456789
If a block sequence or block mapping is the element of a sequence, they
are, by default, displayed in `compact
<http://yaml.org/spec/1.2/spec.html#id2797686>`__ notation. This means
that the dash of the "parent" sequence is on the same line as the
first element resp. first key/value pair of the child collection.
If you want either or both of these (sequence within sequence, mapping
within sequence) to begin on the next line use ``yaml.compact()``::
import sys
from ruyaml import YAML
d = [dict(b=2), [3, 4]]
yaml = YAML()
yaml.dump(d, sys.stdout)
print('='*15)
yaml = YAML()
yaml.compact(seq_seq=False, seq_map=False)
yaml.dump(d, sys.stdout)
giving::
- b: 2
- - 3
- 4
===============
-
b: 2
-
- 3
- 4
------
The following program uses three dumps on the same data, resulting in a stream with
three documents::
import sys
from ruyaml import YAML
data = {1: {1: [{1: 1, 2: 2}, {1: 1, 2: 2}], 2: 2}, 2: 42}
yaml = YAML()
yaml.explicit_start = True
yaml.dump(data, sys.stdout)
yaml.indent(sequence=4, offset=2)
yaml.dump(data, sys.stdout)
def sequence_indent_four(s):
# this will fail on directly nested lists: {1: [[2, 3], 4]}
levels = []
ret_val = ''
for line in s.splitlines(True):
ls = line.lstrip()
indent = len(line) - len(ls)
if ls.startswith('- '):
if not levels or indent > levels[-1]:
levels.append(indent)
elif levels:
if indent < levels[-1]:
levels = levels[:-1]
# same -> do nothing
else:
if levels:
if indent <= levels[-1]:
while levels and indent <= levels[-1]:
levels = levels[:-1]
ret_val += ' ' * len(levels) + line
return ret_val
yaml = YAML()
yaml.explicit_start = True
yaml.dump(data, sys.stdout, transform=sequence_indent_four)
gives as output::
---
1:
1:
- 1: 1
2: 2
- 1: 1
2: 2
2: 2
2: 42
---
1:
1:
- 1: 1
2: 2
- 1: 1
2: 2
2: 2
2: 42
---
1:
1:
- 1: 1
2: 2
- 1: 1
2: 2
2: 2
2: 42
The transform example, in the last document, was inspired by a
`question posted by *nowox*
<https://stackoverflow.com/q/44388701/1307905>`_ on StackOverflow.
-----
Output of ``dump()`` as a string
++++++++++++++++++++++++++++++++
The single most abused "feature" of the old API is not providing the (second)
stream parameter to one of the ``dump()`` variants, in order to get a monolithic string
representation of the stream back.
Apart from being memory inefficient and slow, quite often people using this did not
realise that ``print(round_trip_dump(dict(a=1, b=2)))`` gets you an extra,
empty, line after ``b: 2``.
The real question is why this functionality, which is seldom really
necessary, is available in the old API (and in PyYAML) in the first place. One
explanation you get by looking at what someone would need to do to make this
available if it weren't there already. Apart from subclassing the ``Serializer``
and providing a new ``dump`` method, which would take ten or so lines, another
**hundred** lines, essentially the whole ``dumper.py`` file, would need to be
copied to make use of this serializer.
The fact is that one should normally be doing ``round_trip_dump(dict(a=1, b=2),
sys.stdout)`` and do away with 90% of the cases for returning the string, and
that all post-processing of YAML, before writing to stream, can be handled by using
the ``transform=`` parameter of dump, which covers most of the rest. But
it is also much easier in the new API to provide that YAML output as a string if
you really need to have it (or think you do)::
import sys
from ruyaml import YAML
from io import StringIO
class MyYAML(YAML):
def dump(self, data, stream=None, **kw):
inefficient = False
if stream is None:
inefficient = True
stream = StringIO()
YAML.dump(self, data, stream, **kw)
if inefficient:
return stream.getvalue()
yaml = MyYAML() # or typ='safe'/'unsafe' etc
with about one tenth of the lines needed for the old interface, you can once more do::
print(yaml.dump(dict(a=1, b=2)))
instead of::
yaml.dump((dict(a=1, b=2)), sys.stdout)
print() # or sys.stdout.write('\n')
python-ruyaml-0.92.1/_doc/example.ryd 0000664 0000000 0000000 00000015461 15056754172 0017535 0 ustar 00root root 0000000 0000000 version: 0.2
text: smd
pdf: false
--- |
# Examples
Basic round trip of parsing YAML to Python objects, modifying and
generating YAML:
--- !python |
import sys
from ruamel.yaml import YAML
inp = """\
# example
name:
# details
family: Smith # very common
given: Alice # one of the siblings
"""
yaml = YAML()
code = yaml.load(inp)
code['name']['given'] = 'Bob'
yaml.dump(code, sys.stdout)
--- !stdout-yaml |
Resulting in:
--- |
------------------------------------------------------------------------
YAML handcrafted anchors and references as well as key merging are
preserved. The merged keys can transparently be accessed using `[]` and
`.get()`:
--- !python |
from ruamel.yaml import YAML
inp = """\
- &CENTER {x: 1, y: 2}
- &LEFT {x: 0, y: 2}
- &BIG {r: 10}
- &SMALL {r: 1}
# All the following maps are equal:
# Explicit keys
- x: 1
y: 2
r: 10
label: center/big
# Merge one map
- <<: *CENTER
r: 10
label: center/big
# Merge multiple maps
- <<: [*CENTER, *BIG]
label: center/big
# Override
- <<: [*BIG, *LEFT, *SMALL]
x: 1
label: center/big
"""
yaml = YAML()
data = yaml.load(inp)
assert data[7]['y'] == 2
--- |
The `CommentedMap`, which is the `dict` like construct one gets when
round-trip loading, supports insertion of a key into a particular
position, while optionally adding a comment:
--- !python |
import sys
from ruamel.yaml import YAML
yaml_str = """\
first_name: Art
occupation: Architect # This is an occupation comment
about: Art Vandelay is a fictional character that George invents...
"""
yaml = YAML()
data = yaml.load(yaml_str)
data.insert(1, 'last name', 'Vandelay', comment="new key")
yaml.dump(data, sys.stdout)
--- !stdout-yaml |
gives:
--- |
Please note that the comment is aligned with that of its neighbour (if
available).
The above was inspired by a
[question](http://stackoverflow.com/a/36970608/1307905) posted by
*demux* on StackOverflow.
------------------------------------------------------------------------
By default `ruamel.yaml` indents with two positions in block style, for
both mappings and sequences. For sequences the indent is counted to the
beginning of the scalar, with the dash taking the first position of the
indented \"space\".
You can change this default indentation by e.g. using `yaml.indent()`:
--- !python |
import sys
from ruamel.yaml import YAML
d = dict(a=dict(b=2),c=[3, 4])
yaml = YAML()
yaml.dump(d, sys.stdout)
print('#123456789')
yaml = YAML()
yaml.indent(mapping=4, sequence=6, offset=3)
yaml.dump(d, sys.stdout)
print('#123456789')
--- !stdout-yaml |
giving:
--- |
If a block sequence or block mapping is the element of a sequence, they
are, by default, displayed in
[compact](http://yaml.org/spec/1.2/spec.html#id2797686) notation. This
means that the dash of the \"parent\" sequence is on the same line as
the first element resp. first key/value pair of the child collection.
If you want either or both of these (sequence within sequence, mapping
within sequence) to begin on the next line use `yaml.compact()`:
--- !python |
import sys
from ruamel.yaml import YAML
d = [dict(b=2), [3, 4]]
yaml = YAML()
yaml.dump(d, sys.stdout)
print('='*15)
yaml = YAML()
yaml.compact(seq_seq=False, seq_map=False)
yaml.dump(d, sys.stdout)
--- !stdout-yaml |
giving:
--- |
------------------------------------------------------------------------
The following program uses three dumps on the same data, resulting in a
stream with three documents:
--- !python |
import sys
from ruamel.yaml import YAML
data = {1: {1: [{1: 1, 2: 2}, {1: 1, 2: 2}], 2: 2}, 2: 42}
yaml = YAML()
yaml.explicit_start = True
yaml.dump(data, sys.stdout)
yaml.indent(sequence=4, offset=2)
yaml.dump(data, sys.stdout)
def sequence_indent_four(s):
# this will fail on directly nested lists: {1: [[2, 3], 4]}
levels = []
ret_val = ''
for line in s.splitlines(True):
ls = line.lstrip()
indent = len(line) - len(ls)
if ls.startswith('- '):
if not levels or indent > levels[-1]:
levels.append(indent)
elif levels:
if indent < levels[-1]:
levels = levels[:-1]
# same -> do nothing
else:
if levels:
if indent <= levels[-1]:
while levels and indent <= levels[-1]:
levels = levels[:-1]
ret_val += ' ' * len(levels) + line
return ret_val
yaml = YAML()
yaml.explicit_start = True
yaml.dump(data, sys.stdout, transform=sequence_indent_four)
--- !stdout-yaml |
gives as output:
--- |
The transform example, in the last document, was inspired by a [question
posted by \*nowox\*](https://stackoverflow.com/q/44388701/1307905) on
StackOverflow.
------------------------------------------------------------------------
## Output of `dump()` as a string
The single most abused "feature" of the old API is not providing the
(second) stream parameter to one of the `dump()` variants, in order to
get a monolithic string representation of the stream back.
Apart from being memory inefficient and slow, quite often people using
this did not realise that `print(round_trip_dump(dict(a=1, b=2)))` gets
you an extra, empty, line after `b: 2`.
The real question is why this functionality, which is seldom really
necessary, is available in the old API (and in PyYAML) in the first
place. One explanation you get by looking at what someone would need to
do to make this available if it weren\'t there already. Apart from
subclassing the `Serializer` and providing a new `dump` method, which
would take ten or so lines, another **hundred** lines, essentially the whole
`dumper.py` file, would need to be copied to make use of this
serializer.
The fact is that one should normally be doing
`round_trip_dump(dict(a=1, b=2), sys.stdout)` and do away with 90% of
the cases for returning the string, and that all post-processing of YAML,
before writing to stream, can be handled by using the `transform=`
parameter of dump, which covers most of the rest. But it is also
much easier in the new API to provide that YAML output as a string if
you really need to have it (or think you do):
--- !python |
import sys
from ruamel.yaml import YAML
from ruamel.yaml.compat import StringIO
class MyYAML(YAML):
def dump(self, data, stream=None, **kw):
inefficient = False
if stream is None:
inefficient = True
stream = StringIO()
YAML.dump(self, data, stream, **kw)
if inefficient:
return stream.getvalue()
yaml = MyYAML() # or typ='safe'/'unsafe' etc
--- |
with about one tenth of the lines needed for the old interface, you can
once more do:
--- !python-code |
print(yaml.dump(dict(a=1, b=2)))
--- |
instead of:
--- !python-code |
yaml.dump((dict(a=1, b=2)), sys.stdout)
print() # or sys.stdout.write('\n')
python-ruyaml-0.92.1/_doc/index.rst 0000664 0000000 0000000 00000000650 15056754172 0017215 0 ustar 00root root 0000000 0000000
***********
ruyaml
***********
`GitHub <https://github.com/pycontribs/ruyaml>`_ |
`PyPI <https://pypi.org/project/ruyaml/>`_
Contents:
.. toctree::
:maxdepth: 2
overview
install
basicuse
dumpcls
detail
example
api
pyyaml
contributing
upmerge
.. image:: https://readthedocs.org/projects/ruyaml/badge/?version=stable
:target: https://ruyaml.readthedocs.org/en/stable
python-ruyaml-0.92.1/_doc/install.rst 0000664 0000000 0000000 00000002432 15056754172 0017554 0 ustar 00root root 0000000 0000000 version: 0.2
text: smd
pdf: false
--- |
# Installing
Make sure you have a recent version of `pip` and `setuptools` installed.
The latter needs environment marker support (`setuptools>=20.6.8`), which
is e.g. bundled with Python 3.4.6 but not with 3.4.4. It is
probably best to do:
pip install -U pip setuptools wheel
in your environment (`virtualenv`, (Docker) container, etc) before
installing `ruyaml`.
`ruyaml` itself should be installed from [PyPI] using:
pip install ruyaml
If you want to process jinja2/YAML templates (which are not valid YAML
with the default jinja2 markers), do `pip install ruyaml[jinja2]`
(you might need to quote the last argument because of the `[]`)
There also is a commandline utility `yaml` available after installing:
pip install ruyaml.cmd
that allows for round-trip testing/re-indenting and conversion of YAML
files (JSON, INI, HTML tables).
## Optional requirements
If you have the header files for your Python executables installed,
then you can use the (non-roundtrip) but faster C loader and emitter.
On Debian systems you should use:
sudo apt-get install python3-dev
you can leave out `python3-dev` if you don\'t use python3
For CentOS (7) based systems you should do:
sudo yum install python-devel
--- !inc-raw |
links.rydinc
python-ruyaml-0.92.1/_doc/overview.rst 0000664 0000000 0000000 00000003644 15056754172 0017762 0 ustar 00root root 0000000 0000000 version: 0.2
text: smd
pdf: false
--- |
# Overview
`ruyaml` is a YAML 1.2 loader/dumper package for Python. It is a
derivative of Kirill Simonov\'s [PyYAML
3.11](https://bitbucket.org/xi/pyyaml).
`ruyaml` supports [YAML 1.2] and has round-trip loaders and
dumpers. A round-trip is a YAML load-modify-save sequence and
ruyaml tries to preserve, among others:
- comments
- block style and key ordering are kept, so you can diff the
round-tripped source
- flow style sequences ( \'a: b, c, d\') (based on request and test by
Anthony Sottile)
- anchor names that are hand-crafted (i.e. not of the form `idNNN`)
- [merges](http://yaml.org/type/merge.html) in dictionaries are
preserved
This preservation is normally not broken unless you severely alter the
structure of a component (delete a key in a dict, remove list entries).
Reassigning values or replacing list items, etc., is fine.
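A tiny sketch of such a round trip (the input document is made up):
```python
import sys

from ruyaml import YAML

yaml = YAML()  # round-trip is the default
data = yaml.load("a: 1  # keep\nb: [2, 3]\n")
data['a'] = 42               # reassigning a value is fine
yaml.dump(data, sys.stdout)  # the comment and the flow style of 'b' are kept
```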
For the specific 1.2 differences see
`yaml-1-2-support`{.interpreted-text role="ref"}
Although individual indentation of lines is not preserved, you can
specify separate indentation levels for mappings and sequences (counting
for sequences does **not** include the dash for a sequence element) and
specific offset of block sequence dashes within that indentation.
Although `ruyaml` still allows most of the PyYAML way of doing
things, adding features required a different API than the transient
nature of PyYAML\'s `Loader` and `Dumper`. Starting with `ruyaml`
version 0.15.0 this new API gets introduced. Old ways that get in the
way will be removed, after first generating warnings on use, then
generating an error. In general a warning in version 0.N.x will become
an error in 0.N+1.0
Many of the bugs filed against PyYAML, but that were never acted upon,
have been fixed in `ruyaml`
--- !inc-raw |
links.rydinc
--- |
----------
This documentation was generated using [ryd](https://pypi.org/project/ryd/) and [zine](https://zine-ssg.io/).
python-ruyaml-0.92.1/_doc/pyyaml.rst 0000664 0000000 0000000 00000004303 15056754172 0017420 0 ustar 00root root 0000000 0000000 version: 0.2
text: smd
pdf: false
--- |
# Differences with PyYAML
```
If I have seen further, it is by standing on the shoulders of giants.
Isaac Newton (1676)
```
`ruyaml` is a derivative of Kirill Simonov\'s [PyYAML
3.11](https://bitbucket.org/xi/pyyaml) and would not exist without that
excellent base to start from.
The following a summary of the major differences with PyYAML 3.11
## Defaulting to YAML 1.2 support
PyYAML supports the [YAML 1.1] standard, `ruyaml` supports [YAML
1.2] as released in 2009.
- YAML 1.2 dropped support for several features: unquoted `Yes`, `No`,
`On`, `Off`
- YAML 1.2 no longer accepts strings that start with a `0` and solely
consist of number characters as octal, you need to specify such
strings with `0o[0-7]+` (zero + lower-case o for octal + one or more
octal characters).
- YAML 1.2 no longer supports
[sexagesimals](https://en.wikipedia.org/wiki/Sexagesimal), so the
string scalar `12:34:56` doesn\'t need quoting.
- `\/` escape for JSON compatibility
- correct parsing of floating point scalars with exponentials
Unless the YAML document is loaded with an explicit `version==1.1`, or
the document starts with:
%YAML 1.1
`ruyaml` will load the document as version 1.2.
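As a minimal sketch of what the 1.2 default means in practice (expected results
in the comments, using the pure Python safe loader):
```python
from ruyaml import YAML

yaml = YAML(typ='safe', pure=True)   # YAML 1.2 rules by default
print(yaml.load('yes'))        # the string 'yes', not the boolean True
print(yaml.load('0o52'))       # 42; octal now needs the 0o prefix
print(yaml.load('12:34:56'))   # the string '12:34:56', no sexagesimal integer
```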
## PY2/PY3 reintegration
`ruyaml` re-integrates the Python 2 and 3 sources, running on
Python 2.7 (CPython, PyPy), 3.3, 3.4, 3.5 and 3.6 (support for 2.6 was
dropped mid 2016). It is easier to extend and maintain as only a
minuscule part of the code is Python version specific.
## Fixes
- `ruyaml` follows the `indent` keyword argument on scalars when
dumping.
- `ruyaml` allows `:` in plain scalars, as long as these are not
followed by a space (as per the specification)
## Testing
`ruyaml` is tested using [tox] and [py.test]. In addition to
new tests, the original PyYAML test framework is called from within
`tox` runs.
Before versions are pushed to PyPI, `tox` is invoked, and has to pass,
on all supported Python versions, as well as the flake8/pep8 checks.
## API
Starting with 0.15 the API for using `ruyaml` has diverged allowing
easier addition of new features.
--- !inc-raw
- links.rydinc
python-ruyaml-0.92.1/_doc/upmerge.rst 0000664 0000000 0000000 00000005236 15056754172 0017557 0 ustar 00root root 0000000 0000000 **************
Upstream Merge
**************
The process to merge ``ruamel.yaml``'s Mercurial repository into ours is
non-trivial due to non-unique Mercurial-to-git imports and squash merges.
Preparation
===========
We create a git import of the Upstream repository. Then we add a
pseudo-merge node to it which represents our version of the code
at the point where the last merge happened. The commit we want is most
likely named "Upstream 0.xx.yy".
So, first we get a git copy of an HG clone of the ``ruamel.yaml``
repository::
# install Mercurial (depends on your distribution)
cd /your/src
mkdir -p ruyaml/git
cd ruyaml/git; git init
cd ../
hg clone http://hg.code.sf.net/p/ruamel-yaml/code hg
Next we prepare our repository for merging. We need a ``hg-fast-export``
script::
cd ..
git clone git@github.com:frej/fast-export.git
We use that script to setup our git copy::
cd ../git
../fast-export/hg-fast-export.sh -r ../hg --ignore-unnamed-heads
Now let's create a third repository for the actual work::
cd ../
git clone git@github.com:pycontribs/ruyaml.git repo
cd repo
git remote add ruamel ../git
git fetch ruamel
Create a branch for merging::
git checkout -b merge main
This concludes setting things up.
Incremental merge
=================
First, let's pull the remote changes (if any)::
cd /your/src/ruyaml/hg
hg pull
cd ../git
../fast-export/hg-fast-export.sh -r ../hg --ignore-unnamed-heads
cd ../repo
git fetch --all
git checkout merge
Next, we need a pseudo-merge that declares "we have merged all of Upstream
up to *THAT* into *THIS*", where *THIS* is the latest Merge commit in our
repository (typically named "Upstream 0.xx.yy") and *THAT* is the
corresponding commit in the Ruamel tree (it should be tagged 0.xx.yy)::
git log --date-order --all --oneline
git reset --hard THIS
git merge -s ours THAT
Now we'll "merge" the current Upstream sources::
git merge --squash ruamel/main
This will create a heap of conflicts, but no commit yet.
.. note::
The reason we do a squash-merge here is that otherwise git will
un-helpfully upload the complete history of ``ruamel.yaml`` to GitHub.
It's already there, of course, but due to the diverging git hashes that
doesn't help.
The next step, obviously, is to fix the conflicts. (There will be a bunch.)
If git complains about a deleted ``__init__.py``, the solution is to ``git
rm -f __init__.py``.
Then, commit your changes::
git commit -a -m "Merge Upstream 0.xx.yz"
git push -f origin merge
Now check GitHub. If everything is OK, congratulations; otherwise, fix and
push (no need to repeat the ``-f``).
python-ruyaml-0.92.1/_test/ 0000775 0000000 0000000 00000000000 15056754172 0015565 5 ustar 00root root 0000000 0000000 python-ruyaml-0.92.1/_test/__init__.py 0000664 0000000 0000000 00000000000 15056754172 0017664 0 ustar 00root root 0000000 0000000 python-ruyaml-0.92.1/_test/conftest.py 0000664 0000000 0000000 00000000766 15056754172 0017775 0 ustar 00root root 0000000 0000000 import pytest
# Placeholder used by all *_filename fixtures below: any test requesting one
# of them is skipped, since the data-file lookup is not implemented here.
def _fn(r, n):
    raise pytest.skip.Exception("XXX_filename fixtures are unknown")
@pytest.fixture(scope="function")
def canonical_filename(request):
return _fn(request, "canonical")
@pytest.fixture(scope="function")
def data_filename(request):
return _fn(request, "data")
@pytest.fixture(scope="function")
def detect_filename(request):
return _fn(request, "detect")
@pytest.fixture(scope="function")
def events_filename(request):
return _fn(request, "events")
python-ruyaml-0.92.1/_test/data/ 0000775 0000000 0000000 00000000000 15056754172 0016476 5 ustar 00root root 0000000 0000000 python-ruyaml-0.92.1/_test/data/a-nasty-libyaml-bug.loader-error 0000664 0000000 0000000 00000000003 15056754172 0024564 0 ustar 00root root 0000000 0000000 [ [ python-ruyaml-0.92.1/_test/data/aliases-cdumper-bug.code 0000664 0000000 0000000 00000000021 15056754172 0023154 0 ustar 00root root 0000000 0000000 [ today, today ]
python-ruyaml-0.92.1/_test/data/aliases.events 0000664 0000000 0000000 00000000273 15056754172 0021347 0 ustar 00root root 0000000 0000000 - !StreamStart
- !DocumentStart
- !SequenceStart
- !Scalar { anchor: 'myanchor', tag: '!mytag', value: 'data' }
- !Alias { anchor: 'myanchor' }
- !SequenceEnd
- !DocumentEnd
- !StreamEnd
python-ruyaml-0.92.1/_test/data/bool.data 0000664 0000000 0000000 00000000157 15056754172 0020267 0 ustar 00root root 0000000 0000000 - yes
- Yes
- YES
- no
- No
- NO
- true
- True
- TRUE
- false
- False
- FALSE
- on
- On
- ON
- off
- Off
- OFF
python-ruyaml-0.92.1/_test/data/bool.detect 0000664 0000000 0000000 00000000027 15056754172 0020622 0 ustar 00root root 0000000 0000000 tag:yaml.org,2002:bool
python-ruyaml-0.92.1/_test/data/colon-in-flow-context.loader-error 0000664 0000000 0000000 00000000014 15056754172 0025155 0 ustar 00root root 0000000 0000000 { foo:bar }
python-ruyaml-0.92.1/_test/data/comment_no_eol.data 0000664 0000000 0000000 00000000022 15056754172 0022320 0 ustar 00root root 0000000 0000000 european: 10 # abc python-ruyaml-0.92.1/_test/data/composite_key.code 0000664 0000000 0000000 00000000030 15056754172 0022175 0 ustar 00root root 0000000 0000000 {('foo', 'bar'): 'baz'}
python-ruyaml-0.92.1/_test/data/composite_key.data 0000664 0000000 0000000 00000000032 15056754172 0022176 0 ustar 00root root 0000000 0000000 ---
? - foo
- bar
: baz
python-ruyaml-0.92.1/_test/data/construct-binary-py3.code 0000664 0000000 0000000 00000002447 15056754172 0023360 0 ustar 00root root 0000000 0000000 {
"canonical":
b"GIF89a\x0c\x00\x0c\x00\x84\x00\x00\xff\xff\xf7\xf5\xf5\xee\xe9\xe9\xe5fff\x00\x00\x00\xe7\xe7\xe7^^^\xf3\xf3\xed\x8e\x8e\x8e\xe0\xe0\xe0\x9f\x9f\x9f\x93\x93\x93\xa7\xa7\xa7\x9e\x9e\x9eiiiccc\xa3\xa3\xa3\x84\x84\x84\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9!\xfe\x0eMade with GIMP\x00,\x00\x00\x00\x00\x0c\x00\x0c\x00\x00\x05, \x8e\x810\x9e\xe3@\x14\xe8i\x10\xc4\xd1\x8a\x08\x1c\xcf\x80M$z\xef\xff0\x85p\xb8\xb01f\r\x1b\xce\x01\xc3\x01\x1e\x10' \x82\n\x01\x00;",
"generic":
b"GIF89a\x0c\x00\x0c\x00\x84\x00\x00\xff\xff\xf7\xf5\xf5\xee\xe9\xe9\xe5fff\x00\x00\x00\xe7\xe7\xe7^^^\xf3\xf3\xed\x8e\x8e\x8e\xe0\xe0\xe0\x9f\x9f\x9f\x93\x93\x93\xa7\xa7\xa7\x9e\x9e\x9eiiiccc\xa3\xa3\xa3\x84\x84\x84\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9!\xfe\x0eMade with GIMP\x00,\x00\x00\x00\x00\x0c\x00\x0c\x00\x00\x05, \x8e\x810\x9e\xe3@\x14\xe8i\x10\xc4\xd1\x8a\x08\x1c\xcf\x80M$z\xef\xff0\x85p\xb8\xb01f\r\x1b\xce\x01\xc3\x01\x1e\x10' \x82\n\x01\x00;",
"description": "The binary value above is a tiny arrow encoded as a gif image.",
}
python-ruyaml-0.92.1/_test/data/construct-binary-py3.data 0000664 0000000 0000000 00000001174 15056754172 0023353 0 ustar 00root root 0000000 0000000 canonical: !!binary "\
R0lGODlhDAAMAIQAAP//9/X17unp5WZmZgAAAOfn515eXvPz7Y6OjuDg4J+fn5\
OTk6enp56enmlpaWNjY6Ojo4SEhP/++f/++f/++f/++f/++f/++f/++f/++f/+\
+f/++f/++f/++f/++f/++SH+Dk1hZGUgd2l0aCBHSU1QACwAAAAADAAMAAAFLC\
AgjoEwnuNAFOhpEMTRiggcz4BNJHrv/zCFcLiwMWYNG84BwwEeECcgggoBADs="
generic: !!binary |
R0lGODlhDAAMAIQAAP//9/X17unp5WZmZgAAAOfn515eXvPz7Y6OjuDg4J+fn5
OTk6enp56enmlpaWNjY6Ojo4SEhP/++f/++f/++f/++f/++f/++f/++f/++f/+
+f/++f/++f/++f/++f/++SH+Dk1hZGUgd2l0aCBHSU1QACwAAAAADAAMAAAFLC
AgjoEwnuNAFOhpEMTRiggcz4BNJHrv/zCFcLiwMWYNG84BwwEeECcgggoBADs=
description:
The binary value above is a tiny arrow encoded as a gif image.
python-ruyaml-0.92.1/_test/data/construct-bool.code 0000664 0000000 0000000 00000000220 15056754172 0022301 0 ustar 00root root 0000000 0000000 {
"canonical": True,
"answer": False,
"logical": True,
"option": True,
"but": { "y": "is a string", "n": "is a string" },
}
python-ruyaml-0.92.1/_test/data/construct-bool.data 0000664 0000000 0000000 00000000140 15056754172 0022301 0 ustar 00root root 0000000 0000000 canonical: yes
answer: NO
logical: True
option: on
but:
y: is a string
n: is a string
python-ruyaml-0.92.1/_test/data/construct-custom.code 0000664 0000000 0000000 00000000423 15056754172 0022665 0 ustar 00root root 0000000 0000000 [
MyTestClass1(x=1),
MyTestClass1(x=1, y=2, z=3),
MyTestClass2(x=10),
MyTestClass2(x=10, y=20, z=30),
MyTestClass3(x=1),
MyTestClass3(x=1, y=2, z=3),
MyTestClass3(x=1, y=2, z=3),
YAMLObject1(my_parameter='foo', my_another_parameter=[1,2,3])
]
python-ruyaml-0.92.1/_test/data/construct-custom.data 0000664 0000000 0000000 00000000351 15056754172 0022664 0 ustar 00root root 0000000 0000000 ---
- !tag1
x: 1
- !tag1
x: 1
'y': 2
z: 3
- !tag2
10
- !tag2
=: 10
'y': 20
z: 30
- !tag3
x: 1
- !tag3
x: 1
'y': 2
z: 3
- !tag3
=: 1
'y': 2
z: 3
- !foo
my-parameter: foo
my-another-parameter: [1,2,3]
python-ruyaml-0.92.1/_test/data/construct-float.code 0000664 0000000 0000000 00000000277 15056754172 0022467 0 ustar 00root root 0000000 0000000 {
"canonical": 685230.15,
"exponential": 685230.15,
"fixed": 685230.15,
"sexagesimal": 685230.15,
"negative infinity": -1e300000,
"not a number": 1e300000/1e300000,
}
python-ruyaml-0.92.1/_test/data/construct-float.data 0000664 0000000 0000000 00000000214 15056754172 0022455 0 ustar 00root root 0000000 0000000 canonical: 6.8523015e+5
exponential: 685.230_15e+03
fixed: 685_230.15
sexagesimal: 190:20:30.15
negative infinity: -.inf
not a number: .NaN
python-ruyaml-0.92.1/_test/data/construct-int.code 0000664 0000000 0000000 00000000225 15056754172 0022145 0 ustar 00root root 0000000 0000000 {
"canonical": 685230,
"decimal": 685230,
"octal": 685230,
"hexadecimal": 685230,
"binary": 685230,
"sexagesimal": 685230,
}
python-ruyaml-0.92.1/_test/data/construct-int.data 0000664 0000000 0000000 00000000207 15056754172 0022144 0 ustar 00root root 0000000 0000000 canonical: 685230
decimal: +685_230
octal: 02472256
hexadecimal: 0x_0A_74_AE
binary: 0b1010_0111_0100_1010_1110
sexagesimal: 190:20:30
python-ruyaml-0.92.1/_test/data/construct-map.code 0000664 0000000 0000000 00000000275 15056754172 0022135 0 ustar 00root root 0000000 0000000 {
"Block style":
{ "Clark" : "Evans", "Brian" : "Ingerson", "Oren" : "Ben-Kiki" },
"Flow style":
{ "Clark" : "Evans", "Brian" : "Ingerson", "Oren" : "Ben-Kiki" },
}
python-ruyaml-0.92.1/_test/data/construct-map.data 0000664 0000000 0000000 00000000262 15056754172 0022130 0 ustar 00root root 0000000 0000000 # Unordered set of key: value pairs.
Block style: !!map
Clark : Evans
Brian : Ingerson
Oren : Ben-Kiki
Flow style: !!map { Clark: Evans, Brian: Ingerson, Oren: Ben-Kiki }
python-ruyaml-0.92.1/_test/data/construct-merge.code 0000664 0000000 0000000 00000000465 15056754172 0022460 0 ustar 00root root 0000000 0000000 [
{ "x": 1, "y": 2 },
{ "x": 0, "y": 2 },
{ "r": 10 },
{ "r": 1 },
{ "x": 1, "y": 2, "r": 10, "label": "center/big" },
{ "x": 1, "y": 2, "r": 10, "label": "center/big" },
{ "x": 1, "y": 2, "r": 10, "label": "center/big" },
{ "x": 1, "y": 2, "r": 10, "label": "center/big" },
]
python-ruyaml-0.92.1/_test/data/construct-merge.data 0000664 0000000 0000000 00000000613 15056754172 0022452 0 ustar 00root root 0000000 0000000 ---
- &CENTER { x: 1, 'y': 2 }
- &LEFT { x: 0, 'y': 2 }
- &BIG { r: 10 }
- &SMALL { r: 1 }
# All the following maps are equal:
- # Explicit keys
x: 1
'y': 2
r: 10
label: center/big
- # Merge one map
<< : *CENTER
r: 10
label: center/big
- # Merge multiple maps
<< : [ *CENTER, *BIG ]
label: center/big
- # Override
<< : [ *BIG, *LEFT, *SMALL ]
x: 1
label: center/big
python-ruyaml-0.92.1/_test/data/construct-null.code 0000664 0000000 0000000 00000000357 15056754172 0022333 0 ustar 00root root 0000000 0000000 [
None,
{ "empty": None, "canonical": None, "english": None, None: "null key" },
{
"sparse": [
None,
"2nd entry",
None,
"4th entry",
None,
],
},
]
python-ruyaml-0.92.1/_test/data/construct-null.data 0000664 0000000 0000000 00000000361 15056754172 0022325 0 ustar 00root root 0000000 0000000 # A document may be null.
---
---
# This mapping has four keys,
# one has a value.
empty:
canonical: ~
english: null
~: null key
---
# This sequence has five
# entries, two have values.
sparse:
- ~
- 2nd entry
-
- 4th entry
- Null
python-ruyaml-0.92.1/_test/data/construct-omap.code 0000664 0000000 0000000 00000000471 15056754172 0022312 0 ustar 00root root 0000000 0000000 {
"Bestiary": ordereddict([
("aardvark", "African pig-like ant eater. Ugly."),
("anteater", "South-American ant eater. Two species."),
("anaconda", "South-American constrictor snake. Scaly."),
]),
"Numbers": ordereddict([ ("one", 4), ("one", 1), ("two", 2), ("three", 3) ]),
}
python-ruyaml-0.92.1/_test/data/construct-omap.data 0000664 0000000 0000000 00000000436 15056754172 0022312 0 ustar 00root root 0000000 0000000 # Explicitly typed ordered map (dictionary).
Bestiary: !!omap
- aardvark: African pig-like ant eater. Ugly.
- anteater: South-American ant eater. Two species.
- anaconda: South-American constrictor snake. Scaly.
# Etc.
# Flow style
Numbers: !!omap [ one: 1, two: 2, three : 3 ]
python-ruyaml-0.92.1/_test/data/construct-pairs.code 0000664 0000000 0000000 00000000362 15056754172 0022473 0 ustar 00root root 0000000 0000000 {
"Block tasks": [
("meeting", "with team."),
("meeting", "with boss."),
("break", "lunch."),
("meeting", "with client."),
],
"Flow tasks": [ ("meeting", "with team"), ("meeting", "with boss") ],
}
python-ruyaml-0.92.1/_test/data/construct-pairs.data 0000664 0000000 0000000 00000000312 15056754172 0022465 0 ustar 00root root 0000000 0000000 # Explicitly typed pairs.
Block tasks: !!pairs
- meeting: with team.
- meeting: with boss.
- break: lunch.
- meeting: with client.
Flow tasks: !!pairs [ meeting: with team, meeting: with boss ]
python-ruyaml-0.92.1/_test/data/construct-python-bool.code 0000664 0000000 0000000 00000000020 15056754172 0023616 0 ustar 00root root 0000000 0000000 [ True, False ]
python-ruyaml-0.92.1/_test/data/construct-python-bool.data 0000664 0000000 0000000 00000000054 15056754172 0023624 0 ustar 00root root 0000000 0000000 [ !!python/bool True, !!python/bool False ]
python-ruyaml-0.92.1/_test/data/construct-python-bytes-py3.code 0000664 0000000 0000000 00000000024 15056754172 0024526 0 ustar 00root root 0000000 0000000 b'some binary data'
python-ruyaml-0.92.1/_test/data/construct-python-bytes-py3.data 0000664 0000000 0000000 00000000056 15056754172 0024532 0 ustar 00root root 0000000 0000000 --- !!python/bytes 'c29tZSBiaW5hcnkgZGF0YQ=='
python-ruyaml-0.92.1/_test/data/construct-python-complex.code 0000664 0000000 0000000 00000000111 15056754172 0024333 0 ustar 00root root 0000000 0000000 [0.5+0j, 0.5+0.5j, 0.5j, -0.5+0.5j, -0.5+0j, -0.5-0.5j, -0.5j, 0.5-0.5j]
python-ruyaml-0.92.1/_test/data/construct-python-complex.data 0000664 0000000 0000000 00000000330 15056754172 0024335 0 ustar 00root root 0000000 0000000 - !!python/complex 0.5+0j
- !!python/complex 0.5+0.5j
- !!python/complex 0.5j
- !!python/complex -0.5+0.5j
- !!python/complex -0.5+0j
- !!python/complex -0.5-0.5j
- !!python/complex -0.5j
- !!python/complex 0.5-0.5j
python-ruyaml-0.92.1/_test/data/construct-python-float.code 0000664 0000000 0000000 00000000010 15056754172 0023767 0 ustar 00root root 0000000 0000000 123.456
python-ruyaml-0.92.1/_test/data/construct-python-float.data 0000664 0000000 0000000 00000000027 15056754172 0023776 0 ustar 00root root 0000000 0000000 !!python/float 123.456
python-ruyaml-0.92.1/_test/data/construct-python-int.code 0000664 0000000 0000000 00000000004 15056754172 0023457 0 ustar 00root root 0000000 0000000 123
python-ruyaml-0.92.1/_test/data/construct-python-int.data 0000664 0000000 0000000 00000000021 15056754172 0023455 0 ustar 00root root 0000000 0000000 !!python/int 123
python-ruyaml-0.92.1/_test/data/construct-python-long-short-py3.code 0000664 0000000 0000000 00000000004 15056754172 0025472 0 ustar 00root root 0000000 0000000 123
python-ruyaml-0.92.1/_test/data/construct-python-long-short-py3.data 0000664 0000000 0000000 00000000022 15056754172 0025471 0 ustar 00root root 0000000 0000000 !!python/long 123
python-ruyaml-0.92.1/_test/data/construct-python-name-module.code 0000664 0000000 0000000 00000000060 15056754172 0025072 0 ustar 00root root 0000000 0000000 [str, yaml.Loader, yaml.dump, abs, yaml.tokens]
python-ruyaml-0.92.1/_test/data/construct-python-name-module.data 0000664 0000000 0000000 00000000174 15056754172 0025077 0 ustar 00root root 0000000 0000000 - !!python/name:str
- !!python/name:yaml.Loader
- !!python/name:yaml.dump
- !!python/name:abs
- !!python/module:yaml.tokens
python-ruyaml-0.92.1/_test/data/construct-python-none.code 0000664 0000000 0000000 00000000005 15056754172 0023625 0 ustar 00root root 0000000 0000000 None
python-ruyaml-0.92.1/_test/data/construct-python-none.data 0000664 0000000 0000000 00000000016 15056754172 0023626 0 ustar 00root root 0000000 0000000 !!python/none
python-ruyaml-0.92.1/_test/data/construct-python-object.code 0000664 0000000 0000000 00000000641 15056754172 0024142 0 ustar 00root root 0000000 0000000 [
AnObject(1, 'two', [3,3,3]),
AnInstance(1, 'two', [3,3,3]),
AnObject(1, 'two', [3,3,3]),
AnInstance(1, 'two', [3,3,3]),
AState(1, 'two', [3,3,3]),
ACustomState(1, 'two', [3,3,3]),
InitArgs(1, 'two', [3,3,3]),
InitArgsWithState(1, 'two', [3,3,3]),
NewArgs(1, 'two', [3,3,3]),
NewArgsWithState(1, 'two', [3,3,3]),
Reduce(1, 'two', [3,3,3]),
ReduceWithState(1, 'two', [3,3,3]),
MyInt(3),
MyList(3),
MyDict(3),
]
python-ruyaml-0.92.1/_test/data/construct-python-object.data 0000664 0000000 0000000 00000002243 15056754172 0024141 0 ustar 00root root 0000000 0000000 - !!python/object:test_constructor.AnObject { foo: 1, bar: two, baz: [3,3,3] }
- !!python/object:test_constructor.AnInstance { foo: 1, bar: two, baz: [3,3,3] }
- !!python/object/new:test_constructor.AnObject { args: [1, two], kwds: {baz: [3,3,3]} }
- !!python/object/apply:test_constructor.AnInstance { args: [1, two], kwds: {baz: [3,3,3]} }
- !!python/object:test_constructor.AState { _foo: 1, _bar: two, _baz: [3,3,3] }
- !!python/object/new:test_constructor.ACustomState { state: !!python/tuple [1, two, [3,3,3]] }
- !!python/object/new:test_constructor.InitArgs [1, two, [3,3,3]]
- !!python/object/new:test_constructor.InitArgsWithState { args: [1, two], state: [3,3,3] }
- !!python/object/new:test_constructor.NewArgs [1, two, [3,3,3]]
- !!python/object/new:test_constructor.NewArgsWithState { args: [1, two], state: [3,3,3] }
- !!python/object/apply:test_constructor.Reduce [1, two, [3,3,3]]
- !!python/object/apply:test_constructor.ReduceWithState { args: [1, two], state: [3,3,3] }
- !!python/object/new:test_constructor.MyInt [3]
- !!python/object/new:test_constructor.MyList { listitems: [~, ~, ~] }
- !!python/object/new:test_constructor.MyDict { dictitems: {0, 1, 2} }
python-ruyaml-0.92.1/_test/data/construct-python-str-ascii.code 0000664 0000000 0000000 00000000017 15056754172 0024567 0 ustar 00root root 0000000 0000000 "ascii string"
python-ruyaml-0.92.1/_test/data/construct-python-str-ascii.data 0000664 0000000 0000000 00000000040 15056754172 0024562 0 ustar 00root root 0000000 0000000 --- !!python/str "ascii string"
python-ruyaml-0.92.1/_test/data/construct-python-str-utf8-py2.code 0000664 0000000 0000000 00000000201 15056754172 0025070 0 ustar 00root root 0000000 0000000 '\u042d\u0442\u043e \u0443\u043d\u0438\u043a\u043e\u0434\u043d\u0430\u044f \u0441\u0442\u0440\u043e\u043a\u0430'.encode('utf-8')
python-ruyaml-0.92.1/_test/data/construct-python-str-utf8-py3.code 0000664 0000000 0000000 00000000161 15056754172 0025076 0 ustar 00root root 0000000 0000000 '\u042d\u0442\u043e \u0443\u043d\u0438\u043a\u043e\u0434\u043d\u0430\u044f \u0441\u0442\u0440\u043e\u043a\u0430'
python-ruyaml-0.92.1/_test/data/construct-python-str-utf8-py3.data 0000664 0000000 0000000 00000000072 15056754172 0025076 0 ustar 00root root 0000000 0000000 --- !!python/str "Это уникодная строка"
python-ruyaml-0.92.1/_test/data/construct-python-tuple-list-dict.code 0000664 0000000 0000000 00000000150 15056754172 0025712 0 ustar 00root root 0000000 0000000 [
[1, 2, 3, 4],
(1, 2, 3, 4),
{1: 2, 3: 4},
{(0,0): 0, (0,1): 1, (1,0): 1, (1,1): 0},
]
python-ruyaml-0.92.1/_test/data/construct-python-tuple-list-dict.data 0000664 0000000 0000000 00000000330 15056754172 0025711 0 ustar 00root root 0000000 0000000 - !!python/list [1, 2, 3, 4]
- !!python/tuple [1, 2, 3, 4]
- !!python/dict {1: 2, 3: 4}
- !!python/dict
!!python/tuple [0,0]: 0
!!python/tuple [0,1]: 1
!!python/tuple [1,0]: 1
!!python/tuple [1,1]: 0
python-ruyaml-0.92.1/_test/data/construct-python-unicode-ascii-py3.code 0000664 0000000 0000000 00000000017 15056754172 0026116 0 ustar 00root root 0000000 0000000 "ascii string"
python-ruyaml-0.92.1/_test/data/construct-python-unicode-ascii-py3.data 0000664 0000000 0000000 00000000044 15056754172 0026115 0 ustar 00root root 0000000 0000000 --- !!python/unicode "ascii string"
python-ruyaml-0.92.1/_test/data/construct-python-unicode-utf8-py2.code 0000664 0000000 0000000 00000000161 15056754172 0025713 0 ustar 00root root 0000000 0000000 '\u042d\u0442\u043e \u0443\u043d\u0438\u043a\u043e\u0434\u043d\u0430\u044f \u0441\u0442\u0440\u043e\u043a\u0430'
python-ruyaml-0.92.1/_test/data/construct-python-unicode-utf8-py3.code 0000664 0000000 0000000 00000000161 15056754172 0025714 0 ustar 00root root 0000000 0000000 '\u042d\u0442\u043e \u0443\u043d\u0438\u043a\u043e\u0434\u043d\u0430\u044f \u0441\u0442\u0440\u043e\u043a\u0430'
python-ruyaml-0.92.1/_test/data/construct-python-unicode-utf8-py3.data 0000664 0000000 0000000 00000000076 15056754172 0025720 0 ustar 00root root 0000000 0000000 --- !!python/unicode "Это уникодная строка"
python-ruyaml-0.92.1/_test/data/construct-seq.code 0000664 0000000 0000000 00000000335 15056754172 0022145 0 ustar 00root root 0000000 0000000 {
"Block style": ["Mercury", "Venus", "Earth", "Mars", "Jupiter", "Saturn", "Uranus", "Neptune", "Pluto"],
"Flow style": ["Mercury", "Venus", "Earth", "Mars", "Jupiter", "Saturn", "Uranus", "Neptune", "Pluto"],
}
python-ruyaml-0.92.1/_test/data/construct-seq.data 0000664 0000000 0000000 00000001024 15056754172 0022140 0 ustar 00root root 0000000 0000000 # Ordered sequence of nodes
Block style: !!seq
- Mercury # Rotates - no light/dark sides.
- Venus # Deadliest. Aptly named.
- Earth # Mostly dirt.
- Mars # Seems empty.
- Jupiter # The king.
- Saturn # Pretty.
- Uranus # Where the sun hardly shines.
- Neptune # Boring. No rings.
- Pluto # You call this a planet?
Flow style: !!seq [ Mercury, Venus, Earth, Mars, # Rocks
Jupiter, Saturn, Uranus, Neptune, # Gas
Pluto ] # Overrated
python-ruyaml-0.92.1/_test/data/construct-set.code 0000664 0000000 0000000 00000000245 15056754172 0022150 0 ustar 00root root 0000000 0000000 {
"baseball players": set(["Mark McGwire", "Sammy Sosa", "Ken Griffey"]),
"baseball teams": set(["Boston Red Sox", "Detroit Tigers", "New York Yankees"]),
}
python-ruyaml-0.92.1/_test/data/construct-set.data 0000664 0000000 0000000 00000000270 15056754172 0022145 0 ustar 00root root 0000000 0000000 # Explicitly typed set.
baseball players: !!set
? Mark McGwire
? Sammy Sosa
? Ken Griffey
# Flow style
baseball teams: !!set { Boston Red Sox, Detroit Tigers, New York Yankees }
python-ruyaml-0.92.1/_test/data/construct-str-ascii.code 0000664 0000000 0000000 00000000017 15056754172 0023250 0 ustar 00root root 0000000 0000000 "ascii string"
python-ruyaml-0.92.1/_test/data/construct-str-ascii.data 0000664 0000000 0000000 00000000031 15056754172 0023243 0 ustar 00root root 0000000 0000000 --- !!str "ascii string"
python-ruyaml-0.92.1/_test/data/construct-str-utf8-py2.code 0000664 0000000 0000000 00000000161 15056754172 0023556 0 ustar 00root root 0000000 0000000 '\u042d\u0442\u043e \u0443\u043d\u0438\u043a\u043e\u0434\u043d\u0430\u044f \u0441\u0442\u0440\u043e\u043a\u0430'
python-ruyaml-0.92.1/_test/data/construct-str-utf8-py3.code 0000664 0000000 0000000 00000000161 15056754172 0023557 0 ustar 00root root 0000000 0000000 '\u042d\u0442\u043e \u0443\u043d\u0438\u043a\u043e\u0434\u043d\u0430\u044f \u0441\u0442\u0440\u043e\u043a\u0430'
python-ruyaml-0.92.1/_test/data/construct-str-utf8-py3.data 0000664 0000000 0000000 00000000063 15056754172 0023557 0 ustar 00root root 0000000 0000000 --- !!str "Это уникодная строка"
python-ruyaml-0.92.1/_test/data/construct-str.code 0000664 0000000 0000000 00000000025 15056754172 0022161 0 ustar 00root root 0000000 0000000 { "string": "abcd" }
python-ruyaml-0.92.1/_test/data/construct-str.data 0000664 0000000 0000000 00000000015 15056754172 0022157 0 ustar 00root root 0000000 0000000 string: abcd
python-ruyaml-0.92.1/_test/data/construct-timestamp.code 0000664 0000000 0000000 00000000536 15056754172 0023363 0 ustar 00root root 0000000 0000000 {
"canonical": datetime.datetime(2001, 12, 15, 2, 59, 43, 100000),
"valid iso8601": datetime.datetime(2001, 12, 15, 2, 59, 43, 100000),
"space separated": datetime.datetime(2001, 12, 15, 2, 59, 43, 100000),
"no time zone (Z)": datetime.datetime(2001, 12, 15, 2, 59, 43, 100000),
"date (00:00:00Z)": datetime.date(2002, 12, 14),
}
python-ruyaml-0.92.1/_test/data/construct-timestamp.data 0000664 0000000 0000000 00000000311 15056754172 0023351 0 ustar 00root root 0000000 0000000 canonical: 2001-12-15T02:59:43.1Z
valid iso8601: 2001-12-14t21:59:43.10-05:00
space separated: 2001-12-14 21:59:43.10 -5
no time zone (Z): 2001-12-15 2:59:43.10
date (00:00:00Z): 2002-12-14
python-ruyaml-0.92.1/_test/data/construct-value.code 0000664 0000000 0000000 00000000326 15056754172 0022471 0 ustar 00root root 0000000 0000000 [
{ "link with": [ "library1.dll", "library2.dll" ] },
{
"link with": [
{ "=": "library1.dll", "version": 1.2 },
{ "=": "library2.dll", "version": 2.3 },
],
},
]
python-ruyaml-0.92.1/_test/data/construct-value.data 0000664 0000000 0000000 00000000256 15056754172 0022472 0 ustar 00root root 0000000 0000000 --- # Old schema
link with:
- library1.dll
- library2.dll
--- # New schema
link with:
- = : library1.dll
version: 1.2
- = : library2.dll
version: 2.3
python-ruyaml-0.92.1/_test/data/document-separator-in-quoted-scalar.loader-error 0000664 0000000 0000000 00000000165 15056754172 0030001 0 ustar 00root root 0000000 0000000 ---
"this --- is correct"
---
"this
...is also
correct"
---
"a quoted scalar
cannot contain
---
document separators"
python-ruyaml-0.92.1/_test/data/documents.events 0000664 0000000 0000000 00000000555 15056754172 0021732 0 ustar 00root root 0000000 0000000 - !StreamStart
- !DocumentStart { explicit: false }
- !Scalar { implicit: [true,false], value: 'data' }
- !DocumentEnd
- !DocumentStart
- !Scalar { implicit: [true,false] }
- !DocumentEnd
- !DocumentStart { version: [1,1], tags: { '!': '!foo', '!yaml!': 'tag:yaml.org,2002:', '!ugly!': '!!!!!!!' } }
- !Scalar { implicit: [true,false] }
- !DocumentEnd
- !StreamEnd
python-ruyaml-0.92.1/_test/data/duplicate-anchor-1.loader-warning 0000664 0000000 0000000 00000000041 15056754172 0024704 0 ustar 00root root 0000000 0000000 - &foo bar
- &bar bar
- &foo bar
python-ruyaml-0.92.1/_test/data/duplicate-anchor-2.loader-warning 0000664 0000000 0000000 00000000027 15056754172 0024711 0 ustar 00root root 0000000 0000000 &foo [1, 2, 3, &foo 4]
python-ruyaml-0.92.1/_test/data/duplicate-merge-key.former-loader-error.code 0000664 0000000 0000000 00000000061 15056754172 0027050 0 ustar 00root root 0000000 0000000 { 'x': 1, 'y': 2, 'foo': 'bar', 'z': 3, 't': 4 }
python-ruyaml-0.92.1/_test/data/duplicate-tag-directive.loader-error 0000664 0000000 0000000 00000000060 15056754172 0025510 0 ustar 00root root 0000000 0000000 %TAG !foo! bar
%TAG !foo! baz
--- foo
python-ruyaml-0.92.1/_test/data/duplicate-yaml-directive.loader-error 0000664 0000000 0000000 00000000040 15056754172 0025675 0 ustar 00root root 0000000 0000000 %YAML 1.1
%YAML 1.1
--- foo
python-ruyaml-0.92.1/_test/data/emit-block-scalar-in-simple-key-context-bug.canonical 0000664 0000000 0000000 00000000070 15056754172 0030553 0 ustar 00root root 0000000 0000000 %YAML 1.1
--- !!map
{
? !!str "foo"
: !!str "bar"
}
python-ruyaml-0.92.1/_test/data/emit-block-scalar-in-simple-key-context-bug.data 0000664 0000000 0000000 00000000026 15056754172 0027536 0 ustar 00root root 0000000 0000000 ? |-
foo
: |-
bar
python-ruyaml-0.92.1/_test/data/emitting-unacceptable-unicode-character-bug-py3.code 0000664 0000000 0000000 00000000011 15056754172 0030430 0 ustar 00root root 0000000 0000000 "\udd00"
python-ruyaml-0.92.1/_test/data/emitting-unacceptable-unicode-character-bug-py3.data 0000664 0000000 0000000 00000000011 15056754172 0030427 0 ustar 00root root 0000000 0000000 "\udd00"
python-ruyaml-0.92.1/_test/data/emitting-unacceptable-unicode-character-bug-py3.skip-ext 0000664 0000000 0000000 00000000000 15056754172 0031260 0 ustar 00root root 0000000 0000000 python-ruyaml-0.92.1/_test/data/empty-anchor.emitter-error 0000664 0000000 0000000 00000000143 15056754172 0023624 0 ustar 00root root 0000000 0000000 - !StreamStart
- !DocumentStart
- !Scalar { anchor: '', value: 'foo' }
- !DocumentEnd
- !StreamEnd
python-ruyaml-0.92.1/_test/data/empty-document-bug.canonical 0000664 0000000 0000000 00000000057 15056754172 0024076 0 ustar 00root root 0000000 0000000 # This YAML stream contains no YAML documents.
python-ruyaml-0.92.1/_test/data/empty-document-bug.data 0000664 0000000 0000000 00000000000 15056754172 0023044 0 ustar 00root root 0000000 0000000 python-ruyaml-0.92.1/_test/data/empty-document-bug.empty 0000664 0000000 0000000 00000000000 15056754172 0023271 0 ustar 00root root 0000000 0000000 python-ruyaml-0.92.1/_test/data/empty-documents.single-loader-error 0000664 0000000 0000000 00000000053 15056754172 0025427 0 ustar 00root root 0000000 0000000 --- # first document
--- # second document
python-ruyaml-0.92.1/_test/data/empty-python-module.loader-error 0000664 0000000 0000000 00000000025 15056754172 0024752 0 ustar 00root root 0000000 0000000 --- !!python:module:
python-ruyaml-0.92.1/_test/data/empty-python-name.loader-error 0000664 0000000 0000000 00000000031 15056754172 0024402 0 ustar 00root root 0000000 0000000 --- !!python/name: empty
python-ruyaml-0.92.1/_test/data/empty-tag-handle.emitter-error 0000664 0000000 0000000 00000000157 15056754172 0024363 0 ustar 00root root 0000000 0000000 - !StreamStart
- !DocumentStart { tags: { '': 'bar' } }
- !Scalar { value: 'foo' }
- !DocumentEnd
- !StreamEnd
python-ruyaml-0.92.1/_test/data/empty-tag-prefix.emitter-error 0000664 0000000 0000000 00000000155 15056754172 0024423 0 ustar 00root root 0000000 0000000 - !StreamStart
- !DocumentStart { tags: { '!': '' } }
- !Scalar { value: 'foo' }
- !DocumentEnd
- !StreamEnd
python-ruyaml-0.92.1/_test/data/empty-tag.emitter-error 0000664 0000000 0000000 00000000171 15056754172 0023126 0 ustar 00root root 0000000 0000000 - !StreamStart
- !DocumentStart
- !Scalar { tag: '', value: 'key', implicit: [false,false] }
- !DocumentEnd
- !StreamEnd
python-ruyaml-0.92.1/_test/data/expected-document-end.emitter-error 0000664 0000000 0000000 00000000170 15056754172 0025377 0 ustar 00root root 0000000 0000000 - !StreamStart
- !DocumentStart
- !Scalar { value: 'data 1' }
- !Scalar { value: 'data 2' }
- !DocumentEnd
- !StreamEnd
python-ruyaml-0.92.1/_test/data/expected-document-start.emitter-error 0000664 0000000 0000000 00000000072 15056754172 0025767 0 ustar 00root root 0000000 0000000 - !StreamStart
- !MappingStart
- !MappingEnd
- !StreamEnd
python-ruyaml-0.92.1/_test/data/expected-mapping.loader-error 0000664 0000000 0000000 00000000030 15056754172 0024240 0 ustar 00root root 0000000 0000000 --- !!map [not, a, map]
python-ruyaml-0.92.1/_test/data/expected-node-1.emitter-error 0000664 0000000 0000000 00000000074 15056754172 0024103 0 ustar 00root root 0000000 0000000 - !StreamStart
- !DocumentStart
- !DocumentEnd
- !StreamEnd
python-ruyaml-0.92.1/_test/data/expected-node-2.emitter-error 0000664 0000000 0000000 00000000165 15056754172 0024105 0 ustar 00root root 0000000 0000000 - !StreamStart
- !DocumentStart
- !MappingStart
- !Scalar { value: 'key' }
- !MappingEnd
- !DocumentEnd
- !StreamEnd
python-ruyaml-0.92.1/_test/data/expected-nothing.emitter-error 0000664 0000000 0000000 00000000070 15056754172 0024462 0 ustar 00root root 0000000 0000000 - !StreamStart
- !StreamEnd
- !StreamStart
- !StreamEnd
python-ruyaml-0.92.1/_test/data/expected-scalar.loader-error 0000664 0000000 0000000 00000000031 15056754172 0024053 0 ustar 00root root 0000000 0000000 --- !!str [not a scalar]
python-ruyaml-0.92.1/_test/data/expected-sequence.loader-error 0000664 0000000 0000000 00000000032 15056754172 0024417 0 ustar 00root root 0000000 0000000 --- !!seq {foo, bar, baz}
python-ruyaml-0.92.1/_test/data/expected-stream-start.emitter-error 0000664 0000000 0000000 00000000040 15056754172 0025437 0 ustar 00root root 0000000 0000000 - !DocumentStart
- !DocumentEnd
python-ruyaml-0.92.1/_test/data/explicit-document.single-loader-error 0000664 0000000 0000000 00000000032 15056754172 0025724 0 ustar 00root root 0000000 0000000 ---
foo: bar
---
foo: bar
python-ruyaml-0.92.1/_test/data/fetch-complex-value-bug.loader-error 0000664 0000000 0000000 00000000021 15056754172 0025431 0 ustar 00root root 0000000 0000000 ? "foo"
: "bar"
python-ruyaml-0.92.1/_test/data/float-representer-2.3-bug.code 0000664 0000000 0000000 00000000140 15056754172 0024041 0 ustar 00root root 0000000 0000000 {
# 0.0: 0,
1.0: 1,
1e300000: +10,
-1e300000: -10,
1e300000/1e300000: 100,
}
python-ruyaml-0.92.1/_test/data/float-representer-2.3-bug.data 0000664 0000000 0000000 00000000140 15056754172 0024040 0 ustar 00root root 0000000 0000000 #0.0: # hash(0) == hash(nan) and 0 == nan in Python 2.3
1.0: 1
+.inf: 10
-.inf: -10
.nan: 100
python-ruyaml-0.92.1/_test/data/float.data 0000664 0000000 0000000 00000000113 15056754172 0020431 0 ustar 00root root 0000000 0000000 - 6.8523015e+5
- 685.230_15e+03
- 685_230.15
- 190:20:30.15
- -.inf
- .NaN
python-ruyaml-0.92.1/_test/data/float.detect 0000664 0000000 0000000 00000000030 15056754172 0020766 0 ustar 00root root 0000000 0000000 tag:yaml.org,2002:float
python-ruyaml-0.92.1/_test/data/forbidden-entry.loader-error 0000664 0000000 0000000 00000000030 15056754172 0024101 0 ustar 00root root 0000000 0000000 test: - foo
- bar
python-ruyaml-0.92.1/_test/data/forbidden-key.loader-error 0000664 0000000 0000000 00000000030 15056754172 0023530 0 ustar 00root root 0000000 0000000 test: ? foo
: bar
python-ruyaml-0.92.1/_test/data/forbidden-value.loader-error 0000664 0000000 0000000 00000000021 15056754172 0024054 0 ustar 00root root 0000000 0000000 test: key: value
python-ruyaml-0.92.1/_test/data/implicit-document.single-loader-error 0000664 0000000 0000000 00000000026 15056754172 0025720 0 ustar 00root root 0000000 0000000 foo: bar
---
foo: bar
python-ruyaml-0.92.1/_test/data/int.data 0000664 0000000 0000000 00000000142 15056754172 0020120 0 ustar 00root root 0000000 0000000 - 685230
- +685_230
- 02472256
- 0o2472256
- 0x_0A_74_AE
- 0b1010_0111_0100_1010_1110
- 190:20:30
python-ruyaml-0.92.1/_test/data/int.detect 0000664 0000000 0000000 00000000026 15056754172 0020460 0 ustar 00root root 0000000 0000000 tag:yaml.org,2002:int
python-ruyaml-0.92.1/_test/data/invalid-anchor-1.loader-error 0000664 0000000 0000000 00000000112 15056754172 0024043 0 ustar 00root root 0000000 0000000 --- &? foo # we allow only ascii and numeric characters in anchor names.
python-ruyaml-0.92.1/_test/data/invalid-anchor-2.loader-error 0000664 0000000 0000000 00000000177 15056754172 0024057 0 ustar 00root root 0000000 0000000 ---
- [
&correct foo,
*correct,
*correct] # still correct
- *correct: still correct
- &correct-or-not[foo, bar]
python-ruyaml-0.92.1/_test/data/invalid-anchor.emitter-error 0000664 0000000 0000000 00000000151 15056754172 0024113 0 ustar 00root root 0000000 0000000 - !StreamStart
- !DocumentStart
- !Scalar { anchor: '5*5=25', value: 'foo' }
- !DocumentEnd
- !StreamEnd
python-ruyaml-0.92.1/_test/data/invalid-base64-data-2.loader-error 0000664 0000000 0000000 00000000071 15056754172 0024571 0 ustar 00root root 0000000 0000000 --- !!binary
двоичные данные в base64
python-ruyaml-0.92.1/_test/data/invalid-base64-data.loader-error 0000664 0000000 0000000 00000000077 15056754172 0024440 0 ustar 00root root 0000000 0000000 --- !!binary
binary data encoded in base64 should be here.
python-ruyaml-0.92.1/_test/data/invalid-block-scalar-indicator.loader-error 0000664 0000000 0000000 00000000046 15056754172 0026750 0 ustar 00root root 0000000 0000000 --- > what is this? # a comment
data
python-ruyaml-0.92.1/_test/data/invalid-character.loader-error 0000664 0000000 0000000 00000004241 15056754172 0024376 0 ustar 00root root 0000000 0000000 -------------------------------------------------------------------------------------------------------------------------------
-------------------------------------------------------------------------------------------------------------------------------
-------------------------------------------------------------------------------------------------------------------------------
-------------------------------------------------------------------------------------------------------------------------------
-------------------------------------------------------------------------------------------------------------------------------
-------------------------------------------------------------------------------------------------------------------------------
-------------------------------------------------------------------------------------------------------------------------------
-------------------------------------------------------------------------------------------------------------------------------
-------------------------------------------------------------------------------------------------------------------------------
-------------------------------------------------------------------------------------------------------------------------------
-------------------------------------------------------------------------------------------------------------------------------
-------------------------------------------------------------------------------------------------------------------------------
-------------------------------------------------------------------------------------------------------------------------------
-------------------------------------------------------------------------------------------------------------------------------
-------------------------------------------------------------------------------------------------------------------------------
-------------------------------------------------------------------------------------------------------------------------------
Control character ('\x0'): <--
-------------------------------------------------------------------------------------------------------------------------------
python-ruyaml-0.92.1/_test/data/invalid-character.stream-error 0000664 0000000 0000000 00000010141 15056754172 0024417 0 ustar 00root root 0000000 0000000 ###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
Control character ('\x0'): <--
###############################################################
python-ruyaml-0.92.1/_test/data/invalid-directive-line.loader-error 0000664 0000000 0000000 00000000043 15056754172 0025341 0 ustar 00root root 0000000 0000000 %YAML 1.1 ? # extra symbol
---
python-ruyaml-0.92.1/_test/data/invalid-directive-name-1.loader-error 0000664 0000000 0000000 00000000031 15056754172 0025465 0 ustar 00root root 0000000 0000000 % # no name at all
---
python-ruyaml-0.92.1/_test/data/invalid-directive-name-2.loader-error 0000664 0000000 0000000 00000000052 15056754172 0025471 0 ustar 00root root 0000000 0000000 %invalid-characters:in-directive name
---
python-ruyaml-0.92.1/_test/data/invalid-escape-character.loader-error 0000664 0000000 0000000 00000000102 15056754172 0025624 0 ustar 00root root 0000000 0000000 "some escape characters are \ncorrect, but this one \?\nis not\n"
python-ruyaml-0.92.1/_test/data/invalid-escape-numbers.loader-error 0000664 0000000 0000000 00000000020 15056754172 0025342 0 ustar 00root root 0000000 0000000 "hm.... \u123?"
python-ruyaml-0.92.1/_test/data/invalid-indentation-indicator-1.loader-error 0000664 0000000 0000000 00000000031 15056754172 0027057 0 ustar 00root root 0000000 0000000 --- >0 # not valid
data
python-ruyaml-0.92.1/_test/data/invalid-indentation-indicator-2.loader-error 0000664 0000000 0000000 00000000015 15056754172 0027062 0 ustar 00root root 0000000 0000000 --- >-0
data
python-ruyaml-0.92.1/_test/data/invalid-item-without-trailing-break.loader-error 0000664 0000000 0000000 00000000004 15056754172 0027763 0 ustar 00root root 0000000 0000000 -
-0 python-ruyaml-0.92.1/_test/data/invalid-merge-1.loader-error 0000664 0000000 0000000 00000000021 15056754172 0023667 0 ustar 00root root 0000000 0000000 foo: bar
<<: baz
python-ruyaml-0.92.1/_test/data/invalid-merge-2.loader-error 0000664 0000000 0000000 00000000043 15056754172 0023674 0 ustar 00root root 0000000 0000000 foo: bar
<<: [x: 1, y: 2, z, t: 4]
python-ruyaml-0.92.1/_test/data/invalid-omap-1.loader-error 0000664 0000000 0000000 00000000035 15056754172 0023531 0 ustar 00root root 0000000 0000000 --- !!omap
foo: bar
baz: bat
python-ruyaml-0.92.1/_test/data/invalid-omap-2.loader-error 0000664 0000000 0000000 00000000034 15056754172 0023531 0 ustar 00root root 0000000 0000000 --- !!omap
- foo: bar
- baz
python-ruyaml-0.92.1/_test/data/invalid-omap-3.loader-error 0000664 0000000 0000000 00000000054 15056754172 0023534 0 ustar 00root root 0000000 0000000 --- !!omap
- foo: bar
- baz: bar
bar: bar
python-ruyaml-0.92.1/_test/data/invalid-pairs-1.loader-error 0000664 0000000 0000000 00000000036 15056754172 0023714 0 ustar 00root root 0000000 0000000 --- !!pairs
foo: bar
baz: bat
python-ruyaml-0.92.1/_test/data/invalid-pairs-2.loader-error 0000664 0000000 0000000 00000000035 15056754172 0023714 0 ustar 00root root 0000000 0000000 --- !!pairs
- foo: bar
- baz
python-ruyaml-0.92.1/_test/data/invalid-pairs-3.loader-error 0000664 0000000 0000000 00000000055 15056754172 0023717 0 ustar 00root root 0000000 0000000 --- !!pairs
- foo: bar
- baz: bar
bar: bar
python-ruyaml-0.92.1/_test/data/invalid-python-bytes-2-py3.loader-error 0000664 0000000 0000000 00000000077 15056754172 0025762 0 ustar 00root root 0000000 0000000 --- !!python/bytes
двоичные данные в base64
python-ruyaml-0.92.1/_test/data/invalid-python-bytes-py3.loader-error 0000664 0000000 0000000 00000000105 15056754172 0025613 0 ustar 00root root 0000000 0000000 --- !!python/bytes
binary data encoded in base64 should be here.
python-ruyaml-0.92.1/_test/data/invalid-python-module-kind.loader-error 0000664 0000000 0000000 00000000055 15056754172 0026170 0 ustar 00root root 0000000 0000000 --- !!python/module:sys { must, be, scalar }
python-ruyaml-0.92.1/_test/data/invalid-python-module-value.loader-error 0000664 0000000 0000000 00000000052 15056754172 0026354 0 ustar 00root root 0000000 0000000 --- !!python/module:sys "non-empty value"
python-ruyaml-0.92.1/_test/data/invalid-python-module.loader-error 0000664 0000000 0000000 00000000043 15056754172 0025242 0 ustar 00root root 0000000 0000000 --- !!python/module:no.such.module
python-ruyaml-0.92.1/_test/data/invalid-python-name-kind.loader-error 0000664 0000000 0000000 00000000041 15056754172 0025616 0 ustar 00root root 0000000 0000000 --- !!python/name:sys.modules {}
python-ruyaml-0.92.1/_test/data/invalid-python-name-module-2.loader-error 0000664 0000000 0000000 00000000036 15056754172 0026321 0 ustar 00root root 0000000 0000000 --- !!python/name:xml.parsers
python-ruyaml-0.92.1/_test/data/invalid-python-name-module.loader-error 0000664 0000000 0000000 00000000043 15056754172 0026160 0 ustar 00root root 0000000 0000000 --- !!python/name:sys.modules.keys
python-ruyaml-0.92.1/_test/data/invalid-python-name-object.loader-error 0000664 0000000 0000000 00000000040 15056754172 0026136 0 ustar 00root root 0000000 0000000 --- !!python/name:os.path.rm_rf
python-ruyaml-0.92.1/_test/data/invalid-python-name-value.loader-error 0000664 0000000 0000000 00000000040 15056754172 0026004 0 ustar 00root root 0000000 0000000 --- !!python/name:sys.modules 5
python-ruyaml-0.92.1/_test/data/invalid-simple-key.loader-error 0000664 0000000 0000000 00000000063 15056754172 0024517 0 ustar 00root root 0000000 0000000 key: value
invalid simple key
next key: next value
python-ruyaml-0.92.1/_test/data/invalid-single-quote-bug.code 0000664 0000000 0000000 00000000034 15056754172 0024142 0 ustar 00root root 0000000 0000000 ["foo 'bar'", "foo\n'bar'"]
python-ruyaml-0.92.1/_test/data/invalid-single-quote-bug.data 0000664 0000000 0000000 00000000035 15056754172 0024142 0 ustar 00root root 0000000 0000000 - "foo 'bar'"
- "foo\n'bar'"
python-ruyaml-0.92.1/_test/data/invalid-starting-character.loader-error 0000664 0000000 0000000 00000000024 15056754172 0026222 0 ustar 00root root 0000000 0000000 @@@@@@@@@@@@@@@@@@@
python-ruyaml-0.92.1/_test/data/invalid-tag-1.loader-error 0000664 0000000 0000000 00000000021 15056754172 0023343 0 ustar 00root root 0000000 0000000 - ! baz
python-ruyaml-0.92.1/_test/data/invalid-tag-2.loader-error 0000664 0000000 0000000 00000000026 15056754172 0023351 0 ustar 00root root 0000000 0000000 - !prefix!foo#bar baz
python-ruyaml-0.92.1/_test/data/invalid-tag-directive-handle.loader-error 0000664 0000000 0000000 00000000021 15056754172 0026412 0 ustar 00root root 0000000 0000000 %TAG !!! !!!
---
python-ruyaml-0.92.1/_test/data/invalid-tag-directive-prefix.loader-error 0000664 0000000 0000000 00000000101 15056754172 0026453 0 ustar 00root root 0000000 0000000 %TAG ! tag:zz.com/foo#bar # '#' is not allowed in URLs
---
python-ruyaml-0.92.1/_test/data/invalid-tag-handle-1.emitter-error 0000664 0000000 0000000 00000000163 15056754172 0025006 0 ustar 00root root 0000000 0000000 - !StreamStart
- !DocumentStart { tags: { '!foo': 'bar' } }
- !Scalar { value: 'foo' }
- !DocumentEnd
- !StreamEnd
python-ruyaml-0.92.1/_test/data/invalid-tag-handle-1.loader-error 0000664 0000000 0000000 00000000024 15056754172 0024577 0 ustar 00root root 0000000 0000000 %TAG foo bar
---
python-ruyaml-0.92.1/_test/data/invalid-tag-handle-2.emitter-error 0000664 0000000 0000000 00000000162 15056754172 0025006 0 ustar 00root root 0000000 0000000 - !StreamStart
- !DocumentStart { tags: { '!!!': 'bar' } }
- !Scalar { value: 'foo' }
- !DocumentEnd
- !StreamEnd
python-ruyaml-0.92.1/_test/data/invalid-tag-handle-2.loader-error 0000664 0000000 0000000 00000000030 15056754172 0024575 0 ustar 00root root 0000000 0000000 %TAG !foo bar
---
python-ruyaml-0.92.1/_test/data/invalid-uri-escapes-1.loader-error 0000664 0000000 0000000 00000000024 15056754172 0025013 0 ustar 00root root 0000000 0000000 --- ! foo
python-ruyaml-0.92.1/_test/data/invalid-uri-escapes-2.loader-error 0000664 0000000 0000000 00000000017 15056754172 0025016 0 ustar 00root root 0000000 0000000 --- !<%FF> foo
python-ruyaml-0.92.1/_test/data/invalid-uri-escapes-3.loader-error 0000664 0000000 0000000 00000000041 15056754172 0025014 0 ustar 00root root 0000000 0000000 --- ! baz
python-ruyaml-0.92.1/_test/data/invalid-uri.loader-error 0000664 0000000 0000000 00000000020 15056754172 0023230 0 ustar 00root root 0000000 0000000 --- !foo! bar
python-ruyaml-0.92.1/_test/data/invalid-utf8-byte.loader-error 0000664 0000000 0000000 00000010135 15056754172 0024270 0 ustar 00root root 0000000 0000000 ###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
Invalid byte ('\xFF'): <--
###############################################################
python-ruyaml-0.92.1/_test/data/invalid-utf8-byte.stream-error 0000664 0000000 0000000 00000010135 15056754172 0024315 0 ustar 00root root 0000000 0000000 ###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
###############################################################
Invalid byte ('\xFF'): <--
###############################################################
python-ruyaml-0.92.1/_test/data/invalid-yaml-directive-version-1.loader-error 0000664 0000000 0000000 00000000037 15056754172 0027200 0 ustar 00root root 0000000 0000000 # No version at all.
%YAML
---
python-ruyaml-0.92.1/_test/data/invalid-yaml-directive-version-2.loader-error 0000664 0000000 0000000 00000000021 15056754172 0027172 0 ustar 00root root 0000000 0000000 %YAML 1e-5
---
python-ruyaml-0.92.1/_test/data/invalid-yaml-directive-version-3.loader-error 0000664 0000000 0000000 00000000015 15056754172 0027176 0 ustar 00root root 0000000 0000000 %YAML 1.
---
python-ruyaml-0.92.1/_test/data/invalid-yaml-directive-version-4.loader-error 0000664 0000000 0000000 00000000024 15056754172 0027177 0 ustar 00root root 0000000 0000000 %YAML 1.132.435
---
python-ruyaml-0.92.1/_test/data/invalid-yaml-directive-version-5.loader-error 0000664 0000000 0000000 00000000016 15056754172 0027201 0 ustar 00root root 0000000 0000000 %YAML A.0
---
python-ruyaml-0.92.1/_test/data/invalid-yaml-directive-version-6.loader-error 0000664 0000000 0000000 00000000020 15056754172 0027175 0 ustar 00root root 0000000 0000000 %YAML 123.C
---
python-ruyaml-0.92.1/_test/data/invalid-yaml-version.loader-error 0000664 0000000 0000000 00000000024 15056754172 0025062 0 ustar 00root root 0000000 0000000 %YAML 2.0
--- foo
python-ruyaml-0.92.1/_test/data/latin.unicode 0000664 0000000 0000000 00000165100 15056754172 0021160 0 ustar 00root root 0000000 0000000 ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzªµºÀÁÂÃÄÅÆÇÈÉÊ
ËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿĀāĂ㥹ĆćĈĉĊċČčĎ
ďĐđĒēĔĕĖėĘęĚěĜĝĞğĠġĢģĤĥĦħĨĩĪīĬĭĮįİıIJijĴĵĶķĸĹĺĻļĽľĿŀŁłŃńŅņŇňʼnŊŋŌōŎŏŐ
őŒœŔŕŖŗŘřŚśŜŝŞşŠšŢţŤťŦŧŨũŪūŬŭŮůŰűŲųŴŵŶŷŸŹźŻżŽžſƀƁƂƃƄƅƆƇƈƉƊƋƌƍƎƏƐƑƒ
ƓƔƕƖƗƘƙƚƛƜƝƞƟƠơƢƣƤƥƦƧƨƩƪƫƬƭƮƯưƱƲƳƴƵƶƷƸƹƺƼƽƾƿDŽdžLJljNJnjǍǎǏǐǑǒǓǔǕǖǗǘǙǚǛǜ
ǝǞǟǠǡǢǣǤǥǦǧǨǩǪǫǬǭǮǯǰDZdzǴǵǶǷǸǹǺǻǼǽǾǿȀȁȂȃȄȅȆȇȈȉȊȋȌȍȎȏȐȑȒȓȔȕȖȗȘșȚțȜȝȞȟ
ȠȡȢȣȤȥȦȧȨȩȪȫȬȭȮȯȰȱȲȳȴȵȶȷȸȹȺȻȼȽȾȿɀɁɐɑɒɓɔɕɖɗɘəɚɛɜɝɞɟɠɡɢɣɤɥɦɧɨɩɪɫɬɭɮɯ
ɰɱɲɳɴɵɶɷɸɹɺɻɼɽɾɿʀʁʂʃʄʅʆʇʈʉʊʋʌʍʎʏʐʑʒʓʔʕʖʗʘʙʚʛʜʝʞʟʠʡʢʣʤʥʦʧʨʩʪʫʬʭʮʯΆΈ
ΉΊΌΎΏΐΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩΪΫάέήίΰαβγδεζηθικλμνξοπρςστυφχψωϊϋόύ
ώϐϑϒϓϔϕϖϗϘϙϚϛϜϝϞϟϠϡϢϣϤϥϦϧϨϩϪϫϬϭϮϯϰϱϲϳϴϵϷϸϹϺϻϼϽϾϿЀЁЂЃЄЅІЇЈЉЊЋЌЍЎЏАБ
ВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяѐёђѓ
єѕіїјљњћќѝўџѠѡѢѣѤѥѦѧѨѩѪѫѬѭѮѯѰѱѲѳѴѵѶѷѸѹѺѻѼѽѾѿҀҁҊҋҌҍҎҏҐґҒғҔҕҖҗҘҙҚқҜҝ
ҞҟҠҡҢңҤҥҦҧҨҩҪҫҬҭҮүҰұҲҳҴҵҶҷҸҹҺһҼҽҾҿӀӁӂӃӄӅӆӇӈӉӊӋӌӍӎӐӑӒӓӔӕӖӗӘәӚӛӜӝӞӟӠ
ӡӢӣӤӥӦӧӨөӪӫӬӭӮӯӰӱӲӳӴӵӶӷӸӹԀԁԂԃԄԅԆԇԈԉԊԋԌԍԎԏԱԲԳԴԵԶԷԸԹԺԻԼԽԾԿՀՁՂՃՄՅՆՇՈՉ
ՊՋՌՍՎՏՐՑՒՓՔՕՖաբգդեզէըթժիլխծկհձղճմյնշոչպջռսվտրցւփքօֆևႠႡႢႣႤႥႦႧႨႩႪႫႬႭ
ႮႯႰႱႲႳႴႵႶႷႸႹႺႻႼႽႾႿჀჁჂჃჄჅᴀᴁᴂᴃᴄᴅᴆᴇᴈᴉᴊᴋᴌᴍᴎᴏᴐᴑᴒᴓᴔᴕᴖᴗᴘᴙᴚᴛᴜᴝᴞᴟᴠᴡᴢᴣᴤᴥᴦᴧᴨᴩ
ᴪᴫᵢᵣᵤᵥᵦᵧᵨᵩᵪᵫᵬᵭᵮᵯᵰᵱᵲᵳᵴᵵᵶᵷᵹᵺᵻᵼᵽᵾᵿᶀᶁᶂᶃᶄᶅᶆᶇᶈᶉᶊᶋᶌᶍᶎᶏᶐᶑᶒᶓᶔᶕᶖᶗᶘᶙᶚḀḁḂḃḄḅḆḇ
ḈḉḊḋḌḍḎḏḐḑḒḓḔḕḖḗḘḙḚḛḜḝḞḟḠḡḢḣḤḥḦḧḨḩḪḫḬḭḮḯḰḱḲḳḴḵḶḷḸḹḺḻḼḽḾḿṀṁṂṃṄṅṆṇṈṉ
ṊṋṌṍṎṏṐṑṒṓṔṕṖṗṘṙṚṛṜṝṞṟṠṡṢṣṤṥṦṧṨṩṪṫṬṭṮṯṰṱṲṳṴṵṶṷṸṹṺṻṼṽṾṿẀẁẂẃẄẅẆẇẈẉẊẋ
ẌẍẎẏẐẑẒẓẔẕẖẗẘẙẚẛẠạẢảẤấẦầẨẩẪẫẬậẮắẰằẲẳẴẵẶặẸẹẺẻẼẽẾếỀềỂểỄễỆệỈỉỊịỌọỎỏỐố
ỒồỔổỖỗỘộỚớỜờỞởỠỡỢợỤụỦủỨứỪừỬửỮữỰựỲỳỴỵỶỷỸỹἀἁἂἃἄἅἆἇἈἉἊἋἌἍἎἏἐἑἒἓἔἕἘἙἚἛ
ἜἝἠἡἢἣἤἥἦἧἨἩἪἫἬἭἮἯἰἱἲἳἴἵἶἷἸἹἺἻἼἽἾἿὀὁὂὃὄὅὈὉὊὋὌὍὐὑὒὓὔὕὖὗὙὛὝὟὠὡὢὣὤὥὦὧ
ὨὩὪὫὬὭὮὯὰάὲέὴήὶίὸόὺύὼώᾀᾁᾂᾃᾄᾅᾆᾇᾐᾑᾒᾓᾔᾕᾖᾗᾠᾡᾢᾣᾤᾥᾦᾧᾰᾱᾲᾳᾴᾶᾷᾸᾹᾺΆιῂῃῄῆῇῈΈῊ
ΉῐῑῒΐῖῗῘῙῚΊῠῡῢΰῤῥῦῧῨῩῪΎῬῲῳῴῶῷῸΌῺΏⁱⁿℂℇℊℋℌℍℎℏℐℑℒℓℕℙℚℛℜℝℤΩℨKÅℬℭℯℰℱℳℴℹ
ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzªµºÀÁÂÃÄÅÆÇÈÉÊ
ËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿĀāĂ㥹ĆćĈĉĊċČčĎ
ďĐđĒēĔĕĖėĘęĚěĜĝĞğĠġĢģĤĥĦħĨĩĪīĬĭĮįİıIJijĴĵĶķĸĹĺĻļĽľĿŀŁłŃńŅņŇňʼnŊŋŌōŎŏŐ
őŒœŔŕŖŗŘřŚśŜŝŞşŠšŢţŤťŦŧŨũŪūŬŭŮůŰűŲųŴŵŶŷŸŹźŻżŽžſƀƁƂƃƄƅƆƇƈƉƊƋƌƍƎƏƐƑƒ
ƓƔƕƖƗƘƙƚƛƜƝƞƟƠơƢƣƤƥƦƧƨƩƪƫƬƭƮƯưƱƲƳƴƵƶƷƸƹƺƼƽƾƿDŽdžLJljNJnjǍǎǏǐǑǒǓǔǕǖǗǘǙǚǛǜ
ǝǞǟǠǡǢǣǤǥǦǧǨǩǪǫǬǭǮǯǰDZdzǴǵǶǷǸǹǺǻǼǽǾǿȀȁȂȃȄȅȆȇȈȉȊȋȌȍȎȏȐȑȒȓȔȕȖȗȘșȚțȜȝȞȟ
ȠȡȢȣȤȥȦȧȨȩȪȫȬȭȮȯȰȱȲȳȴȵȶȷȸȹȺȻȼȽȾȿɀɁɐɑɒɓɔɕɖɗɘəɚɛɜɝɞɟɠɡɢɣɤɥɦɧɨɩɪɫɬɭɮɯ
ɰɱɲɳɴɵɶɷɸɹɺɻɼɽɾɿʀʁʂʃʄʅʆʇʈʉʊʋʌʍʎʏʐʑʒʓʔʕʖʗʘʙʚʛʜʝʞʟʠʡʢʣʤʥʦʧʨʩʪʫʬʭʮʯΆΈ
ΉΊΌΎΏΐΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩΪΫάέήίΰαβγδεζηθικλμνξοπρςστυφχψωϊϋόύ
ώϐϑϒϓϔϕϖϗϘϙϚϛϜϝϞϟϠϡϢϣϤϥϦϧϨϩϪϫϬϭϮϯϰϱϲϳϴϵϷϸϹϺϻϼϽϾϿЀЁЂЃЄЅІЇЈЉЊЋЌЍЎЏАБ
ВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяѐёђѓ
єѕіїјљњћќѝўџѠѡѢѣѤѥѦѧѨѩѪѫѬѭѮѯѰѱѲѳѴѵѶѷѸѹѺѻѼѽѾѿҀҁҊҋҌҍҎҏҐґҒғҔҕҖҗҘҙҚқҜҝ
ҞҟҠҡҢңҤҥҦҧҨҩҪҫҬҭҮүҰұҲҳҴҵҶҷҸҹҺһҼҽҾҿӀӁӂӃӄӅӆӇӈӉӊӋӌӍӎӐӑӒӓӔӕӖӗӘәӚӛӜӝӞӟӠ
ӡӢӣӤӥӦӧӨөӪӫӬӭӮӯӰӱӲӳӴӵӶӷӸӹԀԁԂԃԄԅԆԇԈԉԊԋԌԍԎԏԱԲԳԴԵԶԷԸԹԺԻԼԽԾԿՀՁՂՃՄՅՆՇՈՉ
ՊՋՌՍՎՏՐՑՒՓՔՕՖաբգդեզէըթժիլխծկհձղճմյնշոչպջռսվտրցւփքօֆևႠႡႢႣႤႥႦႧႨႩႪႫႬႭ
ႮႯႰႱႲႳႴႵႶႷႸႹႺႻႼႽႾႿჀჁჂჃჄჅᴀᴁᴂᴃᴄᴅᴆᴇᴈᴉᴊᴋᴌᴍᴎᴏᴐᴑᴒᴓᴔᴕᴖᴗᴘᴙᴚᴛᴜᴝᴞᴟᴠᴡᴢᴣᴤᴥᴦᴧᴨᴩ
ᴪᴫᵢᵣᵤᵥᵦᵧᵨᵩᵪᵫᵬᵭᵮᵯᵰᵱᵲᵳᵴᵵᵶᵷᵹᵺᵻᵼᵽᵾᵿᶀᶁᶂᶃᶄᶅᶆᶇᶈᶉᶊᶋᶌᶍᶎᶏᶐᶑᶒᶓᶔᶕᶖᶗᶘᶙᶚḀḁḂḃḄḅḆḇ
ḈḉḊḋḌḍḎḏḐḑḒḓḔḕḖḗḘḙḚḛḜḝḞḟḠḡḢḣḤḥḦḧḨḩḪḫḬḭḮḯḰḱḲḳḴḵḶḷḸḹḺḻḼḽḾḿṀṁṂṃṄṅṆṇṈṉ
ṊṋṌṍṎṏṐṑṒṓṔṕṖṗṘṙṚṛṜṝṞṟṠṡṢṣṤṥṦṧṨṩṪṫṬṭṮṯṰṱṲṳṴṵṶṷṸṹṺṻṼṽṾṿẀẁẂẃẄẅẆẇẈẉẊẋ
ẌẍẎẏẐẑẒẓẔẕẖẗẘẙẚẛẠạẢảẤấẦầẨẩẪẫẬậẮắẰằẲẳẴẵẶặẸẹẺẻẼẽẾếỀềỂểỄễỆệỈỉỊịỌọỎỏỐố
ỒồỔổỖỗỘộỚớỜờỞởỠỡỢợỤụỦủỨứỪừỬửỮữỰựỲỳỴỵỶỷỸỹἀἁἂἃἄἅἆἇἈἉἊἋἌἍἎἏἐἑἒἓἔἕἘἙἚἛ
ἜἝἠἡἢἣἤἥἦἧἨἩἪἫἬἭἮἯἰἱἲἳἴἵἶἷἸἹἺἻἼἽἾἿὀὁὂὃὄὅὈὉὊὋὌὍὐὑὒὓὔὕὖὗὙὛὝὟὠὡὢὣὤὥὦὧ
ὨὩὪὫὬὭὮὯὰάὲέὴήὶίὸόὺύὼώᾀᾁᾂᾃᾄᾅᾆᾇᾐᾑᾒᾓᾔᾕᾖᾗᾠᾡᾢᾣᾤᾥᾦᾧᾰᾱᾲᾳᾴᾶᾷᾸᾹᾺΆιῂῃῄῆῇῈΈῊ
ΉῐῑῒΐῖῗῘῙῚΊῠῡῢΰῤῥῦῧῨῩῪΎῬῲῳῴῶῷῸΌῺΏⁱⁿℂℇℊℋℌℍℎℏℐℑℒℓℕℙℚℛℜℝℤΩℨKÅℬℭℯℰℱℳℴℹ
ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzªµºÀÁÂÃÄÅÆÇÈÉÊ
ËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿĀāĂ㥹ĆćĈĉĊċČčĎ
ďĐđĒēĔĕĖėĘęĚěĜĝĞğĠġĢģĤĥĦħĨĩĪīĬĭĮįİıIJijĴĵĶķĸĹĺĻļĽľĿŀŁłŃńŅņŇňʼnŊŋŌōŎŏŐ
őŒœŔŕŖŗŘřŚśŜŝŞşŠšŢţŤťŦŧŨũŪūŬŭŮůŰűŲųŴŵŶŷŸŹźŻżŽžſƀƁƂƃƄƅƆƇƈƉƊƋƌƍƎƏƐƑƒ
ƓƔƕƖƗƘƙƚƛƜƝƞƟƠơƢƣƤƥƦƧƨƩƪƫƬƭƮƯưƱƲƳƴƵƶƷƸƹƺƼƽƾƿDŽdžLJljNJnjǍǎǏǐǑǒǓǔǕǖǗǘǙǚǛǜ
ǝǞǟǠǡǢǣǤǥǦǧǨǩǪǫǬǭǮǯǰDZdzǴǵǶǷǸǹǺǻǼǽǾǿȀȁȂȃȄȅȆȇȈȉȊȋȌȍȎȏȐȑȒȓȔȕȖȗȘșȚțȜȝȞȟ
ȠȡȢȣȤȥȦȧȨȩȪȫȬȭȮȯȰȱȲȳȴȵȶȷȸȹȺȻȼȽȾȿɀɁɐɑɒɓɔɕɖɗɘəɚɛɜɝɞɟɠɡɢɣɤɥɦɧɨɩɪɫɬɭɮɯ
ɰɱɲɳɴɵɶɷɸɹɺɻɼɽɾɿʀʁʂʃʄʅʆʇʈʉʊʋʌʍʎʏʐʑʒʓʔʕʖʗʘʙʚʛʜʝʞʟʠʡʢʣʤʥʦʧʨʩʪʫʬʭʮʯΆΈ
ΉΊΌΎΏΐΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩΪΫάέήίΰαβγδεζηθικλμνξοπρςστυφχψωϊϋόύ
ώϐϑϒϓϔϕϖϗϘϙϚϛϜϝϞϟϠϡϢϣϤϥϦϧϨϩϪϫϬϭϮϯϰϱϲϳϴϵϷϸϹϺϻϼϽϾϿЀЁЂЃЄЅІЇЈЉЊЋЌЍЎЏАБ
ВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяѐёђѓ
єѕіїјљњћќѝўџѠѡѢѣѤѥѦѧѨѩѪѫѬѭѮѯѰѱѲѳѴѵѶѷѸѹѺѻѼѽѾѿҀҁҊҋҌҍҎҏҐґҒғҔҕҖҗҘҙҚқҜҝ
ҞҟҠҡҢңҤҥҦҧҨҩҪҫҬҭҮүҰұҲҳҴҵҶҷҸҹҺһҼҽҾҿӀӁӂӃӄӅӆӇӈӉӊӋӌӍӎӐӑӒӓӔӕӖӗӘәӚӛӜӝӞӟӠ
ӡӢӣӤӥӦӧӨөӪӫӬӭӮӯӰӱӲӳӴӵӶӷӸӹԀԁԂԃԄԅԆԇԈԉԊԋԌԍԎԏԱԲԳԴԵԶԷԸԹԺԻԼԽԾԿՀՁՂՃՄՅՆՇՈՉ
ՊՋՌՍՎՏՐՑՒՓՔՕՖաբգդեզէըթժիլխծկհձղճմյնշոչպջռսվտրցւփքօֆևႠႡႢႣႤႥႦႧႨႩႪႫႬႭ
ႮႯႰႱႲႳႴႵႶႷႸႹႺႻႼႽႾႿჀჁჂჃჄჅᴀᴁᴂᴃᴄᴅᴆᴇᴈᴉᴊᴋᴌᴍᴎᴏᴐᴑᴒᴓᴔᴕᴖᴗᴘᴙᴚᴛᴜᴝᴞᴟᴠᴡᴢᴣᴤᴥᴦᴧᴨᴩ
ᴪᴫᵢᵣᵤᵥᵦᵧᵨᵩᵪᵫᵬᵭᵮᵯᵰᵱᵲᵳᵴᵵᵶᵷᵹᵺᵻᵼᵽᵾᵿᶀᶁᶂᶃᶄᶅᶆᶇᶈᶉᶊᶋᶌᶍᶎᶏᶐᶑᶒᶓᶔᶕᶖᶗᶘᶙᶚḀḁḂḃḄḅḆḇ
ḈḉḊḋḌḍḎḏḐḑḒḓḔḕḖḗḘḙḚḛḜḝḞḟḠḡḢḣḤḥḦḧḨḩḪḫḬḭḮḯḰḱḲḳḴḵḶḷḸḹḺḻḼḽḾḿṀṁṂṃṄṅṆṇṈṉ
ṊṋṌṍṎṏṐṑṒṓṔṕṖṗṘṙṚṛṜṝṞṟṠṡṢṣṤṥṦṧṨṩṪṫṬṭṮṯṰṱṲṳṴṵṶṷṸṹṺṻṼṽṾṿẀẁẂẃẄẅẆẇẈẉẊẋ
ẌẍẎẏẐẑẒẓẔẕẖẗẘẙẚẛẠạẢảẤấẦầẨẩẪẫẬậẮắẰằẲẳẴẵẶặẸẹẺẻẼẽẾếỀềỂểỄễỆệỈỉỊịỌọỎỏỐố
ỒồỔổỖỗỘộỚớỜờỞởỠỡỢợỤụỦủỨứỪừỬửỮữỰựỲỳỴỵỶỷỸỹἀἁἂἃἄἅἆἇἈἉἊἋἌἍἎἏἐἑἒἓἔἕἘἙἚἛ
ἜἝἠἡἢἣἤἥἦἧἨἩἪἫἬἭἮἯἰἱἲἳἴἵἶἷἸἹἺἻἼἽἾἿὀὁὂὃὄὅὈὉὊὋὌὍὐὑὒὓὔὕὖὗὙὛὝὟὠὡὢὣὤὥὦὧ
ὨὩὪὫὬὭὮὯὰάὲέὴήὶίὸόὺύὼώᾀᾁᾂᾃᾄᾅᾆᾇᾐᾑᾒᾓᾔᾕᾖᾗᾠᾡᾢᾣᾤᾥᾦᾧᾰᾱᾲᾳᾴᾶᾷᾸᾹᾺΆιῂῃῄῆῇῈΈῊ
ΉῐῑῒΐῖῗῘῙῚΊῠῡῢΰῤῥῦῧῨῩῪΎῬῲῳῴῶῷῸΌῺΏⁱⁿℂℇℊℋℌℍℎℏℐℑℒℓℕℙℚℛℜℝℤΩℨKÅℬℭℯℰℱℳℴℹ
ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzªµºÀÁÂÃÄÅÆÇÈÉÊ
ËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿĀāĂ㥹ĆćĈĉĊċČčĎ
ďĐđĒēĔĕĖėĘęĚěĜĝĞğĠġĢģĤĥĦħĨĩĪīĬĭĮįİıIJijĴĵĶķĸĹĺĻļĽľĿŀŁłŃńŅņŇňʼnŊŋŌōŎŏŐ
őŒœŔŕŖŗŘřŚśŜŝŞşŠšŢţŤťŦŧŨũŪūŬŭŮůŰűŲųŴŵŶŷŸŹźŻżŽžſƀƁƂƃƄƅƆƇƈƉƊƋƌƍƎƏƐƑƒ
ƓƔƕƖƗƘƙƚƛƜƝƞƟƠơƢƣƤƥƦƧƨƩƪƫƬƭƮƯưƱƲƳƴƵƶƷƸƹƺƼƽƾƿDŽdžLJljNJnjǍǎǏǐǑǒǓǔǕǖǗǘǙǚǛǜ
ǝǞǟǠǡǢǣǤǥǦǧǨǩǪǫǬǭǮǯǰDZdzǴǵǶǷǸǹǺǻǼǽǾǿȀȁȂȃȄȅȆȇȈȉȊȋȌȍȎȏȐȑȒȓȔȕȖȗȘșȚțȜȝȞȟ
ȠȡȢȣȤȥȦȧȨȩȪȫȬȭȮȯȰȱȲȳȴȵȶȷȸȹȺȻȼȽȾȿɀɁɐɑɒɓɔɕɖɗɘəɚɛɜɝɞɟɠɡɢɣɤɥɦɧɨɩɪɫɬɭɮɯ
ɰɱɲɳɴɵɶɷɸɹɺɻɼɽɾɿʀʁʂʃʄʅʆʇʈʉʊʋʌʍʎʏʐʑʒʓʔʕʖʗʘʙʚʛʜʝʞʟʠʡʢʣʤʥʦʧʨʩʪʫʬʭʮʯΆΈ
ΉΊΌΎΏΐΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩΪΫάέήίΰαβγδεζηθικλμνξοπρςστυφχψωϊϋόύ
ώϐϑϒϓϔϕϖϗϘϙϚϛϜϝϞϟϠϡϢϣϤϥϦϧϨϩϪϫϬϭϮϯϰϱϲϳϴϵϷϸϹϺϻϼϽϾϿЀЁЂЃЄЅІЇЈЉЊЋЌЍЎЏАБ
ВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяѐёђѓ
єѕіїјљњћќѝўџѠѡѢѣѤѥѦѧѨѩѪѫѬѭѮѯѰѱѲѳѴѵѶѷѸѹѺѻѼѽѾѿҀҁҊҋҌҍҎҏҐґҒғҔҕҖҗҘҙҚқҜҝ
ҞҟҠҡҢңҤҥҦҧҨҩҪҫҬҭҮүҰұҲҳҴҵҶҷҸҹҺһҼҽҾҿӀӁӂӃӄӅӆӇӈӉӊӋӌӍӎӐӑӒӓӔӕӖӗӘәӚӛӜӝӞӟӠ
ӡӢӣӤӥӦӧӨөӪӫӬӭӮӯӰӱӲӳӴӵӶӷӸӹԀԁԂԃԄԅԆԇԈԉԊԋԌԍԎԏԱԲԳԴԵԶԷԸԹԺԻԼԽԾԿՀՁՂՃՄՅՆՇՈՉ
ՊՋՌՍՎՏՐՑՒՓՔՕՖաբգդեզէըթժիլխծկհձղճմյնշոչպջռսվտրցւփքօֆևႠႡႢႣႤႥႦႧႨႩႪႫႬႭ
ႮႯႰႱႲႳႴႵႶႷႸႹႺႻႼႽႾႿჀჁჂჃჄჅᴀᴁᴂᴃᴄᴅᴆᴇᴈᴉᴊᴋᴌᴍᴎᴏᴐᴑᴒᴓᴔᴕᴖᴗᴘᴙᴚᴛᴜᴝᴞᴟᴠᴡᴢᴣᴤᴥᴦᴧᴨᴩ
ᴪᴫᵢᵣᵤᵥᵦᵧᵨᵩᵪᵫᵬᵭᵮᵯᵰᵱᵲᵳᵴᵵᵶᵷᵹᵺᵻᵼᵽᵾᵿᶀᶁᶂᶃᶄᶅᶆᶇᶈᶉᶊᶋᶌᶍᶎᶏᶐᶑᶒᶓᶔᶕᶖᶗᶘᶙᶚḀḁḂḃḄḅḆḇ
ḈḉḊḋḌḍḎḏḐḑḒḓḔḕḖḗḘḙḚḛḜḝḞḟḠḡḢḣḤḥḦḧḨḩḪḫḬḭḮḯḰḱḲḳḴḵḶḷḸḹḺḻḼḽḾḿṀṁṂṃṄṅṆṇṈṉ
ṊṋṌṍṎṏṐṑṒṓṔṕṖṗṘṙṚṛṜṝṞṟṠṡṢṣṤṥṦṧṨṩṪṫṬṭṮṯṰṱṲṳṴṵṶṷṸṹṺṻṼṽṾṿẀẁẂẃẄẅẆẇẈẉẊẋ
ẌẍẎẏẐẑẒẓẔẕẖẗẘẙẚẛẠạẢảẤấẦầẨẩẪẫẬậẮắẰằẲẳẴẵẶặẸẹẺẻẼẽẾếỀềỂểỄễỆệỈỉỊịỌọỎỏỐố
ỒồỔổỖỗỘộỚớỜờỞởỠỡỢợỤụỦủỨứỪừỬửỮữỰựỲỳỴỵỶỷỸỹἀἁἂἃἄἅἆἇἈἉἊἋἌἍἎἏἐἑἒἓἔἕἘἙἚἛ
ἜἝἠἡἢἣἤἥἦἧἨἩἪἫἬἭἮἯἰἱἲἳἴἵἶἷἸἹἺἻἼἽἾἿὀὁὂὃὄὅὈὉὊὋὌὍὐὑὒὓὔὕὖὗὙὛὝὟὠὡὢὣὤὥὦὧ
ὨὩὪὫὬὭὮὯὰάὲέὴήὶίὸόὺύὼώᾀᾁᾂᾃᾄᾅᾆᾇᾐᾑᾒᾓᾔᾕᾖᾗᾠᾡᾢᾣᾤᾥᾦᾧᾰᾱᾲᾳᾴᾶᾷᾸᾹᾺΆιῂῃῄῆῇῈΈῊ
ΉῐῑῒΐῖῗῘῙῚΊῠῡῢΰῤῥῦῧῨῩῪΎῬῲῳῴῶῷῸΌῺΏⁱⁿℂℇℊℋℌℍℎℏℐℑℒℓℕℙℚℛℜℝℤΩℨKÅℬℭℯℰℱℳℴℹ
ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzªµºÀÁÂÃÄÅÆÇÈÉÊ
ËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿĀāĂ㥹ĆćĈĉĊċČčĎ
ďĐđĒēĔĕĖėĘęĚěĜĝĞğĠġĢģĤĥĦħĨĩĪīĬĭĮįİıIJijĴĵĶķĸĹĺĻļĽľĿŀŁłŃńŅņŇňʼnŊŋŌōŎŏŐ
őŒœŔŕŖŗŘřŚśŜŝŞşŠšŢţŤťŦŧŨũŪūŬŭŮůŰűŲųŴŵŶŷŸŹźŻżŽžſƀƁƂƃƄƅƆƇƈƉƊƋƌƍƎƏƐƑƒ
ƓƔƕƖƗƘƙƚƛƜƝƞƟƠơƢƣƤƥƦƧƨƩƪƫƬƭƮƯưƱƲƳƴƵƶƷƸƹƺƼƽƾƿDŽdžLJljNJnjǍǎǏǐǑǒǓǔǕǖǗǘǙǚǛǜ
ǝǞǟǠǡǢǣǤǥǦǧǨǩǪǫǬǭǮǯǰDZdzǴǵǶǷǸǹǺǻǼǽǾǿȀȁȂȃȄȅȆȇȈȉȊȋȌȍȎȏȐȑȒȓȔȕȖȗȘșȚțȜȝȞȟ
ȠȡȢȣȤȥȦȧȨȩȪȫȬȭȮȯȰȱȲȳȴȵȶȷȸȹȺȻȼȽȾȿɀɁɐɑɒɓɔɕɖɗɘəɚɛɜɝɞɟɠɡɢɣɤɥɦɧɨɩɪɫɬɭɮɯ
ɰɱɲɳɴɵɶɷɸɹɺɻɼɽɾɿʀʁʂʃʄʅʆʇʈʉʊʋʌʍʎʏʐʑʒʓʔʕʖʗʘʙʚʛʜʝʞʟʠʡʢʣʤʥʦʧʨʩʪʫʬʭʮʯΆΈ
ΉΊΌΎΏΐΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩΪΫάέήίΰαβγδεζηθικλμνξοπρςστυφχψωϊϋόύ
ώϐϑϒϓϔϕϖϗϘϙϚϛϜϝϞϟϠϡϢϣϤϥϦϧϨϩϪϫϬϭϮϯϰϱϲϳϴϵϷϸϹϺϻϼϽϾϿЀЁЂЃЄЅІЇЈЉЊЋЌЍЎЏАБ
ВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяѐёђѓ
єѕіїјљњћќѝўџѠѡѢѣѤѥѦѧѨѩѪѫѬѭѮѯѰѱѲѳѴѵѶѷѸѹѺѻѼѽѾѿҀҁҊҋҌҍҎҏҐґҒғҔҕҖҗҘҙҚқҜҝ
ҞҟҠҡҢңҤҥҦҧҨҩҪҫҬҭҮүҰұҲҳҴҵҶҷҸҹҺһҼҽҾҿӀӁӂӃӄӅӆӇӈӉӊӋӌӍӎӐӑӒӓӔӕӖӗӘәӚӛӜӝӞӟӠ
ӡӢӣӤӥӦӧӨөӪӫӬӭӮӯӰӱӲӳӴӵӶӷӸӹԀԁԂԃԄԅԆԇԈԉԊԋԌԍԎԏԱԲԳԴԵԶԷԸԹԺԻԼԽԾԿՀՁՂՃՄՅՆՇՈՉ
ՊՋՌՍՎՏՐՑՒՓՔՕՖաբգդեզէըթժիլխծկհձղճմյնշոչպջռսվտրցւփքօֆևႠႡႢႣႤႥႦႧႨႩႪႫႬႭ
ႮႯႰႱႲႳႴႵႶႷႸႹႺႻႼႽႾႿჀჁჂჃჄჅᴀᴁᴂᴃᴄᴅᴆᴇᴈᴉᴊᴋᴌᴍᴎᴏᴐᴑᴒᴓᴔᴕᴖᴗᴘᴙᴚᴛᴜᴝᴞᴟᴠᴡᴢᴣᴤᴥᴦᴧᴨᴩ
ᴪᴫᵢᵣᵤᵥᵦᵧᵨᵩᵪᵫᵬᵭᵮᵯᵰᵱᵲᵳᵴᵵᵶᵷᵹᵺᵻᵼᵽᵾᵿᶀᶁᶂᶃᶄᶅᶆᶇᶈᶉᶊᶋᶌᶍᶎᶏᶐᶑᶒᶓᶔᶕᶖᶗᶘᶙᶚḀḁḂḃḄḅḆḇ
ḈḉḊḋḌḍḎḏḐḑḒḓḔḕḖḗḘḙḚḛḜḝḞḟḠḡḢḣḤḥḦḧḨḩḪḫḬḭḮḯḰḱḲḳḴḵḶḷḸḹḺḻḼḽḾḿṀṁṂṃṄṅṆṇṈṉ
ṊṋṌṍṎṏṐṑṒṓṔṕṖṗṘṙṚṛṜṝṞṟṠṡṢṣṤṥṦṧṨṩṪṫṬṭṮṯṰṱṲṳṴṵṶṷṸṹṺṻṼṽṾṿẀẁẂẃẄẅẆẇẈẉẊẋ
ẌẍẎẏẐẑẒẓẔẕẖẗẘẙẚẛẠạẢảẤấẦầẨẩẪẫẬậẮắẰằẲẳẴẵẶặẸẹẺẻẼẽẾếỀềỂểỄễỆệỈỉỊịỌọỎỏỐố
ỒồỔổỖỗỘộỚớỜờỞởỠỡỢợỤụỦủỨứỪừỬửỮữỰựỲỳỴỵỶỷỸỹἀἁἂἃἄἅἆἇἈἉἊἋἌἍἎἏἐἑἒἓἔἕἘἙἚἛ
ἜἝἠἡἢἣἤἥἦἧἨἩἪἫἬἭἮἯἰἱἲἳἴἵἶἷἸἹἺἻἼἽἾἿὀὁὂὃὄὅὈὉὊὋὌὍὐὑὒὓὔὕὖὗὙὛὝὟὠὡὢὣὤὥὦὧ
ὨὩὪὫὬὭὮὯὰάὲέὴήὶίὸόὺύὼώᾀᾁᾂᾃᾄᾅᾆᾇᾐᾑᾒᾓᾔᾕᾖᾗᾠᾡᾢᾣᾤᾥᾦᾧᾰᾱᾲᾳᾴᾶᾷᾸᾹᾺΆιῂῃῄῆῇῈΈῊ
ΉῐῑῒΐῖῗῘῙῚΊῠῡῢΰῤῥῦῧῨῩῪΎῬῲῳῴῶῷῸΌῺΏⁱⁿℂℇℊℋℌℍℎℏℐℑℒℓℕℙℚℛℜℝℤΩℨKÅℬℭℯℰℱℳℴℹ
ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzªµºÀÁÂÃÄÅÆÇÈÉÊ
ËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿĀāĂ㥹ĆćĈĉĊċČčĎ
ďĐđĒēĔĕĖėĘęĚěĜĝĞğĠġĢģĤĥĦħĨĩĪīĬĭĮįİıIJijĴĵĶķĸĹĺĻļĽľĿŀŁłŃńŅņŇňʼnŊŋŌōŎŏŐ
őŒœŔŕŖŗŘřŚśŜŝŞşŠšŢţŤťŦŧŨũŪūŬŭŮůŰűŲųŴŵŶŷŸŹźŻżŽžſƀƁƂƃƄƅƆƇƈƉƊƋƌƍƎƏƐƑƒ
ƓƔƕƖƗƘƙƚƛƜƝƞƟƠơƢƣƤƥƦƧƨƩƪƫƬƭƮƯưƱƲƳƴƵƶƷƸƹƺƼƽƾƿDŽdžLJljNJnjǍǎǏǐǑǒǓǔǕǖǗǘǙǚǛǜ
ǝǞǟǠǡǢǣǤǥǦǧǨǩǪǫǬǭǮǯǰDZdzǴǵǶǷǸǹǺǻǼǽǾǿȀȁȂȃȄȅȆȇȈȉȊȋȌȍȎȏȐȑȒȓȔȕȖȗȘșȚțȜȝȞȟ
ȠȡȢȣȤȥȦȧȨȩȪȫȬȭȮȯȰȱȲȳȴȵȶȷȸȹȺȻȼȽȾȿɀɁɐɑɒɓɔɕɖɗɘəɚɛɜɝɞɟɠɡɢɣɤɥɦɧɨɩɪɫɬɭɮɯ
ɰɱɲɳɴɵɶɷɸɹɺɻɼɽɾɿʀʁʂʃʄʅʆʇʈʉʊʋʌʍʎʏʐʑʒʓʔʕʖʗʘʙʚʛʜʝʞʟʠʡʢʣʤʥʦʧʨʩʪʫʬʭʮʯΆΈ
ΉΊΌΎΏΐΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩΪΫάέήίΰαβγδεζηθικλμνξοπρςστυφχψωϊϋόύ
ώϐϑϒϓϔϕϖϗϘϙϚϛϜϝϞϟϠϡϢϣϤϥϦϧϨϩϪϫϬϭϮϯϰϱϲϳϴϵϷϸϹϺϻϼϽϾϿЀЁЂЃЄЅІЇЈЉЊЋЌЍЎЏАБ
ВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяѐёђѓ
єѕіїјљњћќѝўџѠѡѢѣѤѥѦѧѨѩѪѫѬѭѮѯѰѱѲѳѴѵѶѷѸѹѺѻѼѽѾѿҀҁҊҋҌҍҎҏҐґҒғҔҕҖҗҘҙҚқҜҝ
ҞҟҠҡҢңҤҥҦҧҨҩҪҫҬҭҮүҰұҲҳҴҵҶҷҸҹҺһҼҽҾҿӀӁӂӃӄӅӆӇӈӉӊӋӌӍӎӐӑӒӓӔӕӖӗӘәӚӛӜӝӞӟӠ
ӡӢӣӤӥӦӧӨөӪӫӬӭӮӯӰӱӲӳӴӵӶӷӸӹԀԁԂԃԄԅԆԇԈԉԊԋԌԍԎԏԱԲԳԴԵԶԷԸԹԺԻԼԽԾԿՀՁՂՃՄՅՆՇՈՉ
ՊՋՌՍՎՏՐՑՒՓՔՕՖաբգդեզէըթժիլխծկհձղճմյնշոչպջռսվտրցւփքօֆևႠႡႢႣႤႥႦႧႨႩႪႫႬႭ
ႮႯႰႱႲႳႴႵႶႷႸႹႺႻႼႽႾႿჀჁჂჃჄჅᴀᴁᴂᴃᴄᴅᴆᴇᴈᴉᴊᴋᴌᴍᴎᴏᴐᴑᴒᴓᴔᴕᴖᴗᴘᴙᴚᴛᴜᴝᴞᴟᴠᴡᴢᴣᴤᴥᴦᴧᴨᴩ
ᴪᴫᵢᵣᵤᵥᵦᵧᵨᵩᵪᵫᵬᵭᵮᵯᵰᵱᵲᵳᵴᵵᵶᵷᵹᵺᵻᵼᵽᵾᵿᶀᶁᶂᶃᶄᶅᶆᶇᶈᶉᶊᶋᶌᶍᶎᶏᶐᶑᶒᶓᶔᶕᶖᶗᶘᶙᶚḀḁḂḃḄḅḆḇ
ḈḉḊḋḌḍḎḏḐḑḒḓḔḕḖḗḘḙḚḛḜḝḞḟḠḡḢḣḤḥḦḧḨḩḪḫḬḭḮḯḰḱḲḳḴḵḶḷḸḹḺḻḼḽḾḿṀṁṂṃṄṅṆṇṈṉ
ṊṋṌṍṎṏṐṑṒṓṔṕṖṗṘṙṚṛṜṝṞṟṠṡṢṣṤṥṦṧṨṩṪṫṬṭṮṯṰṱṲṳṴṵṶṷṸṹṺṻṼṽṾṿẀẁẂẃẄẅẆẇẈẉẊẋ
ẌẍẎẏẐẑẒẓẔẕẖẗẘẙẚẛẠạẢảẤấẦầẨẩẪẫẬậẮắẰằẲẳẴẵẶặẸẹẺẻẼẽẾếỀềỂểỄễỆệỈỉỊịỌọỎỏỐố
ỒồỔổỖỗỘộỚớỜờỞởỠỡỢợỤụỦủỨứỪừỬửỮữỰựỲỳỴỵỶỷỸỹἀἁἂἃἄἅἆἇἈἉἊἋἌἍἎἏἐἑἒἓἔἕἘἙἚἛ
ἜἝἠἡἢἣἤἥἦἧἨἩἪἫἬἭἮἯἰἱἲἳἴἵἶἷἸἹἺἻἼἽἾἿὀὁὂὃὄὅὈὉὊὋὌὍὐὑὒὓὔὕὖὗὙὛὝὟὠὡὢὣὤὥὦὧ
ὨὩὪὫὬὭὮὯὰάὲέὴήὶίὸόὺύὼώᾀᾁᾂᾃᾄᾅᾆᾇᾐᾑᾒᾓᾔᾕᾖᾗᾠᾡᾢᾣᾤᾥᾦᾧᾰᾱᾲᾳᾴᾶᾷᾸᾹᾺΆιῂῃῄῆῇῈΈῊ
ΉῐῑῒΐῖῗῘῙῚΊῠῡῢΰῤῥῦῧῨῩῪΎῬῲῳῴῶῷῸΌῺΏⁱⁿℂℇℊℋℌℍℎℏℐℑℒℓℕℙℚℛℜℝℤΩℨKÅℬℭℯℰℱℳℴℹ
ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzªµºÀÁÂÃÄÅÆÇÈÉÊ
ËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿĀāĂ㥹ĆćĈĉĊċČčĎ
ďĐđĒēĔĕĖėĘęĚěĜĝĞğĠġĢģĤĥĦħĨĩĪīĬĭĮįİıIJijĴĵĶķĸĹĺĻļĽľĿŀŁłŃńŅņŇňʼnŊŋŌōŎŏŐ
őŒœŔŕŖŗŘřŚśŜŝŞşŠšŢţŤťŦŧŨũŪūŬŭŮůŰűŲųŴŵŶŷŸŹźŻżŽžſƀƁƂƃƄƅƆƇƈƉƊƋƌƍƎƏƐƑƒ
ƓƔƕƖƗƘƙƚƛƜƝƞƟƠơƢƣƤƥƦƧƨƩƪƫƬƭƮƯưƱƲƳƴƵƶƷƸƹƺƼƽƾƿDŽdžLJljNJnjǍǎǏǐǑǒǓǔǕǖǗǘǙǚǛǜ
ǝǞǟǠǡǢǣǤǥǦǧǨǩǪǫǬǭǮǯǰDZdzǴǵǶǷǸǹǺǻǼǽǾǿȀȁȂȃȄȅȆȇȈȉȊȋȌȍȎȏȐȑȒȓȔȕȖȗȘșȚțȜȝȞȟ
ȠȡȢȣȤȥȦȧȨȩȪȫȬȭȮȯȰȱȲȳȴȵȶȷȸȹȺȻȼȽȾȿɀɁɐɑɒɓɔɕɖɗɘəɚɛɜɝɞɟɠɡɢɣɤɥɦɧɨɩɪɫɬɭɮɯ
ɰɱɲɳɴɵɶɷɸɹɺɻɼɽɾɿʀʁʂʃʄʅʆʇʈʉʊʋʌʍʎʏʐʑʒʓʔʕʖʗʘʙʚʛʜʝʞʟʠʡʢʣʤʥʦʧʨʩʪʫʬʭʮʯΆΈ
ΉΊΌΎΏΐΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩΪΫάέήίΰαβγδεζηθικλμνξοπρςστυφχψωϊϋόύ
ώϐϑϒϓϔϕϖϗϘϙϚϛϜϝϞϟϠϡϢϣϤϥϦϧϨϩϪϫϬϭϮϯϰϱϲϳϴϵϷϸϹϺϻϼϽϾϿЀЁЂЃЄЅІЇЈЉЊЋЌЍЎЏАБ
ВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяѐёђѓ
єѕіїјљњћќѝўџѠѡѢѣѤѥѦѧѨѩѪѫѬѭѮѯѰѱѲѳѴѵѶѷѸѹѺѻѼѽѾѿҀҁҊҋҌҍҎҏҐґҒғҔҕҖҗҘҙҚқҜҝ
ҞҟҠҡҢңҤҥҦҧҨҩҪҫҬҭҮүҰұҲҳҴҵҶҷҸҹҺһҼҽҾҿӀӁӂӃӄӅӆӇӈӉӊӋӌӍӎӐӑӒӓӔӕӖӗӘәӚӛӜӝӞӟӠ
ӡӢӣӤӥӦӧӨөӪӫӬӭӮӯӰӱӲӳӴӵӶӷӸӹԀԁԂԃԄԅԆԇԈԉԊԋԌԍԎԏԱԲԳԴԵԶԷԸԹԺԻԼԽԾԿՀՁՂՃՄՅՆՇՈՉ
ՊՋՌՍՎՏՐՑՒՓՔՕՖաբգդեզէըթժիլխծկհձղճմյնշոչպջռսվտրցւփքօֆևႠႡႢႣႤႥႦႧႨႩႪႫႬႭ
ႮႯႰႱႲႳႴႵႶႷႸႹႺႻႼႽႾႿჀჁჂჃჄჅᴀᴁᴂᴃᴄᴅᴆᴇᴈᴉᴊᴋᴌᴍᴎᴏᴐᴑᴒᴓᴔᴕᴖᴗᴘᴙᴚᴛᴜᴝᴞᴟᴠᴡᴢᴣᴤᴥᴦᴧᴨᴩ
ᴪᴫᵢᵣᵤᵥᵦᵧᵨᵩᵪᵫᵬᵭᵮᵯᵰᵱᵲᵳᵴᵵᵶᵷᵹᵺᵻᵼᵽᵾᵿᶀᶁᶂᶃᶄᶅᶆᶇᶈᶉᶊᶋᶌᶍᶎᶏᶐᶑᶒᶓᶔᶕᶖᶗᶘᶙᶚḀḁḂḃḄḅḆḇ
ḈḉḊḋḌḍḎḏḐḑḒḓḔḕḖḗḘḙḚḛḜḝḞḟḠḡḢḣḤḥḦḧḨḩḪḫḬḭḮḯḰḱḲḳḴḵḶḷḸḹḺḻḼḽḾḿṀṁṂṃṄṅṆṇṈṉ
ṊṋṌṍṎṏṐṑṒṓṔṕṖṗṘṙṚṛṜṝṞṟṠṡṢṣṤṥṦṧṨṩṪṫṬṭṮṯṰṱṲṳṴṵṶṷṸṹṺṻṼṽṾṿẀẁẂẃẄẅẆẇẈẉẊẋ
ẌẍẎẏẐẑẒẓẔẕẖẗẘẙẚẛẠạẢảẤấẦầẨẩẪẫẬậẮắẰằẲẳẴẵẶặẸẹẺẻẼẽẾếỀềỂểỄễỆệỈỉỊịỌọỎỏỐố
ỒồỔổỖỗỘộỚớỜờỞởỠỡỢợỤụỦủỨứỪừỬửỮữỰựỲỳỴỵỶỷỸỹἀἁἂἃἄἅἆἇἈἉἊἋἌἍἎἏἐἑἒἓἔἕἘἙἚἛ
ἜἝἠἡἢἣἤἥἦἧἨἩἪἫἬἭἮἯἰἱἲἳἴἵἶἷἸἹἺἻἼἽἾἿὀὁὂὃὄὅὈὉὊὋὌὍὐὑὒὓὔὕὖὗὙὛὝὟὠὡὢὣὤὥὦὧ
ὨὩὪὫὬὭὮὯὰάὲέὴήὶίὸόὺύὼώᾀᾁᾂᾃᾄᾅᾆᾇᾐᾑᾒᾓᾔᾕᾖᾗᾠᾡᾢᾣᾤᾥᾦᾧᾰᾱᾲᾳᾴᾶᾷᾸᾹᾺΆιῂῃῄῆῇῈΈῊ
ΉῐῑῒΐῖῗῘῙῚΊῠῡῢΰῤῥῦῧῨῩῪΎῬῲῳῴῶῷῸΌῺΏⁱⁿℂℇℊℋℌℍℎℏℐℑℒℓℕℙℚℛℜℝℤΩℨKÅℬℭℯℰℱℳℴℹ
ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzªµºÀÁÂÃÄÅÆÇÈÉÊ
ËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿĀāĂ㥹ĆćĈĉĊċČčĎ
ďĐđĒēĔĕĖėĘęĚěĜĝĞğĠġĢģĤĥĦħĨĩĪīĬĭĮįİıIJijĴĵĶķĸĹĺĻļĽľĿŀŁłŃńŅņŇňʼnŊŋŌōŎŏŐ
őŒœŔŕŖŗŘřŚśŜŝŞşŠšŢţŤťŦŧŨũŪūŬŭŮůŰűŲųŴŵŶŷŸŹźŻżŽžſƀƁƂƃƄƅƆƇƈƉƊƋƌƍƎƏƐƑƒ
ƓƔƕƖƗƘƙƚƛƜƝƞƟƠơƢƣƤƥƦƧƨƩƪƫƬƭƮƯưƱƲƳƴƵƶƷƸƹƺƼƽƾƿDŽdžLJljNJnjǍǎǏǐǑǒǓǔǕǖǗǘǙǚǛǜ
ǝǞǟǠǡǢǣǤǥǦǧǨǩǪǫǬǭǮǯǰDZdzǴǵǶǷǸǹǺǻǼǽǾǿȀȁȂȃȄȅȆȇȈȉȊȋȌȍȎȏȐȑȒȓȔȕȖȗȘșȚțȜȝȞȟ
ȠȡȢȣȤȥȦȧȨȩȪȫȬȭȮȯȰȱȲȳȴȵȶȷȸȹȺȻȼȽȾȿɀɁɐɑɒɓɔɕɖɗɘəɚɛɜɝɞɟɠɡɢɣɤɥɦɧɨɩɪɫɬɭɮɯ
ɰɱɲɳɴɵɶɷɸɹɺɻɼɽɾɿʀʁʂʃʄʅʆʇʈʉʊʋʌʍʎʏʐʑʒʓʔʕʖʗʘʙʚʛʜʝʞʟʠʡʢʣʤʥʦʧʨʩʪʫʬʭʮʯΆΈ
ΉΊΌΎΏΐΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩΪΫάέήίΰαβγδεζηθικλμνξοπρςστυφχψωϊϋόύ
ώϐϑϒϓϔϕϖϗϘϙϚϛϜϝϞϟϠϡϢϣϤϥϦϧϨϩϪϫϬϭϮϯϰϱϲϳϴϵϷϸϹϺϻϼϽϾϿЀЁЂЃЄЅІЇЈЉЊЋЌЍЎЏАБ
ВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяѐёђѓ
єѕіїјљњћќѝўџѠѡѢѣѤѥѦѧѨѩѪѫѬѭѮѯѰѱѲѳѴѵѶѷѸѹѺѻѼѽѾѿҀҁҊҋҌҍҎҏҐґҒғҔҕҖҗҘҙҚқҜҝ
ҞҟҠҡҢңҤҥҦҧҨҩҪҫҬҭҮүҰұҲҳҴҵҶҷҸҹҺһҼҽҾҿӀӁӂӃӄӅӆӇӈӉӊӋӌӍӎӐӑӒӓӔӕӖӗӘәӚӛӜӝӞӟӠ
ӡӢӣӤӥӦӧӨөӪӫӬӭӮӯӰӱӲӳӴӵӶӷӸӹԀԁԂԃԄԅԆԇԈԉԊԋԌԍԎԏԱԲԳԴԵԶԷԸԹԺԻԼԽԾԿՀՁՂՃՄՅՆՇՈՉ
ՊՋՌՍՎՏՐՑՒՓՔՕՖաբգդեզէըթժիլխծկհձղճմյնշոչպջռսվտրցւփքօֆևႠႡႢႣႤႥႦႧႨႩႪႫႬႭ
ႮႯႰႱႲႳႴႵႶႷႸႹႺႻႼႽႾႿჀჁჂჃჄჅᴀᴁᴂᴃᴄᴅᴆᴇᴈᴉᴊᴋᴌᴍᴎᴏᴐᴑᴒᴓᴔᴕᴖᴗᴘᴙᴚᴛᴜᴝᴞᴟᴠᴡᴢᴣᴤᴥᴦᴧᴨᴩ
ᴪᴫᵢᵣᵤᵥᵦᵧᵨᵩᵪᵫᵬᵭᵮᵯᵰᵱᵲᵳᵴᵵᵶᵷᵹᵺᵻᵼᵽᵾᵿᶀᶁᶂᶃᶄᶅᶆᶇᶈᶉᶊᶋᶌᶍᶎᶏᶐᶑᶒᶓᶔᶕᶖᶗᶘᶙᶚḀḁḂḃḄḅḆḇ
ḈḉḊḋḌḍḎḏḐḑḒḓḔḕḖḗḘḙḚḛḜḝḞḟḠḡḢḣḤḥḦḧḨḩḪḫḬḭḮḯḰḱḲḳḴḵḶḷḸḹḺḻḼḽḾḿṀṁṂṃṄṅṆṇṈṉ
ṊṋṌṍṎṏṐṑṒṓṔṕṖṗṘṙṚṛṜṝṞṟṠṡṢṣṤṥṦṧṨṩṪṫṬṭṮṯṰṱṲṳṴṵṶṷṸṹṺṻṼṽṾṿẀẁẂẃẄẅẆẇẈẉẊẋ
ẌẍẎẏẐẑẒẓẔẕẖẗẘẙẚẛẠạẢảẤấẦầẨẩẪẫẬậẮắẰằẲẳẴẵẶặẸẹẺẻẼẽẾếỀềỂểỄễỆệỈỉỊịỌọỎỏỐố
ỒồỔổỖỗỘộỚớỜờỞởỠỡỢợỤụỦủỨứỪừỬửỮữỰựỲỳỴỵỶỷỸỹἀἁἂἃἄἅἆἇἈἉἊἋἌἍἎἏἐἑἒἓἔἕἘἙἚἛ
ἜἝἠἡἢἣἤἥἦἧἨἩἪἫἬἭἮἯἰἱἲἳἴἵἶἷἸἹἺἻἼἽἾἿὀὁὂὃὄὅὈὉὊὋὌὍὐὑὒὓὔὕὖὗὙὛὝὟὠὡὢὣὤὥὦὧ
ὨὩὪὫὬὭὮὯὰάὲέὴήὶίὸόὺύὼώᾀᾁᾂᾃᾄᾅᾆᾇᾐᾑᾒᾓᾔᾕᾖᾗᾠᾡᾢᾣᾤᾥᾦᾧᾰᾱᾲᾳᾴᾶᾷᾸᾹᾺΆιῂῃῄῆῇῈΈῊ
ΉῐῑῒΐῖῗῘῙῚΊῠῡῢΰῤῥῦῧῨῩῪΎῬῲῳῴῶῷῸΌῺΏⁱⁿℂℇℊℋℌℍℎℏℐℑℒℓℕℙℚℛℜℝℤΩℨKÅℬℭℯℰℱℳℴℹ
ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzªµºÀÁÂÃÄÅÆÇÈÉÊ
ËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿĀāĂ㥹ĆćĈĉĊċČčĎ
ďĐđĒēĔĕĖėĘęĚěĜĝĞğĠġĢģĤĥĦħĨĩĪīĬĭĮįİıIJijĴĵĶķĸĹĺĻļĽľĿŀŁłŃńŅņŇňʼnŊŋŌōŎŏŐ
őŒœŔŕŖŗŘřŚśŜŝŞşŠšŢţŤťŦŧŨũŪūŬŭŮůŰűŲųŴŵŶŷŸŹźŻżŽžſƀƁƂƃƄƅƆƇƈƉƊƋƌƍƎƏƐƑƒ
ƓƔƕƖƗƘƙƚƛƜƝƞƟƠơƢƣƤƥƦƧƨƩƪƫƬƭƮƯưƱƲƳƴƵƶƷƸƹƺƼƽƾƿDŽdžLJljNJnjǍǎǏǐǑǒǓǔǕǖǗǘǙǚǛǜ
ǝǞǟǠǡǢǣǤǥǦǧǨǩǪǫǬǭǮǯǰDZdzǴǵǶǷǸǹǺǻǼǽǾǿȀȁȂȃȄȅȆȇȈȉȊȋȌȍȎȏȐȑȒȓȔȕȖȗȘșȚțȜȝȞȟ
ȠȡȢȣȤȥȦȧȨȩȪȫȬȭȮȯȰȱȲȳȴȵȶȷȸȹȺȻȼȽȾȿɀɁɐɑɒɓɔɕɖɗɘəɚɛɜɝɞɟɠɡɢɣɤɥɦɧɨɩɪɫɬɭɮɯ
ɰɱɲɳɴɵɶɷɸɹɺɻɼɽɾɿʀʁʂʃʄʅʆʇʈʉʊʋʌʍʎʏʐʑʒʓʔʕʖʗʘʙʚʛʜʝʞʟʠʡʢʣʤʥʦʧʨʩʪʫʬʭʮʯΆΈ
ΉΊΌΎΏΐΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩΪΫάέήίΰαβγδεζηθικλμνξοπρςστυφχψωϊϋόύ
ώϐϑϒϓϔϕϖϗϘϙϚϛϜϝϞϟϠϡϢϣϤϥϦϧϨϩϪϫϬϭϮϯϰϱϲϳϴϵϷϸϹϺϻϼϽϾϿЀЁЂЃЄЅІЇЈЉЊЋЌЍЎЏАБ
ВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяѐёђѓ
єѕіїјљњћќѝўџѠѡѢѣѤѥѦѧѨѩѪѫѬѭѮѯѰѱѲѳѴѵѶѷѸѹѺѻѼѽѾѿҀҁҊҋҌҍҎҏҐґҒғҔҕҖҗҘҙҚқҜҝ
ҞҟҠҡҢңҤҥҦҧҨҩҪҫҬҭҮүҰұҲҳҴҵҶҷҸҹҺһҼҽҾҿӀӁӂӃӄӅӆӇӈӉӊӋӌӍӎӐӑӒӓӔӕӖӗӘәӚӛӜӝӞӟӠ
ӡӢӣӤӥӦӧӨөӪӫӬӭӮӯӰӱӲӳӴӵӶӷӸӹԀԁԂԃԄԅԆԇԈԉԊԋԌԍԎԏԱԲԳԴԵԶԷԸԹԺԻԼԽԾԿՀՁՂՃՄՅՆՇՈՉ
ՊՋՌՍՎՏՐՑՒՓՔՕՖաբգդեզէըթժիլխծկհձղճմյնշոչպջռսվտրցւփքօֆևႠႡႢႣႤႥႦႧႨႩႪႫႬႭ
ႮႯႰႱႲႳႴႵႶႷႸႹႺႻႼႽႾႿჀჁჂჃჄჅᴀᴁᴂᴃᴄᴅᴆᴇᴈᴉᴊᴋᴌᴍᴎᴏᴐᴑᴒᴓᴔᴕᴖᴗᴘᴙᴚᴛᴜᴝᴞᴟᴠᴡᴢᴣᴤᴥᴦᴧᴨᴩ
ᴪᴫᵢᵣᵤᵥᵦᵧᵨᵩᵪᵫᵬᵭᵮᵯᵰᵱᵲᵳᵴᵵᵶᵷᵹᵺᵻᵼᵽᵾᵿᶀᶁᶂᶃᶄᶅᶆᶇᶈᶉᶊᶋᶌᶍᶎᶏᶐᶑᶒᶓᶔᶕᶖᶗᶘᶙᶚḀḁḂḃḄḅḆḇ
ḈḉḊḋḌḍḎḏḐḑḒḓḔḕḖḗḘḙḚḛḜḝḞḟḠḡḢḣḤḥḦḧḨḩḪḫḬḭḮḯḰḱḲḳḴḵḶḷḸḹḺḻḼḽḾḿṀṁṂṃṄṅṆṇṈṉ
ṊṋṌṍṎṏṐṑṒṓṔṕṖṗṘṙṚṛṜṝṞṟṠṡṢṣṤṥṦṧṨṩṪṫṬṭṮṯṰṱṲṳṴṵṶṷṸṹṺṻṼṽṾṿẀẁẂẃẄẅẆẇẈẉẊẋ
ẌẍẎẏẐẑẒẓẔẕẖẗẘẙẚẛẠạẢảẤấẦầẨẩẪẫẬậẮắẰằẲẳẴẵẶặẸẹẺẻẼẽẾếỀềỂểỄễỆệỈỉỊịỌọỎỏỐố
ỒồỔổỖỗỘộỚớỜờỞởỠỡỢợỤụỦủỨứỪừỬửỮữỰựỲỳỴỵỶỷỸỹἀἁἂἃἄἅἆἇἈἉἊἋἌἍἎἏἐἑἒἓἔἕἘἙἚἛ
ἜἝἠἡἢἣἤἥἦἧἨἩἪἫἬἭἮἯἰἱἲἳἴἵἶἷἸἹἺἻἼἽἾἿὀὁὂὃὄὅὈὉὊὋὌὍὐὑὒὓὔὕὖὗὙὛὝὟὠὡὢὣὤὥὦὧ
ὨὩὪὫὬὭὮὯὰάὲέὴήὶίὸόὺύὼώᾀᾁᾂᾃᾄᾅᾆᾇᾐᾑᾒᾓᾔᾕᾖᾗᾠᾡᾢᾣᾤᾥᾦᾧᾰᾱᾲᾳᾴᾶᾷᾸᾹᾺΆιῂῃῄῆῇῈΈῊ
ΉῐῑῒΐῖῗῘῙῚΊῠῡῢΰῤῥῦῧῨῩῪΎῬῲῳῴῶῷῸΌῺΏⁱⁿℂℇℊℋℌℍℎℏℐℑℒℓℕℙℚℛℜℝℤΩℨKÅℬℭℯℰℱℳℴℹ
ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzªµºÀÁÂÃÄÅÆÇÈÉÊ
ËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿĀāĂ㥹ĆćĈĉĊċČčĎ
ďĐđĒēĔĕĖėĘęĚěĜĝĞğĠġĢģĤĥĦħĨĩĪīĬĭĮįİıIJijĴĵĶķĸĹĺĻļĽľĿŀŁłŃńŅņŇňʼnŊŋŌōŎŏŐ
őŒœŔŕŖŗŘřŚśŜŝŞşŠšŢţŤťŦŧŨũŪūŬŭŮůŰűŲųŴŵŶŷŸŹźŻżŽžſƀƁƂƃƄƅƆƇƈƉƊƋƌƍƎƏƐƑƒ
ƓƔƕƖƗƘƙƚƛƜƝƞƟƠơƢƣƤƥƦƧƨƩƪƫƬƭƮƯưƱƲƳƴƵƶƷƸƹƺƼƽƾƿDŽdžLJljNJnjǍǎǏǐǑǒǓǔǕǖǗǘǙǚǛǜ
ǝǞǟǠǡǢǣǤǥǦǧǨǩǪǫǬǭǮǯǰDZdzǴǵǶǷǸǹǺǻǼǽǾǿȀȁȂȃȄȅȆȇȈȉȊȋȌȍȎȏȐȑȒȓȔȕȖȗȘșȚțȜȝȞȟ
ȠȡȢȣȤȥȦȧȨȩȪȫȬȭȮȯȰȱȲȳȴȵȶȷȸȹȺȻȼȽȾȿɀɁɐɑɒɓɔɕɖɗɘəɚɛɜɝɞɟɠɡɢɣɤɥɦɧɨɩɪɫɬɭɮɯ
ɰɱɲɳɴɵɶɷɸɹɺɻɼɽɾɿʀʁʂʃʄʅʆʇʈʉʊʋʌʍʎʏʐʑʒʓʔʕʖʗʘʙʚʛʜʝʞʟʠʡʢʣʤʥʦʧʨʩʪʫʬʭʮʯΆΈ
ΉΊΌΎΏΐΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩΪΫάέήίΰαβγδεζηθικλμνξοπρςστυφχψωϊϋόύ
ώϐϑϒϓϔϕϖϗϘϙϚϛϜϝϞϟϠϡϢϣϤϥϦϧϨϩϪϫϬϭϮϯϰϱϲϳϴϵϷϸϹϺϻϼϽϾϿЀЁЂЃЄЅІЇЈЉЊЋЌЍЎЏАБ
ВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяѐёђѓ
єѕіїјљњћќѝўџѠѡѢѣѤѥѦѧѨѩѪѫѬѭѮѯѰѱѲѳѴѵѶѷѸѹѺѻѼѽѾѿҀҁҊҋҌҍҎҏҐґҒғҔҕҖҗҘҙҚқҜҝ
ҞҟҠҡҢңҤҥҦҧҨҩҪҫҬҭҮүҰұҲҳҴҵҶҷҸҹҺһҼҽҾҿӀӁӂӃӄӅӆӇӈӉӊӋӌӍӎӐӑӒӓӔӕӖӗӘәӚӛӜӝӞӟӠ
ӡӢӣӤӥӦӧӨөӪӫӬӭӮӯӰӱӲӳӴӵӶӷӸӹԀԁԂԃԄԅԆԇԈԉԊԋԌԍԎԏԱԲԳԴԵԶԷԸԹԺԻԼԽԾԿՀՁՂՃՄՅՆՇՈՉ
ՊՋՌՍՎՏՐՑՒՓՔՕՖաբգդեզէըթժիլխծկհձղճմյնշոչպջռսվտրցւփքօֆևႠႡႢႣႤႥႦႧႨႩႪႫႬႭ
ႮႯႰႱႲႳႴႵႶႷႸႹႺႻႼႽႾႿჀჁჂჃჄჅᴀᴁᴂᴃᴄᴅᴆᴇᴈᴉᴊᴋᴌᴍᴎᴏᴐᴑᴒᴓᴔᴕᴖᴗᴘᴙᴚᴛᴜᴝᴞᴟᴠᴡᴢᴣᴤᴥᴦᴧᴨᴩ
ᴪᴫᵢᵣᵤᵥᵦᵧᵨᵩᵪᵫᵬᵭᵮᵯᵰᵱᵲᵳᵴᵵᵶᵷᵹᵺᵻᵼᵽᵾᵿᶀᶁᶂᶃᶄᶅᶆᶇᶈᶉᶊᶋᶌᶍᶎᶏᶐᶑᶒᶓᶔᶕᶖᶗᶘᶙᶚḀḁḂḃḄḅḆḇ
ḈḉḊḋḌḍḎḏḐḑḒḓḔḕḖḗḘḙḚḛḜḝḞḟḠḡḢḣḤḥḦḧḨḩḪḫḬḭḮḯḰḱḲḳḴḵḶḷḸḹḺḻḼḽḾḿṀṁṂṃṄṅṆṇṈṉ
ṊṋṌṍṎṏṐṑṒṓṔṕṖṗṘṙṚṛṜṝṞṟṠṡṢṣṤṥṦṧṨṩṪṫṬṭṮṯṰṱṲṳṴṵṶṷṸṹṺṻṼṽṾṿẀẁẂẃẄẅẆẇẈẉẊẋ
ẌẍẎẏẐẑẒẓẔẕẖẗẘẙẚẛẠạẢảẤấẦầẨẩẪẫẬậẮắẰằẲẳẴẵẶặẸẹẺẻẼẽẾếỀềỂểỄễỆệỈỉỊịỌọỎỏỐố
ỒồỔổỖỗỘộỚớỜờỞởỠỡỢợỤụỦủỨứỪừỬửỮữỰựỲỳỴỵỶỷỸỹἀἁἂἃἄἅἆἇἈἉἊἋἌἍἎἏἐἑἒἓἔἕἘἙἚἛ
ἜἝἠἡἢἣἤἥἦἧἨἩἪἫἬἭἮἯἰἱἲἳἴἵἶἷἸἹἺἻἼἽἾἿὀὁὂὃὄὅὈὉὊὋὌὍὐὑὒὓὔὕὖὗὙὛὝὟὠὡὢὣὤὥὦὧ
ὨὩὪὫὬὭὮὯὰάὲέὴήὶίὸόὺύὼώᾀᾁᾂᾃᾄᾅᾆᾇᾐᾑᾒᾓᾔᾕᾖᾗᾠᾡᾢᾣᾤᾥᾦᾧᾰᾱᾲᾳᾴᾶᾷᾸᾹᾺΆιῂῃῄῆῇῈΈῊ
ΉῐῑῒΐῖῗῘῙῚΊῠῡῢΰῤῥῦῧῨῩῪΎῬῲῳῴῶῷῸΌῺΏⁱⁿℂℇℊℋℌℍℎℏℐℑℒℓℕℙℚℛℜℝℤΩℨKÅℬℭℯℰℱℳℴℹ
ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzªµºÀÁÂÃÄÅÆÇÈÉÊ
ËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿĀāĂ㥹ĆćĈĉĊċČčĎ
ďĐđĒēĔĕĖėĘęĚěĜĝĞğĠġĢģĤĥĦħĨĩĪīĬĭĮįİıIJijĴĵĶķĸĹĺĻļĽľĿŀŁłŃńŅņŇňʼnŊŋŌōŎŏŐ
őŒœŔŕŖŗŘřŚśŜŝŞşŠšŢţŤťŦŧŨũŪūŬŭŮůŰűŲųŴŵŶŷŸŹźŻżŽžſƀƁƂƃƄƅƆƇƈƉƊƋƌƍƎƏƐƑƒ
ƓƔƕƖƗƘƙƚƛƜƝƞƟƠơƢƣƤƥƦƧƨƩƪƫƬƭƮƯưƱƲƳƴƵƶƷƸƹƺƼƽƾƿDŽdžLJljNJnjǍǎǏǐǑǒǓǔǕǖǗǘǙǚǛǜ
ǝǞǟǠǡǢǣǤǥǦǧǨǩǪǫǬǭǮǯǰDZdzǴǵǶǷǸǹǺǻǼǽǾǿȀȁȂȃȄȅȆȇȈȉȊȋȌȍȎȏȐȑȒȓȔȕȖȗȘșȚțȜȝȞȟ
ȠȡȢȣȤȥȦȧȨȩȪȫȬȭȮȯȰȱȲȳȴȵȶȷȸȹȺȻȼȽȾȿɀɁɐɑɒɓɔɕɖɗɘəɚɛɜɝɞɟɠɡɢɣɤɥɦɧɨɩɪɫɬɭɮɯ
ɰɱɲɳɴɵɶɷɸɹɺɻɼɽɾɿʀʁʂʃʄʅʆʇʈʉʊʋʌʍʎʏʐʑʒʓʔʕʖʗʘʙʚʛʜʝʞʟʠʡʢʣʤʥʦʧʨʩʪʫʬʭʮʯΆΈ
ΉΊΌΎΏΐΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩΪΫάέήίΰαβγδεζηθικλμνξοπρςστυφχψωϊϋόύ
ώϐϑϒϓϔϕϖϗϘϙϚϛϜϝϞϟϠϡϢϣϤϥϦϧϨϩϪϫϬϭϮϯϰϱϲϳϴϵϷϸϹϺϻϼϽϾϿЀЁЂЃЄЅІЇЈЉЊЋЌЍЎЏАБ
ВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяѐёђѓ
єѕіїјљњћќѝўџѠѡѢѣѤѥѦѧѨѩѪѫѬѭѮѯѰѱѲѳѴѵѶѷѸѹѺѻѼѽѾѿҀҁҊҋҌҍҎҏҐґҒғҔҕҖҗҘҙҚқҜҝ
ҞҟҠҡҢңҤҥҦҧҨҩҪҫҬҭҮүҰұҲҳҴҵҶҷҸҹҺһҼҽҾҿӀӁӂӃӄӅӆӇӈӉӊӋӌӍӎӐӑӒӓӔӕӖӗӘәӚӛӜӝӞӟӠ
ӡӢӣӤӥӦӧӨөӪӫӬӭӮӯӰӱӲӳӴӵӶӷӸӹԀԁԂԃԄԅԆԇԈԉԊԋԌԍԎԏԱԲԳԴԵԶԷԸԹԺԻԼԽԾԿՀՁՂՃՄՅՆՇՈՉ
ՊՋՌՍՎՏՐՑՒՓՔՕՖաբգդեզէըթժիլխծկհձղճմյնշոչպջռսվտրցւփքօֆևႠႡႢႣႤႥႦႧႨႩႪႫႬႭ
ႮႯႰႱႲႳႴႵႶႷႸႹႺႻႼႽႾႿჀჁჂჃჄჅᴀᴁᴂᴃᴄᴅᴆᴇᴈᴉᴊᴋᴌᴍᴎᴏᴐᴑᴒᴓᴔᴕᴖᴗᴘᴙᴚᴛᴜᴝᴞᴟᴠᴡᴢᴣᴤᴥᴦᴧᴨᴩ
ᴪᴫᵢᵣᵤᵥᵦᵧᵨᵩᵪᵫᵬᵭᵮᵯᵰᵱᵲᵳᵴᵵᵶᵷᵹᵺᵻᵼᵽᵾᵿᶀᶁᶂᶃᶄᶅᶆᶇᶈᶉᶊᶋᶌᶍᶎᶏᶐᶑᶒᶓᶔᶕᶖᶗᶘᶙᶚḀḁḂḃḄḅḆḇ
ḈḉḊḋḌḍḎḏḐḑḒḓḔḕḖḗḘḙḚḛḜḝḞḟḠḡḢḣḤḥḦḧḨḩḪḫḬḭḮḯḰḱḲḳḴḵḶḷḸḹḺḻḼḽḾḿṀṁṂṃṄṅṆṇṈṉ
ṊṋṌṍṎṏṐṑṒṓṔṕṖṗṘṙṚṛṜṝṞṟṠṡṢṣṤṥṦṧṨṩṪṫṬṭṮṯṰṱṲṳṴṵṶṷṸṹṺṻṼṽṾṿẀẁẂẃẄẅẆẇẈẉẊẋ
ẌẍẎẏẐẑẒẓẔẕẖẗẘẙẚẛẠạẢảẤấẦầẨẩẪẫẬậẮắẰằẲẳẴẵẶặẸẹẺẻẼẽẾếỀềỂểỄễỆệỈỉỊịỌọỎỏỐố
ỒồỔổỖỗỘộỚớỜờỞởỠỡỢợỤụỦủỨứỪừỬửỮữỰựỲỳỴỵỶỷỸỹἀἁἂἃἄἅἆἇἈἉἊἋἌἍἎἏἐἑἒἓἔἕἘἙἚἛ
ἜἝἠἡἢἣἤἥἦἧἨἩἪἫἬἭἮἯἰἱἲἳἴἵἶἷἸἹἺἻἼἽἾἿὀὁὂὃὄὅὈὉὊὋὌὍὐὑὒὓὔὕὖὗὙὛὝὟὠὡὢὣὤὥὦὧ
ὨὩὪὫὬὭὮὯὰάὲέὴήὶίὸόὺύὼώᾀᾁᾂᾃᾄᾅᾆᾇᾐᾑᾒᾓᾔᾕᾖᾗᾠᾡᾢᾣᾤᾥᾦᾧᾰᾱᾲᾳᾴᾶᾷᾸᾹᾺΆιῂῃῄῆῇῈΈῊ
ΉῐῑῒΐῖῗῘῙῚΊῠῡῢΰῤῥῦῧῨῩῪΎῬῲῳῴῶῷῸΌῺΏⁱⁿℂℇℊℋℌℍℎℏℐℑℒℓℕℙℚℛℜℝℤΩℨKÅℬℭℯℰℱℳℴℹ
ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzªµºÀÁÂÃÄÅÆÇÈÉÊ
ËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿĀāĂ㥹ĆćĈĉĊċČčĎ
ďĐđĒēĔĕĖėĘęĚěĜĝĞğĠġĢģĤĥĦħĨĩĪīĬĭĮįİıIJijĴĵĶķĸĹĺĻļĽľĿŀŁłŃńŅņŇňʼnŊŋŌōŎŏŐ
őŒœŔŕŖŗŘřŚśŜŝŞşŠšŢţŤťŦŧŨũŪūŬŭŮůŰűŲųŴŵŶŷŸŹźŻżŽžſƀƁƂƃƄƅƆƇƈƉƊƋƌƍƎƏƐƑƒ
ƓƔƕƖƗƘƙƚƛƜƝƞƟƠơƢƣƤƥƦƧƨƩƪƫƬƭƮƯưƱƲƳƴƵƶƷƸƹƺƼƽƾƿDŽdžLJljNJnjǍǎǏǐǑǒǓǔǕǖǗǘǙǚǛǜ
ǝǞǟǠǡǢǣǤǥǦǧǨǩǪǫǬǭǮǯǰDZdzǴǵǶǷǸǹǺǻǼǽǾǿȀȁȂȃȄȅȆȇȈȉȊȋȌȍȎȏȐȑȒȓȔȕȖȗȘșȚțȜȝȞȟ
ȠȡȢȣȤȥȦȧȨȩȪȫȬȭȮȯȰȱȲȳȴȵȶȷȸȹȺȻȼȽȾȿɀɁɐɑɒɓɔɕɖɗɘəɚɛɜɝɞɟɠɡɢɣɤɥɦɧɨɩɪɫɬɭɮɯ
ɰɱɲɳɴɵɶɷɸɹɺɻɼɽɾɿʀʁʂʃʄʅʆʇʈʉʊʋʌʍʎʏʐʑʒʓʔʕʖʗʘʙʚʛʜʝʞʟʠʡʢʣʤʥʦʧʨʩʪʫʬʭʮʯΆΈ
ΉΊΌΎΏΐΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩΪΫάέήίΰαβγδεζηθικλμνξοπρςστυφχψωϊϋόύ
ώϐϑϒϓϔϕϖϗϘϙϚϛϜϝϞϟϠϡϢϣϤϥϦϧϨϩϪϫϬϭϮϯϰϱϲϳϴϵϷϸϹϺϻϼϽϾϿЀЁЂЃЄЅІЇЈЉЊЋЌЍЎЏАБ
ВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяѐёђѓ
єѕіїјљњћќѝўџѠѡѢѣѤѥѦѧѨѩѪѫѬѭѮѯѰѱѲѳѴѵѶѷѸѹѺѻѼѽѾѿҀҁҊҋҌҍҎҏҐґҒғҔҕҖҗҘҙҚқҜҝ
ҞҟҠҡҢңҤҥҦҧҨҩҪҫҬҭҮүҰұҲҳҴҵҶҷҸҹҺһҼҽҾҿӀӁӂӃӄӅӆӇӈӉӊӋӌӍӎӐӑӒӓӔӕӖӗӘәӚӛӜӝӞӟӠ
ӡӢӣӤӥӦӧӨөӪӫӬӭӮӯӰӱӲӳӴӵӶӷӸӹԀԁԂԃԄԅԆԇԈԉԊԋԌԍԎԏԱԲԳԴԵԶԷԸԹԺԻԼԽԾԿՀՁՂՃՄՅՆՇՈՉ
ՊՋՌՍՎՏՐՑՒՓՔՕՖաբգդեզէըթժիլխծկհձղճմյնշոչպջռսվտրցւփքօֆևႠႡႢႣႤႥႦႧႨႩႪႫႬႭ
ႮႯႰႱႲႳႴႵႶႷႸႹႺႻႼႽႾႿჀჁჂჃჄჅᴀᴁᴂᴃᴄᴅᴆᴇᴈᴉᴊᴋᴌᴍᴎᴏᴐᴑᴒᴓᴔᴕᴖᴗᴘᴙᴚᴛᴜᴝᴞᴟᴠᴡᴢᴣᴤᴥᴦᴧᴨᴩ
ᴪᴫᵢᵣᵤᵥᵦᵧᵨᵩᵪᵫᵬᵭᵮᵯᵰᵱᵲᵳᵴᵵᵶᵷᵹᵺᵻᵼᵽᵾᵿᶀᶁᶂᶃᶄᶅᶆᶇᶈᶉᶊᶋᶌᶍᶎᶏᶐᶑᶒᶓᶔᶕᶖᶗᶘᶙᶚḀḁḂḃḄḅḆḇ
ḈḉḊḋḌḍḎḏḐḑḒḓḔḕḖḗḘḙḚḛḜḝḞḟḠḡḢḣḤḥḦḧḨḩḪḫḬḭḮḯḰḱḲḳḴḵḶḷḸḹḺḻḼḽḾḿṀṁṂṃṄṅṆṇṈṉ
ṊṋṌṍṎṏṐṑṒṓṔṕṖṗṘṙṚṛṜṝṞṟṠṡṢṣṤṥṦṧṨṩṪṫṬṭṮṯṰṱṲṳṴṵṶṷṸṹṺṻṼṽṾṿẀẁẂẃẄẅẆẇẈẉẊẋ
ẌẍẎẏẐẑẒẓẔẕẖẗẘẙẚẛẠạẢảẤấẦầẨẩẪẫẬậẮắẰằẲẳẴẵẶặẸẹẺẻẼẽẾếỀềỂểỄễỆệỈỉỊịỌọỎỏỐố
ỒồỔổỖỗỘộỚớỜờỞởỠỡỢợỤụỦủỨứỪừỬửỮữỰựỲỳỴỵỶỷỸỹἀἁἂἃἄἅἆἇἈἉἊἋἌἍἎἏἐἑἒἓἔἕἘἙἚἛ
ἜἝἠἡἢἣἤἥἦἧἨἩἪἫἬἭἮἯἰἱἲἳἴἵἶἷἸἹἺἻἼἽἾἿὀὁὂὃὄὅὈὉὊὋὌὍὐὑὒὓὔὕὖὗὙὛὝὟὠὡὢὣὤὥὦὧ
ὨὩὪὫὬὭὮὯὰάὲέὴήὶίὸόὺύὼώᾀᾁᾂᾃᾄᾅᾆᾇᾐᾑᾒᾓᾔᾕᾖᾗᾠᾡᾢᾣᾤᾥᾦᾧᾰᾱᾲᾳᾴᾶᾷᾸᾹᾺΆιῂῃῄῆῇῈΈῊ
ΉῐῑῒΐῖῗῘῙῚΊῠῡῢΰῤῥῦῧῨῩῪΎῬῲῳῴῶῷῸΌῺΏⁱⁿℂℇℊℋℌℍℎℏℐℑℒℓℕℙℚℛℜℝℤΩℨKÅℬℭℯℰℱℳℴℹ
ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzªµºÀÁÂÃÄÅÆÇÈÉÊ
ËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿĀāĂ㥹ĆćĈĉĊċČčĎ
ďĐđĒēĔĕĖėĘęĚěĜĝĞğĠġĢģĤĥĦħĨĩĪīĬĭĮįİıIJijĴĵĶķĸĹĺĻļĽľĿŀŁłŃńŅņŇňʼnŊŋŌōŎŏŐ
őŒœŔŕŖŗŘřŚśŜŝŞşŠšŢţŤťŦŧŨũŪūŬŭŮůŰűŲųŴŵŶŷŸŹźŻżŽžſƀƁƂƃƄƅƆƇƈƉƊƋƌƍƎƏƐƑƒ
ƓƔƕƖƗƘƙƚƛƜƝƞƟƠơƢƣƤƥƦƧƨƩƪƫƬƭƮƯưƱƲƳƴƵƶƷƸƹƺƼƽƾƿDŽdžLJljNJnjǍǎǏǐǑǒǓǔǕǖǗǘǙǚǛǜ
ǝǞǟǠǡǢǣǤǥǦǧǨǩǪǫǬǭǮǯǰDZdzǴǵǶǷǸǹǺǻǼǽǾǿȀȁȂȃȄȅȆȇȈȉȊȋȌȍȎȏȐȑȒȓȔȕȖȗȘșȚțȜȝȞȟ
ȠȡȢȣȤȥȦȧȨȩȪȫȬȭȮȯȰȱȲȳȴȵȶȷȸȹȺȻȼȽȾȿɀɁɐɑɒɓɔɕɖɗɘəɚɛɜɝɞɟɠɡɢɣɤɥɦɧɨɩɪɫɬɭɮɯ
ɰɱɲɳɴɵɶɷɸɹɺɻɼɽɾɿʀʁʂʃʄʅʆʇʈʉʊʋʌʍʎʏʐʑʒʓʔʕʖʗʘʙʚʛʜʝʞʟʠʡʢʣʤʥʦʧʨʩʪʫʬʭʮʯΆΈ
ΉΊΌΎΏΐΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩΪΫάέήίΰαβγδεζηθικλμνξοπρςστυφχψωϊϋόύ
ώϐϑϒϓϔϕϖϗϘϙϚϛϜϝϞϟϠϡϢϣϤϥϦϧϨϩϪϫϬϭϮϯϰϱϲϳϴϵϷϸϹϺϻϼϽϾϿЀЁЂЃЄЅІЇЈЉЊЋЌЍЎЏАБ
ВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяѐёђѓ
єѕіїјљњћќѝўџѠѡѢѣѤѥѦѧѨѩѪѫѬѭѮѯѰѱѲѳѴѵѶѷѸѹѺѻѼѽѾѿҀҁҊҋҌҍҎҏҐґҒғҔҕҖҗҘҙҚқҜҝ
ҞҟҠҡҢңҤҥҦҧҨҩҪҫҬҭҮүҰұҲҳҴҵҶҷҸҹҺһҼҽҾҿӀӁӂӃӄӅӆӇӈӉӊӋӌӍӎӐӑӒӓӔӕӖӗӘәӚӛӜӝӞӟӠ
ӡӢӣӤӥӦӧӨөӪӫӬӭӮӯӰӱӲӳӴӵӶӷӸӹԀԁԂԃԄԅԆԇԈԉԊԋԌԍԎԏԱԲԳԴԵԶԷԸԹԺԻԼԽԾԿՀՁՂՃՄՅՆՇՈՉ
ՊՋՌՍՎՏՐՑՒՓՔՕՖաբգդեզէըթժիլխծկհձղճմյնշոչպջռսվտրցւփքօֆևႠႡႢႣႤႥႦႧႨႩႪႫႬႭ
ႮႯႰႱႲႳႴႵႶႷႸႹႺႻႼႽႾႿჀჁჂჃჄჅᴀᴁᴂᴃᴄᴅᴆᴇᴈᴉᴊᴋᴌᴍᴎᴏᴐᴑᴒᴓᴔᴕᴖᴗᴘᴙᴚᴛᴜᴝᴞᴟᴠᴡᴢᴣᴤᴥᴦᴧᴨᴩ
ᴪᴫᵢᵣᵤᵥᵦᵧᵨᵩᵪᵫᵬᵭᵮᵯᵰᵱᵲᵳᵴᵵᵶᵷᵹᵺᵻᵼᵽᵾᵿᶀᶁᶂᶃᶄᶅᶆᶇᶈᶉᶊᶋᶌᶍᶎᶏᶐᶑᶒᶓᶔᶕᶖᶗᶘᶙᶚḀḁḂḃḄḅḆḇ
ḈḉḊḋḌḍḎḏḐḑḒḓḔḕḖḗḘḙḚḛḜḝḞḟḠḡḢḣḤḥḦḧḨḩḪḫḬḭḮḯḰḱḲḳḴḵḶḷḸḹḺḻḼḽḾḿṀṁṂṃṄṅṆṇṈṉ
ṊṋṌṍṎṏṐṑṒṓṔṕṖṗṘṙṚṛṜṝṞṟṠṡṢṣṤṥṦṧṨṩṪṫṬṭṮṯṰṱṲṳṴṵṶṷṸṹṺṻṼṽṾṿẀẁẂẃẄẅẆẇẈẉẊẋ
ẌẍẎẏẐẑẒẓẔẕẖẗẘẙẚẛẠạẢảẤấẦầẨẩẪẫẬậẮắẰằẲẳẴẵẶặẸẹẺẻẼẽẾếỀềỂểỄễỆệỈỉỊịỌọỎỏỐố
ỒồỔổỖỗỘộỚớỜờỞởỠỡỢợỤụỦủỨứỪừỬửỮữỰựỲỳỴỵỶỷỸỹἀἁἂἃἄἅἆἇἈἉἊἋἌἍἎἏἐἑἒἓἔἕἘἙἚἛ
ἜἝἠἡἢἣἤἥἦἧἨἩἪἫἬἭἮἯἰἱἲἳἴἵἶἷἸἹἺἻἼἽἾἿὀὁὂὃὄὅὈὉὊὋὌὍὐὑὒὓὔὕὖὗὙὛὝὟὠὡὢὣὤὥὦὧ
ὨὩὪὫὬὭὮὯὰάὲέὴήὶίὸόὺύὼώᾀᾁᾂᾃᾄᾅᾆᾇᾐᾑᾒᾓᾔᾕᾖᾗᾠᾡᾢᾣᾤᾥᾦᾧᾰᾱᾲᾳᾴᾶᾷᾸᾹᾺΆιῂῃῄῆῇῈΈῊ
ΉῐῑῒΐῖῗῘῙῚΊῠῡῢΰῤῥῦῧῨῩῪΎῬῲῳῴῶῷῸΌῺΏⁱⁿℂℇℊℋℌℍℎℏℐℑℒℓℕℙℚℛℜℝℤΩℨKÅℬℭℯℰℱℳℴℹ
ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzªµºÀÁÂÃÄÅÆÇÈÉÊ
ËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿĀāĂ㥹ĆćĈĉĊċČčĎ
ďĐđĒēĔĕĖėĘęĚěĜĝĞğĠġĢģĤĥĦħĨĩĪīĬĭĮįİıIJijĴĵĶķĸĹĺĻļĽľĿŀŁłŃńŅņŇňʼnŊŋŌōŎŏŐ
őŒœŔŕŖŗŘřŚśŜŝŞşŠšŢţŤťŦŧŨũŪūŬŭŮůŰűŲųŴŵŶŷŸŹźŻżŽžſƀƁƂƃƄƅƆƇƈƉƊƋƌƍƎƏƐƑƒ
ƓƔƕƖƗƘƙƚƛƜƝƞƟƠơƢƣƤƥƦƧƨƩƪƫƬƭƮƯưƱƲƳƴƵƶƷƸƹƺƼƽƾƿDŽdžLJljNJnjǍǎǏǐǑǒǓǔǕǖǗǘǙǚǛǜ
ǝǞǟǠǡǢǣǤǥǦǧǨǩǪǫǬǭǮǯǰDZdzǴǵǶǷǸǹǺǻǼǽǾǿȀȁȂȃȄȅȆȇȈȉȊȋȌȍȎȏȐȑȒȓȔȕȖȗȘșȚțȜȝȞȟ
ȠȡȢȣȤȥȦȧȨȩȪȫȬȭȮȯȰȱȲȳȴȵȶȷȸȹȺȻȼȽȾȿɀɁɐɑɒɓɔɕɖɗɘəɚɛɜɝɞɟɠɡɢɣɤɥɦɧɨɩɪɫɬɭɮɯ
ɰɱɲɳɴɵɶɷɸɹɺɻɼɽɾɿʀʁʂʃʄʅʆʇʈʉʊʋʌʍʎʏʐʑʒʓʔʕʖʗʘʙʚʛʜʝʞʟʠʡʢʣʤʥʦʧʨʩʪʫʬʭʮʯΆΈ
ΉΊΌΎΏΐΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩΪΫάέήίΰαβγδεζηθικλμνξοπρςστυφχψωϊϋόύ
ώϐϑϒϓϔϕϖϗϘϙϚϛϜϝϞϟϠϡϢϣϤϥϦϧϨϩϪϫϬϭϮϯϰϱϲϳϴϵϷϸϹϺϻϼϽϾϿЀЁЂЃЄЅІЇЈЉЊЋЌЍЎЏАБ
ВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяѐёђѓ
єѕіїјљњћќѝўџѠѡѢѣѤѥѦѧѨѩѪѫѬѭѮѯѰѱѲѳѴѵѶѷѸѹѺѻѼѽѾѿҀҁҊҋҌҍҎҏҐґҒғҔҕҖҗҘҙҚқҜҝ
ҞҟҠҡҢңҤҥҦҧҨҩҪҫҬҭҮүҰұҲҳҴҵҶҷҸҹҺһҼҽҾҿӀӁӂӃӄӅӆӇӈӉӊӋӌӍӎӐӑӒӓӔӕӖӗӘәӚӛӜӝӞӟӠ
ӡӢӣӤӥӦӧӨөӪӫӬӭӮӯӰӱӲӳӴӵӶӷӸӹԀԁԂԃԄԅԆԇԈԉԊԋԌԍԎԏԱԲԳԴԵԶԷԸԹԺԻԼԽԾԿՀՁՂՃՄՅՆՇՈՉ
ՊՋՌՍՎՏՐՑՒՓՔՕՖաբգդեզէըթժիլխծկհձղճմյնշոչպջռսվտրցւփքօֆևႠႡႢႣႤႥႦႧႨႩႪႫႬႭ
ႮႯႰႱႲႳႴႵႶႷႸႹႺႻႼႽႾႿჀჁჂჃჄჅᴀᴁᴂᴃᴄᴅᴆᴇᴈᴉᴊᴋᴌᴍᴎᴏᴐᴑᴒᴓᴔᴕᴖᴗᴘᴙᴚᴛᴜᴝᴞᴟᴠᴡᴢᴣᴤᴥᴦᴧᴨᴩ
ᴪᴫᵢᵣᵤᵥᵦᵧᵨᵩᵪᵫᵬᵭᵮᵯᵰᵱᵲᵳᵴᵵᵶᵷᵹᵺᵻᵼᵽᵾᵿᶀᶁᶂᶃᶄᶅᶆᶇᶈᶉᶊᶋᶌᶍᶎᶏᶐᶑᶒᶓᶔᶕᶖᶗᶘᶙᶚḀḁḂḃḄḅḆḇ
ḈḉḊḋḌḍḎḏḐḑḒḓḔḕḖḗḘḙḚḛḜḝḞḟḠḡḢḣḤḥḦḧḨḩḪḫḬḭḮḯḰḱḲḳḴḵḶḷḸḹḺḻḼḽḾḿṀṁṂṃṄṅṆṇṈṉ
ṊṋṌṍṎṏṐṑṒṓṔṕṖṗṘṙṚṛṜṝṞṟṠṡṢṣṤṥṦṧṨṩṪṫṬṭṮṯṰṱṲṳṴṵṶṷṸṹṺṻṼṽṾṿẀẁẂẃẄẅẆẇẈẉẊẋ
ẌẍẎẏẐẑẒẓẔẕẖẗẘẙẚẛẠạẢảẤấẦầẨẩẪẫẬậẮắẰằẲẳẴẵẶặẸẹẺẻẼẽẾếỀềỂểỄễỆệỈỉỊịỌọỎỏỐố
ỒồỔổỖỗỘộỚớỜờỞởỠỡỢợỤụỦủỨứỪừỬửỮữỰựỲỳỴỵỶỷỸỹἀἁἂἃἄἅἆἇἈἉἊἋἌἍἎἏἐἑἒἓἔἕἘἙἚἛ
ἜἝἠἡἢἣἤἥἦἧἨἩἪἫἬἭἮἯἰἱἲἳἴἵἶἷἸἹἺἻἼἽἾἿὀὁὂὃὄὅὈὉὊὋὌὍὐὑὒὓὔὕὖὗὙὛὝὟὠὡὢὣὤὥὦὧ
ὨὩὪὫὬὭὮὯὰάὲέὴήὶίὸόὺύὼώᾀᾁᾂᾃᾄᾅᾆᾇᾐᾑᾒᾓᾔᾕᾖᾗᾠᾡᾢᾣᾤᾥᾦᾧᾰᾱᾲᾳᾴᾶᾷᾸᾹᾺΆιῂῃῄῆῇῈΈῊ
ΉῐῑῒΐῖῗῘῙῚΊῠῡῢΰῤῥῦῧῨῩῪΎῬῲῳῴῶῷῸΌῺΏⁱⁿℂℇℊℋℌℍℎℏℐℑℒℓℕℙℚℛℜℝℤΩℨKÅℬℭℯℰℱℳℴℹ
ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzªµºÀÁÂÃÄÅÆÇÈÉÊ
ËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿĀāĂ㥹ĆćĈĉĊċČčĎ
ďĐđĒēĔĕĖėĘęĚěĜĝĞğĠġĢģĤĥĦħĨĩĪīĬĭĮįİıIJijĴĵĶķĸĹĺĻļĽľĿŀŁłŃńŅņŇňʼnŊŋŌōŎŏŐ
őŒœŔŕŖŗŘřŚśŜŝŞşŠšŢţŤťŦŧŨũŪūŬŭŮůŰűŲųŴŵŶŷŸŹźŻżŽžſƀƁƂƃƄƅƆƇƈƉƊƋƌƍƎƏƐƑƒ
ƓƔƕƖƗƘƙƚƛƜƝƞƟƠơƢƣƤƥƦƧƨƩƪƫƬƭƮƯưƱƲƳƴƵƶƷƸƹƺƼƽƾƿDŽdžLJljNJnjǍǎǏǐǑǒǓǔǕǖǗǘǙǚǛǜ
ǝǞǟǠǡǢǣǤǥǦǧǨǩǪǫǬǭǮǯǰDZdzǴǵǶǷǸǹǺǻǼǽǾǿȀȁȂȃȄȅȆȇȈȉȊȋȌȍȎȏȐȑȒȓȔȕȖȗȘșȚțȜȝȞȟ
ȠȡȢȣȤȥȦȧȨȩȪȫȬȭȮȯȰȱȲȳȴȵȶȷȸȹȺȻȼȽȾȿɀɁɐɑɒɓɔɕɖɗɘəɚɛɜɝɞɟɠɡɢɣɤɥɦɧɨɩɪɫɬɭɮɯ
ɰɱɲɳɴɵɶɷɸɹɺɻɼɽɾɿʀʁʂʃʄʅʆʇʈʉʊʋʌʍʎʏʐʑʒʓʔʕʖʗʘʙʚʛʜʝʞʟʠʡʢʣʤʥʦʧʨʩʪʫʬʭʮʯΆΈ
ΉΊΌΎΏΐΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩΪΫάέήίΰαβγδεζηθικλμνξοπρςστυφχψωϊϋόύ
ώϐϑϒϓϔϕϖϗϘϙϚϛϜϝϞϟϠϡϢϣϤϥϦϧϨϩϪϫϬϭϮϯϰϱϲϳϴϵϷϸϹϺϻϼϽϾϿЀЁЂЃЄЅІЇЈЉЊЋЌЍЎЏАБ
ВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяѐёђѓ
єѕіїјљњћќѝўџѠѡѢѣѤѥѦѧѨѩѪѫѬѭѮѯѰѱѲѳѴѵѶѷѸѹѺѻѼѽѾѿҀҁҊҋҌҍҎҏҐґҒғҔҕҖҗҘҙҚқҜҝ
ҞҟҠҡҢңҤҥҦҧҨҩҪҫҬҭҮүҰұҲҳҴҵҶҷҸҹҺһҼҽҾҿӀӁӂӃӄӅӆӇӈӉӊӋӌӍӎӐӑӒӓӔӕӖӗӘәӚӛӜӝӞӟӠ
ӡӢӣӤӥӦӧӨөӪӫӬӭӮӯӰӱӲӳӴӵӶӷӸӹԀԁԂԃԄԅԆԇԈԉԊԋԌԍԎԏԱԲԳԴԵԶԷԸԹԺԻԼԽԾԿՀՁՂՃՄՅՆՇՈՉ
ՊՋՌՍՎՏՐՑՒՓՔՕՖաբգդեզէըթժիլխծկհձղճմյնշոչպջռսվտրցւփքօֆևႠႡႢႣႤႥႦႧႨႩႪႫႬႭ
ႮႯႰႱႲႳႴႵႶႷႸႹႺႻႼႽႾႿჀჁჂჃჄჅᴀᴁᴂᴃᴄᴅᴆᴇᴈᴉᴊᴋᴌᴍᴎᴏᴐᴑᴒᴓᴔᴕᴖᴗᴘᴙᴚᴛᴜᴝᴞᴟᴠᴡᴢᴣᴤᴥᴦᴧᴨᴩ
ᴪᴫᵢᵣᵤᵥᵦᵧᵨᵩᵪᵫᵬᵭᵮᵯᵰᵱᵲᵳᵴᵵᵶᵷᵹᵺᵻᵼᵽᵾᵿᶀᶁᶂᶃᶄᶅᶆᶇᶈᶉᶊᶋᶌᶍᶎᶏᶐᶑᶒᶓᶔᶕᶖᶗᶘᶙᶚḀḁḂḃḄḅḆḇ
ḈḉḊḋḌḍḎḏḐḑḒḓḔḕḖḗḘḙḚḛḜḝḞḟḠḡḢḣḤḥḦḧḨḩḪḫḬḭḮḯḰḱḲḳḴḵḶḷḸḹḺḻḼḽḾḿṀṁṂṃṄṅṆṇṈṉ
ṊṋṌṍṎṏṐṑṒṓṔṕṖṗṘṙṚṛṜṝṞṟṠṡṢṣṤṥṦṧṨṩṪṫṬṭṮṯṰṱṲṳṴṵṶṷṸṹṺṻṼṽṾṿẀẁẂẃẄẅẆẇẈẉẊẋ
ẌẍẎẏẐẑẒẓẔẕẖẗẘẙẚẛẠạẢảẤấẦầẨẩẪẫẬậẮắẰằẲẳẴẵẶặẸẹẺẻẼẽẾếỀềỂểỄễỆệỈỉỊịỌọỎỏỐố
ỒồỔổỖỗỘộỚớỜờỞởỠỡỢợỤụỦủỨứỪừỬửỮữỰựỲỳỴỵỶỷỸỹἀἁἂἃἄἅἆἇἈἉἊἋἌἍἎἏἐἑἒἓἔἕἘἙἚἛ
ἜἝἠἡἢἣἤἥἦἧἨἩἪἫἬἭἮἯἰἱἲἳἴἵἶἷἸἹἺἻἼἽἾἿὀὁὂὃὄὅὈὉὊὋὌὍὐὑὒὓὔὕὖὗὙὛὝὟὠὡὢὣὤὥὦὧ
ὨὩὪὫὬὭὮὯὰάὲέὴήὶίὸόὺύὼώᾀᾁᾂᾃᾄᾅᾆᾇᾐᾑᾒᾓᾔᾕᾖᾗᾠᾡᾢᾣᾤᾥᾦᾧᾰᾱᾲᾳᾴᾶᾷᾸᾹᾺΆιῂῃῄῆῇῈΈῊ
ΉῐῑῒΐῖῗῘῙῚΊῠῡῢΰῤῥῦῧῨῩῪΎῬῲῳῴῶῷῸΌῺΏⁱⁿℂℇℊℋℌℍℎℏℐℑℒℓℕℙℚℛℜℝℤΩℨKÅℬℭℯℰℱℳℴℹ
ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzªµºÀÁÂÃÄÅÆÇÈÉÊ
ËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿĀāĂ㥹ĆćĈĉĊċČčĎ
ďĐđĒēĔĕĖėĘęĚěĜĝĞğĠġĢģĤĥĦħĨĩĪīĬĭĮįİıIJijĴĵĶķĸĹĺĻļĽľĿŀŁłŃńŅņŇňʼnŊŋŌōŎŏŐ
őŒœŔŕŖŗŘřŚśŜŝŞşŠšŢţŤťŦŧŨũŪūŬŭŮůŰűŲųŴŵŶŷŸŹźŻżŽžſƀƁƂƃƄƅƆƇƈƉƊƋƌƍƎƏƐƑƒ
ƓƔƕƖƗƘƙƚƛƜƝƞƟƠơƢƣƤƥƦƧƨƩƪƫƬƭƮƯưƱƲƳƴƵƶƷƸƹƺƼƽƾƿDŽdžLJljNJnjǍǎǏǐǑǒǓǔǕǖǗǘǙǚǛǜ
ǝǞǟǠǡǢǣǤǥǦǧǨǩǪǫǬǭǮǯǰDZdzǴǵǶǷǸǹǺǻǼǽǾǿȀȁȂȃȄȅȆȇȈȉȊȋȌȍȎȏȐȑȒȓȔȕȖȗȘșȚțȜȝȞȟ
ȠȡȢȣȤȥȦȧȨȩȪȫȬȭȮȯȰȱȲȳȴȵȶȷȸȹȺȻȼȽȾȿɀɁɐɑɒɓɔɕɖɗɘəɚɛɜɝɞɟɠɡɢɣɤɥɦɧɨɩɪɫɬɭɮɯ
ɰɱɲɳɴɵɶɷɸɹɺɻɼɽɾɿʀʁʂʃʄʅʆʇʈʉʊʋʌʍʎʏʐʑʒʓʔʕʖʗʘʙʚʛʜʝʞʟʠʡʢʣʤʥʦʧʨʩʪʫʬʭʮʯΆΈ
ΉΊΌΎΏΐΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩΪΫάέήίΰαβγδεζηθικλμνξοπρςστυφχψωϊϋόύ
ώϐϑϒϓϔϕϖϗϘϙϚϛϜϝϞϟϠϡϢϣϤϥϦϧϨϩϪϫϬϭϮϯϰϱϲϳϴϵϷϸϹϺϻϼϽϾϿЀЁЂЃЄЅІЇЈЉЊЋЌЍЎЏАБ
ВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяѐёђѓ
єѕіїјљњћќѝўџѠѡѢѣѤѥѦѧѨѩѪѫѬѭѮѯѰѱѲѳѴѵѶѷѸѹѺѻѼѽѾѿҀҁҊҋҌҍҎҏҐґҒғҔҕҖҗҘҙҚқҜҝ
ҞҟҠҡҢңҤҥҦҧҨҩҪҫҬҭҮүҰұҲҳҴҵҶҷҸҹҺһҼҽҾҿӀӁӂӃӄӅӆӇӈӉӊӋӌӍӎӐӑӒӓӔӕӖӗӘәӚӛӜӝӞӟӠ
ӡӢӣӤӥӦӧӨөӪӫӬӭӮӯӰӱӲӳӴӵӶӷӸӹԀԁԂԃԄԅԆԇԈԉԊԋԌԍԎԏԱԲԳԴԵԶԷԸԹԺԻԼԽԾԿՀՁՂՃՄՅՆՇՈՉ
ՊՋՌՍՎՏՐՑՒՓՔՕՖաբգդեզէըթժիլխծկհձղճմյնշոչպջռսվտրցւփքօֆևႠႡႢႣႤႥႦႧႨႩႪႫႬႭ
ႮႯႰႱႲႳႴႵႶႷႸႹႺႻႼႽႾႿჀჁჂჃჄჅᴀᴁᴂᴃᴄᴅᴆᴇᴈᴉᴊᴋᴌᴍᴎᴏᴐᴑᴒᴓᴔᴕᴖᴗᴘᴙᴚᴛᴜᴝᴞᴟᴠᴡᴢᴣᴤᴥᴦᴧᴨᴩ
ᴪᴫᵢᵣᵤᵥᵦᵧᵨᵩᵪᵫᵬᵭᵮᵯᵰᵱᵲᵳᵴᵵᵶᵷᵹᵺᵻᵼᵽᵾᵿᶀᶁᶂᶃᶄᶅᶆᶇᶈᶉᶊᶋᶌᶍᶎᶏᶐᶑᶒᶓᶔᶕᶖᶗᶘᶙᶚḀḁḂḃḄḅḆḇ
ḈḉḊḋḌḍḎḏḐḑḒḓḔḕḖḗḘḙḚḛḜḝḞḟḠḡḢḣḤḥḦḧḨḩḪḫḬḭḮḯḰḱḲḳḴḵḶḷḸḹḺḻḼḽḾḿṀṁṂṃṄṅṆṇṈṉ
ṊṋṌṍṎṏṐṑṒṓṔṕṖṗṘṙṚṛṜṝṞṟṠṡṢṣṤṥṦṧṨṩṪṫṬṭṮṯṰṱṲṳṴṵṶṷṸṹṺṻṼṽṾṿẀẁẂẃẄẅẆẇẈẉẊẋ
ẌẍẎẏẐẑẒẓẔẕẖẗẘẙẚẛẠạẢảẤấẦầẨẩẪẫẬậẮắẰằẲẳẴẵẶặẸẹẺẻẼẽẾếỀềỂểỄễỆệỈỉỊịỌọỎỏỐố
ỒồỔổỖỗỘộỚớỜờỞởỠỡỢợỤụỦủỨứỪừỬửỮữỰựỲỳỴỵỶỷỸỹἀἁἂἃἄἅἆἇἈἉἊἋἌἍἎἏἐἑἒἓἔἕἘἙἚἛ
ἜἝἠἡἢἣἤἥἦἧἨἩἪἫἬἭἮἯἰἱἲἳἴἵἶἷἸἹἺἻἼἽἾἿὀὁὂὃὄὅὈὉὊὋὌὍὐὑὒὓὔὕὖὗὙὛὝὟὠὡὢὣὤὥὦὧ
ὨὩὪὫὬὭὮὯὰάὲέὴήὶίὸόὺύὼώᾀᾁᾂᾃᾄᾅᾆᾇᾐᾑᾒᾓᾔᾕᾖᾗᾠᾡᾢᾣᾤᾥᾦᾧᾰᾱᾲᾳᾴᾶᾷᾸᾹᾺΆιῂῃῄῆῇῈΈῊ
ΉῐῑῒΐῖῗῘῙῚΊῠῡῢΰῤῥῦῧῨῩῪΎῬῲῳῴῶῷῸΌῺΏⁱⁿℂℇℊℋℌℍℎℏℐℑℒℓℕℙℚℛℜℝℤΩℨKÅℬℭℯℰℱℳℴℹ
python-ruyaml-0.92.1/_test/data/mappings.events 0000664 0000000 0000000 00000002670 15056754172 0021547 0 ustar 00root root 0000000 0000000 - !StreamStart
- !DocumentStart
- !MappingStart
- !Scalar { implicit: [true,true], value: 'key' }
- !Scalar { implicit: [true,true], value: 'value' }
- !Scalar { implicit: [true,true], value: 'empty mapping' }
- !MappingStart
- !MappingEnd
- !Scalar { implicit: [true,true], value: 'empty mapping with tag' }
- !MappingStart { tag: '!mytag', implicit: false }
- !MappingEnd
- !Scalar { implicit: [true,true], value: 'block mapping' }
- !MappingStart
- !MappingStart
- !Scalar { implicit: [true,true], value: 'complex' }
- !Scalar { implicit: [true,true], value: 'key' }
- !Scalar { implicit: [true,true], value: 'complex' }
- !Scalar { implicit: [true,true], value: 'key' }
- !MappingEnd
- !MappingStart
- !Scalar { implicit: [true,true], value: 'complex' }
- !Scalar { implicit: [true,true], value: 'key' }
- !MappingEnd
- !MappingEnd
- !Scalar { implicit: [true,true], value: 'flow mapping' }
- !MappingStart { flow_style: true }
- !Scalar { implicit: [true,true], value: 'key' }
- !Scalar { implicit: [true,true], value: 'value' }
- !MappingStart
- !Scalar { implicit: [true,true], value: 'complex' }
- !Scalar { implicit: [true,true], value: 'key' }
- !Scalar { implicit: [true,true], value: 'complex' }
- !Scalar { implicit: [true,true], value: 'key' }
- !MappingEnd
- !MappingStart
- !Scalar { implicit: [true,true], value: 'complex' }
- !Scalar { implicit: [true,true], value: 'key' }
- !MappingEnd
- !MappingEnd
- !MappingEnd
- !DocumentEnd
- !StreamEnd
python-ruyaml-0.92.1/_test/data/merge.data 0000664 0000000 0000000 00000000005 15056754172 0020423 0 ustar 00root root 0000000 0000000 - <<
python-ruyaml-0.92.1/_test/data/merge.detect 0000664 0000000 0000000 00000000030 15056754172 0020760 0 ustar 00root root 0000000 0000000 tag:yaml.org,2002:merge
python-ruyaml-0.92.1/_test/data/more-floats.code 0000664 0000000 0000000 00000000201 15056754172 0021553 0 ustar 00root root 0000000 0000000 [0.0, +1.0, -1.0, +1e300000, -1e300000, 1e300000/1e300000, -(1e300000/1e300000)] # last two items are ind and qnan respectively.
python-ruyaml-0.92.1/_test/data/more-floats.data 0000664 0000000 0000000 00000000054 15056754172 0021560 0 ustar 00root root 0000000 0000000 [0.0, +1.0, -1.0, +.inf, -.inf, .nan, .nan]
python-ruyaml-0.92.1/_test/data/negative-float-bug.code 0000664 0000000 0000000 00000000005 15056754172 0023005 0 ustar 00root root 0000000 0000000 -1.0
python-ruyaml-0.92.1/_test/data/negative-float-bug.data 0000664 0000000 0000000 00000000005 15056754172 0023004 0 ustar 00root root 0000000 0000000 -1.0
python-ruyaml-0.92.1/_test/data/no-alias-anchor.emitter-error 0000664 0000000 0000000 00000000216 15056754172 0024172 0 ustar 00root root 0000000 0000000 - !StreamStart
- !DocumentStart
- !SequenceStart
- !Scalar { anchor: A, value: data }
- !Alias { }
- !SequenceEnd
- !DocumentEnd
- !StreamEnd
python-ruyaml-0.92.1/_test/data/no-alias-anchor.skip-ext 0000664 0000000 0000000 00000000000 15056754172 0023125 0 ustar 00root root 0000000 0000000 python-ruyaml-0.92.1/_test/data/no-block-collection-end.loader-error 0000664 0000000 0000000 00000000025 15056754172 0025413 0 ustar 00root root 0000000 0000000 - foo
- bar
baz: bar
python-ruyaml-0.92.1/_test/data/no-block-mapping-end-2.loader-error 0000664 0000000 0000000 00000000022 15056754172 0025047 0 ustar 00root root 0000000 0000000 ? foo
: bar
: baz
python-ruyaml-0.92.1/_test/data/no-block-mapping-end.loader-error 0000664 0000000 0000000 00000000021 15056754172 0024707 0 ustar 00root root 0000000 0000000 foo: "bar" "baz"
python-ruyaml-0.92.1/_test/data/no-document-start.loader-error 0000664 0000000 0000000 00000000036 15056754172 0024377 0 ustar 00root root 0000000 0000000 %YAML 1.1
# no ---
foo: bar
python-ruyaml-0.92.1/_test/data/no-flow-mapping-end.loader-error 0000664 0000000 0000000 00000000015 15056754172 0024567 0 ustar 00root root 0000000 0000000 { foo: bar ]
python-ruyaml-0.92.1/_test/data/no-flow-sequence-end.loader-error 0000664 0000000 0000000 00000000013 15056754172 0024742 0 ustar 00root root 0000000 0000000 [foo, bar}
python-ruyaml-0.92.1/_test/data/no-node-1.loader-error 0000664 0000000 0000000 00000000011 15056754172 0022502 0 ustar 00root root 0000000 0000000 - !foo ]
python-ruyaml-0.92.1/_test/data/no-node-2.loader-error 0000664 0000000 0000000 00000000015 15056754172 0022507 0 ustar 00root root 0000000 0000000 - [ !foo } ]
python-ruyaml-0.92.1/_test/data/no-tag.emitter-error 0000664 0000000 0000000 00000000160 15056754172 0022402 0 ustar 00root root 0000000 0000000 - !StreamStart
- !DocumentStart
- !Scalar { value: 'foo', implicit: [false,false] }
- !DocumentEnd
- !StreamEnd
python-ruyaml-0.92.1/_test/data/null.data 0000664 0000000 0000000 00000000015 15056754172 0020277 0 ustar 00root root 0000000 0000000 -
- ~
- null
python-ruyaml-0.92.1/_test/data/null.detect 0000664 0000000 0000000 00000000027 15056754172 0020641 0 ustar 00root root 0000000 0000000 tag:yaml.org,2002:null
python-ruyaml-0.92.1/_test/data/odd-utf16.stream-error 0000664 0000000 0000000 00000002437 15056754172 0022561 0 ustar 00root root 0000000 0000000 # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
T h i s f i l e c o n t a i n s o d d n u m b e r o f b y t e s , s o i t c a n n o t b e a v a l i d U T F - 1 6 s t r e a m .
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # python-ruyaml-0.92.1/_test/data/omap.data 0000664 0000000 0000000 00000000324 15056754172 0020264 0 ustar 00root root 0000000 0000000 Bestiary: !!omap
- aardvark: African pig-like ant eater. Ugly.
- anteater: South-American ant eater. Two species.
- anaconda: South-American constrictor snake. Scaly.
Numbers: !!omap
- one: 1
- two: 2
- three: 3
python-ruyaml-0.92.1/_test/data/omap.roundtrip 0000664 0000000 0000000 00000000000 15056754172 0021370 0 ustar 00root root 0000000 0000000 python-ruyaml-0.92.1/_test/data/recursive-anchor.former-loader-error 0000664 0000000 0000000 00000000042 15056754172 0025560 0 ustar 00root root 0000000 0000000 - &foo [1
2,
3,
*foo]
python-ruyaml-0.92.1/_test/data/recursive-dict.recursive 0000664 0000000 0000000 00000000112 15056754172 0023351 0 ustar 00root root 0000000 0000000 value = {}
instance = AnInstance(value, value)
value[instance] = instance
python-ruyaml-0.92.1/_test/data/recursive-list.recursive 0000664 0000000 0000000 00000000037 15056754172 0023407 0 ustar 00root root 0000000 0000000 value = []
value.append(value)
python-ruyaml-0.92.1/_test/data/recursive-set.recursive 0000664 0000000 0000000 00000000245 15056754172 0023230 0 ustar 00root root 0000000 0000000 try:
set
except NameError:
from sets import Set as set
value = set()
value.add(AnInstance(foo=value, bar=value))
value.add(AnInstance(foo=value, bar=value))
python-ruyaml-0.92.1/_test/data/recursive-state.recursive 0000664 0000000 0000000 00000000073 15056754172 0023554 0 ustar 00root root 0000000 0000000 value = []
value.append(AnInstanceWithState(value, value))
python-ruyaml-0.92.1/_test/data/recursive-tuple.recursive 0000664 0000000 0000000 00000000102 15056754172 0023556 0 ustar 00root root 0000000 0000000 value = ([], [])
value[0].append(value)
value[1].append(value[0])
python-ruyaml-0.92.1/_test/data/recursive.former-dumper-error 0000664 0000000 0000000 00000000047 15056754172 0024343 0 ustar 00root root 0000000 0000000 data = []
data.append(data)
dump(data)
python-ruyaml-0.92.1/_test/data/remove-possible-simple-key-bug.loader-error 0000664 0000000 0000000 00000000164 15056754172 0026761 0 ustar 00root root 0000000 0000000 foo: &A bar
*A ] # The ']' indicator triggers remove_possible_simple_key,
# which should raise an error.
python-ruyaml-0.92.1/_test/data/resolver.data 0000664 0000000 0000000 00000001171 15056754172 0021172 0 ustar 00root root 0000000 0000000 ---
"this scalar should be selected"
---
key11: !foo
key12:
is: [selected]
key22:
key13: [not, selected]
key23: [not, selected]
key32:
key31: [not, selected]
key32: [not, selected]
key33: {not: selected}
key21: !bar
- not selected
- selected
- not selected
key31: !baz
key12:
key13:
key14: {selected}
key23:
key14: [not, selected]
key33:
key14: {selected}
key24: {not: selected}
key22:
- key14: {selected}
key24: {not: selected}
- key14: {selected}
python-ruyaml-0.92.1/_test/data/resolver.path 0000664 0000000 0000000 00000001430 15056754172 0021213 0 ustar 00root root 0000000 0000000 --- !root/scalar
"this scalar should be selected"
--- !root
key11: !foo
key12: !root/key11/key12/*
is: [selected]
key22:
key13: [not, selected]
key23: [not, selected]
key32:
key31: [not, selected]
key32: [not, selected]
key33: {not: selected}
key21: !bar
- not selected
- !root/key21/1/* selected
- not selected
key31: !baz
key12:
key13:
key14: !root/key31/*/*/key14/map {selected}
key23:
key14: [not, selected]
key33:
key14: !root/key31/*/*/key14/map {selected}
key24: {not: selected}
key22:
- key14: !root/key31/*/*/key14/map {selected}
key24: {not: selected}
- key14: !root/key31/*/*/key14/map {selected}
python-ruyaml-0.92.1/_test/data/run-parser-crash-bug.data 0000664 0000000 0000000 00000000275 15056754172 0023304 0 ustar 00root root 0000000 0000000 ---
- Harry Potter and the Prisoner of Azkaban
- Harry Potter and the Goblet of Fire
- Harry Potter and the Order of the Phoenix
---
- Memoirs Found in a Bathtub
- Snow Crash
- Ghost World
python-ruyaml-0.92.1/_test/data/scalars.events 0000664 0000000 0000000 00000002422 15056754172 0021354 0 ustar 00root root 0000000 0000000 - !StreamStart
- !DocumentStart
- !MappingStart
- !Scalar { implicit: [true,true], value: 'empty scalar' }
- !Scalar { implicit: [true,false], value: '' }
- !Scalar { implicit: [true,true], value: 'implicit scalar' }
- !Scalar { implicit: [true,true], value: 'data' }
- !Scalar { implicit: [true,true], value: 'quoted scalar' }
- !Scalar { value: 'data', style: '"' }
- !Scalar { implicit: [true,true], value: 'block scalar' }
- !Scalar { value: 'data', style: '|' }
- !Scalar { implicit: [true,true], value: 'empty scalar with tag' }
- !Scalar { implicit: [false,false], tag: '!mytag', value: '' }
- !Scalar { implicit: [true,true], value: 'implicit scalar with tag' }
- !Scalar { implicit: [false,false], tag: '!mytag', value: 'data' }
- !Scalar { implicit: [true,true], value: 'quoted scalar with tag' }
- !Scalar { value: 'data', style: '"', tag: '!mytag', implicit: [false,false] }
- !Scalar { implicit: [true,true], value: 'block scalar with tag' }
- !Scalar { value: 'data', style: '|', tag: '!mytag', implicit: [false,false] }
- !Scalar { implicit: [true,true], value: 'single character' }
- !Scalar { value: 'a', implicit: [true,true] }
- !Scalar { implicit: [true,true], value: 'single digit' }
- !Scalar { value: '1', implicit: [true,false] }
- !MappingEnd
- !DocumentEnd
- !StreamEnd
python-ruyaml-0.92.1/_test/data/scan-document-end-bug.canonical 0000664 0000000 0000000 00000000030 15056754172 0024417 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!null ""
python-ruyaml-0.92.1/_test/data/scan-document-end-bug.data 0000664 0000000 0000000 00000000023 15056754172 0023403 0 ustar 00root root 0000000 0000000 # Ticket #4
---
... python-ruyaml-0.92.1/_test/data/scan-line-break-bug.canonical 0000664 0000000 0000000 00000000070 15056754172 0024052 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!map { ? !!str "foo" : !!str "bar baz" }
python-ruyaml-0.92.1/_test/data/scan-line-break-bug.data 0000664 0000000 0000000 00000000030 15056754172 0023030 0 ustar 00root root 0000000 0000000 foo:
bar
baz
python-ruyaml-0.92.1/_test/data/sequences.events 0000664 0000000 0000000 00000003236 15056754172 0021723 0 ustar 00root root 0000000 0000000 - !StreamStart
- !DocumentStart
- !SequenceStart
- !SequenceEnd
- !DocumentEnd
- !DocumentStart
- !SequenceStart { tag: '!mytag', implicit: false }
- !SequenceEnd
- !DocumentEnd
- !DocumentStart
- !SequenceStart
- !SequenceStart
- !SequenceEnd
- !SequenceStart { tag: '!mytag', implicit: false }
- !SequenceEnd
- !SequenceStart
- !Scalar
- !Scalar { value: 'data' }
- !Scalar { tag: '!mytag', implicit: [false,false], value: 'data' }
- !SequenceEnd
- !SequenceStart
- !SequenceStart
- !SequenceStart
- !Scalar
- !SequenceEnd
- !SequenceEnd
- !SequenceEnd
- !SequenceStart
- !SequenceStart { tag: '!mytag', implicit: false }
- !SequenceStart
- !Scalar { value: 'data' }
- !SequenceEnd
- !SequenceEnd
- !SequenceEnd
- !SequenceEnd
- !DocumentEnd
- !DocumentStart
- !SequenceStart
- !MappingStart
- !Scalar { value: 'key1' }
- !SequenceStart
- !Scalar { value: 'data1' }
- !Scalar { value: 'data2' }
- !SequenceEnd
- !Scalar { value: 'key2' }
- !SequenceStart { tag: '!mytag1', implicit: false }
- !Scalar { value: 'data3' }
- !SequenceStart
- !Scalar { value: 'data4' }
- !Scalar { value: 'data5' }
- !SequenceEnd
- !SequenceStart { tag: '!mytag2', implicit: false }
- !Scalar { value: 'data6' }
- !Scalar { value: 'data7' }
- !SequenceEnd
- !SequenceEnd
- !MappingEnd
- !SequenceEnd
- !DocumentEnd
- !DocumentStart
- !SequenceStart
- !SequenceStart { flow_style: true }
- !SequenceStart
- !SequenceEnd
- !Scalar
- !Scalar { value: 'data' }
- !Scalar { tag: '!mytag', implicit: [false,false], value: 'data' }
- !SequenceStart { tag: '!mytag', implicit: false }
- !Scalar { value: 'data' }
- !Scalar { value: 'data' }
- !SequenceEnd
- !SequenceEnd
- !SequenceEnd
- !DocumentEnd
- !StreamEnd
python-ruyaml-0.92.1/_test/data/serializer-is-already-opened.dumper-error 0000664 0000000 0000000 00000000075 15056754172 0026516 0 ustar 00root root 0000000 0000000 dumper = yaml.Dumper(StringIO())
dumper.open()
dumper.open()
python-ruyaml-0.92.1/_test/data/serializer-is-closed-1.dumper-error 0000664 0000000 0000000 00000000114 15056754172 0025226 0 ustar 00root root 0000000 0000000 dumper = yaml.Dumper(StringIO())
dumper.open()
dumper.close()
dumper.open()
python-ruyaml-0.92.1/_test/data/serializer-is-closed-2.dumper-error 0000664 0000000 0000000 00000000171 15056754172 0025232 0 ustar 00root root 0000000 0000000 dumper = yaml.Dumper(StringIO())
dumper.open()
dumper.close()
dumper.serialize(yaml.ScalarNode(tag='!foo', value='bar'))
python-ruyaml-0.92.1/_test/data/serializer-is-not-opened-1.dumper-error 0000664 0000000 0000000 00000000060 15056754172 0026025 0 ustar 00root root 0000000 0000000 dumper = yaml.Dumper(StringIO())
dumper.close()
python-ruyaml-0.92.1/_test/data/serializer-is-not-opened-2.dumper-error 0000664 0000000 0000000 00000000134 15056754172 0026030 0 ustar 00root root 0000000 0000000 dumper = yaml.Dumper(StringIO())
dumper.serialize(yaml.ScalarNode(tag='!foo', value='bar'))
python-ruyaml-0.92.1/_test/data/single-dot-is-not-float-bug.code 0000664 0000000 0000000 00000000004 15056754172 0024456 0 ustar 00root root 0000000 0000000 '.'
python-ruyaml-0.92.1/_test/data/single-dot-is-not-float-bug.data 0000664 0000000 0000000 00000000002 15056754172 0024453 0 ustar 00root root 0000000 0000000 .
python-ruyaml-0.92.1/_test/data/sloppy-indentation.canonical 0000664 0000000 0000000 00000001013 15056754172 0024202 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!map {
? !!str "in the block context"
: !!map {
? !!str "indentation should be kept"
: !!map {
? !!str "but in the flow context"
: !!seq [ !!str "it may be violated" ]
}
}
}
--- !!str
"the parser does not require scalars to be indented with at least one space"
--- !!str
"the parser does not require scalars to be indented with at least one space"
--- !!map
{ ? !!str "foo": { ? !!str "bar" : !!str "quoted scalars may not adhere indentation" } }
python-ruyaml-0.92.1/_test/data/sloppy-indentation.data 0000664 0000000 0000000 00000000524 15056754172 0023172 0 ustar 00root root 0000000 0000000 ---
in the block context:
indentation should be kept: {
but in the flow context: [
it may be violated]
}
---
the parser does not require scalars
to be indented with at least one space
...
---
"the parser does not require scalars
to be indented with at least one space"
---
foo:
bar: 'quoted scalars
may not adhere indentation'
python-ruyaml-0.92.1/_test/data/spec-02-01.code 0000664 0000000 0000000 00000000056 15056754172 0020722 0 ustar 00root root 0000000 0000000 ['Mark McGwire', 'Sammy Sosa', 'Ken Griffey']
python-ruyaml-0.92.1/_test/data/spec-02-01.data 0000664 0000000 0000000 00000000052 15056754172 0020715 0 ustar 00root root 0000000 0000000 - Mark McGwire
- Sammy Sosa
- Ken Griffey
python-ruyaml-0.92.1/_test/data/spec-02-01.structure 0000664 0000000 0000000 00000000023 15056754172 0022042 0 ustar 00root root 0000000 0000000 [True, True, True]
python-ruyaml-0.92.1/_test/data/spec-02-01.tokens 0000664 0000000 0000000 00000000022 15056754172 0021304 0 ustar 00root root 0000000 0000000 [[ , _ , _ , _ ]}
python-ruyaml-0.92.1/_test/data/spec-02-02.data 0000664 0000000 0000000 00000000120 15056754172 0020712 0 ustar 00root root 0000000 0000000 hr: 65 # Home runs
avg: 0.278 # Batting average
rbi: 147 # Runs Batted In
python-ruyaml-0.92.1/_test/data/spec-02-02.structure 0000664 0000000 0000000 00000000053 15056754172 0022046 0 ustar 00root root 0000000 0000000 [(True, True), (True, True), (True, True)]
python-ruyaml-0.92.1/_test/data/spec-02-02.tokens 0000664 0000000 0000000 00000000036 15056754172 0021312 0 ustar 00root root 0000000 0000000 {{
? _ : _
? _ : _
? _ : _
]}
python-ruyaml-0.92.1/_test/data/spec-02-03.data 0000664 0000000 0000000 00000000205 15056754172 0020717 0 ustar 00root root 0000000 0000000 american:
- Boston Red Sox
- Detroit Tigers
- New York Yankees
national:
- New York Mets
- Chicago Cubs
- Atlanta Braves
python-ruyaml-0.92.1/_test/data/spec-02-03.structure 0000664 0000000 0000000 00000000071 15056754172 0022047 0 ustar 00root root 0000000 0000000 [(True, [True, True, True]), (True, [True, True, True])]
python-ruyaml-0.92.1/_test/data/spec-02-03.tokens 0000664 0000000 0000000 00000000066 15056754172 0021316 0 ustar 00root root 0000000 0000000 {{
? _ : [[ , _ , _ , _ ]}
? _ : [[ , _ , _ , _ ]}
]}
python-ruyaml-0.92.1/_test/data/spec-02-04.data 0000664 0000000 0000000 00000000136 15056754172 0020723 0 ustar 00root root 0000000 0000000 -
name: Mark McGwire
hr: 65
avg: 0.278
-
name: Sammy Sosa
hr: 63
avg: 0.288
python-ruyaml-0.92.1/_test/data/spec-02-04.structure 0000664 0000000 0000000 00000000144 15056754172 0022051 0 ustar 00root root 0000000 0000000 [
[(True, True), (True, True), (True, True)],
[(True, True), (True, True), (True, True)],
]
python-ruyaml-0.92.1/_test/data/spec-02-04.tokens 0000664 0000000 0000000 00000000106 15056754172 0021312 0 ustar 00root root 0000000 0000000 [[
, {{ ? _ : _ ? _ : _ ? _ : _ ]}
, {{ ? _ : _ ? _ : _ ? _ : _ ]}
]}
python-ruyaml-0.92.1/_test/data/spec-02-05.data 0000664 0000000 0000000 00000000124 15056754172 0020721 0 ustar 00root root 0000000 0000000 - [name , hr, avg ]
- [Mark McGwire, 65, 0.278]
- [Sammy Sosa , 63, 0.288]
python-ruyaml-0.92.1/_test/data/spec-02-05.structure 0000664 0000000 0000000 00000000114 15056754172 0022047 0 ustar 00root root 0000000 0000000 [
[True, True, True],
[True, True, True],
[True, True, True],
]
python-ruyaml-0.92.1/_test/data/spec-02-05.tokens 0000664 0000000 0000000 00000000066 15056754172 0021320 0 ustar 00root root 0000000 0000000 [[
, [ _ , _ , _ ]
, [ _ , _ , _ ]
, [ _ , _ , _ ]
]}
python-ruyaml-0.92.1/_test/data/spec-02-06.data 0000664 0000000 0000000 00000000120 15056754172 0020716 0 ustar 00root root 0000000 0000000 Mark McGwire: {hr: 65, avg: 0.278}
Sammy Sosa: {
hr: 63,
avg: 0.288
}
python-ruyaml-0.92.1/_test/data/spec-02-06.structure 0000664 0000000 0000000 00000000130 15056754172 0022046 0 ustar 00root root 0000000 0000000 [
(True, [(True, True), (True, True)]),
(True, [(True, True), (True, True)]),
]
python-ruyaml-0.92.1/_test/data/spec-02-06.tokens 0000664 0000000 0000000 00000000076 15056754172 0021322 0 ustar 00root root 0000000 0000000 {{
? _ : { ? _ : _ , ? _ : _ }
? _ : { ? _ : _ , ? _ : _ }
]}
python-ruyaml-0.92.1/_test/data/spec-02-07.data 0000664 0000000 0000000 00000000202 15056754172 0020720 0 ustar 00root root 0000000 0000000 # Ranking of 1998 home runs
---
- Mark McGwire
- Sammy Sosa
- Ken Griffey
# Team ranking
---
- Chicago Cubs
- St Louis Cardinals
python-ruyaml-0.92.1/_test/data/spec-02-07.structure 0000664 0000000 0000000 00000000046 15056754172 0022055 0 ustar 00root root 0000000 0000000 [
[True, True, True],
[True, True],
]
python-ruyaml-0.92.1/_test/data/spec-02-07.tokens 0000664 0000000 0000000 00000000051 15056754172 0021314 0 ustar 00root root 0000000 0000000 ---
[[
, _
, _
, _
]}
---
[[
, _
, _
]}
python-ruyaml-0.92.1/_test/data/spec-02-08.data 0000664 0000000 0000000 00000000175 15056754172 0020732 0 ustar 00root root 0000000 0000000 ---
time: 20:03:20
player: Sammy Sosa
action: strike (miss)
...
---
time: 20:03:47
player: Sammy Sosa
action: grand slam
...
python-ruyaml-0.92.1/_test/data/spec-02-08.structure 0000664 0000000 0000000 00000000134 15056754172 0022054 0 ustar 00root root 0000000 0000000 [
[(True, True), (True, True), (True, True)],
[(True, True), (True, True), (True, True)],
]
python-ruyaml-0.92.1/_test/data/spec-02-08.tokens 0000664 0000000 0000000 00000000115 15056754172 0021316 0 ustar 00root root 0000000 0000000 ---
{{
? _ : _
? _ : _
? _ : _
]}
...
---
{{
? _ : _
? _ : _
? _ : _
]}
...
python-ruyaml-0.92.1/_test/data/spec-02-09.data 0000664 0000000 0000000 00000000163 15056754172 0020730 0 ustar 00root root 0000000 0000000 ---
hr: # 1998 hr ranking
- Mark McGwire
- Sammy Sosa
rbi:
# 1998 rbi ranking
- Sammy Sosa
- Ken Griffey
python-ruyaml-0.92.1/_test/data/spec-02-09.structure 0000664 0000000 0000000 00000000055 15056754172 0022057 0 ustar 00root root 0000000 0000000 [(True, [True, True]), (True, [True, True])]
python-ruyaml-0.92.1/_test/data/spec-02-09.tokens 0000664 0000000 0000000 00000000062 15056754172 0021320 0 ustar 00root root 0000000 0000000 ---
{{
? _ : [[ , _ , _ ]}
? _ : [[ , _ , _ ]}
]}
python-ruyaml-0.92.1/_test/data/spec-02-10.data 0000664 0000000 0000000 00000000177 15056754172 0020725 0 ustar 00root root 0000000 0000000 ---
hr:
- Mark McGwire
# Following node labeled SS
- &SS Sammy Sosa
rbi:
- *SS # Subsequent occurrence
- Ken Griffey
python-ruyaml-0.92.1/_test/data/spec-02-10.structure 0000664 0000000 0000000 00000000054 15056754172 0022046 0 ustar 00root root 0000000 0000000 [(True, [True, True]), (True, ['*', True])]
python-ruyaml-0.92.1/_test/data/spec-02-10.tokens 0000664 0000000 0000000 00000000064 15056754172 0021312 0 ustar 00root root 0000000 0000000 ---
{{
? _ : [[ , _ , & _ ]}
? _ : [[ , * , _ ]}
]}
python-ruyaml-0.92.1/_test/data/spec-02-11.code 0000664 0000000 0000000 00000000320 15056754172 0020715 0 ustar 00root root 0000000 0000000 {
('Detroit Tigers', 'Chicago cubs'): [datetime.date(2001, 7, 23)],
('New York Yankees', 'Atlanta Braves'):
[datetime.date(2001, 7, 2),
datetime.date(2001, 8, 12),
datetime.date(2001, 8, 14)]
}
python-ruyaml-0.92.1/_test/data/spec-02-11.data 0000664 0000000 0000000 00000000216 15056754172 0020720 0 ustar 00root root 0000000 0000000 ? - Detroit Tigers
- Chicago cubs
:
- 2001-07-23
? [ New York Yankees,
Atlanta Braves ]
: [ 2001-07-02, 2001-08-12,
2001-08-14 ]
python-ruyaml-0.92.1/_test/data/spec-02-11.structure 0000664 0000000 0000000 00000000100 15056754172 0022037 0 ustar 00root root 0000000 0000000 [
([True, True], [True]),
([True, True], [True, True, True]),
]
python-ruyaml-0.92.1/_test/data/spec-02-11.tokens 0000664 0000000 0000000 00000000076 15056754172 0021316 0 ustar 00root root 0000000 0000000 {{
? [[ , _ , _ ]}
: [[ , _ ]}
? [ _ , _ ]
: [ _ , _ , _ ]
]}
python-ruyaml-0.92.1/_test/data/spec-02-12.data 0000664 0000000 0000000 00000000207 15056754172 0020721 0 ustar 00root root 0000000 0000000 ---
# products purchased
- item : Super Hoop
quantity: 1
- item : Basketball
quantity: 4
- item : Big Shoes
quantity: 1
python-ruyaml-0.92.1/_test/data/spec-02-12.structure 0000664 0000000 0000000 00000000136 15056754172 0022051 0 ustar 00root root 0000000 0000000 [
[(True, True), (True, True)],
[(True, True), (True, True)],
[(True, True), (True, True)],
]
python-ruyaml-0.92.1/_test/data/spec-02-12.tokens 0000664 0000000 0000000 00000000122 15056754172 0021307 0 ustar 00root root 0000000 0000000 ---
[[
, {{ ? _ : _ ? _ : _ ]}
, {{ ? _ : _ ? _ : _ ]}
, {{ ? _ : _ ? _ : _ ]}
]}
python-ruyaml-0.92.1/_test/data/spec-02-13.data 0000664 0000000 0000000 00000000054 15056754172 0020722 0 ustar 00root root 0000000 0000000 # ASCII Art
--- |
\//||\/||
// || ||__
python-ruyaml-0.92.1/_test/data/spec-02-13.structure 0000664 0000000 0000000 00000000005 15056754172 0022045 0 ustar 00root root 0000000 0000000 True
python-ruyaml-0.92.1/_test/data/spec-02-13.tokens 0000664 0000000 0000000 00000000006 15056754172 0021311 0 ustar 00root root 0000000 0000000 --- _
python-ruyaml-0.92.1/_test/data/spec-02-14.data 0000664 0000000 0000000 00000000075 15056754172 0020726 0 ustar 00root root 0000000 0000000 ---
Mark McGwire's
year was crippled
by a knee injury.
python-ruyaml-0.92.1/_test/data/spec-02-14.structure 0000664 0000000 0000000 00000000005 15056754172 0022046 0 ustar 00root root 0000000 0000000 True
python-ruyaml-0.92.1/_test/data/spec-02-14.tokens 0000664 0000000 0000000 00000000006 15056754172 0021312 0 ustar 00root root 0000000 0000000 --- _
python-ruyaml-0.92.1/_test/data/spec-02-15.data 0000664 0000000 0000000 00000000170 15056754172 0020723 0 ustar 00root root 0000000 0000000 >
Sammy Sosa completed another
fine season with great stats.
63 Home Runs
0.288 Batting Average
What a year!
python-ruyaml-0.92.1/_test/data/spec-02-15.structure 0000664 0000000 0000000 00000000005 15056754172 0022047 0 ustar 00root root 0000000 0000000 True
python-ruyaml-0.92.1/_test/data/spec-02-15.tokens 0000664 0000000 0000000 00000000002 15056754172 0021307 0 ustar 00root root 0000000 0000000 _
python-ruyaml-0.92.1/_test/data/spec-02-16.data 0000664 0000000 0000000 00000000212 15056754172 0020721 0 ustar 00root root 0000000 0000000 name: Mark McGwire
accomplishment: >
Mark set a major league
home run record in 1998.
stats: |
65 Home Runs
0.278 Batting Average
python-ruyaml-0.92.1/_test/data/spec-02-16.structure 0000664 0000000 0000000 00000000053 15056754172 0022053 0 ustar 00root root 0000000 0000000 [(True, True), (True, True), (True, True)]
python-ruyaml-0.92.1/_test/data/spec-02-16.tokens 0000664 0000000 0000000 00000000036 15056754172 0021317 0 ustar 00root root 0000000 0000000 {{
? _ : _
? _ : _
? _ : _
]}
python-ruyaml-0.92.1/_test/data/spec-02-17.data 0000664 0000000 0000000 00000000261 15056754172 0020726 0 ustar 00root root 0000000 0000000 unicode: "Sosa did fine.\u263A"
control: "\b1998\t1999\t2000\n"
hexesc: "\x13\x10 is \r\n"
single: '"Howdy!" he cried.'
quoted: ' # not a ''comment''.'
tie-fighter: '|\-*-/|'
python-ruyaml-0.92.1/_test/data/spec-02-17.structure 0000664 0000000 0000000 00000000125 15056754172 0022054 0 ustar 00root root 0000000 0000000 [(True, True), (True, True), (True, True), (True, True), (True, True), (True, True)]
python-ruyaml-0.92.1/_test/data/spec-02-17.tokens 0000664 0000000 0000000 00000000066 15056754172 0021323 0 ustar 00root root 0000000 0000000 {{
? _ : _
? _ : _
? _ : _
? _ : _
? _ : _
? _ : _
]}
python-ruyaml-0.92.1/_test/data/spec-02-18.data 0000664 0000000 0000000 00000000135 15056754172 0020727 0 ustar 00root root 0000000 0000000 plain:
This unquoted scalar
spans many lines.
quoted: "So does this
quoted scalar.\n"
python-ruyaml-0.92.1/_test/data/spec-02-18.structure 0000664 0000000 0000000 00000000035 15056754172 0022055 0 ustar 00root root 0000000 0000000 [(True, True), (True, True)]
python-ruyaml-0.92.1/_test/data/spec-02-18.tokens 0000664 0000000 0000000 00000000026 15056754172 0021320 0 ustar 00root root 0000000 0000000 {{
? _ : _
? _ : _
]}
python-ruyaml-0.92.1/_test/data/spec-02-19.data 0000664 0000000 0000000 00000000123 15056754172 0020725 0 ustar 00root root 0000000 0000000 canonical: 12345
decimal: +12,345
sexagesimal: 3:25:45
octal: 014
hexadecimal: 0xC
python-ruyaml-0.92.1/_test/data/spec-02-19.structure 0000664 0000000 0000000 00000000107 15056754172 0022056 0 ustar 00root root 0000000 0000000 [(True, True), (True, True), (True, True), (True, True), (True, True)]
python-ruyaml-0.92.1/_test/data/spec-02-19.tokens 0000664 0000000 0000000 00000000056 15056754172 0021324 0 ustar 00root root 0000000 0000000 {{
? _ : _
? _ : _
? _ : _
? _ : _
? _ : _
]}
python-ruyaml-0.92.1/_test/data/spec-02-20.data 0000664 0000000 0000000 00000000201 15056754172 0020712 0 ustar 00root root 0000000 0000000 canonical: 1.23015e+3
exponential: 12.3015e+02
sexagesimal: 20:30.15
fixed: 1,230.15
negative infinity: -.inf
not a number: .NaN
python-ruyaml-0.92.1/_test/data/spec-02-20.structure 0000664 0000000 0000000 00000000125 15056754172 0022046 0 ustar 00root root 0000000 0000000 [(True, True), (True, True), (True, True), (True, True), (True, True), (True, True)]
python-ruyaml-0.92.1/_test/data/spec-02-20.tokens 0000664 0000000 0000000 00000000066 15056754172 0021315 0 ustar 00root root 0000000 0000000 {{
? _ : _
? _ : _
? _ : _
? _ : _
? _ : _
? _ : _
]}
python-ruyaml-0.92.1/_test/data/spec-02-21.data 0000664 0000000 0000000 00000000051 15056754172 0020716 0 ustar 00root root 0000000 0000000 null: ~
true: y
false: n
string: '12345'
python-ruyaml-0.92.1/_test/data/spec-02-21.structure 0000664 0000000 0000000 00000000071 15056754172 0022047 0 ustar 00root root 0000000 0000000 [(True, True), (True, True), (True, True), (True, True)]
python-ruyaml-0.92.1/_test/data/spec-02-21.tokens 0000664 0000000 0000000 00000000046 15056754172 0021314 0 ustar 00root root 0000000 0000000 {{
? _ : _
? _ : _
? _ : _
? _ : _
]}
python-ruyaml-0.92.1/_test/data/spec-02-22.data 0000664 0000000 0000000 00000000173 15056754172 0020724 0 ustar 00root root 0000000 0000000 canonical: 2001-12-15T02:59:43.1Z
iso8601: 2001-12-14t21:59:43.10-05:00
spaced: 2001-12-14 21:59:43.10 -5
date: 2002-12-14
python-ruyaml-0.92.1/_test/data/spec-02-22.structure 0000664 0000000 0000000 00000000071 15056754172 0022050 0 ustar 00root root 0000000 0000000 [(True, True), (True, True), (True, True), (True, True)]
python-ruyaml-0.92.1/_test/data/spec-02-22.tokens 0000664 0000000 0000000 00000000046 15056754172 0021315 0 ustar 00root root 0000000 0000000 {{
? _ : _
? _ : _
? _ : _
? _ : _
]}
python-ruyaml-0.92.1/_test/data/spec-02-23.data 0000664 0000000 0000000 00000000410 15056754172 0020717 0 ustar 00root root 0000000 0000000 ---
not-date: !!str 2002-04-28
picture: !!binary |
R0lGODlhDAAMAIQAAP//9/X
17unp5WZmZgAAAOfn515eXv
Pz7Y6OjuDg4J+fn5OTk6enp
56enmleECcgggoBADs=
application specific tag: !something |
The semantics of the tag
above may be different for
different documents.
python-ruyaml-0.92.1/_test/data/spec-02-23.structure 0000664 0000000 0000000 00000000053 15056754172 0022051 0 ustar 00root root 0000000 0000000 [(True, True), (True, True), (True, True)]
python-ruyaml-0.92.1/_test/data/spec-02-23.tokens 0000664 0000000 0000000 00000000050 15056754172 0021311 0 ustar 00root root 0000000 0000000 ---
{{
? _ : ! _
? _ : ! _
? _ : ! _
]}
python-ruyaml-0.92.1/_test/data/spec-02-24.data 0000664 0000000 0000000 00000000452 15056754172 0020726 0 ustar 00root root 0000000 0000000 %TAG ! tag:clarkevans.com,2002:
--- !shape
# Use the ! handle for presenting
# tag:clarkevans.com,2002:circle
- !circle
center: &ORIGIN {x: 73, y: 129}
radius: 7
- !line
start: *ORIGIN
finish: { x: 89, y: 102 }
- !label
start: *ORIGIN
color: 0xFFEEBB
text: Pretty vector drawing.
python-ruyaml-0.92.1/_test/data/spec-02-24.structure 0000664 0000000 0000000 00000000232 15056754172 0022051 0 ustar 00root root 0000000 0000000 [
[(True, [(True, True), (True, True)]), (True, True)],
[(True, '*'), (True, [(True, True), (True, True)])],
[(True, '*'), (True, True), (True, True)],
]
python-ruyaml-0.92.1/_test/data/spec-02-24.tokens 0000664 0000000 0000000 00000000302 15056754172 0021312 0 ustar 00root root 0000000 0000000 %
--- !
[[
, !
{{
? _ : & { ? _ : _ , ? _ : _ }
? _ : _
]}
, !
{{
? _ : *
? _ : { ? _ : _ , ? _ : _ }
]}
, !
{{
? _ : *
? _ : _
? _ : _
]}
]}
python-ruyaml-0.92.1/_test/data/spec-02-25.data 0000664 0000000 0000000 00000000215 15056754172 0020724 0 ustar 00root root 0000000 0000000 # sets are represented as a
# mapping where each key is
# associated with the empty string
--- !!set
? Mark McGwire
? Sammy Sosa
? Ken Griff
python-ruyaml-0.92.1/_test/data/spec-02-25.structure 0000664 0000000 0000000 00000000053 15056754172 0022053 0 ustar 00root root 0000000 0000000 [(True, None), (True, None), (True, None)]
python-ruyaml-0.92.1/_test/data/spec-02-25.tokens 0000664 0000000 0000000 00000000030 15056754172 0021311 0 ustar 00root root 0000000 0000000 --- !
{{
? _
? _
? _
]}
python-ruyaml-0.92.1/_test/data/spec-02-26.data 0000664 0000000 0000000 00000000237 15056754172 0020731 0 ustar 00root root 0000000 0000000 # ordered maps are represented as
# a sequence of mappings, with
# each mapping having one key
--- !!omap
- Mark McGwire: 65
- Sammy Sosa: 63
- Ken Griffy: 58
python-ruyaml-0.92.1/_test/data/spec-02-26.structure 0000664 0000000 0000000 00000000064 15056754172 0022056 0 ustar 00root root 0000000 0000000 [
[(True, True)],
[(True, True)],
[(True, True)],
]
python-ruyaml-0.92.1/_test/data/spec-02-26.tokens 0000664 0000000 0000000 00000000074 15056754172 0021322 0 ustar 00root root 0000000 0000000 --- !
[[
, {{ ? _ : _ ]}
, {{ ? _ : _ ]}
, {{ ? _ : _ ]}
]}
python-ruyaml-0.92.1/_test/data/spec-02-27.data 0000664 0000000 0000000 00000001204 15056754172 0020725 0 ustar 00root root 0000000 0000000 --- !
invoice: 34843
date : 2001-01-23
bill-to: &id001
given : Chris
family : Dumars
address:
lines: |
458 Walkman Dr.
Suite #292
city : Royal Oak
state : MI
postal : 48046
ship-to: *id001
product:
- sku : BL394D
quantity : 4
description : Basketball
price : 450.00
- sku : BL4438H
quantity : 1
description : Super Hoop
price : 2392.00
tax : 251.42
total: 4443.52
comments:
Late afternoon is best.
Backup contact is Nancy
Billsmer @ 338-4338.
python-ruyaml-0.92.1/_test/data/spec-02-27.structure 0000664 0000000 0000000 00000000547 15056754172 0022065 0 ustar 00root root 0000000 0000000 [
(True, True),
(True, True),
(True, [
(True, True),
(True, True),
(True, [(True, True), (True, True), (True, True), (True, True)]),
]),
(True, '*'),
(True, [
[(True, True), (True, True), (True, True), (True, True)],
[(True, True), (True, True), (True, True), (True, True)],
]),
(True, True),
(True, True),
(True, True),
]
python-ruyaml-0.92.1/_test/data/spec-02-27.tokens 0000664 0000000 0000000 00000000406 15056754172 0021322 0 ustar 00root root 0000000 0000000 --- !
{{
? _ : _
? _ : _
? _ : &
{{
? _ : _
? _ : _
? _ : {{ ? _ : _ ? _ : _ ? _ : _ ? _ : _ ]}
]}
? _ : *
? _ :
[[
, {{ ? _ : _ ? _ : _ ? _ : _ ? _ : _ ]}
, {{ ? _ : _ ? _ : _ ? _ : _ ? _ : _ ]}
]}
? _ : _
? _ : _
? _ : _
]}
python-ruyaml-0.92.1/_test/data/spec-02-28.data 0000664 0000000 0000000 00000000633 15056754172 0020733 0 ustar 00root root 0000000 0000000 ---
Time: 2001-11-23 15:01:42 -5
User: ed
Warning:
This is an error message
for the log file
---
Time: 2001-11-23 15:02:31 -5
User: ed
Warning:
A slightly different error
message.
---
Date: 2001-11-23 15:03:17 -5
User: ed
Fatal:
Unknown variable "bar"
Stack:
- file: TopClass.py
line: 23
code: |
x = MoreObject("345\n")
- file: MoreClass.py
line: 58
code: |-
foo = bar
python-ruyaml-0.92.1/_test/data/spec-02-28.structure 0000664 0000000 0000000 00000000372 15056754172 0022062 0 ustar 00root root 0000000 0000000 [
[(True, True), (True, True), (True, True)],
[(True, True), (True, True), (True, True)],
[(True, True), (True, True), (True, True),
(True, [
[(True, True), (True, True), (True, True)],
[(True, True), (True, True), (True, True)],
]),
]
]
python-ruyaml-0.92.1/_test/data/spec-02-28.tokens 0000664 0000000 0000000 00000000312 15056754172 0021317 0 ustar 00root root 0000000 0000000 ---
{{
? _ : _
? _ : _
? _ : _
]}
---
{{
? _ : _
? _ : _
? _ : _
]}
---
{{
? _ : _
? _ : _
? _ : _
? _ :
[[
, {{ ? _ : _ ? _ : _ ? _ : _ ]}
, {{ ? _ : _ ? _ : _ ? _ : _ ]}
]}
]}
python-ruyaml-0.92.1/_test/data/spec-05-01-utf16be.data 0000664 0000000 0000000 00000000042 15056754172 0022171 0 ustar 00root root 0000000 0000000 # C o m m e n t o n l y .
python-ruyaml-0.92.1/_test/data/spec-05-01-utf16be.empty 0000664 0000000 0000000 00000000066 15056754172 0022424 0 ustar 00root root 0000000 0000000 # This stream contains no
# documents, only comments.
python-ruyaml-0.92.1/_test/data/spec-05-01-utf16le.data 0000664 0000000 0000000 00000000042 15056754172 0022203 0 ustar 00root root 0000000 0000000 # C o m m e n t o n l y .
python-ruyaml-0.92.1/_test/data/spec-05-01-utf16le.empty 0000664 0000000 0000000 00000000066 15056754172 0022436 0 ustar 00root root 0000000 0000000 # This stream contains no
# documents, only comments.
python-ruyaml-0.92.1/_test/data/spec-05-01-utf8.data 0000664 0000000 0000000 00000000023 15056754172 0021602 0 ustar 00root root 0000000 0000000 # Comment only.
python-ruyaml-0.92.1/_test/data/spec-05-01-utf8.empty 0000664 0000000 0000000 00000000066 15056754172 0022036 0 ustar 00root root 0000000 0000000 # This stream contains no
# documents, only comments.
python-ruyaml-0.92.1/_test/data/spec-05-02-utf16be.data 0000664 0000000 0000000 00000000132 15056754172 0022172 0 ustar 00root root 0000000 0000000 # I n v a l i d u s e o f B O M
# i n s i d e a
# d o c u m e n t .
python-ruyaml-0.92.1/_test/data/spec-05-02-utf16be.error 0000664 0000000 0000000 00000000062 15056754172 0022414 0 ustar 00root root 0000000 0000000 ERROR:
A BOM must not appear
inside a document.
python-ruyaml-0.92.1/_test/data/spec-05-02-utf16le.data 0000664 0000000 0000000 00000000132 15056754172 0022204 0 ustar 00root root 0000000 0000000 # I n v a l i d u s e o f B O M
# i n s i d e a
# d o c u m e n t .
python-ruyaml-0.92.1/_test/data/spec-05-02-utf16le.error 0000664 0000000 0000000 00000000062 15056754172 0022426 0 ustar 00root root 0000000 0000000 ERROR:
A BOM must not appear
inside a document.
python-ruyaml-0.92.1/_test/data/spec-05-02-utf8.data 0000664 0000000 0000000 00000000057 15056754172 0021612 0 ustar 00root root 0000000 0000000 # Invalid use of BOM
# inside a
# document.
python-ruyaml-0.92.1/_test/data/spec-05-02-utf8.error 0000664 0000000 0000000 00000000062 15056754172 0022026 0 ustar 00root root 0000000 0000000 ERROR:
A BOM must not appear
inside a document.
python-ruyaml-0.92.1/_test/data/spec-05-03.canonical 0000664 0000000 0000000 00000000400 15056754172 0021735 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!map {
? !!str "sequence"
: !!seq [
!!str "one", !!str "two"
],
? !!str "mapping"
: !!map {
? !!str "sky" : !!str "blue",
# ? !!str "sea" : !!str "green",
? !!map { ? !!str "sea" : !!str "green" } : !!null "",
}
}
python-ruyaml-0.92.1/_test/data/spec-05-03.data 0000664 0000000 0000000 00000000100 15056754172 0020714 0 ustar 00root root 0000000 0000000 sequence:
- one
- two
mapping:
? sky
: blue
? sea : green
python-ruyaml-0.92.1/_test/data/spec-05-04.canonical 0000664 0000000 0000000 00000000304 15056754172 0021741 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!map {
? !!str "sequence"
: !!seq [
!!str "one", !!str "two"
],
? !!str "mapping"
: !!map {
? !!str "sky" : !!str "blue",
? !!str "sea" : !!str "green",
}
}
python-ruyaml-0.92.1/_test/data/spec-05-04.data 0000664 0000000 0000000 00000000073 15056754172 0020726 0 ustar 00root root 0000000 0000000 sequence: [ one, two, ]
mapping: { sky: blue, sea: green }
python-ruyaml-0.92.1/_test/data/spec-05-05.data 0000664 0000000 0000000 00000000020 15056754172 0020717 0 ustar 00root root 0000000 0000000 # Comment only.
python-ruyaml-0.92.1/_test/data/spec-05-05.empty 0000664 0000000 0000000 00000000066 15056754172 0021156 0 ustar 00root root 0000000 0000000 # This stream contains no
# documents, only comments.
python-ruyaml-0.92.1/_test/data/spec-05-06.canonical 0000664 0000000 0000000 00000000140 15056754172 0021741 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!map {
? !!str "anchored"
: &A1 !local "value",
? !!str "alias"
: *A1,
}
python-ruyaml-0.92.1/_test/data/spec-05-06.data 0000664 0000000 0000000 00000000056 15056754172 0020731 0 ustar 00root root 0000000 0000000 anchored: !local &anchor value
alias: *anchor
python-ruyaml-0.92.1/_test/data/spec-05-07.canonical 0000664 0000000 0000000 00000000147 15056754172 0021751 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!map {
? !!str "literal"
: !!str "text\n",
? !!str "folded"
: !!str "text\n",
}
python-ruyaml-0.92.1/_test/data/spec-05-07.data 0000664 0000000 0000000 00000000043 15056754172 0020726 0 ustar 00root root 0000000 0000000 literal: |
text
folded: >
text
python-ruyaml-0.92.1/_test/data/spec-05-08.canonical 0000664 0000000 0000000 00000000142 15056754172 0021745 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!map {
? !!str "single"
: !!str "text",
? !!str "double"
: !!str "text",
}
python-ruyaml-0.92.1/_test/data/spec-05-08.data 0000664 0000000 0000000 00000000036 15056754172 0020731 0 ustar 00root root 0000000 0000000 single: 'text'
double: "text"
python-ruyaml-0.92.1/_test/data/spec-05-09.canonical 0000664 0000000 0000000 00000000033 15056754172 0021745 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!str "text"
python-ruyaml-0.92.1/_test/data/spec-05-09.data 0000664 0000000 0000000 00000000023 15056754172 0020726 0 ustar 00root root 0000000 0000000 %YAML 1.1
--- text
python-ruyaml-0.92.1/_test/data/spec-05-10.data 0000664 0000000 0000000 00000000051 15056754172 0020717 0 ustar 00root root 0000000 0000000 commercial-at: @text
grave-accent: `text
python-ruyaml-0.92.1/_test/data/spec-05-10.error 0000664 0000000 0000000 00000000071 15056754172 0021141 0 ustar 00root root 0000000 0000000 ERROR:
Reserved indicators can't
start a plain scalar.
python-ruyaml-0.92.1/_test/data/spec-05-11.canonical 0000664 0000000 0000000 00000000212 15056754172 0021735 0 ustar 00root root 0000000 0000000 %YAML 1.1
--- !!str
"Generic line break (no glyph)\n\
Generic line break (glyphed)\n\
Line separator\u2028\
Paragraph separator\u2029"
python-ruyaml-0.92.1/_test/data/spec-05-11.data 0000664 0000000 0000000 00000000155 15056754172 0020725 0 ustar 00root root 0000000 0000000 |
Generic line break (no glyph)
Generic line break (glyphed)
Line separator
Paragraph separator
python-ruyaml-0.92.1/_test/data/spec-05-12.data 0000664 0000000 0000000 00000000251 15056754172 0020723 0 ustar 00root root 0000000 0000000 # Tabs do's and don'ts:
# comment:
quoted: "Quoted "
block: |
void main() {
printf("Hello, world!\n");
}
elsewhere: # separation
indentation, in plain scalar
python-ruyaml-0.92.1/_test/data/spec-05-12.error 0000664 0000000 0000000 00000000240 15056754172 0021141 0 ustar 00root root 0000000 0000000 ERROR:
Tabs may appear inside
comments and quoted or
block scalar content.
Tabs must not appear
elsewhere, such as
in indentation and
separation spaces.
python-ruyaml-0.92.1/_test/data/spec-05-13.canonical 0000664 0000000 0000000 00000000112 15056754172 0021736 0 ustar 00root root 0000000 0000000 %YAML 1.1
--- !!str
"Text containing \
both space and \
tab characters"
python-ruyaml-0.92.1/_test/data/spec-05-13.data 0000664 0000000 0000000 00000000073 15056754172 0020726 0 ustar 00root root 0000000 0000000 "Text containing
both space and
tab characters"
python-ruyaml-0.92.1/_test/data/spec-05-14.canonical 0000664 0000000 0000000 00000000167 15056754172 0021751 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
"Fun with \x5C
\x22 \x07 \x08 \x1B \x0C
\x0A \x0D \x09 \x0B \x00
\x20 \xA0 \x85 \u2028 \u2029
A A A"
python-ruyaml-0.92.1/_test/data/spec-05-14.data 0000664 0000000 0000000 00000000141 15056754172 0020723 0 ustar 00root root 0000000 0000000 "Fun with \\
\" \a \b \e \f \
\n \r \t \v \0 \
\ \_ \N \L \P \
\x41 \u0041 \U00000041"
python-ruyaml-0.92.1/_test/data/spec-05-15.data 0000664 0000000 0000000 00000000033 15056754172 0020724 0 ustar 00root root 0000000 0000000 Bad escapes:
"\c
\xq-"
python-ruyaml-0.92.1/_test/data/spec-05-15.error 0000664 0000000 0000000 00000000116 15056754172 0021146 0 ustar 00root root 0000000 0000000 ERROR:
- c is an invalid escaped character.
- q and - are invalid hex digits.
python-ruyaml-0.92.1/_test/data/spec-06-01.canonical 0000664 0000000 0000000 00000000423 15056754172 0021741 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!map {
? !!str "Not indented"
: !!map {
? !!str "By one space"
: !!str "By four\n spaces\n",
? !!str "Flow style"
: !!seq [
!!str "By two",
!!str "Also by two",
!!str "Still by two",
]
}
}
python-ruyaml-0.92.1/_test/data/spec-06-01.data 0000664 0000000 0000000 00000000547 15056754172 0020732 0 ustar 00root root 0000000 0000000 # Leading comment line spaces are
# neither content nor indentation.
Not indented:
By one space: |
By four
spaces
Flow style: [ # Leading spaces
By two, # in flow style
Also by two, # are neither
# Tabs are not allowed:
# Still by two # content nor
Still by two # content nor
] # indentation.
python-ruyaml-0.92.1/_test/data/spec-06-02.data 0000664 0000000 0000000 00000000021 15056754172 0020716 0 ustar 00root root 0000000 0000000 # Comment
python-ruyaml-0.92.1/_test/data/spec-06-02.empty 0000664 0000000 0000000 00000000066 15056754172 0021154 0 ustar 00root root 0000000 0000000 # This stream contains no
# documents, only comments.
python-ruyaml-0.92.1/_test/data/spec-06-03.canonical 0000664 0000000 0000000 00000000072 15056754172 0021743 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!map {
? !!str "key"
: !!str "value"
}
python-ruyaml-0.92.1/_test/data/spec-06-03.data 0000664 0000000 0000000 00000000032 15056754172 0020721 0 ustar 00root root 0000000 0000000 key: # Comment
value
python-ruyaml-0.92.1/_test/data/spec-06-04.canonical 0000664 0000000 0000000 00000000072 15056754172 0021744 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!map {
? !!str "key"
: !!str "value"
}
python-ruyaml-0.92.1/_test/data/spec-06-04.data 0000664 0000000 0000000 00000000053 15056754172 0020725 0 ustar 00root root 0000000 0000000 key: # Comment
# lines
value
python-ruyaml-0.92.1/_test/data/spec-06-05.canonical 0000664 0000000 0000000 00000000322 15056754172 0021743 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!map {
? !!map {
? !!str "first"
: !!str "Sammy",
? !!str "last"
: !!str "Sosa"
}
: !!map {
? !!str "hr"
: !!int "65",
? !!str "avg"
: !!float "0.278"
}
}
python-ruyaml-0.92.1/_test/data/spec-06-05.data 0000664 0000000 0000000 00000000141 15056754172 0020724 0 ustar 00root root 0000000 0000000 { first: Sammy, last: Sosa }:
# Statistics:
hr: # Home runs
65
avg: # Average
0.278
python-ruyaml-0.92.1/_test/data/spec-06-06.canonical 0000664 0000000 0000000 00000000233 15056754172 0021745 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!map {
? !!str "plain"
: !!str "text lines",
? !!str "quoted"
: !!str "text lines",
? !!str "block"
: !!str "text\n lines\n"
}
python-ruyaml-0.92.1/_test/data/spec-06-06.data 0000664 0000000 0000000 00000000106 15056754172 0020726 0 ustar 00root root 0000000 0000000 plain: text
lines
quoted: "text
lines"
block: |
text
lines
python-ruyaml-0.92.1/_test/data/spec-06-07.canonical 0000664 0000000 0000000 00000000101 15056754172 0021740 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!seq [
!!str "foo\nbar",
!!str "foo\n\nbar"
]
python-ruyaml-0.92.1/_test/data/spec-06-07.data 0000664 0000000 0000000 00000000044 15056754172 0020730 0 ustar 00root root 0000000 0000000 - foo
bar
- |-
foo
bar
python-ruyaml-0.92.1/_test/data/spec-06-08.canonical 0000664 0000000 0000000 00000000074 15056754172 0021752 0 ustar 00root root 0000000 0000000 %YAML 1.1
--- !!str
"specific\L\
trimmed\n\n\n\
as space"
python-ruyaml-0.92.1/_test/data/spec-06-08.data 0000664 0000000 0000000 00000000062 15056754172 0020731 0 ustar 00root root 0000000 0000000 >-
specific
trimmed
as
space
python-ruyaml-0.92.1/_test/data/spec-07-01.canonical 0000664 0000000 0000000 00000000032 15056754172 0021736 0 ustar 00root root 0000000 0000000 %YAML 1.1
--- !!str
"foo"
python-ruyaml-0.92.1/_test/data/spec-07-01.data 0000664 0000000 0000000 00000000115 15056754172 0020722 0 ustar 00root root 0000000 0000000 %FOO bar baz # Should be ignored
# with a warning.
--- "foo"
python-ruyaml-0.92.1/_test/data/spec-07-01.skip-ext 0000664 0000000 0000000 00000000000 15056754172 0021546 0 ustar 00root root 0000000 0000000 python-ruyaml-0.92.1/_test/data/spec-07-02.canonical 0000664 0000000 0000000 00000000032 15056754172 0021737 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!str "foo"
python-ruyaml-0.92.1/_test/data/spec-07-02.data 0000664 0000000 0000000 00000000102 15056754172 0020717 0 ustar 00root root 0000000 0000000 %YAML 1.2 # Attempt parsing
# with a warning
---
"foo"
python-ruyaml-0.92.1/_test/data/spec-07-02.skip-ext 0000664 0000000 0000000 00000000000 15056754172 0021547 0 ustar 00root root 0000000 0000000 python-ruyaml-0.92.1/_test/data/spec-07-03.data 0000664 0000000 0000000 00000000030 15056754172 0020720 0 ustar 00root root 0000000 0000000 %YAML 1.1
%YAML 1.1
foo
python-ruyaml-0.92.1/_test/data/spec-07-03.error 0000664 0000000 0000000 00000000110 15056754172 0021137 0 ustar 00root root 0000000 0000000 ERROR:
The YAML directive must only be
given at most once per document.
python-ruyaml-0.92.1/_test/data/spec-07-04.canonical 0000664 0000000 0000000 00000000032 15056754172 0021741 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!str "foo"
python-ruyaml-0.92.1/_test/data/spec-07-04.data 0000664 0000000 0000000 00000000063 15056754172 0020727 0 ustar 00root root 0000000 0000000 %TAG !yaml! tag:yaml.org,2002:
---
!yaml!str "foo"
python-ruyaml-0.92.1/_test/data/spec-07-05.data 0000664 0000000 0000000 00000000034 15056754172 0020726 0 ustar 00root root 0000000 0000000 %TAG ! !foo
%TAG ! !foo
bar
python-ruyaml-0.92.1/_test/data/spec-07-05.error 0000664 0000000 0000000 00000000132 15056754172 0021145 0 ustar 00root root 0000000 0000000 ERROR:
The TAG directive must only
be given at most once per
handle in the same document.
python-ruyaml-0.92.1/_test/data/spec-07-06.canonical 0000664 0000000 0000000 00000000120 15056754172 0021741 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!seq [
! "baz",
! "string"
]
python-ruyaml-0.92.1/_test/data/spec-07-06.data 0000664 0000000 0000000 00000000126 15056754172 0020731 0 ustar 00root root 0000000 0000000 %TAG ! !foo
%TAG !yaml! tag:yaml.org,2002:
---
- !bar "baz"
- !yaml!str "string"
python-ruyaml-0.92.1/_test/data/spec-07-07a.canonical 0000664 0000000 0000000 00000000034 15056754172 0022107 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
! "bar"
python-ruyaml-0.92.1/_test/data/spec-07-07a.data 0000664 0000000 0000000 00000000042 15056754172 0021070 0 ustar 00root root 0000000 0000000 # Private application:
!foo "bar"
python-ruyaml-0.92.1/_test/data/spec-07-07b.canonical 0000664 0000000 0000000 00000000065 15056754172 0022114 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
! "bar"
python-ruyaml-0.92.1/_test/data/spec-07-07b.data 0000664 0000000 0000000 00000000107 15056754172 0021073 0 ustar 00root root 0000000 0000000 # Migrated to global:
%TAG ! tag:ben-kiki.org,2000:app/
---
!foo "bar"
python-ruyaml-0.92.1/_test/data/spec-07-08.canonical 0000664 0000000 0000000 00000000164 15056754172 0021753 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!seq [
! "bar",
! "string",
! "baz"
]
python-ruyaml-0.92.1/_test/data/spec-07-08.data 0000664 0000000 0000000 00000000305 15056754172 0020732 0 ustar 00root root 0000000 0000000 # Explicitly specify default settings:
%TAG ! !
%TAG !! tag:yaml.org,2002:
# Named handles have no default:
%TAG !o! tag:ben-kiki.org,2000:
---
- !foo "bar"
- !!str "string"
- !o!type "baz"
python-ruyaml-0.92.1/_test/data/spec-07-09.canonical 0000664 0000000 0000000 00000000116 15056754172 0021751 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!str "foo"
%YAML 1.1
---
!!str "bar"
%YAML 1.1
---
!!str "baz"
python-ruyaml-0.92.1/_test/data/spec-07-09.data 0000664 0000000 0000000 00000000114 15056754172 0020731 0 ustar 00root root 0000000 0000000 ---
foo
...
# Repeated end marker.
...
---
bar
# No end marker.
---
baz
...
python-ruyaml-0.92.1/_test/data/spec-07-10.canonical 0000664 0000000 0000000 00000000241 15056754172 0021740 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!str "Root flow scalar"
%YAML 1.1
---
!!str "Root block scalar\n"
%YAML 1.1
---
!!map {
? !!str "foo"
: !!str "bar"
}
---
#!!str ""
!!null ""
python-ruyaml-0.92.1/_test/data/spec-07-10.data 0000664 0000000 0000000 00000000216 15056754172 0020724 0 ustar 00root root 0000000 0000000 "Root flow
scalar"
--- !!str >
Root block
scalar
---
# Root collection:
foo : bar
... # Is optional.
---
# Explicit document may be empty.
python-ruyaml-0.92.1/_test/data/spec-07-11.data 0000664 0000000 0000000 00000000052 15056754172 0020723 0 ustar 00root root 0000000 0000000 # A stream may contain
# no documents.
python-ruyaml-0.92.1/_test/data/spec-07-11.empty 0000664 0000000 0000000 00000000066 15056754172 0021155 0 ustar 00root root 0000000 0000000 # This stream contains no
# documents, only comments.
python-ruyaml-0.92.1/_test/data/spec-07-12a.canonical 0000664 0000000 0000000 00000000070 15056754172 0022103 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!map {
? !!str "foo"
: !!str "bar"
}
python-ruyaml-0.92.1/_test/data/spec-07-12a.data 0000664 0000000 0000000 00000000101 15056754172 0021060 0 ustar 00root root 0000000 0000000 # Implicit document. Root
# collection (mapping) node.
foo : bar
python-ruyaml-0.92.1/_test/data/spec-07-12b.canonical 0000664 0000000 0000000 00000000045 15056754172 0022106 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!str "Text content\n"
python-ruyaml-0.92.1/_test/data/spec-07-12b.data 0000664 0000000 0000000 00000000107 15056754172 0021067 0 ustar 00root root 0000000 0000000 # Explicit document. Root
# scalar (literal) node.
--- |
Text content
python-ruyaml-0.92.1/_test/data/spec-07-13.canonical 0000664 0000000 0000000 00000000177 15056754172 0021753 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!str "First document"
---
! "No directives"
---
! "With directives"
---
! "Reset settings"
python-ruyaml-0.92.1/_test/data/spec-07-13.data 0000664 0000000 0000000 00000000167 15056754172 0020734 0 ustar 00root root 0000000 0000000 ! "First document"
---
!foo "No directives"
%TAG ! !foo
---
!bar "With directives"
%YAML 1.1
---
!baz "Reset settings"
python-ruyaml-0.92.1/_test/data/spec-08-01.canonical 0000664 0000000 0000000 00000000131 15056754172 0021737 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!map {
? &A1 !!str "foo"
: !!str "bar",
? &A2 !!str "baz"
: *A1
}
python-ruyaml-0.92.1/_test/data/spec-08-01.data 0000664 0000000 0000000 00000000052 15056754172 0020723 0 ustar 00root root 0000000 0000000 !!str &a1 "foo" : !!str bar
&a2 baz : *a1
python-ruyaml-0.92.1/_test/data/spec-08-02.canonical 0000664 0000000 0000000 00000000160 15056754172 0021742 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!map {
? !!str "First occurrence"
: &A !!str "Value",
? !!str "Second occurrence"
: *A
}
python-ruyaml-0.92.1/_test/data/spec-08-02.data 0000664 0000000 0000000 00000000073 15056754172 0020727 0 ustar 00root root 0000000 0000000 First occurrence: &anchor Value
Second occurrence: *anchor
python-ruyaml-0.92.1/_test/data/spec-08-03.canonical 0000664 0000000 0000000 00000000115 15056754172 0021743 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!map {
? ! "foo"
: ! "baz"
}
python-ruyaml-0.92.1/_test/data/spec-08-03.data 0000664 0000000 0000000 00000000055 15056754172 0020730 0 ustar 00root root 0000000 0000000 ! foo :
! baz
python-ruyaml-0.92.1/_test/data/spec-08-04.data 0000664 0000000 0000000 00000000030 15056754172 0020722 0 ustar 00root root 0000000 0000000 - ! foo
- !<$:?> bar
python-ruyaml-0.92.1/_test/data/spec-08-04.error 0000664 0000000 0000000 00000000220 15056754172 0021143 0 ustar 00root root 0000000 0000000 ERROR:
- Verbatim tags aren't resolved,
so ! is invalid.
- The $:? tag is neither a global
URI tag nor a local tag starting
with “!”.
python-ruyaml-0.92.1/_test/data/spec-08-05.canonical 0000664 0000000 0000000 00000000164 15056754172 0021751 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!seq [
! "foo",
! "bar",
! "baz",
]
python-ruyaml-0.92.1/_test/data/spec-08-05.data 0000664 0000000 0000000 00000000113 15056754172 0020725 0 ustar 00root root 0000000 0000000 %TAG !o! tag:ben-kiki.org,2000:
---
- !local foo
- !!str bar
- !o!type baz
python-ruyaml-0.92.1/_test/data/spec-08-06.data 0000664 0000000 0000000 00000000110 15056754172 0020723 0 ustar 00root root 0000000 0000000 %TAG !o! tag:ben-kiki.org,2000:
---
- !$a!b foo
- !o! bar
- !h!type baz
python-ruyaml-0.92.1/_test/data/spec-08-06.error 0000664 0000000 0000000 00000000151 15056754172 0021150 0 ustar 00root root 0000000 0000000 ERROR:
- The !$a! looks like a handle.
- The !o! handle has no suffix.
- The !h! handle wasn't declared.
python-ruyaml-0.92.1/_test/data/spec-08-07.canonical 0000664 0000000 0000000 00000000235 15056754172 0021752 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!seq [
! "12",
! "12",
# ! "12",
! "12",
]
python-ruyaml-0.92.1/_test/data/spec-08-07.data 0000664 0000000 0000000 00000000067 15056754172 0020737 0 ustar 00root root 0000000 0000000 # Assuming conventional resolution:
- "12"
- 12
- ! 12
python-ruyaml-0.92.1/_test/data/spec-08-08.canonical 0000664 0000000 0000000 00000000224 15056754172 0021751 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!map {
? !!str "foo"
: !!str "bar baz"
}
%YAML 1.1
---
!!str "foo bar"
%YAML 1.1
---
!!str "foo bar"
%YAML 1.1
---
!!str "foo\n"
python-ruyaml-0.92.1/_test/data/spec-08-08.data 0000664 0000000 0000000 00000000100 15056754172 0020724 0 ustar 00root root 0000000 0000000 ---
foo:
"bar
baz"
---
"foo
bar"
---
foo
bar
--- |
foo
...
python-ruyaml-0.92.1/_test/data/spec-08-09.canonical 0000664 0000000 0000000 00000000735 15056754172 0021761 0 ustar 00root root 0000000 0000000 %YAML 1.1
--- !!map {
? !!str "scalars" : !!map {
? !!str "plain"
: !!str "some text",
? !!str "quoted"
: !!map {
? !!str "single"
: !!str "some text",
? !!str "double"
: !!str "some text"
} },
? !!str "collections" : !!map {
? !!str "sequence" : !!seq [
!!str "entry",
!!map {
? !!str "key" : !!str "value"
} ],
? !!str "mapping" : !!map {
? !!str "key" : !!str "value"
} } }
python-ruyaml-0.92.1/_test/data/spec-08-09.data 0000664 0000000 0000000 00000000320 15056754172 0020731 0 ustar 00root root 0000000 0000000 ---
scalars:
plain: !!str some text
quoted:
single: 'some text'
double: "some text"
collections:
sequence: !!seq [ !!str entry,
# Mapping entry:
key: value ]
mapping: { key: value }
python-ruyaml-0.92.1/_test/data/spec-08-10.canonical 0000664 0000000 0000000 00000001022 15056754172 0021737 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!map {
? !!str "block styles" : !!map {
? !!str "scalars" : !!map {
? !!str "literal"
: !!str "#!/usr/bin/perl\n\
print \"Hello,
world!\\n\";\n",
? !!str "folded"
: !!str "This sentence
is false.\n"
},
? !!str "collections" : !!map {
? !!str "sequence" : !!seq [
!!str "entry",
!!map {
? !!str "key" : !!str "value"
}
],
? !!str "mapping" : !!map {
? !!str "key" : !!str "value"
} } } }
python-ruyaml-0.92.1/_test/data/spec-08-10.data 0000664 0000000 0000000 00000000446 15056754172 0020732 0 ustar 00root root 0000000 0000000 block styles:
scalars:
literal: !!str |
#!/usr/bin/perl
print "Hello, world!\n";
folded: >
This sentence
is false.
collections: !!map
sequence: !!seq # Entry:
- entry # Plain
# Mapping entry:
- key: value
mapping:
key: value
python-ruyaml-0.92.1/_test/data/spec-08-11.canonical 0000664 0000000 0000000 00000000160 15056754172 0021742 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!map {
? !!str "First occurrence"
: &A !!str "Value",
? !!str "Second occurrence"
: *A
}
python-ruyaml-0.92.1/_test/data/spec-08-11.data 0000664 0000000 0000000 00000000073 15056754172 0020727 0 ustar 00root root 0000000 0000000 First occurrence: &anchor Value
Second occurrence: *anchor
python-ruyaml-0.92.1/_test/data/spec-08-12.canonical 0000664 0000000 0000000 00000000175 15056754172 0021751 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!seq [
!!str "Without properties",
&A !!str "Anchored",
!!str "Tagged",
*A,
!!str "",
!!str "",
]
python-ruyaml-0.92.1/_test/data/spec-08-12.data 0000664 0000000 0000000 00000000227 15056754172 0020731 0 ustar 00root root 0000000 0000000 [
Without properties,
&anchor "Anchored",
!!str 'Tagged',
*anchor, # Alias node
!!str , # Empty plain scalar
'', # Empty plain scalar
]
python-ruyaml-0.92.1/_test/data/spec-08-13.canonical 0000664 0000000 0000000 00000000163 15056754172 0021747 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!map {
? !!str "foo"
# : !!str "",
# ? !!str ""
: !!null "",
? !!null ""
: !!str "bar",
}
python-ruyaml-0.92.1/_test/data/spec-08-13.data 0000664 0000000 0000000 00000000032 15056754172 0020724 0 ustar 00root root 0000000 0000000 {
? foo :,
? : bar,
}
python-ruyaml-0.92.1/_test/data/spec-08-13.skip-ext 0000664 0000000 0000000 00000000000 15056754172 0021552 0 ustar 00root root 0000000 0000000 python-ruyaml-0.92.1/_test/data/spec-08-14.canonical 0000664 0000000 0000000 00000000175 15056754172 0021753 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!seq [
!!str "flow in block",
!!str "Block scalar\n",
!!map {
? !!str "foo"
: !!str "bar"
}
]
python-ruyaml-0.92.1/_test/data/spec-08-14.data 0000664 0000000 0000000 00000000113 15056754172 0020725 0 ustar 00root root 0000000 0000000 - "flow in block"
- >
Block scalar
- !!map # Block collection
foo : bar
python-ruyaml-0.92.1/_test/data/spec-08-15.canonical 0000664 0000000 0000000 00000000171 15056754172 0021750 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!seq [
!!null "",
!!map {
? !!str "foo"
: !!null "",
? !!null ""
: !!str "bar",
}
]
python-ruyaml-0.92.1/_test/data/spec-08-15.data 0000664 0000000 0000000 00000000057 15056754172 0020735 0 ustar 00root root 0000000 0000000 - # Empty plain scalar
- ? foo
:
?
: bar
python-ruyaml-0.92.1/_test/data/spec-09-01.canonical 0000664 0000000 0000000 00000000245 15056754172 0021746 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!map {
? !!str "simple key"
: !!map {
? !!str "also simple"
: !!str "value",
? !!str "not a simple key"
: !!str "any value"
}
}
python-ruyaml-0.92.1/_test/data/spec-09-01.data 0000664 0000000 0000000 00000000125 15056754172 0020725 0 ustar 00root root 0000000 0000000 "simple key" : {
"also simple" : value,
? "not a
simple key" : "any
value"
}
python-ruyaml-0.92.1/_test/data/spec-09-02.canonical 0000664 0000000 0000000 00000000124 15056754172 0021743 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!str "as space \
trimmed\n\
specific\L\n\
escaped\t\n\
none"
python-ruyaml-0.92.1/_test/data/spec-09-02.data 0000664 0000000 0000000 00000000072 15056754172 0020727 0 ustar 00root root 0000000 0000000 "as space
trimmed
specific
escaped \
none"
python-ruyaml-0.92.1/_test/data/spec-09-03.canonical 0000664 0000000 0000000 00000000123 15056754172 0021743 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!seq [
!!str " last",
!!str " last",
!!str " \tfirst last",
]
python-ruyaml-0.92.1/_test/data/spec-09-03.data 0000664 0000000 0000000 00000000055 15056754172 0020731 0 ustar 00root root 0000000 0000000 - "
last"
- "
last"
- " first
last"
python-ruyaml-0.92.1/_test/data/spec-09-04.canonical 0000664 0000000 0000000 00000000076 15056754172 0021753 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!str "first \
inner 1 \
inner 2 \
last"
python-ruyaml-0.92.1/_test/data/spec-09-04.data 0000664 0000000 0000000 00000000047 15056754172 0020733 0 ustar 00root root 0000000 0000000 "first
inner 1
\ inner 2 \
last"
python-ruyaml-0.92.1/_test/data/spec-09-05.canonical 0000664 0000000 0000000 00000000140 15056754172 0021744 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!seq [
!!str "first ",
!!str "first\nlast",
!!str "first inner \tlast",
]
python-ruyaml-0.92.1/_test/data/spec-09-05.data 0000664 0000000 0000000 00000000073 15056754172 0020733 0 ustar 00root root 0000000 0000000 - "first
"
- "first
last"
- "first
inner
\ last"
python-ruyaml-0.92.1/_test/data/spec-09-06.canonical 0000664 0000000 0000000 00000000053 15056754172 0021750 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!str "here's to \"quotes\""
python-ruyaml-0.92.1/_test/data/spec-09-06.data 0000664 0000000 0000000 00000000027 15056754172 0020733 0 ustar 00root root 0000000 0000000 'here''s to "quotes"'
python-ruyaml-0.92.1/_test/data/spec-09-07.canonical 0000664 0000000 0000000 00000000245 15056754172 0021754 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!map {
? !!str "simple key"
: !!map {
? !!str "also simple"
: !!str "value",
? !!str "not a simple key"
: !!str "any value"
}
}
python-ruyaml-0.92.1/_test/data/spec-09-07.data 0000664 0000000 0000000 00000000125 15056754172 0020733 0 ustar 00root root 0000000 0000000 'simple key' : {
'also simple' : value,
? 'not a
simple key' : 'any
value'
}
python-ruyaml-0.92.1/_test/data/spec-09-08.canonical 0000664 0000000 0000000 00000000105 15056754172 0021750 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!str "as space \
trimmed\n\
specific\L\n\
none"
python-ruyaml-0.92.1/_test/data/spec-09-08.data 0000664 0000000 0000000 00000000057 15056754172 0020740 0 ustar 00root root 0000000 0000000 'as space
trimmed
specific
none'
python-ruyaml-0.92.1/_test/data/spec-09-09.canonical 0000664 0000000 0000000 00000000123 15056754172 0021751 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!seq [
!!str " last",
!!str " last",
!!str " \tfirst last",
]
python-ruyaml-0.92.1/_test/data/spec-09-09.data 0000664 0000000 0000000 00000000055 15056754172 0020737 0 ustar 00root root 0000000 0000000 - '
last'
- '
last'
- ' first
last'
python-ruyaml-0.92.1/_test/data/spec-09-10.canonical 0000664 0000000 0000000 00000000057 15056754172 0021747 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!str "first \
inner \
last"
python-ruyaml-0.92.1/_test/data/spec-09-10.data 0000664 0000000 0000000 00000000030 15056754172 0020720 0 ustar 00root root 0000000 0000000 'first
inner
last'
python-ruyaml-0.92.1/_test/data/spec-09-11.canonical 0000664 0000000 0000000 00000000101 15056754172 0021736 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!seq [
!!str "first ",
!!str "first\nlast",
]
python-ruyaml-0.92.1/_test/data/spec-09-11.data 0000664 0000000 0000000 00000000041 15056754172 0020723 0 ustar 00root root 0000000 0000000 - 'first
'
- 'first
last'
python-ruyaml-0.92.1/_test/data/spec-09-12.canonical 0000664 0000000 0000000 00000000270 15056754172 0021746 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!seq [
!!str "::std::vector",
!!str "Up, up, and away!",
!!int "-123",
!!seq [
!!str "::std::vector",
!!str "Up, up, and away!",
!!int "-123",
]
]
python-ruyaml-0.92.1/_test/data/spec-09-12.data 0000664 0000000 0000000 00000000225 15056754172 0020730 0 ustar 00root root 0000000 0000000 # Outside flow collection:
- ::std::vector
- Up, up, and away!
- -123
# Inside flow collection:
- [ '::std::vector',
"Up, up, and away!",
-123 ]
python-ruyaml-0.92.1/_test/data/spec-09-13.canonical 0000664 0000000 0000000 00000000245 15056754172 0021751 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!map {
? !!str "simple key"
: !!map {
? !!str "also simple"
: !!str "value",
? !!str "not a simple key"
: !!str "any value"
}
}
python-ruyaml-0.92.1/_test/data/spec-09-13.data 0000664 0000000 0000000 00000000115 15056754172 0020727 0 ustar 00root root 0000000 0000000 simple key : {
also simple : value,
? not a
simple key : any
value
}
python-ruyaml-0.92.1/_test/data/spec-09-14.data 0000664 0000000 0000000 00000000116 15056754172 0020731 0 ustar 00root root 0000000 0000000 ---
--- ||| : foo
... >>>: bar
---
[
---
,
... ,
{
--- :
... # Nested
}
]
...
python-ruyaml-0.92.1/_test/data/spec-09-14.error 0000664 0000000 0000000 00000000213 15056754172 0021147 0 ustar 00root root 0000000 0000000 ERROR:
The --- and ... document
start and end markers must
not be specified as the
first content line of a
non-indented plain scalar.
python-ruyaml-0.92.1/_test/data/spec-09-15.canonical 0000664 0000000 0000000 00000000301 15056754172 0021744 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!map {
? !!str "---"
: !!str "foo",
? !!str "..."
: !!str "bar"
}
%YAML 1.1
---
!!seq [
!!str "---",
!!str "...",
!!map {
? !!str "---"
: !!str "..."
}
]
python-ruyaml-0.92.1/_test/data/spec-09-15.data 0000664 0000000 0000000 00000000077 15056754172 0020740 0 ustar 00root root 0000000 0000000 ---
"---" : foo
...: bar
---
[
---,
...,
{
? ---
: ...
}
]
...
python-ruyaml-0.92.1/_test/data/spec-09-16.canonical 0000664 0000000 0000000 00000000105 15056754172 0021747 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!str "as space \
trimmed\n\
specific\L\n\
none"
python-ruyaml-0.92.1/_test/data/spec-09-16.data 0000664 0000000 0000000 00000000144 15056754172 0020734 0 ustar 00root root 0000000 0000000 # Tabs are confusing:
# as space/trimmed/specific/none
as space
trimmed
specific
none
python-ruyaml-0.92.1/_test/data/spec-09-17.canonical 0000664 0000000 0000000 00000000064 15056754172 0021754 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!str "first line\n\
more line"
python-ruyaml-0.92.1/_test/data/spec-09-17.data 0000664 0000000 0000000 00000000035 15056754172 0020734 0 ustar 00root root 0000000 0000000 first line
more line
python-ruyaml-0.92.1/_test/data/spec-09-18.canonical 0000664 0000000 0000000 00000000150 15056754172 0021751 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!seq [
!!str "literal\n",
!!str " folded\n",
!!str "keep\n\n",
!!str " strip",
]
python-ruyaml-0.92.1/_test/data/spec-09-18.data 0000664 0000000 0000000 00000000205 15056754172 0020734 0 ustar 00root root 0000000 0000000 - | # Just the style
literal
- >1 # Indentation indicator
folded
- |+ # Chomping indicator
keep
- >-1 # Both indicators
strip
python-ruyaml-0.92.1/_test/data/spec-09-19.canonical 0000664 0000000 0000000 00000000101 15056754172 0021746 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!seq [
!!str "literal\n",
!!str "folded\n",
]
python-ruyaml-0.92.1/_test/data/spec-09-19.data 0000664 0000000 0000000 00000000031 15056754172 0020732 0 ustar 00root root 0000000 0000000 - |
literal
- >
folded
python-ruyaml-0.92.1/_test/data/spec-09-20.canonical 0000664 0000000 0000000 00000000173 15056754172 0021747 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!seq [
!!str "detected\n",
!!str "\n\n# detected\n",
!!str " explicit\n",
!!str "\t\ndetected\n",
]
python-ruyaml-0.92.1/_test/data/spec-09-20.data 0000664 0000000 0000000 00000000105 15056754172 0020724 0 ustar 00root root 0000000 0000000 - |
detected
- >
# detected
- |1
explicit
- >
detected
python-ruyaml-0.92.1/_test/data/spec-09-20.skip-ext 0000664 0000000 0000000 00000000000 15056754172 0021551 0 ustar 00root root 0000000 0000000 python-ruyaml-0.92.1/_test/data/spec-09-21.data 0000664 0000000 0000000 00000000051 15056754172 0020725 0 ustar 00root root 0000000 0000000 - |
text
- >
text
text
- |1
text
python-ruyaml-0.92.1/_test/data/spec-09-21.error 0000664 0000000 0000000 00000000260 15056754172 0021147 0 ustar 00root root 0000000 0000000 ERROR:
- A leading all-space line must
not have too many spaces.
- A following text line must
not be less indented.
- The text is less indented
than the indicated level.
python-ruyaml-0.92.1/_test/data/spec-09-22.canonical 0000664 0000000 0000000 00000000206 15056754172 0021746 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!map {
? !!str "strip"
: !!str "text",
? !!str "clip"
: !!str "text\n",
? !!str "keep"
: !!str "text\L",
}
python-ruyaml-0.92.1/_test/data/spec-09-22.data 0000664 0000000 0000000 00000000065 15056754172 0020733 0 ustar 00root root 0000000 0000000 strip: |-
text
clip: |
text
keep: |+
text
python-ruyaml-0.92.1/_test/data/spec-09-23.canonical 0000664 0000000 0000000 00000000216 15056754172 0021750 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!map {
? !!str "strip"
: !!str "# text",
? !!str "clip"
: !!str "# text\n",
? !!str "keep"
: !!str "# text\L\n",
}
python-ruyaml-0.92.1/_test/data/spec-09-23.data 0000664 0000000 0000000 00000000244 15056754172 0020733 0 ustar 00root root 0000000 0000000 # Strip
# Comments:
strip: |-
# text
# Clip
# comments:
clip: |
# text
# Keep
# comments:
keep: |+
# text
# Trail
# comments.
python-ruyaml-0.92.1/_test/data/spec-09-24.canonical 0000664 0000000 0000000 00000000170 15056754172 0021750 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!map {
? !!str "strip"
: !!str "",
? !!str "clip"
: !!str "",
? !!str "keep"
: !!str "\n",
}
python-ruyaml-0.92.1/_test/data/spec-09-24.data 0000664 0000000 0000000 00000000036 15056754172 0020733 0 ustar 00root root 0000000 0000000 strip: >-
clip: >
keep: |+
python-ruyaml-0.92.1/_test/data/spec-09-25.canonical 0000664 0000000 0000000 00000000060 15056754172 0021747 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!str "literal\n\
\ttext\n"
python-ruyaml-0.92.1/_test/data/spec-09-25.data 0000664 0000000 0000000 00000000050 15056754172 0020730 0 ustar 00root root 0000000 0000000 | # Simple block scalar
literal
text
python-ruyaml-0.92.1/_test/data/spec-09-26.canonical 0000664 0000000 0000000 00000000054 15056754172 0021753 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!str "\n\nliteral\n\ntext\n"
python-ruyaml-0.92.1/_test/data/spec-09-26.data 0000664 0000000 0000000 00000000046 15056754172 0020736 0 ustar 00root root 0000000 0000000 |
literal
text
# Comment
python-ruyaml-0.92.1/_test/data/spec-09-27.canonical 0000664 0000000 0000000 00000000054 15056754172 0021754 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!str "\n\nliteral\n\ntext\n"
python-ruyaml-0.92.1/_test/data/spec-09-27.data 0000664 0000000 0000000 00000000046 15056754172 0020737 0 ustar 00root root 0000000 0000000 |
literal
text
# Comment
python-ruyaml-0.92.1/_test/data/spec-09-28.canonical 0000664 0000000 0000000 00000000054 15056754172 0021755 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!str "\n\nliteral\n\ntext\n"
python-ruyaml-0.92.1/_test/data/spec-09-28.data 0000664 0000000 0000000 00000000046 15056754172 0020740 0 ustar 00root root 0000000 0000000 |
literal
text
# Comment
python-ruyaml-0.92.1/_test/data/spec-09-29.canonical 0000664 0000000 0000000 00000000065 15056754172 0021760 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!str "folded text\n\
\tlines\n"
python-ruyaml-0.92.1/_test/data/spec-09-29.data 0000664 0000000 0000000 00000000057 15056754172 0020743 0 ustar 00root root 0000000 0000000 > # Simple folded scalar
folded
text
lines
python-ruyaml-0.92.1/_test/data/spec-09-30.canonical 0000664 0000000 0000000 00000000166 15056754172 0021752 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!str "folded line\n\
next line\n\n\
\ * bullet\n\
\ * list\n\n\
last line\n"
python-ruyaml-0.92.1/_test/data/spec-09-30.data 0000664 0000000 0000000 00000000114 15056754172 0020725 0 ustar 00root root 0000000 0000000 >
folded
line
next
line
* bullet
* list
last
line
# Comment
python-ruyaml-0.92.1/_test/data/spec-09-31.canonical 0000664 0000000 0000000 00000000166 15056754172 0021753 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!str "folded line\n\
next line\n\n\
\ * bullet\n\
\ * list\n\n\
last line\n"
python-ruyaml-0.92.1/_test/data/spec-09-31.data 0000664 0000000 0000000 00000000114 15056754172 0020726 0 ustar 00root root 0000000 0000000 >
folded
line
next
line
* bullet
* list
last
line
# Comment
python-ruyaml-0.92.1/_test/data/spec-09-32.canonical 0000664 0000000 0000000 00000000166 15056754172 0021754 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!str "folded line\n\
next line\n\n\
\ * bullet\n\
\ * list\n\n\
last line\n"
python-ruyaml-0.92.1/_test/data/spec-09-32.data 0000664 0000000 0000000 00000000114 15056754172 0020727 0 ustar 00root root 0000000 0000000 >
folded
line
next
line
* bullet
* list
last
line
# Comment
python-ruyaml-0.92.1/_test/data/spec-09-33.canonical 0000664 0000000 0000000 00000000166 15056754172 0021755 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!str "folded line\n\
next line\n\n\
\ * bullet\n\
\ * list\n\n\
last line\n"
python-ruyaml-0.92.1/_test/data/spec-09-33.data 0000664 0000000 0000000 00000000114 15056754172 0020730 0 ustar 00root root 0000000 0000000 >
folded
line
next
line
* bullet
* list
last
line
# Comment
python-ruyaml-0.92.1/_test/data/spec-10-01.canonical 0000664 0000000 0000000 00000000201 15056754172 0021726 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!seq [
!!seq [
!!str "inner",
!!str "inner",
],
!!seq [
!!str "inner",
!!str "last",
],
]
python-ruyaml-0.92.1/_test/data/spec-10-01.data 0000664 0000000 0000000 00000000043 15056754172 0020714 0 ustar 00root root 0000000 0000000 - [ inner, inner, ]
- [inner,last]
python-ruyaml-0.92.1/_test/data/spec-10-02.canonical 0000664 0000000 0000000 00000000271 15056754172 0021736 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!seq [
!!str "double quoted",
!!str "single quoted",
!!str "plain text",
!!seq [
!!str "nested",
],
!!map {
? !!str "single"
: !!str "pair"
}
]
python-ruyaml-0.92.1/_test/data/spec-10-02.data 0000664 0000000 0000000 00000000132 15056754172 0020714 0 ustar 00root root 0000000 0000000 [
"double
quoted", 'single
quoted',
plain
text, [ nested ],
single: pair ,
]
python-ruyaml-0.92.1/_test/data/spec-10-03.canonical 0000664 0000000 0000000 00000000207 15056754172 0021736 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!map {
? !!str "block"
: !!seq [
!!str "one",
!!map {
? !!str "two"
: !!str "three"
}
]
}
python-ruyaml-0.92.1/_test/data/spec-10-03.data 0000664 0000000 0000000 00000000065 15056754172 0020722 0 ustar 00root root 0000000 0000000 block: # Block
# sequence
- one
- two : three
python-ruyaml-0.92.1/_test/data/spec-10-04.canonical 0000664 0000000 0000000 00000000157 15056754172 0021743 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!map {
? !!str "block"
: !!seq [
!!str "one",
!!seq [
!!str "two"
]
]
}
python-ruyaml-0.92.1/_test/data/spec-10-04.data 0000664 0000000 0000000 00000000026 15056754172 0020720 0 ustar 00root root 0000000 0000000 block:
- one
-
- two
python-ruyaml-0.92.1/_test/data/spec-10-05.canonical 0000664 0000000 0000000 00000000241 15056754172 0021736 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!seq [
!!null "",
!!str "block node\n",
!!seq [
!!str "one",
!!str "two",
],
!!map {
? !!str "one"
: !!str "two",
}
]
python-ruyaml-0.92.1/_test/data/spec-10-05.data 0000664 0000000 0000000 00000000151 15056754172 0020720 0 ustar 00root root 0000000 0000000 - # Empty
- |
block node
- - one # in-line
- two # sequence
- one: two # in-line
# mapping
python-ruyaml-0.92.1/_test/data/spec-10-06.canonical 0000664 0000000 0000000 00000000325 15056754172 0021742 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!seq [
!!map {
? !!str "inner"
: !!str "entry",
? !!str "also"
: !!str "inner"
},
!!map {
? !!str "inner"
: !!str "entry",
? !!str "last"
: !!str "entry"
}
]
python-ruyaml-0.92.1/_test/data/spec-10-06.data 0000664 0000000 0000000 00000000102 15056754172 0020715 0 ustar 00root root 0000000 0000000 - { inner : entry , also: inner , }
- {inner: entry,last : entry}
python-ruyaml-0.92.1/_test/data/spec-10-07.canonical 0000664 0000000 0000000 00000000355 15056754172 0021746 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!map {
? !!null ""
: !!str "value",
? !!str "explicit key"
: !!str "value",
? !!str "simple key"
: !!str "value",
? !!seq [
!!str "collection",
!!str "simple",
!!str "key"
]
: !!str "value"
}
python-ruyaml-0.92.1/_test/data/spec-10-07.data 0000664 0000000 0000000 00000000152 15056754172 0020723 0 ustar 00root root 0000000 0000000 {
? : value, # Empty key
? explicit
key: value,
simple key : value,
[ collection, simple, key ]: value
}
python-ruyaml-0.92.1/_test/data/spec-10-08.data 0000664 0000000 0000000 00000004106 15056754172 0020727 0 ustar 00root root 0000000 0000000 {
multi-line
simple key : value,
very long ...................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................(>1KB)................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................... key: value
}
python-ruyaml-0.92.1/_test/data/spec-10-08.error 0000664 0000000 0000000 00000000162 15056754172 0021145 0 ustar 00root root 0000000 0000000 ERROR:
- A simple key is restricted
to only one line.
- A simple key must not be
longer than 1024 characters.
python-ruyaml-0.92.1/_test/data/spec-10-09.canonical 0000664 0000000 0000000 00000000134 15056754172 0021743 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!map {
? !!str "key"
: !!str "value",
? !!str "empty"
: !!null "",
}
python-ruyaml-0.92.1/_test/data/spec-10-09.data 0000664 0000000 0000000 00000000051 15056754172 0020723 0 ustar 00root root 0000000 0000000 {
key : value,
empty: # empty value↓
}
python-ruyaml-0.92.1/_test/data/spec-10-10.canonical 0000664 0000000 0000000 00000000442 15056754172 0021735 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!map {
? !!str "explicit key1"
: !!str "explicit value",
? !!str "explicit key2"
: !!null "",
? !!str "explicit key3"
: !!null "",
? !!str "simple key1"
: !!str "explicit value",
? !!str "simple key2"
: !!null "",
? !!str "simple key3"
: !!null "",
}
python-ruyaml-0.92.1/_test/data/spec-10-10.data 0000664 0000000 0000000 00000000324 15056754172 0020716 0 ustar 00root root 0000000 0000000 {
? explicit key1 : explicit value,
? explicit key2 : , # Explicit empty
? explicit key3, # Empty value
simple key1 : explicit value,
simple key2 : , # Explicit empty
simple key3, # Empty value
}
python-ruyaml-0.92.1/_test/data/spec-10-11.canonical 0000664 0000000 0000000 00000000532 15056754172 0021736 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!seq [
!!map {
? !!str "explicit key1"
: !!str "explicit value",
},
!!map {
? !!str "explicit key2"
: !!null "",
},
!!map {
? !!str "explicit key3"
: !!null "",
},
!!map {
? !!str "simple key1"
: !!str "explicit value",
},
!!map {
? !!str "simple key2"
: !!null "",
},
]
python-ruyaml-0.92.1/_test/data/spec-10-11.data 0000664 0000000 0000000 00000000264 15056754172 0020722 0 ustar 00root root 0000000 0000000 [
? explicit key1 : explicit value,
? explicit key2 : , # Explicit empty
? explicit key3, # Implicit empty
simple key1 : explicit value,
simple key2 : , # Explicit empty
]
python-ruyaml-0.92.1/_test/data/spec-10-12.canonical 0000664 0000000 0000000 00000000140 15056754172 0021732 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!map {
? !!str "block"
: !!map {
? !!str "key"
: !!str "value"
}
}
python-ruyaml-0.92.1/_test/data/spec-10-12.data 0000664 0000000 0000000 00000000051 15056754172 0020715 0 ustar 00root root 0000000 0000000 block: # Block
# mapping
key: value
python-ruyaml-0.92.1/_test/data/spec-10-13.canonical 0000664 0000000 0000000 00000000212 15056754172 0021733 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!map {
? !!str "explicit key"
: !!null "",
? !!str "block key\n"
: !!seq [
!!str "one",
!!str "two",
]
}
python-ruyaml-0.92.1/_test/data/spec-10-13.data 0000664 0000000 0000000 00000000141 15056754172 0020716 0 ustar 00root root 0000000 0000000 ? explicit key # implicit value
? |
block key
: - one # explicit in-line
- two # block value
python-ruyaml-0.92.1/_test/data/spec-10-14.canonical 0000664 0000000 0000000 00000000206 15056754172 0021737 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!map {
? !!str "plain key"
: !!null "",
? !!str "quoted key"
: !!seq [
!!str "one",
!!str "two",
]
}
python-ruyaml-0.92.1/_test/data/spec-10-14.data 0000664 0000000 0000000 00000000126 15056754172 0020722 0 ustar 00root root 0000000 0000000 plain key: # empty value
"quoted key":
- one # explicit next-line
- two # block value
python-ruyaml-0.92.1/_test/data/spec-10-15.canonical 0000664 0000000 0000000 00000000332 15056754172 0021740 0 ustar 00root root 0000000 0000000 %YAML 1.1
---
!!seq [
!!map {
? !!str "sun"
: !!str "yellow"
},
!!map {
? !!map {
? !!str "earth"
: !!str "blue"
}
: !!map {
? !!str "moon"
: !!str "white"
}
}
]
python-ruyaml-0.92.1/_test/data/spec-10-15.data 0000664 0000000 0000000 00000000056 15056754172 0020725 0 ustar 00root root 0000000 0000000 - sun: yellow
- ? earth: blue
: moon: white
python-ruyaml-0.92.1/_test/data/str.data 0000664 0000000 0000000 00000000007 15056754172 0020136 0 ustar 00root root 0000000 0000000 - abcd
python-ruyaml-0.92.1/_test/data/str.detect 0000664 0000000 0000000 00000000026 15056754172 0020476 0 ustar 00root root 0000000 0000000 tag:yaml.org,2002:str
python-ruyaml-0.92.1/_test/data/tags.events 0000664 0000000 0000000 00000000576 15056754172 0020672 0 ustar 00root root 0000000 0000000 - !StreamStart
- !DocumentStart
- !SequenceStart
- !Scalar { value: 'data' }
#- !Scalar { tag: '!', value: 'data' }
- !Scalar { tag: 'tag:yaml.org,2002:str', value: 'data' }
- !Scalar { tag: '!myfunnytag', value: 'data' }
- !Scalar { tag: '!my!ugly!tag', value: 'data' }
- !Scalar { tag: 'tag:my.domain.org,2002:data!? #', value: 'data' }
- !SequenceEnd
- !DocumentEnd
- !StreamEnd
python-ruyaml-0.92.1/_test/data/test_mark.marks 0000664 0000000 0000000 00000001036 15056754172 0021526 0 ustar 00root root 0000000 0000000 ---
*The first line.
The last line.
---
The first*line.
The last line.
---
The first line.*
The last line.
---
The first line.
*The last line.
---
The first line.
The last*line.
---
The first line.
The last line.*
---
The first line.
*The selected line.
The last line.
---
The first line.
The selected*line.
The last line.
---
The first line.
The selected line.*
The last line.
---
*The only line.
---
The only*line.
---
The only line.*
---
Loooooooooooooooooooooooooooooooooooooooooooooong*Liiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiine
python-ruyaml-0.92.1/_test/data/timestamp-bugs.code 0000664 0000000 0000000 00000000574 15056754172 0022301 0 ustar 00root root 0000000 0000000 [
datetime.datetime(2001, 12, 15, 3, 29, 43, 100000),
datetime.datetime(2001, 12, 14, 16, 29, 43, 100000),
datetime.datetime(2001, 12, 14, 21, 59, 43, 1010),
datetime.datetime(2001, 12, 14, 21, 59, 43, 0, FixedOffset(60, "+1")),
datetime.datetime(2001, 12, 14, 21, 59, 43, 0, FixedOffset(-90, "-1:30")),
datetime.datetime(2005, 7, 8, 17, 35, 4, 517600),
]
python-ruyaml-0.92.1/_test/data/timestamp-bugs.data 0000664 0000000 0000000 00000000252 15056754172 0022271 0 ustar 00root root 0000000 0000000 - 2001-12-14 21:59:43.10 -5:30
- 2001-12-14 21:59:43.10 +5:30
- 2001-12-14 21:59:43.00101
- 2001-12-14 21:59:43+1
- 2001-12-14 21:59:43-1:30
- 2005-07-08 17:35:04.517600
python-ruyaml-0.92.1/_test/data/timestamp.data 0000664 0000000 0000000 00000000171 15056754172 0021333 0 ustar 00root root 0000000 0000000 - 2001-12-15T02:59:43.1Z
- 2001-12-14t21:59:43.10-05:00
- 2001-12-14 21:59:43.10 -5
- 2001-12-15 2:59:43.10
- 2002-12-14
python-ruyaml-0.92.1/_test/data/timestamp.detect 0000664 0000000 0000000 00000000034 15056754172 0021670 0 ustar 00root root 0000000 0000000 tag:yaml.org,2002:timestamp
python-ruyaml-0.92.1/_test/data/unclosed-bracket.loader-error 0000664 0000000 0000000 00000000327 15056754172 0024244 0 ustar 00root root 0000000 0000000 test:
- [ foo: bar
# comment the rest of the stream to let the scanner detect the problem.
# - baz
#"we could have detected the unclosed bracket on the above line, but this would forbid such syntax as": {
#}
python-ruyaml-0.92.1/_test/data/unclosed-quoted-scalar.loader-error 0000664 0000000 0000000 00000000012 15056754172 0025364 0 ustar 00root root 0000000 0000000 'foo
bar
python-ruyaml-0.92.1/_test/data/undefined-anchor.loader-error 0000664 0000000 0000000 00000000030 15056754172 0024217 0 ustar 00root root 0000000 0000000 - foo
- &bar baz
- *bat
python-ruyaml-0.92.1/_test/data/undefined-constructor.loader-error 0000664 0000000 0000000 00000000015 15056754172 0025335 0 ustar 00root root 0000000 0000000 --- !foo bar
python-ruyaml-0.92.1/_test/data/undefined-tag-handle.loader-error 0000664 0000000 0000000 00000000024 15056754172 0024754 0 ustar 00root root 0000000 0000000 --- !foo!bar baz
python-ruyaml-0.92.1/_test/data/unknown.dumper-error 0000664 0000000 0000000 00000000027 15056754172 0022541 0 ustar 00root root 0000000 0000000 yaml.safe_dump(object)
python-ruyaml-0.92.1/_test/data/unsupported-version.emitter-error 0000664 0000000 0000000 00000000150 15056754172 0025267 0 ustar 00root root 0000000 0000000 - !StreamStart
- !DocumentStart { version: [5,6] }
- !Scalar { value: foo }
- !DocumentEnd
- !StreamEnd
python-ruyaml-0.92.1/_test/data/utf16be.code 0000664 0000000 0000000 00000000014 15056754172 0020601 0 ustar 00root root 0000000 0000000 "UTF-16-BE"
python-ruyaml-0.92.1/_test/data/utf16be.data 0000664 0000000 0000000 00000000036 15056754172 0020604 0 ustar 00root root 0000000 0000000 - - - U T F - 1 6 - B E
python-ruyaml-0.92.1/_test/data/utf16le.code 0000664 0000000 0000000 00000000014 15056754172 0020613 0 ustar 00root root 0000000 0000000 "UTF-16-LE"
python-ruyaml-0.92.1/_test/data/utf16le.data 0000664 0000000 0000000 00000000036 15056754172 0020616 0 ustar 00root root 0000000 0000000 - - - U T F - 1 6 - L E
python-ruyaml-0.92.1/_test/data/utf8-implicit.code 0000664 0000000 0000000 00000000021 15056754172 0022021 0 ustar 00root root 0000000 0000000 "implicit UTF-8"
python-ruyaml-0.92.1/_test/data/utf8-implicit.data 0000664 0000000 0000000 00000000023 15056754172 0022022 0 ustar 00root root 0000000 0000000 --- implicit UTF-8
python-ruyaml-0.92.1/_test/data/utf8.code 0000664 0000000 0000000 00000000010 15056754172 0020207 0 ustar 00root root 0000000 0000000 "UTF-8"
python-ruyaml-0.92.1/_test/data/utf8.data 0000664 0000000 0000000 00000000015 15056754172 0020213 0 ustar 00root root 0000000 0000000 --- UTF-8
python-ruyaml-0.92.1/_test/data/util/ 0000775 0000000 0000000 00000000000 15056754172 0017453 5 ustar 00root root 0000000 0000000 python-ruyaml-0.92.1/_test/data/util/00_ok.yaml 0000664 0000000 0000000 00000000042 15056754172 0021243 0 ustar 00root root 0000000 0000000 - abc
- ghi # some comment
- klm
python-ruyaml-0.92.1/_test/data/util/01_second_rt_ok.yaml 0000664 0000000 0000000 00000000042 15056754172 0023304 0 ustar 00root root 0000000 0000000 - abc
- ghi # some comment
- klm
python-ruyaml-0.92.1/_test/data/util/02_not_ok.yaml 0000664 0000000 0000000 00000000054 15056754172 0022130 0 ustar 00root root 0000000 0000000 123 # single scalar cannot have comment
...
python-ruyaml-0.92.1/_test/data/util/03_no_comment_ok.yaml 0000664 0000000 0000000 00000000011 15056754172 0023460 0 ustar 00root root 0000000 0000000 123
...
python-ruyaml-0.92.1/_test/data/valid_escape_characters.code 0000664 0000000 0000000 00000000031 15056754172 0024142 0 ustar 00root root 0000000 0000000 "\" \\ / \b \f \n \r \t"
python-ruyaml-0.92.1/_test/data/valid_escape_characters.data 0000664 0000000 0000000 00000000032 15056754172 0024142 0 ustar 00root root 0000000 0000000 "\" \\ \/ \b \f \n \r \t"
python-ruyaml-0.92.1/_test/data/valid_escape_characters.skip-ext 0000664 0000000 0000000 00000000000 15056754172 0024770 0 ustar 00root root 0000000 0000000 python-ruyaml-0.92.1/_test/data/value.data 0000664 0000000 0000000 00000000004 15056754172 0020437 0 ustar 00root root 0000000 0000000 - =
python-ruyaml-0.92.1/_test/data/value.detect 0000664 0000000 0000000 00000000030 15056754172 0020775 0 ustar 00root root 0000000 0000000 tag:yaml.org,2002:value
python-ruyaml-0.92.1/_test/data/yaml.data 0000664 0000000 0000000 00000000047 15056754172 0020274 0 ustar 00root root 0000000 0000000 - !!yaml '!'
- !!yaml '&'
- !!yaml '*'
python-ruyaml-0.92.1/_test/data/yaml.detect 0000664 0000000 0000000 00000000027 15056754172 0020631 0 ustar 00root root 0000000 0000000 tag:yaml.org,2002:yaml
python-ruyaml-0.92.1/_test/lib/ 0000775 0000000 0000000 00000000000 15056754172 0016333 5 ustar 00root root 0000000 0000000 python-ruyaml-0.92.1/_test/lib/canonical.py 0000664 0000000 0000000 00000031064 15056754172 0020640 0 ustar 00root root 0000000 0000000 import ruyaml
from ruyaml.composer import Composer
from ruyaml.constructor import Constructor
from ruyaml.resolver import Resolver
class CanonicalError(ruyaml.YAMLError):
pass
class CanonicalScanner:
def __init__(self, data):
try:
if isinstance(data, bytes):
data = data.decode('utf-8')
except UnicodeDecodeError:
raise CanonicalError('utf-8 stream is expected')
self.data = data + '\0'
self.index = 0
self.tokens = []
self.scanned = False
def check_token(self, *choices):
if not self.scanned:
self.scan()
if self.tokens:
if not choices:
return True
for choice in choices:
if isinstance(self.tokens[0], choice):
return True
return False
def peek_token(self):
if not self.scanned:
self.scan()
if self.tokens:
return self.tokens[0]
def get_token(self, choice=None):
if not self.scanned:
self.scan()
token = self.tokens.pop(0)
if choice and not isinstance(token, choice):
raise CanonicalError('unexpected token ' + repr(token))
return token
def get_token_value(self):
token = self.get_token()
return token.value
def scan(self):
self.tokens.append(ruyaml.StreamStartToken(None, None))
while True:
self.find_token()
ch = self.data[self.index]
if ch == '\0':
self.tokens.append(ruyaml.StreamEndToken(None, None))
break
elif ch == '%':
self.tokens.append(self.scan_directive())
elif ch == '-' and self.data[self.index : self.index + 3] == '---':
self.index += 3
self.tokens.append(ruyaml.DocumentStartToken(None, None))
elif ch == '[':
self.index += 1
self.tokens.append(ruyaml.FlowSequenceStartToken(None, None))
elif ch == '{':
self.index += 1
self.tokens.append(ruyaml.FlowMappingStartToken(None, None))
elif ch == ']':
self.index += 1
self.tokens.append(ruyaml.FlowSequenceEndToken(None, None))
elif ch == '}':
self.index += 1
self.tokens.append(ruyaml.FlowMappingEndToken(None, None))
elif ch == '?':
self.index += 1
self.tokens.append(ruyaml.KeyToken(None, None))
elif ch == ':':
self.index += 1
self.tokens.append(ruyaml.ValueToken(None, None))
elif ch == ',':
self.index += 1
self.tokens.append(ruyaml.FlowEntryToken(None, None))
elif ch == '*' or ch == '&':
self.tokens.append(self.scan_alias())
elif ch == '!':
self.tokens.append(self.scan_tag())
elif ch == '"':
self.tokens.append(self.scan_scalar())
else:
raise CanonicalError('invalid token')
self.scanned = True
DIRECTIVE = '%YAML 1.1'
def scan_directive(self):
if (
self.data[self.index : self.index + len(self.DIRECTIVE)] == self.DIRECTIVE
and self.data[self.index + len(self.DIRECTIVE)] in ' \n\0'
):
self.index += len(self.DIRECTIVE)
return ruyaml.DirectiveToken('YAML', (1, 1), None, None)
else:
raise CanonicalError('invalid directive')
def scan_alias(self):
if self.data[self.index] == '*':
TokenClass = ruyaml.AliasToken
else:
TokenClass = ruyaml.AnchorToken
self.index += 1
start = self.index
while self.data[self.index] not in ', \n\0':
self.index += 1
value = self.data[start : self.index]
return TokenClass(value, None, None)
def scan_tag(self):
self.index += 1
start = self.index
while self.data[self.index] not in ' \n\0':
self.index += 1
value = self.data[start : self.index]
if not value:
value = '!'
elif value[0] == '!':
value = 'tag:yaml.org,2002:' + value[1:]
elif value[0] == '<' and value[-1] == '>':
value = value[1:-1]
else:
value = '!' + value
return ruyaml.TagToken(value, None, None)
QUOTE_CODES = {'x': 2, 'u': 4, 'U': 8}
QUOTE_REPLACES = {
'\\': '\\',
'"': '"',
' ': ' ',
'a': '\x07',
'b': '\x08',
'e': '\x1B',
'f': '\x0C',
'n': '\x0A',
'r': '\x0D',
't': '\x09',
'v': '\x0B',
'N': '\u0085',
'L': '\u2028',
'P': '\u2029',
'_': '_',
'0': '\x00',
}
def scan_scalar(self):
self.index += 1
chunks = []
start = self.index
ignore_spaces = False
while self.data[self.index] != '"':
if self.data[self.index] == '\\':
ignore_spaces = False
chunks.append(self.data[start : self.index])
self.index += 1
ch = self.data[self.index]
self.index += 1
if ch == '\n':
ignore_spaces = True
elif ch in self.QUOTE_CODES:
length = self.QUOTE_CODES[ch]
code = int(self.data[self.index : self.index + length], 16)
chunks.append(chr(code))
self.index += length
else:
if ch not in self.QUOTE_REPLACES:
raise CanonicalError('invalid escape code')
chunks.append(self.QUOTE_REPLACES[ch])
start = self.index
elif self.data[self.index] == '\n':
chunks.append(self.data[start : self.index])
chunks.append(' ')
self.index += 1
start = self.index
ignore_spaces = True
elif ignore_spaces and self.data[self.index] == ' ':
self.index += 1
start = self.index
else:
ignore_spaces = False
self.index += 1
chunks.append(self.data[start : self.index])
self.index += 1
return ruyaml.ScalarToken("".join(chunks), False, None, None)
def find_token(self):
found = False
while not found:
while self.data[self.index] in ' \t':
self.index += 1
if self.data[self.index] == '#':
while self.data[self.index] != '\n':
self.index += 1
if self.data[self.index] == '\n':
self.index += 1
else:
found = True
class CanonicalParser:
def __init__(self):
self.events = []
self.parsed = False
def dispose(self):
pass
# stream: STREAM-START document* STREAM-END
def parse_stream(self):
self.get_token(ruyaml.StreamStartToken)
self.events.append(ruyaml.StreamStartEvent(None, None))
while not self.check_token(ruyaml.StreamEndToken):
if self.check_token(ruyaml.DirectiveToken, ruyaml.DocumentStartToken):
self.parse_document()
else:
raise CanonicalError(
'document is expected, got ' + repr(self.tokens[0])
)
self.get_token(ruyaml.StreamEndToken)
self.events.append(ruyaml.StreamEndEvent(None, None))
# document: DIRECTIVE? DOCUMENT-START node
def parse_document(self):
# node = None
if self.check_token(ruyaml.DirectiveToken):
self.get_token(ruyaml.DirectiveToken)
self.get_token(ruyaml.DocumentStartToken)
self.events.append(ruyaml.DocumentStartEvent(None, None))
self.parse_node()
self.events.append(ruyaml.DocumentEndEvent(None, None))
# node: ALIAS | ANCHOR? TAG? (SCALAR|sequence|mapping)
def parse_node(self):
if self.check_token(ruyaml.AliasToken):
self.events.append(ruyaml.AliasEvent(self.get_token_value(), None, None))
else:
anchor = None
if self.check_token(ruyaml.AnchorToken):
anchor = self.get_token_value()
tag = None
if self.check_token(ruyaml.TagToken):
tag = self.get_token_value()
if self.check_token(ruyaml.ScalarToken):
self.events.append(
ruyaml.ScalarEvent(
anchor, tag, (False, False), self.get_token_value(), None, None
)
)
elif self.check_token(ruyaml.FlowSequenceStartToken):
self.events.append(ruyaml.SequenceStartEvent(anchor, tag, None, None))
self.parse_sequence()
elif self.check_token(ruyaml.FlowMappingStartToken):
self.events.append(ruyaml.MappingStartEvent(anchor, tag, None, None))
self.parse_mapping()
else:
raise CanonicalError(
"SCALAR, '[', or '{' is expected, got " + repr(self.tokens[0])
)
# sequence: SEQUENCE-START (node (ENTRY node)*)? ENTRY? SEQUENCE-END
def parse_sequence(self):
self.get_token(ruyaml.FlowSequenceStartToken)
if not self.check_token(ruyaml.FlowSequenceEndToken):
self.parse_node()
while not self.check_token(ruyaml.FlowSequenceEndToken):
self.get_token(ruyaml.FlowEntryToken)
if not self.check_token(ruyaml.FlowSequenceEndToken):
self.parse_node()
self.get_token(ruyaml.FlowSequenceEndToken)
self.events.append(ruyaml.SequenceEndEvent(None, None))
# mapping: MAPPING-START (map_entry (ENTRY map_entry)*)? ENTRY? MAPPING-END
def parse_mapping(self):
self.get_token(ruyaml.FlowMappingStartToken)
if not self.check_token(ruyaml.FlowMappingEndToken):
self.parse_map_entry()
while not self.check_token(ruyaml.FlowMappingEndToken):
self.get_token(ruyaml.FlowEntryToken)
if not self.check_token(ruyaml.FlowMappingEndToken):
self.parse_map_entry()
self.get_token(ruyaml.FlowMappingEndToken)
self.events.append(ruyaml.MappingEndEvent(None, None))
# map_entry: KEY node VALUE node
def parse_map_entry(self):
self.get_token(ruyaml.KeyToken)
self.parse_node()
self.get_token(ruyaml.ValueToken)
self.parse_node()
def parse(self):
self.parse_stream()
self.parsed = True
def get_event(self):
if not self.parsed:
self.parse()
return self.events.pop(0)
def check_event(self, *choices):
if not self.parsed:
self.parse()
if self.events:
if not choices:
return True
for choice in choices:
if isinstance(self.events[0], choice):
return True
return False
def peek_event(self):
if not self.parsed:
self.parse()
return self.events[0]
class CanonicalLoader(
CanonicalScanner, CanonicalParser, Composer, Constructor, Resolver
):
def __init__(self, stream):
if hasattr(stream, 'read'):
stream = stream.read()
CanonicalScanner.__init__(self, stream)
CanonicalParser.__init__(self)
Composer.__init__(self)
Constructor.__init__(self)
Resolver.__init__(self)
ruyaml.CanonicalLoader = CanonicalLoader
def canonical_scan(stream):
yaml = ruyaml.YAML()
yaml.scanner = CanonicalScanner
return yaml.scan(stream)
ruyaml.canonical_scan = canonical_scan
def canonical_parse(stream):
yaml = ruyaml.YAML()
return yaml.parse(stream, Loader=CanonicalLoader)
ruyaml.canonical_parse = canonical_parse
def canonical_compose(stream):
yaml = ruyaml.YAML()
return yaml.compose(stream, Loader=CanonicalLoader)
ruyaml.canonical_compose = canonical_compose
def canonical_compose_all(stream):
yaml = ruyaml.YAML()
return yaml.compose_all(stream, Loader=CanonicalLoader)
ruyaml.canonical_compose_all = canonical_compose_all
def canonical_load(stream):
yaml = ruyaml.YAML()
return yaml.load(stream, Loader=CanonicalLoader)
ruyaml.canonical_load = canonical_load
def canonical_load_all(stream):
yaml = ruyaml.YAML(typ='safe', pure=True)
yaml.Loader = CanonicalLoader
return yaml.load_all(stream)
ruyaml.canonical_load_all = canonical_load_all
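# Usage sketch, mirroring how _test/lib/test_canonical.py drives these helpers
# (the fixture path below is only an example from the data directory):
#
#     with open('_test/data/spec-10-09.canonical', 'rb') as fp:
#         data = fp.read()
#     tokens = list(ruyaml.canonical_scan(data))
#     events = list(ruyaml.canonical_parse(data))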
python-ruyaml-0.92.1/_test/lib/test_all.py 0000664 0000000 0000000 00000000527 15056754172 0020520 0 ustar 00root root 0000000 0000000 import sys # NOQA
import test_appliance
import ruyaml
def main(args=None):
collections = []
import test_yaml
collections.append(test_yaml)
if ruyaml.__with_libyaml__:
import test_yaml_ext
collections.append(test_yaml_ext)
test_appliance.run(collections, args)
if __name__ == '__main__':
main()
python-ruyaml-0.92.1/_test/lib/test_appliance.py 0000664 0000000 0000000 00000016060 15056754172 0021703 0 ustar 00root root 0000000 0000000 import argparse
import os
import pprint
import sys
import traceback
import types
# DATA = 'tests/data'
# determine the position of the data directory dynamically, relative to this
# program; this allows running the tests while the current path is not the top
# of the repository, e.g. from the tests/data directory: python ../test_yaml.py
DATA = __file__.rsplit(os.sep, 2)[0] + '/data'
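# For example, if this file lives at <repo>/_test/lib/test_appliance.py, DATA
# resolves to "<repo>/_test/data" (path shown here only for illustration).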
def find_test_functions(collections):
if not isinstance(collections, list):
collections = [collections]
functions = []
for collection in collections:
if not isinstance(collection, dict):
collection = vars(collection)
for key in sorted(collection):
value = collection[key]
if isinstance(value, types.FunctionType) and hasattr(value, 'unittest'):
functions.append(value)
return functions
def find_test_filenames(directory):
filenames = {}
for filename in os.listdir(directory):
if os.path.isfile(os.path.join(directory, filename)):
base, ext = os.path.splitext(filename)
filenames.setdefault(base, []).append(ext)
filenames = sorted(filenames.items())
return filenames
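# find_test_filenames() groups fixture files by basename; for the data
# directory in this repo it yields pairs such as
# ('spec-10-09', ['.canonical', '.data']) (example only; the extension order
# depends on os.listdir()).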
def parse_arguments(args):
""""""
parser = argparse.ArgumentParser(
usage=""" run the yaml tests. By default
all functions on all appropriate test_files are run. Functions have
unittest attributes that determine the required extensions to filenames
that need to be available in order to run that test. E.g.\n\n
python test_yaml.py test_constructor_types\n
python test_yaml.py --verbose test_tokens spec-02-05\n\n
The presence of an extension in the .skip attribute of a function
disables the test for that function."""
)
# ToDo: make into int and test > 0 in functions
parser.add_argument(
'--verbose',
'-v',
action='store_true',
default='YAML_TEST_VERBOSE' in os.environ,
help='set verbosity output',
)
parser.add_argument(
'--list-functions',
action='store_true',
help="""list all functions with required file extensions for test files
""",
)
parser.add_argument('function', nargs='?', help="""restrict function to run""")
parser.add_argument(
'filenames',
nargs='*',
help="""basename of filename set, extensions (.code, .data) have to
be a superset of those in the unittest attribute of the selected
function""",
)
args = parser.parse_args(args)
# print('args', args)
verbose = args.verbose
include_functions = [args.function] if args.function else []
include_filenames = args.filenames
# if args is None:
# args = sys.argv[1:]
# verbose = False
# if '-v' in args:
# verbose = True
# args.remove('-v')
# if '--verbose' in args:
# verbose = True
# args.remove('--verbose') # never worked without this
# if 'YAML_TEST_VERBOSE' in os.environ:
# verbose = True
# include_functions = []
# if args:
# include_functions.append(args.pop(0))
if 'YAML_TEST_FUNCTIONS' in os.environ:
include_functions.extend(os.environ['YAML_TEST_FUNCTIONS'].split())
# include_filenames = []
# include_filenames.extend(args)
if 'YAML_TEST_FILENAMES' in os.environ:
include_filenames.extend(os.environ['YAML_TEST_FILENAMES'].split())
return include_functions, include_filenames, verbose, args
def execute(function, filenames, verbose):
name = function.__name__
if verbose:
sys.stdout.write('=' * 75 + '\n')
sys.stdout.write('%s(%s)...\n' % (name, ', '.join(filenames)))
try:
function(verbose=verbose, *filenames) # noqa: B026
except Exception as exc:
info = sys.exc_info()
if isinstance(exc, AssertionError):
kind = 'FAILURE'
else:
kind = 'ERROR'
if verbose:
traceback.print_exc(limit=1, file=sys.stdout)
else:
sys.stdout.write(kind[0])
sys.stdout.flush()
else:
kind = 'SUCCESS'
info = None
if not verbose:
sys.stdout.write('.')
sys.stdout.flush()
return (name, filenames, kind, info)
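# execute() reports each run as a (name, filenames, kind, info) tuple, where
# kind is 'SUCCESS', 'FAILURE' (AssertionError) or 'ERROR' and info is the
# sys.exc_info() triple for failed runs; display() below consumes these tuples.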
def display(results, verbose):
if results and not verbose:
sys.stdout.write('\n')
total = len(results)
failures = 0
errors = 0
for name, filenames, kind, info in results:
if kind == 'SUCCESS':
continue
if kind == 'FAILURE':
failures += 1
if kind == 'ERROR':
errors += 1
sys.stdout.write('=' * 75 + '\n')
sys.stdout.write('%s(%s): %s\n' % (name, ', '.join(filenames), kind))
if kind == 'ERROR':
traceback.print_exception(*info)
else:
sys.stdout.write('Traceback (most recent call last):\n')
traceback.print_tb(info[2], file=sys.stdout)
sys.stdout.write('%s: see below\n' % info[0].__name__)
sys.stdout.write('~' * 75 + '\n')
for arg in info[1].args:
pprint.pprint(arg, stream=sys.stdout)
for filename in filenames:
sys.stdout.write('-' * 75 + '\n')
sys.stdout.write('%s:\n' % filename)
with open(filename, 'r', errors='replace') as fp:
data = fp.read()
sys.stdout.write(data)
if data and data[-1] != '\n':
sys.stdout.write('\n')
sys.stdout.write('=' * 75 + '\n')
sys.stdout.write('TESTS: %s\n' % total)
ret_val = 0
if failures:
sys.stdout.write('FAILURES: %s\n' % failures)
ret_val = 1
if errors:
sys.stdout.write('ERRORS: %s\n' % errors)
ret_val = 2
return ret_val
def run(collections, args=None):
test_functions = find_test_functions(collections)
test_filenames = find_test_filenames(DATA)
include_functions, include_filenames, verbose, a = parse_arguments(args)
if a.list_functions:
print('test functions:')
for f in test_functions:
print(' {:30s} {}'.format(f.__name__, f.unittest))
return
results = []
for function in test_functions:
if include_functions and function.__name__ not in include_functions:
continue
if function.unittest:
for base, exts in test_filenames:
if include_filenames and base not in include_filenames:
continue
filenames = []
for ext in function.unittest:
if ext not in exts:
break
filenames.append(os.path.join(DATA, base + ext))
else:
skip_exts = getattr(function, 'skip', [])
for skip_ext in skip_exts:
if skip_ext in exts:
break
else:
result = execute(function, filenames, verbose)
results.append(result)
else:
result = execute(function, [], verbose)
results.append(result)
return display(results, verbose=verbose)
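# Sketch of the convention run() relies on (illustrative only; the function
# name below is made up):
#
#     def test_something(data_filename, canonical_filename, verbose=False):
#         ...
#     test_something.unittest = ['.data', '.canonical']  # required fixture extensions
#     test_something.skip = ['.skip-ext']                 # extensions that disable the test
#
# run() only calls a function for fixture sets that provide every extension in
# .unittest and none of the extensions listed in .skip.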
python-ruyaml-0.92.1/_test/lib/test_build.py 0000664 0000000 0000000 00000000602 15056754172 0021041 0 ustar 00root root 0000000 0000000 if __name__ == '__main__':
import distutils.util
import os
import sys
build_lib = 'build/lib'
build_lib_ext = os.path.join(
'build', 'lib.%s-%s' % (distutils.util.get_platform(), sys.version[0:3])
)
sys.path.insert(0, build_lib)
sys.path.insert(0, build_lib_ext)
import test_appliance
import test_yaml
test_appliance.run(test_yaml)
python-ruyaml-0.92.1/_test/lib/test_build_ext.py 0000664 0000000 0000000 00000000612 15056754172 0021722 0 ustar 00root root 0000000 0000000 if __name__ == '__main__':
import distutils.util
import os
import sys
build_lib = 'build/lib'
build_lib_ext = os.path.join(
'build', 'lib.%s-%s' % (distutils.util.get_platform(), sys.version[0:3])
)
sys.path.insert(0, build_lib)
sys.path.insert(0, build_lib_ext)
import test_appliance
import test_yaml_ext
test_appliance.run(test_yaml_ext)
python-ruyaml-0.92.1/_test/lib/test_canonical.py 0000664 0000000 0000000 00000002553 15056754172 0021700 0 ustar 00root root 0000000 0000000 # Skipped because we have no idea where this "canonical" module
# comes from, nor where all those fixtures originate
import pytest
import ruyaml
# import canonical # NOQA
def test_canonical_scanner(canonical_filename, verbose=False):
with open(canonical_filename, 'rb') as fp0:
data = fp0.read()
tokens = list(ruyaml.canonical_scan(data))
assert tokens, tokens
if verbose:
for token in tokens:
print(token)
test_canonical_scanner.unittest = ['.canonical']
def test_canonical_parser(canonical_filename, verbose=False):
with open(canonical_filename, 'rb') as fp0:
data = fp0.read()
events = list(ruyaml.canonical_parse(data))
assert events, events
if verbose:
for event in events:
print(event)
test_canonical_parser.unittest = ['.canonical']
def test_canonical_error(data_filename, canonical_filename, verbose=False):
with open(data_filename, 'rb') as fp0:
data = fp0.read()
try:
output = list(ruyaml.canonical_load_all(data)) # NOQA
except ruyaml.YAMLError as exc:
if verbose:
print(exc)
else:
raise AssertionError('expected an exception')
test_canonical_error.unittest = ['.data', '.canonical']
test_canonical_error.skip = ['.empty']
if __name__ == '__main__':
import test_appliance
test_appliance.run(globals())
python-ruyaml-0.92.1/_test/lib/test_constructor.py 0000664 0000000 0000000 00000025213 15056754172 0022334 0 ustar 00root root 0000000 0000000 # Skipped because we have no idea where all those fixtures originate
import pytest
pytestmark = pytest.mark.skip
import ruyaml as yaml
YAML = yaml.YAML
import datetime
import pprint
import ruyaml
try:
set
except NameError:
from sets import Set as set # NOQA
import ruyaml.tokens
def cmp(a, b):
return (a > b) - (a < b)
def execute(code):
global value
exec(code)
return value
def _make_objects():
global MyLoader, MyDumper, MyTestClass1, MyTestClass2, MyTestClass3
global YAMLobject1, YAMLobject2, AnObject, AnInstance, AState, ACustomState
global InitArgs, InitArgsWithState
global NewArgs, NewArgsWithState, Reduce, ReduceWithState, MyInt, MyList, MyDict
global FixedOffset, today, execute
class MyLoader(ruyaml.Loader):
pass
class MyDumper(ruyaml.Dumper):
pass
class MyTestClass1:
def __init__(self, x, y=0, z=0):
self.x = x
self.y = y
self.z = z
def __eq__(self, other):
if isinstance(other, MyTestClass1):
                return (self.__class__, self.__dict__) == (
                    other.__class__,
                    other.__dict__,
                )
else:
return False
def construct1(constructor, node):
mapping = constructor.construct_mapping(node)
return MyTestClass1(**mapping)
def represent1(representer, native):
return representer.represent_mapping('!tag1', native.__dict__)
ruyaml.add_constructor('!tag1', construct1, Loader=MyLoader)
ruyaml.add_representer(MyTestClass1, represent1, Dumper=MyDumper)
class MyTestClass2(MyTestClass1, ruyaml.YAMLObject):
        yaml_loader = MyLoader
        yaml_dumper = MyDumper
        yaml_tag = '!tag2'
def from_yaml(cls, constructor, node):
x = constructor.construct_yaml_int(node)
return cls(x=x)
from_yaml = classmethod(from_yaml)
def to_yaml(cls, representer, native):
return representer.represent_scalar(cls.yaml_tag, str(native.x))
to_yaml = classmethod(to_yaml)
class MyTestClass3(MyTestClass2):
        yaml_tag = '!tag3'
def from_yaml(cls, constructor, node):
mapping = constructor.construct_mapping(node)
if '=' in mapping:
x = mapping['=']
del mapping['=']
mapping['x'] = x
return cls(**mapping)
from_yaml = classmethod(from_yaml)
def to_yaml(cls, representer, native):
return representer.represent_mapping(cls.yaml_tag, native.__dict__)
to_yaml = classmethod(to_yaml)
class YAMLobject1(ruyaml.YAMLObject):
        yaml_loader = MyLoader
        yaml_dumper = MyDumper
        yaml_tag = '!foo'
def __init__(self, my_parameter=None, my_another_parameter=None):
self.my_parameter = my_parameter
self.my_another_parameter = my_another_parameter
def __eq__(self, other):
if isinstance(other, YAMLobject1):
                return (self.__class__, self.__dict__) == (
                    other.__class__,
                    other.__dict__,
                )
else:
return False
class YAMLobject2(ruyaml.YAMLObject):
        yaml_loader = MyLoader
        yaml_dumper = MyDumper
        yaml_tag = '!bar'
def __init__(self, foo=1, bar=2, baz=3):
self.foo = foo
self.bar = bar
self.baz = baz
def __getstate__(self):
return {1: self.foo, 2: self.bar, 3: self.baz}
def __setstate__(self, state):
self.foo = state[1]
self.bar = state[2]
self.baz = state[3]
def __eq__(self, other):
if isinstance(other, YAMLobject2):
                return (self.__class__, self.__dict__) == (
                    other.__class__,
                    other.__dict__,
                )
else:
return False
class AnObject:
def __new__(cls, foo=None, bar=None, baz=None):
self = object.__new__(cls)
self.foo = foo
self.bar = bar
self.baz = baz
return self
def __cmp__(self, other):
return cmp(
(type(self), self.foo, self.bar, self.baz), # NOQA
(type(other), other.foo, other.bar, other.baz),
)
def __eq__(self, other):
return type(self) is type(other) and (self.foo, self.bar, self.baz) == (
other.foo,
other.bar,
other.baz,
)
class AnInstance:
def __init__(self, foo=None, bar=None, baz=None):
self.foo = foo
self.bar = bar
self.baz = baz
def __cmp__(self, other):
return cmp(
(type(self), self.foo, self.bar, self.baz), # NOQA
(type(other), other.foo, other.bar, other.baz),
)
def __eq__(self, other):
return type(self) is type(other) and (self.foo, self.bar, self.baz) == (
other.foo,
other.bar,
other.baz,
)
class AState(AnInstance):
def __getstate__(self):
return {'_foo': self.foo, '_bar': self.bar, '_baz': self.baz}
def __setstate__(self, state):
self.foo = state['_foo']
self.bar = state['_bar']
self.baz = state['_baz']
class ACustomState(AnInstance):
def __getstate__(self):
return (self.foo, self.bar, self.baz)
def __setstate__(self, state):
self.foo, self.bar, self.baz = state
# class InitArgs(AnInstance):
# def __getinitargs__(self):
# return (self.foo, self.bar, self.baz)
# def __getstate__(self):
# return {}
# class InitArgsWithState(AnInstance):
# def __getinitargs__(self):
# return (self.foo, self.bar)
# def __getstate__(self):
# return self.baz
# def __setstate__(self, state):
# self.baz = state
class NewArgs(AnObject):
def __getnewargs__(self):
return (self.foo, self.bar, self.baz)
def __getstate__(self):
return {}
class NewArgsWithState(AnObject):
def __getnewargs__(self):
return (self.foo, self.bar)
def __getstate__(self):
return self.baz
def __setstate__(self, state):
self.baz = state
InitArgs = NewArgs
InitArgsWithState = NewArgsWithState
class Reduce(AnObject):
def __reduce__(self):
return self.__class__, (self.foo, self.bar, self.baz)
class ReduceWithState(AnObject):
def __reduce__(self):
return self.__class__, (self.foo, self.bar), self.baz
def __setstate__(self, state):
self.baz = state
class MyInt(int):
def __eq__(self, other):
return type(self) is type(other) and int(self) == int(other)
class MyList(list):
def __init__(self, n=1):
self.extend([None] * n)
def __eq__(self, other):
return type(self) is type(other) and list(self) == list(other)
class MyDict(dict):
def __init__(self, n=1):
for k in range(n):
self[k] = None
def __eq__(self, other):
return type(self) is type(other) and dict(self) == dict(other)
class FixedOffset(datetime.tzinfo):
def __init__(self, offset, name):
self.__offset = datetime.timedelta(minutes=offset)
self.__name = name
def utcoffset(self, dt):
return self.__offset
def tzname(self, dt):
return self.__name
def dst(self, dt):
return datetime.timedelta(0)
today = datetime.date.today()
from collections import OrderedDict
    # subclass so the class name is 'ordereddict'; importing OrderedDict under
    # another name would not change the name used in output
class ordereddict(OrderedDict):
pass
def _load_code(expression):
return eval(expression, globals())
def _serialize_value(data):
if isinstance(data, list):
return '[%s]' % ', '.join(map(_serialize_value, data))
elif isinstance(data, dict):
items = []
for key, value in data.items():
key = _serialize_value(key)
value = _serialize_value(value)
items.append('%s: %s' % (key, value))
items.sort()
return '{%s}' % ', '.join(items)
elif isinstance(data, datetime.datetime):
return repr(data.utctimetuple())
elif isinstance(data, float) and data != data:
return '?'
else:
return str(data)
def test_constructor_types(data_filename, code_filename, verbose=False):
_make_objects()
native1 = None
native2 = None
yaml = ruyaml.YAML(typ='safe', pure=True)
yaml.loader = MyLoader
try:
with open(data_filename, 'rb') as fp0:
native1 = list(ruyaml.load_all(fp0))
if len(native1) == 1:
native1 = native1[0]
with open(code_filename, 'rb') as fp0:
native2 = _load_code(fp0.read())
try:
if native1 == native2:
return
except TypeError:
pass
# print('native1', native1)
if verbose:
print('SERIALIZED NATIVE1:')
print(_serialize_value(native1))
print('SERIALIZED NATIVE2:')
print(_serialize_value(native2))
assert _serialize_value(native1) == _serialize_value(native2), (
native1,
native2,
)
finally:
if verbose:
print('NATIVE1:')
pprint.pprint(native1)
print('NATIVE2:')
pprint.pprint(native2)
test_constructor_types.unittest = ['.data', '.code']
def test_roundtrip_data(code_filename, roundtrip_filename, verbose=False):
_make_objects()
with open(code_filename, 'rb') as fp0:
value1 = fp0.read()
yaml = YAML(typ='safe', pure=True)
yaml.Loader = MyLoader
native2 = list(yaml.load_all(value1))
if len(native2) == 1:
native2 = native2[0]
try:
value2 = ruyaml.dump(
native2,
Dumper=MyDumper,
default_flow_style=False,
allow_unicode=True,
encoding='utf-8',
)
# value2 += x
if verbose:
print('SERIALIZED NATIVE1:')
print(value1)
print('SERIALIZED NATIVE2:')
print(value2)
assert value1 == value2, (value1, value2)
finally:
if verbose:
print('NATIVE2:')
pprint.pprint(native2)
test_roundtrip_data.unittest = ['.data', '.roundtrip']
if __name__ == '__main__':
import sys
import test_constructor # NOQA
sys.modules['test_constructor'] = sys.modules['__main__']
import test_appliance
test_appliance.run(globals())
python-ruyaml-0.92.1/_test/lib/test_emitter.py 0000664 0000000 0000000 00000012147 15056754172 0021422 0 ustar 00root root 0000000 0000000 from __future__ import absolute_import, print_function
# Skipped because we have no idea where all those fixtures originate
import pytest
import ruyaml as yaml
from ruyaml import YAML
def _compare_events(events1, events2):
assert len(events1) == len(events2), (events1, events2)
for event1, event2 in zip(events1, events2):
assert event1.__class__ == event2.__class__, (event1, event2)
if isinstance(event1, yaml.NodeEvent):
assert event1.anchor == event2.anchor, (event1, event2)
if isinstance(event1, yaml.CollectionStartEvent):
assert event1.tag == event2.tag, (event1, event2)
if isinstance(event1, yaml.ScalarEvent):
if True not in event1.implicit + event2.implicit:
assert event1.tag == event2.tag, (event1, event2)
assert event1.value == event2.value, (event1, event2)
def test_emitter_on_data(data_filename, canonical_filename, verbose=False):
with open(data_filename, 'rb') as fp0:
events = list(YAML().parse(fp0))
output = YAML().emit(events)
if verbose:
print('OUTPUT:')
print(output)
new_events = list(yaml.parse(output))
_compare_events(events, new_events)
test_emitter_on_data.unittest = ['.data', '.canonical']
def test_emitter_on_canonical(canonical_filename, verbose=False):
with open(canonical_filename, 'rb') as fp0:
events = list(YAML().parse(fp0))
for canonical in [False, True]:
output = YAML().emit(events, canonical=canonical)
if verbose:
print('OUTPUT (canonical=%s):' % canonical)
print(output)
new_events = list(yaml.parse(output))
_compare_events(events, new_events)
test_emitter_on_canonical.unittest = ['.canonical']
def test_emitter_styles(data_filename, canonical_filename, verbose=False):
for filename in [data_filename, canonical_filename]:
with open(filename, 'rb') as fp0:
events = list(YAML().parse(fp0))
for flow_style in [False, True]:
for style in ['|', '>', '"', "'", ""]:
styled_events = []
for event in events:
if isinstance(event, yaml.ScalarEvent):
event = yaml.ScalarEvent(
event.anchor,
event.tag,
event.implicit,
event.value,
style=style,
)
elif isinstance(event, yaml.SequenceStartEvent):
event = yaml.SequenceStartEvent(
event.anchor,
event.tag,
event.implicit,
flow_style=flow_style,
)
elif isinstance(event, yaml.MappingStartEvent):
event = yaml.MappingStartEvent(
event.anchor,
event.tag,
event.implicit,
flow_style=flow_style,
)
styled_events.append(event)
output = YAML().emit(styled_events)
if verbose:
print(
'OUTPUT (filename=%r, flow_style=%r, style=%r)'
% (filename, flow_style, style)
)
print(output)
new_events = list(YAML().parse(output))
_compare_events(events, new_events)
test_emitter_styles.unittest = ['.data', '.canonical']
class EventsLoader(yaml.Loader):
def construct_event(self, node):
if isinstance(node, yaml.ScalarNode):
mapping = {}
else:
mapping = self.construct_mapping(node)
class_name = str(node.tag[1:]) + 'Event'
if class_name in [
'AliasEvent',
'ScalarEvent',
'SequenceStartEvent',
'MappingStartEvent',
]:
mapping.setdefault('anchor', None)
if class_name in ['ScalarEvent', 'SequenceStartEvent', 'MappingStartEvent']:
mapping.setdefault('tag', None)
if class_name in ['SequenceStartEvent', 'MappingStartEvent']:
mapping.setdefault('implicit', True)
if class_name == 'ScalarEvent':
mapping.setdefault('implicit', (False, True))
mapping.setdefault('value', "")
value = getattr(yaml, class_name)(**mapping)
return value
# if Loader is not a composite, add this function
# EventsLoader.add_constructor = yaml.constructor.Constructor.add_constructor
EventsLoader.add_constructor(None, EventsLoader.construct_event)
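# The ".events" fixtures consumed through this loader are plain YAML sequences
# of tagged nodes, e.g. (excerpt from _test/data/tags.events):
#
#     - !StreamStart
#     - !DocumentStart
#     - !Scalar { value: 'data' }
#     - !DocumentEnd
#     - !StreamEnd
#
# construct_event() maps each "!Name" tag onto the corresponding ruyaml
# NameEvent class and fills in default anchor/tag/implicit values.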
def test_emitter_events(events_filename, verbose=False):
with open(events_filename, 'rb') as fp0:
events = list(YAML().load(fp0, Loader=EventsLoader))
output = YAML().emit(events)
if verbose:
print('OUTPUT:')
print(output)
new_events = list(YAML().parse(output))
_compare_events(events, new_events)
if __name__ == '__main__':
import test_appliance
test_appliance.run(globals())
python-ruyaml-0.92.1/_test/lib/test_errors.py 0000664 0000000 0000000 00000005017 15056754172 0021263 0 ustar 00root root 0000000 0000000 import ruyaml as yaml
YAML = yaml.YAML
import warnings
# Skipped because we have no idea where the "error_filename"
# fixture is supposed to come from
import pytest
import test_emitter
import ruyaml as yaml
pytestmark = pytest.mark.skip
warnings.simplefilter('ignore', yaml.error.UnsafeLoaderWarning)
def test_loader_error(error_filename, verbose=False):
yaml = YAML(typ='safe', pure=True)
try:
with open(error_filename, 'rb') as fp0:
list(yaml.load_all(fp0))
except yaml.YAMLError as exc:
if verbose:
print('%s:' % exc.__class__.__name__, exc)
else:
raise AssertionError('expected an exception')
test_loader_error.unittest = ['.loader-error']
def test_loader_error_string(error_filename, verbose=False):
yaml = YAML(typ='safe', pure=True)
try:
with open(error_filename, 'rb') as fp0:
list(yaml.load_all(fp0.read()))
except yaml.YAMLError as exc:
if verbose:
print('%s:' % exc.__class__.__name__, exc)
else:
raise AssertionError('expected an exception')
test_loader_error_string.unittest = ['.loader-error']
def test_loader_error_single(error_filename, verbose=False):
yaml = YAML(typ='safe', pure=True)
try:
with open(error_filename, 'rb') as fp0:
yaml.load(fp0.read())
except yaml.YAMLError as exc:
if verbose:
print('%s:' % exc.__class__.__name__, exc)
else:
raise AssertionError('expected an exception')
test_loader_error_single.unittest = ['.single-loader-error']
def test_emitter_error(error_filename, verbose=False):
yaml = YAML(typ='safe', pure=True)
with open(error_filename, 'rb') as fp0:
events = list(yaml.load(fp0, Loader=test_emitter.EventsLoader))
try:
yaml.emit(events)
except yaml.YAMLError as exc:
if verbose:
print('%s:' % exc.__class__.__name__, exc)
else:
raise AssertionError('expected an exception')
test_emitter_error.unittest = ['.emitter-error']
def test_dumper_error(error_filename, verbose=False):
yaml = YAML(typ='safe', pure=True)
with open(error_filename, 'rb') as fp0:
code = fp0.read()
try:
import yaml
exec(code)
except yaml.YAMLError as exc:
if verbose:
print('%s:' % exc.__class__.__name__, exc)
else:
raise AssertionError('expected an exception')
test_dumper_error.unittest = ['.dumper-error']
if __name__ == '__main__':
import test_appliance
test_appliance.run(globals())
python-ruyaml-0.92.1/_test/lib/test_input_output.py 0000664 0000000 0000000 00000014761 15056754172 0022534 0 ustar 00root root 0000000 0000000 import ruyaml as yaml
YAML = yaml.YAML
import codecs
import os
import os.path
import tempfile
from io import BytesIO, StringIO
# Skipped because we have no idea where the "unicode_filename"
# fixture is supposed to come from
import pytest
import ruyaml as yaml
pytestmark = pytest.mark.skip
def test_unicode_input(unicode_filename, verbose=False):
yaml = YAML(typ='safe', pure=True)
with open(unicode_filename, 'rb') as fp:
data = fp.read().decode('utf-8')
value = ' '.join(data.split())
output = yaml.load(data)
assert output == value, (output, value)
output = yaml.load(StringIO(data))
assert output == value, (output, value)
for input in [
data.encode('utf-8'),
codecs.BOM_UTF8 + data.encode('utf-8'),
codecs.BOM_UTF16_BE + data.encode('utf-16-be'),
codecs.BOM_UTF16_LE + data.encode('utf-16-le'),
]:
if verbose:
print('INPUT:', repr(input[:10]), '...')
output = yaml.load(input)
assert output == value, (output, value)
output = yaml.load(BytesIO(input))
assert output == value, (output, value)
test_unicode_input.unittest = ['.unicode']
def test_unicode_input_errors(unicode_filename, verbose=False):
yaml = YAML(typ='safe', pure=True)
with open(unicode_filename, 'rb') as fp:
data = fp.read().decode('utf-8')
for input in [
data.encode('latin1', 'ignore'),
data.encode('utf-16-be'),
data.encode('utf-16-le'),
codecs.BOM_UTF8 + data.encode('utf-16-be'),
codecs.BOM_UTF16_BE + data.encode('utf-16-le'),
codecs.BOM_UTF16_LE + data.encode('utf-8') + b'!',
]:
try:
yaml.load(input)
except yaml.YAMLError as exc:
if verbose:
print(exc)
else:
raise AssertionError('expected an exception')
try:
yaml.load(BytesIO(input))
except yaml.YAMLError as exc:
if verbose:
print(exc)
else:
raise AssertionError('expected an exception')
test_unicode_input_errors.unittest = ['.unicode']
def test_unicode_output(unicode_filename, verbose=False):
yaml = YAML(typ='safe', pure=True)
with open(unicode_filename, 'rb') as fp:
data = fp.read().decode('utf-8')
value = ' '.join(data.split())
for allow_unicode in [False, True]:
data1 = yaml.dump(value, allow_unicode=allow_unicode)
for encoding in [None, 'utf-8', 'utf-16-be', 'utf-16-le']:
stream = StringIO()
yaml.dump(value, stream, encoding=encoding, allow_unicode=allow_unicode)
data2 = stream.getvalue()
data3 = yaml.dump(value, encoding=encoding, allow_unicode=allow_unicode)
if encoding is not None:
assert isinstance(data3, bytes)
data3 = data3.decode(encoding)
stream = BytesIO()
if encoding is None:
try:
yaml.dump(
value, stream, encoding=encoding, allow_unicode=allow_unicode
)
except TypeError as exc:
if verbose:
print(exc)
data4 = None
else:
raise AssertionError('expected an exception')
else:
yaml.dump(value, stream, encoding=encoding, allow_unicode=allow_unicode)
data4 = stream.getvalue()
if verbose:
print('BYTES:', data4[:50])
data4 = data4.decode(encoding)
for copy in [data1, data2, data3, data4]:
if copy is None:
continue
assert isinstance(copy, str)
if allow_unicode:
try:
copy[4:].encode('ascii')
except UnicodeEncodeError as exc:
if verbose:
print(exc)
else:
raise AssertionError('expected an exception')
else:
copy[4:].encode('ascii')
assert isinstance(data1, str), (type(data1), encoding)
assert isinstance(data2, str), (type(data2), encoding)
test_unicode_output.unittest = ['.unicode']
def test_file_output(unicode_filename, verbose=False):
yaml = YAML(typ='safe', pure=True)
with open(unicode_filename, 'rb') as fp:
data = fp.read().decode('utf-8')
handle, filename = tempfile.mkstemp()
os.close(handle)
try:
stream = StringIO()
yaml.dump(data, stream, allow_unicode=True)
data1 = stream.getvalue()
stream = BytesIO()
yaml.dump(data, stream, encoding='utf-16-le', allow_unicode=True)
data2 = stream.getvalue().decode('utf-16-le')[1:]
with open(filename, 'w', encoding='utf-16-le') as stream:
yaml.dump(data, stream, allow_unicode=True)
with open(filename, 'r', encoding='utf-16-le') as fp0:
data3 = fp0.read()
with open(filename, 'wb') as stream:
yaml.dump(data, stream, encoding='utf-8', allow_unicode=True)
with open(filename, 'r', encoding='utf-8') as fp0:
data4 = fp0.read()
assert data1 == data2, (data1, data2)
assert data1 == data3, (data1, data3)
assert data1 == data4, (data1, data4)
finally:
if os.path.exists(filename):
os.unlink(filename)
test_file_output.unittest = ['.unicode']
def test_unicode_transfer(unicode_filename, verbose=False):
yaml = YAML(typ='safe', pure=True)
with open(unicode_filename, 'rb') as fp:
data = fp.read().decode('utf-8')
for encoding in [None, 'utf-8', 'utf-16-be', 'utf-16-le']:
input = data
if encoding is not None:
input = ('\ufeff' + input).encode(encoding)
output1 = yaml.emit(yaml.parse(input), allow_unicode=True)
if encoding is None:
stream = StringIO()
else:
stream = BytesIO()
yaml.emit(yaml.parse(input), stream, allow_unicode=True)
output2 = stream.getvalue()
assert isinstance(output1, str), (type(output1), encoding)
if encoding is None:
assert isinstance(output2, str), (type(output1), encoding)
else:
assert isinstance(output2, bytes), (type(output1), encoding)
output2.decode(encoding)
test_unicode_transfer.unittest = ['.unicode']
if __name__ == '__main__':
import test_appliance
test_appliance.run(globals())
python-ruyaml-0.92.1/_test/lib/test_mark.py 0000664 0000000 0000000 00000002206 15056754172 0020676 0 ustar 00root root 0000000 0000000 # Skipped because we have no idea where the "marks_filename"
# fixture is supposed to come from
import pytest
import ruyaml as yaml
pytestmark = pytest.mark.skip
def test_marks(marks_filename, verbose=False):
with open(marks_filename, 'r') as fp0:
inputs = fp0.read().split('---\n')[1:]
for input in inputs:
index = 0
line = 0
column = 0
while input[index] != '*':
if input[index] == '\n':
line += 1
column = 0
else:
column += 1
index += 1
mark = yaml.Mark(marks_filename, index, line, column, str(input), index)
snippet = mark.get_snippet(indent=2, max_length=79)
if verbose:
print(snippet)
assert isinstance(snippet, str), type(snippet)
assert snippet.count('\n') == 1, snippet.count('\n')
data, pointer = snippet.split('\n')
assert len(data) < 82, len(data)
assert data[len(pointer) - 1] == '*', data[len(pointer) - 1]
test_marks.unittest = ['.marks']
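# The ".marks" fixture is a series of '---'-separated snippets in which a
# single '*' marks the position whose snippet is rendered; see
# _test/data/test_mark.marks for the actual inputs exercised here.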
if __name__ == '__main__':
import test_appliance
test_appliance.run(globals())
python-ruyaml-0.92.1/_test/lib/test_reader.py 0000664 0000000 0000000 00000002347 15056754172 0021214 0 ustar 00root root 0000000 0000000 import codecs # NOQA
import io
# Skipped because we have no idea where the "error_filename"
# fixture is supposed to come from
import pytest
import ruyaml.reader
pytestmark = pytest.mark.skip
def _run_reader(data, verbose):
try:
        stream = ruyaml.reader.Reader(data)
while stream.peek() != '\0':
stream.forward()
    except ruyaml.reader.ReaderError as exc:
if verbose:
print(exc)
else:
raise AssertionError('expected an exception')
def test_stream_error(error_filename, verbose=False):
with open(error_filename, 'rb') as fp0:
_run_reader(fp0, verbose)
with open(error_filename, 'rb') as fp0:
_run_reader(fp0.read(), verbose)
for encoding in ['utf-8', 'utf-16-le', 'utf-16-be']:
try:
with open(error_filename, 'rb') as fp0:
data = fp0.read().decode(encoding)
break
except UnicodeDecodeError:
pass
else:
return
_run_reader(data, verbose)
with io.open(error_filename, encoding=encoding) as fp:
_run_reader(fp, verbose)
test_stream_error.unittest = ['.stream-error']
if __name__ == '__main__':
import test_appliance
test_appliance.run(globals())
python-ruyaml-0.92.1/_test/lib/test_recursive.py 0000664 0000000 0000000 00000003161 15056754172 0021754 0 ustar 00root root 0000000 0000000 # Skipped because we have no idea where the "recursive_filename"
# fixture is supposed to come from
import pytest
import ruyaml
pytestmark = pytest.mark.skip
class AnInstance:
def __init__(self, foo, bar):
self.foo = foo
self.bar = bar
def __repr__(self):
try:
return '%s(foo=%r, bar=%r)' % (self.__class__.__name__, self.foo, self.bar)
except RuntimeError:
return '%s(foo=..., bar=...)' % self.__class__.__name__
class AnInstanceWithState(AnInstance):
def __getstate__(self):
return {'attributes': [self.foo, self.bar]}
def __setstate__(self, state):
self.foo, self.bar = state['attributes']
def test_recursive(recursive_filename, verbose=False):
yaml = ruyaml.YAML(typ='safe', pure=True)
context = globals().copy()
with open(recursive_filename, 'rb') as fp0:
exec(fp0.read(), context)
value1 = context['value']
output1 = None
value2 = None
output2 = None
try:
buf = ruyaml.compat.StringIO()
        yaml.dump(value1, buf)
        output1 = buf.getvalue()
        value2 = yaml.load(output1)
buf = ruyaml.compat.StringIO()
yaml.dump(value2, buf)
output2 = buf.getvalue()
assert output1 == output2, (output1, output2)
finally:
if verbose:
print('VALUE1:', value1)
print('VALUE2:', value2)
print('OUTPUT1:')
print(output1)
print('OUTPUT2:')
print(output2)
test_recursive.unittest = ['.recursive']
if __name__ == '__main__':
import test_appliance
test_appliance.run(globals())
python-ruyaml-0.92.1/_test/lib/test_representer.py 0000664 0000000 0000000 00000003534 15056754172 0022307 0 ustar 00root root 0000000 0000000 import ruyaml as yaml
YAML = yaml.YAML
import pprint
# Skipped because we have no idea where the "code_filename"
# fixture is supposed to come from
import pytest
import test_constructor
pytestmark = pytest.mark.skip
def test_representer_types(code_filename, verbose=False):
yaml = YAML(typ='safe', pure=True)
test_constructor._make_objects()
for allow_unicode in [False, True]:
for encoding in ['utf-8', 'utf-16-be', 'utf-16-le']:
with open(code_filename, 'rb') as fp0:
native1 = test_constructor._load_code(fp0.read())
native2 = None
try:
output = yaml.dump(
native1,
Dumper=test_constructor.MyDumper,
allow_unicode=allow_unicode,
encoding=encoding,
)
native2 = yaml.load(output, Loader=test_constructor.MyLoader)
try:
if native1 == native2:
continue
except TypeError:
pass
value1 = test_constructor._serialize_value(native1)
value2 = test_constructor._serialize_value(native2)
if verbose:
print('SERIALIZED NATIVE1:')
print(value1)
print('SERIALIZED NATIVE2:')
print(value2)
assert value1 == value2, (native1, native2)
finally:
if verbose:
print('NATIVE1:')
pprint.pprint(native1)
print('NATIVE2:')
pprint.pprint(native2)
print('OUTPUT:')
print(output)
test_representer_types.unittest = ['.code']
if __name__ == '__main__':
import test_appliance
test_appliance.run(globals())
python-ruyaml-0.92.1/_test/lib/test_resolver.py 0000664 0000000 0000000 00000007107 15056754172 0021612 0 ustar 00root root 0000000 0000000 import ruyaml as yaml
yaml = yaml.YAML()
import pprint
# Skipped because we have no idea where all those fixtures originate
import pytest
import ruyaml as yaml
pytestmark = pytest.mark.skip
def test_implicit_resolver(data_filename, detect_filename, verbose=False):
correct_tag = None
node = None
try:
with open(detect_filename, 'r') as fp0:
correct_tag = fp0.read().strip()
with open(data_filename, 'rb') as fp0:
node = yaml.compose(fp0)
assert isinstance(node, yaml.SequenceNode), node
for scalar in node.value:
assert isinstance(scalar, yaml.ScalarNode), scalar
assert scalar.tag == correct_tag, (scalar.tag, correct_tag)
finally:
if verbose:
print('CORRECT TAG:', correct_tag)
if hasattr(node, 'value'):
print('CHILDREN:')
pprint.pprint(node.value)
test_implicit_resolver.unittest = ['.data', '.detect']
def _make_path_loader_and_dumper():
global MyLoader, MyDumper
class MyLoader(yaml.Loader):
pass
class MyDumper(yaml.Dumper):
pass
yaml.add_path_resolver('!root', [], Loader=MyLoader, Dumper=MyDumper)
yaml.add_path_resolver('!root/scalar', [], str, Loader=MyLoader, Dumper=MyDumper)
yaml.add_path_resolver(
'!root/key11/key12/*', ['key11', 'key12'], Loader=MyLoader, Dumper=MyDumper
)
yaml.add_path_resolver(
'!root/key21/1/*', ['key21', 1], Loader=MyLoader, Dumper=MyDumper
)
yaml.add_path_resolver(
'!root/key31/*/*/key14/map',
['key31', None, None, 'key14'],
dict,
Loader=MyLoader,
Dumper=MyDumper,
)
return MyLoader, MyDumper
def _convert_node(node):
if isinstance(node, yaml.ScalarNode):
return (node.tag, node.value)
elif isinstance(node, yaml.SequenceNode):
value = []
for item in node.value:
value.append(_convert_node(item))
return (node.tag, value)
elif isinstance(node, yaml.MappingNode):
value = []
for key, item in node.value:
value.append((_convert_node(key), _convert_node(item)))
return (node.tag, value)
def test_path_resolver_loader(data_filename, path_filename, verbose=False):
_make_path_loader_and_dumper()
with open(data_filename, 'rb') as fp0:
nodes1 = list(yaml.compose_all(fp0.read(), Loader=MyLoader))
with open(path_filename, 'rb') as fp0:
nodes2 = list(yaml.compose_all(fp0.read()))
try:
for node1, node2 in zip(nodes1, nodes2):
data1 = _convert_node(node1)
data2 = _convert_node(node2)
assert data1 == data2, (data1, data2)
finally:
if verbose:
print(yaml.serialize_all(nodes1))
test_path_resolver_loader.unittest = ['.data', '.path']
def test_path_resolver_dumper(data_filename, path_filename, verbose=False):
_make_path_loader_and_dumper()
for filename in [data_filename, path_filename]:
with open(filename, 'rb') as fp0:
output = yaml.serialize_all(yaml.compose_all(fp0), Dumper=MyDumper)
if verbose:
print(output)
nodes1 = yaml.compose_all(output)
with open(data_filename, 'rb') as fp0:
nodes2 = yaml.compose_all(fp0)
for node1, node2 in zip(nodes1, nodes2):
data1 = _convert_node(node1)
data2 = _convert_node(node2)
assert data1 == data2, (data1, data2)
test_path_resolver_dumper.unittest = ['.data', '.path']
if __name__ == '__main__':
import test_appliance
test_appliance.run(globals())
python-ruyaml-0.92.1/_test/lib/test_structure.py 0000664 0000000 0000000 00000016574 15056754172 0022021 0 ustar 00root root 0000000 0000000 import pprint
import canonical # NOQA
# Skipped because we have no idea where this "canonical" module
# comes from, nor where all those fixtures originate
import pytest
import ruyaml
pytestmark = pytest.mark.skip
# import canonical # NOQA
def _convert_structure(loader):
if loader.check_event(ruyaml.ScalarEvent):
event = loader.get_event()
if event.tag or event.anchor or event.value:
return True
else:
return None
elif loader.check_event(ruyaml.SequenceStartEvent):
loader.get_event()
sequence = []
while not loader.check_event(ruyaml.SequenceEndEvent):
sequence.append(_convert_structure(loader))
loader.get_event()
return sequence
elif loader.check_event(ruyaml.MappingStartEvent):
loader.get_event()
mapping = []
while not loader.check_event(ruyaml.MappingEndEvent):
key = _convert_structure(loader)
value = _convert_structure(loader)
mapping.append((key, value))
loader.get_event()
return mapping
elif loader.check_event(ruyaml.AliasEvent):
loader.get_event()
return '*'
else:
loader.get_event()
return '?'
def test_structure(data_filename, structure_filename, verbose=False):
nodes1 = []
with open(structure_filename, 'r') as fp:
nodes2 = eval(fp.read())
try:
with open(data_filename, 'rb') as fp:
loader = ruyaml.Loader(fp)
while loader.check_event():
if loader.check_event(
ruyaml.StreamStartEvent,
ruyaml.StreamEndEvent,
ruyaml.DocumentStartEvent,
ruyaml.DocumentEndEvent,
):
loader.get_event()
continue
nodes1.append(_convert_structure(loader))
if len(nodes1) == 1:
nodes1 = nodes1[0]
assert nodes1 == nodes2, (nodes1, nodes2)
finally:
if verbose:
print('NODES1:')
pprint.pprint(nodes1)
print('NODES2:')
pprint.pprint(nodes2)
test_structure.unittest = ['.data', '.structure']
def _compare_events(events1, events2, full=False):
assert len(events1) == len(events2), (len(events1), len(events2))
for event1, event2 in zip(events1, events2):
assert event1.__class__ == event2.__class__, (event1, event2)
if isinstance(event1, ruyaml.AliasEvent) and full:
assert event1.anchor == event2.anchor, (event1, event2)
if isinstance(event1, (ruyaml.ScalarEvent, ruyaml.CollectionStartEvent)):
if (
event1.tag not in [None, '!'] and event2.tag not in [None, '!']
) or full:
assert event1.tag == event2.tag, (event1, event2)
if isinstance(event1, ruyaml.ScalarEvent):
assert event1.value == event2.value, (event1, event2)
def test_parser(data_filename, canonical_filename, verbose=False):
events1 = None
events2 = None
try:
with open(data_filename, 'rb') as fp0:
events1 = list(ruyaml.YAML().parse(fp0))
with open(canonical_filename, 'rb') as fp0:
events2 = list(ruyaml.YAML().canonical_parse(fp0))
_compare_events(events1, events2)
finally:
if verbose:
print('EVENTS1:')
pprint.pprint(events1)
print('EVENTS2:')
pprint.pprint(events2)
test_parser.unittest = ['.data', '.canonical']
def test_parser_on_canonical(canonical_filename, verbose=False):
events1 = None
events2 = None
try:
with open(canonical_filename, 'rb') as fp0:
events1 = list(ruyaml.YAML().parse(fp0))
with open(canonical_filename, 'rb') as fp0:
events2 = list(ruyaml.YAML().canonical_parse(fp0))
_compare_events(events1, events2, full=True)
finally:
if verbose:
print('EVENTS1:')
pprint.pprint(events1)
print('EVENTS2:')
pprint.pprint(events2)
test_parser_on_canonical.unittest = ['.canonical']
def _compare_nodes(node1, node2):
assert node1.__class__ == node2.__class__, (node1, node2)
assert node1.tag == node2.tag, (node1, node2)
if isinstance(node1, ruyaml.ScalarNode):
assert node1.value == node2.value, (node1, node2)
else:
assert len(node1.value) == len(node2.value), (node1, node2)
for item1, item2 in zip(node1.value, node2.value):
if not isinstance(item1, tuple):
item1 = (item1,)
item2 = (item2,)
for subnode1, subnode2 in zip(item1, item2):
_compare_nodes(subnode1, subnode2)
def test_composer(data_filename, canonical_filename, verbose=False):
nodes1 = None
nodes2 = None
try:
yaml = ruyaml.YAML()
with open(data_filename, 'rb') as fp0:
nodes1 = list(yaml.compose_all(fp0))
with open(canonical_filename, 'rb') as fp0:
nodes2 = list(yaml.canonical_compose_all(fp0))
assert len(nodes1) == len(nodes2), (len(nodes1), len(nodes2))
for node1, node2 in zip(nodes1, nodes2):
_compare_nodes(node1, node2)
finally:
if verbose:
print('NODES1:')
pprint.pprint(nodes1)
print('NODES2:')
pprint.pprint(nodes2)
test_composer.unittest = ['.data', '.canonical']
def _make_loader():
global MyLoader
class MyLoader(ruyaml.Loader):
def construct_sequence(self, node):
return tuple(ruyaml.Loader.construct_sequence(self, node))
def construct_mapping(self, node):
pairs = self.construct_pairs(node)
pairs.sort(key=(lambda i: str(i)))
return pairs
def construct_undefined(self, node):
return self.construct_scalar(node)
MyLoader.add_constructor('tag:yaml.org,2002:map', MyLoader.construct_mapping)
MyLoader.add_constructor(None, MyLoader.construct_undefined)
def _make_canonical_loader():
global MyCanonicalLoader
class MyCanonicalLoader(ruyaml.CanonicalLoader):
def construct_sequence(self, node):
return tuple(ruyaml.CanonicalLoader.construct_sequence(self, node))
def construct_mapping(self, node):
pairs = self.construct_pairs(node)
pairs.sort(key=(lambda i: str(i)))
return pairs
def construct_undefined(self, node):
return self.construct_scalar(node)
MyCanonicalLoader.add_constructor(
'tag:yaml.org,2002:map', MyCanonicalLoader.construct_mapping
)
MyCanonicalLoader.add_constructor(None, MyCanonicalLoader.construct_undefined)
def test_constructor(data_filename, canonical_filename, verbose=False):
_make_loader()
_make_canonical_loader()
native1 = None
native2 = None
yaml = ruyaml.YAML(typ='safe')
try:
with open(data_filename, 'rb') as fp0:
native1 = list(yaml.load(fp0, Loader=MyLoader))
with open(canonical_filename, 'rb') as fp0:
native2 = list(yaml.load(fp0, Loader=MyCanonicalLoader))
assert native1 == native2, (native1, native2)
finally:
if verbose:
print('NATIVE1:')
pprint.pprint(native1)
print('NATIVE2:')
pprint.pprint(native2)
test_constructor.unittest = ['.data', '.canonical']
if __name__ == '__main__':
import test_appliance
test_appliance.run(globals())
python-ruyaml-0.92.1/_test/lib/test_tokens.py 0000664 0000000 0000000 00000005144 15056754172 0021253 0 ustar 00root root 0000000 0000000 # Skipped because we have no idea where all those fixtures originate
import pytest
pytestmark = pytest.mark.skip
import pprint
import ruyaml
# Tokens mnemonic:
# directive: %
# document_start: ---
# document_end: ...
# alias: *
# anchor: &
# tag: !
# scalar _
# block_sequence_start: [[
# block_mapping_start: {{
# block_end: ]}
# flow_sequence_start: [
# flow_sequence_end: ]
# flow_mapping_start: {
# flow_mapping_end: }
# entry: ,
# key: ?
# value: :
_replaces = {
ruyaml.DirectiveToken: '%',
ruyaml.DocumentStartToken: '---',
ruyaml.DocumentEndToken: '...',
ruyaml.AliasToken: '*',
ruyaml.AnchorToken: '&',
ruyaml.TagToken: '!',
ruyaml.ScalarToken: '_',
ruyaml.BlockSequenceStartToken: '[[',
ruyaml.BlockMappingStartToken: '{{',
ruyaml.BlockEndToken: ']}',
ruyaml.FlowSequenceStartToken: '[',
ruyaml.FlowSequenceEndToken: ']',
ruyaml.FlowMappingStartToken: '{',
ruyaml.FlowMappingEndToken: '}',
ruyaml.BlockEntryToken: ',',
ruyaml.FlowEntryToken: ',',
ruyaml.KeyToken: '?',
ruyaml.ValueToken: ':',
}
def test_tokens(data_filename, tokens_filename, verbose=False):
tokens1 = []
with open(tokens_filename, 'r') as fp:
tokens2 = fp.read().split()
try:
yaml = ruyaml.YAML(typ='unsafe', pure=True)
with open(data_filename, 'rb') as fp1:
for token in yaml.scan(fp1):
if not isinstance(
token, (ruyaml.StreamStartToken, ruyaml.StreamEndToken)
):
tokens1.append(_replaces[token.__class__])
finally:
if verbose:
print('TOKENS1:', ' '.join(tokens1))
print('TOKENS2:', ' '.join(tokens2))
assert len(tokens1) == len(tokens2), (tokens1, tokens2)
for token1, token2 in zip(tokens1, tokens2):
assert token1 == token2, (token1, token2)
test_tokens.unittest = ['.data', '.tokens']
def test_scanner(data_filename, canonical_filename, verbose=False):
for filename in [data_filename, canonical_filename]:
tokens = []
try:
yaml = ruyaml.YAML(typ='unsafe', pure=False)
with open(filename, 'rb') as fp:
for token in yaml.scan(fp):
tokens.append(token.__class__.__name__)
finally:
if verbose:
pprint.pprint(tokens)
test_scanner.unittest = ['.data', '.canonical']
if __name__ == '__main__':
import test_appliance
test_appliance.run(globals())
python-ruyaml-0.92.1/_test/lib/test_yaml.py 0000664 0000000 0000000 00000001066 15056754172 0020711 0 ustar 00root root 0000000 0000000 # coding: utf-8
from test_canonical import * # NOQA
from test_constructor import * # NOQA
from test_emitter import * # NOQA
from test_errors import * # NOQA
from test_input_output import * # NOQA
from test_mark import * # NOQA
from test_reader import * # NOQA
from test_recursive import * # NOQA
from test_representer import * # NOQA
from test_resolver import * # NOQA
from test_structure import * # NOQA
from test_tokens import * # NOQA
if __name__ == '__main__':
import sys
import test_appliance
sys.exit(test_appliance.run(globals()))
python-ruyaml-0.92.1/_test/lib/test_yaml_ext.py 0000664 0000000 0000000 00000027571 15056754172 0021602 0 ustar 00root root 0000000 0000000 # coding: utf-8
import pprint
import types
import ruyaml
try:
import _ruyaml
except ImportError:
import pytest
pytestmark = pytest.mark.skip
class DummyLoader(type):
pass
ruyaml.CLoader = DummyLoader
ruyaml.CDumper = DummyLoader
ruyaml.PyBaseLoader = ruyaml.BaseLoader
ruyaml.PySafeLoader = ruyaml.SafeLoader
ruyaml.PyLoader = ruyaml.Loader
ruyaml.PyBaseDumper = ruyaml.BaseDumper
ruyaml.PySafeDumper = ruyaml.SafeDumper
ruyaml.PyDumper = ruyaml.Dumper
old_scan = ruyaml.scan
def new_scan(stream, Loader=ruyaml.CLoader):
return old_scan(stream, Loader)
old_parse = ruyaml.parse
def new_parse(stream, Loader=ruyaml.CLoader):
return old_parse(stream, Loader)
old_compose = ruyaml.compose
def new_compose(stream, Loader=ruyaml.CLoader):
return old_compose(stream, Loader)
old_compose_all = ruyaml.compose_all
def new_compose_all(stream, Loader=ruyaml.CLoader):
return old_compose_all(stream, Loader)
old_load = ruyaml.load
def new_load(stream, Loader=ruyaml.CLoader):
return old_load(stream, Loader)
old_load_all = ruyaml.load_all
def new_load_all(stream, Loader=ruyaml.CLoader):
return old_load_all(stream, Loader)
old_safe_load = ruyaml.safe_load
def new_safe_load(stream):
return old_load(stream, ruyaml.CSafeLoader)
old_safe_load_all = ruyaml.safe_load_all
def new_safe_load_all(stream):
return old_load_all(stream, ruyaml.CSafeLoader)
old_emit = ruyaml.emit
def new_emit(events, stream=None, Dumper=ruyaml.CDumper, **kwds):
return old_emit(events, stream, Dumper, **kwds)
old_serialize = ruyaml.serialize
def new_serialize(node, stream, Dumper=ruyaml.CDumper, **kwds):
return old_serialize(node, stream, Dumper, **kwds)
old_serialize_all = ruyaml.serialize_all
def new_serialize_all(nodes, stream=None, Dumper=ruyaml.CDumper, **kwds):
return old_serialize_all(nodes, stream, Dumper, **kwds)
old_dump = ruyaml.dump
def new_dump(data, stream=None, Dumper=ruyaml.CDumper, **kwds):
return old_dump(data, stream, Dumper, **kwds)
old_dump_all = ruyaml.dump_all
def new_dump_all(documents, stream=None, Dumper=ruyaml.CDumper, **kwds):
return old_dump_all(documents, stream, Dumper, **kwds)
old_safe_dump = ruyaml.safe_dump
def new_safe_dump(data, stream=None, **kwds):
return old_dump(data, stream, ruyaml.CSafeDumper, **kwds)
def _set_up():
ruyaml.BaseLoader = ruyaml.CBaseLoader
ruyaml.SafeLoader = ruyaml.CSafeLoader
ruyaml.Loader = ruyaml.CLoader
ruyaml.BaseDumper = ruyaml.CBaseDumper
ruyaml.SafeDumper = ruyaml.CSafeDumper
ruyaml.Dumper = ruyaml.CDumper
ruyaml.scan = new_scan
ruyaml.parse = new_parse
ruyaml.compose = new_compose
ruyaml.compose_all = new_compose_all
ruyaml.load = new_load
ruyaml.load_all = new_load_all
ruyaml.safe_load = new_safe_load
ruyaml.safe_load_all = new_safe_load_all
ruyaml.emit = new_emit
ruyaml.serialize = new_serialize
ruyaml.serialize_all = new_serialize_all
ruyaml.dump = new_dump
ruyaml.dump_all = new_dump_all
ruyaml.safe_dump = new_safe_dump
def _tear_down():
ruyaml.BaseLoader = ruyaml.PyBaseLoader
ruyaml.SafeLoader = ruyaml.PySafeLoader
ruyaml.Loader = ruyaml.PyLoader
ruyaml.BaseDumper = ruyaml.PyBaseDumper
ruyaml.SafeDumper = ruyaml.PySafeDumper
ruyaml.Dumper = ruyaml.PyDumper
ruyaml.scan = old_scan
ruyaml.parse = old_parse
ruyaml.compose = old_compose
ruyaml.compose_all = old_compose_all
ruyaml.load = old_load
ruyaml.load_all = old_load_all
ruyaml.safe_load = old_safe_load
ruyaml.safe_load_all = old_safe_load_all
ruyaml.emit = old_emit
ruyaml.serialize = old_serialize
ruyaml.serialize_all = old_serialize_all
ruyaml.dump = old_dump
ruyaml.dump_all = old_dump_all
ruyaml.safe_dump = old_safe_dump
def test_c_version(verbose=False):
if verbose:
print(_ruyaml.get_version())
print(_ruyaml.get_version_string())
assert ('%s.%s.%s' % _ruyaml.get_version()) == _ruyaml.get_version_string(), (
_ruyaml.get_version(),
_ruyaml.get_version_string(),
)
def _compare_scanners(py_data, c_data, verbose):
yaml = ruyaml.YAML(typ='unsafe', pure=True)
py_tokens = list(yaml.scan(py_data, Loader=ruyaml.PyLoader))
c_tokens = []
try:
yaml = ruyaml.YAML(typ='unsafe', pure=False)
for token in yaml.scan(c_data, Loader=ruyaml.CLoader):
c_tokens.append(token)
assert len(py_tokens) == len(c_tokens), (len(py_tokens), len(c_tokens))
for py_token, c_token in zip(py_tokens, c_tokens):
assert py_token.__class__ == c_token.__class__, (py_token, c_token)
if hasattr(py_token, 'value'):
assert py_token.value == c_token.value, (py_token, c_token)
if isinstance(py_token, ruyaml.StreamEndToken):
continue
py_start = (
py_token.start_mark.index,
py_token.start_mark.line,
py_token.start_mark.column,
)
py_end = (
py_token.end_mark.index,
py_token.end_mark.line,
py_token.end_mark.column,
)
c_start = (
c_token.start_mark.index,
c_token.start_mark.line,
c_token.start_mark.column,
)
c_end = (
c_token.end_mark.index,
c_token.end_mark.line,
c_token.end_mark.column,
)
assert py_start == c_start, (py_start, c_start)
assert py_end == c_end, (py_end, c_end)
finally:
if verbose:
print('PY_TOKENS:')
pprint.pprint(py_tokens)
print('C_TOKENS:')
pprint.pprint(c_tokens)
def test_c_scanner(data_filename, canonical_filename, verbose=False):
with open(data_filename, 'rb') as fp0:
with open(data_filename, 'rb') as fp1:
_compare_scanners(fp0, fp1, verbose)
with open(data_filename, 'rb') as fp0:
with open(data_filename, 'rb') as fp1:
_compare_scanners(fp0.read(), fp1.read(), verbose)
with open(canonical_filename, 'rb') as fp0:
with open(canonical_filename, 'rb') as fp1:
_compare_scanners(fp0, fp1, verbose)
with open(canonical_filename, 'rb') as fp0:
with open(canonical_filename, 'rb') as fp1:
_compare_scanners(fp0.read(), fp1.read(), verbose)
test_c_scanner.unittest = ['.data', '.canonical']
test_c_scanner.skip = ['.skip-ext']
def _compare_parsers(py_data, c_data, verbose):
yaml = ruyaml.YAML(typ='unsafe', pure=True)
py_events = list(yaml.parse(py_data, Loader=ruyaml.PyLoader))
c_events = []
try:
yaml = ruyaml.YAML(typ='unsafe', pure=False)
for event in yaml.parse(c_data, Loader=ruyaml.CLoader):
c_events.append(event)
assert len(py_events) == len(c_events), (len(py_events), len(c_events))
for py_event, c_event in zip(py_events, c_events):
for attribute in [
'__class__',
'anchor',
'tag',
'implicit',
'value',
'explicit',
'version',
'tags',
]:
py_value = getattr(py_event, attribute, None)
c_value = getattr(c_event, attribute, None)
assert py_value == c_value, (py_event, c_event, attribute)
finally:
if verbose:
print('PY_EVENTS:')
pprint.pprint(py_events)
print('C_EVENTS:')
pprint.pprint(c_events)
def test_c_parser(data_filename, canonical_filename, verbose=False):
with open(data_filename, 'rb') as fp0:
with open(data_filename, 'rb') as fp1:
_compare_parsers(fp0, fp1, verbose)
with open(data_filename, 'rb') as fp0:
with open(data_filename, 'rb') as fp1:
_compare_parsers(fp0.read(), fp1.read(), verbose)
with open(canonical_filename, 'rb') as fp0:
with open(canonical_filename, 'rb') as fp1:
_compare_parsers(fp0, fp1, verbose)
with open(canonical_filename, 'rb') as fp0:
with open(canonical_filename, 'rb') as fp1:
_compare_parsers(fp0.read(), fp1.read(), verbose)
test_c_parser.unittest = ['.data', '.canonical']
test_c_parser.skip = ['.skip-ext']
def _compare_emitters(py_data, verbose):
yaml = ruyaml.YAML(typ='unsafe', pure=True)
events = list(yaml.parse(py_data, Loader=ruyaml.PyLoader))
c_data = yaml.emit(events, Dumper=ruyaml.CDumper)
if verbose:
print(c_data)
py_events = list(yaml.parse(c_data, Loader=ruyaml.PyLoader))
c_events = list(yaml.parse(c_data, Loader=ruyaml.CLoader))
try:
assert len(events) == len(py_events), (len(events), len(py_events))
assert len(events) == len(c_events), (len(events), len(c_events))
for event, py_event, c_event in zip(events, py_events, c_events):
for attribute in [
'__class__',
'anchor',
'tag',
'implicit',
'value',
'explicit',
'version',
'tags',
]:
value = getattr(event, attribute, None)
py_value = getattr(py_event, attribute, None)
c_value = getattr(c_event, attribute, None)
if (
attribute == 'tag'
and value in [None, '!']
and py_value in [None, '!']
and c_value in [None, '!']
):
continue
if attribute == 'explicit' and (py_value or c_value):
continue
assert value == py_value, (event, py_event, attribute)
assert value == c_value, (event, c_event, attribute)
finally:
if verbose:
print('EVENTS:')
pprint.pprint(events)
print('PY_EVENTS:')
pprint.pprint(py_events)
print('C_EVENTS:')
pprint.pprint(c_events)
def test_c_emitter(data_filename, canonical_filename, verbose=False):
with open(data_filename, 'rb') as fp0:
_compare_emitters(fp0.read(), verbose)
with open(canonical_filename, 'rb') as fp0:
_compare_emitters(fp0.read(), verbose)
test_c_emitter.unittest = ['.data', '.canonical']
test_c_emitter.skip = ['.skip-ext']
def wrap_ext_function(function):
def wrapper(*args, **kwds):
_set_up()
try:
function(*args, **kwds)
finally:
_tear_down()
wrapper.__name__ = '%s_ext' % function.__name__
wrapper.unittest = function.unittest
wrapper.skip = getattr(function, 'skip', []) + ['.skip-ext']
return wrapper
def wrap_ext(collections):
functions = []
if not isinstance(collections, list):
collections = [collections]
for collection in collections:
if not isinstance(collection, dict):
collection = vars(collection)
for key in sorted(collection):
value = collection[key]
if isinstance(value, types.FunctionType) and hasattr(value, 'unittest'):
functions.append(wrap_ext_function(value))
for function in functions:
assert function.__name__ not in globals()
globals()[function.__name__] = function
import test_constructor # NOQA
import test_emitter # NOQA
import test_errors # NOQA
import test_input_output # NOQA
import test_recursive # NOQA
import test_representer # NOQA
import test_resolver # NOQA
import test_structure # NOQA
import test_tokens # NOQA
wrap_ext(
[
test_tokens,
test_structure,
test_errors,
test_resolver,
test_constructor,
test_emitter,
test_representer,
test_recursive,
test_input_output,
]
)
if __name__ == '__main__':
import sys
import test_appliance
sys.exit(test_appliance.run(globals()))
python-ruyaml-0.92.1/_test/roundtrip.py 0000664 0000000 0000000 00000026455 15056754172 0020201 0 ustar 00root root 0000000 0000000 # coding: utf-8
"""
helper routines for testing round trip of commented YAML data
"""
import io
import sys
import textwrap
from pathlib import Path
from typing import Any, Optional, Union
import ruyaml
unset = object()
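# dedent(): if everything up to the first newline is whitespace, that first line
# (including the newline) is dropped; the remainder is then passed through
# textwrap.dedent(). This lets test strings start on the line after the opening
# triple quote.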
def dedent(data: str) -> str:
try:
position_of_first_newline = data.index('\n')
for idx in range(position_of_first_newline):
if not data[idx].isspace():
raise ValueError
except ValueError:
pass
else:
data = data[position_of_first_newline + 1 :]
return textwrap.dedent(data)
def round_trip_load(
inp: Any,
preserve_quotes: Optional[bool] = None,
version: Optional[Any] = None,
) -> Any:
import ruyaml # NOQA
dinp = dedent(inp)
yaml = ruyaml.YAML()
yaml.preserve_quotes = preserve_quotes
yaml.version = version
return yaml.load(dinp)
def round_trip_load_all(
inp: Any,
preserve_quotes: Optional[bool] = None,
version: Optional[Any] = None,
) -> Any:
import ruyaml # NOQA
dinp = dedent(inp)
yaml = ruyaml.YAML()
yaml.preserve_quotes = preserve_quotes
yaml.version = version
return yaml.load_all(dinp)
def round_trip_dump(
data: Any,
stream: Any = None, # *,
indent: Optional[int] = None,
block_seq_indent: Optional[int] = None,
default_flow_style: Any = unset,
top_level_colon_align: Any = None,
prefix_colon: Any = None,
explicit_start: Optional[bool] = None,
explicit_end: Optional[bool] = None,
version: Optional[Any] = None,
allow_unicode: bool = True,
) -> Union[str, None]:
import ruyaml # NOQA
yaml = ruyaml.YAML()
yaml.indent(mapping=indent, sequence=indent, offset=block_seq_indent)
if default_flow_style is not unset:
yaml.default_flow_style = default_flow_style
yaml.top_level_colon_align = top_level_colon_align
yaml.prefix_colon = prefix_colon
yaml.explicit_start = explicit_start
yaml.explicit_end = explicit_end
yaml.version = version
yaml.allow_unicode = allow_unicode
if stream is not None:
yaml.dump(data, stream=stream)
return None
buf = io.StringIO()
yaml.dump(data, stream=buf)
return buf.getvalue()
def round_trip_dump_all(
data: Any,
stream: Any = None, # *,
indent: Optional[int] = None,
block_seq_indent: Optional[int] = None,
default_flow_style: Any = unset,
top_level_colon_align: Any = None,
prefix_colon: Any = None,
explicit_start: Optional[bool] = None,
explicit_end: Optional[bool] = None,
version: Optional[Any] = None,
allow_unicode: bool = True,
) -> Union[str, None]:
import ruyaml # NOQA
yaml = ruyaml.YAML()
yaml.indent(mapping=indent, sequence=indent, offset=block_seq_indent)
if default_flow_style is not unset:
yaml.default_flow_style = default_flow_style
yaml.top_level_colon_align = top_level_colon_align
yaml.prefix_colon = prefix_colon
yaml.explicit_start = explicit_start
yaml.explicit_end = explicit_end
yaml.version = version
yaml.allow_unicode = allow_unicode
if stream is not None:
yaml.dump(data, stream=stream)
return None
buf = io.StringIO()
yaml.dump_all(data, stream=buf)
return buf.getvalue()
def diff(inp: str, outp: str, file_name: str = 'stdin') -> None:
import difflib
inl = inp.splitlines(True) # True for keepends
outl = outp.splitlines(True)
diff = difflib.unified_diff(inl, outl, file_name, 'round trip YAML')
for line in diff:
sys.stdout.write(line)
def round_trip(
inp: str,
outp: Optional[str] = None,
extra: Optional[str] = None,
intermediate: Any = None,
indent: Optional[int] = None,
block_seq_indent: Optional[int] = None,
default_flow_style: Any = unset,
top_level_colon_align: Any = None,
prefix_colon: Any = None,
preserve_quotes: Any = None,
explicit_start: Optional[bool] = None,
explicit_end: Optional[bool] = None,
version: Optional[Any] = None,
dump_data: Any = None,
) -> Any:
"""
inp: input string to parse
outp: expected output (equals input if not specified)
"""
if outp is None:
outp = inp
doutp = dedent(outp)
if extra is not None:
doutp += extra
data = round_trip_load(inp, preserve_quotes=preserve_quotes)
if dump_data:
print('data', data)
if intermediate is not None:
if isinstance(intermediate, dict):
for k, v in intermediate.items():
if data[k] != v:
print('{0!r} <> {1!r}'.format(data[k], v))
raise ValueError
res = round_trip_dump(
data,
indent=indent,
block_seq_indent=block_seq_indent,
top_level_colon_align=top_level_colon_align,
prefix_colon=prefix_colon,
explicit_start=explicit_start,
explicit_end=explicit_end,
version=version,
)
assert isinstance(res, str)
if res != doutp:
diff(doutp, res, 'input string')
print('\nroundtrip data:\n', res, sep="")
assert res == doutp
res = round_trip_dump(
data,
indent=indent,
block_seq_indent=block_seq_indent,
top_level_colon_align=top_level_colon_align,
prefix_colon=prefix_colon,
explicit_start=explicit_start,
explicit_end=explicit_end,
version=version,
)
print('roundtrip second round data:\n', res, sep="")
assert res == doutp
return data
def na_round_trip(
inp: str,
outp: Optional[str] = None,
extra: Optional[str] = None,
intermediate: Any = None,
indent: Optional[int] = None,
top_level_colon_align: Any = None,
prefix_colon: Any = None,
preserve_quotes: Any = None,
explicit_start: Optional[bool] = None,
explicit_end: Optional[bool] = None,
version: Optional[Any] = None,
dump_data: Any = None,
) -> Any:
"""
inp: input string to parse
outp: expected output (equals input if not specified)
"""
inp = dedent(inp)
if outp is None:
outp = inp
if version is not None:
version = version
doutp = dedent(outp)
if extra is not None:
doutp += extra
yaml = YAML()
yaml.preserve_quotes = preserve_quotes
yaml.scalar_after_indicator = False # newline after every directives end
data = yaml.load(inp)
if dump_data:
print('data', data)
if intermediate is not None:
if isinstance(intermediate, dict):
for k, v in intermediate.items():
if data[k] != v:
print('{0!r} <> {1!r}'.format(data[k], v))
raise ValueError
yaml.indent = indent
yaml.top_level_colon_align = top_level_colon_align
yaml.prefix_colon = prefix_colon
yaml.explicit_start = explicit_start
yaml.explicit_end = explicit_end
res = yaml.dump(data, compare=doutp)
return res
def YAML(**kw: Any) -> Any:
import ruyaml # NOQA
class MyYAML(ruyaml.YAML):
"""auto dedent string parameters on load"""
def load(self, stream: Any) -> Any:
if isinstance(stream, str):
if stream and stream[0] == '\n':
stream = stream[1:]
stream = textwrap.dedent(stream)
return ruyaml.YAML.load(self, stream)
def load_all(self, stream: Any) -> Any:
if isinstance(stream, str):
if stream and stream[0] == '\n':
stream = stream[1:]
stream = textwrap.dedent(stream)
for d in ruyaml.YAML.load_all(self, stream):
yield d
def dump(self, data: Any, **kw: Any) -> Any: # type: ignore
from ruyaml.compat import BytesIO, StringIO # NOQA
assert ('stream' in kw) ^ ('compare' in kw)
if 'stream' in kw:
return ruyaml.YAML.dump(self, data, **kw)
lkw = kw.copy()
expected = textwrap.dedent(lkw.pop('compare'))
unordered_lines = lkw.pop('unordered_lines', False)
if expected and expected[0] == '\n':
expected = expected[1:]
lkw['stream'] = st = StringIO()
ruyaml.YAML.dump(self, data, **lkw)
res = st.getvalue()
print(res)
if unordered_lines:
res = sorted(res.splitlines()) # type: ignore
expected = sorted(expected.splitlines()) # type: ignore
assert res == expected
def round_trip(self, stream: Any, **kw: Any) -> None:
from ruyaml.compat import BytesIO, StringIO # NOQA
assert isinstance(stream, str)
lkw = kw.copy()
if stream and stream[0] == '\n':
stream = stream[1:]
stream = textwrap.dedent(stream)
data = ruyaml.YAML.load(self, stream)
outp = lkw.pop('outp', stream)
lkw['stream'] = st = StringIO()
ruyaml.YAML.dump(self, data, **lkw)
res = st.getvalue()
if res != outp:
diff(outp, res, 'input string')
assert res == outp
def round_trip_all(self, stream: Any, **kw: Any) -> None:
from ruyaml.compat import BytesIO, StringIO # NOQA
assert isinstance(stream, str)
lkw = kw.copy()
if stream and stream[0] == '\n':
stream = stream[1:]
stream = textwrap.dedent(stream)
data = list(ruyaml.YAML.load_all(self, stream))
outp = lkw.pop('outp', stream)
lkw['stream'] = st = StringIO()
ruyaml.YAML.dump_all(self, data, **lkw)
res = st.getvalue()
if res != outp:
diff(outp, res, 'input string')
assert res == outp
return MyYAML(**kw)
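# Illustrative call of save_and_run() below (names and values are made up for the
# example): save_and_run("print('ok')", base_dir=tmp_path, output='ok\n') writes the
# program to a file, runs it with the current interpreter and returns 0 on success,
# or the subprocess return code on failure.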
def save_and_run(
program: str,
base_dir: Optional[Any] = None,
output: Optional[Any] = None,
file_name: Optional[Any] = None,
optimized: bool = False,
) -> int:
"""
save and run a python program, thereby circumventing any restrictions on module level
imports
"""
from subprocess import STDOUT, CalledProcessError, check_output
if not hasattr(base_dir, 'hash'):
base_dir = Path(str(base_dir))
if file_name is None:
file_name = 'safe_and_run_tmp.py'
file_name = base_dir / file_name # type: ignore
file_name.write_text(dedent(program))
try:
cmd = [sys.executable, '-Wd']
if optimized:
cmd.append('-O')
cmd.append(str(file_name))
print('running:', *cmd)
# 3.5 needs strings
res = check_output(
cmd, stderr=STDOUT, universal_newlines=True, cwd=str(base_dir)
)
if output is not None:
if '__pypy__' in sys.builtin_module_names:
res1 = res.splitlines(True)
res2 = [line for line in res1 if 'no version info' not in line]
res = ''.join(res2)
print('result: ', res, end='')
print('expected:', output, end='')
assert res == output
except CalledProcessError as exception:
print("##### Running '{} {}' FAILED #####".format(sys.executable, file_name))
print(exception.output)
return exception.returncode
return 0
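# Minimal usage sketch of the helpers above (illustrative only; the document and the
# names below are made up for the example). Running this module directly loads a
# small commented document, dumps it again and asserts the text is unchanged.
if __name__ == '__main__':
    _demo = dedent(
        """\
        a: 1 # a comment that should survive
        b:
        - 2
        - 3
        """
    )
    round_trip(_demo)  # load -> dump -> compare against the original text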
python-ruyaml-0.92.1/_test/test_a_dedent.py 0000664 0000000 0000000 00000002266 15056754172 0020747 0 ustar 00root root 0000000 0000000 # coding: utf-8
from roundtrip import dedent # type: ignore
class TestDedent:
def test_start_newline(self) -> None:
# fmt: off
x = dedent("""
123
456
""")
# fmt: on
assert x == '123\n 456\n'
def test_start_space_newline(self) -> None:
# special construct to prevent stripping of following whitespace
# fmt: off
x = dedent(" " """
123
""")
# fmt: on
assert x == '123\n'
def test_start_no_newline(self) -> None:
# special construct to prevent stripping of following whitespace
x = dedent(
"""\
123
456
"""
)
assert x == '123\n 456\n'
def test_preserve_no_newline_at_end(self) -> None:
x = dedent(
"""
123"""
)
assert x == '123'
def test_preserve_no_newline_at_all(self) -> None:
x = dedent(
"""\
123"""
)
assert x == '123'
def test_multiple_dedent(self) -> None:
x = dedent(
dedent(
"""
123
"""
),
)
assert x == '123\n'
python-ruyaml-0.92.1/_test/test_add_xxx.py 0000664 0000000 0000000 00000013121 15056754172 0020633 0 ustar 00root root 0000000 0000000 # coding: utf-8
import re
from typing import Any
import pytest # type: ignore # NOQA
from roundtrip import dedent, round_trip_dump # type: ignore # NOQA
# from PyYAML docs
class Dice(tuple): # type: ignore
def __new__(cls, a: int, b: int) -> "Dice":
return tuple.__new__(cls, [a, b])
def __repr__(self) -> str:
return 'Dice(%s,%s)' % self
def dice_constructor(loader: Any, node: Any) -> Dice:
value = loader.construct_scalar(node)
a, b = map(int, value.split('d'))
return Dice(a, b)
def dice_representer(dumper: Any, data: Any) -> Any:
return dumper.represent_scalar('!dice', '{}d{}'.format(*data))
def test_dice_constructor() -> None:
import ruyaml # NOQA
with pytest.warns(PendingDeprecationWarning):
yaml = ruyaml.YAML(typ='unsafe', pure=True)
ruyaml.add_constructor('!dice', dice_constructor)
data = yaml.load('initial hit points: !dice 8d4')
assert str(data) == "{'initial hit points': Dice(8,4)}"
def test_dice_constructor_with_loader() -> None:
import ruyaml # NOQA
with pytest.warns(PendingDeprecationWarning):
yaml = ruyaml.YAML(typ='unsafe', pure=True)
ruyaml.add_constructor('!dice', dice_constructor, Loader=ruyaml.Loader)
data = yaml.load('initial hit points: !dice 8d4')
assert str(data) == "{'initial hit points': Dice(8,4)}"
def test_dice_representer() -> None:
import ruyaml # NOQA
with pytest.warns(PendingDeprecationWarning):
yaml = ruyaml.YAML(typ='unsafe', pure=True)
yaml.default_flow_style = False
ruyaml.add_representer(Dice, dice_representer)
# ruyaml 0.15.8+ no longer forces quotes on tagged scalars
buf = ruyaml.compat.StringIO()
yaml.dump(dict(gold=Dice(10, 6)), buf)
assert buf.getvalue() == 'gold: !dice 10d6\n'
def test_dice_implicit_resolver() -> None:
import ruyaml # NOQA
with pytest.warns(PendingDeprecationWarning):
yaml = ruyaml.YAML(typ='unsafe', pure=True)
yaml.default_flow_style = False
pattern = re.compile(r'^\d+d\d+$')
ruyaml.add_implicit_resolver('!dice', pattern)
buf = ruyaml.compat.StringIO()
yaml.dump(dict(treasure=Dice(10, 20)), buf)
assert buf.getvalue() == 'treasure: 10d20\n'
assert yaml.load('damage: 5d10') == dict(damage=Dice(5, 10))
class Obj1(dict): # type: ignore
def __init__(self, suffix: Any) -> None:
self._suffix = suffix
self._node = None
def add_node(self, n: Any) -> None:
self._node = n
def __repr__(self) -> str:
return 'Obj1(%s->%s)' % (self._suffix, self.items())
def dump(self) -> str:
return repr(self._node)
class YAMLObj1:
yaml_tag = '!obj:'
@classmethod
def from_yaml(cls, loader: Any, suffix: Any, node: Any) -> Any:
import ruyaml # NOQA
obj1 = Obj1(suffix)
if isinstance(node, ruyaml.MappingNode):
obj1.add_node(loader.construct_mapping(node))
else:
raise NotImplementedError
return obj1
@classmethod
def to_yaml(cls, dumper: Any, data: Any) -> Any:
return dumper.represent_scalar(cls.yaml_tag + data._suffix, data.dump())
def test_yaml_obj() -> None:
import ruyaml # NOQA
with pytest.warns(PendingDeprecationWarning):
yaml = ruyaml.YAML(typ='unsafe', pure=True)
ruyaml.add_representer(Obj1, YAMLObj1.to_yaml)
ruyaml.add_multi_constructor(YAMLObj1.yaml_tag, YAMLObj1.from_yaml)
x = yaml.load('!obj:x.2\na: 1')
print(x)
buf = ruyaml.compat.StringIO()
yaml.dump(x, buf)
assert buf.getvalue() == """!obj:x.2 "{'a': 1}"\n"""
def test_yaml_obj_with_loader_and_dumper() -> None:
import ruyaml # NOQA
with pytest.warns(PendingDeprecationWarning):
yaml = ruyaml.YAML(typ='unsafe', pure=True)
ruyaml.add_representer(Obj1, YAMLObj1.to_yaml, Dumper=ruyaml.Dumper)
ruyaml.add_multi_constructor(
YAMLObj1.yaml_tag,
YAMLObj1.from_yaml,
Loader=ruyaml.Loader,
)
x = yaml.load('!obj:x.2\na: 1')
# x = ruyaml.load('!obj:x.2\na: 1')
print(x)
buf = ruyaml.compat.StringIO()
yaml.dump(x, buf)
assert buf.getvalue() == """!obj:x.2 "{'a': 1}"\n"""
# ToDo use nullege to search add_multi_representer and add_path_resolver
# and add some test code
# Issue 127 reported by Tommy Wang
def test_issue_127() -> None:
import ruyaml # NOQA
class Ref(ruyaml.YAMLObject):
yaml_constructor = ruyaml.RoundTripConstructor # type: ignore
yaml_representer = ruyaml.RoundTripRepresenter # type: ignore
yaml_tag = '!Ref'
def __init__(self, logical_id: Any) -> None:
self.logical_id = logical_id
@classmethod
def from_yaml(cls, loader: Any, node: Any) -> Any:
return cls(loader.construct_scalar(node))
@classmethod
def to_yaml(cls, dumper: Any, data: Any) -> Any:
if isinstance(data.logical_id, ruyaml.scalarstring.ScalarString):
style = data.logical_id.style # type: ignore # ruyaml>0.15.8
else:
style = None
return dumper.represent_scalar(cls.yaml_tag, data.logical_id, style=style)
document = dedent(
"""\
AList:
- !Ref One
- !Ref 'Two'
- !Ref
Two and a half
BList: [!Ref Three, !Ref "Four"]
CList:
- Five Six
- 'Seven Eight'
"""
)
yaml = ruyaml.YAML()
yaml.preserve_quotes = True
yaml.default_flow_style = None
yaml.indent(sequence=4, offset=2)
data = yaml.load(document)
buf = ruyaml.compat.StringIO()
yaml.dump(data, buf)
assert buf.getvalue() == document.replace('\n Two and', ' Two and')
python-ruyaml-0.92.1/_test/test_anchor.py 0000664 0000000 0000000 00000035564 15056754172 0020465 0 ustar 00root root 0000000 0000000 # coding: utf-8
"""
testing of anchors and the aliases referring to them
"""
import platform
from typing import Any
import pytest # type: ignore # NOQA
from roundtrip import ( # type: ignore # NOQA
YAML,
dedent,
round_trip,
round_trip_dump,
round_trip_load,
)
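# Background (illustrative snippet, not loaded by any test in this module): an anchor
# `&name` labels a node and an alias `*name` refers back to it, e.g.
#
#   defaults: &defaults {retries: 3}
#   job: *defaults
#
# The round-trip loader/dumper is expected to preserve such anchors and aliases.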
def load(s: str) -> Any:
return round_trip_load(dedent(s))
def compare(d: Any, s: str) -> None:
assert round_trip_dump(d) == dedent(s)
class TestAnchorsAliases:
def test_anchor_id_renumber(self) -> None:
from ruyaml.serializer import Serializer
assert Serializer.ANCHOR_TEMPLATE == 'id{:03d}'
data = load(
"""
a: &id002
b: 1
c: 2
d: *id002
"""
)
compare(
data,
"""
a: &id001
b: 1
c: 2
d: *id001
""",
)
def test_template_matcher(self) -> None:
"""test if id matches the anchor template"""
from ruyaml.serializer import templated_id
assert templated_id('id001')
assert templated_id('id999')
assert templated_id('id1000')
assert templated_id('id0001')
assert templated_id('id0000')
assert not templated_id('id02')
assert not templated_id('id000')
assert not templated_id('x000')
# def test_re_matcher(self) -> None:
# import re
# assert re.compile('id(?!000)\\d{3,}').match('id001')
# assert not re.compile('id(?!000\\d*)\\d{3,}').match('id000')
# assert re.compile('id(?!000$)\\d{3,}').match('id0001')
def test_anchor_assigned(self) -> None:
from ruyaml.comments import CommentedMap
data = load(
"""
a: &id002
b: 1
c: 2
d: *id002
e: &etemplate
b: 1
c: 2
f: *etemplate
"""
)
d = data['d']
assert isinstance(d, CommentedMap)
assert d.yaml_anchor() is None # got dropped as it matches pattern
e = data['e']
assert isinstance(e, CommentedMap)
assert e.yaml_anchor().value == 'etemplate'
assert e.yaml_anchor().always_dump is False
def test_anchor_id_retained(self) -> None:
data = load(
"""
a: &id002
b: 1
c: 2
d: *id002
e: &etemplate
b: 1
c: 2
f: *etemplate
"""
)
compare(
data,
"""
a: &id001
b: 1
c: 2
d: *id001
e: &etemplate
b: 1
c: 2
f: *etemplate
""",
)
@pytest.mark.skipif( # type: ignore
platform.python_implementation() == 'Jython',
reason='Jython throws RepresenterError',
)
def test_alias_before_anchor(self) -> None:
from ruyaml.composer import ComposerError
with pytest.raises(ComposerError):
data = load(
"""
d: *id002
a: &id002
b: 1
c: 2
"""
)
data = data
def test_anchor_on_sequence(self) -> None:
# as reported by Bjorn Stabell
# https://bitbucket.org/ruyaml/issue/7/anchor-names-not-preserved
from ruyaml.comments import CommentedSeq
data = load(
"""
nut1: &alice
- 1
- 2
nut2: &blake
- some data
- *alice
nut3:
- *blake
- *alice
"""
)
r = data['nut1']
assert isinstance(r, CommentedSeq)
assert r.yaml_anchor() is not None
assert r.yaml_anchor().value == 'alice'
merge_yaml = dedent(
"""
- &CENTER {x: 1, y: 2}
- &LEFT {x: 0, y: 2}
- &BIG {r: 10}
- &SMALL {r: 1}
# All the following maps are equal:
# Explicit keys
- x: 1
y: 2
r: 10
label: center/small
# Merge one map
- <<: *CENTER
r: 10
label: center/medium
# Merge multiple maps
- <<: [*CENTER, *BIG]
label: center/big
# Override
- <<: [*BIG, *LEFT, *SMALL]
x: 1
label: center/huge
"""
)
def test_merge_00(self) -> None:
data = load(self.merge_yaml)
d = data[4]
ok = True
for k in d:
for o in [5, 6, 7]:
x = d.get(k)
y = data[o].get(k)
if not isinstance(x, int):
x = x.split('/')[0]
y = y.split('/')[0]
if x != y:
ok = False
print('key', k, d.get(k), data[o].get(k))
assert ok
def test_merge_accessible(self) -> None:
from ruyaml.comments import CommentedMap, merge_attrib
data = load(
"""
k: &level_2 { a: 1, b2 }
l: &level_1 { a: 10, c: 3 }
m:
<<: *level_1
c: 30
d: 40
"""
)
d = data['m']
assert isinstance(d, CommentedMap)
assert hasattr(d, merge_attrib)
def test_merge_01(self) -> None:
data = load(self.merge_yaml)
compare(data, self.merge_yaml)
def test_merge_nested(self) -> None:
yaml = """
a:
<<: &content
1: plugh
2: plover
0: xyzzy
b:
<<: *content
"""
data = round_trip(yaml) # NOQA
def test_merge_nested_with_sequence(self) -> None:
yaml = """
a:
<<: &content
<<: &y2
1: plugh
2: plover
0: xyzzy
b:
<<: [*content, *y2]
"""
data = round_trip(yaml) # NOQA
def test_add_anchor(self) -> None:
from ruyaml.comments import CommentedMap
data = CommentedMap()
data_a = CommentedMap()
data['a'] = data_a
data_a['c'] = 3
data['b'] = 2
data.yaml_set_anchor('klm', always_dump=True)
data['a'].yaml_set_anchor('xyz', always_dump=True)
compare(
data,
"""
&klm
a: &xyz
c: 3
b: 2
""",
)
# this is an error in PyYAML
def test_reused_anchor(self) -> None:
from ruyaml.error import ReusedAnchorWarning
yaml = """
- &a
x: 1
- <<: *a
- &a
x: 2
- <<: *a
"""
with pytest.warns(ReusedAnchorWarning):
data = round_trip(yaml) # NOQA
def test_issue_130(self) -> None:
# issue 130 reported by Devid Fee
import ruyaml
ys = dedent(
"""\
components:
server: &server_component
type: spark.server:ServerComponent
host: 0.0.0.0
port: 8000
shell: &shell_component
type: spark.shell:ShellComponent
services:
server: &server_service
<<: *server_component
shell: &shell_service
<<: *shell_component
components:
server: {<<: *server_service}
"""
)
yaml = ruyaml.YAML(typ='safe', pure=True)
data = yaml.load(ys)
assert data['services']['shell']['components']['server']['port'] == 8000
def test_issue_130a(self) -> None:
# issue 130 reported by Devid Fee
import ruyaml
ys = dedent(
"""\
components:
server: &server_component
type: spark.server:ServerComponent
host: 0.0.0.0
port: 8000
shell: &shell_component
type: spark.shell:ShellComponent
services:
server: &server_service
<<: *server_component
port: 4000
shell: &shell_service
<<: *shell_component
components:
server: {<<: *server_service}
"""
)
yaml = ruyaml.YAML(typ='safe', pure=True)
data = yaml.load(ys)
assert data['services']['shell']['components']['server']['port'] == 4000
class TestMergeKeysValues:
yaml_str = dedent(
"""\
- &mx
a: x1
b: x2
c: x3
- &my
a: y1
b: y2 # masked by the one in &mx
d: y4
-
a: 1
<<: [*mx, *my]
m: 6
"""
)
# in the following, d always has the merges "expanded"
def test_merge_for(self) -> None:
from ruyaml import YAML
d = YAML(typ='safe', pure=True).load(self.yaml_str)
data = round_trip_load(self.yaml_str)
count = 0
for x in data[2]:
count += 1
print(count, x)
assert count == len(d[2])
def test_merge_keys(self) -> None:
from ruyaml import YAML
d = YAML(typ='safe', pure=True).load(self.yaml_str)
data = round_trip_load(self.yaml_str)
count = 0
for x in data[2].keys():
count += 1
print(count, x)
assert count == len(d[2])
def test_merge_values(self) -> None:
from ruyaml import YAML
d = YAML(typ='safe', pure=True).load(self.yaml_str)
data = round_trip_load(self.yaml_str)
count = 0
for x in data[2].values():
count += 1
print(count, x)
assert count == len(d[2])
def test_merge_items(self) -> None:
from ruyaml import YAML
d = YAML(typ='safe', pure=True).load(self.yaml_str)
data = round_trip_load(self.yaml_str)
count = 0
for x in data[2].items():
count += 1
print(count, x)
assert count == len(d[2])
def test_len_items_delete(self) -> None:
from ruyaml import YAML
d = YAML(typ='safe', pure=True).load(self.yaml_str)
data = round_trip_load(self.yaml_str)
x = data[2].items()
print('d2 items', d[2].items(), len(d[2].items()), x, len(x))
ref = len(d[2].items())
print('ref', ref)
assert len(x) == ref
del data[2]['m']
ref -= 1
assert len(x) == ref
del data[2]['d']
ref -= 1
assert len(x) == ref
del data[2]['a']
ref -= 1
assert len(x) == ref
def test_issue_196_cast_of_dict(self, capsys: Any) -> None:
from ruyaml import YAML
yaml = YAML()
mapping = yaml.load(
"""\
anchored: &anchor
a : 1
mapping:
<<: *anchor
b: 2
"""
)['mapping']
for k in mapping:
print('k', k)
for k in mapping.copy():
print('kc', k)
print('v', list(mapping.keys()))
print('v', list(mapping.values()))
print('v', list(mapping.items()))
print(len(mapping))
print('-----')
# print({**mapping})
# print(type({**mapping}))
# assert 'a' in {**mapping}
assert 'a' in mapping
x = {}
for k in mapping:
x[k] = mapping[k]
assert 'a' in x
assert 'a' in mapping.keys()
assert mapping['a'] == 1
assert mapping.__getitem__('a') == 1
assert 'a' in dict(mapping)
assert 'a' in dict(mapping.items())
def test_values_of_merged(self) -> None:
from ruyaml import YAML
yaml = YAML()
data = yaml.load(dedent(self.yaml_str))
assert list(data[2].values()) == [1, 6, 'x2', 'x3', 'y4']
def test_issue_213_copy_of_merge(self) -> None:
from ruyaml import YAML
yaml = YAML()
d = yaml.load(
"""\
foo: &foo
a: a
foo2:
<<: *foo
b: b
"""
)['foo2']
assert d['a'] == 'a'
d2 = d.copy()
assert d2['a'] == 'a'
print('d', d)
del d['a']
assert 'a' not in d
assert 'a' in d2
def test_dup_merge(self):
from ruyaml import YAML
yaml = YAML()
yaml.allow_duplicate_keys = True
d = yaml.load(
"""\
foo: &f
a: a
foo2: &g
b: b
all:
<<: *f
<<: *g
"""
)['all']
assert d == {'a': 'a', 'b': 'b'}
def test_dup_merge_fail(self):
from ruyaml import YAML
from ruyaml.constructor import DuplicateKeyError
yaml = YAML()
yaml.allow_duplicate_keys = False
with pytest.raises(DuplicateKeyError):
yaml.load(
"""\
foo: &f
a: a
foo2: &g
b: b
all:
<<: *f
<<: *g
"""
)
class TestDuplicateKeyThroughAnchor:
def test_duplicate_key_00(self) -> None:
from ruyaml import YAML, version_info
from ruyaml.constructor import DuplicateKeyError, DuplicateKeyFutureWarning
s = dedent(
"""\
&anchor foo:
foo: bar
*anchor : duplicate key
baz: bat
*anchor : duplicate key
"""
)
if version_info < (0, 15, 1):
pass
elif version_info < (0, 16, 0):
with pytest.warns(DuplicateKeyFutureWarning):
YAML(typ='safe', pure=True).load(s)
with pytest.warns(DuplicateKeyFutureWarning):
YAML(typ='rt').load(s)
else:
with pytest.raises(DuplicateKeyError):
YAML(typ='safe', pure=True).load(s)
with pytest.raises(DuplicateKeyError):
YAML(typ='rt').load(s)
def test_duplicate_key_01(self) -> None:
# so issue https://stackoverflow.com/a/52852106/1307905
from ruyaml.constructor import DuplicateKeyError
s = dedent(
"""\
- &name-name
a: 1
- &help-name
b: 2
- <<: *name-name
<<: *help-name
"""
)
with pytest.raises(DuplicateKeyError):
yaml = YAML(typ='safe')
yaml.load(s)
with pytest.raises(DuplicateKeyError):
yaml = YAML()
yaml.load(s)
class TestFullCharSetAnchors:
def test_master_of_orion(self) -> None:
# https://bitbucket.org/ruyaml/issues/72/not-allowed-in-anchor-names
# submitted by Shalon Wood
yaml_str = """
- collection: &Backend.Civilizations.RacialPerk
items:
- key: perk_population_growth_modifier
- *Backend.Civilizations.RacialPerk
"""
data = load(yaml_str) # NOQA
def test_roundtrip_00(self) -> None:
yaml_str = """
- &dotted.words.here
a: 1
b: 2
- *dotted.words.here
"""
data = round_trip(yaml_str) # NOQA
def test_roundtrip_01(self) -> None:
yaml_str = """
- &dotted.words.here[a, b]
- *dotted.words.here
"""
data = load(yaml_str) # NOQA
compare(data, yaml_str.replace('[', ' [')) # an extra space is inserted
python-ruyaml-0.92.1/_test/test_api_change.py 0000664 0000000 0000000 00000015356 15056754172 0021266 0 ustar 00root root 0000000 0000000 # coding: utf-8
"""
testing of the new YAML() based API and related API changes
"""
import sys
import textwrap
from pathlib import Path
from typing import Any
import pytest # type: ignore
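# The "new" API exercised in this module goes through an explicit YAML instance
# (illustrative sketch of the pattern used below):
#
#   yaml = ruyaml.YAML()           # or YAML(typ='safe')
#   data = yaml.load('a: 1')
#   yaml.dump(data, sys.stdout)    # dumping requires an explicit stream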
class TestNewAPI:
def test_duplicate_keys_00(self) -> None:
from ruyaml import YAML
from ruyaml.constructor import DuplicateKeyError
yaml = YAML()
with pytest.raises(DuplicateKeyError):
yaml.load('{a: 1, a: 2}')
def test_duplicate_keys_01(self) -> None:
from ruyaml import YAML
from ruyaml.constructor import DuplicateKeyError
yaml = YAML(typ='safe', pure=True)
with pytest.raises(DuplicateKeyError):
yaml.load('{a: 1, a: 2}')
def test_duplicate_keys_02(self) -> None:
from ruyaml import YAML
from ruyaml.constructor import DuplicateKeyError
yaml = YAML(typ='safe')
with pytest.raises(DuplicateKeyError):
yaml.load('{a: 1, a: 2}')
def test_issue_135(self) -> None:
# reported by Andrzej Ostrowski
from ruyaml import YAML
data = {'a': 1, 'b': 2}
yaml = YAML(typ='safe')
# originally on 2.7: with pytest.raises(TypeError):
yaml.dump(data, sys.stdout)
def test_issue_135_temporary_workaround(self) -> None:
# never raised error
from ruyaml import YAML
data = {'a': 1, 'b': 2}
yaml = YAML(typ='safe', pure=True)
yaml.dump(data, sys.stdout)
class TestWrite:
def test_dump_path(self, tmpdir: Any) -> None:
from ruyaml import YAML
fn = Path(str(tmpdir)) / 'test.yaml'
yaml = YAML()
data = yaml.map()
data['a'] = 1
data['b'] = 2
yaml.dump(data, fn)
assert fn.read_text() == 'a: 1\nb: 2\n'
def test_dump_file(self, tmpdir: Any) -> None:
from ruyaml import YAML
fn = Path(str(tmpdir)) / 'test.yaml'
yaml = YAML()
data = yaml.map()
data['a'] = 1
data['b'] = 2
with open(str(fn), 'w') as fp:
yaml.dump(data, fp)
assert fn.read_text() == 'a: 1\nb: 2\n'
def test_dump_missing_stream(self) -> None:
from ruyaml import YAML
yaml = YAML()
data = yaml.map()
data['a'] = 1
data['b'] = 2
with pytest.raises(TypeError):
yaml.dump(data)
def test_dump_too_many_args(self, tmpdir: Any) -> None:
from ruyaml import YAML
fn = Path(str(tmpdir)) / 'test.yaml'
yaml = YAML()
data = yaml.map()
data['a'] = 1
data['b'] = 2
with pytest.raises(TypeError):
yaml.dump(data, fn, True) # type: ignore
def test_transform(self, tmpdir: Any) -> None:
from ruyaml import YAML
def tr(s: str) -> str:
return s.replace(' ', ' ')
fn = Path(str(tmpdir)) / 'test.yaml'
yaml = YAML()
data = yaml.map()
data['a'] = 1
data['b'] = 2
yaml.dump(data, fn, transform=tr)
assert fn.read_text() == 'a: 1\nb: 2\n'
def test_print(self, capsys: Any) -> None:
from ruyaml import YAML
yaml = YAML()
data = yaml.map()
data['a'] = 1
data['b'] = 2
yaml.dump(data, sys.stdout)
out, err = capsys.readouterr()
assert out == 'a: 1\nb: 2\n'
class TestRead:
def test_multi_load(self) -> None:
# make sure reader, scanner, parser get reset
from ruyaml import YAML
yaml = YAML()
yaml.load('a: 1')
yaml.load('a: 1') # did not work in 0.15.4
def test_parse(self) -> None:
# ensure `parse` method is functional and can parse "unsafe" yaml
from ruyaml import YAML
from ruyaml.constructor import ConstructorError
yaml = YAML(typ='safe')
s = '- !User0 {age: 18, name: Anthon}'
# should fail to load
with pytest.raises(ConstructorError):
yaml.load(s)
# should parse fine
yaml = YAML(typ='safe')
for _ in yaml.parse(s):
pass
class TestLoadAll:
def test_multi_document_load(self, tmpdir: Any) -> None:
"""this went wrong on 3.7 because of StopIteration, PR 37 and Issue 211"""
from ruyaml import YAML
fn = Path(str(tmpdir)) / 'test.yaml'
fn.write_text(
textwrap.dedent(
"""\
---
- a
---
- b
...
"""
),
)
yaml = YAML()
assert list(yaml.load_all(fn)) == [['a'], ['b']]
class TestDuplSet:
def test_dupl_set_00(self) -> None:
# round-trip-loader should except
from ruyaml import YAML
from ruyaml.constructor import DuplicateKeyError
yaml = YAML()
with pytest.raises(DuplicateKeyError):
yaml.load(
textwrap.dedent(
"""\
!!set
? a
? b
? c
? a
"""
),
)
class TestDumpLoadUnicode:
# test triggered by SamH on stackoverflow (https://stackoverflow.com/q/45281596/1307905)
# and answer by randomir (https://stackoverflow.com/a/45281922/1307905)
def test_write_unicode(self, tmpdir: Any) -> None:
from ruyaml import YAML
yaml = YAML()
text_dict = {'text': 'HELLO_WORLD©'}
file_name = str(tmpdir) + '/tstFile.yaml'
yaml.dump(text_dict, open(file_name, 'w'))
assert open(file_name, 'rb').read().decode('utf-8') == 'text: HELLO_WORLD©\n'
def test_read_unicode(self, tmpdir: Any) -> None:
from ruyaml import YAML
yaml = YAML()
file_name = str(tmpdir) + '/tstFile.yaml'
with open(file_name, 'wb') as fp:
fp.write('text: HELLO_WORLD©\n'.encode('utf-8'))
text_dict = yaml.load(open(file_name, 'r'))
print(text_dict)
assert text_dict['text'] == 'HELLO_WORLD©'
class TestFlowStyle:
def test_flow_style(self, capsys: Any) -> None:
# https://stackoverflow.com/questions/45791712/
from ruyaml import YAML
yaml = YAML()
yaml.default_flow_style = None
data = yaml.map()
data['b'] = 1
data['a'] = [[1, 2], [3, 4]]
yaml.dump(data, sys.stdout)
out, err = capsys.readouterr()
assert out == 'b: 1\na:\n- [1, 2]\n- [3, 4]\n'
class TestOldAPI:
@pytest.mark.skipif(sys.version_info >= (3, 0), reason='ok on Py3') # type: ignore
def test_duplicate_keys_02(self) -> None:
# Issue 165 unicode keys in error/warning
from ruyaml import safe_load
from ruyaml.constructor import DuplicateKeyError
with pytest.raises(DuplicateKeyError):
safe_load('type: Doméstica\ntype: International')
python-ruyaml-0.92.1/_test/test_class_register.py 0000664 0000000 0000000 00000007244 15056754172 0022216 0 ustar 00root root 0000000 0000000 # coding: utf-8
"""
testing of YAML.register_class and @yaml_object
"""
from typing import Any
import pytest # type: ignore # NOQA
from roundtrip import YAML # type: ignore
from ruyaml.comments import CommentedMap, TaggedScalar # NOQA
class User0:
def __init__(self, name: str, age: int) -> None:
self.name = name
self.age = age
class User1:
yaml_tag = '!user'
def __init__(self, name: str, age: int) -> None:
self.name = name
self.age = age
@classmethod
def to_yaml(cls, representer: Any, node: Any) -> Any:
return representer.represent_scalar(
cls.yaml_tag, '{.name}-{.age}'.format(node, node)
)
@classmethod
def from_yaml(cls, constructor: Any, node: Any) -> Any:
return cls(*node.value.split('-'))
class TestRegisterClass:
def test_register_0_rt(self) -> None:
yaml = YAML()
yaml.register_class(User0)
ys = """
- !User0
name: Anthon
age: 18
"""
d = yaml.load(ys)
yaml.dump(d, compare=ys, unordered_lines=True)
def test_register_0_safe(self) -> None:
# default_flow_style = None
yaml = YAML(typ='safe')
yaml.register_class(User0)
ys = """
- !User0 {age: 18, name: Anthon}
"""
d = yaml.load(ys)
yaml.dump(d, compare=ys)
def test_register_0_unsafe(self) -> None:
# default_flow_style = None
with pytest.warns(PendingDeprecationWarning):
yaml = YAML(typ='unsafe')
yaml.register_class(User0)
ys = """
- !User0 {age: 18, name: Anthon}
"""
d = yaml.load(ys)
yaml.dump(d, compare=ys)
def test_register_1_rt(self) -> None:
yaml = YAML()
yaml.register_class(User1)
ys = """
- !user Anthon-18
"""
d = yaml.load(ys)
yaml.dump(d, compare=ys)
def test_register_1_safe(self) -> None:
yaml = YAML(typ='safe')
yaml.register_class(User1)
ys = """
[!user Anthon-18]
"""
d = yaml.load(ys)
yaml.dump(d, compare=ys)
def test_register_1_unsafe(self) -> None:
with pytest.warns(PendingDeprecationWarning):
yaml = YAML(typ='unsafe')
yaml.register_class(User1)
ys = """
[!user Anthon-18]
"""
d = yaml.load(ys)
yaml.dump(d, compare=ys)
class TestDecorator:
def test_decorator_implicit(self) -> None:
from ruyaml import yaml_object
yml = YAML()
@yaml_object(yml)
class User2:
def __init__(self, name: str, age: int) -> None:
self.name = name
self.age = age
ys = """
- !User2
name: Anthon
age: 18
"""
d = yml.load(ys)
yml.dump(d, compare=ys, unordered_lines=True)
def test_decorator_explicit(self) -> None:
from ruyaml import yaml_object
yml = YAML()
@yaml_object(yml)
class User3:
yaml_tag = '!USER'
def __init__(self, name: str, age: int) -> None:
self.name = name
self.age = age
@classmethod
def to_yaml(cls, representer: Any, node: Any) -> Any:
return representer.represent_scalar(
cls.yaml_tag,
'{.name}-{.age}'.format(node, node),
)
@classmethod
def from_yaml(cls, constructor: Any, node: Any) -> Any:
return cls(*node.value.split('-'))
ys = """
- !USER Anthon-18
"""
d = yml.load(ys)
yml.dump(d, compare=ys)
python-ruyaml-0.92.1/_test/test_collections.py 0000664 0000000 0000000 00000001005 15056754172 0021510 0 ustar 00root root 0000000 0000000 # coding: utf-8
"""
collections.OrderedDict is a new class not supported by PyYAML (issue 83 by Frazer McLean)
This is now so integrated in Python that it can be mapped to !!omap
"""
import pytest # type: ignore # NOQA
from roundtrip import ( # type: ignore # NOQA
dedent,
round_trip,
round_trip_dump,
round_trip_load,
)
class TestOrderedDict:
def test_ordereddict(self) -> None:
from collections import OrderedDict
assert round_trip_dump(OrderedDict()) == '!!omap []\n'
python-ruyaml-0.92.1/_test/test_comment_manipulation.py 0000664 0000000 0000000 00000040546 15056754172 0023431 0 ustar 00root root 0000000 0000000 # coding: utf-8
from typing import Any
import pytest # type: ignore # NOQA
from roundtrip import ( # type: ignore # NOQA
dedent,
round_trip,
round_trip_dump,
round_trip_load,
)
def load(s: str) -> Any:
return round_trip_load(dedent(s))
def compare(data: Any, s: str, **kw: Any) -> None:
assert round_trip_dump(data, **kw) == dedent(s)
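# compare_eol() below: the literal marker 'EOL' in the expected string makes trailing
# spaces explicit; the marker is stripped before comparison and each newline is
# prefixed with '|' so that end-of-line differences show up in assertion output.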
def compare_eol(data: Any, s: str) -> None:
assert 'EOL' in s
ds = dedent(s).replace('EOL', '').replace('\n', '|\n')
assert round_trip_dump(data).replace('\n', '|\n') == ds
class TestCommentsManipulation:
# list
def test_seq_set_comment_on_existing_explicit_column(self) -> None:
data = load(
"""
- a # comment 1
- b
- c
"""
)
data.yaml_add_eol_comment('comment 2', key=1, column=6)
exp = """
- a # comment 1
- b # comment 2
- c
"""
compare(data, exp)
def test_seq_overwrite_comment_on_existing_explicit_column(self) -> None:
data = load(
"""
- a # comment 1
- b
- c
"""
)
data.yaml_add_eol_comment('comment 2', key=0, column=6)
exp = """
- a # comment 2
- b
- c
"""
compare(data, exp)
def test_seq_first_comment_explicit_column(self) -> None:
data = load(
"""
- a
- b
- c
"""
)
data.yaml_add_eol_comment('comment 1', key=1, column=6)
exp = """
- a
- b # comment 1
- c
"""
compare(data, exp)
def test_seq_set_comment_on_existing_column_prev(self) -> None:
data = load(
"""
- a # comment 1
- b
- c
- d # comment 3
"""
)
data.yaml_add_eol_comment('comment 2', key=1)
exp = """
- a # comment 1
- b # comment 2
- c
- d # comment 3
"""
compare(data, exp)
def test_seq_set_comment_on_existing_column_next(self) -> None:
data = load(
"""
- a # comment 1
- b
- c
- d # comment 3
"""
)
print(data.ca)
# print(type(data._yaml_comment._items[0][0].start_mark))
# ruyaml.error.Mark
# print(type(data._yaml_comment._items[0][0].start_mark))
data.yaml_add_eol_comment('comment 2', key=2)
exp = """
- a # comment 1
- b
- c # comment 2
- d # comment 3
"""
compare(data, exp)
def test_seq_set_comment_on_existing_column_further_away(self) -> None:
"""
no comment line before or after, take the latest before
the new position
"""
data = load(
"""
- a # comment 1
- b
- c
- d
- e
- f # comment 3
"""
)
print(data.ca)
# print(type(data._yaml_comment._items[0][0].start_mark))
# ruyaml.error.Mark
# print(type(data._yaml_comment._items[0][0].start_mark))
data.yaml_add_eol_comment('comment 2', key=3)
exp = """
- a # comment 1
- b
- c
- d # comment 2
- e
- f # comment 3
"""
compare(data, exp)
def test_seq_set_comment_on_existing_explicit_column_with_hash(self) -> None:
data = load(
"""
- a # comment 1
- b
- c
"""
)
data.yaml_add_eol_comment('# comment 2', key=1, column=6)
exp = """
- a # comment 1
- b # comment 2
- c
"""
compare(data, exp)
# dict
def test_dict_set_comment_on_existing_explicit_column(self) -> None:
data = load(
"""
a: 1 # comment 1
b: 2
c: 3
d: 4
e: 5
"""
)
data.yaml_add_eol_comment('comment 2', key='c', column=7)
exp = """
a: 1 # comment 1
b: 2
c: 3 # comment 2
d: 4
e: 5
"""
compare(data, exp)
def test_dict_overwrite_comment_on_existing_explicit_column(self) -> None:
data = load(
"""
a: 1 # comment 1
b: 2
c: 3
d: 4
e: 5
"""
)
data.yaml_add_eol_comment('comment 2', key='a', column=7)
exp = """
a: 1 # comment 2
b: 2
c: 3
d: 4
e: 5
"""
compare(data, exp)
def test_map_set_comment_on_existing_column_prev(self) -> None:
data = load(
"""
a: 1 # comment 1
b: 2
c: 3
d: 4
e: 5 # comment 3
"""
)
data.yaml_add_eol_comment('comment 2', key='b')
exp = """
a: 1 # comment 1
b: 2 # comment 2
c: 3
d: 4
e: 5 # comment 3
"""
compare(data, exp)
def test_map_set_comment_on_existing_column_next(self) -> None:
data = load(
"""
a: 1 # comment 1
b: 2
c: 3
d: 4
e: 5 # comment 3
"""
)
data.yaml_add_eol_comment('comment 2', key='d')
exp = """
a: 1 # comment 1
b: 2
c: 3
d: 4 # comment 2
e: 5 # comment 3
"""
compare(data, exp)
def test_map_set_comment_on_existing_column_further_away(self) -> None:
"""
no comment line before or after, take the latest before
the new position
"""
data = load(
"""
a: 1 # comment 1
b: 2
c: 3
d: 4
e: 5 # comment 3
"""
)
data.yaml_add_eol_comment('comment 2', key='c')
print(round_trip_dump(data))
exp = """
a: 1 # comment 1
b: 2
c: 3 # comment 2
d: 4
e: 5 # comment 3
"""
compare(data, exp)
def test_before_top_map_rt(self) -> None:
data = load(
"""
a: 1
b: 2
"""
)
data.yaml_set_start_comment('Hello\nWorld\n')
exp = """
# Hello
# World
a: 1
b: 2
"""
compare(data, exp.format(comment='#'))
def test_before_top_map_replace(self) -> None:
data = load(
"""
# abc
# def
a: 1 # 1
b: 2
"""
)
data.yaml_set_start_comment('Hello\nWorld\n')
exp = """
# Hello
# World
a: 1 # 1
b: 2
"""
compare(data, exp.format(comment='#'))
def test_before_top_map_from_scratch(self) -> None:
from ruyaml.comments import CommentedMap
data = CommentedMap()
data['a'] = 1
data['b'] = 2
data.yaml_set_start_comment('Hello\nWorld\n')
# print(data.ca)
# print(data.ca._items)
exp = """
# Hello
# World
a: 1
b: 2
"""
compare(data, exp.format(comment='#'))
def test_before_top_seq_rt(self) -> None:
data = load(
"""
- a
- b
"""
)
data.yaml_set_start_comment('Hello\nWorld\n')
print(round_trip_dump(data))
exp = """
# Hello
# World
- a
- b
"""
compare(data, exp)
def test_before_top_seq_rt_replace(self) -> None:
s = """
# this
# that
- a
- b
"""
data = load(s.format(comment='#'))
data.yaml_set_start_comment('Hello\nWorld\n')
print(round_trip_dump(data))
exp = """
# Hello
# World
- a
- b
"""
compare(data, exp.format(comment='#'))
def test_before_top_seq_from_scratch(self) -> None:
from ruyaml.comments import CommentedSeq
data = CommentedSeq()
data.append('a')
data.append('b')
data.yaml_set_start_comment('Hello\nWorld\n')
print(round_trip_dump(data))
exp = """
# Hello
# World
- a
- b
"""
compare(data, exp.format(comment='#'))
# nested variants
def test_before_nested_map_rt(self) -> None:
data = load(
"""
a: 1
b:
c: 2
d: 3
"""
)
data['b'].yaml_set_start_comment('Hello\nWorld\n')
exp = """
a: 1
b:
# Hello
# World
c: 2
d: 3
"""
compare(data, exp.format(comment='#'))
def test_before_nested_map_rt_indent(self) -> None:
data = load(
"""
a: 1
b:
c: 2
d: 3
"""
)
data['b'].yaml_set_start_comment('Hello\nWorld\n', indent=2)
exp = """
a: 1
b:
# Hello
# World
c: 2
d: 3
"""
compare(data, exp.format(comment='#'))
print(data['b'].ca)
def test_before_nested_map_from_scratch(self) -> None:
from ruyaml.comments import CommentedMap
data = CommentedMap()
datab = CommentedMap()
data['a'] = 1
data['b'] = datab
datab['c'] = 2
datab['d'] = 3
data['b'].yaml_set_start_comment('Hello\nWorld\n')
exp = """
a: 1
b:
# Hello
# World
c: 2
d: 3
"""
compare(data, exp.format(comment='#'))
def test_before_nested_seq_from_scratch(self) -> None:
from ruyaml.comments import CommentedMap, CommentedSeq
data = CommentedMap()
datab = CommentedSeq()
data['a'] = 1
data['b'] = datab
datab.append('c')
datab.append('d')
data['b'].yaml_set_start_comment('Hello\nWorld\n', indent=2)
exp = """
a: 1
b:
# Hello
# World
- c
- d
"""
compare(data, exp.format(comment='#'))
def test_before_nested_seq_from_scratch_block_seq_indent(self) -> None:
from ruyaml.comments import CommentedMap, CommentedSeq
data = CommentedMap()
datab = CommentedSeq()
data['a'] = 1
data['b'] = datab
datab.append('c')
datab.append('d')
data['b'].yaml_set_start_comment('Hello\nWorld\n', indent=2)
exp = """
a: 1
b:
# Hello
# World
- c
- d
"""
compare(data, exp.format(comment='#'), indent=4, block_seq_indent=2)
def test_map_set_comment_before_and_after_non_first_key_00(self) -> None:
# http://stackoverflow.com/a/40705671/1307905
data = load(
"""
xyz:
a: 1 # comment 1
b: 2
test1:
test2:
test3: 3
"""
)
data.yaml_set_comment_before_after_key(
'test1',
'before test1 (top level)',
after='before test2',
)
data['test1']['test2'].yaml_set_start_comment('after test2', indent=4)
exp = """
xyz:
a: 1 # comment 1
b: 2
# before test1 (top level)
test1:
# before test2
test2:
# after test2
test3: 3
"""
compare(data, exp)
def Xtest_map_set_comment_before_and_after_non_first_key_01(self) -> None:
data = load(
"""
xyz:
a: 1 # comment 1
b: 2
test1:
test2:
test3: 3
"""
)
data.yaml_set_comment_before_after_key(
'test1',
'before test1 (top level)',
after='before test2\n\n',
)
data['test1']['test2'].yaml_set_start_comment('after test2', indent=4)
# EOL is needed here as dedenting gets rid of trailing spaces (as does Emacs)
exp = """
xyz:
a: 1 # comment 1
b: 2
# before test1 (top level)
test1:
# before test2
EOL
test2:
# after test2
test3: 3
"""
compare_eol(data, exp)
# EOL is no longer necessary
# fixed together with issue #216
def test_map_set_comment_before_and_after_non_first_key_01(self) -> None:
data = load(
"""
xyz:
a: 1 # comment 1
b: 2
test1:
test2:
test3: 3
"""
)
data.yaml_set_comment_before_after_key(
'test1',
'before test1 (top level)',
after='before test2\n\n',
)
data['test1']['test2'].yaml_set_start_comment('after test2', indent=4)
exp = """
xyz:
a: 1 # comment 1
b: 2
# before test1 (top level)
test1:
# before test2
test2:
# after test2
test3: 3
"""
compare(data, exp)
def Xtest_map_set_comment_before_and_after_non_first_key_02(self) -> None:
data = load(
"""
xyz:
a: 1 # comment 1
b: 2
test1:
test2:
test3: 3
"""
)
data.yaml_set_comment_before_after_key(
'test1',
'xyz\n\nbefore test1 (top level)',
after='\nbefore test2',
after_indent=4,
)
data['test1']['test2'].yaml_set_start_comment('after test2', indent=4)
# EOL is needed here as dedenting gets rid of trailing spaces (as does Emacs)
exp = """
xyz:
a: 1 # comment 1
b: 2
# xyz
# before test1 (top level)
test1:
EOL
# before test2
test2:
# after test2
test3: 3
"""
compare_eol(data, exp)
def test_map_set_comment_before_and_after_non_first_key_02(self) -> None:
data = load(
"""
xyz:
a: 1 # comment 1
b: 2
test1:
test2:
test3: 3
"""
)
data.yaml_set_comment_before_after_key(
'test1',
'xyz\n\nbefore test1 (top level)',
after='\nbefore test2',
after_indent=4,
)
data['test1']['test2'].yaml_set_start_comment('after test2', indent=4)
exp = """
xyz:
a: 1 # comment 1
b: 2
# xyz
# before test1 (top level)
test1:
# before test2
test2:
# after test2
test3: 3
"""
compare(data, exp)
# issue 32
def test_yaml_add_eol_comment_issue_32(self):
data = load(
"""
items:
- one: 1
uno: '1'
- # item 2
two: 2
duo: '2'
- three: 3
"""
)
data['items'].yaml_add_eol_comment('second pass', key=1)
exp = """
items:
- one: 1
uno: '1'
- # second pass
two: 2
duo: '2'
- three: 3
"""
compare(data, exp)
def test_yaml_add_eol_comment_issue_32_ok(self):
data = load(
"""
items:
- one
- two # item 2
- three
"""
)
data['items'].yaml_add_eol_comment('second pass', key=1)
exp = """
items:
- one
- two # second pass
- three
"""
compare(data, exp)
# issue 33
@pytest.mark.xfail(reason="open issue", raises=AssertionError)
def test_yaml_set_start_comment_issue_33(self):
data = load(
"""
items:
# item 1
- one: 1
uno: '1'
# item 2
- two: 2
duo: '2'
# item 3
- three: 3
"""
)
data['items'][0].yaml_set_start_comment('uno')
data['items'][1].yaml_set_start_comment('duo')
data['items'][2].yaml_set_start_comment('tre')
exp = """
items:
# uno
- one: 1
uno: '1'
# duo
- two: 2
duo: '2'
# tre
- three: 3
"""
compare(data, exp)
python-ruyaml-0.92.1/_test/test_comments.py 0000664 0000000 0000000 00000050770 15056754172 0021034 0 ustar 00root root 0000000 0000000 # coding: utf-8
"""
comment testing is all about roundtrips
these can be done in the "old" way by creating a file.data and file.roundtrip
but there is little flexibility in doing that
but some things are not easily tested, eog. how a
roundtrip changes
"""
import sys
import pytest # type: ignore # NOQA
from roundtrip import ( # type: ignore
dedent,
round_trip,
round_trip_dump,
round_trip_load,
)
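# Illustrative sketch, not part of the test suite: a minimal comment-preserving
# roundtrip done directly with the ruyaml API (the helpers imported above wrap
# the same calls with dedent/compare conveniences). The helper name
# _example_comment_roundtrip is made up for illustration only.
def _example_comment_roundtrip() -> None:
    import io

    from ruyaml import YAML

    yaml = YAML()  # default round-trip mode keeps comments attached to the data
    data = yaml.load('a: 1  # keep me\nb: 2\n')
    data['c'] = 3  # modify the data in between
    buf = io.StringIO()
    yaml.dump(data, buf)
    # the EOL comment on 'a' survives the load/modify/dump cycle
    assert '# keep me' in buf.getvalue()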
class TestComments:
def test_no_end_of_file_eol(self) -> None:
"""not excluding comments caused some problems if at the end of
the file without a newline. First error, then included \0"""
x = """\
- europe: 10 # abc"""
round_trip(x, extra='\n')
with pytest.raises(AssertionError):
round_trip(x, extra='a\n')
def test_no_comments(self) -> None:
round_trip(
"""
- europe: 10
- usa:
- ohio: 2
- california: 9
"""
)
def test_round_trip_ordering(self) -> None:
round_trip(
"""
a: 1
b: 2
c: 3
b1: 2
b2: 2
d: 4
e: 5
f: 6
"""
)
def test_complex(self) -> None:
round_trip(
"""
- europe: 10 # top
- usa:
- ohio: 2
- california: 9 # o
"""
)
def test_dropped(self) -> None:
s = """\
# comment
scalar
...
"""
round_trip(s, 'scalar\n...\n')
def test_main_mapping_begin_end(self) -> None:
round_trip(
"""
# C start a
# C start b
abc: 1
ghi: 2
klm: 3
# C end a
# C end b
"""
)
def test_reindent(self) -> None:
x = """\
a:
b: # comment 1
c: 1 # comment 2
"""
d = round_trip_load(x)
y = round_trip_dump(d, indent=4)
assert y == dedent(
"""\
a:
b: # comment 1
c: 1 # comment 2
"""
)
def test_main_mapping_begin_end_items_post(self) -> None:
round_trip(
"""
# C start a
# C start b
abc: 1 # abc comment
ghi: 2
klm: 3 # klm comment
# C end a
# C end b
"""
)
def test_main_sequence_begin_end(self) -> None:
round_trip(
"""
# C start a
# C start b
- abc
- ghi
- klm
# C end a
# C end b
"""
)
def test_main_sequence_begin_end_items_post(self) -> None:
round_trip(
"""
# C start a
# C start b
- abc # abc comment
- ghi
- klm # klm comment
# C end a
# C end b
"""
)
def test_main_mapping_begin_end_complex(self) -> None:
round_trip(
"""
# C start a
# C start b
abc: 1
ghi: 2
klm:
3a: alpha
3b: beta # it is all greek to me
# C end a
# C end b
"""
)
def test_09(self) -> None: # 2.9 from the examples in the spec
s = """\
hr: # 1998 hr ranking
- Mark McGwire
- Sammy Sosa
rbi:
# 1998 rbi ranking
- Sammy Sosa
- Ken Griffey
"""
round_trip(s, indent=4, block_seq_indent=2)
def test_09a(self) -> None:
round_trip(
"""
hr: # 1998 hr ranking
- Mark McGwire
- Sammy Sosa
rbi:
# 1998 rbi ranking
- Sammy Sosa
- Ken Griffey
"""
)
def test_simple_map_middle_comment(self) -> None:
round_trip(
"""
abc: 1
# C 3a
# C 3b
ghi: 2
"""
)
def test_map_in_map_0(self) -> None:
round_trip(
"""
map1: # comment 1
# comment 2
map2:
key1: val1
"""
)
def test_map_in_map_1(self) -> None:
# comment is moved from value to key
round_trip(
"""
map1:
# comment 1
map2:
key1: val1
"""
)
def test_application_arguments(self) -> None:
# application configuration
round_trip(
"""
args:
username: anthon
passwd: secret
fullname: Anthon van der Neut
tmux:
session-name: test
loop:
wait: 10
"""
)
def test_substitute(self) -> None:
x = """
args:
username: anthon # name
passwd: secret # password
fullname: Anthon van der Neut
tmux:
session-name: test
loop:
wait: 10
"""
data = round_trip_load(x)
data['args']['passwd'] = 'deleted password'
# note the requirement to add spaces for alignment of comment
x = x.replace(': secret ', ': deleted password')
assert round_trip_dump(data) == dedent(x)
def test_set_comment(self) -> None:
round_trip(
"""
!!set
# the beginning
? a
# next one is B (lowercase)
? b # You see? Promised you.
? c
# this is the end
"""
)
def test_omap_comment_roundtrip(self) -> None:
round_trip(
"""
!!omap
- a: 1
- b: 2 # two
- c: 3 # three
- d: 4
"""
)
def test_omap_comment_roundtrip_pre_comment(self) -> None:
round_trip(
"""
!!omap
- a: 1
- b: 2 # two
- c: 3 # three
# last one
- d: 4
"""
)
def test_non_ascii(self) -> None:
round_trip(
"""
verbosity: 1 # 0 is minimal output, -1 none
base_url: http://gopher.net
special_indices: [1, 5, 8]
also_special:
- a
- 19
- 32
asia and europe: &asia_europe
Turkey: Ankara
Russia: Moscow
countries:
Asia:
<<: *asia_europe
Japan: Tokyo # 東京
Europe:
<<: *asia_europe
Spain: Madrid
Italy: Rome
"""
)
def test_dump_utf8(self) -> None:
import ruyaml # NOQA
x = dedent(
"""\
ab:
- x # comment
- y # more comment
"""
)
data = round_trip_load(x)
for utf in [True, False]:
y = round_trip_dump(
data,
default_flow_style=False,
allow_unicode=utf,
)
assert y == x
def test_dump_unicode_utf8(self) -> None:
import ruyaml # NOQA
x = dedent(
"""\
ab:
- x # comment
- y # more comment
"""
)
data = round_trip_load(x)
for utf in [True, False]:
y = round_trip_dump(
data,
default_flow_style=False,
allow_unicode=utf,
)
assert y == x
def test_mlget_00(self) -> None:
x = """\
a:
- b:
c: 42
- d:
f: 196
e:
g: 3.14
"""
d = round_trip_load(x)
assert d.mlget(['a', 1, 'd', 'f'], list_ok=True) == 196
# with pytest.raises(AssertionError):
# d.mlget(['a', 1, 'd', 'f']) == 196
class TestInsertPopList:
"""list insertion is more complex than dict insertion, as you
need to move the values to subsequent keys on insert"""
@property
def ins(self) -> str:
return """\
ab:
- a # a
- b # b
- c
- d # d
de:
- 1
- 2
"""
def test_insert_0(self) -> None:
d = round_trip_load(self.ins)
d['ab'].insert(0, 'xyz')
y = round_trip_dump(d, indent=2)
assert y == dedent(
"""\
ab:
- xyz
- a # a
- b # b
- c
- d # d
de:
- 1
- 2
"""
)
def test_insert_1(self) -> None:
d = round_trip_load(self.ins)
d['ab'].insert(4, 'xyz')
y = round_trip_dump(d, indent=2)
assert y == dedent(
"""\
ab:
- a # a
- b # b
- c
- d # d
- xyz
de:
- 1
- 2
"""
)
def test_insert_2(self) -> None:
d = round_trip_load(self.ins)
d['ab'].insert(1, 'xyz')
y = round_trip_dump(d, indent=2)
assert y == dedent(
"""\
ab:
- a # a
- xyz
- b # b
- c
- d # d
de:
- 1
- 2
"""
)
def test_pop_0(self) -> None:
d = round_trip_load(self.ins)
d['ab'].pop(0)
y = round_trip_dump(d, indent=2)
print(y)
assert y == dedent(
"""\
ab:
- b # b
- c
- d # d
de:
- 1
- 2
"""
)
def test_pop_1(self) -> None:
d = round_trip_load(self.ins)
d['ab'].pop(1)
y = round_trip_dump(d, indent=2)
print(y)
assert y == dedent(
"""\
ab:
- a # a
- c
- d # d
de:
- 1
- 2
"""
)
def test_pop_2(self) -> None:
d = round_trip_load(self.ins)
d['ab'].pop(2)
y = round_trip_dump(d, indent=2)
print(y)
assert y == dedent(
"""\
ab:
- a # a
- b # b
- d # d
de:
- 1
- 2
"""
)
def test_pop_3(self) -> None:
d = round_trip_load(self.ins)
d['ab'].pop(3)
y = round_trip_dump(d, indent=2)
print(y)
assert y == dedent(
"""\
ab:
- a # a
- b # b
- c
de:
- 1
- 2
"""
)
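# Illustrative sketch (hypothetical helper, not collected by pytest): comments on
# a CommentedSeq are keyed by index, so inserting an element has to shift the
# attached comments to the subsequent indices, as the tests above exercise.
def _example_seq_insert_moves_comments() -> None:
    d = round_trip_load('- a  # a\n- b  # b\n')
    d.insert(1, 'xyz')
    out = round_trip_dump(d)
    lines = out.splitlines()
    # '# a' still follows 'a' and '# b' still follows 'b' after the insert
    assert '# a' in lines[0] and lines[1] == '- xyz' and '# b' in lines[2]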
# inspired by demux's question on stackoverflow
# http://stackoverflow.com/a/36970608/1307905
class TestInsertInMapping:
@property
def ins(self) -> str:
return """\
first_name: Art
occupation: Architect # This is an occupation comment
about: Art Vandelay is a fictional character that George invents...
"""
def test_insert_at_pos_1(self) -> None:
d = round_trip_load(self.ins)
d.insert(1, 'last name', 'Vandelay', comment='new key')
y = round_trip_dump(d)
print(y)
assert y == dedent(
"""\
first_name: Art
last name: Vandelay # new key
occupation: Architect # This is an occupation comment
about: Art Vandelay is a fictional character that George invents...
"""
)
def test_insert_at_pos_0(self) -> None:
d = round_trip_load(self.ins)
d.insert(0, 'last name', 'Vandelay', comment='new key')
y = round_trip_dump(d)
print(y)
assert y == dedent(
"""\
last name: Vandelay # new key
first_name: Art
occupation: Architect # This is an occupation comment
about: Art Vandelay is a fictional character that George invents...
"""
)
def test_insert_at_pos_3(self) -> None:
# much simpler if done by appending.
d = round_trip_load(self.ins)
d.insert(3, 'last name', 'Vandelay', comment='new key')
y = round_trip_dump(d)
print(y)
assert y == dedent(
"""\
first_name: Art
occupation: Architect # This is an occupation comment
about: Art Vandelay is a fictional character that George invents...
last name: Vandelay # new key
"""
)
class TestCommentedMapMerge:
def test_in_operator(self) -> None:
data = round_trip_load(
"""
x: &base
a: 1
b: 2
c: 3
y:
<<: *base
k: 4
l: 5
"""
)
assert data['x']['a'] == 1
assert 'a' in data['x']
assert data['y']['a'] == 1
assert 'a' in data['y']
def test_issue_60(self) -> None:
data = round_trip_load(
"""
x: &base
a: 1
y:
<<: *base
"""
)
assert data['x']['a'] == 1
assert data['y']['a'] == 1
assert str(data['y']) == """{'a': 1}"""
def test_issue_60_1(self) -> None:
data = round_trip_load(
"""
x: &base
a: 1
y:
<<: *base
b: 2
"""
)
assert data['x']['a'] == 1
assert data['y']['a'] == 1
assert str(data['y']) == """{'b': 2, 'a': 1}"""
class TestEmptyLines:
# prompted by issue 46 from Alex Harvey
def test_issue_46(self) -> None:
yaml_str = dedent(
"""\
---
# Please add key/value pairs in alphabetical order
aws_s3_bucket: 'mys3bucket'
jenkins_ad_credentials:
bind_name: 'CN=svc-AAA-BBB-T,OU=Example,DC=COM,DC=EXAMPLE,DC=Local'
bind_pass: 'xxxxyyyy{'
"""
)
d = round_trip_load(yaml_str, preserve_quotes=True)
y = round_trip_dump(d, explicit_start=True)
assert yaml_str == y
def test_multispace_map(self) -> None:
round_trip(
"""
a: 1x
b: 2x
c: 3x
d: 4x
"""
)
@pytest.mark.xfail(strict=True) # type: ignore
def test_multispace_map_initial(self) -> None:
round_trip(
"""
a: 1x
b: 2x
c: 3x
d: 4x
"""
)
def test_embedded_map(self) -> None:
round_trip(
"""
- a: 1y
b: 2y
c: 3y
"""
)
def test_toplevel_seq(self) -> None:
round_trip(
"""\
- 1
- 2
- 3
"""
)
def test_embedded_seq(self) -> None:
round_trip(
"""
a:
b:
- 1
- 2
- 3
"""
)
def test_line_with_only_spaces(self) -> None:
# issue 54
yaml_str = "---\n\na: 'x'\n \nb: y\n"
d = round_trip_load(yaml_str, preserve_quotes=True)
y = round_trip_dump(d, explicit_start=True)
stripped = ""
for line in yaml_str.splitlines():
stripped += line.rstrip() + '\n'
print(line + '$')
assert stripped == y
def test_some_eol_spaces(self) -> None:
# spaces after tokens and on empty lines
yaml_str = '--- \n \na: "x" \n \nb: y \n'
d = round_trip_load(yaml_str, preserve_quotes=True)
y = round_trip_dump(d, explicit_start=True)
stripped = ""
for line in yaml_str.splitlines():
stripped += line.rstrip() + '\n'
print(line + '$')
assert stripped == y
def test_issue_54_not_ok(self) -> None:
yaml_str = dedent(
"""\
toplevel:
# some comment
sublevel: 300
"""
)
d = round_trip_load(yaml_str)
print(d.ca)
y = round_trip_dump(d, indent=4)
assert isinstance(y, str)
print(y.replace('\n', '$\n'))
assert yaml_str == y
def test_issue_54_ok(self) -> None:
yaml_str = dedent(
"""\
toplevel:
# some comment
sublevel: 300
"""
)
d = round_trip_load(yaml_str)
y = round_trip_dump(d, indent=4)
assert yaml_str == y
def test_issue_93(self) -> None:
round_trip(
"""\
a:
b:
- c1: cat # a1
# my comment on catfish
- c2: catfish # a2
"""
)
def test_issue_93_00(self) -> None:
round_trip(
"""\
a:
- - c1: cat # a1
# my comment on catfish
- c2: catfish # a2
"""
)
def test_issue_93_01(self) -> None:
round_trip(
"""\
- - c1: cat # a1
# my comment on catfish
- c2: catfish # a2
"""
)
def test_issue_93_02(self) -> None:
# never failed as there is no indent
round_trip(
"""\
- c1: cat
# my comment on catfish
- c2: catfish
"""
)
def test_issue_96(self) -> None:
# inserted extra line on trailing spaces
round_trip(
"""\
a:
b:
c: c_val
d:
e:
g: g_val
"""
)
class TestUnicodeComments:
@pytest.mark.skipif(sys.version_info < (2, 7), reason='wide unicode') # type: ignore
def test_issue_55(self) -> None: # reported by Haraguroicha Hsu
round_trip(
"""\
name: TEST
description: test using
author: Harguroicha
sql:
command: |-
select name from testtbl where no = :no
ci-test:
- :no: 04043709 # 小花
- :no: 05161690 # 茶
- :no: 05293147 # 〇𤋥川
- :no: 05338777 # 〇〇啓
- :no: 05273867 # 〇
- :no: 05205786 # 〇𤦌
"""
)
class TestEmptyValueBeforeComments:
def test_issue_25a(self) -> None:
round_trip(
"""\
- a: b
c: d
d: # foo
- e: f
"""
)
def test_issue_25a1(self) -> None:
round_trip(
"""\
- a: b
c: d
d: # foo
e: f
"""
)
def test_issue_25b(self) -> None:
round_trip(
"""\
var1: #empty
var2: something #notempty
"""
)
def test_issue_25c(self) -> None:
round_trip(
"""\
params:
a: 1 # comment a
b: # comment b
c: 3 # comment c
"""
)
def test_issue_25c1(self) -> None:
round_trip(
"""\
params:
a: 1 # comment a
b: # comment b
# extra
c: 3 # comment c
"""
)
def test_issue_25_00(self) -> None:
round_trip(
"""\
params:
a: 1 # comment a
b: # comment b
"""
)
def test_issue_25_01(self) -> None:
round_trip(
"""\
a: # comment 1
# comment 2
- b: # comment 3
c: 1 # comment 4
"""
)
def test_issue_25_02(self) -> None:
round_trip(
"""\
a: # comment 1
# comment 2
- b: 2 # comment 3
"""
)
def test_issue_25_03(self) -> None:
s = """\
a: # comment 1
# comment 2
- b: 2 # comment 3
"""
round_trip(s, indent=4, block_seq_indent=2)
def test_issue_25_04(self) -> None:
round_trip(
"""\
a: # comment 1
# comment 2
b: 1 # comment 3
"""
)
def test_flow_seq_within_seq(self) -> None:
round_trip(
"""\
# comment 1
- a
- b
# comment 2
- c
- d
# comment 3
- [e]
- f
# comment 4
- []
"""
)
def test_comment_after_block_scalar_indicator(self) -> None:
round_trip(
"""\
a: | # abc
test 1
test 2
# all done
"""
)
test_block_scalar_commented_line_template = """\
y: p
# Some comment
a: |
x
{}b: y
"""
class TestBlockScalarWithComments:
# issue 99 reported by Colm O'Connor
def test_scalar_with_comments(self) -> None:
import ruyaml # NOQA
for x in [
"",
'\n',
'\n# Another comment\n',
'\n\n',
'\n\n# abc\n#xyz\n',
'\n\n# abc\n#xyz\n',
'# abc\n\n#xyz\n',
'\n\n # abc\n #xyz\n',
]:
commented_line = test_block_scalar_commented_line_template.format(x)
data = round_trip_load(commented_line)
assert round_trip_dump(data) == commented_line
python-ruyaml-0.92.1/_test/test_contextmanager.py 0000664 0000000 0000000 00000005422 15056754172 0022220 0 ustar 00root root 0000000 0000000 # coding: utf-8
"""
testing of YAML() used as a context manager for dumping and loading
"""
import sys
from typing import Any
import pytest # type: ignore
single_doc = """\
- a: 1
- b:
- 2
- 3
"""
single_data = [dict(a=1), dict(b=[2, 3])]
multi_doc = """\
---
- abc
- xyz
---
- a: 1
- b:
- 2
- 3
"""
multi_doc_data = [['abc', 'xyz'], single_data]
def get_yaml() -> Any:
from ruyaml import YAML
return YAML()
class TestOldStyle:
def test_single_load(self) -> None:
d = get_yaml().load(single_doc)
print(d)
print(type(d[0]))
assert d == single_data
def test_single_load_no_arg(self) -> None:
with pytest.raises(TypeError):
assert get_yaml().load() == single_data
def test_multi_load(self) -> None:
data = list(get_yaml().load_all(multi_doc))
assert data == multi_doc_data
def test_single_dump(self, capsys: Any) -> None:
get_yaml().dump(single_data, sys.stdout)
out, err = capsys.readouterr()
assert out == single_doc
def test_multi_dump(self, capsys: Any) -> None:
yaml = get_yaml()
yaml.explicit_start = True
yaml.dump_all(multi_doc_data, sys.stdout)
out, err = capsys.readouterr()
assert out == multi_doc
class TestContextManager:
def test_single_dump(self, capsys: Any) -> None:
from ruyaml import YAML
with YAML(output=sys.stdout) as yaml:
yaml.dump(single_data)
out, err = capsys.readouterr()
print(err)
assert out == single_doc
def test_multi_dump(self, capsys: Any) -> None:
from ruyaml import YAML
with YAML(output=sys.stdout) as yaml:
yaml.explicit_start = True
yaml.dump(multi_doc_data[0])
yaml.dump(multi_doc_data[1])
out, err = capsys.readouterr()
print(err)
assert out == multi_doc
# input is not as simple with a context manager
# you need to indicate what you expect, hence load and load_all
# @pytest.mark.xfail(strict=True)
# def test_single_load(self):
# from ruyaml import YAML
# with YAML(input=single_doc) as yaml:
# assert yaml.load() == single_data
#
# @pytest.mark.xfail(strict=True)
# def test_multi_load(self):
# from ruyaml import YAML
# with YAML(input=multi_doc) as yaml:
# for idx, data in enumerate(yaml.load()):
# assert data == multi_doc_data[0]
def test_roundtrip(self, capsys: Any) -> None:
from ruyaml import YAML
with YAML(output=sys.stdout) as yaml:
yaml.explicit_start = True
for data in yaml.load_all(multi_doc):
yaml.dump(data)
out, err = capsys.readouterr()
print(err)
assert out == multi_doc
python-ruyaml-0.92.1/_test/test_copy.py 0000664 0000000 0000000 00000007211 15056754172 0020151 0 ustar 00root root 0000000 0000000 # coding: utf-8
"""
Testing copy and deepcopy, instigated by Issue 84 (Peter Amstutz)
"""
import copy
import pytest # type: ignore # NOQA
from roundtrip import dedent, round_trip_dump, round_trip_load # type: ignore
class TestDeepCopy:
def test_preserve_flow_style_simple(self) -> None:
x = dedent(
"""\
{foo: bar, baz: quux}
"""
)
data = round_trip_load(x)
data_copy = copy.deepcopy(data)
y = round_trip_dump(data_copy)
print('x [{}]'.format(x))
print('y [{}]'.format(y))
assert y == x
assert data.fa.flow_style() == data_copy.fa.flow_style()
def test_deepcopy_flow_style_nested_dict(self) -> None:
x = dedent(
"""\
a: {foo: bar, baz: quux}
"""
)
data = round_trip_load(x)
assert data['a'].fa.flow_style() is True
data_copy = copy.deepcopy(data)
assert data_copy['a'].fa.flow_style() is True
data_copy['a'].fa.set_block_style()
assert data['a'].fa.flow_style() != data_copy['a'].fa.flow_style()
assert data['a'].fa._flow_style is True
assert data_copy['a'].fa._flow_style is False
y = round_trip_dump(data_copy)
print('x [{}]'.format(x))
print('y [{}]'.format(y))
assert y == dedent(
"""\
a:
foo: bar
baz: quux
"""
)
def test_deepcopy_flow_style_nested_list(self) -> None:
x = dedent(
"""\
a: [1, 2, 3]
"""
)
data = round_trip_load(x)
assert data['a'].fa.flow_style() is True
data_copy = copy.deepcopy(data)
assert data_copy['a'].fa.flow_style() is True
data_copy['a'].fa.set_block_style()
assert data['a'].fa.flow_style() != data_copy['a'].fa.flow_style()
assert data['a'].fa._flow_style is True
assert data_copy['a'].fa._flow_style is False
y = round_trip_dump(data_copy)
print('x [{}]'.format(x))
print('y [{}]'.format(y))
assert y == dedent(
"""\
a:
- 1
- 2
- 3
"""
)
class TestCopy:
def test_copy_flow_style_nested_dict(self) -> None:
x = dedent(
"""\
a: {foo: bar, baz: quux}
"""
)
data = round_trip_load(x)
assert data['a'].fa.flow_style() is True
data_copy = copy.copy(data)
assert data_copy['a'].fa.flow_style() is True
data_copy['a'].fa.set_block_style()
assert data['a'].fa.flow_style() == data_copy['a'].fa.flow_style()
assert data['a'].fa._flow_style is False
assert data_copy['a'].fa._flow_style is False
y = round_trip_dump(data_copy)
z = round_trip_dump(data)
assert y == z
assert y == dedent(
"""\
a:
foo: bar
baz: quux
"""
)
def test_copy_flow_style_nested_list(self) -> None:
x = dedent(
"""\
a: [1, 2, 3]
"""
)
data = round_trip_load(x)
assert data['a'].fa.flow_style() is True
data_copy = copy.copy(data)
assert data_copy['a'].fa.flow_style() is True
data_copy['a'].fa.set_block_style()
assert data['a'].fa.flow_style() == data_copy['a'].fa.flow_style()
assert data['a'].fa._flow_style is False
assert data_copy['a'].fa._flow_style is False
y = round_trip_dump(data_copy)
print('x [{}]'.format(x))
print('y [{}]'.format(y))
assert y == dedent(
"""\
a:
- 1
- 2
- 3
"""
)
python-ruyaml-0.92.1/_test/test_cyaml.py 0000664 0000000 0000000 00000004115 15056754172 0020304 0 ustar 00root root 0000000 0000000 # coding: utf-8
import platform
import sys
from textwrap import dedent
import pytest # type: ignore # NOQA
NO_CLIB_VER = (3, 12)
@pytest.mark.skipif( # type: ignore
platform.python_implementation() in ['Jython', 'PyPy'],
reason='Jython throws RepresenterError',
)
def test_load_cyaml() -> None:
print("???????????????????????", platform.python_implementation())
import ruyaml
if sys.version_info >= NO_CLIB_VER:
return
yaml = ruyaml.YAML(typ='safe', pure=False)
assert ruyaml.__with_libyaml__
yaml.load('abc: 1')
@pytest.mark.skipif(
sys.version_info >= NO_CLIB_VER # type: ignore
or platform.python_implementation() in ['Jython', 'PyPy'],
reason='no _PyGC_FINALIZED',
)
def test_dump_cyaml() -> None:
import ruyaml
if sys.version_info >= NO_CLIB_VER:
return
data = {'a': 1, 'b': 2}
yaml = ruyaml.YAML(typ='safe', pure=False)
yaml.default_flow_style = False
yaml.allow_unicode = True
buf = ruyaml.compat.StringIO()
yaml.dump(data, buf)
assert buf.getvalue() == 'a: 1\nb: 2\n'
@pytest.mark.skipif( # type: ignore
platform.python_implementation() in ['Jython', 'PyPy'],
reason='not available',
)
def test_load_cyaml_1_2() -> None:
# issue 155
import ruyaml
if sys.version_info >= NO_CLIB_VER:
return
assert ruyaml.__with_libyaml__
inp = dedent(
"""\
%YAML 1.2
---
num_epochs: 70000
"""
)
yaml = ruyaml.YAML(typ='safe')
yaml.load(inp)
@pytest.mark.skipif( # type: ignore
platform.python_implementation() in ['Jython', 'PyPy'],
reason='not available',
)
def test_dump_cyaml_1_2() -> None:
# issue 155
from io import StringIO
import ruyaml
if sys.version_info >= NO_CLIB_VER:
return
assert ruyaml.__with_libyaml__
yaml = ruyaml.YAML(typ='safe')
yaml.version = (1, 2)
yaml.default_flow_style = False
data = {'a': 1, 'b': 2}
exp = dedent(
"""\
%YAML 1.2
---
a: 1
b: 2
"""
)
buf = StringIO()
yaml.dump(data, buf)
assert buf.getvalue() == exp
python-ruyaml-0.92.1/_test/test_dataclass.py 0000664 0000000 0000000 00000011157 15056754172 0021142 0 ustar 00root root 0000000 0000000 from __future__ import annotations
from dataclasses import InitVar, dataclass, fields # NOQA
from io import BytesIO
from textwrap import dedent
from typing import ClassVar, Union
class TestDataClasses:
def test_1(self) -> None:
from ruyaml import YAML
yaml = YAML()
@yaml.register_class
@dataclass
class DC:
abc: int
klm: int
xyz: int = 0
def __post_init__(self) -> None:
self.xyz = self.abc + self.klm
dc = DC(abc=5, klm=42)
assert dc.xyz == 47
yaml_str = dedent(
"""\
!DC
abc: 13
klm: 37
"""
)
dc2 = yaml.load(yaml_str)
assert dc2.xyz == 50
def test_yamltag(self) -> None:
from ruyaml import YAML
yaml = YAML()
@yaml.register_class
@dataclass
class DC:
yaml_tag: ClassVar = '!dc_example'
abc: int
klm: int
dc = DC(abc=5, klm=42)
buf = BytesIO()
yaml.dump(dc, buf)
assert (
buf.getvalue()
== dedent(
"""\
!dc_example
abc: 5
klm: 42
"""
).encode('utf-8')
)
dc2 = yaml.load(buf.getvalue())
assert len(fields(dc2)) == 2 # class var is not a field
assert dc2.abc == dc.abc
assert dc2.klm == dc.klm
def test_initvar(self) -> None:
from ruyaml import YAML
yaml = YAML()
@yaml.register_class
@dataclass
class DC:
abc: int
klm: int
xyz: InitVar[Union[str, None]] = None
def __post_init__(self, xyz: Union[str, None]) -> None:
# assert xyz == self.xyz # self.xyz is always None
if xyz is not None:
self.klm += len(xyz)
dc = DC(abc=5, klm=42, xyz='provided')
# this actually doesn't raise an attribute error, I would have expected it not to work
# at all, but it has the default value
assert dc.xyz is None # type: ignore
buf = BytesIO()
yaml.dump(dc, buf)
assert (
buf.getvalue()
== dedent(
"""\
!DC
abc: 5
klm: 50
"""
).encode('utf-8')
)
yaml_str = dedent(
"""\
!DC
abc: 18
klm: 55
xyz: some string
"""
)
dc2 = yaml.load(yaml_str)
assert dc2.xyz is None
assert dc2.klm == 55 + len('some string')
def test_initvar_not_in_yaml(self) -> None:
from ruyaml import YAML
yaml = YAML()
@yaml.register_class
@dataclass
class DC:
abc: int
klm: int
xyz: InitVar[Union[str, None]] = 'hello'
def __post_init__(self, xyz: Union[str, None]) -> None:
# assert xyz == self.xyz # self.xyz is always None
if xyz is not None:
self.klm += len(xyz)
dc = DC(abc=5, klm=42, xyz='provided')
assert dc.abc == 5
assert dc.xyz == 'hello' # type: ignore
buf = BytesIO()
yaml.dump(dc, buf)
assert (
buf.getvalue()
== dedent(
"""\
!DC
abc: 5
klm: 50
"""
).encode('utf-8')
)
yaml_str = dedent(
"""\
!DC
abc: 18
klm: 55
"""
)
dc2 = yaml.load(yaml_str)
assert dc2.xyz == 'hello'
assert dc2.klm == 55 + len('hello')
def test_collection_field(self) -> None:
# https://stackoverflow.com/a/77485786/1307905
from dataclasses import dataclass
import ruyaml
@dataclass
class Msg:
id: int
desc: str
fields: list[Field]
def __post_init__(self) -> None:
idx: int = 0
for field in self.fields: # why is this empty??
field.index = idx
idx += field.size
@dataclass
class Field:
id: int
name: str
units: str
size: int
index: int = -1
yaml = ruyaml.YAML()
yaml.register_class(Msg)
yaml.register_class(Field)
msg: Msg = yaml.load(
"""\
!Msg
id: 1
desc: status
fields:
- !Field
id: 1
name: Temp
units: degC
size: 2
"""
)
assert msg.fields[0].index != -1
python-ruyaml-0.92.1/_test/test_datetime.py 0000664 0000000 0000000 00000013140 15056754172 0020771 0 ustar 00root root 0000000 0000000 # coding: utf-8
"""
http://yaml.org/type/timestamp.html specifies the regexp to use
for datetime.date and datetime.datetime construction. Date is simple
but datetime can have 'T' or 't' as well as 'Z' or a timezone offset (in
hours and minutes). This information was originally used to create
a UTC datetime and then discarded
examples from the above:
canonical: 2001-12-15T02:59:43.1Z
valid iso8601: 2001-12-14t21:59:43.10-05:00
space separated: 2001-12-14 21:59:43.10 -5
no time zone (Z): 2001-12-15 2:59:43.10
date (00:00:00Z): 2002-12-14
Please note that a fraction can only be included if not equal to 0
"""
import copy
import sys
from datetime import datetime as DateTime
from datetime import timedelta as TimeDelta
from datetime import timezone as TimeZone
import pytest # type: ignore # NOQA
from roundtrip import ( # type: ignore # NOQA
dedent,
round_trip,
round_trip_dump,
round_trip_load,
)
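# Illustrative sketch (hypothetical helper, not a test): round-trip loading of a
# timezone-aware timestamp taken from the examples in the docstring above. The
# loaded value is assumed to be a datetime subclass; the tests below verify the
# parsing and the notation written back out in much more detail.
def _example_timestamp_load() -> None:
    data = round_trip_load(
        dedent(
            """\
            - 2001-12-14t21:59:43.10-05:00
            """
        )
    )
    assert isinstance(data[0], DateTime)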
class TestDateTime:
def test_date_only(self) -> None:
inp = """
- 2011-10-02
"""
exp = """
- 2011-10-02
"""
round_trip(inp, exp)
def test_zero_fraction(self) -> None:
inp = """
- 2011-10-02 16:45:00.0
"""
exp = """
- 2011-10-02 16:45:00
"""
round_trip(inp, exp)
def test_long_fraction(self) -> None:
inp = """
- 2011-10-02 16:45:00.1234 # expand with zeros
- 2011-10-02 16:45:00.123456
- 2011-10-02 16:45:00.12345612 # round to microseconds
- 2011-10-02 16:45:00.1234565 # round up
- 2011-10-02 16:45:00.12345678 # round up
"""
exp = """
- 2011-10-02 16:45:00.123400 # expand with zeros
- 2011-10-02 16:45:00.123456
- 2011-10-02 16:45:00.123456 # round to microseconds
- 2011-10-02 16:45:00.123457 # round up
- 2011-10-02 16:45:00.123457 # round up
"""
round_trip(inp, exp)
def test_canonical(self) -> None:
inp = """
- 2011-10-02T16:45:00.1Z
"""
exp = """
- 2011-10-02T16:45:00.100000Z
"""
round_trip(inp, exp)
def test_spaced_timezone(self) -> None:
inp = """
- 2011-10-02T11:45:00 -5
"""
exp = """
- 2011-10-02T11:45:00-5
"""
round_trip(inp, exp)
def test_normal_timezone(self) -> None:
round_trip(
"""
- 2011-10-02T11:45:00-5
- 2011-10-02 11:45:00-5
- 2011-10-02T11:45:00-05:00
- 2011-10-02 11:45:00-05:00
"""
)
def test_no_timezone(self) -> None:
inp = """
- 2011-10-02 6:45:00
"""
exp = """
- 2011-10-02 06:45:00
"""
round_trip(inp, exp)
def test_explicit_T(self) -> None:
inp = """
- 2011-10-02T16:45:00
"""
exp = """
- 2011-10-02T16:45:00
"""
round_trip(inp, exp)
def test_explicit_t(self) -> None: # to upper
inp = """
- 2011-10-02t16:45:00
"""
exp = """
- 2011-10-02T16:45:00
"""
round_trip(inp, exp)
def test_no_T_multi_space(self) -> None:
inp = """
- 2011-10-02 16:45:00
"""
exp = """
- 2011-10-02 16:45:00
"""
round_trip(inp, exp)
def test_iso(self) -> None:
round_trip(
"""
- 2011-10-02T15:45:00+01:00
"""
)
def test_zero_tz(self) -> None:
round_trip(
"""
- 2011-10-02T15:45:00+0
"""
)
def test_issue_45(self) -> None:
round_trip(
"""
dt: 2016-08-19T22:45:47Z
"""
)
def test_issue_366(self) -> None:
import io
import ruyaml
round_trip(
"""
[2021-02-01 22:34:48.696868-03:00]
"""
)
yaml = ruyaml.YAML()
dd = DateTime(
2021, 2, 1, 22, 34, 48, 696868, TimeZone(TimeDelta(hours=-3), name='')
)
buf = io.StringIO()
yaml.dump(dd, buf)
assert buf.getvalue() == '2021-02-01 22:34:48.696868-03:00\n...\n'
rd = yaml.load(buf.getvalue())
assert rd == dd
def test_deepcopy_datestring(self) -> None:
# reported by Quuxplusone, http://stackoverflow.com/a/41577841/1307905
x = dedent(
"""\
foo: 2016-10-12T12:34:56
"""
)
data = copy.deepcopy(round_trip_load(x))
assert round_trip_dump(data) == x
def test_fraction_overflow(self) -> None:
# reported (indirectly) by Luís Ferreira
# https://sourceforge.net/p/ruyaml/tickets/414/
inp = dedent(
"""\
- 2022-01-02T12:34:59.9999994
- 2022-01-02T12:34:59.9999995
"""
)
exp = dedent(
"""\
- 2022-01-02T12:34:59.999999
- 2022-01-02T12:35:00
"""
)
round_trip(inp, exp)
def Xtest_tzinfo(self) -> None:
import ruyaml
yaml = ruyaml.YAML()
dts = '2011-10-02T16:45:00.930619+01:00'
d = yaml.load(dts)
print('d', repr(d), d._yaml)
yaml.dump(dict(x=d), sys.stdout)
print('----')
# dx = DateTime.fromisoformat(dts)
# print('dx', dx, repr(dx))
dd = DateTime(
2011,
10,
2,
16,
45,
00,
930619,
TimeZone(TimeDelta(hours=1, minutes=0), name='+01:00'),
) # NOQA
yaml.dump([dd], sys.stdout)
print('dd', dd, dd.tzinfo)
raise AssertionError()
python-ruyaml-0.92.1/_test/test_deprecation.py 0000664 0000000 0000000 00000010063 15056754172 0021473 0 ustar 00root root 0000000 0000000 # coding: utf-8
import sys
import pytest # type:ignore # NOQA
last_to_warn = (0, 17, 40)
@pytest.mark.skipif(
sys.version_info < (3, 7) or sys.version_info >= (3, 9), # type: ignore
reason='collections not available?',
)
def test_collections_deprecation() -> None:
with pytest.warns(DeprecationWarning):
from collections import Hashable # type: ignore # NOQA
class TestFunctionDeprecation:
def test_deprecation_scan(self) -> None:
import ruyaml
if ruyaml.version_info <= last_to_warn:
with pytest.warns(PendingDeprecationWarning):
data = ruyaml.load('a: 42') # NOQA
else:
with pytest.raises(AttributeError):
data = ruyaml.load('a: 42') # NOQA
def test_deprecation_parse(self) -> None:
import ruyaml
if ruyaml.version_info <= last_to_warn:
data = ruyaml.parse('a: 42') # NOQA
else:
with pytest.raises(AttributeError):
data = ruyaml.parse('a: 42') # NOQA
def test_deprecation_compose(self) -> None:
import ruyaml
if ruyaml.version_info <= last_to_warn:
with pytest.warns(PendingDeprecationWarning):
data = ruyaml.compose('a: 42') # NOQA
else:
with pytest.raises(AttributeError):
data = ruyaml.parse('a: 42') # NOQA
def test_deprecation_compose_all(self) -> None:
import ruyaml
if ruyaml.version_info <= last_to_warn:
data = ruyaml.compose_all('a: 42') # NOQA
else:
with pytest.raises(AttributeError):
data = ruyaml.parse('a: 42') # NOQA
def test_deprecation_load(self) -> None:
import ruyaml
if ruyaml.version_info <= last_to_warn:
with pytest.warns(PendingDeprecationWarning):
data = ruyaml.load('a: 42') # NOQA
else:
with pytest.raises(AttributeError):
data = ruyaml.parse('a: 42') # NOQA
def test_deprecation_load_all(self) -> None:
import ruyaml
if ruyaml.version_info <= last_to_warn:
data = ruyaml.load_all('a: 42') # NOQA
else:
with pytest.raises(AttributeError):
data = ruyaml.parse('a: 42') # NOQA
def test_deprecation_safe_load(self) -> None:
import ruyaml
if ruyaml.version_info <= last_to_warn:
with pytest.warns(PendingDeprecationWarning):
data = ruyaml.safe_load('a: 42') # NOQA
else:
with pytest.raises(AttributeError):
data = ruyaml.parse('a: 42') # NOQA
def test_deprecation_round_trip_load(self) -> None:
import ruyaml
if ruyaml.version_info <= last_to_warn:
with pytest.warns(PendingDeprecationWarning):
data = ruyaml.round_trip_load('a: 42') # NOQA
else:
with pytest.raises(AttributeError):
data = ruyaml.parse('a: 42') # NOQA
class TestYamlTyp:
def test_unsafe_deprecation(self) -> None:
import ruyaml
if ruyaml.version_info < (0, 18, 0):
yaml = ruyaml.YAML(typ='unsafe')
else:
with pytest.warns(PendingDeprecationWarning):
# with pytest.raises(SystemExit):
yaml = ruyaml.YAML(typ='unsafe') # NOQA
def test_full_load_error(self) -> None:
import ruyaml
yaml = ruyaml.YAML(typ='full', pure=True)
with pytest.raises(ruyaml.error.YAMLError):
yaml.load('a: b')
yaml = ruyaml.YAML(typ='full') # C scanner/loader
with pytest.raises(ruyaml.error.YAMLError):
yaml.load('a: b')
def test_full_rt(self) -> None:
import io
import os
import ruyaml
yaml = ruyaml.YAML(typ='full', pure=True)
buf = io.BytesIO()
yaml.dump([{'fun': os.system}], buf)
print(buf.getvalue())
yaml = ruyaml.YAML()
data = yaml.load(buf.getvalue())
print(data)
ts = data[0]['fun']
assert 'posix.system' in str(ts.tag)
python-ruyaml-0.92.1/_test/test_docinfo.py 0000664 0000000 0000000 00000002436 15056754172 0020624 0 ustar 00root root 0000000 0000000 import pytest # type: ignore # NOQA
from ruyaml.docinfo import DocInfo, Tag, Version, version # NOQA
class TestVersion:
def test_create_from_integers(self) -> None:
v = Version(1, 2)
assert v.major == 1
assert v.minor == 2
def test_create_using_generator(self) -> None:
v = version(1, 2)
assert isinstance(v, Version)
assert v.major == 1
assert v.minor == 2
def test_create_from_string_using_generator(self) -> None:
v = version('1.2')
assert isinstance(v, Version)
assert v.major == 1
assert v.minor == 2
def test_create_from_string_extra_param(self) -> None:
with pytest.raises(AssertionError):
_ = version('1.2', 3)
def test_create_from_single_integer(self) -> None:
with pytest.raises(AssertionError):
_ = version(1)
with pytest.raises(TypeError):
_ = Version(1) # type: ignore
class TestDocInfo:
def test_empty(self) -> None:
di = DocInfo()
assert di.requested_version is None
assert di.doc_version is None
assert di.tags == []
def test_versions(self) -> None:
di = DocInfo(version('1.2'), version('1.1'))
assert di.requested_version > di.doc_version # type: ignore
python-ruyaml-0.92.1/_test/test_documents.py 0000664 0000000 0000000 00000005162 15056754172 0021203 0 ustar 00root root 0000000 0000000 # coding: utf-8
import pytest # type: ignore # NOQA
from roundtrip import ( # type: ignore
round_trip,
round_trip_dump_all,
round_trip_load_all,
)
class TestDocument:
def test_single_doc_begin_end(self) -> None:
inp = """\
---
- a
- b
...
"""
round_trip(inp, explicit_start=True, explicit_end=True)
def test_multi_doc_begin_end(self) -> None:
inp = """\
---
- a
...
---
- b
...
"""
docs = list(round_trip_load_all(inp))
assert docs == [['a'], ['b']]
out = round_trip_dump_all(docs, explicit_start=True, explicit_end=True)
assert out == '---\n- a\n...\n---\n- b\n...\n'
def test_multi_doc_no_start(self) -> None:
inp = """\
- a
...
---
- b
...
"""
docs = list(round_trip_load_all(inp))
assert docs == [['a'], ['b']]
def test_multi_doc_no_end(self) -> None:
inp = """\
- a
---
- b
"""
docs = list(round_trip_load_all(inp))
assert docs == [['a'], ['b']]
def test_multi_doc_ends_only(self) -> None:
# this is ok in 1.2
inp = """\
- a
...
- b
...
"""
docs = list(round_trip_load_all(inp, version=(1, 2)))
assert docs == [['a'], ['b']]
def test_single_scalar_comment(self) -> None:
import ruyaml as yaml
inp = """\
one # comment
two
"""
with pytest.raises(yaml.parser.ParserError):
d = list(round_trip_load_all(inp, version=(1, 2))) # NOQA
def test_scalar_after_seq_document(self) -> None:
import ruyaml as yaml
inp = """\
[ 42 ]
hello
"""
with pytest.raises(yaml.parser.ParserError):
d = list(round_trip_load_all(inp, version=(1, 2))) # NOQA
def test_yunk_after_explicit_document_end(self) -> None:
import ruyaml as yaml
inp = """\
hello: world
... this is no comment
"""
with pytest.raises(yaml.parser.ParserError):
d = list(round_trip_load_all(inp, version=(1, 2))) # NOQA
def test_multi_doc_ends_only_1_1(self) -> None:
import ruyaml as yaml
# this is not ok in 1.1
with pytest.raises(yaml.parser.ParserError):
inp = """\
- a
...
- b
...
"""
docs = list(round_trip_load_all(inp, version=(1, 1)))
assert docs == [['a'], ['b']] # not True, but not reached
python-ruyaml-0.92.1/_test/test_fail.py 0000664 0000000 0000000 00000014551 15056754172 0020117 0 ustar 00root root 0000000 0000000 # coding: utf-8
# there is some work to do
# provide a failing test xyz and a non-failing xyz_no_fail (to see
# what the current, still failing, output is).
# on a fix of ruyaml, move the marked test to the appropriate test (without mark)
# and remove the xyz_no_fail
import pytest # type: ignore
from roundtrip import ( # type: ignore
dedent,
round_trip,
round_trip_dump,
round_trip_load,
)
class TestCommentFailures:
@pytest.mark.xfail(strict=True) # type: ignore
def test_set_comment_before_tag(self) -> None:
# no comments before tags
round_trip(
"""
# the beginning
!!set
# or this one?
? a
# next one is B (lowercase)
? b # You see? Promised you.
? c
# this is the end
"""
)
def test_set_comment_before_tag_no_fail(self) -> None:
# no comments before tags
inp = """
# the beginning
!!set
# or this one?
? a
# next one is B (lowercase)
? b # You see? Promised you.
? c
# this is the end
"""
assert round_trip_dump(round_trip_load(inp)) == dedent(
"""
!!set
# the beginning
# or this one?
? a
# next one is B (lowercase)
? b # You see? Promised you.
? c
# this is the end
"""
)
@pytest.mark.xfail(strict=True) # type: ignore
def test_comment_dash_line(self) -> None:
round_trip(
"""
- # abc
a: 1
b: 2
"""
)
def test_comment_dash_line_fail(self) -> None:
x = """
- # abc
a: 1
b: 2
"""
data = round_trip_load(x)
# this is not nice
assert round_trip_dump(data) == dedent(
"""
# abc
- a: 1
b: 2
"""
)
class TestIndentFailures:
@pytest.mark.xfail(strict=True) # type: ignore
def test_indent_not_retained(self) -> None:
round_trip(
"""
verbosity: 1 # 0 is minimal output, -1 none
base_url: http://gopher.net
special_indices: [1, 5, 8]
also_special:
- a
- 19
- 32
asia and europe: &asia_europe
Turkey: Ankara
Russia: Moscow
countries:
Asia:
<<: *asia_europe
Japan: Tokyo # 東京
Europe:
<<: *asia_europe
Spain: Madrid
Italy: Rome
Antarctica:
- too cold
"""
)
def test_indent_not_retained_no_fail(self) -> None:
inp = """
verbosity: 1 # 0 is minimal output, -1 none
base_url: http://gopher.net
special_indices: [1, 5, 8]
also_special:
- a
- 19
- 32
asia and europe: &asia_europe
Turkey: Ankara
Russia: Moscow
countries:
Asia:
<<: *asia_europe
Japan: Tokyo # 東京
Europe:
<<: *asia_europe
Spain: Madrid
Italy: Rome
Antarctica:
- too cold
"""
assert round_trip_dump(round_trip_load(inp), indent=4) == dedent(
"""
verbosity: 1 # 0 is minimal output, -1 none
base_url: http://gopher.net
special_indices: [1, 5, 8]
also_special:
- a
- 19
- 32
asia and europe: &asia_europe
Turkey: Ankara
Russia: Moscow
countries:
Asia:
<<: *asia_europe
Japan: Tokyo # 東京
Europe:
<<: *asia_europe
Spain: Madrid
Italy: Rome
Antarctica:
- too cold
"""
)
def Xtest_indent_top_level_no_fail(self) -> None:
inp = """
- a:
- b
"""
round_trip(inp, indent=4)
class TestTagFailures:
@pytest.mark.xfail(strict=True) # type: ignore
def test_standard_short_tag(self) -> None:
round_trip(
"""\
!!map
name: Anthon
location: Germany
language: python
"""
)
def test_standard_short_tag_no_fail(self) -> None:
inp = """
!!map
name: Anthon
location: Germany
language: python
"""
exp = """
name: Anthon
location: Germany
language: python
"""
assert round_trip_dump(round_trip_load(inp)) == dedent(exp)
class TestFlowValues:
def test_flow_value_with_colon(self) -> None:
inp = """\
{a: bcd:efg}
"""
round_trip(inp)
def test_flow_value_with_colon_quoted(self) -> None:
inp = """\
{a: 'bcd:efg'}
"""
round_trip(inp, preserve_quotes=True)
class TestMappingKey:
def test_simple_mapping_key(self) -> None:
inp = """\
{a: 1, b: 2}: hello world
"""
round_trip(inp, preserve_quotes=True, dump_data=False)
def test_set_simple_mapping_key(self) -> None:
from ruyaml.comments import CommentedKeyMap
d = {CommentedKeyMap([('a', 1), ('b', 2)]): 'hello world'}
exp = dedent(
"""\
{a: 1, b: 2}: hello world
"""
)
assert round_trip_dump(d) == exp
def test_change_key_simple_mapping_key(self) -> None:
from ruyaml.comments import CommentedKeyMap
inp = """\
{a: 1, b: 2}: hello world
"""
d = round_trip_load(inp, preserve_quotes=True)
d[CommentedKeyMap([('b', 1), ('a', 2)])] = d.pop(
CommentedKeyMap([('a', 1), ('b', 2)])
)
exp = dedent(
"""\
{b: 1, a: 2}: hello world
"""
)
assert round_trip_dump(d) == exp
def test_change_value_simple_mapping_key(self) -> None:
from ruyaml.comments import CommentedKeyMap
inp = """\
{a: 1, b: 2}: hello world
"""
d = round_trip_load(inp, preserve_quotes=True)
d = {CommentedKeyMap([('a', 1), ('b', 2)]): 'goodbye'}
exp = dedent(
"""\
{a: 1, b: 2}: goodbye
"""
)
assert round_trip_dump(d) == exp
python-ruyaml-0.92.1/_test/test_float.py 0000664 0000000 0000000 00000004317 15056754172 0020310 0 ustar 00root root 0000000 0000000 # coding: utf-8
import pytest # type: ignore # NOQA
from roundtrip import ( # type: ignore # NOQA
dedent,
round_trip,
round_trip_dump,
round_trip_load,
)
# http://yaml.org/type/int.html is where underscores in integers are defined
class TestFloat:
def test_round_trip_non_exp(self) -> None:
data = round_trip(
"""\
- 1.0
- 1.00
- 23.100
- -1.0
- -1.00
- -23.100
- 42.
- -42.
- +42.
- .5
- +.5
- -.5
- !!float '42'
- !!float '-42'
"""
)
print(data)
assert 0.999 < data[0] < 1.001
assert 0.999 < data[1] < 1.001
assert 23.099 < data[2] < 23.101
assert 0.999 < -data[3] < 1.001
assert 0.999 < -data[4] < 1.001
assert 23.099 < -data[5] < 23.101
assert 41.999 < data[6] < 42.001
assert 41.999 < -data[7] < 42.001
assert 41.999 < data[8] < 42.001
assert 0.49 < data[9] < 0.51
assert 0.49 < data[10] < 0.51
assert -0.51 < data[11] < -0.49
assert 41.99 < data[12] < 42.01
assert 41.99 < -data[13] < 42.01
def test_round_trip_zeros_0(self) -> None:
data = round_trip(
"""\
- 0.
- +0.
- -0.
- 0.0
- +0.0
- -0.0
- 0.00
- +0.00
- -0.00
"""
)
print(data)
for d in data:
assert -0.00001 < d < 0.00001
def test_round_trip_exp_trailing_dot(self) -> None:
data = round_trip(
"""\
- 3.e4
"""
)
print(data)
def test_yaml_1_1_no_dot(self) -> None:
from ruyaml.error import MantissaNoDotYAML1_1Warning
with pytest.warns(MantissaNoDotYAML1_1Warning):
round_trip_load(
"""\
%YAML 1.1
---
- 1e6
"""
)
class TestCalculations:
def test_mul_00(self) -> None:
# issue 149 reported by jan.brezina@tul.cz
d = round_trip_load(
"""\
- 0.1
"""
)
d[0] *= -1
x = round_trip_dump(d)
assert x == '- -0.1\n'
python-ruyaml-0.92.1/_test/test_flowsequencekey.py 0000664 0000000 0000000 00000000720 15056754172 0022406 0 ustar 00root root 0000000 0000000 # coding: utf-8
"""
test flow style sequences as keys roundtrip
"""
# import pytest
from roundtrip import round_trip # type: ignore
class TestFlowStyleSequenceKey:
def test_so_39595807(self) -> None:
inp = """\
%YAML 1.2
---
[2, 3, 4]:
a:
- 1
- 2
b: Hello World!
c: 'Voilà!'
"""
round_trip(inp, preserve_quotes=True, explicit_start=True, version=(1, 2))
python-ruyaml-0.92.1/_test/test_indentation.py 0000664 0000000 0000000 00000021220 15056754172 0021507 0 ustar 00root root 0000000 0000000 # coding: utf-8
from typing import Any
import pytest # type: ignore # NOQA
from roundtrip import ( # type: ignore # NOQA
YAML,
dedent,
round_trip,
round_trip_dump,
round_trip_load,
)
def rt(s: str) -> str:
res = round_trip_dump(round_trip_load(s))
assert res is not None
return res.strip() + '\n' # type: ignore
class TestIndent:
def test_roundtrip_inline_list(self) -> None:
s = 'a: [a, b, c]\n'
output = rt(s)
assert s == output
def test_roundtrip_mapping_of_inline_lists(self) -> None:
s = dedent(
"""\
a: [a, b, c]
j: [k, l, m]
"""
)
output = rt(s)
assert s == output
def test_roundtrip_mapping_of_inline_lists_comments(self) -> None:
s = dedent(
"""\
# comment A
a: [a, b, c]
# comment B
j: [k, l, m]
"""
)
output = rt(s)
assert s == output
def test_roundtrip_mapping_of_inline_sequence_eol_comments(self) -> None:
s = dedent(
"""\
# comment A
a: [a, b, c] # comment B
j: [k, l, m] # comment C
"""
)
output = rt(s)
assert s == output
# first test by explicitly setting flow style
def test_added_inline_list(self) -> None:
s1 = dedent(
"""
a:
- b
- c
- d
"""
)
s = 'a: [b, c, d]\n'
data = round_trip_load(s1)
val = data['a']
val.fa.set_flow_style()
# print(type(val), '_yaml_format' in dir(val))
output = round_trip_dump(data)
assert s == output
# ############ flow mappings
def test_roundtrip_flow_mapping(self) -> None:
s = dedent(
"""\
- {a: 1, b: hallo}
- {j: fka, k: 42}
"""
)
data = round_trip_load(s)
output = round_trip_dump(data)
assert s == output
def test_roundtrip_sequence_of_inline_mappings_eol_comments(self) -> None:
s = dedent(
"""\
# comment A
- {a: 1, b: hallo} # comment B
- {j: fka, k: 42} # comment C
"""
)
output = rt(s)
assert s == output
def test_indent_top_level(self) -> None:
inp = """
- a:
- b
"""
round_trip(inp, indent=4)
def test_set_indent_5_block_list_indent_1(self) -> None:
inp = """
a:
- b: c
- 1
- d:
- 2
"""
round_trip(inp, indent=5, block_seq_indent=1)
def test_set_indent_4_block_list_indent_2(self) -> None:
inp = """
a:
- b: c
- 1
- d:
- 2
"""
round_trip(inp, indent=4, block_seq_indent=2)
def test_set_indent_3_block_list_indent_0(self) -> None:
inp = """
a:
- b: c
- 1
- d:
- 2
"""
round_trip(inp, indent=3, block_seq_indent=0)
def Xtest_set_indent_3_block_list_indent_2(self) -> None:
inp = """
a:
-
b: c
-
1
-
d:
-
2
"""
round_trip(inp, indent=3, block_seq_indent=2)
def test_set_indent_3_block_list_indent_2(self) -> None:
inp = """
a:
- b: c
- 1
- d:
- 2
"""
round_trip(inp, indent=3, block_seq_indent=2)
def Xtest_set_indent_2_block_list_indent_2(self) -> None:
inp = """
a:
-
b: c
-
1
-
d:
-
2
"""
round_trip(inp, indent=2, block_seq_indent=2)
# this is how it should be: block_seq_indent stretches the indent
def test_set_indent_2_block_list_indent_2(self) -> None:
inp = """
a:
- b: c
- 1
- d:
- 2
"""
round_trip(inp, indent=2, block_seq_indent=2)
# have to set indent!
def test_roundtrip_four_space_indents(self) -> None:
# fmt: off
s = (
'a:\n'
'- foo\n'
'- bar\n'
)
# fmt: on
round_trip(s, indent=4)
def test_roundtrip_four_space_indents_no_fail(self) -> None:
inp = """
a:
- foo
- bar
"""
exp = """
a:
- foo
- bar
"""
assert round_trip_dump(round_trip_load(inp)) == dedent(exp)
class TestYpkgIndent:
def test_00(self) -> None:
inp = """
name : nano
version : 2.3.2
release : 1
homepage : http://www.nano-editor.org
source :
- http://www.nano-editor.org/dist/v2.3/nano-2.3.2.tar.gz :
ff30924807ea289f5b60106be8
license : GPL-2.0
summary : GNU nano is an easy-to-use text editor
builddeps :
- ncurses-devel
description: |
GNU nano is an easy-to-use text editor originally designed
as a replacement for Pico, the ncurses-based editor from the non-free mailer
package Pine (itself now available under the Apache License as Alpine).
"""
round_trip(
inp,
indent=4,
block_seq_indent=2,
top_level_colon_align=True,
prefix_colon=' ',
)
def guess(s: str) -> Any:
from ruyaml.util import load_yaml_guess_indent
x, y, z = load_yaml_guess_indent(dedent(s))
return y, z
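# Illustrative sketch (hypothetical helper): load_yaml_guess_indent() returns the
# loaded data plus the detected indent and block sequence offset, which can be
# fed back into a dump to roughly reproduce the original layout. The re-dump
# parameters used here are an assumption; the tests below only check the
# guessed values themselves.
def _example_guess_and_redump(s: str) -> Any:
    from ruyaml.util import load_yaml_guess_indent

    data, indent, block_seq_indent = load_yaml_guess_indent(dedent(s))
    return round_trip_dump(data, indent=indent, block_seq_indent=block_seq_indent or 0)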
class TestGuessIndent:
def test_guess_20(self) -> None:
inp = """\
a:
- 1
"""
assert guess(inp) == (2, 0)
def test_guess_42(self) -> None:
inp = """\
a:
- 1
"""
assert guess(inp) == (4, 2)
def test_guess_42a(self) -> None:
# block seq indent prevails over nested key indent level
inp = """\
b:
a:
- 1
"""
assert guess(inp) == (4, 2)
def test_guess_3None(self) -> None:
inp = """\
b:
a: 1
"""
assert guess(inp) == (3, None)
def test_guess_with_preserve_quotes(self) -> None:
from ruyaml.scalarstring import DoubleQuotedScalarString
from ruyaml.util import load_yaml_guess_indent
inp = """\
b:
a: "hello world"
"""
yaml = YAML()
yaml.preserve_quotes = True
x, y, z = load_yaml_guess_indent(dedent(inp), yaml=yaml)
assert y == 3
assert z is None
assert isinstance(x['b']['a'], DoubleQuotedScalarString)
class TestSeparateMapSeqIndents:
# using an uncommon indent of 6 with the dash pushed in 3, because a push-in
# of 2 automatically gets you an indent of 4 even if not set
# (see the illustrative sketch after this class)
def test_00(self) -> None:
# old style
yaml = YAML()
yaml.indent = 6
yaml.block_seq_indent = 3
inp = """
a:
- 1
- [1, 2]
"""
yaml.round_trip(inp)
def test_01(self) -> None:
yaml = YAML()
yaml.indent(sequence=6)
yaml.indent(offset=3)
inp = """
a:
- 1
- {b: 3}
"""
yaml.round_trip(inp)
def test_02(self) -> None:
yaml = YAML()
yaml.indent(mapping=5, sequence=6, offset=3)
inp = """
a:
b:
- 1
- [1, 2]
"""
yaml.round_trip(inp)
def test_03(self) -> None:
inp = """
a:
b:
c:
- 1
- [1, 2]
"""
round_trip(inp, indent=4)
def test_04(self) -> None:
yaml = YAML()
yaml.indent(mapping=5, sequence=6)
inp = """
a:
b:
- 1
- [1, 2]
- {d: 3.14}
"""
yaml.round_trip(inp)
def test_issue_51(self) -> None:
yaml = YAML()
# yaml.map_indent = 2 # the default
yaml.indent(sequence=4, offset=2)
yaml.preserve_quotes = True
yaml.round_trip(
"""
role::startup::author::rsyslog_inputs:
imfile:
- ruleset: 'AEM-slinglog'
File: '/opt/aem/author/crx-quickstart/logs/error.log'
startmsg.regex: '^[-+T.:[:digit:]]*'
tag: 'error'
- ruleset: 'AEM-slinglog'
File: '/opt/aem/author/crx-quickstart/logs/stdout.log'
startmsg.regex: '^[-+T.:[:digit:]]*'
tag: 'stdout'
"""
)
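# Illustrative sketch (hypothetical helper): with indent(mapping=4, sequence=6,
# offset=3) nested mapping keys move in by 4, sequence entries by 6, and the
# dash sits 3 columns inside the sequence indent. The layout in the comment
# below is my expectation, not something asserted by the tests above.
def _example_separate_indents() -> Any:
    from ruyaml.compat import StringIO

    yaml = YAML()
    yaml.indent(mapping=4, sequence=6, offset=3)
    buf = StringIO()
    yaml.dump({'a': {'b': [1, 2]}}, buf)
    # expected (roughly):
    # a:
    #     b:
    #        - 1
    #        - 2
    return buf.getvalue()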
# ############ indentation
python-ruyaml-0.92.1/_test/test_int.py 0000664 0000000 0000000 00000001535 15056754172 0017774 0 ustar 00root root 0000000 0000000 # coding: utf-8
import pytest # type: ignore # NOQA
from roundtrip import dedent, round_trip_dump, round_trip_load # type: ignore
# http://yaml.org/type/int.html is where underscores in integers are defined
class TestBinHexOct:
def test_calculate(self) -> None:
# make sure type, leading zero(s) and underscore are preserved
s = dedent(
"""\
- 42
- 0b101010
- 0x_2a
- 0x2A
- 0o00_52
"""
)
d = round_trip_load(s)
for idx, elem in enumerate(d):
elem -= 21
d[idx] = elem
for idx, elem in enumerate(d):
elem *= 2
d[idx] = elem
for idx, elem in enumerate(d):
t = elem
elem **= 2
elem //= t
d[idx] = elem
assert round_trip_dump(d) == s
python-ruyaml-0.92.1/_test/test_issues.py 0000664 0000000 0000000 00000073252 15056754172 0020522 0 ustar 00root root 0000000 0000000 # coding: utf-8
from typing import Any
import pytest # type: ignore # NOQA
# cannot do "from .roundtrip" because of pytest, so mypy cannot find this
from roundtrip import ( # type: ignore
dedent,
na_round_trip,
round_trip,
round_trip_dump,
round_trip_load,
save_and_run,
YAML,
)
class TestIssues:
def test_issue_61(self) -> None:
s = dedent(
"""
def1: &ANCHOR1
key1: value1
def: &ANCHOR
<<: *ANCHOR1
key: value
comb:
<<: *ANCHOR
""",
)
data = round_trip_load(s)
assert str(data['comb']) == str(data['def'])
assert str(data['comb']) == "{'key': 'value', 'key1': 'value1'}"
# def test_issue_82(self, tmpdir):
# program_src = r'''
# import ruyaml as yaml
# import re
#
# class SINumber(yaml.YAMLObject):
# PREFIXES = {'k': 1e3, 'M': 1e6, 'G': 1e9}
# yaml_loader = yaml.Loader
# yaml_dumper = yaml.Dumper
# yaml_tag = '!si'
# yaml_implicit_pattern = re.compile(
# r'^(?P<value>[0-9]+(?:\.[0-9]+)?)(?P<prefix>[kMG])$')
#
# @classmethod
# def from_yaml(cls, loader, node):
# return cls(node.value)
#
# @classmethod
# def to_yaml(cls, dumper, data):
# return dumper.represent_scalar(cls.yaml_tag, str(data))
#
# def __init__(self, *args):
# m = self.yaml_implicit_pattern.match(args[0])
# self.value = float(m.groupdict()['value'])
# self.prefix = m.groupdict()['prefix']
#
# def __str__(self) -> None:
# return str(self.value)+self.prefix
#
# def __int__(self) -> None:
# return int(self.value*self.PREFIXES[self.prefix])
#
# # This fails:
# yaml.add_implicit_resolver(SINumber.yaml_tag, SINumber.yaml_implicit_pattern)
#
# ret = yaml.load("""
# [1,2,3, !si 10k, 100G]
# """, Loader=yaml.Loader)
# for idx, l in enumerate([1, 2, 3, 10000, 100000000000]):
# assert int(ret[idx]) == l
# '''
# assert save_and_run(dedent(program_src), tmpdir) == 0
def test_issue_82rt(self, tmpdir: Any) -> None:
yaml_str = '[1, 2, 3, !si 10k, 100G]\n'
x = round_trip(yaml_str, preserve_quotes=True) # NOQA
def test_issue_102(self) -> None:
yaml_str = dedent(
"""
var1: #empty
var2: something #notempty
var3: {} #empty object
var4: {a: 1} #filled object
var5: [] #empty array
""",
)
x = round_trip(yaml_str, preserve_quotes=True) # NOQA
def test_issue_150(self) -> None:
from ruyaml import YAML
inp = """\
base: &base_key
first: 123
second: 234
child:
<<: *base_key
third: 345
"""
yaml = YAML()
data = yaml.load(inp)
child = data['child']
assert 'second' in dict(**child)
def test_issue_160(self) -> None:
from ruyaml.compat import StringIO
s = dedent(
"""\
root:
# a comment
- {some_key: "value"}
foo: 32
bar: 32
""",
)
a = round_trip_load(s)
del a['root'][0]['some_key']
buf = StringIO()
round_trip_dump(a, buf, block_seq_indent=4)
exp = dedent(
"""\
root:
# a comment
- {}
foo: 32
bar: 32
""",
)
assert buf.getvalue() == exp
def test_issue_161(self) -> None:
yaml_str = dedent(
"""\
mapping-A:
key-A:{}
mapping-B:
""",
)
for comment in ['', ' # no-newline', ' # some comment\n', '\n']:
s = yaml_str.format(comment)
res = round_trip(s) # NOQA
def test_issue_161a(self) -> None:
yaml_str = dedent(
"""\
mapping-A:
key-A:{}
mapping-B:
""",
)
for comment in ['\n# between']:
s = yaml_str.format(comment)
res = round_trip(s) # NOQA
def test_issue_163(self) -> None:
s = dedent(
"""\
some-list:
# List comment
- {}
""",
)
x = round_trip(s, preserve_quotes=True) # NOQA
json_str = (
r'{"sshKeys":[{"name":"AETROS\/google-k80-1","uses":0,"getLastUse":0,'
'"fingerprint":"MD5:19:dd:41:93:a1:a3:f5:91:4a:8e:9b:d0:ae:ce:66:4c",'
'"created":1509497961}]}'
)
json_str2 = '{"abc":[{"a":"1", "uses":0}]}'
def test_issue_172(self) -> None:
x = round_trip_load(TestIssues.json_str2) # NOQA
x = round_trip_load(TestIssues.json_str) # NOQA
def test_issue_176(self) -> None:
# basic request by Stuart Berg
from ruyaml import YAML
yaml = YAML()
seq = yaml.load('[1,2,3]')
seq[:] = [1, 2, 3, 4]
def test_issue_176_preserve_comments_on_extended_slice_assignment(self) -> None:
yaml_str = dedent(
"""\
- a
- b # comment
- c # commment c
# comment c+
- d
- e # comment
""",
)
seq = round_trip_load(yaml_str)
seq[1::2] = ['B', 'D']
res = round_trip_dump(seq)
assert res == yaml_str.replace(' b ', ' B ').replace(' d\n', ' D\n')
def test_issue_176_test_slicing(self) -> None:
mss = round_trip_load('[0, 1, 2, 3, 4]')
assert len(mss) == 5
assert mss[2:2] == []
assert mss[2:4] == [2, 3]
assert mss[1::2] == [1, 3]
# slice assignment
m = mss[:]
m[2:2] = [42]
assert m == [0, 1, 42, 2, 3, 4]
m = mss[:]
m[:3] = [42, 43, 44]
assert m == [42, 43, 44, 3, 4]
m = mss[:]
m[2:] = [42, 43, 44]
assert m == [0, 1, 42, 43, 44]
m = mss[:]
m[:] = [42, 43, 44]
assert m == [42, 43, 44]
# extend slice assignment
m = mss[:]
m[2:4] = [42, 43, 44]
assert m == [0, 1, 42, 43, 44, 4]
m = mss[:]
m[1::2] = [42, 43]
assert m == [0, 42, 2, 43, 4]
m = mss[:]
with pytest.raises(TypeError, match='too many'):
m[1::2] = [42, 43, 44]
with pytest.raises(TypeError, match='not enough'):
m[1::2] = [42]
m = mss[:]
m += [5]
m[1::2] = [42, 43, 44]
assert m == [0, 42, 2, 43, 4, 44]
# deleting
m = mss[:]
del m[1:3]
assert m == [0, 3, 4]
m = mss[:]
del m[::2]
assert m == [1, 3]
m = mss[:]
del m[:]
assert m == []
def test_issue_184(self) -> None:
yaml_str = dedent(
"""\
test::test:
# test
foo:
bar: baz
""",
)
d = round_trip_load(yaml_str)
d['bar'] = 'foo'
d.yaml_add_eol_comment('test1', 'bar')
assert round_trip_dump(d) == yaml_str + 'bar: foo # test1\n'
def test_issue_219(self) -> None:
yaml_str = dedent(
"""\
[StackName: AWS::StackName]
""",
)
d = round_trip_load(yaml_str) # NOQA
def test_issue_219a(self) -> None:
yaml_str = dedent(
"""\
[StackName:
AWS::StackName]
""",
)
d = round_trip_load(yaml_str) # NOQA
def test_issue_220(self, tmpdir: Any) -> None:
program_src = r'''
from ruyaml import YAML
yaml_str = """\
---
foo: ["bar"]
"""
yaml = YAML(typ='safe', pure=True)
d = yaml.load(yaml_str)
print(d)
'''
assert save_and_run(dedent(program_src), tmpdir, optimized=True) == 0
def test_issue_221_add(self) -> None:
from ruyaml.comments import CommentedSeq
a = CommentedSeq([1, 2, 3])
a + [4, 5]
def test_issue_221_sort(self) -> None:
from ruyaml import YAML
from ruyaml.compat import StringIO
yaml = YAML()
inp = dedent(
"""\
- d
- a # 1
- c # 3
- e # 5
- b # 2
""",
)
a = yaml.load(dedent(inp))
a.sort()
buf = StringIO()
yaml.dump(a, buf)
exp = dedent(
"""\
- a # 1
- b # 2
- c # 3
- d
- e # 5
""",
)
assert buf.getvalue() == exp
def test_issue_221_sort_reverse(self) -> None:
from ruyaml import YAML
from ruyaml.compat import StringIO
yaml = YAML()
inp = dedent(
"""\
- d
- a # 1
- c # 3
- e # 5
- b # 2
""",
)
a = yaml.load(dedent(inp))
a.sort(reverse=True)
buf = StringIO()
yaml.dump(a, buf)
exp = dedent(
"""\
- e # 5
- d
- c # 3
- b # 2
- a # 1
""",
)
assert buf.getvalue() == exp
def test_issue_221_sort_key(self) -> None:
from ruyaml import YAML
from ruyaml.compat import StringIO
yaml = YAML()
inp = dedent(
"""\
- four
- One # 1
- Three # 3
- five # 5
- two # 2
""",
)
a = yaml.load(dedent(inp))
a.sort(key=str.lower)
buf = StringIO()
yaml.dump(a, buf)
exp = dedent(
"""\
- five # 5
- four
- One # 1
- Three # 3
- two # 2
""",
)
assert buf.getvalue() == exp
def test_issue_221_sort_key_reverse(self) -> None:
from ruyaml import YAML
from ruyaml.compat import StringIO
yaml = YAML()
inp = dedent(
"""\
- four
- One # 1
- Three # 3
- five # 5
- two # 2
""",
)
a = yaml.load(dedent(inp))
a.sort(key=str.lower, reverse=True)
buf = StringIO()
yaml.dump(a, buf)
exp = dedent(
"""\
- two # 2
- Three # 3
- One # 1
- four
- five # 5
""",
)
assert buf.getvalue() == exp
def test_issue_222(self) -> None:
import ruyaml
from ruyaml.compat import StringIO
yaml = ruyaml.YAML(typ='safe')
buf = StringIO()
yaml.dump(['012923'], buf)
assert buf.getvalue() == "['012923']\n"
def test_issue_223(self) -> None:
import ruyaml
yaml = ruyaml.YAML(typ='safe')
yaml.load('phone: 0123456789')
def test_issue_232(self) -> None:
import ruyaml
yaml = ruyaml.YAML(typ='safe', pure=True)
with pytest.raises(ruyaml.parser.ParserError):
yaml.load(']')
with pytest.raises(ruyaml.parser.ParserError):
yaml.load('{]')
def test_issue_233(self) -> None:
import json
yaml = YAML()
data = yaml.load('{}')
json_str = json.dumps(data) # NOQA
def test_issue_233a(self) -> None:
import json
yaml = YAML()
data = yaml.load('[]')
json_str = json.dumps(data) # NOQA
def test_issue_234(self) -> None:
from ruyaml import YAML
inp = dedent(
"""\
- key: key1
ctx: [one, two]
help: one
cmd: >
foo bar
foo bar
""",
)
yaml = YAML(typ='safe', pure=True)
data = yaml.load(inp)
fold = data[0]['cmd']
print(repr(fold))
assert '\a' not in fold
def test_issue_236(self) -> None:
inp = """
conf:
xx: {a: "b", c: []}
asd: "nn"
"""
d = round_trip(inp, preserve_quotes=True) # NOQA
def test_issue_238(self, tmpdir: Any) -> None:
program_src = r"""
import ruyaml
from io import StringIO
yaml = ruyaml.YAML(typ='unsafe')
class A:
def __setstate__(self, d):
self.__dict__ = d
class B:
pass
a = A()
b = B()
a.x = b
b.y = [b]
assert a.x.y[0] == a.x
buf = StringIO()
yaml.dump(a, buf)
data = yaml.load(buf.getvalue())
assert data.x.y[0] == data.x
"""
assert save_and_run(dedent(program_src), tmpdir) == 0
def test_issue_239(self) -> None:
inp = """
first_name: Art
occupation: Architect
# I'm safe
about: Art Vandelay is a fictional character that George invents...
# we are not :(
# help me!
---
# what?!
hello: world
# someone call the Batman
foo: bar # or quz
# Lost again
---
I: knew
# final words
"""
d = YAML().round_trip_all(inp) # NOQA
def test_issue_242(self) -> None:
from ruyaml.comments import CommentedMap
d0 = CommentedMap([('a', 'b')])
assert d0['a'] == 'b'
def test_issue_245(self) -> None:
from ruyaml import YAML
inp = """
d: yes
"""
for typ in ['safepure', 'rt', 'safe']:
if typ.endswith('pure'):
pure = True
typ = typ[:-4]
else:
pure = None
yaml = YAML(typ=typ, pure=pure)
yaml.version = (1, 1)
d = yaml.load(inp)
print(typ, yaml.parser, yaml.resolver)
assert d['d'] is True
def test_issue_249(self) -> None:
yaml = YAML()
inp = dedent(
"""\
# comment
-
- 1
- 2
- 3
""",
)
exp = dedent(
"""\
# comment
- - 1
- 2
- 3
""",
)
yaml.round_trip(inp, outp=exp) # NOQA
def test_issue_250(self) -> None:
inp = """
# 1.
- - 1
# 2.
- map: 2
# 3.
- 4
"""
d = round_trip(inp) # NOQA
# @pytest.mark.xfail(strict=True, reason='bla bla', raises=AssertionError)
def test_issue_279(self) -> None:
from ruyaml import YAML
from ruyaml.compat import StringIO
yaml = YAML()
yaml.indent(sequence=4, offset=2)
inp = dedent(
"""\
experiments:
- datasets:
# ATLAS EWK
- {dataset: ATLASWZRAP36PB, frac: 1.0}
- {dataset: ATLASZHIGHMASS49FB, frac: 1.0}
""",
)
a = yaml.load(inp)
buf = StringIO()
yaml.dump(a, buf)
print(buf.getvalue())
assert buf.getvalue() == inp
def test_issue_280(self) -> None:
from collections import namedtuple
from sys import stdout
from ruyaml import YAML
from ruyaml.representer import RepresenterError
T = namedtuple('T', ('a', 'b'))
t = T(1, 2)
yaml = YAML()
with pytest.raises(RepresenterError, match='cannot represent'):
yaml.dump({'t': t}, stdout)
def test_issue_282(self) -> None:
# update from list of tuples caused AttributeError
import ruyaml
yaml_data = ruyaml.comments.CommentedMap([('a', 'apple'), ('b', 'banana')])
yaml_data.update([('c', 'cantaloupe')])
yaml_data.update({'d': 'date', 'k': 'kiwi'})
assert 'c' in yaml_data.keys()
assert 'c' in yaml_data._ok
def test_issue_284(self) -> None:
import ruyaml
inp = dedent(
"""\
plain key: in-line value
: # Both empty
"quoted key":
- entry
""",
)
yaml = ruyaml.YAML(typ='rt')
yaml.version = (1, 2)
d = yaml.load(inp)
assert d[None] is None
yaml = ruyaml.YAML(typ='rt')
yaml.version = (1, 1)
with pytest.raises(ruyaml.parser.ParserError, match='expected '):
d = yaml.load(inp)
def test_issue_285(self) -> None:
from ruyaml import YAML
yaml = YAML()
inp = dedent(
"""\
%YAML 1.1
---
- y
- n
- Y
- N
""",
)
a = yaml.load(inp)
assert a[0]
assert a[2]
assert not a[1]
assert not a[3]
def test_issue_286(self) -> None:
from ruyaml import YAML
from ruyaml.compat import StringIO
yaml = YAML()
inp = dedent(
"""\
parent_key:
- sub_key: sub_value
# xxx""",
)
a = yaml.load(inp)
a['new_key'] = 'new_value'
buf = StringIO()
yaml.dump(a, buf)
assert buf.getvalue().endswith('xxx\nnew_key: new_value\n')
def test_issue_288(self) -> None:
import sys
from ruyaml import YAML
from ruyaml.compat import StringIO
yamldoc = dedent(
"""\
---
# Reusable values
aliases:
# First-element comment
- &firstEntry First entry
# Second-element comment
- &secondEntry Second entry
# Third-element comment is
# a multi-line value
- &thirdEntry Third entry
# EOF Comment
""",
)
yaml = YAML()
yaml.indent(mapping=2, sequence=4, offset=2)
yaml.explicit_start = True
yaml.preserve_quotes = True
yaml.width = sys.maxsize
data = yaml.load(yamldoc)
buf = StringIO()
yaml.dump(data, buf)
assert buf.getvalue() == yamldoc
def test_issue_288a(self) -> None:
import sys
from ruyaml import YAML
from ruyaml.compat import StringIO
yamldoc = dedent(
"""\
---
# Reusable values
aliases:
# First-element comment
- &firstEntry First entry
# Second-element comment
- &secondEntry Second entry
# Third-element comment is
# a multi-line value
- &thirdEntry Third entry
# EOF Comment
""",
)
yaml = YAML()
yaml.indent(mapping=2, sequence=4, offset=2)
yaml.explicit_start = True
yaml.preserve_quotes = True
yaml.width = sys.maxsize
data = yaml.load(yamldoc)
buf = StringIO()
yaml.dump(data, buf)
assert buf.getvalue() == yamldoc
def test_issue_290(self) -> None:
import sys
from ruyaml import YAML
from ruyaml.compat import StringIO
yamldoc = dedent(
"""\
---
aliases:
# Folded-element comment
# for a multi-line value
- &FoldedEntry >
THIS IS A
FOLDED, MULTI-LINE
VALUE
# Literal-element comment
# for a multi-line value
- &literalEntry |
THIS IS A
LITERAL, MULTI-LINE
VALUE
# Plain-element comment
- &plainEntry Plain entry
""",
)
yaml = YAML()
yaml.indent(mapping=2, sequence=4, offset=2)
yaml.explicit_start = True
yaml.preserve_quotes = True
yaml.width = sys.maxsize
data = yaml.load(yamldoc)
buf = StringIO()
yaml.dump(data, buf)
assert buf.getvalue() == yamldoc
def test_issue_290a(self) -> None:
import sys
from ruyaml import YAML
from ruyaml.compat import StringIO
yamldoc = dedent(
"""\
---
aliases:
# Folded-element comment
# for a multi-line value
- &FoldedEntry >
THIS IS A
FOLDED, MULTI-LINE
VALUE
# Literal-element comment
# for a multi-line value
- &literalEntry |
THIS IS A
LITERAL, MULTI-LINE
VALUE
# Plain-element comment
- &plainEntry Plain entry
""",
)
yaml = YAML()
yaml.indent(mapping=2, sequence=4, offset=2)
yaml.explicit_start = True
yaml.preserve_quotes = True
yaml.width = sys.maxsize
data = yaml.load(yamldoc)
buf = StringIO()
yaml.dump(data, buf)
assert buf.getvalue() == yamldoc
# @pytest.mark.xfail(strict=True, reason='should fail pre 0.15.100', raises=AssertionError)
def test_issue_295(self) -> None:
# deepcopy also makes a copy of the start and end mark, and these did not
# have any comparison beyond their ID, which of course changed, breaking
# some old merge_comment code
import copy
inp = dedent(
"""
A:
b:
# comment
- l1
- l2
C:
d: e
f:
# comment2
- - l31
- l32
- l33: '5'
""",
)
data = round_trip_load(inp) # NOQA
dc = copy.deepcopy(data)
assert round_trip_dump(dc) == inp
def test_issue_300(self) -> None:
from ruyaml import YAML
inp = dedent(
"""
%YAML 1.2
%TAG ! tag:example.com,2019/path#fragment
---
null
""",
)
YAML().load(inp)
def test_issue_300a(self) -> None:
import ruyaml
inp = dedent(
"""
%YAML 1.1
%TAG ! tag:example.com,2019/path#fragment
---
null
""",
)
yaml = YAML()
with pytest.raises(
ruyaml.scanner.ScannerError,
match='while scanning a directive',
):
yaml.load(inp)
def test_issue_304(self) -> None:
inp = """
%YAML 1.2
%TAG ! tag:example.com,2019:
---
!foo null
...
"""
d = na_round_trip(inp) # NOQA
def test_issue_305(self) -> None:
inp = """
%YAML 1.2
---
! null
...
"""
d = na_round_trip(inp) # NOQA
def test_issue_307(self) -> None:
inp = """
%YAML 1.2
%TAG ! tag:example.com,2019/path#
---
null
...
"""
d = na_round_trip(inp) # NOQA
def test_issue_445(self) -> None:
from ruyaml import YAML
from ruyaml.compat import StringIO
yaml = YAML()
yaml.version = '1.1' # type: ignore
data = yaml.load('quote: I have seen things')
buf = StringIO()
yaml.dump(data, buf)
assert buf.getvalue() == '%YAML 1.1\n---\nquote: I have seen things\n'
yaml = YAML()
yaml.version = [1, 1] # type: ignore
data = yaml.load('quote: I have seen things')
buf = StringIO()
yaml.dump(data, buf)
assert buf.getvalue() == '%YAML 1.1\n---\nquote: I have seen things\n'
def test_issue_447(self) -> None:
from ruyaml import YAML
YAML().load('{\n\t"FOO": "BAR"\n}')
def test_issue_449(self) -> None:
inp = """\
emoji_index: !!python/name:materialx.emoji.twemoji
"""
d = na_round_trip(inp) # NOQA
def test_issue_455(self) -> None:
from ruyaml import YAML
cm = YAML().map(a=97, b=98)
cm.update({'c': 42, 'd': 196})
cm.update(c=99, d=100)
prev = None
for k, v in cm.items():
if prev is not None:
assert prev + 1 == v
prev = v
assert ord(k) == v
assert len(cm) == 4
def test_issue_453(self) -> None:
from io import StringIO
from ruyaml import YAML
inp = dedent(
"""
to-merge: &anchor
merge-key: should not be duplicated
to-merge2: &anchor2
merge-key2: should not be duplicated
usage:
<<: [*anchor, *anchor2]
usage-key: usage-value
""",
)
yaml = YAML()
data = yaml.load(inp)
data['usage'].insert(0, 'insert-key', 'insert-value')
out_stream = StringIO()
yaml.dump(data, out_stream)
result = out_stream.getvalue()
print(result)
assert inp.replace('usage:\n', 'usage:\n insert-key: insert-value\n') == result
def test_issue_454(self) -> None:
inp = """
test1: 🎉
test2: "🎉"
test3: '🎉'
"""
d = round_trip(inp, preserve_quotes=True) # NOQA
def test_so_75631454(self) -> None:
from ruyaml import YAML
from ruyaml.compat import StringIO
inp = dedent(
"""
test:
long: "This is a sample text
across two lines."
""",
)
yaml = YAML()
yaml.preserve_quotes = True
yaml.indent(mapping=4)
yaml.width = 27
data = yaml.load(inp)
buf = StringIO()
yaml.dump(data, buf)
assert buf.getvalue() == inp
def test_issue_458(self) -> None:
from io import StringIO
from ruyaml import YAML
yaml = YAML()
out_stream = StringIO()
in_string = 'a' * 128
yaml.dump(in_string, out_stream)
result = out_stream.getvalue()
assert in_string == result.splitlines()[0]
def test_issue_459(self) -> None:
from io import StringIO
from ruyaml import YAML
MYOBJ = {
'data': dedent(
"""\
example: "first"
data:
- flag: true
integer: 1
float: 1.0
string: "this is a string"
list:
- first
- second
- third
circle:
x: 10cm
y: 10cm
radius: 2.24cm
- flag: false
integer: 2
float: 2.0
string: "this is another string"
list:
- first
- second
- third
circle:
x: 20cm
y: 20cm
radius: 2.24cm
""",
),
}
yaml = YAML()
yaml.width = 60
out_stream = StringIO()
yaml.dump([MYOBJ], out_stream)
data = yaml.load(out_stream.getvalue())
assert data[0]['data'] == MYOBJ['data']
def test_issue_461(self) -> None:
from ruyaml import YAML
yaml = YAML()
inp = dedent(
"""
first name: Roy
last name: Rogers
city: somewhere
""",
)
yaml = YAML()
data = yaml.load(inp)
data.pop('last name')
assert data.pop('not there', 'xxx') == 'xxx'
data.insert(1, 'last name', 'Beaty', comment='he has seen things')
def test_issue_463(self) -> None:
import sys
from ruyaml import YAML
from ruyaml.compat import StringIO
yaml = YAML()
inp = dedent(
"""
first_name: Art
""",
)
data = yaml.load(inp)
_ = data.merge
data.insert(0, 'some_key', 'test')
yaml.dump(data, sys.stdout)
buf = StringIO()
yaml.dump(data, buf)
exp = dedent(
"""
some_key: test
first_name: Art
""",
)
assert buf.getvalue() == exp
def test_issue_464(self) -> None:
# document end marker without newline threw error in 0.17.27
from ruyaml import YAML
yaml = YAML()
yaml.load('---\na: True\n...')
def test_issue_467(self) -> None:
# cannot change the default constructor, following test will fail
import ruyaml
yaml = ruyaml.YAML()
old_constructor = yaml.constructor.add_constructor(
yaml.resolver.DEFAULT_MAPPING_TAG,
lambda x, y: None,
)
# this should be solved by the copy to the Constructor instance
if old_constructor is not None:
yaml.constructor.add_constructor(
yaml.resolver.DEFAULT_MAPPING_TAG,
old_constructor,
)
yaml = ruyaml.YAML()
# for k, v in yaml.constructor.yaml_constructors.items():
# print(k, v)
assert yaml.load('a: b') is not None
def test_issue_480(self) -> None:
import sys
import ruyaml
yaml = ruyaml.YAML()
data = yaml.load(
dedent(
"""
# hi
{}
"""
),
)
yaml.dump(data, sys.stdout)
def test_issue_482(self) -> None:
from collections import OrderedDict
import ruyaml
def _ordered_constructor(loader: Any, node: Any) -> Any:
loader.flatten_mapping(node)
return OrderedDict(loader.construct_pairs(node))
content = 'foo: bar'
yaml = ruyaml.YAML(typ='safe', pure=True)
old_constructor = yaml.constructor.add_constructor(
yaml.Resolver.DEFAULT_MAPPING_TAG,
_ordered_constructor,
)
data = yaml.load(content)
print('data', data, type(data))
assert isinstance(data, OrderedDict)
if old_constructor is not None:
yaml.constructor.add_constructor(
yaml.resolver.DEFAULT_MAPPING_TAG,
old_constructor,
)
# @pytest.mark.xfail(strict=True, reason='bla bla', raises=AssertionError)
# def test_issue_ xxx(self) -> None:
# inp = """
# """
# d = round_trip(inp) # NOQA
python-ruyaml-0.92.1/_test/test_json_numbers.py 0000664 0000000 0000000 00000002677 15056754172 0021716 0 ustar 00root root 0000000 0000000 # coding: utf-8
import json
from typing import Any
import pytest # type: ignore # NOQA
def load(s: str, typ: Any = float) -> float:
import ruyaml
yaml = ruyaml.YAML()
x = '{"low": %s }' % (s)
print('input: [%s]' % (s), repr(x))
# just to check it is loadable json
res = json.loads(x)
assert isinstance(res['low'], typ)
ret_val = yaml.load(x)
print(ret_val)
return ret_val['low'] # type: ignore
class TestJSONNumbers:
# based on http://stackoverflow.com/a/30462009/1307905
# yaml number regex: http://yaml.org/spec/1.2/spec.html#id2804092
#
# -? [1-9] ( \. [0-9]* [1-9] )? ( e [-+] [1-9] [0-9]* )?
#
# which is not a superset of the JSON numbers
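# A hedged illustration, not part of the original test suite: applied literally,
# the spec regex quoted above rejects some plain JSON floats that ruyaml still
# loads as floats (which is what the tests below verify). The helper name is
# made up for this sketch.
def _demo_yaml_spec_float_regex() -> None:
    import re

    spec = re.compile(r'^-?[1-9](\.[0-9]*[1-9])?(e[-+][1-9][0-9]*)?$')
    assert spec.match('3.1e-5') is not None
    assert spec.match('1.0') is None  # last digit after '.' must be 1-9
    assert spec.match('3.1e5') is None  # the spec requires a sign after 'e'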
def test_json_number_float(self) -> None:
for x in (
y.split('#')[0].strip()
for y in """
1.0 # should fail on YAML spec on 1-9 allowed as single digit
-1.0
1e-06
3.1e-5
3.1e+5
3.1e5 # should fail on YAML spec: no +- after e
""".splitlines()
):
if not x:
continue
res = load(x)
assert isinstance(res, float)
def test_json_number_int(self) -> None:
for x in (
y.split('#')[0].strip()
for y in """
42
""".splitlines()
):
if not x:
continue
res = load(x, int)
assert isinstance(res, int)
python-ruyaml-0.92.1/_test/test_line_col.py 0000664 0000000 0000000 00000004310 15056754172 0020760 0 ustar 00root root 0000000 0000000 # coding: utf-8
from typing import Any
import pytest # type: ignore # NOQA
from roundtrip import ( # type: ignore # NOQA
dedent,
round_trip,
round_trip_dump,
round_trip_load,
)
def load(s: str) -> Any:
return round_trip_load(dedent(s))
class TestLineCol:
def test_item_00(self) -> None:
data = load(
"""
- a
- e
- [b, d]
- c
"""
)
assert data[2].lc.line == 2
assert data[2].lc.col == 2
def test_item_01(self) -> None:
data = load(
"""
- a
- e
- {x: 3}
- c
"""
)
assert data[2].lc.line == 2
assert data[2].lc.col == 2
def test_item_02(self) -> None:
data = load(
"""
- a
- e
- !!set {x, y}
- c
"""
)
assert data[2].lc.line == 2
assert data[2].lc.col == 2
def test_item_03(self) -> None:
data = load(
"""
- a
- e
- !!omap
- x: 1
- y: 3
- c
"""
)
assert data[2].lc.line == 2
assert data[2].lc.col == 2
def test_item_04(self) -> None:
data = load(
"""
# testing line and column based on SO
# http://stackoverflow.com/questions/13319067/
- key1: item 1
key2: item 2
- key3: another item 1
key4: another item 2
"""
)
assert data[0].lc.line == 2
assert data[0].lc.col == 2
assert data[1].lc.line == 4
assert data[1].lc.col == 2
def test_pos_mapping(self) -> None:
data = load(
"""
a: 1
b: 2
c: 3
# comment
klm: 42
d: 4
"""
)
assert data.lc.key('klm') == (4, 0)
assert data.lc.value('klm') == (4, 5)
def test_pos_sequence(self) -> None:
data = load(
"""
- a
- b
- c
# next one!
- klm
- d
"""
)
assert data.lc.item(3) == (4, 2)
python-ruyaml-0.92.1/_test/test_literal.py 0000664 0000000 0000000 00000021027 15056754172 0020634 0 ustar 00root root 0000000 0000000 # coding: utf-8
import pytest # type: ignore # NOQA
from roundtrip import YAML # type: ignore # does an automatic dedent on load
"""
YAML 1.0 allowed root level literal style without indentation:
"Usually top level nodes are not indented" (example 4.21 in 4.6.3)
YAML 1.1 is a bit vague but says:
"Regardless of style, scalar content must always be indented by at least one space"
(4.4.3)
"In general, the document’s node is indented as if it has a parent indented at -1 spaces."
(4.3.3)
YAML 1.2 is again clear about root-level literal scalars after the directives-end marker in example 9.5:
%YAML 1.2
--- |
%!PS-Adobe-2.0
...
%YAML1.2
---
# Empty
...
"""
class TestNoIndent:
def test_root_literal_scalar_indent_example_9_5(self) -> None:
yaml = YAML()
s = '%!PS-Adobe-2.0'
inp = """
--- |
{}
"""
d = yaml.load(inp.format(s))
print(d)
assert d == s + '\n'
def test_root_literal_scalar_no_indent(self) -> None:
yaml = YAML()
s = 'testing123'
inp = """
--- |
{}
"""
d = yaml.load(inp.format(s))
print(d)
assert d == s + '\n'
def test_root_literal_scalar_no_indent_1_1(self) -> None:
yaml = YAML()
s = 'testing123'
inp = """
%YAML 1.1
--- |
{}
"""
d = yaml.load(inp.format(s))
print(d)
assert d == s + '\n'
def test_root_literal_scalar_no_indent_1_1_old_style(self) -> None:
from textwrap import dedent
from ruyaml import YAML
yaml = YAML(typ='safe', pure=True)
s = 'testing123'
inp = """
%YAML 1.1
--- |
{}
"""
d = yaml.load(dedent(inp.format(s)))
print(d)
assert d == s + '\n'
def test_root_literal_scalar_no_indent_1_1_no_raise(self) -> None:
# from ruyaml.parser import ParserError
yaml = YAML()
yaml.root_level_block_style_scalar_no_indent_error_1_1 = True
s = 'testing123'
# with pytest.raises(ParserError):
if True:
inp = """
%YAML 1.1
--- |
{}
"""
yaml.load(inp.format(s))
def test_root_literal_scalar_indent_offset_one(self) -> None:
yaml = YAML()
s = 'testing123'
inp = """
--- |1
{}
"""
d = yaml.load(inp.format(s))
print(d)
assert d == s + '\n'
def test_root_literal_scalar_indent_offset_four(self) -> None:
yaml = YAML()
s = 'testing123'
inp = """
--- |4
{}
"""
d = yaml.load(inp.format(s))
print(d)
assert d == s + '\n'
def test_root_literal_scalar_indent_offset_two_leading_space(self) -> None:
yaml = YAML()
s = ' testing123'
inp = """
--- |4
{s}
{s}
"""
d = yaml.load(inp.format(s=s))
print(d)
assert d == (s + '\n') * 2
def test_root_literal_scalar_no_indent_special(self) -> None:
yaml = YAML()
s = '%!PS-Adobe-2.0'
inp = """
--- |
{}
"""
d = yaml.load(inp.format(s))
print(d)
assert d == s + '\n'
def test_root_folding_scalar_indent(self) -> None:
yaml = YAML()
s = '%!PS-Adobe-2.0'
inp = """
--- >
{}
"""
d = yaml.load(inp.format(s))
print(d)
assert d == s + '\n'
def test_root_folding_scalar_no_indent(self) -> None:
yaml = YAML()
s = 'testing123'
inp = """
--- >
{}
"""
d = yaml.load(inp.format(s))
print(d)
assert d == s + '\n'
def test_root_folding_scalar_no_indent_special(self) -> None:
yaml = YAML()
s = '%!PS-Adobe-2.0'
inp = """
--- >
{}
"""
d = yaml.load(inp.format(s))
print(d)
assert d == s + '\n'
def test_root_literal_multi_doc(self) -> None:
yaml = YAML(typ='safe', pure=True)
s1 = 'abc'
s2 = 'klm'
inp = """
--- |-
{}
--- |
{}
"""
for idx, d1 in enumerate(yaml.load_all(inp.format(s1, s2))):
print('d1:', d1)
assert ['abc', 'klm\n'][idx] == d1
def test_root_literal_doc_indent_directives_end(self) -> None:
yaml = YAML()
yaml.explicit_start = True
inp = """
--- |-
%YAML 1.3
---
this: is a test
"""
yaml.round_trip(inp)
def test_root_literal_doc_indent_document_end(self) -> None:
yaml = YAML()
yaml.explicit_start = True
inp = """
--- |-
some more
...
text
"""
yaml.round_trip(inp)
def test_root_literal_doc_indent_marker(self) -> None:
yaml = YAML()
yaml.explicit_start = True
inp = """
--- |2
some more
text
"""
d = yaml.load(inp)
print(type(d), repr(d))
yaml.round_trip(inp)
def test_nested_literal_doc_indent_marker(self) -> None:
yaml = YAML()
yaml.explicit_start = True
inp = """
---
a: |2
some more
text
"""
d = yaml.load(inp)
print(type(d), repr(d))
yaml.round_trip(inp)
class Test_RoundTripLiteral:
def test_rt_root_literal_scalar_no_indent(self) -> None:
yaml = YAML()
yaml.explicit_start = True
s = 'testing123'
ys = """
--- |
{}
"""
ys = ys.format(s)
d = yaml.load(ys)
yaml.dump(d, compare=ys)
def test_rt_root_literal_scalar_indent(self) -> None:
yaml = YAML()
yaml.explicit_start = True
yaml.indent = 4
s = 'testing123'
ys = """
--- |
{}
"""
ys = ys.format(s)
d = yaml.load(ys)
yaml.dump(d, compare=ys)
def test_rt_root_plain_scalar_no_indent(self) -> None:
yaml = YAML()
yaml.explicit_start = True
yaml.indent = 0
s = 'testing123'
ys = """
---
{}
"""
ys = ys.format(s)
d = yaml.load(ys)
yaml.dump(d, compare=ys)
def test_rt_root_plain_scalar_expl_indent(self) -> None:
yaml = YAML()
yaml.explicit_start = True
yaml.indent = 4
s = 'testing123'
ys = """
---
{}
"""
ys = ys.format(s)
d = yaml.load(ys)
yaml.dump(d, compare=ys)
def test_rt_root_sq_scalar_expl_indent(self) -> None:
yaml = YAML()
yaml.explicit_start = True
yaml.indent = 4
s = "'testing: 123'"
ys = """
---
{}
"""
ys = ys.format(s)
d = yaml.load(ys)
yaml.dump(d, compare=ys)
def test_rt_root_dq_scalar_expl_indent(self) -> None:
# if yaml.indent is the default (None)
# then write after the directive indicator
yaml = YAML()
yaml.explicit_start = True
yaml.indent = 0
s = '"\'testing123"'
ys = """
---
{}
"""
ys = ys.format(s)
d = yaml.load(ys)
yaml.dump(d, compare=ys)
def test_rt_root_literal_scalar_no_indent_no_eol(self) -> None:
yaml = YAML()
yaml.explicit_start = True
s = 'testing123'
ys = """
--- |-
{}
"""
ys = ys.format(s)
d = yaml.load(ys)
yaml.dump(d, compare=ys)
def test_rt_non_root_literal_scalar(self) -> None:
yaml = YAML()
s = 'testing123'
ys = """
- |
{}
"""
ys = ys.format(s)
d = yaml.load(ys)
yaml.dump(d, compare=ys)
def test_regular_spaces(self) -> None:
import ruyaml
yaml = ruyaml.YAML()
ys = "key: |\n\n\n content\n"
d = yaml.load(ys)
assert d['key'] == '\n\ncontent\n'
def test_irregular_spaces_content(self) -> None:
import ruyaml
yaml = ruyaml.YAML()
ys = "key: |\n \n \n irregular content\n"
with pytest.raises(ruyaml.scanner.ScannerError):
d = yaml.load(ys)
print(d)
def test_irregular_spaces_comment(self) -> None:
import ruyaml
yaml = ruyaml.YAML()
ys = "key: |\n \n \n # comment\n"
with pytest.raises(ruyaml.scanner.ScannerError):
d = yaml.load(ys)
print(d)
python-ruyaml-0.92.1/_test/test_merge.py 0000664 0000000 0000000 00000002505 15056754172 0020277 0 ustar 00root root 0000000 0000000 """
testing of anchors and the aliases referring to them
"""
from typing import Any
import pytest # type: ignore # NOQA
from roundtrip import ( # type: ignore # NOQA
YAML,
dedent,
round_trip,
round_trip_dump,
round_trip_load,
)
def load(s: str) -> Any:
return round_trip_load(dedent(s))
def compare(d: Any, s: str) -> None:
assert round_trip_dump(d) == dedent(s)
class TestMerge:
def test_remove_key_before_merge(self) -> None:
data = load(
"""
a: &aa
b: 1
c: 2
d:
e: 3
f: 4
<<: *aa
g: 5
h: 6
"""
)
del data['d']['f']
compare(
data,
"""
a: &aa
b: 1
c: 2
d:
e: 3
<<: *aa
g: 5
h: 6
""",
)
def test_remove_key_after_merge(self) -> None:
data = load(
"""
a: &aa
b: 1
c: 2
d:
e: 3
f: 4
<<: *aa
g: 5
h: 6
"""
)
del data['d']['g']
compare(
data,
"""
a: &aa
b: 1
c: 2
d:
e: 3
f: 4
<<: *aa
h: 6
""",
)
python-ruyaml-0.92.1/_test/test_none.py 0000664 0000000 0000000 00000002166 15056754172 0020142 0 ustar 00root root 0000000 0000000 # coding: utf-8
import pytest # type: ignore # NOQA
from roundtrip import round_trip_dump, round_trip_load # type: ignore
class TestNone:
def test_dump00(self) -> None:
data = None
s = round_trip_dump(data)
assert s == 'null\n...\n'
d = round_trip_load(s)
assert d == data
def test_dump01(self) -> None:
data = None
s = round_trip_dump(data, explicit_end=True)
assert s == 'null\n...\n'
d = round_trip_load(s)
assert d == data
def test_dump02(self) -> None:
data = None
s = round_trip_dump(data, explicit_end=False)
assert s == 'null\n...\n'
d = round_trip_load(s)
assert d == data
def test_dump03(self) -> None:
data = None
s = round_trip_dump(data, explicit_start=True)
assert s == '---\n...\n'
d = round_trip_load(s)
assert d == data
def test_dump04(self) -> None:
data = None
s = round_trip_dump(data, explicit_start=True, explicit_end=False)
assert s == '---\n...\n'
d = round_trip_load(s)
assert d == data
python-ruyaml-0.92.1/_test/test_numpy.py 0000664 0000000 0000000 00000001013 15056754172 0020341 0 ustar 00root root 0000000 0000000 # coding: utf-8
# try:
# import numpy
# except: # NOQA
# numpy = None
# def Xtest_numpy() -> None:
# import ruyaml
#
# if numpy is None:
# return
# data = numpy.arange(10)
# print('data', type(data), data)
#
# buf = io.BytesIO()
# ruyaml.dump(data) # needs updating to use buffer
# yaml_str = buf.getvalue().decode('utf-8')
# datb = ruyaml.load(yaml_str)
# print('datb', type(datb), datb)
#
# print('\nYAML', yaml_str)
# assert data == datb
python-ruyaml-0.92.1/_test/test_program_config.py 0000664 0000000 0000000 00000003612 15056754172 0022174 0 ustar 00root root 0000000 0000000 # coding: utf-8
import pytest # type: ignore # NOQA
# import ruyaml
from roundtrip import round_trip # type: ignore
class TestProgramConfig:
def test_application_arguments(self) -> None:
# application configuration
round_trip(
"""
args:
username: anthon
passwd: secret
fullname: Anthon van der Neut
tmux:
session-name: test
loop:
wait: 10
"""
)
def test_single(self) -> None:
# application configuration
round_trip(
"""
# default arguments for the program
args: # needed to prevent comment wrapping
# this should be your username
username: anthon
passwd: secret # this is plaintext don't reuse \
# important/system passwords
fullname: Anthon van der Neut
tmux:
session-name: test # make sure this doesn't clash with
# other sessions
loop: # looping related defaults
# experiment with the following
wait: 10
# no more argument info to pass
"""
)
def test_multi(self) -> None:
# application configuration
round_trip(
"""
# default arguments for the program
args: # needed to prevent comment wrapping
# this should be your username
username: anthon
passwd: secret # this is plaintext don't reuse
# important/system passwords
fullname: Anthon van der Neut
tmux:
session-name: test # make sure this doesn't clash with
# other sessions
loop: # looping related defaults
# experiment with the following
wait: 10
# no more argument info to pass
"""
)
python-ruyaml-0.92.1/_test/test_spec_examples.py 0000664 0000000 0000000 00000014321 15056754172 0022027 0 ustar 00root root 0000000 0000000 # coding: utf-8
import pytest # type: ignore # NOQA
from roundtrip import YAML # type: ignore
def test_example_2_1() -> None:
yaml = YAML()
yaml.round_trip(
"""
- Mark McGwire
- Sammy Sosa
- Ken Griffey
"""
)
@pytest.mark.xfail(strict=True) # type: ignore
def test_example_2_2() -> None:
yaml = YAML()
yaml.mapping_value_align = True
yaml.round_trip(
"""
hr: 65 # Home runs
avg: 0.278 # Batting average
rbi: 147 # Runs Batted In
"""
)
def test_example_2_3() -> None:
yaml = YAML()
yaml.indent(sequence=4, offset=2)
yaml.round_trip(
"""
american:
- Boston Red Sox
- Detroit Tigers
- New York Yankees
national:
- New York Mets
- Chicago Cubs
- Atlanta Braves
"""
)
@pytest.mark.xfail(strict=True) # type: ignore
def test_example_2_4() -> None:
yaml = YAML()
yaml.mapping_value_align = True
yaml.round_trip(
"""
-
name: Mark McGwire
hr: 65
avg: 0.278
-
name: Sammy Sosa
hr: 63
avg: 0.288
"""
)
@pytest.mark.xfail(strict=True) # type: ignore
def test_example_2_5() -> None:
yaml = YAML()
yaml.flow_sequence_element_align = True
yaml.round_trip(
"""
- [name , hr, avg ]
- [Mark McGwire, 65, 0.278]
- [Sammy Sosa , 63, 0.288]
"""
)
@pytest.mark.xfail(strict=True) # type: ignore
def test_example_2_6() -> None:
yaml = YAML()
# yaml.flow_mapping_final_comma = False
yaml.flow_mapping_one_element_per_line = True
yaml.round_trip(
"""
Mark McGwire: {hr: 65, avg: 0.278}
Sammy Sosa: {
hr: 63,
avg: 0.288
}
"""
)
@pytest.mark.xfail(strict=True) # type: ignore
def test_example_2_7() -> None:
yaml = YAML()
yaml.round_trip_all(
"""
# Ranking of 1998 home runs
---
- Mark McGwire
- Sammy Sosa
- Ken Griffey
# Team ranking
---
- Chicago Cubs
- St Louis Cardinals
"""
)
def test_example_2_8() -> None:
yaml = YAML()
yaml.explicit_start = True
yaml.explicit_end = True
yaml.round_trip_all(
"""
---
time: 20:03:20
player: Sammy Sosa
action: strike (miss)
...
---
time: 20:03:47
player: Sammy Sosa
action: grand slam
...
"""
)
def test_example_2_9() -> None:
yaml = YAML()
yaml.explicit_start = True
yaml.indent(sequence=4, offset=2)
yaml.round_trip(
"""
---
hr: # 1998 hr ranking
- Mark McGwire
- Sammy Sosa
rbi:
# 1998 rbi ranking
- Sammy Sosa
- Ken Griffey
"""
)
@pytest.mark.xfail(strict=True) # type: ignore
def test_example_2_10() -> None:
yaml = YAML()
yaml.explicit_start = True
yaml.indent(sequence=4, offset=2)
yaml.round_trip(
"""
---
hr:
- Mark McGwire
# Following node labeled SS
- &SS Sammy Sosa
rbi:
- *SS # Subsequent occurrence
- Ken Griffey
"""
)
@pytest.mark.xfail(strict=True) # type: ignore
def test_example_2_11() -> None:
yaml = YAML()
yaml.round_trip(
"""
? - Detroit Tigers
- Chicago cubs
:
- 2001-07-23
? [ New York Yankees,
Atlanta Braves ]
: [ 2001-07-02, 2001-08-12,
2001-08-14 ]
"""
)
@pytest.mark.xfail(strict=True) # type: ignore
def test_example_2_12() -> None:
yaml = YAML()
yaml.explicit_start = True
yaml.round_trip(
"""
---
# Products purchased
- item : Super Hoop
quantity: 1
- item : Basketball
quantity: 4
- item : Big Shoes
quantity: 1
"""
)
@pytest.mark.xfail(strict=True) # type: ignore
def test_example_2_13() -> None:
yaml = YAML()
yaml.round_trip(
r"""
# ASCII Art
--- |
\//||\/||
// || ||__
"""
)
@pytest.mark.xfail(strict=True) # type: ignore
def test_example_2_14() -> None:
yaml = YAML()
yaml.explicit_start = True
yaml.indent(root_scalar=2) # needs to be added
yaml.round_trip(
"""
--- >
Mark McGwire's
year was crippled
by a knee injury.
"""
)
@pytest.mark.xfail(strict=True) # type: ignore
def test_example_2_15() -> None:
yaml = YAML()
yaml.round_trip(
"""
>
Sammy Sosa completed another
fine season with great stats.
63 Home Runs
0.288 Batting Average
What a year!
"""
)
def test_example_2_16() -> None:
yaml = YAML()
yaml.round_trip(
"""
name: Mark McGwire
accomplishment: >
Mark set a major league
home run record in 1998.
stats: |
65 Home Runs
0.278 Batting Average
"""
)
@pytest.mark.xfail( # type: ignore
strict=True,
reason='cannot YAML dump escape sequences (\n) as hex and normal',
)
def test_example_2_17() -> None:
yaml = YAML()
yaml.allow_unicode = False
yaml.preserve_quotes = True
yaml.round_trip(
r"""
unicode: "Sosa did fine.\u263A"
control: "\b1998\t1999\t2000\n"
hex esc: "\x0d\x0a is \r\n"
single: '"Howdy!" he cried.'
quoted: ' # Not a ''comment''.'
tie-fighter: '|\-*-/|'
"""
)
@pytest.mark.xfail(
strict=True, # type: ignore # NOQA
reason='non-literal/folding multiline scalars not supported',
)
def test_example_2_18() -> None:
yaml = YAML()
yaml.round_trip(
"""
plain:
This unquoted scalar
spans many lines.
quoted: "So does this
quoted scalar.\n"
"""
)
@pytest.mark.xfail(strict=True, reason='leading + on decimal dropped') # type: ignore
def test_example_2_19() -> None:
yaml = YAML()
yaml.round_trip(
"""
canonical: 12345
decimal: +12345
octal: 0o14
hexadecimal: 0xC
"""
)
@pytest.mark.xfail(strict=True, reason='case of NaN not preserved') # type: ignore
def test_example_2_20() -> None:
yaml = YAML()
yaml.round_trip(
"""
canonical: 1.23015e+3
exponential: 12.3015e+02
fixed: 1230.15
negative infinity: -.inf
not a number: .NaN
"""
)
def Xtest_example_2_X() -> None:
yaml = YAML()
yaml.round_trip(
"""
"""
)
python-ruyaml-0.92.1/_test/test_string.py 0000664 0000000 0000000 00000013025 15056754172 0020505 0 ustar 00root root 0000000 0000000 # coding: utf-8
"""
various test cases for string scalars in YAML files
'|' for preserved newlines
'>' for folded (newlines become spaces)
and the chomping modifiers:
'-' for stripping: final line break and any trailing empty lines are excluded
'+' for keeping: final line break and empty lines are preserved
'' for clipping: final line break preserved, empty lines at end not
included in content (no modifier)
"""
import platform
import pytest
# from ruyaml.compat import ordereddict
from roundtrip import ( # type: ignore # NOQA
dedent,
round_trip,
round_trip_dump,
round_trip_load,
)
import ruyaml
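# A hedged sketch, not part of the original tests: it illustrates the clip,
# strip and keep chomping behaviour described in the module docstring, using
# the plain safe loader. The helper name is made up for this sketch.
def _demo_chomping_modifiers() -> None:
    yaml = ruyaml.YAML(typ='safe', pure=True)
    assert yaml.load('a: |\n  x\n\n')['a'] == 'x\n'  # clip: final newline kept
    assert yaml.load('a: |-\n  x\n\n')['a'] == 'x'  # strip: final newline dropped
    assert yaml.load('a: |+\n  x\n\n')['a'] == 'x\n\n'  # keep: trailing empty line kept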
class TestLiteralScalarString:
def test_basic_string(self) -> None:
round_trip(
"""
a: abcdefg
"""
)
def test_quoted_integer_string(self) -> None:
round_trip(
"""
a: '12345'
"""
)
@pytest.mark.skipif( # type: ignore
platform.python_implementation() == 'Jython',
reason='Jython throws RepresenterError',
)
def test_preserve_string(self) -> None:
inp = """
a: |
abc
def
"""
round_trip(inp, intermediate=dict(a='abc\ndef\n'))
@pytest.mark.skipif( # type: ignore
platform.python_implementation() == 'Jython',
reason='Jython throws RepresenterError',
)
def test_preserve_string_strip(self) -> None:
s = """
a: |-
abc
def
"""
round_trip(s, intermediate=dict(a='abc\ndef'))
@pytest.mark.skipif( # type: ignore
platform.python_implementation() == 'Jython',
reason='Jython throws RepresenterError',
)
def test_preserve_string_keep(self) -> None:
# with pytest.raises(AssertionError) as excinfo:
inp = """
a: |+
ghi
jkl
b: x
"""
round_trip(inp, intermediate=dict(a='ghi\njkl\n\n\n', b='x'))
@pytest.mark.skipif( # type: ignore
platform.python_implementation() == 'Jython',
reason='Jython throws RepresenterError',
)
def test_preserve_string_keep_at_end(self) -> None:
# at EOF you have to specify the ... to get proper "closure"
# of the multiline scalar
inp = """
a: |+
ghi
jkl
...
"""
round_trip(inp, intermediate=dict(a='ghi\njkl\n\n'))
def test_fold_string(self) -> None:
inp = """
a: >
abc
def
"""
round_trip(inp)
def test_fold_string_strip(self) -> None:
inp = """
a: >-
abc
def
"""
round_trip(inp)
def test_fold_string_keep(self) -> None:
with pytest.raises(AssertionError) as excinfo: # NOQA
inp = """
a: >+
abc
def
"""
round_trip(inp, intermediate=dict(a='abc def\n\n'))
class TestQuotedScalarString:
def test_single_quoted_string(self) -> None:
inp = """
a: 'abc'
"""
round_trip(inp, preserve_quotes=True)
def test_double_quoted_string(self) -> None:
inp = """
a: "abc"
"""
round_trip(inp, preserve_quotes=True)
def test_non_preserved_double_quoted_string(self) -> None:
inp = """
a: "abc"
"""
exp = """
a: abc
"""
round_trip(inp, outp=exp)
class TestReplace:
"""inspired by issue 110 from sandres23"""
def test_replace_preserved_scalar_string(self) -> None:
s = dedent(
"""\
foo: |
foo
foo
bar
foo
"""
)
data = round_trip_load(s, preserve_quotes=True)
so = data['foo'].replace('foo', 'bar', 2)
assert isinstance(so, ruyaml.scalarstring.LiteralScalarString)
assert so == dedent(
"""
bar
bar
bar
foo
"""
)
def test_replace_double_quoted_scalar_string(self) -> None:
s = dedent(
"""\
foo: "foo foo bar foo"
"""
)
data = round_trip_load(s, preserve_quotes=True)
so = data['foo'].replace('foo', 'bar', 2)
assert isinstance(so, ruyaml.scalarstring.DoubleQuotedScalarString)
assert so == 'bar bar bar foo'
class TestWalkTree:
def test_basic(self) -> None:
from ruyaml.comments import CommentedMap
from ruyaml.scalarstring import walk_tree
data = CommentedMap()
data[1] = 'a'
data[2] = 'with\nnewline\n'
walk_tree(data)
exp = """\
1: a
2: |
with
newline
"""
assert round_trip_dump(data) == dedent(exp)
def test_map(self) -> None:
from ruyaml.comments import CommentedMap
from ruyaml.compat import ordereddict
from ruyaml.scalarstring import DoubleQuotedScalarString as dq
from ruyaml.scalarstring import SingleQuotedScalarString as sq
from ruyaml.scalarstring import preserve_literal, walk_tree
data = CommentedMap()
data[1] = 'a'
data[2] = 'with\nnew : line\n'
data[3] = '${abc}'
data[4] = 'almost:mapping'
m = ordereddict([('\n', preserve_literal), ('${', sq), (':', dq)])
walk_tree(data, map=m)
exp = """\
1: a
2: |
with
new : line
3: '${abc}'
4: "almost:mapping"
"""
assert round_trip_dump(data) == dedent(exp)
python-ruyaml-0.92.1/_test/test_tag.py 0000664 0000000 0000000 00000012063 15056754172 0017753 0 ustar 00root root 0000000 0000000 # coding: utf-8
from typing import Any
import pytest # type: ignore # NOQA
from roundtrip import YAML, round_trip, round_trip_load # type: ignore
def register_xxx(**kw: Any) -> None:
import ruyaml as yaml
class XXX(yaml.comments.CommentedMap):
@staticmethod
def yaml_dump(dumper: Any, data: Any) -> Any:
return dumper.represent_mapping('!xxx', data)
@classmethod
def yaml_load(cls, constructor: Any, node: Any) -> Any:
data = cls()
yield data
constructor.construct_mapping(node, data)
yaml.add_constructor('!xxx', XXX.yaml_load, constructor=yaml.RoundTripConstructor)
yaml.add_representer(XXX, XXX.yaml_dump, representer=yaml.RoundTripRepresenter)
class TestIndentFailures:
def test_tag(self) -> None:
round_trip(
"""\
!!python/object:__main__.Developer
name: Anthon
location: Germany
language: python
""",
)
def test_full_tag(self) -> None:
round_trip(
"""\
!!tag:yaml.org,2002:python/object:__main__.Developer
name: Anthon
location: Germany
language: python
""",
)
def test_standard_tag(self) -> None:
round_trip(
"""\
!!tag:yaml.org,2002:python/object:map
name: Anthon
location: Germany
language: python
""",
)
def test_Y1(self) -> None:
round_trip(
"""\
!yyy
name: Anthon
location: Germany
language: python
""",
)
def test_Y2(self) -> None:
round_trip(
"""\
!!yyy
name: Anthon
location: Germany
language: python
""",
)
# @pytest.mark.xfail(strict=True) # type: ignore
def test_spec_6_26_tag_shorthands(self) -> None:
from io import StringIO
from textwrap import dedent
from ruyaml import YAML
inp = dedent(
"""\
%TAG !e! tag:example.com,2000:app/
---
- !local foo
- !!str bar
- !e!tag%21 baz
""",
)
yaml = YAML()
data = yaml.load(inp)
buf = StringIO()
yaml.dump(data, buf)
print('buf:\n', buf.getvalue(), sep='')
assert buf.getvalue() == inp
class TestTagGeneral:
def test_unknown_handle(self) -> None:
from ruyaml.parser import ParserError
with pytest.raises(ParserError):
round_trip(
"""\
%TAG !x! tag:example.com,2000:app/
---
- !y!tag%21 baz
""",
)
class TestRoundTripCustom:
def test_X1(self) -> None:
register_xxx()
round_trip(
"""\
!xxx
name: Anthon
location: Germany
language: python
""",
)
@pytest.mark.xfail(strict=True) # type: ignore
def test_X_pre_tag_comment(self) -> None:
register_xxx()
round_trip(
"""\
-
# hello
!xxx
name: Anthon
location: Germany
language: python
""",
)
@pytest.mark.xfail(strict=True) # type: ignore
def test_X_post_tag_comment(self) -> None:
register_xxx()
round_trip(
"""\
- !xxx
# hello
name: Anthon
location: Germany
language: python
""",
)
def test_scalar_00(self) -> None:
# https://stackoverflow.com/a/45967047/1307905
round_trip(
"""\
Outputs:
Vpc:
Value: !Ref: vpc # first tag
Export:
Name: !Sub "${AWS::StackName}-Vpc" # second tag
""",
)
class TestIssue201:
def test_encoded_unicode_tag(self) -> None:
round_trip_load(
"""
s: !!python/%75nicode 'abc'
""",
)
class TestImplicitTaggedNodes:
def test_scalar(self) -> None:
data = round_trip(
"""\
- !SString abcdefg
- !SFloat 1.0
- !SInt 1961
- !SBool true
- !SLit |
glitter in the dark near the Tanhäuser gate
""",
)
# tagged scalars have string (or string-derived) types as value
assert data[0].count('d') == 1
assert data[1].count('1') == 1
assert data[2].count('1') == 2
assert data[3].count('u') == 1
assert data[4].count('a') == 4
def test_mapping(self) -> None:
round_trip(
"""\
- !Mapping {a: 1, b: 2}
""",
)
def test_sequence(self) -> None:
yaml = YAML()
yaml.brace_single_entry_mapping_in_flow_sequence = True
yaml.mapping_value_align = True
yaml.round_trip(
"""
- !Sequence [a, {b: 1}, {c: {d: 3}}]
""",
)
def test_sequence2(self) -> None:
yaml = YAML()
yaml.mapping_value_align = True
yaml.round_trip(
"""
- !Sequence [a, b: 1, c: {d: 3}]
""",
)
python-ruyaml-0.92.1/_test/test_version.py 0000664 0000000 0000000 00000011461 15056754172 0020666 0 ustar 00root root 0000000 0000000 # coding: utf-8
from typing import Any, Optional
import pytest # type: ignore # NOQA
from roundtrip import dedent, round_trip, round_trip_load # type: ignore
def load(s: str, version: Optional[Any] = None) -> Any:
import ruyaml # NOQA
yaml = ruyaml.YAML()
yaml.version = version
return yaml.load(dedent(s))
class TestVersions:
def test_explicit_1_2(self) -> None:
r = load(
"""\
%YAML 1.2
---
- 12:34:56
- 012
- 012345678
- 0o12
- on
- off
- yes
- no
- true
"""
)
assert r[0] == '12:34:56'
assert r[1] == 12
assert r[2] == 12345678
assert r[3] == 10
assert r[4] == 'on'
assert r[5] == 'off'
assert r[6] == 'yes'
assert r[7] == 'no'
assert r[8] is True
def test_explicit_1_1(self) -> None:
r = load(
"""\
%YAML 1.1
---
- 12:34:56
- 012
- 012345678
- 0o12
- on
- off
- yes
- no
- true
"""
)
assert r[0] == 45296
assert r[1] == 10
assert r[2] == '012345678'
assert r[3] == '0o12'
assert r[4] is True
assert r[5] is False
assert r[6] is True
assert r[7] is False
assert r[8] is True
def test_implicit_1_2(self) -> None:
r = load(
"""\
- 12:34:56
- 12:34:56.78
- 012
- 012345678
- 0o12
- on
- off
- yes
- no
- true
"""
)
assert r[0] == '12:34:56'
assert r[1] == '12:34:56.78'
assert r[2] == 12
assert r[3] == 12345678
assert r[4] == 10
assert r[5] == 'on'
assert r[6] == 'off'
assert r[7] == 'yes'
assert r[8] == 'no'
assert r[9] is True
def test_load_version_1_1(self) -> None:
inp = """\
- 12:34:56
- 12:34:56.78
- 012
- 012345678
- 0o12
- on
- off
- yes
- no
- true
"""
r = load(inp, version='1.1')
assert r[0] == 45296
assert r[1] == 45296.78
assert r[2] == 10
assert r[3] == '012345678'
assert r[4] == '0o12'
assert r[5] is True
assert r[6] is False
assert r[7] is True
assert r[8] is False
assert r[9] is True
class TestIssue62:
# bitbucket issue 62, issue_62
def test_00(self) -> None:
import ruyaml # NOQA
s = dedent(
"""\
{}# Outside flow collection:
- ::vector
- ": - ()"
- Up, up, and away!
- -123
- http://example.com/foo#bar
# Inside flow collection:
- [::vector, ": - ()", "Down, down and away!", -456, http://example.com/foo#bar]
"""
)
with pytest.raises(ruyaml.parser.ParserError):
round_trip(s.format('%YAML 1.1\n---\n'), preserve_quotes=True)
round_trip(s.format(""), preserve_quotes=True)
def test_00_single_comment(self) -> None:
import ruyaml # NOQA
s = dedent(
"""\
{}# Outside flow collection:
- ::vector
- ": - ()"
- Up, up, and away!
- -123
- http://example.com/foo#bar
- [::vector, ": - ()", "Down, down and away!", -456, http://example.com/foo#bar]
"""
)
with pytest.raises(ruyaml.parser.ParserError):
round_trip(s.format('%YAML 1.1\n---\n'), preserve_quotes=True)
round_trip(s.format(""), preserve_quotes=True)
# round_trip(s.format('%YAML 1.2\n---\n'), preserve_quotes=True, version=(1, 2))
def test_01(self) -> None:
import ruyaml # NOQA
s = dedent(
"""\
{}[random plain value that contains a ? character]
"""
)
with pytest.raises(ruyaml.parser.ParserError):
round_trip(s.format('%YAML 1.1\n---\n'), preserve_quotes=True)
round_trip(s.format(""), preserve_quotes=True)
# note the flow seq on the --- line!
round_trip(s.format('%YAML 1.2\n--- '), preserve_quotes=True, version='1.2')
def test_so_45681626(self) -> None:
# was not properly parsing
round_trip_load('{"in":{},"out":{}}')
class TestVersionComparison:
def test_vc(self) -> None:
from ruyaml.docinfo import Version
assert Version(1, 1) <= Version(2, 0)
assert Version(1, 1) <= Version(1, 2)
assert Version(1, 1) <= Version(1, 1)
assert Version(1, 3) == Version(1, 3)
assert Version(1, 2) > Version(1, 1)
assert Version(2, 0) > Version(1, 1)
assert Version(2, 0) >= Version(1, 1)
assert Version(1, 2) >= Version(1, 2)
python-ruyaml-0.92.1/_test/test_yamlfile.py 0000664 0000000 0000000 00000014301 15056754172 0020777 0 ustar 00root root 0000000 0000000 # coding: utf-8
"""
various test cases for YAML files
"""
import io
import platform
import sys
import pytest # type: ignore # NOQA
from roundtrip import ( # type: ignore # NOQA
dedent,
round_trip,
round_trip_dump,
round_trip_load,
)
class TestYAML:
def test_backslash(self) -> None:
round_trip(
"""
handlers:
static_files: applications/\\1/static/\\2
"""
)
def test_omap_out(self) -> None:
# ordereddict mapped to !!omap
import ruyaml # NOQA
from ruyaml.compat import ordereddict
x = ordereddict([('a', 1), ('b', 2)])
res = round_trip_dump(x, default_flow_style=False)
assert res == dedent(
"""
!!omap
- a: 1
- b: 2
"""
)
def test_omap_roundtrip(self) -> None:
round_trip(
"""
!!omap
- a: 1
- b: 2
- c: 3
- d: 4
"""
)
# @pytest.mark.skipif(sys.version_info < (2, 7),
# reason='collections not available')
# def test_dump_collections_ordereddict(self) -> None:
# from collections import OrderedDict
# import ruyaml # NOQA
# # OrderedDict mapped to !!omap
# x = OrderedDict([('a', 1), ('b', 2)])
# res = round_trip_dump(x, default_flow_style=False)
# assert res == dedent("""
# !!omap
# - a: 1
# - b: 2
# """)
@pytest.mark.skipif( # type: ignore
sys.version_info >= (3, 0) or platform.python_implementation() != 'CPython',
reason='ruyaml not available',
)
def test_dump_ruyaml_ordereddict(self) -> None:
import ruyaml # NOQA
from ruyaml.compat import ordereddict
# OrderedDict mapped to !!omap
x = ordereddict([('a', 1), ('b', 2)])
res = round_trip_dump(x, default_flow_style=False)
assert res == dedent(
"""
!!omap
- a: 1
- b: 2
"""
)
def test_CommentedSet(self) -> None:
from ruyaml.constructor import CommentedSet
s = CommentedSet(['a', 'b', 'c'])
s.remove('b')
s.add('d')
assert s == CommentedSet(['a', 'c', 'd'])
s.add('e')
s.add('f')
s.remove('e')
assert s == CommentedSet(['a', 'c', 'd', 'f'])
def test_set_out(self) -> None:
# preferable would be the shorter format without the ': null'
import ruyaml # NOQA
x = set(['a', 'b', 'c']) # NOQA
# cannot use round_trip_dump, it doesn't show null in block style
buf = io.StringIO()
with pytest.warns(PendingDeprecationWarning):
yaml = ruyaml.YAML(typ='unsafe', pure=True)
yaml.default_flow_style = False
yaml.dump(x, buf)
assert buf.getvalue() == dedent(
"""
!!set
a: null
b: null
c: null
"""
)
# ordering is not preserved in a set
def test_set_compact(self) -> None:
# this format is read and also should be written by default
round_trip(
"""
!!set
? a
? b
? c
"""
)
def test_set_compact_flow(self) -> None:
# this format is read and also should be written by default
round_trip(
"""
!!set {a, b, c}
"""
)
def test_blank_line_after_comment(self) -> None:
round_trip(
"""
# Comment with spaces after it.
a: 1
"""
)
def test_blank_line_between_seq_items(self) -> None:
round_trip(
"""
# Seq with empty lines in between items.
b:
- bar
- baz
"""
)
@pytest.mark.skipif( # type: ignore
platform.python_implementation() == 'Jython',
reason='Jython throws RepresenterError',
)
def test_blank_line_after_literal_chip(self) -> None:
s = """
c:
- |
This item
has a blank line
following it.
- |
To visually separate it from this item.
This item contains a blank line.
"""
d = round_trip_load(dedent(s))
print(d)
round_trip(s)
assert d['c'][0].split('it.')[1] == '\n'
assert d['c'][1].split('line.')[1] == '\n'
@pytest.mark.skipif( # type: ignore
platform.python_implementation() == 'Jython',
reason='Jython throws RepresenterError',
)
def test_blank_line_after_literal_keep(self) -> None:
"""have to insert an eof marker in YAML to test this"""
s = """
c:
- |+
This item
has a blank line
following it.
- |+
To visually separate it from this item.
This item contains a blank line.
...
"""
d = round_trip_load(dedent(s))
print(d)
round_trip(s)
assert d['c'][0].split('it.')[1] == '\n\n'
assert d['c'][1].split('line.')[1] == '\n\n\n'
@pytest.mark.skipif( # type: ignore
platform.python_implementation() == 'Jython',
reason='Jython throws RepresenterError',
)
def test_blank_line_after_literal_strip(self) -> None:
s = """
c:
- |-
This item
has a blank line
following it.
- |-
To visually separate it from this item.
This item contains a blank line.
"""
d = round_trip_load(dedent(s))
print(d)
round_trip(s)
assert d['c'][0].split('it.')[1] == ""
assert d['c'][1].split('line.')[1] == ""
def test_load_all_preserve_quotes(self) -> None:
import ruyaml # NOQA
yaml = ruyaml.YAML()
yaml.preserve_quotes = True
s = dedent(
"""\
a: 'hello'
---
b: "goodbye"
"""
)
data = []
for x in yaml.load_all(s):
data.append(x)
buf = ruyaml.compat.StringIO()
yaml.dump_all(data, buf)
out = buf.getvalue()
print(type(data[0]['a']), data[0]['a'])
# out = ruyaml.round_trip_dump_all(data)
print(out)
assert out == s
python-ruyaml-0.92.1/_test/test_yamlobject.py 0000664 0000000 0000000 00000005002 15056754172 0021324 0 ustar 00root root 0000000 0000000 # coding: utf-8
import sys
from typing import Any
import pytest # type: ignore # NOQA
from roundtrip import save_and_run # type: ignore # NOQA
def test_monster(tmpdir: Any) -> None:
program_src = '''\
import ruyaml
from textwrap import dedent
class Monster(ruyaml.YAMLObject):
yaml_tag = '!Monster'
def __init__(self, name, hp, ac, attacks):
self.name = name
self.hp = hp
self.ac = ac
self.attacks = attacks
def __repr__(self):
return "%s(name=%r, hp=%r, ac=%r, attacks=%r)" % (
self.__class__.__name__, self.name, self.hp, self.ac, self.attacks)
yaml = ruyaml.YAML(typ='safe', pure='True')
yaml = ruyaml.YAML()
data = yaml.load(dedent("""\\
--- !Monster
name: Cave spider
hp: [2,6] # 2d6
ac: 16
attacks: [BITE, HURT]
"""))
# normal dump, keys will be sorted
from io import BytesIO
buf = BytesIO()
yaml.dump(data, buf)
print(buf.getvalue().decode('utf-8'))
assert buf.getvalue().decode('utf8') == dedent("""\\
!Monster
name: Cave spider
hp: [2, 6] # 2d6
ac: 16
attacks: [BITE, HURT]
""")
'''
assert save_and_run(program_src, tmpdir) == 0
@pytest.mark.skipif(sys.version_info < (3, 0), reason='no __qualname__') # type: ignore
def test_qualified_name00(tmpdir: Any) -> None:
"""issue 214"""
program_src = """\
import ruyaml
from io import StringIO
class A:
def f(self):
pass
yaml = ruyaml.YAML(typ='unsafe', pure=True)
yaml.explicit_end = True
buf = StringIO()
yaml.dump(A.f, buf)
res = buf.getvalue()
print('res', repr(res))
assert res == "!!python/name:__main__.A.f ''\\n...\\n"
x = ruyaml.load(res)
assert x == A.f
"""
assert save_and_run(program_src, tmpdir) == 0
@pytest.mark.skipif(sys.version_info < (3, 0), reason='no __qualname__') # type: ignore
def test_qualified_name01(tmpdir: Any) -> None:
"""issue 214"""
from io import StringIO
import ruyaml.comments
from ruyaml import YAML
with pytest.warns(PendingDeprecationWarning):
yaml = YAML(typ='unsafe', pure=True)
yaml.explicit_end = True
buf = StringIO()
yaml.dump(ruyaml.comments.CommentedBase.yaml_anchor, buf)
res = buf.getvalue()
assert res == "!!python/name:ruyaml.comments.CommentedBase.yaml_anchor ''\n...\n"
x = yaml.load(res)
assert x == ruyaml.comments.CommentedBase.yaml_anchor
python-ruyaml-0.92.1/_test/test_z_check_debug_leftovers.py 0000664 0000000 0000000 00000001712 15056754172 0024044 0 ustar 00root root 0000000 0000000 # coding: utf-8
import sys
from typing import Any
import pytest # type: ignore # NOQA
from roundtrip import dedent, round_trip_dump, round_trip_load # type: ignore
class TestLeftOverDebug:
    # the idea here is to capture round_trip output via pytest stdout capture;
    # if there are any leftover debug statements they should show up
def test_00(self, capsys: Any) -> None:
s = dedent(
"""
a: 1
b: []
c: [a, 1]
d: {f: 3.14, g: 42}
"""
)
d = round_trip_load(s)
round_trip_dump(d, sys.stdout)
out, err = capsys.readouterr()
assert out == s
def test_01(self, capsys: Any) -> None:
s = dedent(
"""
- 1
- []
- [a, 1]
- {f: 3.14, g: 42}
- - 123
"""
)
d = round_trip_load(s)
round_trip_dump(d, sys.stdout)
out, err = capsys.readouterr()
assert out == s
python-ruyaml-0.92.1/_test/test_z_data.py 0000664 0000000 0000000 00000033055 15056754172 0020446 0 ustar 00root root 0000000 0000000 # coding: utf-8
import sys
import os
import warnings # NOQA
from pathlib import Path
from typing import Any, List, Optional, Tuple
import pytest # type: ignore # NOQA
base_path = Path('data') # that is ruyaml.data
class YAMLData:
yaml_tag = '!YAML'
def __init__(self, s: Any) -> None:
self._s = s
    # Conversion tables for input. E.g. "<TAB>" is replaced by "\t"
# fmt: off
special = {
'SPC': ' ',
'TAB': '\t',
'---': '---',
'...': '...',
'NL': '\n',
}
# fmt: on
@property
def value(self) -> Any:
if hasattr(self, '_p'):
return self._p # type: ignore
assert ' \n' not in self._s
assert '\t\n' not in self._s
self._p = self._s
for k, v in YAMLData.special.items():
k = '<' + k + '>'
self._p = self._p.replace(k, v)
return self._p
def test_rewrite(self, s: str) -> str:
assert ' \n' not in s
assert '\t\n' not in s
for k, v in YAMLData.special.items():
k = '<' + k + '>'
s = s.replace(k, v)
return s
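    # Hedged illustration of the substitution above (names taken from the
    # `special` table): a fixture line 'a: 1<SPC><NL>' becomes 'a: 1 \n', so
    # data files can carry trailing blanks and tabs without tripping the
    # raw-text asserts above.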
@classmethod
def from_yaml(cls, constructor: Any, node: Any) -> 'YAMLData':
from ruyaml.nodes import MappingNode
if isinstance(node, MappingNode):
return cls(constructor.construct_mapping(node))
return cls(node.value)
class Python(YAMLData):
yaml_tag = '!Python'
class Output(YAMLData):
yaml_tag = '!Output'
class Assert(YAMLData):
yaml_tag = '!Assert'
@property
def value(self) -> Any:
from collections.abc import Mapping
if hasattr(self, '_pa'):
return self._pa # type: ignore
if isinstance(self._s, Mapping):
self._s['lines'] = self.test_rewrite(self._s['lines']) # type: ignore
self._pa = self._s
return self._pa
class Events(YAMLData):
yaml_tag = '!Events'
class JSONData(YAMLData):
yaml_tag = '!JSON'
class Dump(YAMLData):
yaml_tag = '!Dump'
class Emit(YAMLData):
yaml_tag = '!Emit'
def pytest_generate_tests(metafunc: Any) -> None:
test_yaml = []
paths = sorted(base_path.glob('**/*.yaml'))
idlist = []
for path in paths:
# while developing tests put them in data/debug and run:
# auto -c "pytest _test/test_z_data.py" data/debug/*.yaml *.py _test/*.py
if os.environ.get('RUAMELAUTOTEST') == '1':
if path.parent.stem != 'debug':
continue
elif path.parent.stem == 'debug':
# don't test debug entries for production
continue
stem = path.stem
if stem.startswith('.#'): # skip emacs temporary file
continue
idlist.append(stem)
test_yaml.append([path])
metafunc.parametrize(['yaml'], test_yaml, ids=idlist, scope='class')
class TestYAMLData:
def yaml(
self,
yaml_version: Optional[Any] = None,
typ: Any = 'rt',
pure: Any = None,
) -> Any:
from ruyaml import YAML
y = YAML(typ=typ, pure=pure)
y.preserve_quotes = True
if yaml_version:
y.version = yaml_version
y.composer.warn_double_anchors = False
return y
def docs(self, path: Path) -> List[Any]:
from ruyaml import YAML
tyaml = YAML(typ='safe', pure=True)
tyaml.register_class(YAMLData)
tyaml.register_class(Python)
tyaml.register_class(Output)
tyaml.register_class(Assert)
tyaml.register_class(Events)
tyaml.register_class(JSONData)
tyaml.register_class(Dump)
tyaml.register_class(Emit)
return list(tyaml.load_all(path))
def yaml_load(
self, value: Any, yaml_version: Optional[Any] = None
) -> Tuple[Any, Any]:
yaml = self.yaml(yaml_version=yaml_version)
data = yaml.load(value)
return yaml, data
def round_trip(
self,
input: Any,
output: Optional[Any] = None,
yaml_version: Optional[Any] = None,
) -> None:
from ruyaml.compat import StringIO
yaml, data = self.yaml_load(input.value, yaml_version=yaml_version)
buf = StringIO()
yaml.dump(data, buf)
expected = input.value if output is None else output.value
value = buf.getvalue()
print('>>>> rt output\n', value.replace(' ', '\u2423'), sep='') # 2423 open box
assert value == expected
def gen_events(
self,
input: Any,
output: Any,
yaml_version: Optional[Any] = None,
) -> None:
from ruyaml.compat import StringIO
buf = StringIO()
yaml = self.yaml(yaml_version=yaml_version)
indent = 0
try:
for event in yaml.parse(input.value):
compact = event.compact_repr()
assert compact[0] in '+=-'
if compact[0] == '-':
indent -= 1
print(f'{" " * indent}{compact}', file=buf)
if compact[0] == '+':
indent += 1
except Exception as e: # NOQA
print('=EXCEPTION', file=buf) # exceptions not indented
if '=EXCEPTION' not in output.value:
raise
print('>>>> buf\n', buf.getvalue(), sep='')
assert buf.getvalue() == output.value
def load_compare_json(
self,
input: Any,
output: Any,
yaml_version: Optional[Any] = None,
) -> None:
import json
from ruyaml.comments import CommentedMap, TaggedScalar
from ruyaml.compat import StringIO
def serialize_obj(obj: Any) -> Any:
if isinstance(obj, CommentedMap):
return {k: v for k, v in obj.items()} # NOQA
elif isinstance(obj, TaggedScalar):
return str(obj.value)
elif isinstance(obj, set):
return {k: None for k in obj}
return str(obj)
buf = StringIO()
yaml = self.yaml(typ='rt', yaml_version=yaml_version)
for data in yaml.load_all(input.value):
if isinstance(data, dict):
data = {str(k): v for k, v in data.items()}
json.dump(data, buf, sort_keys=True, indent=2, default=serialize_obj)
buf.write('\n')
print('>>>> buf\n', buf.getvalue(), sep='')
# jsons = json.dumps(json.loads(output.value)) # normalize formatting of JSON
assert buf.getvalue() == output.value
def load_compare_emit(
self,
input: Any,
output: Any,
yaml_version: Optional[Any] = None,
) -> None:
from ruyaml.compat import StringIO
buf = StringIO()
yaml = self.yaml(yaml_version=yaml_version)
yaml.preserve_quotes = True
data = input.value
if data.startswith('---') or '\n--- ' in data or '\n---' in data:
yaml.explicit_start = True
data = list(yaml.load_all(data))
yaml.dump_all(data, buf)
print('>>>> buf\n', buf.getvalue(), sep='')
assert buf.getvalue() == output.value
def load_assert(
self,
input: Any,
confirm: Any,
yaml_version: Optional[Any] = None,
) -> None:
from collections.abc import Mapping
d = self.yaml_load(input.value, yaml_version=yaml_version)[1] # NOQA
print('confirm.value', confirm.value, type(confirm.value))
if isinstance(confirm.value, Mapping):
r = range(confirm.value['range'])
lines = confirm.value['lines'].splitlines()
for idx in r: # NOQA
for line in lines:
line = 'assert ' + line
print(line)
exec(line)
else:
for line in confirm.value.splitlines():
line = 'assert ' + line
print(line)
exec(line)
def run_python(
self,
python: Any,
data: Any,
tmpdir: Any,
input: Optional[Any] = None,
) -> None:
from roundtrip import save_and_run # type: ignore
if input is not None:
(tmpdir / 'input.yaml').write_text(input.value, encoding='utf-8')
assert save_and_run(python.value, base_dir=tmpdir, output=data.value) == 0
def insert_comments(self, data: Any, actions: Any) -> None:
"""this is to automatically insert based on:
path (a.1.b),
position (before, after, between), and
offset (absolute/relative)
"""
raise NotImplementedError
expected = []
for line in data.value.splitlines(True):
            idx = line.find('?')
if idx < 0:
expected.append(line)
continue
assert line.lstrip()[0] == '#' # it has to be comment line
print(data)
assert ''.join(expected) == data.value
# this is executed by pytest the methods with names not starting with
# test_ are helper methods
def test_yaml_data(self, yaml: Any, tmpdir: Any) -> None:
from collections.abc import Mapping
idx = 0
typs = [] # list of test to be performed
yaml_version = None
docs = self.docs(yaml)
if isinstance(docs[0], Mapping):
d = docs[0]
if d.get('skip'):
pytest.skip('explicit skip')
if '1.3-mod' in d.get('tags', []):
pytest.skip('YAML 1.3')
typ = d.get('type')
if isinstance(typ, str):
typs.append(typ)
elif isinstance(typ, list):
typs.extend(typ[:])
del typ
yaml_version = d.get('yaml_version')
if 'python' in d:
if not check_python_version(d['python']):
pytest.skip('unsupported version')
idx += 1
data = output = confirm = python = events = json = dump = emit = None
for doc in docs[idx:]:
if isinstance(doc, Output):
output = doc
elif isinstance(doc, Events):
events = doc
elif isinstance(doc, JSONData):
json = doc
elif isinstance(doc, Dump):
dump = doc # NOQA
elif isinstance(doc, Emit):
emit = doc # NOQA
elif isinstance(doc, Assert):
confirm = doc
elif isinstance(doc, Python):
python = doc
if len(typs) == 0:
typs = ['python_run']
elif isinstance(doc, YAMLData):
data = doc
else:
print('no handler for type:', type(doc), repr(doc))
raise AssertionError()
if len(typs) == 0:
if data is not None and output is not None:
typs = ['rt']
elif data is not None and confirm is not None:
typs = ['load_assert']
else:
assert data is not None
typs = ['rt']
print('type:', typs)
if data is not None:
print('>>>> data:\n', data.value.replace(' ', '\u2423'), sep='', end='')
if events is not None:
print('>>>> events:\n', events.value, sep='')
else:
print(
'>>>> output:\n', output.value if output is not None else output, sep=''
)
for typ in typs:
if typ == 'rt':
self.round_trip(data, output, yaml_version=yaml_version)
elif typ == 'python_run':
inp = None if output is None or data is None else data
self.run_python(
python,
output if output is not None else data,
tmpdir,
input=inp,
)
elif typ == 'load_assert':
self.load_assert(data, confirm, yaml_version=yaml_version)
elif typ == 'comment':
actions: List[Any] = []
self.insert_comments(data, actions)
elif typ == 'events':
if events is None:
print('need to specify !Events for type:', typ)
sys.exit(1)
self.gen_events(data, events, yaml_version=yaml_version)
elif typ == 'json':
if json is None:
print('need to specify !JSON for type:', typ)
sys.exit(1)
self.load_compare_json(data, json, yaml_version=yaml_version)
elif typ == 'dump':
continue
elif typ == 'emit':
self.load_compare_emit(data, emit)
else:
                print(f'\n>>>>>> run type unknown: "{typ}" <<<<<<\n')
raise AssertionError()
def check_python_version(match: Any, current: Optional[Any] = None) -> bool:
"""
version indication, return True if version matches.
match should be something like 3.6+, or [2.7, 3.3] etc. Floats
are converted to strings. Single values are made into lists.
"""
if current is None:
current = list(sys.version_info[:3])
if not isinstance(match, list):
match = [match]
for m in match:
minimal = False
if isinstance(m, float):
m = str(m)
if m.endswith('+'):
minimal = True
m = m[:-1]
# assert m[0].isdigit()
# assert m[-1].isdigit()
m = [int(x) for x in m.split('.')]
current_len = current[: len(m)]
# print(m, current, current_len)
if minimal:
if current_len >= m:
return True
else:
if current_len == m:
return True
return False
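# Hedged examples of the matching rules above (illustrative, not run as tests):
#   check_python_version('3.6+', current=[3, 8, 0])       # True: 3.8 >= 3.6
#   check_python_version([2.7, 3.3], current=[3, 3, 1])    # True: exact 3.3 match
#   check_python_version('3.6', current=[3, 8, 0])         # False: no '+', not exact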
python-ruyaml-0.92.1/_test/test_z_olddata.py 0000664 0000000 0000000 00000001742 15056754172 0021143 0 ustar 00root root 0000000 0000000 # coding: utf-8
import os
import sys
import pytest # type: ignore # NOQA
sys.path.insert(0, os.path.dirname(__file__) + '/lib')
import warnings # NOQA
from typing import Any, List # NOQA
args: List[Any] = []
def test_data() -> None:
import test_appliance # type: ignore # NOQA
warnings.simplefilter('ignore', PendingDeprecationWarning)
collections = []
import test_yaml # type: ignore
collections.append(test_yaml)
test_appliance.run(collections, args)
# @pytest.mark.skipif(not ruyaml.__with_libyaml__,
# reason="no libyaml")
def test_data_ext() -> None:
collections = []
import test_appliance # NOQA
import ruyaml
warnings.simplefilter('ignore', ruyaml.error.UnsafeLoaderWarning)
warnings.simplefilter('ignore', PendingDeprecationWarning)
if ruyaml.__with_libyaml__:
import test_yaml_ext # type: ignore
collections.append(test_yaml_ext)
test_appliance.run(collections, args)
python-ruyaml-0.92.1/lib/ 0000775 0000000 0000000 00000000000 15056754172 0015215 5 ustar 00root root 0000000 0000000 python-ruyaml-0.92.1/lib/ruyaml/ 0000775 0000000 0000000 00000000000 15056754172 0016526 5 ustar 00root root 0000000 0000000 python-ruyaml-0.92.1/lib/ruyaml/__init__.py 0000664 0000000 0000000 00000001726 15056754172 0020645 0 ustar 00root root 0000000 0000000 # coding: utf-8
from __future__ import absolute_import, division, print_function, unicode_literals
if False: # MYPY
from typing import Any, Dict # NOQA
_package_data = dict(
full_package_name='ruyaml',
version_info=(0, 18, 15),
__version__='0.18.15',
author='ruyaml contributors',
author_email='none.yet@github.org',
description='ruyaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order', # NOQA
entry_points=None,
since=2014,
extras_require={
':platform_python_implementation=="CPython" and python_version<"3.8"': [
'ruyaml.clib>=0.1.2',
],
},
) # type: Dict[Any, Any]
version_info = _package_data['version_info']
__version__ = _package_data['__version__']
try:
from .cyaml import * # NOQA
__with_libyaml__ = True
except (ImportError, ValueError): # for Jython
__with_libyaml__ = False
from ruyaml.main import * # NOQA
python-ruyaml-0.92.1/lib/ruyaml/anchor.py 0000664 0000000 0000000 00000000726 15056754172 0020357 0 ustar 00root root 0000000 0000000 from __future__ import annotations
if False: # MYPY
from typing import Any, Dict, Iterator, List, Optional, Union # NOQA
anchor_attrib = '_yaml_anchor'
class Anchor:
__slots__ = 'value', 'always_dump'
attrib = anchor_attrib
def __init__(self) -> None:
self.value = None
self.always_dump = False
def __repr__(self) -> Any:
ad = ', (always dump)' if self.always_dump else ""
return f'Anchor({self.value!r}{ad})'
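# Minimal usage sketch (illustrative only; nothing in this module runs it):
#   a = Anchor()
#   a.value = 'base'        # the name emitted as &base and referenced as *base
#   a.always_dump = True    # keep the anchor on dump even without any alias
#   repr(a)                 # -> "Anchor('base', (always dump))"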
python-ruyaml-0.92.1/lib/ruyaml/comments.py 0000664 0000000 0000000 00000115225 15056754172 0020733 0 ustar 00root root 0000000 0000000 from __future__ import annotations
"""
stuff to deal with comments and formatting on dict/list/ordereddict/set
these are not really related, formatting could be factored out as
a separate base
"""
import copy
import sys
from collections.abc import Mapping, MutableSet, Set, Sized
from typing import Any, Dict, Iterator, List, Optional, Union
from ruyaml.anchor import Anchor
from ruyaml.compat import MutableSliceableSequence, nprintf, ordereddict # NOQA
from ruyaml.scalarstring import ScalarString
from ruyaml.tag import Tag
if False: # MYPY
from typing import Any, Dict, Iterator, List, Optional, Union # NOQA
# fmt: off
__all__ = ['CommentedSeq', 'CommentedKeySeq',
'CommentedMap', 'CommentedOrderedMap',
'CommentedSet', 'comment_attrib', 'merge_attrib',
'TaggedScalar',
'C_POST', 'C_PRE', 'C_SPLIT_ON_FIRST_BLANK', 'C_BLANK_LINE_PRESERVE_SPACE',
]
# fmt: on
# splitting of comments by the scanner
# an EOLC (End-Of-Line Comment) is preceded by some token
# an FLC (Full Line Comment) is a comment not preceded by a token, i.e. # is
# the first non-blank on line
# a BL is a blank line i.e. empty or spaces/tabs only
# bits 0 and 1 are combined, you can choose only one
C_POST = 0b00
C_PRE = 0b01
C_SPLIT_ON_FIRST_BLANK = (
0b10 # as C_POST, but if blank line then C_PRE all lines before
)
# first blank goes to POST even if no following real FLC
# (first blank -> first of post)
# 0b11 -> reserved for future use
C_BLANK_LINE_PRESERVE_SPACE = 0b100
# C_EOL_PRESERVE_SPACE2 = 0b1000
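# Illustration of combining these flags (an assumed caller-side policy, not
# something set below): the low two bits select one splitting mode, the higher
# bit is independent, e.g.
#   policy = C_SPLIT_ON_FIRST_BLANK | C_BLANK_LINE_PRESERVE_SPACE   # == 0b110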
class IDX:
# temporary auto increment, so rearranging is easier
def __init__(self) -> None:
self._idx = 0
def __call__(self) -> Any:
x = self._idx
self._idx += 1
return x
def __str__(self) -> Any:
return str(self._idx)
cidx = IDX()
# more or less in order of subjective expected likelihood
# the _POST and _PRE ones are lists themselves
C_VALUE_EOL = C_ELEM_EOL = cidx()
C_KEY_EOL = cidx()
C_KEY_PRE = C_ELEM_PRE = cidx()  # note: this is not value
C_VALUE_POST = C_ELEM_POST = cidx()  # note: this is not value
C_VALUE_PRE = cidx()
C_KEY_POST = cidx()
C_TAG_EOL = cidx()
C_TAG_POST = cidx()
C_TAG_PRE = cidx()
C_ANCHOR_EOL = cidx()
C_ANCHOR_POST = cidx()
C_ANCHOR_PRE = cidx()
comment_attrib = '_yaml_comment'
format_attrib = '_yaml_format'
line_col_attrib = '_yaml_line_col'
merge_attrib = '_yaml_merge'
class Comment:
    # using sys.getsizeof, tested the Comment objects; __slots__ makes them bigger
# and adding self.end did not matter
__slots__ = 'comment', '_items', '_post', '_pre'
attrib = comment_attrib
def __init__(self, old: bool = True) -> None:
self._pre = None if old else [] # type: ignore
self.comment = None # [post, [pre]]
# map key (mapping/omap/dict) or index (sequence/list) to a list of
# dict: post_key, pre_key, post_value, pre_value
# list: pre item, post item
self._items: Dict[Any, Any] = {}
# self._start = [] # should not put these on first item
self._post: List[Any] = [] # end of document comments
def __str__(self) -> str:
if bool(self._post):
end = ',\n end=' + str(self._post)
else:
end = ""
return f'Comment(comment={self.comment},\n items={self._items}{end})'
def _old__repr__(self) -> str:
if bool(self._post):
end = ',\n end=' + str(self._post)
else:
end = ""
ln = '' # type: Union[str,int]
try:
ln = max([len(str(k)) for k in self._items]) + 1
except ValueError:
ln = '' # type: ignore
it = ' '.join([f'{str(k) + ":":{ln}} {v}\n' for k, v in self._items.items()])
if it:
it = '\n ' + it + ' '
return f'Comment(\n start={self.comment},\n items={{{it}}}{end})'
def __repr__(self) -> str:
if self._pre is None:
return self._old__repr__()
if bool(self._post):
end = ',\n end=' + repr(self._post)
else:
end = ""
try:
ln = max([len(str(k)) for k in self._items]) + 1
except ValueError:
ln = '' # type: ignore
# noqa:E231
it = ' '.join([f'{str(k) + ":":{ln}} {v}\n' for k, v in self._items.items()])
if it:
it = '\n ' + it + ' '
return f'Comment(\n pre={self.pre},\n items={{{it}}}{end})'
@property
def items(self) -> Any:
return self._items
@property
def end(self) -> Any:
return self._post
@end.setter
def end(self, value: Any) -> None:
self._post = value
@property
def pre(self) -> Any:
return self._pre
@pre.setter
def pre(self, value: Any) -> None:
self._pre = value
def get(self, item: Any, pos: Any) -> Any:
x = self._items.get(item)
if x is None or len(x) < pos:
return None
return x[pos] # can be None
def set(self, item: Any, pos: Any, value: Any) -> Any:
x = self._items.get(item)
if x is None:
self._items[item] = x = [None] * (pos + 1)
else:
while len(x) <= pos:
x.append(None)
assert x[pos] is None
x[pos] = value
def __contains__(self, x: Any) -> Any:
# test if a substring is in any of the attached comments
if self.comment:
if self.comment[0] and x in self.comment[0].value:
return True
if self.comment[1]:
for c in self.comment[1]:
if x in c.value:
return True
for value in self.items.values():
if not value:
continue
for c in value:
if c and x in c.value:
return True
if self.end:
for c in self.end:
if x in c.value:
return True
return False
# to distinguish key from None
class NotNone:
pass # NOQA
class Format:
__slots__ = ('_flow_style',)
attrib = format_attrib
def __init__(self) -> None:
self._flow_style: Any = None
def set_flow_style(self) -> None:
self._flow_style = True
def set_block_style(self) -> None:
self._flow_style = False
def flow_style(self, default: Optional[Any] = None) -> Any:
"""if default (the flow_style) is None, the flow style tacked on to
the object explicitly will be taken. If that is None as well the
default flow style rules the format down the line, or the type
of the constituent values (simple -> flow, map/list -> block)"""
if self._flow_style is None:
return default
return self._flow_style
def __repr__(self) -> str:
return f'Format({self._flow_style})'
class LineCol:
"""
line and column information wrt document, values start at zero (0)
"""
attrib = line_col_attrib
def __init__(self) -> None:
self.line = None
self.col = None
self.data: Optional[Dict[Any, Any]] = None
def add_kv_line_col(self, key: Any, data: Any) -> None:
if self.data is None:
self.data = {}
self.data[key] = data
def key(self, k: Any) -> Any:
return self._kv(k, 0, 1)
def value(self, k: Any) -> Any:
return self._kv(k, 2, 3)
def _kv(self, k: Any, x0: Any, x1: Any) -> Any:
if self.data is None:
return None
data = self.data[k]
return data[x0], data[x1]
def item(self, idx: Any) -> Any:
if self.data is None:
return None
return self.data[idx][0], self.data[idx][1]
def add_idx_line_col(self, key: Any, data: Any) -> None:
if self.data is None:
self.data = {}
self.data[key] = data
def __repr__(self) -> str:
return f'LineCol({self.line}, {self.col})'
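# Hedged example of the zero-based positions LineCol exposes (assumes data
# obtained from a round-trip load elsewhere; exact numbers depend on the input):
#   data = YAML().load('a:\n  b: 1\n')
#   data['a'].lc.line, data['a'].lc.col   # -> (1, 2): where the nested map starts
#   data['a'].lc.value('b')               # -> (1, 5): line/column of the value 1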
class CommentedBase:
@property
def ca(self):
# type: () -> Any
if not hasattr(self, Comment.attrib):
setattr(self, Comment.attrib, Comment())
return getattr(self, Comment.attrib)
def yaml_end_comment_extend(self, comment: Any, clear: bool = False) -> None:
if comment is None:
return
if clear or self.ca.end is None:
self.ca.end = []
self.ca.end.extend(comment)
def yaml_key_comment_extend(
self, key: Any, comment: Any, clear: bool = False
) -> None:
r = self.ca._items.setdefault(key, [None, None, None, None])
if clear or r[1] is None:
if comment[1] is not None:
assert isinstance(comment[1], list)
r[1] = comment[1]
else:
r[1].extend(comment[0])
r[0] = comment[0]
def yaml_value_comment_extend(
self, key: Any, comment: Any, clear: bool = False
) -> None:
r = self.ca._items.setdefault(key, [None, None, None, None])
if clear or r[3] is None:
if comment[1] is not None:
assert isinstance(comment[1], list)
r[3] = comment[1]
else:
r[3].extend(comment[0])
r[2] = comment[0]
def yaml_set_start_comment(self, comment: Any, indent: Any = 0) -> None:
"""overwrites any preceding comment lines on an object
        expects comment to be without `#` and possibly have multiple lines
"""
from ruyaml.error import CommentMark
from ruyaml.tokens import CommentToken
pre_comments = self._yaml_clear_pre_comment() # type: ignore
if comment[-1] == '\n':
comment = comment[:-1] # strip final newline if there
start_mark = CommentMark(indent)
for com in comment.split('\n'):
c = com.strip()
if len(c) > 0 and c[0] != '#':
com = '# ' + com
pre_comments.append(CommentToken(com + '\n', start_mark))
def yaml_set_comment_before_after_key(
self,
key: Any,
before: Any = None,
indent: Any = 0,
after: Any = None,
after_indent: Any = None,
) -> None:
"""
        expects comment (before/after) to be without `#` and possibly have multiple lines
"""
from ruyaml.error import CommentMark
from ruyaml.tokens import CommentToken
def comment_token(s: Any, mark: Any) -> Any:
# handle empty lines as having no comment
return CommentToken(('# ' if s else "") + s + '\n', mark)
if after_indent is None:
after_indent = indent + 2
if before and (len(before) > 1) and before[-1] == '\n':
before = before[:-1] # strip final newline if there
if after and after[-1] == '\n':
after = after[:-1] # strip final newline if there
start_mark = CommentMark(indent)
c = self.ca.items.setdefault(key, [None, [], None, None])
if before is not None:
if c[1] is None:
c[1] = []
if before == '\n':
c[1].append(comment_token("", start_mark)) # type: ignore
else:
for com in before.split('\n'):
c[1].append(comment_token(com, start_mark)) # type: ignore
if after:
start_mark = CommentMark(after_indent)
if c[3] is None:
c[3] = []
for com in after.split('\n'):
c[3].append(comment_token(com, start_mark)) # type: ignore
@property
def fa(self) -> Any:
"""format attribute
set_flow_style()/set_block_style()"""
if not hasattr(self, Format.attrib):
setattr(self, Format.attrib, Format())
return getattr(self, Format.attrib)
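    # Sketch of the format attribute in use (illustrative only):
    #   seq = CommentedSeq([1, 2, 3])
    #   seq.fa.set_flow_style()   # request '[1, 2, 3]' on dump
    #   seq.fa.flow_style()       # -> True once explicitly set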
def yaml_add_eol_comment(
self,
comment: Any,
key: Optional[Any] = NotNone,
column: Optional[Any] = None,
) -> None:
"""
there is a problem as eol comments should start with ' #'
(but at the beginning of the line the space doesn't have to be before
        the #). The column index is for the # mark.
"""
from ruyaml.error import CommentMark
from ruyaml.tokens import CommentToken
if column is None:
try:
column = self._yaml_get_column(key)
except AttributeError:
column = 0
if comment[0] != '#':
comment = '# ' + comment
if column is None:
if comment[0] == '#':
comment = ' ' + comment
column = 0
start_mark = CommentMark(column)
ct = [CommentToken(comment, start_mark), None]
self._yaml_add_eol_comment(ct, key=key)
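    # Combined usage sketch for the comment helpers above (illustrative only,
    # assuming `d` was produced by a round-trip load):
    #   d.yaml_set_start_comment('file header')                  # lines before the first key
    #   d.yaml_set_comment_before_after_key('b', before='section two')
    #   d.yaml_add_eol_comment('the answer', key='a')             # 'a: 1  # the answer'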
@property
def lc(self) -> Any:
if not hasattr(self, LineCol.attrib):
setattr(self, LineCol.attrib, LineCol())
return getattr(self, LineCol.attrib)
def _yaml_set_line_col(self, line: Any, col: Any) -> None:
self.lc.line = line
self.lc.col = col
def _yaml_set_kv_line_col(self, key: Any, data: Any) -> None:
self.lc.add_kv_line_col(key, data)
def _yaml_set_idx_line_col(self, key: Any, data: Any) -> None:
self.lc.add_idx_line_col(key, data)
@property
def anchor(self) -> Any:
if not hasattr(self, Anchor.attrib):
setattr(self, Anchor.attrib, Anchor())
return getattr(self, Anchor.attrib)
def yaml_anchor(self) -> Any:
if not hasattr(self, Anchor.attrib):
return None
return self.anchor
def yaml_set_anchor(self, value: Any, always_dump: bool = False) -> None:
self.anchor.value = value
self.anchor.always_dump = always_dump
@property
def tag(self) -> Any:
if not hasattr(self, Tag.attrib):
setattr(self, Tag.attrib, Tag())
return getattr(self, Tag.attrib)
def yaml_set_ctag(self, value: Tag) -> None:
setattr(self, Tag.attrib, value)
def copy_attributes(self, t: Any, memo: Any = None) -> Any:
"""
copies the YAML related attributes, not e.g. .values
returns target
"""
# fmt: off
for a in [Comment.attrib, Format.attrib, LineCol.attrib, Anchor.attrib,
Tag.attrib, merge_attrib]:
if hasattr(self, a):
if memo is not None:
                    setattr(t, a, copy.deepcopy(getattr(self, a), memo))
else:
setattr(t, a, getattr(self, a))
return t
# fmt: on
def _yaml_add_eol_comment(self, comment: Any, key: Any) -> None:
raise NotImplementedError
def _yaml_get_pre_comment(self) -> Any:
raise NotImplementedError
def _yaml_get_column(self, key: Any) -> Any:
raise NotImplementedError
class CommentedSeq(MutableSliceableSequence, list, CommentedBase): # type: ignore
__slots__ = (Comment.attrib, '_lst')
def __init__(self, *args: Any, **kw: Any) -> None:
list.__init__(self, *args, **kw)
def __getsingleitem__(self, idx: Any) -> Any:
return list.__getitem__(self, idx)
def __setsingleitem__(self, idx: Any, value: Any) -> None:
# try to preserve the scalarstring type if setting an existing key to a new value
if idx < len(self):
if (
isinstance(value, str)
and not isinstance(value, ScalarString)
and isinstance(self[idx], ScalarString)
):
value = type(self[idx])(value)
list.__setitem__(self, idx, value)
def __delsingleitem__(self, idx: Any = None) -> Any:
list.__delitem__(self, idx)
self.ca.items.pop(idx, None) # might not be there -> default value
for list_index in sorted(self.ca.items):
if list_index < idx:
continue
self.ca.items[list_index - 1] = self.ca.items.pop(list_index)
def __len__(self) -> int:
return list.__len__(self)
def insert(self, idx: Any, val: Any) -> None:
"""the comments after the insertion have to move forward"""
list.insert(self, idx, val)
for list_index in sorted(self.ca.items, reverse=True):
if list_index < idx:
break
self.ca.items[list_index + 1] = self.ca.items.pop(list_index)
def extend(self, val: Any) -> None:
list.extend(self, val)
def __eq__(self, other: Any) -> bool:
return list.__eq__(self, other)
def _yaml_add_comment(self, comment: Any, key: Optional[Any] = NotNone) -> None:
if key is not NotNone:
self.yaml_key_comment_extend(key, comment, clear=True)
else:
self.ca.comment = comment
def _yaml_add_eol_comment(self, comment: Any, key: Any) -> None:
self._yaml_add_comment(comment, key=key)
def _yaml_get_columnX(self, key: Any) -> Any:
return self.ca.items[key][0].start_mark.column
def _yaml_get_column(self, key: Any) -> Any:
column = None
sel_idx = None
pre, post = key - 1, key + 1
if pre in self.ca.items:
sel_idx = pre
elif post in self.ca.items:
sel_idx = post
else:
# self.ca.items is not ordered
for row_idx, _k1 in enumerate(self):
if row_idx >= key:
break
if row_idx not in self.ca.items:
continue
sel_idx = row_idx
if sel_idx is not None:
column = self._yaml_get_columnX(sel_idx)
return column
def _yaml_get_pre_comment(self) -> Any:
pre_comments: List[Any] = []
if self.ca.comment is None:
self.ca.comment = [None, pre_comments]
else:
pre_comments = self.ca.comment[1]
return pre_comments
def _yaml_clear_pre_comment(self) -> Any:
pre_comments: List[Any] = []
if self.ca.comment is None:
self.ca.comment = [None, pre_comments]
else:
self.ca.comment[1] = pre_comments
return pre_comments
def __deepcopy__(self, memo: Any) -> Any:
res = self.__class__()
memo[id(self)] = res
for k in self:
res.append(copy.deepcopy(k, memo))
self.copy_attributes(res, memo=memo)
return res
def __add__(self, other: Any) -> Any:
return list.__add__(self, other)
def sort(self, key: Any = None, reverse: bool = False) -> None:
if key is None:
tmp_lst = sorted(zip(self, range(len(self))), reverse=reverse)
list.__init__(self, [x[0] for x in tmp_lst])
else:
tmp_lst = sorted(
zip(map(key, list.__iter__(self)), range(len(self))),
reverse=reverse,
)
list.__init__(self, [list.__getitem__(self, x[1]) for x in tmp_lst])
itm = self.ca.items
self.ca._items = {}
for idx, x in enumerate(tmp_lst):
old_index = x[1]
if old_index in itm:
self.ca.items[idx] = itm[old_index]
def __repr__(self) -> Any:
return list.__repr__(self)
class CommentedKeySeq(tuple, CommentedBase): # type: ignore
"""This primarily exists to be able to roundtrip keys that are sequences"""
def _yaml_add_comment(self, comment: Any, key: Optional[Any] = NotNone) -> None:
if key is not NotNone:
self.yaml_key_comment_extend(key, comment)
else:
self.ca.comment = comment
def _yaml_add_eol_comment(self, comment: Any, key: Any) -> None:
self._yaml_add_comment(comment, key=key)
def _yaml_get_columnX(self, key: Any) -> Any:
return self.ca.items[key][0].start_mark.column
def _yaml_get_column(self, key: Any) -> Any:
column = None
sel_idx = None
pre, post = key - 1, key + 1
if pre in self.ca.items:
sel_idx = pre
elif post in self.ca.items:
sel_idx = post
else:
# self.ca.items is not ordered
for row_idx, _k1 in enumerate(self):
if row_idx >= key:
break
if row_idx not in self.ca.items:
continue
sel_idx = row_idx
if sel_idx is not None:
column = self._yaml_get_columnX(sel_idx)
return column
def _yaml_get_pre_comment(self) -> Any:
pre_comments: List[Any] = []
if self.ca.comment is None:
self.ca.comment = [None, pre_comments]
else:
pre_comments = self.ca.comment[1]
return pre_comments
def _yaml_clear_pre_comment(self) -> Any:
pre_comments: List[Any] = []
if self.ca.comment is None:
self.ca.comment = [None, pre_comments]
else:
self.ca.comment[1] = pre_comments
return pre_comments
class CommentedMapView(Sized):
__slots__ = ('_mapping',)
def __init__(self, mapping: Any) -> None:
self._mapping = mapping
def __len__(self) -> int:
count = len(self._mapping)
return count
class CommentedMapKeysView(CommentedMapView, Set): # type: ignore
__slots__ = ()
@classmethod
def _from_iterable(self, it: Any) -> Any:
return set(it)
def __contains__(self, key: Any) -> Any:
return key in self._mapping
def __iter__(self) -> Any:
# yield from self._mapping # not in py27, pypy
# for x in self._mapping._keys():
for x in self._mapping:
yield x
class CommentedMapItemsView(CommentedMapView, Set): # type: ignore
__slots__ = ()
@classmethod
def _from_iterable(self, it: Any) -> Any:
return set(it)
def __contains__(self, item: Any) -> Any:
key, value = item
try:
v = self._mapping[key]
except KeyError:
return False
else:
return v == value
def __iter__(self) -> Any:
for key in self._mapping._keys():
yield (key, self._mapping[key])
class CommentedMapValuesView(CommentedMapView):
__slots__ = ()
def __contains__(self, value: Any) -> Any:
for key in self._mapping:
if value == self._mapping[key]:
return True
return False
def __iter__(self) -> Any:
for key in self._mapping._keys():
yield self._mapping[key]
class CommentedMap(ordereddict, CommentedBase):
__slots__ = (Comment.attrib, '_ok', '_ref')
def __init__(self, *args: Any, **kw: Any) -> None:
self._ok: MutableSet[Any] = set() # own keys
self._ref: List[CommentedMap] = []
ordereddict.__init__(self, *args, **kw)
def _yaml_add_comment(
self,
comment: Any,
key: Optional[Any] = NotNone,
value: Optional[Any] = NotNone,
) -> None:
"""values is set to key to indicate a value attachment of comment"""
if key is not NotNone:
self.yaml_key_comment_extend(key, comment)
return
if value is not NotNone:
self.yaml_value_comment_extend(value, comment)
else:
self.ca.comment = comment
def _yaml_add_eol_comment(self, comment: Any, key: Any) -> None:
"""add on the value line, with value specified by the key"""
self._yaml_add_comment(comment, value=key)
def _yaml_get_columnX(self, key: Any) -> Any:
return self.ca.items[key][2].start_mark.column
def _yaml_get_column(self, key: Any) -> Any:
column = None
sel_idx = None
pre, post, last = None, None, None
for x in self:
if pre is not None and x != key:
post = x
break
if x == key:
pre = last
last = x
if pre in self.ca.items:
sel_idx = pre
elif post in self.ca.items:
sel_idx = post
else:
# self.ca.items is not ordered
for k1 in self:
if k1 >= key:
break
if k1 not in self.ca.items:
continue
sel_idx = k1
if sel_idx is not None:
column = self._yaml_get_columnX(sel_idx)
return column
def _yaml_get_pre_comment(self) -> Any:
pre_comments: List[Any] = []
if self.ca.comment is None:
self.ca.comment = [None, pre_comments]
else:
pre_comments = self.ca.comment[1]
return pre_comments
def _yaml_clear_pre_comment(self) -> Any:
pre_comments: List[Any] = []
if self.ca.comment is None:
self.ca.comment = [None, pre_comments]
else:
self.ca.comment[1] = pre_comments
return pre_comments
def update(self, *vals: Any, **kw: Any) -> None:
try:
ordereddict.update(self, *vals, **kw)
except TypeError:
# probably a dict that is used
for x in vals[0]:
self[x] = vals[0][x]
if vals:
try:
self._ok.update(vals[0].keys()) # type: ignore
except AttributeError:
# assume one argument that is a list/tuple of two element lists/tuples
for x in vals[0]:
self._ok.add(x[0])
if kw:
            self._ok.update(kw.keys())  # type: ignore
def insert(
self, pos: Any, key: Any, value: Any, comment: Optional[Any] = None
) -> None:
"""insert key value into given position, as defined by source YAML
attach comment if provided
"""
if key in self._ok:
del self[key]
keys = [k for k in self.keys() if k in self._ok]
try:
merge_value = getattr(self, merge_attrib)
merge_pos = merge_value.merge_pos
except (AttributeError, IndexError):
merge_pos = -1
if merge_pos >= 0:
if merge_pos >= pos:
# getattr(self, merge_attrib)[0] = (merge_pos + 1, ma0[1])
merge_value.merge_pos += 1
idx_min = pos
idx_max = len(self._ok)
else:
idx_min = pos - 1
idx_max = len(self._ok)
else:
idx_min = pos
idx_max = len(self._ok)
self[key] = value # at the end
# print(f'{idx_min=} {idx_max=}')
for idx in range(idx_min, idx_max):
self.move_to_end(keys[idx])
self._ok.add(key)
# for referer in self._ref:
# for keytmp in keys:
# referer.update_key_value(keytmp)
if comment is not None:
self.yaml_add_eol_comment(comment, key=key)
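    # Usage sketch (illustrative; assumes `d` is a round-trip loaded mapping):
    #   d = YAML().load('a: 1\nc: 3\n')
    #   d.insert(1, 'b', 2, comment='added later')   # dumps in a, b, c order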
def mlget(self, key: Any, default: Any = None, list_ok: Any = False) -> Any:
"""multi-level get that expects dicts within dicts"""
if not isinstance(key, list):
return self.get(key, default)
# assume that the key is a list of recursively accessible dicts
def get_one_level(key_list: Any, level: Any, d: Any) -> Any:
if not list_ok:
assert isinstance(d, dict)
if level >= len(key_list):
if level > len(key_list):
raise IndexError
return d[key_list[level - 1]]
return get_one_level(key_list, level + 1, d[key_list[level - 1]])
try:
return get_one_level(key, 1, self)
except KeyError:
return default
except (TypeError, IndexError):
if not list_ok:
raise
return default
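    # Illustrative call (assumes the keys exist at every level):
    #   CommentedMap(a=dict(b=dict(c=1))).mlget(['a', 'b', 'c'])   # -> 1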
def __getitem__(self, key: Any) -> Any:
try:
return ordereddict.__getitem__(self, key)
except KeyError:
for merged in getattr(self, merge_attrib, []):
# if isinstance(merged, tuple):
# if key in merged[1]:
# return merged[1][key]
# else:
if True:
if key in merged:
return merged[key]
raise
def __setitem__(self, key: Any, value: Any) -> None:
# try to preserve the scalarstring type if setting an existing key to a new value
if key in self:
if (
isinstance(value, str)
and not isinstance(value, ScalarString)
and isinstance(self[key], ScalarString)
):
value = type(self[key])(value)
ordereddict.__setitem__(self, key, value)
self._ok.add(key)
def _unmerged_contains(self, key: Any) -> Any:
if key in self._ok:
return True
return None
def __contains__(self, key: Any) -> bool:
return bool(ordereddict.__contains__(self, key))
def get(self, key: Any, default: Any = None) -> Any:
try:
return self.__getitem__(key)
except: # NOQA
return default
def __repr__(self) -> Any:
res = '{'
sep = ''
for k, v in self.items():
res += f'{sep}{k!r}: {v!r}'
if not sep:
sep = ', '
res += '}'
return res
def non_merged_items(self) -> Any:
for x in ordereddict.__iter__(self):
if x in self._ok:
yield x, ordereddict.__getitem__(self, x)
def __delitem__(self, key: Any) -> None:
# for merged in getattr(self, merge_attrib, []):
# if key in merged[1]:
# value = merged[1][key]
# break
# else:
# # not found in merged in stuff
# ordereddict.__delitem__(self, key)
# for referer in self._ref:
# referer.update=_key_value(key)
# return
#
# ordereddict.__setitem__(self, key, value) # merge might have different value
# self._ok.discard(key)
try:
merge_value = getattr(self, merge_attrib)
merge_pos = merge_value.merge_pos
except AttributeError:
merge_pos = -1
if merge_pos >= 0:
try:
pos = list(ordereddict.keys(self)).index(key)
# the merge is not in the dict, so don't use >=
if merge_pos > pos:
merge_value.merge_pos -= 1
except ValueError:
pass # let the removal of the key throw a "normal" error
self._ok.discard(key)
ordereddict.__delitem__(self, key)
for referer in self._ref:
referer.update_key_value(key)
def __iter__(self) -> Any:
for x in ordereddict.__iter__(self):
yield x
def pop(self, key: Any, default: Any = NotNone) -> Any:
try:
result = self[key]
except KeyError:
if default is NotNone:
raise
return default
del self[key]
return result
def _keys(self) -> Any:
for x in ordereddict.__iter__(self):
yield x
def __len__(self) -> int:
return int(ordereddict.__len__(self))
def __eq__(self, other: Any) -> bool:
return bool(dict(self) == other)
def keys(self) -> Any:
return CommentedMapKeysView(self)
def values(self) -> Any:
return CommentedMapValuesView(self)
def _items(self) -> Any:
for x in ordereddict.__iter__(self):
yield x, ordereddict.__getitem__(self, x)
def items(self) -> Any:
return CommentedMapItemsView(self)
@property
def merge(self) -> Any:
if not hasattr(self, merge_attrib):
setattr(self, merge_attrib, [])
return getattr(self, merge_attrib)
def copy(self) -> Any:
x = type(self)() # update doesn't work
for k, v in self._items():
x[k] = v
self.copy_attributes(x)
return x
def add_referent(self, cm: Any) -> None:
if cm not in self._ref:
self._ref.append(cm)
def add_yaml_merge(self, value: Any) -> None:
assert not hasattr(self, merge_attrib)
setattr(self, merge_attrib, value)
for v in value:
# if isinstance(v, tuple):
# assert len(v) == 2
# # print('vvv', v, type(v[1]))
# v[1].add_referent(self)
# for k1, v1 in v[1].items():
# if ordereddict.__contains__(self, k1):
# continue
# ordereddict.__setitem__(self, k1, v1)
# else:
if True:
v.add_referent(self)
for k1, v1 in v.items():
if ordereddict.__contains__(self, k1):
continue
ordereddict.__setitem__(self, k1, v1)
def update_key_value(self, key: Any) -> None:
if key in self._ok:
return
for v in self.merge:
if key in v[1]:
ordereddict.__setitem__(self, key, v[1][key])
return
ordereddict.__delitem__(self, key)
def __deepcopy__(self, memo: Any) -> Any:
res = self.__class__()
memo[id(self)] = res
for k in self:
res[k] = copy.deepcopy(self[k], memo)
self.copy_attributes(res, memo=memo)
return res
# based on brownie mappings
@classmethod # type: ignore
def raise_immutable(cls: Any, *args: Any, **kwargs: Any) -> None:
raise TypeError(f'{cls.__name__} objects are immutable')
class CommentedKeyMap(CommentedBase, Mapping): # type: ignore
__slots__ = Comment.attrib, '_od'
"""This primarily exists to be able to roundtrip keys that are mappings"""
def __init__(self, *args: Any, **kw: Any) -> None:
if hasattr(self, '_od'):
raise_immutable(self)
try:
self._od = ordereddict(*args, **kw)
except TypeError:
raise
__delitem__ = (
__setitem__
) = clear = pop = popitem = setdefault = update = raise_immutable
# need to implement __getitem__, __iter__ and __len__
def __getitem__(self, index: Any) -> Any:
return self._od[index]
def __iter__(self) -> Iterator[Any]:
for x in self._od.__iter__():
yield x
def __len__(self) -> int:
return len(self._od)
def __hash__(self) -> Any:
return hash(tuple(self.items()))
def __repr__(self) -> Any:
if not hasattr(self, merge_attrib):
return self._od.__repr__()
return 'ordereddict(' + repr(list(self._od.items())) + ')'
@classmethod
    def fromkeys(cls, keys: Any, v: Any = None) -> Any:
return CommentedKeyMap(dict.fromkeys(keys, v))
def _yaml_add_comment(self, comment: Any, key: Optional[Any] = NotNone) -> None:
if key is not NotNone:
self.yaml_key_comment_extend(key, comment)
else:
self.ca.comment = comment
def _yaml_add_eol_comment(self, comment: Any, key: Any) -> None:
self._yaml_add_comment(comment, key=key)
def _yaml_get_columnX(self, key: Any) -> Any:
return self.ca.items[key][0].start_mark.column
def _yaml_get_column(self, key: Any) -> Any:
column = None
sel_idx = None
pre, post = key - 1, key + 1
if pre in self.ca.items:
sel_idx = pre
elif post in self.ca.items:
sel_idx = post
else:
# self.ca.items is not ordered
for row_idx, _k1 in enumerate(self):
if row_idx >= key:
break
if row_idx not in self.ca.items:
continue
sel_idx = row_idx
if sel_idx is not None:
column = self._yaml_get_columnX(sel_idx)
return column
def _yaml_get_pre_comment(self) -> Any:
pre_comments: List[Any] = []
if self.ca.comment is None:
self.ca.comment = [None, pre_comments]
else:
self.ca.comment[1] = pre_comments
return pre_comments
class CommentedOrderedMap(CommentedMap):
__slots__ = (Comment.attrib,)
class CommentedSet(MutableSet, CommentedBase): # type: ignore # NOQA
__slots__ = Comment.attrib, 'odict'
def __init__(self, values: Any = None) -> None:
self.odict = ordereddict()
MutableSet.__init__(self)
if values is not None:
self |= values
def _yaml_add_comment(
self,
comment: Any,
key: Optional[Any] = NotNone,
value: Optional[Any] = NotNone,
) -> None:
"""values is set to key to indicate a value attachment of comment"""
if key is not NotNone:
self.yaml_key_comment_extend(key, comment)
return
if value is not NotNone:
self.yaml_value_comment_extend(value, comment)
else:
self.ca.comment = comment
def _yaml_add_eol_comment(self, comment: Any, key: Any) -> None:
"""add on the value line, with value specified by the key"""
self._yaml_add_comment(comment, value=key)
def add(self, value: Any) -> None:
"""Add an element."""
self.odict[value] = None
def discard(self, value: Any) -> None:
"""Remove an element. Do not raise an exception if absent."""
del self.odict[value]
def __contains__(self, x: Any) -> Any:
return x in self.odict
def __iter__(self) -> Any:
for x in self.odict:
yield x
def __len__(self) -> int:
return len(self.odict)
def __repr__(self) -> str:
return f'set({self.odict.keys()!r})'
class TaggedScalar(CommentedBase):
# the value and style attributes are set during roundtrip construction
def __init__(self, value: Any = None, style: Any = None, tag: Any = None) -> None:
self.value = value
self.style = style
if tag is not None:
if isinstance(tag, str):
tag = Tag(suffix=tag)
self.yaml_set_ctag(tag)
def __str__(self) -> Any:
return self.value
def count(
self, s: str, start: Optional[int] = None, end: Optional[int] = None
) -> Any:
return self.value.count(s, start, end)
def __getitem__(self, pos: int) -> Any:
return self.value[pos]
def __repr__(self) -> str:
return f'TaggedScalar(value={self.value!r}, style={self.style!r}, tag={self.tag!r})'
def dump_comments(
d: Any, name: str = "", sep: str = '.', out: Any = sys.stdout
) -> None:
"""
recursively dump comments, all but the toplevel preceded by the path
in dotted form x.0.a
"""
if isinstance(d, dict) and hasattr(d, 'ca'):
if name:
out.write(f'{name} {type(d)}\n')
out.write(f'{d.ca!r}\n')
for k in d:
dump_comments(
d[k], name=(name + sep + str(k)) if name else k, sep=sep, out=out
)
elif isinstance(d, list) and hasattr(d, 'ca'):
if name:
out.write(f'{name} {type(d)}\n')
out.write(f'{d.ca!r}\n')
for idx, k in enumerate(d):
dump_comments(
k,
name=(name + sep + str(idx)) if name else str(idx),
sep=sep,
out=out,
)
python-ruyaml-0.92.1/lib/ruyaml/compat.py 0000664 0000000 0000000 00000015470 15056754172 0020372 0 ustar 00root root 0000000 0000000 from __future__ import annotations
import collections.abc
import io
import os
import sys
from abc import abstractmethod
from typing import Any, Dict, List, Optional, Tuple, Union
from ruyaml.docinfo import Version # NOQA
# partially from package six by Benjamin Peterson
# fmt: off
if False: # MYPY
from typing import Optional # NOQA
from typing import IO, Any, BinaryIO, Dict, List, Text, Tuple, Union # NOQA
try:
from typing import (
SupportsIndex as SupportsIndex, # in order to reexport for mypy
)
except ImportError:
SupportsIndex = int # type: ignore
StreamType = Any
StreamTextType = StreamType
    VersionType = Union[str, Tuple[int, int], List[int], Version, None]
# fmt: on
_DEFAULT_YAML_VERSION = (1, 2)
try:
from collections import OrderedDict
except ImportError:
from ordereddict import OrderedDict # type: ignore
# to get the right name import ... as ordereddict doesn't do that
class ordereddict(OrderedDict): # type: ignore
if not hasattr(OrderedDict, 'insert'):
def insert(self, pos: int, key: Any, value: Any) -> None:
if pos >= len(self):
self[key] = value
return
od = ordereddict()
od.update(self)
for k in od:
del self[k]
for index, old_key in enumerate(od):
if pos == index:
self[key] = value
self[old_key] = od[old_key]
StringIO = io.StringIO
BytesIO = io.BytesIO
builtins_module = 'builtins'
def with_metaclass(meta: Any, *bases: Any) -> Any:
"""Create a base class with a metaclass."""
return meta('NewBase', bases, {})
DBG_TOKEN = 1
DBG_EVENT = 2
DBG_NODE = 4
_debug: Optional[int] = None
if 'RUAMELDEBUG' in os.environ:
_debugx = os.environ.get('RUAMELDEBUG')
if _debugx is None:
_debug = 0
else:
_debug = int(_debugx)
if bool(_debug):
class ObjectCounter:
def __init__(self) -> None:
self.map: Dict[Any, Any] = {}
def __call__(self, k: Any) -> None:
self.map[k] = self.map.get(k, 0) + 1
def dump(self) -> None:
for k in sorted(self.map):
                sys.stdout.write(f'{k} -> {self.map[k]}\n')
object_counter = ObjectCounter()
# used from yaml util when testing
def dbg(val: Any = None) -> Any:
debug = _debug
if debug is None:
# set to true or false
_debugx = os.environ.get('YAMLDEBUG')
if _debugx is None:
debug = 0
else:
debug = int(_debugx)
if val is None:
return debug
return debug & val
class Nprint:
def __init__(self, file_name: Any = None) -> None:
self._max_print: Any = None
self._count: Any = None
self._file_name = file_name
def __call__(self, *args: Any, **kw: Any) -> None:
if not bool(_debug):
return
import traceback
out = sys.stdout if self._file_name is None else open(self._file_name, 'a')
dbgprint = print # to fool checking for print statements by dv utility
kw1 = kw.copy()
kw1['file'] = out
dbgprint(*args, **kw1)
out.flush()
if self._max_print is not None:
if self._count is None:
self._count = self._max_print
self._count -= 1
if self._count == 0:
dbgprint('forced exit\n')
traceback.print_stack()
out.flush()
sys.exit(0)
if self._file_name:
out.close()
def set_max_print(self, i: int) -> None:
self._max_print = i
self._count = None
def fp(self, mode: str = 'a') -> Any:
out = sys.stdout if self._file_name is None else open(self._file_name, mode)
return out
nprint = Nprint()
nprintf = Nprint('/var/tmp/ruyaml.log')
# char checkers following production rules
def check_namespace_char(ch: Any) -> bool:
if '\x21' <= ch <= '\x7E': # ! to ~
return True
if '\xA0' <= ch <= '\uD7FF':
return True
if ('\uE000' <= ch <= '\uFFFD') and ch != '\uFEFF': # excl. byte order mark
return True
if '\U00010000' <= ch <= '\U0010FFFF':
return True
return False
def check_anchorname_char(ch: Any) -> bool:
if ch in ',[]{}':
return False
return check_namespace_char(ch)
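# Hedged examples of the production-rule checks above:
#   check_namespace_char('a')        # True: printable ASCII
#   check_namespace_char('\uFEFF')   # False: the byte order mark is excluded
#   check_anchorname_char('[')       # False: flow indicators may not appear in anchor names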
def version_tnf(t1: Any, t2: Any = None) -> Any:
"""
    return True if ruyaml version_info < t1; None if t2 is specified and version_info < t2; otherwise False
"""
from ruyaml import version_info # NOQA
if version_info < t1:
return True
if t2 is not None and version_info < t2:
return None
return False
class MutableSliceableSequence(collections.abc.MutableSequence): # type: ignore
__slots__ = ()
def __getitem__(self, index: Any) -> Any:
if not isinstance(index, slice):
return self.__getsingleitem__(index)
return type(self)([self[i] for i in range(*index.indices(len(self)))]) # type: ignore
def __setitem__(self, index: Any, value: Any) -> None:
if not isinstance(index, slice):
return self.__setsingleitem__(index, value)
assert iter(value)
# nprint(index.start, index.stop, index.step, index.indices(len(self)))
if index.step is None:
del self[index.start : index.stop]
for elem in reversed(value):
self.insert(0 if index.start is None else index.start, elem)
else:
range_parms = index.indices(len(self))
nr_assigned_items = (range_parms[1] - range_parms[0] - 1) // range_parms[
2
] + 1
# need to test before changing, in case TypeError is caught
if nr_assigned_items < len(value):
raise TypeError(
f'too many elements in value {nr_assigned_items} < {len(value)}',
)
elif nr_assigned_items > len(value):
raise TypeError(
f'not enough elements in value {nr_assigned_items} > {len(value)}',
)
for idx, i in enumerate(range(*range_parms)):
self[i] = value[idx]
def __delitem__(self, index: Any) -> None:
if not isinstance(index, slice):
return self.__delsingleitem__(index)
# nprint(index.start, index.stop, index.step, index.indices(len(self)))
for i in reversed(range(*index.indices(len(self)))):
del self[i]
@abstractmethod
def __getsingleitem__(self, index: Any) -> Any:
raise IndexError
@abstractmethod
def __setsingleitem__(self, index: Any, value: Any) -> None:
raise IndexError
@abstractmethod
def __delsingleitem__(self, index: Any) -> None:
raise IndexError
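# Sketch of the slice handling this mixin provides (illustrative; assumes a
# CommentedSeq, which subclasses it, built elsewhere; subclasses only need the
# three *single-item* hooks above):
#   cs = CommentedSeq([0, 1, 2, 3, 4])
#   cs[1:3]            # -> new sequence [1, 2], built item by item
#   cs[1:3] = [9, 9]   # plain slice: delete the range, then insert in order
#   del cs[::2]        # extended slice: items removed back to front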
python-ruyaml-0.92.1/lib/ruyaml/composer.py 0000664 0000000 0000000 00000020233 15056754172 0020727 0 ustar 00root root 0000000 0000000 from __future__ import annotations
import warnings
from typing import Any, Dict
from ruyaml.compat import nprint, nprintf # NOQA
from ruyaml.error import MarkedYAMLError, ReusedAnchorWarning
from ruyaml.events import (
AliasEvent,
MappingStartEvent,
MappingEndEvent,
ScalarEvent,
SequenceStartEvent,
SequenceEndEvent,
StreamStartEvent,
StreamEndEvent,
)
from ruyaml.nodes import MappingNode, ScalarNode, SequenceNode
if False: # MYPY
from typing import Any, Dict, List, Optional # NOQA
__all__ = ['Composer', 'ComposerError']
class ComposerError(MarkedYAMLError):
pass
class Composer:
def __init__(self, loader: Any = None) -> None:
self.loader = loader
if self.loader is not None and getattr(self.loader, '_composer', None) is None:
self.loader._composer = self
self.anchors: Dict[Any, Any] = {}
self.warn_double_anchors = True
@property
def parser(self) -> Any:
if hasattr(self.loader, 'typ'):
self.loader.parser # type: ignore
return self.loader._parser # type: ignore
@property
def resolver(self) -> Any:
# assert self.loader._resolver is not None
if hasattr(self.loader, 'typ'):
self.loader.resolver # type: ignore
return self.loader._resolver # type: ignore
def check_node(self) -> Any:
# Drop the STREAM-START event.
if self.parser.check_event(StreamStartEvent):
self.parser.get_event()
        # Check whether there are more documents available.
return not self.parser.check_event(StreamEndEvent)
def get_node(self) -> Any:
# Get the root node of the next document.
if not self.parser.check_event(StreamEndEvent):
return self.compose_document()
def get_single_node(self) -> Any:
# Drop the STREAM-START event.
self.parser.get_event()
# Compose a document if the stream is not empty.
document: Any = None
if not self.parser.check_event(StreamEndEvent):
document = self.compose_document()
# Ensure that the stream contains no more documents.
if not self.parser.check_event(StreamEndEvent):
event = self.parser.get_event()
raise ComposerError(
'expected a single document in the stream',
document.start_mark,
'but found another document',
event.start_mark,
)
# Drop the STREAM-END event.
self.parser.get_event()
return document
def compose_document(self: Any) -> Any:
self.anchors = {}
# Drop the DOCUMENT-START event.
self.parser.get_event()
# Compose the root node.
node = self.compose_node(None, None)
# Drop the DOCUMENT-END event.
self.parser.get_event()
return node
def return_alias(self, a: Any) -> Any:
return a
def compose_node(self, parent: Any, index: Any) -> Any:
if self.parser.check_event(AliasEvent):
event = self.parser.get_event()
alias = event.anchor
if alias not in self.anchors:
raise ComposerError(
None,
None,
f'found undefined alias {alias!r}',
event.start_mark,
)
return self.return_alias(self.anchors[alias])
event = self.parser.peek_event()
anchor = event.anchor
if anchor is not None: # have an anchor
if self.warn_double_anchors and anchor in self.anchors:
ws = (
f'\nfound duplicate anchor {anchor!r}\n'
f'first occurrence {self.anchors[anchor].start_mark}\n'
f'second occurrence {event.start_mark}'
)
warnings.warn(ws, ReusedAnchorWarning, stacklevel=2)
self.resolver.descend_resolver(parent, index)
if self.parser.check_event(ScalarEvent):
node = self.compose_scalar_node(anchor)
elif self.parser.check_event(SequenceStartEvent):
node = self.compose_sequence_node(anchor)
elif self.parser.check_event(MappingStartEvent):
node = self.compose_mapping_node(anchor)
self.resolver.ascend_resolver()
return node
def compose_scalar_node(self, anchor: Any) -> Any:
event = self.parser.get_event()
tag = event.ctag
if tag is None or str(tag) == '!':
tag = self.resolver.resolve(ScalarNode, event.value, event.implicit)
assert not isinstance(tag, str)
        # e.g. tag.yaml.org,2002:str
node = ScalarNode(
tag,
event.value,
event.start_mark,
event.end_mark,
style=event.style,
comment=event.comment,
anchor=anchor,
)
if anchor is not None:
self.anchors[anchor] = node
return node
def compose_sequence_node(self, anchor: Any) -> Any:
start_event = self.parser.get_event()
tag = start_event.ctag
if tag is None or str(tag) == '!':
tag = self.resolver.resolve(SequenceNode, None, start_event.implicit)
assert not isinstance(tag, str)
node = SequenceNode(
tag,
[],
start_event.start_mark,
None,
flow_style=start_event.flow_style,
comment=start_event.comment,
anchor=anchor,
)
if anchor is not None:
self.anchors[anchor] = node
index = 0
while not self.parser.check_event(SequenceEndEvent):
node.value.append(self.compose_node(node, index))
index += 1
end_event = self.parser.get_event()
if node.flow_style is True and end_event.comment is not None:
if node.comment is not None:
x = node.flow_style
nprint(
                    f'Warning: unexpected end_event comment in sequence node {x}\n',
                    ' if possible, please report an issue with reproducible data/code',
)
node.comment = end_event.comment
node.end_mark = end_event.end_mark
self.check_end_doc_comment(end_event, node)
return node
def compose_mapping_node(self, anchor: Any) -> Any:
start_event = self.parser.get_event()
tag = start_event.ctag
if tag is None or str(tag) == '!':
tag = self.resolver.resolve(MappingNode, None, start_event.implicit)
assert not isinstance(tag, str)
node = MappingNode(
tag,
[],
start_event.start_mark,
None,
flow_style=start_event.flow_style,
comment=start_event.comment,
anchor=anchor,
)
if anchor is not None:
self.anchors[anchor] = node
while not self.parser.check_event(MappingEndEvent):
# key_event = self.parser.peek_event()
item_key = self.compose_node(node, None)
# if item_key in node.value:
# raise ComposerError("while composing a mapping",
# start_event.start_mark,
# "found duplicate key", key_event.start_mark)
item_value = self.compose_node(node, item_key)
# node.value[item_key] = item_value
node.value.append((item_key, item_value))
end_event = self.parser.get_event()
if node.flow_style is True and end_event.comment is not None:
node.comment = end_event.comment
node.end_mark = end_event.end_mark
self.check_end_doc_comment(end_event, node)
return node
def check_end_doc_comment(self, end_event: Any, node: Any) -> None:
if end_event.comment and end_event.comment[1]:
# pre comments on an end_event, no following to move to
if node.comment is None:
node.comment = [None, None]
            assert not isinstance(node, ScalarNode)
# this is a post comment on a mapping node, add as third element
# in the list
node.comment.append(end_event.comment[1])
end_event.comment[1] = None
python-ruyaml-0.92.1/lib/ruyaml/configobjwalker.py 0000664 0000000 0000000 00000000557 15056754172 0022255 0 ustar 00root root 0000000 0000000 from __future__ import annotations
import warnings
from ruyaml.util import configobj_walker as new_configobj_walker
if False: # MYPY
from typing import Any
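# deprecation shim; new code should import from ruyaml.util instead, e.g.:
#   from ruyaml.util import configobj_walker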
def configobj_walker(cfg: Any) -> Any:
warnings.warn(
'configobj_walker has moved to ruyaml.util, please update your code',
stacklevel=2,
)
return new_configobj_walker(cfg)
python-ruyaml-0.92.1/lib/ruyaml/constructor.py 0000664 0000000 0000000 00000212336 15056754172 0021474 0 ustar 00root root 0000000 0000000 from __future__ import annotations
import binascii
import datetime
import sys
import types
import warnings
from collections.abc import Hashable, MutableMapping, MutableSequence # type: ignore
from datetime import timedelta as TimeDelta
# fmt: off
from ruyaml.error import (MarkedYAMLError, MarkedYAMLFutureWarning,
MantissaNoDotYAML1_1Warning)
from ruyaml.nodes import * # NOQA
from ruyaml.nodes import (SequenceNode, MappingNode, ScalarNode)
from ruyaml.compat import (builtins_module, # NOQA
nprint, nprintf, version_tnf)
from ruyaml.compat import ordereddict
from ruyaml.tag import Tag
from ruyaml.comments import * # NOQA
from ruyaml.comments import (CommentedMap, CommentedOrderedMap, CommentedSet,
CommentedKeySeq, CommentedSeq, TaggedScalar,
CommentedKeyMap,
C_KEY_PRE, C_KEY_EOL, C_KEY_POST,
C_VALUE_PRE, C_VALUE_EOL, C_VALUE_POST,
)
from ruyaml.scalarstring import (SingleQuotedScalarString, DoubleQuotedScalarString,
LiteralScalarString, FoldedScalarString,
PlainScalarString, ScalarString)
from ruyaml.scalarint import ScalarInt, BinaryInt, OctalInt, HexInt, HexCapsInt
from ruyaml.scalarfloat import ScalarFloat
from ruyaml.scalarbool import ScalarBoolean
from ruyaml.timestamp import TimeStamp
from ruyaml.util import create_timestamp, timestamp_regexp
if False: # MYPY
from typing import Any, Dict, Iterator, List, Optional, Set, Union # NOQA
__all__ = ['BaseConstructor', 'SafeConstructor', 'Constructor',
'ConstructorError', 'RoundTripConstructor']
# fmt: on
class ConstructorError(MarkedYAMLError):
pass
class DuplicateKeyFutureWarning(MarkedYAMLFutureWarning):
pass
DUPKEY_URL = 'https://yaml.dev/doc/ruyaml/api/#Duplicate_keys'
class DuplicateKeyError(MarkedYAMLError):
pass
class BaseConstructor:
yaml_constructors = {} # type: Dict[Any, Any]
yaml_multi_constructors = {} # type: Dict[Any, Any]
def __init__(
self, preserve_quotes: Optional[bool] = None, loader: Any = None
) -> None:
self.loader = loader
if (
self.loader is not None
and getattr(self.loader, '_constructor', None) is None
):
self.loader._constructor = self
self.loader = loader
self.yaml_base_dict_type = dict
self.yaml_base_list_type = list
self.constructed_objects: Dict[Any, Any] = {}
self.recursive_objects: Dict[Any, Any] = {}
self.state_generators: List[Any] = []
self.deep_construct = False
self._preserve_quotes = preserve_quotes
self.allow_duplicate_keys = False
@property
def composer(self) -> Any:
if hasattr(self.loader, 'typ'):
return self.loader.composer # type: ignore
try:
return self.loader._composer # type: ignore
except AttributeError:
sys.stdout.write(f'slt {type(self)}\n')
sys.stdout.write(f'slc {self.loader._composer}\n')
sys.stdout.write(f'{dir(self)}\n')
raise
@property
def resolver(self) -> Any:
if hasattr(self.loader, 'typ'):
return self.loader.resolver # type: ignore
return self.loader._resolver # type: ignore
@property
def scanner(self) -> Any:
# needed to get to the expanded comments
if hasattr(self.loader, 'typ'):
return self.loader.scanner # type: ignore
return self.loader._scanner # type: ignore
def check_data(self) -> Any:
        # Return True if there are more documents available.
return self.composer.check_node()
def get_data(self) -> Any:
# Construct and return the next document.
if self.composer.check_node():
return self.construct_document(self.composer.get_node())
def get_single_data(self) -> Any:
# Ensure that the stream contains a single document and construct it.
node = self.composer.get_single_node()
if node is not None:
return self.construct_document(node)
return None
def construct_document(self, node: Any) -> Any:
data = self.construct_object(node)
while bool(self.state_generators):
state_generators = self.state_generators
self.state_generators = []
for generator in state_generators:
for _dummy in generator:
pass
self.constructed_objects = {}
self.recursive_objects = {}
self.deep_construct = False
return data
def construct_object(self, node: Any, deep: bool = False) -> Any:
"""deep is True when creating an object/mapping recursively,
in that case want the underlying elements available during construction
"""
if node in self.constructed_objects:
return self.constructed_objects[node]
if deep:
old_deep = self.deep_construct
self.deep_construct = True
if node in self.recursive_objects:
return self.recursive_objects[node]
# raise ConstructorError(
# None, None, 'found unconstructable recursive node', node.start_mark
# )
self.recursive_objects[node] = None
data = self.construct_non_recursive_object(node)
self.constructed_objects[node] = data
del self.recursive_objects[node]
if deep:
self.deep_construct = old_deep
return data
def construct_non_recursive_object(
self, node: Any, tag: Optional[str] = None
) -> Any:
constructor: Any = None
tag_suffix = None
if tag is None:
tag = node.tag
if tag in self.yaml_constructors:
constructor = self.yaml_constructors[tag]
else:
for tag_prefix in self.yaml_multi_constructors:
if tag.startswith(tag_prefix):
tag_suffix = tag[len(tag_prefix) :]
constructor = self.yaml_multi_constructors[tag_prefix]
break
else:
if None in self.yaml_multi_constructors:
tag_suffix = tag
constructor = self.yaml_multi_constructors[None]
elif None in self.yaml_constructors:
constructor = self.yaml_constructors[None]
elif isinstance(node, ScalarNode):
constructor = self.__class__.construct_scalar
elif isinstance(node, SequenceNode):
constructor = self.__class__.construct_sequence
elif isinstance(node, MappingNode):
constructor = self.__class__.construct_mapping
if tag_suffix is None:
data = constructor(self, node)
else:
data = constructor(self, tag_suffix, node)
if isinstance(data, types.GeneratorType):
generator = data
data = next(generator)
if self.deep_construct:
for _dummy in generator:
pass
else:
self.state_generators.append(generator)
return data
def construct_scalar(self, node: Any) -> Any:
if not isinstance(node, ScalarNode):
raise ConstructorError(
None,
None,
f'expected a scalar node, but found {node.id!s}',
node.start_mark,
)
return node.value
def construct_sequence(self, node: Any, deep: bool = False) -> Any:
"""deep is True when creating an object/mapping recursively,
in that case want the underlying elements available during construction
"""
if not isinstance(node, SequenceNode):
raise ConstructorError(
None,
None,
f'expected a sequence node, but found {node.id!s}',
node.start_mark,
)
return [self.construct_object(child, deep=deep) for child in node.value]
def construct_mapping(self, node: Any, deep: bool = False) -> Any:
"""deep is True when creating an object/mapping recursively,
in that case want the underlying elements available during construction
"""
if not isinstance(node, MappingNode):
raise ConstructorError(
None,
None,
f'expected a mapping node, but found {node.id!s}',
node.start_mark,
)
total_mapping = self.yaml_base_dict_type()
if getattr(node, 'merge', None) is not None:
todo = [(node.merge, False), (node.value, False)]
else:
todo = [(node.value, True)]
for values, check in todo:
mapping: Dict[Any, Any] = self.yaml_base_dict_type()
for key_node, value_node in values:
# keys can be list -> deep
key = self.construct_object(key_node, deep=True)
# lists are not hashable, but tuples are
if not isinstance(key, Hashable):
if isinstance(key, list):
key = tuple(key)
if not isinstance(key, Hashable):
raise ConstructorError(
'while constructing a mapping',
node.start_mark,
'found unhashable key',
key_node.start_mark,
)
value = self.construct_object(value_node, deep=deep)
if check:
if self.check_mapping_key(node, key_node, mapping, key, value):
mapping[key] = value
else:
mapping[key] = value
total_mapping.update(mapping)
return total_mapping
def check_mapping_key(
self,
node: Any,
key_node: Any,
mapping: Any,
key: Any,
value: Any,
) -> bool:
"""return True if key is unique"""
if key in mapping:
if not self.allow_duplicate_keys:
mk = mapping.get(key)
args = [
'while constructing a mapping',
node.start_mark,
f'found duplicate key "{key}" with value "{value}" '
f'(original value: "{mk}")',
key_node.start_mark,
f"""
To suppress this check see:
{DUPKEY_URL}
""",
"""\
Duplicate keys will become an error in future releases, and are errors
by default when using the new API.
""",
]
if self.allow_duplicate_keys is None:
warnings.warn(DuplicateKeyFutureWarning(*args), stacklevel=1)
else:
raise DuplicateKeyError(*args)
return False
return True
def check_set_key(
self: Any, node: Any, key_node: Any, setting: Any, key: Any
) -> None:
if key in setting:
if not self.allow_duplicate_keys:
args = [
'while constructing a set',
node.start_mark,
f'found duplicate key "{key}"',
key_node.start_mark,
f"""
To suppress this check see:
{DUPKEY_URL}
""",
"""\
Duplicate keys will become an error in future releases, and are errors
by default when using the new API.
""",
]
if self.allow_duplicate_keys is None:
warnings.warn(DuplicateKeyFutureWarning(*args), stacklevel=1)
else:
raise DuplicateKeyError(*args)
def construct_pairs(self, node: Any, deep: bool = False) -> Any:
if not isinstance(node, MappingNode):
raise ConstructorError(
None,
None,
f'expected a mapping node, but found {node.id!s}',
node.start_mark,
)
pairs = []
for key_node, value_node in node.value:
key = self.construct_object(key_node, deep=deep)
value = self.construct_object(value_node, deep=deep)
pairs.append((key, value))
return pairs
    # ToDo: putting stuff on the class makes it global; consider making this work on an
    # instance variable once the load() function is dropped.
@classmethod
def add_constructor(cls, tag: Any, constructor: Any) -> Any:
if isinstance(tag, Tag):
tag = str(tag)
if 'yaml_constructors' not in cls.__dict__:
cls.yaml_constructors = cls.yaml_constructors.copy()
ret_val = cls.yaml_constructors.get(tag, None)
cls.yaml_constructors[tag] = constructor
return ret_val
@classmethod
def add_multi_constructor(cls, tag_prefix: Any, multi_constructor: Any) -> None:
if 'yaml_multi_constructors' not in cls.__dict__:
cls.yaml_multi_constructors = cls.yaml_multi_constructors.copy()
cls.yaml_multi_constructors[tag_prefix] = multi_constructor
@classmethod
def add_default_constructor(
cls,
tag: str,
method: Any = None,
tag_base: str = 'tag:yaml.org,2002:',
) -> None:
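        # convenience helper: e.g. add_default_constructor('bool') registers
        # cls.construct_yaml_bool for the tag 'tag:yaml.org,2002:bool'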
if not tag.startswith('tag:'):
if method is None:
method = 'construct_yaml_' + tag
tag = tag_base + tag
cls.add_constructor(tag, getattr(cls, method))
class SafeConstructor(BaseConstructor):
def construct_scalar(self, node: Any) -> Any:
if isinstance(node, MappingNode):
for key_node, value_node in node.value:
if key_node.tag == 'tag:yaml.org,2002:value':
return self.construct_scalar(value_node)
return BaseConstructor.construct_scalar(self, node)
def flatten_mapping(self, node: Any) -> Any: # SafeConstructor
"""
This implements the merge key feature http://yaml.org/type/merge.html
by inserting keys from the merge dict/list of dicts if not yet
available in this node
"""
merge: List[Any] = []
index = 0
while index < len(node.value):
key_node, value_node = node.value[index]
if key_node.tag == 'tag:yaml.org,2002:merge':
if merge: # double << key
if self.allow_duplicate_keys:
del node.value[index]
index += 1
continue
args = [
'while constructing a mapping',
node.start_mark,
'found duplicate merge key "<<"',
key_node.start_mark,
"""\
Duplicate merge keys are never allowed, not even when
`.allow_duplicate_keys` is set to True
""",
]
raise DuplicateKeyError(*args)
del node.value[index]
if isinstance(value_node, MappingNode):
self.flatten_mapping(value_node)
merge.extend(value_node.value)
elif isinstance(value_node, SequenceNode):
submerge = []
for subnode in value_node.value:
if not isinstance(subnode, MappingNode):
raise ConstructorError(
'while constructing a mapping',
node.start_mark,
f'expected a mapping for merging, but found {subnode.id!s}',
subnode.start_mark,
)
self.flatten_mapping(subnode)
submerge.append(subnode.value)
submerge.reverse()
for value in submerge:
merge.extend(value)
else:
raise ConstructorError(
'while constructing a mapping',
node.start_mark,
'expected a mapping or list of mappings for merging, '
f'but found {value_node.id!s}',
value_node.start_mark,
)
elif key_node.tag == 'tag:yaml.org,2002:value':
key_node.tag = 'tag:yaml.org,2002:str'
index += 1
else:
index += 1
if bool(merge):
node.merge = (
                merge  # separate merge keys to be able to update without duplicates
)
node.value = merge + node.value
def construct_mapping(self, node: Any, deep: bool = False) -> Any:
"""deep is True when creating an object/mapping recursively,
in that case want the underlying elements available during construction
"""
if isinstance(node, MappingNode):
self.flatten_mapping(node)
return BaseConstructor.construct_mapping(self, node, deep=deep)
def construct_yaml_null(self, node: Any) -> Any:
self.construct_scalar(node)
return None
# YAML 1.2 spec doesn't mention yes/no etc any more, 1.1 does
bool_values = {
'yes': True,
'no': False,
'y': True,
'n': False,
'true': True,
'false': False,
'on': True,
'off': False,
}
def construct_yaml_bool(self, node: Any) -> bool:
value = self.construct_scalar(node)
return self.bool_values[value.lower()]
def construct_yaml_int(self, node: Any) -> int:
value_s = self.construct_scalar(node)
value_s = value_s.replace('_', "")
sign = +1
if value_s[0] == '-':
sign = -1
if value_s[0] in '+-':
value_s = value_s[1:]
if value_s == '0':
return 0
elif value_s.startswith('0b'):
return sign * int(value_s[2:], 2)
elif value_s.startswith('0x'):
return sign * int(value_s[2:], 16)
elif value_s.startswith('0o'):
return sign * int(value_s[2:], 8)
elif self.resolver.processing_version == (1, 1) and value_s[0] == '0':
return sign * int(value_s, 8)
elif self.resolver.processing_version == (1, 1) and ':' in value_s:
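            # YAML 1.1 sexagesimal (base 60) integers, e.g. '1:30' -> 1 * 60 + 30 == 90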
digits = [int(part) for part in value_s.split(':')]
digits.reverse()
base = 1
value = 0
for digit in digits:
value += digit * base
base *= 60
return sign * value
else:
return sign * int(value_s)
inf_value = 1e300
while inf_value != inf_value * inf_value:
inf_value *= inf_value
nan_value = -inf_value / inf_value # Trying to make a quiet NaN (like C99).
def construct_yaml_float(self, node: Any) -> float:
value_so = self.construct_scalar(node)
value_s = value_so.replace('_', "").lower()
sign = +1
if value_s[0] == '-':
sign = -1
if value_s[0] in '+-':
value_s = value_s[1:]
if value_s == '.inf':
return sign * self.inf_value
elif value_s == '.nan':
return self.nan_value
elif self.resolver.processing_version != (1, 2) and ':' in value_s:
digits = [float(part) for part in value_s.split(':')]
digits.reverse()
base = 1
value = 0.0
for digit in digits:
value += digit * base
base *= 60
return sign * value
else:
if self.resolver.processing_version != (1, 2) and 'e' in value_s:
# value_s is lower case independent of input
mantissa, exponent = value_s.split('e')
if '.' not in mantissa:
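                    # YAML 1.1's float pattern expects a dot in the mantissa, so
                    # e.g. '1e6' is ambiguous there; warn instead of silently
                    # accepting it as a float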
warnings.warn(
MantissaNoDotYAML1_1Warning(node, value_so), stacklevel=1
)
return sign * float(value_s)
def construct_yaml_binary(self, node: Any) -> Any:
import base64
try:
value = self.construct_scalar(node).encode('ascii')
except UnicodeEncodeError as exc:
raise ConstructorError(
None,
None,
f'failed to convert base64 data into ascii: {exc!s}',
node.start_mark,
)
try:
return base64.decodebytes(value)
except binascii.Error as exc:
raise ConstructorError(
None,
None,
f'failed to decode base64 data: {exc!s}',
node.start_mark,
)
timestamp_regexp = timestamp_regexp # moved to util 0.17.17
def construct_yaml_timestamp(self, node: Any, values: Any = None) -> Any:
if values is None:
try:
match = self.timestamp_regexp.match(node.value)
except TypeError:
match = None
if match is None:
raise ConstructorError(
None,
None,
f'failed to construct timestamp from "{node.value}"',
node.start_mark,
)
values = match.groupdict()
return create_timestamp(**values)
def construct_yaml_omap(self, node: Any) -> Any:
# Note: we do now check for duplicate keys
omap = ordereddict()
yield omap
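        # an !!omap is a sequence of single-key mappings, e.g.
        #   !!omap [one: 1, two: 2]
        # each pair is inserted below in document order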
if not isinstance(node, SequenceNode):
raise ConstructorError(
'while constructing an ordered map',
node.start_mark,
f'expected a sequence, but found {node.id!s}',
node.start_mark,
)
for subnode in node.value:
if not isinstance(subnode, MappingNode):
raise ConstructorError(
'while constructing an ordered map',
node.start_mark,
f'expected a mapping of length 1, but found {subnode.id!s}',
subnode.start_mark,
)
if len(subnode.value) != 1:
raise ConstructorError(
'while constructing an ordered map',
node.start_mark,
f'expected a single mapping item, but found {len(subnode.value):d} items',
subnode.start_mark,
)
key_node, value_node = subnode.value[0]
key = self.construct_object(key_node)
assert key not in omap
value = self.construct_object(value_node)
omap[key] = value
def construct_yaml_pairs(self, node: Any) -> Any:
# Note: the same code as `construct_yaml_omap`.
pairs: List[Any] = []
yield pairs
if not isinstance(node, SequenceNode):
raise ConstructorError(
'while constructing pairs',
node.start_mark,
f'expected a sequence, but found {node.id!s}',
node.start_mark,
)
for subnode in node.value:
if not isinstance(subnode, MappingNode):
raise ConstructorError(
'while constructing pairs',
node.start_mark,
f'expected a mapping of length 1, but found {subnode.id!s}',
subnode.start_mark,
)
if len(subnode.value) != 1:
raise ConstructorError(
'while constructing pairs',
node.start_mark,
f'expected a single mapping item, but found {len(subnode.value):d} items',
subnode.start_mark,
)
key_node, value_node = subnode.value[0]
key = self.construct_object(key_node)
value = self.construct_object(value_node)
pairs.append((key, value))
def construct_yaml_set(self, node: Any) -> Any:
data: Set[Any] = set()
yield data
value = self.construct_mapping(node)
data.update(value)
def construct_yaml_str(self, node: Any) -> Any:
value = self.construct_scalar(node)
return value
def construct_yaml_seq(self, node: Any) -> Any:
data: List[Any] = self.yaml_base_list_type()
yield data
data.extend(self.construct_sequence(node))
def construct_yaml_map(self, node: Any) -> Any:
data: Dict[Any, Any] = self.yaml_base_dict_type()
yield data
value = self.construct_mapping(node)
data.update(value)
def construct_yaml_object(self, node: Any, cls: Any) -> Any:
data = cls.__new__(cls)
yield data
if hasattr(data, '__setstate__'):
state = self.construct_mapping(node, deep=True)
data.__setstate__(state)
else:
state = self.construct_mapping(node)
data.__dict__.update(state)
def construct_undefined(self, node: Any) -> None:
raise ConstructorError(
None,
None,
f'could not determine a constructor for the tag {node.tag!r}',
node.start_mark,
)
for tag in 'null bool int float binary timestamp omap pairs set str seq map'.split():
SafeConstructor.add_default_constructor(tag)
SafeConstructor.add_constructor(None, SafeConstructor.construct_undefined)
class Constructor(SafeConstructor):
def construct_python_str(self, node: Any) -> Any:
return self.construct_scalar(node)
def construct_python_unicode(self, node: Any) -> Any:
return self.construct_scalar(node)
def construct_python_bytes(self, node: Any) -> Any:
import base64
try:
value = self.construct_scalar(node).encode('ascii')
except UnicodeEncodeError as exc:
raise ConstructorError(
None,
None,
f'failed to convert base64 data into ascii: {exc!s}',
node.start_mark,
)
try:
return base64.decodebytes(value)
except binascii.Error as exc:
raise ConstructorError(
None,
None,
f'failed to decode base64 data: {exc!s}',
node.start_mark,
)
def construct_python_long(self, node: Any) -> int:
val = self.construct_yaml_int(node)
return val
def construct_python_complex(self, node: Any) -> Any:
return complex(self.construct_scalar(node))
def construct_python_tuple(self, node: Any) -> Any:
return tuple(self.construct_sequence(node))
def find_python_module(self, name: Any, mark: Any) -> Any:
if not name:
raise ConstructorError(
'while constructing a Python module',
mark,
'expected non-empty name appended to the tag',
mark,
)
try:
__import__(name)
except ImportError as exc:
raise ConstructorError(
'while constructing a Python module',
mark,
f'cannot find module {name!r} ({exc!s})',
mark,
)
return sys.modules[name]
def find_python_name(self, name: Any, mark: Any) -> Any:
if not name:
raise ConstructorError(
'while constructing a Python object',
mark,
'expected non-empty name appended to the tag',
mark,
)
if '.' in name:
lname = name.split('.')
lmodule_name = lname
lobject_name: List[Any] = []
while len(lmodule_name) > 1:
lobject_name.insert(0, lmodule_name.pop())
module_name = '.'.join(lmodule_name)
try:
__import__(module_name)
# object_name = '.'.join(object_name)
break
except ImportError:
continue
else:
module_name = builtins_module
lobject_name = [name]
try:
__import__(module_name)
except ImportError as exc:
raise ConstructorError(
'while constructing a Python object',
mark,
f'cannot find module {module_name!r} ({exc!s})',
mark,
)
module = sys.modules[module_name]
object_name = '.'.join(lobject_name)
obj = module
while lobject_name:
if not hasattr(obj, lobject_name[0]):
raise ConstructorError(
'while constructing a Python object',
mark,
f'cannot find {object_name!r} in the module {module.__name__!r}',
mark,
)
obj = getattr(obj, lobject_name.pop(0))
return obj
def construct_python_name(self, suffix: Any, node: Any) -> Any:
value = self.construct_scalar(node)
if value:
raise ConstructorError(
'while constructing a Python name',
node.start_mark,
f'expected the empty value, but found {value!r}',
node.start_mark,
)
return self.find_python_name(suffix, node.start_mark)
def construct_python_module(self, suffix: Any, node: Any) -> Any:
value = self.construct_scalar(node)
if value:
raise ConstructorError(
'while constructing a Python module',
node.start_mark,
f'expected the empty value, but found {value!r}',
node.start_mark,
)
return self.find_python_module(suffix, node.start_mark)
def make_python_instance(
self,
suffix: Any,
node: Any,
args: Any = None,
kwds: Any = None,
newobj: bool = False,
) -> Any:
if not args:
args = []
if not kwds:
kwds = {}
cls = self.find_python_name(suffix, node.start_mark)
if newobj and isinstance(cls, type):
return cls.__new__(cls, *args, **kwds)
else:
return cls(*args, **kwds)
def set_python_instance_state(self, instance: Any, state: Any) -> None:
if hasattr(instance, '__setstate__'):
instance.__setstate__(state)
else:
slotstate: Dict[Any, Any] = {}
if isinstance(state, tuple) and len(state) == 2:
state, slotstate = state
if hasattr(instance, '__dict__'):
instance.__dict__.update(state)
elif state:
slotstate.update(state)
for key, value in slotstate.items():
setattr(instance, key, value)
def construct_python_object(self, suffix: Any, node: Any) -> Any:
# Format:
# !!python/object:module.name { ... state ... }
instance = self.make_python_instance(suffix, node, newobj=True)
self.recursive_objects[node] = instance
yield instance
deep = hasattr(instance, '__setstate__')
state = self.construct_mapping(node, deep=deep)
self.set_python_instance_state(instance, state)
def construct_python_object_apply(
self,
suffix: Any,
node: Any,
newobj: bool = False,
) -> Any:
# Format:
# !!python/object/apply # (or !!python/object/new)
# args: [ ... arguments ... ]
# kwds: { ... keywords ... }
# state: ... state ...
# listitems: [ ... listitems ... ]
# dictitems: { ... dictitems ... }
# or short format:
# !!python/object/apply [ ... arguments ... ]
# The difference between !!python/object/apply and !!python/object/new
# is how an object is created, check make_python_instance for details.
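        # Illustrative example (not taken from this repository):
        #   !!python/object/apply:collections.Counter [[a, a, b]]
        # roughly corresponds to calling collections.Counter(['a', 'a', 'b'])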
if isinstance(node, SequenceNode):
args = self.construct_sequence(node, deep=True)
kwds: Dict[Any, Any] = {}
state: Dict[Any, Any] = {}
listitems: List[Any] = []
dictitems: Dict[Any, Any] = {}
else:
value = self.construct_mapping(node, deep=True)
args = value.get('args', [])
kwds = value.get('kwds', {})
state = value.get('state', {})
listitems = value.get('listitems', [])
dictitems = value.get('dictitems', {})
instance = self.make_python_instance(suffix, node, args, kwds, newobj)
if bool(state):
self.set_python_instance_state(instance, state)
if bool(listitems):
instance.extend(listitems)
if bool(dictitems):
for key in dictitems:
instance[key] = dictitems[key]
return instance
def construct_python_object_new(self, suffix: Any, node: Any) -> Any:
return self.construct_python_object_apply(suffix, node, newobj=True)
@classmethod
def add_default_constructor(
cls,
tag: str,
method: Any = None,
tag_base: str = 'tag:yaml.org,2002:python/',
) -> None:
if not tag.startswith('tag:'):
if method is None:
method = 'construct_yaml_' + tag
tag = tag_base + tag
cls.add_constructor(tag, getattr(cls, method))
Constructor.add_constructor(
'tag:yaml.org,2002:python/none', Constructor.construct_yaml_null
)
Constructor.add_constructor(
'tag:yaml.org,2002:python/unicode',
Constructor.construct_python_unicode,
)
Constructor.add_constructor(
'tag:yaml.org,2002:python/bytes',
Constructor.construct_python_bytes,
)
Constructor.add_constructor(
'tag:yaml.org,2002:python/int', Constructor.construct_yaml_int
)
Constructor.add_constructor(
'tag:yaml.org,2002:python/long', Constructor.construct_python_long
)
Constructor.add_constructor(
'tag:yaml.org,2002:python/float', Constructor.construct_yaml_float
)
Constructor.add_constructor(
'tag:yaml.org,2002:python/complex',
Constructor.construct_python_complex,
)
Constructor.add_constructor(
'tag:yaml.org,2002:python/list', Constructor.construct_yaml_seq
)
Constructor.add_constructor(
'tag:yaml.org,2002:python/tuple',
Constructor.construct_python_tuple,
)
# for tag in 'bool str unicode bytes int long float complex tuple'.split():
# Constructor.add_default_constructor(tag)
Constructor.add_constructor(
'tag:yaml.org,2002:python/dict', Constructor.construct_yaml_map
)
Constructor.add_multi_constructor(
'tag:yaml.org,2002:python/name:',
Constructor.construct_python_name,
)
Constructor.add_multi_constructor(
'tag:yaml.org,2002:python/module:',
Constructor.construct_python_module,
)
Constructor.add_multi_constructor(
'tag:yaml.org,2002:python/object:',
Constructor.construct_python_object,
)
Constructor.add_multi_constructor(
'tag:yaml.org,2002:python/object/apply:',
Constructor.construct_python_object_apply,
)
Constructor.add_multi_constructor(
'tag:yaml.org,2002:python/object/new:',
Constructor.construct_python_object_new,
)
class RoundTripConstructor(SafeConstructor):
"""need to store the comments on the node itself,
as well as on the items
"""
def comment(self, idx: Any) -> Any:
assert self.loader.comment_handling is not None
x = self.scanner.comments[idx]
x.set_assigned()
return x
def comments(self, list_of_comments: Any, idx: Optional[Any] = None) -> Any:
# hand in the comment and optional pre, eol, post segment
if list_of_comments is None:
return []
if idx is not None:
if list_of_comments[idx] is None:
return []
list_of_comments = list_of_comments[idx]
for x in list_of_comments:
yield self.comment(x)
def construct_scalar(self, node: Any) -> Any:
if not isinstance(node, ScalarNode):
raise ConstructorError(
None,
None,
f'expected a scalar node, but found {node.id!s}',
node.start_mark,
)
if node.style == '|' and isinstance(node.value, str):
lss = LiteralScalarString(node.value, anchor=node.anchor)
if self.loader and self.loader.comment_handling is None:
if node.comment and node.comment[1]:
lss.comment = node.comment[1][0] # type: ignore
else:
# NEWCMNT
if node.comment is not None and node.comment[1]:
# nprintf('>>>>nc1', node.comment)
# EOL comment after |
lss.comment = self.comment(node.comment[1][0]) # type: ignore
return lss
if node.style == '>' and isinstance(node.value, str):
fold_positions: List[int] = []
idx = -1
while True:
idx = node.value.find('\a', idx + 1)
if idx < 0:
break
fold_positions.append(idx - len(fold_positions))
fss = FoldedScalarString(node.value.replace('\a', ''), anchor=node.anchor)
if self.loader and self.loader.comment_handling is None:
if node.comment and node.comment[1]:
fss.comment = node.comment[1][0] # type: ignore
else:
# NEWCMNT
if node.comment is not None and node.comment[1]:
# nprintf('>>>>nc2', node.comment)
# EOL comment after >
fss.comment = self.comment(node.comment[1][0]) # type: ignore
if fold_positions:
fss.fold_pos = fold_positions # type: ignore
return fss
elif bool(self._preserve_quotes) and isinstance(node.value, str):
if node.style == "'":
return SingleQuotedScalarString(node.value, anchor=node.anchor)
if node.style == '"':
return DoubleQuotedScalarString(node.value, anchor=node.anchor)
# if node.ctag:
# data2 = TaggedScalar()
# data2.value = node.value
# data2.style = node.style
# data2.yaml_set_ctag(node.ctag)
# if node.anchor:
# from ruyaml.serializer import templated_id
# if not templated_id(node.anchor):
# data2.yaml_set_anchor(node.anchor, always_dump=True)
# return data2
if node.anchor:
return PlainScalarString(node.value, anchor=node.anchor)
return node.value
def construct_yaml_int(self, node: Any) -> Any:
width: Any = None
value_su = self.construct_scalar(node)
try:
sx = value_su.rstrip('_')
underscore: Any = [len(sx) - sx.rindex('_') - 1, False, False]
except ValueError:
underscore = None
except IndexError:
underscore = None
value_s = value_su.replace('_', "")
sign = +1
if value_s[0] in '+-':
if value_s[0] == '-':
sign = -1
value_s = value_s[1:]
if value_s.startswith('0b'):
if self.resolver.processing_version > (1, 1) and value_s[2] == '0':
width = len(value_s[2:])
if underscore is not None:
underscore[1] = value_su[2] == '_'
underscore[2] = len(value_su[2:]) > 1 and value_su[-1] == '_'
return BinaryInt(
sign * int(value_s[2:], 2),
width=width,
underscore=underscore,
anchor=node.anchor,
)
elif value_s.startswith('0x'):
# default to lower-case if no a-fA-F in string
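            # e.g. '0x1A' -> HexCapsInt, '0x1a' and '0x10' -> HexInt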
if self.resolver.processing_version > (1, 1) and value_s[2] == '0':
width = len(value_s[2:])
hex_fun: Any = HexInt
for ch in value_s[2:]:
if ch in 'ABCDEF': # first non-digit is capital
hex_fun = HexCapsInt
break
if ch in 'abcdef':
break
if underscore is not None:
underscore[1] = value_su[2] == '_'
underscore[2] = len(value_su[2:]) > 1 and value_su[-1] == '_'
return hex_fun(
sign * int(value_s[2:], 16),
width=width,
underscore=underscore,
anchor=node.anchor,
)
elif value_s.startswith('0o'):
if self.resolver.processing_version > (1, 1) and value_s[2] == '0':
width = len(value_s[2:])
if underscore is not None:
underscore[1] = value_su[2] == '_'
underscore[2] = len(value_su[2:]) > 1 and value_su[-1] == '_'
return OctalInt(
sign * int(value_s[2:], 8),
width=width,
underscore=underscore,
anchor=node.anchor,
)
elif (
self.resolver.processing_version != (1, 2)
and len(value_s) > 1
and value_s[0] == '0'
):
return OctalInt(
sign * int(value_s, 8),
width=width,
underscore=underscore,
anchor=node.anchor,
)
elif self.resolver.processing_version != (1, 2) and ':' in value_s:
digits = [int(part) for part in value_s.split(':')]
digits.reverse()
base = 1
value = 0
for digit in digits:
value += digit * base
base *= 60
return sign * value
elif self.resolver.processing_version > (1, 1) and value_s[0] == '0':
# not an octal, an integer with leading zero(s)
if underscore is not None:
# cannot have a leading underscore
underscore[2] = len(value_su) > 1 and value_su[-1] == '_'
return ScalarInt(
sign * int(value_s),
width=len(value_s),
underscore=underscore,
anchor=node.anchor,
)
elif underscore:
# cannot have a leading underscore
underscore[2] = len(value_su) > 1 and value_su[-1] == '_'
return ScalarInt(
sign * int(value_s),
width=None,
underscore=underscore,
anchor=node.anchor,
)
elif node.anchor:
return ScalarInt(sign * int(value_s), width=None, anchor=node.anchor)
else:
return sign * int(value_s)
def construct_yaml_float(self, node: Any) -> Any:
def leading_zeros(v: Any) -> int:
lead0 = 0
idx = 0
while idx < len(v) and v[idx] in '0.':
if v[idx] == '0':
lead0 += 1
idx += 1
return lead0
# underscore = None
m_sign: Any = False
value_so = self.construct_scalar(node)
value_s = value_so.replace('_', "").lower()
sign = +1
if value_s[0] == '-':
sign = -1
if value_s[0] in '+-':
m_sign = value_s[0]
value_s = value_s[1:]
if value_s == '.inf':
return sign * self.inf_value
if value_s == '.nan':
return self.nan_value
if self.resolver.processing_version != (1, 2) and ':' in value_s:
digits = [float(part) for part in value_s.split(':')]
digits.reverse()
base = 1
value = 0.0
for digit in digits:
value += digit * base
base *= 60
return sign * value
if 'e' in value_s:
try:
mantissa, exponent = value_so.split('e')
exp = 'e'
except ValueError:
mantissa, exponent = value_so.split('E')
exp = 'E'
if self.resolver.processing_version != (1, 2):
# value_s is lower case independent of input
if '.' not in mantissa:
warnings.warn(
MantissaNoDotYAML1_1Warning(node, value_so), stacklevel=1
)
lead0 = leading_zeros(mantissa)
width = len(mantissa)
prec = mantissa.find('.')
if m_sign:
width -= 1
e_width = len(exponent)
e_sign = exponent[0] in '+-'
# nprint('sf', width, prec, m_sign, exp, e_width, e_sign)
return ScalarFloat(
sign * float(value_s),
width=width,
prec=prec,
m_sign=m_sign,
m_lead0=lead0,
exp=exp,
e_width=e_width,
e_sign=e_sign,
anchor=node.anchor,
)
width = len(value_so)
# you can't use index, !!float 42 would be a float without a dot
prec = value_so.find('.')
lead0 = leading_zeros(value_so)
return ScalarFloat(
sign * float(value_s),
width=width,
prec=prec,
m_sign=m_sign,
m_lead0=lead0,
anchor=node.anchor,
)
def construct_yaml_str(self, node: Any) -> Any:
if node.ctag.handle:
value = self.construct_unknown(node)
else:
value = self.construct_scalar(node)
if isinstance(value, ScalarString):
return value
return value
def construct_rt_sequence(self, node: Any, seqtyp: Any, deep: bool = False) -> Any:
if not isinstance(node, SequenceNode):
raise ConstructorError(
None,
None,
f'expected a sequence node, but found {node.id!s}',
node.start_mark,
)
ret_val = []
if self.loader and self.loader.comment_handling is None:
if node.comment:
seqtyp._yaml_add_comment(node.comment[:2])
if len(node.comment) > 2:
# this happens e.g. if you have a sequence element that is a flow-style
# mapping and that has no EOL comment but a following commentline or
# empty line
seqtyp.yaml_end_comment_extend(node.comment[2], clear=True)
else:
# NEWCMNT
if node.comment:
nprintf('nc3', node.comment)
if node.anchor:
from ruyaml.serializer import templated_id
if not templated_id(node.anchor):
seqtyp.yaml_set_anchor(node.anchor)
for idx, child in enumerate(node.value):
if child.comment:
seqtyp._yaml_add_comment(child.comment, key=idx)
child.comment = None # if moved to sequence remove from child
ret_val.append(self.construct_object(child, deep=deep))
seqtyp._yaml_set_idx_line_col(
idx,
[child.start_mark.line, child.start_mark.column],
)
return ret_val
def flatten_mapping(self, node: Any) -> Any: # RTConstructor
"""
This implements the merge key feature http://yaml.org/type/merge.html
by referencing the merge dict/list of dicts
"""
from ruyaml.mergevalue import MergeValue
def constructed(value_node: Any) -> Any:
# If the contents of a merge are defined within the
# merge marker, then they won't have been constructed
# yet. But if they were already constructed, we need to use
# the existing object.
if value_node in self.constructed_objects:
value = self.constructed_objects[value_node]
else:
value = self.construct_object(value_node, deep=True)
return value
# merge = []
# merge_map_list: List[Any] = []
merge_map_list = MergeValue()
index = 0
while index < len(node.value):
key_node, value_node = node.value[index]
if key_node.tag == 'tag:yaml.org,2002:merge':
if not self.allow_duplicate_keys and len(
merge_map_list
): # double << key
# sorry but this works, so I fail to see why it should
# stop working just because "not allowed".
args = [
'while constructing a mapping',
node.start_mark,
'found duplicate merge key "<<"',
key_node.start_mark,
"""\
Duplicate merge keys are never allowed, not even when
`.allow_duplicate_keys` is set to True
""",
]
raise DuplicateKeyError(*args)
del node.value[index]
merge_map_list.merge_pos = index
if isinstance(value_node, MappingNode):
merge_map_list.append(constructed(value_node))
elif isinstance(value_node, SequenceNode):
# submerge = []
merge_map_list.set_sequence(constructed(value_node))
for subnode in value_node.value:
if not isinstance(subnode, MappingNode):
raise ConstructorError(
'while constructing a mapping',
node.start_mark,
f'expected a mapping for merging, but found {subnode.id!s}',
subnode.start_mark,
)
merge_map_list.append(constructed(subnode))
else:
raise ConstructorError(
'while constructing a mapping',
node.start_mark,
'expected a mapping or list of mappings for merging, '
f'but found {value_node.id!s}',
value_node.start_mark,
)
elif key_node.tag == 'tag:yaml.org,2002:value':
key_node.tag = 'tag:yaml.org,2002:str'
index += 1
else:
index += 1
return merge_map_list
# if merge:
# node.value = merge + node.value
def _sentinel(self) -> None:
pass
# RoundTrip
def construct_mapping(self, node: Any, maptyp: Any, deep: bool = False) -> Any: # type: ignore # NOQA
if not isinstance(node, MappingNode):
raise ConstructorError(
None,
None,
f'expected a mapping node, but found {node.id!s}',
node.start_mark,
)
merge_map = self.flatten_mapping(node)
# mapping = {}
if self.loader and self.loader.comment_handling is None:
if node.comment:
maptyp._yaml_add_comment(node.comment[:2])
if len(node.comment) > 2:
maptyp.yaml_end_comment_extend(node.comment[2], clear=True)
else:
# NEWCMNT
if node.comment:
# nprintf('nc4', node.comment, node.start_mark)
if maptyp.ca.pre is None:
maptyp.ca.pre = []
for cmnt in self.comments(node.comment, 0):
maptyp.ca.pre.append(cmnt)
if node.anchor:
from ruyaml.serializer import templated_id
if not templated_id(node.anchor):
maptyp.yaml_set_anchor(node.anchor)
last_key, last_value = None, self._sentinel
for key_node, value_node in node.value:
# keys can be list -> deep
key = self.construct_object(key_node, deep=True)
# lists are not hashable, but tuples are
if not isinstance(key, Hashable):
if isinstance(key, MutableSequence):
key_s = CommentedKeySeq(key)
if key_node.flow_style is True:
key_s.fa.set_flow_style()
elif key_node.flow_style is False:
key_s.fa.set_block_style()
key_s._yaml_set_line_col(key.lc.line, key.lc.col) # type: ignore
key = key_s
elif isinstance(key, MutableMapping):
key_m = CommentedKeyMap(key)
if key_node.flow_style is True:
key_m.fa.set_flow_style()
elif key_node.flow_style is False:
key_m.fa.set_block_style()
key_m._yaml_set_line_col(key.lc.line, key.lc.col) # type: ignore
key = key_m
if not isinstance(key, Hashable):
raise ConstructorError(
'while constructing a mapping',
node.start_mark,
'found unhashable key',
key_node.start_mark,
)
value = self.construct_object(value_node, deep=deep)
if self.check_mapping_key(node, key_node, maptyp, key, value):
if self.loader and self.loader.comment_handling is None:
if (
key_node.comment
and len(key_node.comment) > 4
and key_node.comment[4]
):
if last_value is None:
key_node.comment[0] = key_node.comment.pop(4)
maptyp._yaml_add_comment(key_node.comment, value=last_key)
else:
key_node.comment[2] = key_node.comment.pop(4)
maptyp._yaml_add_comment(key_node.comment, key=key)
key_node.comment = None
if key_node.comment:
maptyp._yaml_add_comment(key_node.comment, key=key)
if value_node.comment:
maptyp._yaml_add_comment(value_node.comment, value=key)
else:
# NEWCMNT
if key_node.comment:
# nprintf('nc5a', key, key_node.comment)
if key_node.comment[0]:
maptyp.ca.set(key, C_KEY_PRE, key_node.comment[0])
if key_node.comment[1]:
maptyp.ca.set(key, C_KEY_EOL, key_node.comment[1])
if key_node.comment[2]:
maptyp.ca.set(key, C_KEY_POST, key_node.comment[2])
if value_node.comment:
nprintf('nc5b', key, value_node.comment)
if value_node.comment[0]:
maptyp.ca.set(key, C_VALUE_PRE, value_node.comment[0])
if value_node.comment[1]:
maptyp.ca.set(key, C_VALUE_EOL, value_node.comment[1])
if value_node.comment[2]:
maptyp.ca.set(key, C_VALUE_POST, value_node.comment[2])
maptyp._yaml_set_kv_line_col(
key,
[
key_node.start_mark.line,
key_node.start_mark.column,
value_node.start_mark.line,
value_node.start_mark.column,
],
)
maptyp[key] = value
last_key, last_value = key, value # could use indexing
# do this last, or <<: before a key will prevent insertion in instances
        # of collections.OrderedDict (as they have no __contains__)
if merge_map:
maptyp.add_yaml_merge(merge_map)
def construct_setting(self, node: Any, typ: Any, deep: bool = False) -> Any:
if not isinstance(node, MappingNode):
raise ConstructorError(
None,
None,
f'expected a mapping node, but found {node.id!s}',
node.start_mark,
)
if self.loader and self.loader.comment_handling is None:
if node.comment:
typ._yaml_add_comment(node.comment[:2])
if len(node.comment) > 2:
typ.yaml_end_comment_extend(node.comment[2], clear=True)
else:
# NEWCMNT
if node.comment:
nprintf('nc6', node.comment)
if node.anchor:
from ruyaml.serializer import templated_id
if not templated_id(node.anchor):
typ.yaml_set_anchor(node.anchor)
for key_node, value_node in node.value:
# keys can be list -> deep
key = self.construct_object(key_node, deep=True)
# lists are not hashable, but tuples are
if not isinstance(key, Hashable):
if isinstance(key, list):
key = tuple(key)
if not isinstance(key, Hashable):
raise ConstructorError(
'while constructing a mapping',
node.start_mark,
'found unhashable key',
key_node.start_mark,
)
# construct but should be null
value = self.construct_object(value_node, deep=deep) # NOQA
self.check_set_key(node, key_node, typ, key)
if self.loader and self.loader.comment_handling is None:
if key_node.comment:
typ._yaml_add_comment(key_node.comment, key=key)
if value_node.comment:
typ._yaml_add_comment(value_node.comment, value=key)
else:
# NEWCMNT
if key_node.comment:
nprintf('nc7a', key_node.comment)
if value_node.comment:
nprintf('nc7b', value_node.comment)
typ.add(key)
def construct_yaml_seq(self, node: Any) -> Iterator[CommentedSeq]:
data = CommentedSeq()
data._yaml_set_line_col(node.start_mark.line, node.start_mark.column)
# if node.comment:
# data._yaml_add_comment(node.comment)
yield data
data.extend(self.construct_rt_sequence(node, data))
self.set_collection_style(data, node)
def construct_yaml_map(self, node: Any) -> Iterator[CommentedMap]:
data = CommentedMap()
data._yaml_set_line_col(node.start_mark.line, node.start_mark.column)
yield data
self.construct_mapping(node, data, deep=True)
self.set_collection_style(data, node)
def set_collection_style(self, data: Any, node: Any) -> None:
if len(data) == 0:
return
if node.flow_style is True:
data.fa.set_flow_style()
elif node.flow_style is False:
data.fa.set_block_style()
def construct_yaml_object(self, node: Any, cls: Any) -> Any:
from dataclasses import MISSING, InitVar, is_dataclass
data = cls.__new__(cls)
yield data
if hasattr(data, '__setstate__'):
state = SafeConstructor.construct_mapping(self, node, deep=True)
data.__setstate__(state)
elif is_dataclass(data):
mapping = SafeConstructor.construct_mapping(self, node, deep=True)
init_var_defaults = {}
for field in data.__dataclass_fields__.values():
# nprintf('field', field, field.default is MISSING, isinstance(field.type, InitVar)) # NOQA
# in 3.7, InitVar is a singleton
if (
isinstance(field.type, InitVar)
or field.type is InitVar
                    # the following handles 'from __future__ import annotations'
or (
isinstance(field.type, str) and field.type.startswith('InitVar')
)
) and field.default is not MISSING:
init_var_defaults[field.name] = field.default
for attr, value in mapping.items():
if attr not in init_var_defaults:
setattr(data, attr, value)
post_init = getattr(data, '__post_init__', None)
if post_init is not None:
kw = {}
for name, default in init_var_defaults.items():
kw[name] = mapping.get(name, default)
post_init(**kw)
for field in data.__dataclass_fields__.values():
if field.name not in mapping and field.default_factory is not MISSING:
setattr(data, field.name, field.default_factory())
else:
state = SafeConstructor.construct_mapping(self, node)
if hasattr(data, '__attrs_attrs__'): # issue 394
data.__init__(**state)
else:
data.__dict__.update(state)
if node.anchor:
from ruyaml.anchor import Anchor
from ruyaml.serializer import templated_id
if not templated_id(node.anchor):
if not hasattr(data, Anchor.attrib):
a = Anchor()
setattr(data, Anchor.attrib, a)
else:
a = getattr(data, Anchor.attrib)
a.value = node.anchor
def construct_yaml_omap(self, node: Any) -> Iterator[CommentedOrderedMap]:
# Note: we do now check for duplicate keys
omap = CommentedOrderedMap()
omap._yaml_set_line_col(node.start_mark.line, node.start_mark.column)
if node.flow_style is True:
omap.fa.set_flow_style()
elif node.flow_style is False:
omap.fa.set_block_style()
yield omap
if self.loader and self.loader.comment_handling is None:
if node.comment:
omap._yaml_add_comment(node.comment[:2])
if len(node.comment) > 2:
omap.yaml_end_comment_extend(node.comment[2], clear=True)
else:
# NEWCMNT
if node.comment:
nprintf('nc8', node.comment)
if not isinstance(node, SequenceNode):
raise ConstructorError(
'while constructing an ordered map',
node.start_mark,
f'expected a sequence, but found {node.id!s}',
node.start_mark,
)
for subnode in node.value:
if not isinstance(subnode, MappingNode):
raise ConstructorError(
'while constructing an ordered map',
node.start_mark,
f'expected a mapping of length 1, but found {subnode.id!s}',
subnode.start_mark,
)
if len(subnode.value) != 1:
raise ConstructorError(
'while constructing an ordered map',
node.start_mark,
f'expected a single mapping item, but found {len(subnode.value):d} items',
subnode.start_mark,
)
key_node, value_node = subnode.value[0]
key = self.construct_object(key_node)
assert key not in omap
value = self.construct_object(value_node)
if self.loader and self.loader.comment_handling is None:
if key_node.comment:
omap._yaml_add_comment(key_node.comment, key=key)
if subnode.comment:
omap._yaml_add_comment(subnode.comment, key=key)
if value_node.comment:
omap._yaml_add_comment(value_node.comment, value=key)
else:
# NEWCMNT
if key_node.comment:
nprintf('nc9a', key_node.comment)
if subnode.comment:
nprintf('nc9b', subnode.comment)
if value_node.comment:
nprintf('nc9c', value_node.comment)
omap[key] = value
def construct_yaml_set(self, node: Any) -> Iterator[CommentedSet]:
data = CommentedSet()
data._yaml_set_line_col(node.start_mark.line, node.start_mark.column)
if node.flow_style is True:
data.fa.set_flow_style()
elif node.flow_style is False:
data.fa.set_block_style()
yield data
self.construct_setting(node, data)
def construct_unknown(
self,
node: Any,
) -> Iterator[Union[CommentedMap, TaggedScalar, CommentedSeq]]:
try:
if isinstance(node, MappingNode):
data = CommentedMap()
data._yaml_set_line_col(node.start_mark.line, node.start_mark.column)
if node.flow_style is True:
data.fa.set_flow_style()
elif node.flow_style is False:
data.fa.set_block_style()
data.yaml_set_ctag(node.ctag)
yield data
if node.anchor:
from ruyaml.serializer import templated_id
if not templated_id(node.anchor):
data.yaml_set_anchor(node.anchor)
self.construct_mapping(node, data)
return
elif isinstance(node, ScalarNode):
data2 = TaggedScalar()
data2.value = self.construct_scalar(node)
data2.style = node.style
data2.yaml_set_ctag(node.ctag)
yield data2
if node.anchor:
from ruyaml.serializer import templated_id
if not templated_id(node.anchor):
data2.yaml_set_anchor(node.anchor, always_dump=True)
return
elif isinstance(node, SequenceNode):
data3 = CommentedSeq()
data3._yaml_set_line_col(node.start_mark.line, node.start_mark.column)
if node.flow_style is True:
data3.fa.set_flow_style()
elif node.flow_style is False:
data3.fa.set_block_style()
data3.yaml_set_ctag(node.ctag)
yield data3
if node.anchor:
from ruyaml.serializer import templated_id
if not templated_id(node.anchor):
data3.yaml_set_anchor(node.anchor)
data3.extend(self.construct_sequence(node))
return
except: # NOQA
pass
raise ConstructorError(
None,
None,
f'could not determine a constructor for the tag {node.tag!r}',
node.start_mark,
)
def construct_yaml_timestamp(
self,
node: Any,
values: Any = None,
) -> Union[datetime.date, datetime.datetime, TimeStamp]:
try:
match = self.timestamp_regexp.match(node.value)
except TypeError:
match = None
if match is None:
raise ConstructorError(
None,
None,
f'failed to construct timestamp from "{node.value}"',
node.start_mark,
)
values = match.groupdict()
if not values['hour']:
return create_timestamp(**values)
# return SafeConstructor.construct_yaml_timestamp(self, node, values)
for part in ['t', 'tz_sign', 'tz_hour', 'tz_minute']:
if values[part]:
break
else:
return create_timestamp(**values)
# return SafeConstructor.construct_yaml_timestamp(self, node, values)
# print('>>>>>>>> here', values)
dd = create_timestamp(**values) # this has tzinfo
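        # keep the original timezone spelling (delta/tz below) on the TimeStamp
        # so a round-trip dump can reproduce the input form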
delta = None
if values['tz_sign']:
hours = values['tz_hour']
tz_hour = int(hours)
minutes = values['tz_minute']
tz_minute = int(minutes) if minutes else 0
# ToDo: double work, replace with extraction from dd.tzinfo
delta = datetime.timedelta(hours=tz_hour, minutes=tz_minute)
if values['tz_sign'] == '-':
delta = -delta
if isinstance(dd, datetime.datetime):
data = TimeStamp(
dd.year,
dd.month,
dd.day,
dd.hour,
dd.minute,
dd.second,
dd.microsecond,
dd.tzinfo, # NOQA
)
else:
# ToDo: make this into a DateStamp?
data = TimeStamp(dd.year, dd.month, dd.day, 0, 0, 0, 0)
return data
if delta:
data._yaml['delta'] = delta
tz = values['tz_sign'] + values['tz_hour']
if values['tz_minute']:
tz += ':' + values['tz_minute']
data._yaml['tz'] = tz
else:
if values['tz']: # no delta
data._yaml['tz'] = values['tz']
if values['t']:
data._yaml['t'] = True
return data
def construct_yaml_sbool(self, node: Any) -> Union[bool, ScalarBoolean]:
b = SafeConstructor.construct_yaml_bool(self, node)
if node.anchor:
return ScalarBoolean(b, anchor=node.anchor)
return b
RoundTripConstructor.add_default_constructor('bool', method='construct_yaml_sbool')
for tag in 'null int float binary timestamp omap pairs set str seq map'.split():
RoundTripConstructor.add_default_constructor(tag)
RoundTripConstructor.add_constructor(None, RoundTripConstructor.construct_unknown)
python-ruyaml-0.92.1/lib/ruyaml/cyaml.py 0000664 0000000 0000000 00000015137 15056754172 0020214 0 ustar 00root root 0000000 0000000 from __future__ import annotations
from _ruyaml import CEmitter, CParser # type: ignore
from ruyaml.constructor import BaseConstructor, Constructor, SafeConstructor
from ruyaml.representer import BaseRepresenter, Representer, SafeRepresenter
from ruyaml.resolver import BaseResolver, Resolver
if False: # MYPY
from typing import Any, Optional, Union # NOQA
from ruyaml.compat import StreamTextType, StreamType, VersionType # NOQA
__all__ = [
'CBaseLoader',
'CSafeLoader',
'CLoader',
'CBaseDumper',
'CSafeDumper',
'CDumper',
]
# this includes some hacks to work around how the resolver is used by
# lower-level parts of the parser
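# hedged usage sketch (assumes the compiled _ruyaml extension is importable):
#
#   from ruyaml.cyaml import CSafeLoader
#
# scanning/parsing then happen in C, while construction and resolving still run
# in the Python classes mixed in below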
class CBaseLoader(CParser, BaseConstructor, BaseResolver): # type: ignore
def __init__(
self,
stream: StreamTextType,
version: Optional[VersionType] = None,
preserve_quotes: Optional[bool] = None,
) -> None:
CParser.__init__(self, stream)
self._parser = self._composer = self
BaseConstructor.__init__(self, loader=self)
BaseResolver.__init__(self, loadumper=self)
# self.descend_resolver = self._resolver.descend_resolver
# self.ascend_resolver = self._resolver.ascend_resolver
# self.resolve = self._resolver.resolve
class CSafeLoader(CParser, SafeConstructor, Resolver): # type: ignore
def __init__(
self,
stream: StreamTextType,
version: Optional[VersionType] = None,
preserve_quotes: Optional[bool] = None,
) -> None:
CParser.__init__(self, stream)
self._parser = self._composer = self
SafeConstructor.__init__(self, loader=self)
Resolver.__init__(self, loadumper=self)
# self.descend_resolver = self._resolver.descend_resolver
# self.ascend_resolver = self._resolver.ascend_resolver
# self.resolve = self._resolver.resolve
class CLoader(CParser, Constructor, Resolver): # type: ignore
def __init__(
self,
stream: StreamTextType,
version: Optional[VersionType] = None,
preserve_quotes: Optional[bool] = None,
) -> None:
CParser.__init__(self, stream)
self._parser = self._composer = self
Constructor.__init__(self, loader=self)
Resolver.__init__(self, loadumper=self)
# self.descend_resolver = self._resolver.descend_resolver
# self.ascend_resolver = self._resolver.ascend_resolver
# self.resolve = self._resolver.resolve
class CBaseDumper(CEmitter, BaseRepresenter, BaseResolver): # type: ignore
def __init__(
self: StreamType,
stream: Any,
default_style: Any = None,
default_flow_style: Any = None,
canonical: Optional[bool] = None,
indent: Optional[int] = None,
width: Optional[int] = None,
allow_unicode: Optional[bool] = None,
line_break: Any = None,
encoding: Any = None,
explicit_start: Optional[bool] = None,
explicit_end: Optional[bool] = None,
version: Any = None,
tags: Any = None,
block_seq_indent: Any = None,
top_level_colon_align: Any = None,
prefix_colon: Any = None,
) -> None:
# NOQA
CEmitter.__init__(
self,
stream,
canonical=canonical,
indent=indent,
width=width,
encoding=encoding,
allow_unicode=allow_unicode,
line_break=line_break,
explicit_start=explicit_start,
explicit_end=explicit_end,
version=version,
tags=tags,
)
self._emitter = self._serializer = self._representer = self
BaseRepresenter.__init__(
self,
default_style=default_style,
default_flow_style=default_flow_style,
dumper=self,
)
BaseResolver.__init__(self, loadumper=self)
class CSafeDumper(CEmitter, SafeRepresenter, Resolver): # type: ignore
def __init__(
self: StreamType,
stream: Any,
default_style: Any = None,
default_flow_style: Any = None,
canonical: Optional[bool] = None,
indent: Optional[int] = None,
width: Optional[int] = None,
allow_unicode: Optional[bool] = None,
line_break: Any = None,
encoding: Any = None,
explicit_start: Optional[bool] = None,
explicit_end: Optional[bool] = None,
version: Any = None,
tags: Any = None,
block_seq_indent: Any = None,
top_level_colon_align: Any = None,
prefix_colon: Any = None,
) -> None:
# NOQA
self._emitter = self._serializer = self._representer = self
CEmitter.__init__(
self,
stream,
canonical=canonical,
indent=indent,
width=width,
encoding=encoding,
allow_unicode=allow_unicode,
line_break=line_break,
explicit_start=explicit_start,
explicit_end=explicit_end,
version=version,
tags=tags,
)
self._emitter = self._serializer = self._representer = self
SafeRepresenter.__init__(
self,
default_style=default_style,
default_flow_style=default_flow_style,
)
Resolver.__init__(self)
class CDumper(CEmitter, Representer, Resolver): # type: ignore
def __init__(
self: StreamType,
stream: Any,
default_style: Any = None,
default_flow_style: Any = None,
canonical: Optional[bool] = None,
indent: Optional[int] = None,
width: Optional[int] = None,
allow_unicode: Optional[bool] = None,
line_break: Any = None,
encoding: Any = None,
explicit_start: Optional[bool] = None,
explicit_end: Optional[bool] = None,
version: Any = None,
tags: Any = None,
block_seq_indent: Any = None,
top_level_colon_align: Any = None,
prefix_colon: Any = None,
) -> None:
# NOQA
CEmitter.__init__(
self,
stream,
canonical=canonical,
indent=indent,
width=width,
encoding=encoding,
allow_unicode=allow_unicode,
line_break=line_break,
explicit_start=explicit_start,
explicit_end=explicit_end,
version=version,
tags=tags,
)
self._emitter = self._serializer = self._representer = self
Representer.__init__(
self,
default_style=default_style,
default_flow_style=default_flow_style,
)
Resolver.__init__(self)
python-ruyaml-0.92.1/lib/ruyaml/docinfo.py 0000664 0000000 0000000 00000006672 15056754172 0020534 0 ustar 00root root 0000000 0000000 from __future__ import annotations
"""
DocInfo
Although it was possible to read tag directives before this, all handle/prefix
pairs for all documents in all streams were stored in one dictionary per
YAML instance, making it impossible to distinguish where such a pair came
from without subclassing the scanner.
ToDo:
DocInfo can be used by a yaml dumper to dump a class
- if connected to the root of a data structure
- if provided to the dumper?
"""
if False: # MYPY
from typing import Any, Optional, Tuple
# from dataclasses import dataclass, field, MISSING # NOQA
# @dataclass(order=True, frozen=True)
class Version:
# major: int
# minor: int
def __init__(self, major: int, minor: int) -> None:
self._major = major
self._minor = minor
@property
def major(self) -> int:
return self._major
@property
def minor(self) -> int:
return self._minor
def __eq__(self, v: Any) -> bool:
if not isinstance(v, Version):
return False
return self._major == v._major and self._minor == v._minor
def __lt__(self, v: Version) -> bool:
if self._major < v._major:
return True
if self._major > v._major:
return False
return self._minor < v._minor
def __le__(self, v: Version) -> bool:
if self._major < v._major:
return True
if self._major > v._major:
return False
return self._minor <= v._minor
def __gt__(self, v: Version) -> bool:
if self._major > v._major:
return True
if self._major < v._major:
return False
return self._minor > v._minor
def __ge__(self, v: Version) -> bool:
if self._major > v._major:
return True
if self._major < v._major:
return False
return self._minor >= v._minor
def version(
major: int | str | Tuple[int, int] | None,
minor: Optional[int] = None,
) -> Optional[Version]:
if major is None:
assert minor is None
return None
if isinstance(major, str):
assert minor is None
parts = major.split('.')
assert len(parts) == 2
return Version(int(parts[0]), int(parts[1]))
elif isinstance(major, tuple):
assert minor is None
assert len(major) == 2
major, minor = major
assert minor is not None
return Version(major, minor)
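# Illustrative note (not part of the original source): the helper above accepts
# either a 'major.minor' string, a (major, minor) tuple, or two ints, and the
# Version objects it returns compare as expected. A minimal sketch:
#
#     assert version('1.2') == Version(1, 2)
#     assert version((1, 1)) < version('1.2')
#     assert version(1, 1) <= Version(1, 1)
#     assert version(None) is None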
# @dataclass(frozen=True)
class Tag:
# handle: str
# prefix: str
def __init__(self, handle: str, prefix: str) -> None:
self._handle = handle
self._prefix = prefix
@property
def handle(self) -> str:
return self._handle
@property
def prefix(self) -> str:
return self._prefix
# @dataclass
class DocInfo:
"""
Store document information; can be used for analysis of a loaded YAML document
requested_version: if explicitly set before load
doc_version: from %YAML directive
tags: from %TAG directives in scanned order
"""
# requested_version: Optional[Version] = None
# doc_version: Optional[Version] = None
# tags: list[Tag] = field(default_factory=list)
def __init__(
self,
requested_version: Optional[Version] = None,
doc_version: Optional[Version] = None,
tags: Optional[list[Tag]] = None,
):
self.requested_version = requested_version
self.doc_version = doc_version
self.tags = [] if tags is None else tags
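# Illustrative note (not part of the original source): a loader would typically
# fill in one DocInfo per document; built by hand, using only the classes and
# helpers defined above, it might look like:
#
#     info = DocInfo(requested_version=version('1.2'))
#     info.doc_version = version((1, 1))                  # e.g. from a %YAML 1.1 directive
#     info.tags.append(Tag('!!', 'tag:yaml.org,2002:'))   # e.g. from a %TAG directive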
python-ruyaml-0.92.1/lib/ruyaml/dumper.py 0000664 0000000 0000000 00000015002 15056754172 0020372 0 ustar 00root root 0000000 0000000 from __future__ import annotations
from ruyaml.emitter import Emitter
from ruyaml.representer import (
BaseRepresenter,
Representer,
RoundTripRepresenter,
SafeRepresenter,
)
from ruyaml.resolver import BaseResolver, Resolver, VersionedResolver
from ruyaml.serializer import Serializer
if False: # MYPY
from typing import Any, Dict, List, Optional, Union # NOQA
from ruyaml.compat import StreamType, VersionType # NOQA
__all__ = ['BaseDumper', 'SafeDumper', 'Dumper', 'RoundTripDumper']
class BaseDumper(Emitter, Serializer, BaseRepresenter, BaseResolver):
def __init__(
self: Any,
stream: StreamType,
default_style: Any = None,
default_flow_style: Any = None,
canonical: Optional[bool] = None,
indent: Optional[int] = None,
width: Optional[int] = None,
allow_unicode: Optional[bool] = None,
line_break: Any = None,
encoding: Any = None,
explicit_start: Optional[bool] = None,
explicit_end: Optional[bool] = None,
version: Any = None,
tags: Any = None,
block_seq_indent: Any = None,
top_level_colon_align: Any = None,
prefix_colon: Any = None,
) -> None:
# NOQA
Emitter.__init__(
self,
stream,
canonical=canonical,
indent=indent,
width=width,
allow_unicode=allow_unicode,
line_break=line_break,
block_seq_indent=block_seq_indent,
dumper=self,
)
Serializer.__init__(
self,
encoding=encoding,
explicit_start=explicit_start,
explicit_end=explicit_end,
version=version,
tags=tags,
dumper=self,
)
BaseRepresenter.__init__(
self,
default_style=default_style,
default_flow_style=default_flow_style,
dumper=self,
)
BaseResolver.__init__(self, loadumper=self)
class SafeDumper(Emitter, Serializer, SafeRepresenter, Resolver):
def __init__(
self,
stream: StreamType,
default_style: Any = None,
default_flow_style: Any = None,
canonical: Optional[bool] = None,
indent: Optional[int] = None,
width: Optional[int] = None,
allow_unicode: Optional[bool] = None,
line_break: Any = None,
encoding: Any = None,
explicit_start: Optional[bool] = None,
explicit_end: Optional[bool] = None,
version: Any = None,
tags: Any = None,
block_seq_indent: Any = None,
top_level_colon_align: Any = None,
prefix_colon: Any = None,
) -> None:
# NOQA
Emitter.__init__(
self,
stream,
canonical=canonical,
indent=indent,
width=width,
allow_unicode=allow_unicode,
line_break=line_break,
block_seq_indent=block_seq_indent,
dumper=self,
)
Serializer.__init__(
self,
encoding=encoding,
explicit_start=explicit_start,
explicit_end=explicit_end,
version=version,
tags=tags,
dumper=self,
)
SafeRepresenter.__init__(
self,
default_style=default_style,
default_flow_style=default_flow_style,
dumper=self,
)
Resolver.__init__(self, loadumper=self)
class Dumper(Emitter, Serializer, Representer, Resolver):
def __init__(
self,
stream: StreamType,
default_style: Any = None,
default_flow_style: Any = None,
canonical: Optional[bool] = None,
indent: Optional[int] = None,
width: Optional[int] = None,
allow_unicode: Optional[bool] = None,
line_break: Any = None,
encoding: Any = None,
explicit_start: Optional[bool] = None,
explicit_end: Optional[bool] = None,
version: Any = None,
tags: Any = None,
block_seq_indent: Any = None,
top_level_colon_align: Any = None,
prefix_colon: Any = None,
) -> None:
# NOQA
Emitter.__init__(
self,
stream,
canonical=canonical,
indent=indent,
width=width,
allow_unicode=allow_unicode,
line_break=line_break,
block_seq_indent=block_seq_indent,
dumper=self,
)
Serializer.__init__(
self,
encoding=encoding,
explicit_start=explicit_start,
explicit_end=explicit_end,
version=version,
tags=tags,
dumper=self,
)
Representer.__init__(
self,
default_style=default_style,
default_flow_style=default_flow_style,
dumper=self,
)
Resolver.__init__(self, loadumper=self)
class RoundTripDumper(Emitter, Serializer, RoundTripRepresenter, VersionedResolver):
def __init__(
self,
stream: StreamType,
default_style: Any = None,
default_flow_style: Optional[bool] = None,
canonical: Optional[int] = None,
indent: Optional[int] = None,
width: Optional[int] = None,
allow_unicode: Optional[bool] = None,
line_break: Any = None,
encoding: Any = None,
explicit_start: Optional[bool] = None,
explicit_end: Optional[bool] = None,
version: Any = None,
tags: Any = None,
block_seq_indent: Any = None,
top_level_colon_align: Any = None,
prefix_colon: Any = None,
) -> None:
# NOQA
Emitter.__init__(
self,
stream,
canonical=canonical,
indent=indent,
width=width,
allow_unicode=allow_unicode,
line_break=line_break,
block_seq_indent=block_seq_indent,
top_level_colon_align=top_level_colon_align,
prefix_colon=prefix_colon,
dumper=self,
)
Serializer.__init__(
self,
encoding=encoding,
explicit_start=explicit_start,
explicit_end=explicit_end,
version=version,
tags=tags,
dumper=self,
)
RoundTripRepresenter.__init__(
self,
default_style=default_style,
default_flow_style=default_flow_style,
dumper=self,
)
VersionedResolver.__init__(self, loader=self)
python-ruyaml-0.92.1/lib/ruyaml/emitter.py 0000664 0000000 0000000 00000212714 15056754172 0020560 0 ustar 00root root 0000000 0000000 from __future__ import annotations
import sys
# fmt: off
from ruyaml.compat import DBG_EVENT, check_anchorname_char, dbg, nprint, nprintf # NOQA
from ruyaml.error import YAMLError, YAMLStreamError
from ruyaml.events import * # NOQA
# Emitter expects events obeying the following grammar:
# stream ::= STREAM-START document* STREAM-END
# document ::= DOCUMENT-START node DOCUMENT-END
# node ::= SCALAR | sequence | mapping
# sequence ::= SEQUENCE-START node* SEQUENCE-END
# mapping ::= MAPPING-START (node node)* MAPPING-END
# fmt: on
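# Illustrative note (not part of the original source): for a document such as
# `{a: [1, 2]}` the event stream fed to the emitter follows the grammar above
# roughly like this:
#
#   StreamStartEvent
#     DocumentStartEvent
#       MappingStartEvent
#         ScalarEvent('a')
#         SequenceStartEvent
#           ScalarEvent('1'), ScalarEvent('2')
#         SequenceEndEvent
#       MappingEndEvent
#     DocumentEndEvent
#   StreamEndEvent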
if False: # MYPY
from typing import Any, Dict, List, Optional, Text, Tuple, Union # NOQA
from ruyaml.compat import StreamType # NOQA
__all__ = ['Emitter', 'EmitterError']
class EmitterError(YAMLError):
pass
class ScalarAnalysis:
def __init__(
self,
scalar: Any,
empty: Any,
multiline: Any,
allow_flow_plain: bool,
allow_block_plain: bool,
allow_single_quoted: bool,
allow_double_quoted: bool,
allow_block: bool,
) -> None:
self.scalar = scalar
self.empty = empty
self.multiline = multiline
self.allow_flow_plain = allow_flow_plain
self.allow_block_plain = allow_block_plain
self.allow_single_quoted = allow_single_quoted
self.allow_double_quoted = allow_double_quoted
self.allow_block = allow_block
def __repr__(self) -> str:
return f'scalar={self.scalar!r}, empty={self.empty}, multiline={self.multiline}, allow_flow_plain={self.allow_flow_plain}, allow_block_plain={self.allow_block_plain}, allow_single_quoted={self.allow_single_quoted}, allow_double_quoted={self.allow_double_quoted}, allow_block={self.allow_block}' # NOQA
class Indents:
# replacement for the list based stack of None/int
def __init__(self) -> None:
self.values: List[Tuple[Any, bool]] = []
def append(self, val: Any, seq: Any) -> None:
self.values.append((val, seq))
def pop(self) -> Any:
return self.values.pop()[0]
def seq_seq(self) -> bool:
try:
if self.values[-2][1] and self.values[-1][1]:
return True
except IndexError:
pass
return False
def last_seq(self) -> bool:
# return the seq(uence) value for the element added before the last one
# in increase_indent()
try:
return self.values[-2][1]
except IndexError:
return False
def seq_flow_align(
self,
seq_indent: int,
column: int,
pre_comment: Optional[bool] = False,
) -> int:
# extra spaces because of dash
# nprint('seq_flow_align', self.values, pre_comment)
if len(self.values) < 2 or not self.values[-1][1]:
if len(self.values) == 0 or not pre_comment:
return 0
base = self.values[-1][0] if self.values[-1][0] is not None else 0
if pre_comment:
return base + seq_indent # type: ignore
# return (len(self.values)) * seq_indent
# -1 for the dash
return base + seq_indent - column - 1 # type: ignore
def __len__(self) -> int:
return len(self.values)
class Emitter:
# fmt: off
DEFAULT_TAG_PREFIXES = {
'!': '!',
'tag:yaml.org,2002:': '!!',
'!!': '!!',
}
# fmt: on
MAX_SIMPLE_KEY_LENGTH = 128
flow_seq_start = '['
flow_seq_end = ']'
flow_seq_separator = ','
flow_map_start = '{'
flow_map_end = '}'
flow_map_separator = ','
def __init__(
self,
stream: StreamType,
canonical: Any = None,
indent: Optional[int] = None,
width: Optional[int] = None,
allow_unicode: Optional[bool] = None,
line_break: Any = None,
block_seq_indent: Optional[int] = None,
top_level_colon_align: Optional[bool] = None,
prefix_colon: Any = None,
brace_single_entry_mapping_in_flow_sequence: Optional[bool] = None,
dumper: Any = None,
) -> None:
# NOQA
self.dumper = dumper
if self.dumper is not None and getattr(self.dumper, '_emitter', None) is None:
self.dumper._emitter = self
self.stream = stream
# Encoding can be overridden by STREAM-START.
self.encoding: Optional[Text] = None
self.allow_space_break = None
# Emitter is a state machine with a stack of states to handle nested
# structures.
self.states: List[Any] = []
self.state: Any = self.expect_stream_start
# Current event and the event queue.
self.events: List[Any] = []
self.event: Any = None
# The current indentation level and the stack of previous indents.
self.indents = Indents()
self.indent: Optional[int] = None
# flow_context is an expanding/shrinking list consisting of '{' and '['
# for each unclosed flow context. If empty list that means block context
self.flow_context: List[Text] = []
# Contexts.
self.root_context = False
self.sequence_context = False
self.mapping_context = False
self.simple_key_context = False
# Characteristics of the last emitted character:
# - current position.
# - is it a whitespace?
# - is it an indention character
# (indentation space, '-', '?', or ':')?
self.line = 0
self.column = 0
self.whitespace = True
self.indention = True
self.compact_seq_seq = True # dash after dash
self.compact_seq_map = True # key after dash
# self.compact_ms = False # dash after key, only when explicit key with ?
self.no_newline: Optional[bool] = None # set if directly after `- `
# Whether the document requires an explicit document end indicator
self.open_ended = False
# colon handling
self.colon = ':'
self.prefixed_colon = (
self.colon if prefix_colon is None else prefix_colon + self.colon
)
# single entry mappings in flow sequence
self.brace_single_entry_mapping_in_flow_sequence = (
brace_single_entry_mapping_in_flow_sequence # NOQA
)
# Formatting details.
self.canonical = canonical
self.allow_unicode = allow_unicode
# set to False to get "\Uxxxxxxxx" for non-basic unicode like emojis
self.unicode_supplementary = sys.maxunicode > 0xFFFF
self.sequence_dash_offset = block_seq_indent if block_seq_indent else 0
self.top_level_colon_align = top_level_colon_align
self.best_sequence_indent = 2
self.requested_indent = indent # specific for literal zero indent
if indent and 1 < indent < 10:
self.best_sequence_indent = indent
self.best_map_indent = self.best_sequence_indent
# if self.best_sequence_indent < self.sequence_dash_offset + 1:
# self.best_sequence_indent = self.sequence_dash_offset + 1
self.best_width = 80
if width and width > self.best_sequence_indent * 2:
self.best_width = width
self.best_line_break: Any = '\n'
if line_break in ['\r', '\n', '\r\n']:
self.best_line_break = line_break
# Tag prefixes.
self.tag_prefixes: Any = None
# Prepared anchor and tag.
self.prepared_anchor: Any = None
self.prepared_tag: Any = None
# Scalar analysis and style.
self.analysis: Any = None
self.style: Any = None
self.scalar_after_indicator = True # write a scalar on the same line as `---`
self.alt_null = 'null'
@property
def stream(self) -> Any:
try:
return self._stream
except AttributeError:
raise YAMLStreamError('output stream needs to be specified')
@stream.setter
def stream(self, val: Any) -> None:
if val is None:
return
if not hasattr(val, 'write'):
raise YAMLStreamError('stream argument needs to have a write() method')
self._stream = val
@property
def serializer(self) -> Any:
try:
if hasattr(self.dumper, 'typ'):
return self.dumper.serializer # type: ignore
return self.dumper._serializer # type: ignore
except AttributeError:
return self # cyaml
@property
def flow_level(self) -> int:
return len(self.flow_context)
def dispose(self) -> None:
# Reset the state attributes (to clear self-references)
self.states = []
self.state = None
def emit(self, event: Any) -> None:
if dbg(DBG_EVENT):
nprint(event)
self.events.append(event)
while not self.need_more_events():
self.event = self.events.pop(0)
self.state()
self.event = None
# In some cases, we wait for a few next events before emitting.
def need_more_events(self) -> bool:
if not self.events:
return True
event = self.events[0]
if isinstance(event, DocumentStartEvent):
return self.need_events(1)
elif isinstance(event, SequenceStartEvent):
return self.need_events(2)
elif isinstance(event, MappingStartEvent):
return self.need_events(3)
else:
return False
def need_events(self, count: int) -> bool:
level = 0
for event in self.events[1:]:
if isinstance(event, (DocumentStartEvent, CollectionStartEvent)):
level += 1
elif isinstance(event, (DocumentEndEvent, CollectionEndEvent)):
level -= 1
elif isinstance(event, StreamEndEvent):
level = -1
if level < 0:
return False
return len(self.events) < count + 1
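# Illustrative note (not part of the original source): the one-, two- and
# three-event look-ahead requested in need_more_events() is what lets
# check_empty_document(), check_empty_sequence() and check_empty_mapping()
# further down inspect the queued events (self.events[0]) before the
# corresponding start indicator has been committed to the stream.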
def increase_indent(
self,
flow: bool = False,
sequence: Optional[bool] = None,
indentless: bool = False,
) -> None:
self.indents.append(self.indent, sequence)
if self.indent is None: # top level
if flow:
# self.indent = self.best_sequence_indent if self.indents.last_seq() else \
# self.best_map_indent
# self.indent = self.best_sequence_indent
self.indent = self.requested_indent
else:
self.indent = 0
elif not indentless:
self.indent += (
self.best_sequence_indent
if self.indents.last_seq()
else self.best_map_indent
)
# if self.indents.last_seq():
# if self.indent == 0: # top level block sequence
# self.indent = self.best_sequence_indent - self.sequence_dash_offset
# else:
# self.indent += self.best_sequence_indent
# else:
# self.indent += self.best_map_indent
# States.
# Stream handlers.
def expect_stream_start(self) -> None:
if isinstance(self.event, StreamStartEvent):
if self.event.encoding and not hasattr(self.stream, 'encoding'):
self.encoding = self.event.encoding
self.write_stream_start()
self.state = self.expect_first_document_start
else:
raise EmitterError(f'expected StreamStartEvent, but got {self.event!s}')
def expect_nothing(self) -> None:
raise EmitterError(f'expected nothing, but got {self.event!s}')
# Document handlers.
def expect_first_document_start(self) -> Any:
return self.expect_document_start(first=True)
def expect_document_start(self, first: bool = False) -> None:
if isinstance(self.event, DocumentStartEvent):
if (self.event.version or self.event.tags) and self.open_ended:
self.write_indicator('...', True)
self.write_indent()
if self.event.version:
version_text = self.prepare_version(self.event.version)
self.write_version_directive(version_text)
self.tag_prefixes = self.DEFAULT_TAG_PREFIXES.copy()
if self.event.tags:
handles = sorted(self.event.tags.keys())
for handle in handles:
prefix = self.event.tags[handle]
self.tag_prefixes[prefix] = handle
handle_text = self.prepare_tag_handle(handle)
prefix_text = self.prepare_tag_prefix(prefix)
self.write_tag_directive(handle_text, prefix_text)
implicit = (
first
and not self.event.explicit
and not self.canonical
and not self.event.version
and not self.event.tags
and not self.check_empty_document()
)
if not implicit:
self.write_indent()
self.write_indicator('---', True)
if self.canonical:
self.write_indent()
self.state = self.expect_document_root
elif isinstance(self.event, StreamEndEvent):
if self.open_ended:
self.write_indicator('...', True)
self.write_indent()
self.write_stream_end()
self.state = self.expect_nothing
else:
raise EmitterError(f'expected DocumentStartEvent, but got {self.event!s}')
def expect_document_end(self) -> None:
if isinstance(self.event, DocumentEndEvent):
self.write_indent()
if self.event.explicit:
self.write_indicator('...', True)
self.write_indent()
self.flush_stream()
self.state = self.expect_document_start
else:
raise EmitterError(f'expected DocumentEndEvent, but got {self.event!s}')
def expect_document_root(self) -> None:
self.states.append(self.expect_document_end)
self.expect_node(root=True)
# Node handlers.
def expect_node(
self,
root: bool = False,
sequence: bool = False,
mapping: bool = False,
simple_key: bool = False,
) -> None:
self.root_context = root
self.sequence_context = sequence # not used in PyYAML
force_flow_indent = False
self.mapping_context = mapping
self.simple_key_context = simple_key
if isinstance(self.event, AliasEvent):
self.expect_alias()
elif isinstance(self.event, (ScalarEvent, CollectionStartEvent)):
if (
self.process_anchor('&')
and isinstance(self.event, ScalarEvent)
and self.sequence_context
):
self.sequence_context = False
if (
root
and isinstance(self.event, ScalarEvent)
and not self.scalar_after_indicator
):
self.write_indent()
self.process_tag()
if isinstance(self.event, ScalarEvent):
# nprint('@', self.indention, self.no_newline, self.column)
self.expect_scalar()
elif isinstance(self.event, SequenceStartEvent):
i2, n2 = self.indention, self.no_newline # NOQA
if self.event.comment:
if self.event.flow_style is False:
if self.write_post_comment(self.event):
self.indention = False
self.no_newline = True
if self.event.flow_style:
column = self.column
if self.write_pre_comment(self.event):
if self.event.flow_style:
# force_flow_indent = True
force_flow_indent = not self.indents.values[-1][1]
self.indention = i2
self.no_newline = not self.indention
if self.event.flow_style:
self.column = column
if (
self.flow_level
or self.canonical
or self.event.flow_style
or self.check_empty_sequence()
):
self.expect_flow_sequence(force_flow_indent)
else:
self.expect_block_sequence()
if self.indents.seq_seq():
# - -
self.indention = True
self.no_newline = False
elif isinstance(self.event, MappingStartEvent):
if self.event.flow_style is False and self.event.comment:
self.write_post_comment(self.event)
if self.event.comment and self.event.comment[1]:
self.write_pre_comment(self.event)
if self.event.flow_style and self.indents.values:
force_flow_indent = not self.indents.values[-1][1]
if (
self.flow_level
or self.canonical
or self.event.flow_style
or self.check_empty_mapping()
):
self.expect_flow_mapping(
single=self.event.nr_items == 1,
force_flow_indent=force_flow_indent,
)
else:
self.expect_block_mapping()
else:
raise EmitterError(f'expected NodeEvent, but got {self.event!s}')
def expect_alias(self) -> None:
if self.event.anchor is None:
raise EmitterError('anchor is not specified for alias')
self.process_anchor('*')
self.state = self.states.pop()
def expect_scalar(self) -> None:
self.increase_indent(flow=True)
self.process_scalar()
self.indent = self.indents.pop()
self.state = self.states.pop()
# Flow sequence handlers.
def expect_flow_sequence(self, force_flow_indent: Optional[bool] = False) -> None:
if force_flow_indent:
self.increase_indent(flow=True, sequence=True)
ind = self.indents.seq_flow_align(
self.best_sequence_indent,
self.column,
force_flow_indent,
)
self.write_indicator(' ' * ind + self.flow_seq_start, True, whitespace=True)
if not force_flow_indent:
self.increase_indent(flow=True, sequence=True)
self.flow_context.append('[')
self.state = self.expect_first_flow_sequence_item
def expect_first_flow_sequence_item(self) -> None:
if isinstance(self.event, SequenceEndEvent):
self.indent = self.indents.pop()
popped = self.flow_context.pop()
assert popped == '['
self.write_indicator(self.flow_seq_end, False)
if self.event.comment and self.event.comment[0]:
# eol comment on empty flow sequence
self.write_post_comment(self.event)
elif self.flow_level == 0:
self.write_line_break()
self.state = self.states.pop()
else:
if self.canonical or self.column > self.best_width:
self.write_indent()
self.states.append(self.expect_flow_sequence_item)
self.expect_node(sequence=True)
def expect_flow_sequence_item(self) -> None:
if isinstance(self.event, SequenceEndEvent):
self.indent = self.indents.pop()
popped = self.flow_context.pop()
assert popped == '['
if self.canonical:
# ToDo: so-39595807, maybe add a space to the flow_seq_separator
# and strip the last space, if space then indent, else do not
# not sure that [1,2,3] is a valid YAML seq
self.write_indicator(self.flow_seq_separator, False)
self.write_indent()
self.write_indicator(self.flow_seq_end, False)
if self.event.comment and self.event.comment[0]:
# eol comment on flow sequence
self.write_post_comment(self.event)
else:
self.no_newline = False
self.state = self.states.pop()
else:
self.write_indicator(self.flow_seq_separator, False)
if self.canonical or self.column > self.best_width:
self.write_indent()
self.states.append(self.expect_flow_sequence_item)
self.expect_node(sequence=True)
# Flow mapping handlers.
def expect_flow_mapping(
self,
single: Optional[bool] = False,
force_flow_indent: Optional[bool] = False,
) -> None:
if force_flow_indent:
self.increase_indent(flow=True, sequence=False)
ind = self.indents.seq_flow_align(
self.best_sequence_indent,
self.column,
force_flow_indent,
)
map_init = self.flow_map_start
if (
single
and self.flow_level
and self.flow_context[-1] == '['
and not self.canonical
and not self.brace_single_entry_mapping_in_flow_sequence
):
# single map item with flow context, no curly braces necessary
map_init = ''
self.write_indicator(' ' * ind + map_init, True, whitespace=True)
self.flow_context.append(map_init)
if not force_flow_indent:
self.increase_indent(flow=True, sequence=False)
self.state = self.expect_first_flow_mapping_key
def expect_first_flow_mapping_key(self) -> None:
if isinstance(self.event, MappingEndEvent):
self.indent = self.indents.pop()
popped = self.flow_context.pop()
assert popped == '{' # empty flow mapping
self.write_indicator(self.flow_map_end, False)
if self.event.comment and self.event.comment[0]:
# eol comment on empty mapping
self.write_post_comment(self.event)
elif self.flow_level == 0:
self.write_line_break()
self.state = self.states.pop()
else:
if self.canonical or self.column > self.best_width:
self.write_indent()
if not self.canonical and self.check_simple_key():
self.states.append(self.expect_flow_mapping_simple_value)
self.expect_node(mapping=True, simple_key=True)
else:
self.write_indicator('?', True)
self.states.append(self.expect_flow_mapping_value)
self.expect_node(mapping=True)
def expect_flow_mapping_key(self) -> None:
if isinstance(self.event, MappingEndEvent):
# if self.event.comment and self.event.comment[1]:
# self.write_pre_comment(self.event)
self.indent = self.indents.pop()
popped = self.flow_context.pop()
assert popped in ['{', '']
if self.canonical:
self.write_indicator(self.flow_map_separator, False)
self.write_indent()
if popped != '':
self.write_indicator(self.flow_map_end, False)
if self.event.comment and self.event.comment[0]:
# eol comment on flow mapping, never reached on empty mappings
self.write_post_comment(self.event)
else:
self.no_newline = False
self.state = self.states.pop()
else:
self.write_indicator(self.flow_map_separator, False)
if self.canonical or self.column > self.best_width:
self.write_indent()
if not self.canonical and self.check_simple_key():
self.states.append(self.expect_flow_mapping_simple_value)
self.expect_node(mapping=True, simple_key=True)
else:
self.write_indicator('?', True)
self.states.append(self.expect_flow_mapping_value)
self.expect_node(mapping=True)
def expect_flow_mapping_simple_value(self) -> None:
if getattr(self.event, 'style', '?') != '-': # suppress for flow style sets
self.write_indicator(self.prefixed_colon, False)
self.states.append(self.expect_flow_mapping_key)
self.expect_node(mapping=True)
def expect_flow_mapping_value(self) -> None:
if self.canonical or self.column > self.best_width:
self.write_indent()
self.write_indicator(self.prefixed_colon, True)
self.states.append(self.expect_flow_mapping_key)
self.expect_node(mapping=True)
# Block sequence handlers.
def expect_block_sequence(self) -> None:
if self.mapping_context:
indentless = not self.indention
else:
indentless = False
if not self.compact_seq_seq and self.column != 0:
self.write_line_break()
self.increase_indent(flow=False, sequence=True, indentless=indentless)
self.state = self.expect_first_block_sequence_item
def expect_first_block_sequence_item(self) -> Any:
return self.expect_block_sequence_item(first=True)
def expect_block_sequence_item(self, first: bool = False) -> None:
if not first and isinstance(self.event, SequenceEndEvent):
if self.event.comment and self.event.comment[1]:
# final comments on a block list e.g. empty line
self.write_pre_comment(self.event)
self.indent = self.indents.pop()
self.state = self.states.pop()
self.no_newline = False
else:
if self.event.comment and self.event.comment[1]:
self.write_pre_comment(self.event)
nonl = self.no_newline if self.column == 0 else False
self.write_indent()
ind = self.sequence_dash_offset # if len(self.indents) > 1 else 0
self.write_indicator(' ' * ind + '-', True, indention=True)
if nonl or self.sequence_dash_offset + 2 > self.best_sequence_indent:
self.no_newline = True
self.states.append(self.expect_block_sequence_item)
self.expect_node(sequence=True)
# Block mapping handlers.
def expect_block_mapping(self) -> None:
if not self.mapping_context and not (self.compact_seq_map or self.column == 0):
self.write_line_break()
self.increase_indent(flow=False, sequence=False)
self.state = self.expect_first_block_mapping_key
def expect_first_block_mapping_key(self) -> None:
return self.expect_block_mapping_key(first=True)
def expect_block_mapping_key(self, first: Any = False) -> None:
if not first and isinstance(self.event, MappingEndEvent):
if self.event.comment and self.event.comment[1]:
# final comments from a doc
self.write_pre_comment(self.event)
self.indent = self.indents.pop()
self.state = self.states.pop()
else:
if self.event.comment and self.event.comment[1]:
# final comments from a doc
self.write_pre_comment(self.event)
self.write_indent()
if self.check_simple_key():
if not isinstance(
self.event,
(SequenceStartEvent, MappingStartEvent),
): # sequence keys
try:
if self.event.style == '?':
self.write_indicator('?', True, indention=True)
except AttributeError: # aliases have no style
pass
self.states.append(self.expect_block_mapping_simple_value)
self.expect_node(mapping=True, simple_key=True)
# test on style for alias in !!set
if isinstance(self.event, AliasEvent) and not self.event.style == '?':
self.stream.write(' ')
else:
self.write_indicator('?', True, indention=True)
self.states.append(self.expect_block_mapping_value)
self.expect_node(mapping=True)
def expect_block_mapping_simple_value(self) -> None:
if getattr(self.event, 'style', None) != '?':
# prefix = ''
if self.indent == 0 and self.top_level_colon_align is not None:
# write non-prefixed colon
c = ' ' * (self.top_level_colon_align - self.column) + self.colon
else:
c = self.prefixed_colon
self.write_indicator(c, False)
self.states.append(self.expect_block_mapping_key)
self.expect_node(mapping=True)
def expect_block_mapping_value(self) -> None:
self.write_indent()
self.write_indicator(self.prefixed_colon, True, indention=True)
self.states.append(self.expect_block_mapping_key)
self.expect_node(mapping=True)
# Checkers.
def check_empty_sequence(self) -> bool:
return (
isinstance(self.event, SequenceStartEvent)
and bool(self.events)
and isinstance(self.events[0], SequenceEndEvent)
)
def check_empty_mapping(self) -> bool:
return (
isinstance(self.event, MappingStartEvent)
and bool(self.events)
and isinstance(self.events[0], MappingEndEvent)
)
def check_empty_document(self) -> bool:
if not isinstance(self.event, DocumentStartEvent) or not self.events:
return False
event = self.events[0]
return (
isinstance(event, ScalarEvent)
and event.anchor is None
and event.tag is None
and event.implicit
and event.value == ""
)
def check_simple_key(self) -> bool:
length = 0
if isinstance(self.event, NodeEvent) and self.event.anchor is not None:
if self.prepared_anchor is None:
self.prepared_anchor = self.prepare_anchor(self.event.anchor)
length += len(self.prepared_anchor)
if (
isinstance(self.event, (ScalarEvent, CollectionStartEvent))
and self.event.tag is not None
):
if self.prepared_tag is None:
self.prepared_tag = self.prepare_tag(self.event.ctag)
length += len(self.prepared_tag)
if isinstance(self.event, ScalarEvent):
if self.analysis is None:
self.analysis = self.analyze_scalar(self.event.value)
length += len(self.analysis.scalar)
return length < self.MAX_SIMPLE_KEY_LENGTH and (
isinstance(self.event, AliasEvent)
or (
isinstance(self.event, SequenceStartEvent)
and self.event.flow_style is True
)
or (
isinstance(self.event, MappingStartEvent)
and self.event.flow_style is True
)
or (
isinstance(self.event, ScalarEvent)
# if there is an explicit style for an empty string, it is a simple key
and not (self.analysis.empty and self.style and self.style not in '\'"')
and not self.analysis.multiline
)
or self.check_empty_sequence()
or self.check_empty_mapping()
)
# Anchor, Tag, and Scalar processors.
def process_anchor(self, indicator: Any) -> bool:
if self.event.anchor is None:
self.prepared_anchor = None
return False
if self.prepared_anchor is None:
self.prepared_anchor = self.prepare_anchor(self.event.anchor)
if self.prepared_anchor:
self.write_indicator(indicator + self.prepared_anchor, True)
# issue 288
self.no_newline = False
self.prepared_anchor = None
return True
def process_tag(self) -> None:
tag = self.event.tag
if isinstance(self.event, ScalarEvent):
if self.style is None:
self.style = self.choose_scalar_style()
if (
self.event.value == ''
and self.style == "'"
and tag == 'tag:yaml.org,2002:null'
and self.alt_null is not None
):
self.event.value = self.alt_null
self.analysis = None
self.style = self.choose_scalar_style()
if (not self.canonical or tag is None) and (
(self.style == "" and self.event.implicit[0])
or (self.style != "" and self.event.implicit[1])
):
self.prepared_tag = None
return
if self.event.implicit[0] and tag is None:
tag = '!'
self.prepared_tag = None
else:
if (not self.canonical or tag is None) and self.event.implicit:
self.prepared_tag = None
return
if tag is None:
raise EmitterError('tag is not specified')
if self.prepared_tag is None:
self.prepared_tag = self.prepare_tag(self.event.ctag)
if self.prepared_tag:
self.write_indicator(self.prepared_tag, True)
if (
self.sequence_context
and not self.flow_level
and isinstance(self.event, ScalarEvent)
):
self.no_newline = True
self.prepared_tag = None
def choose_scalar_style(self) -> Any:
# issue 449 needs this otherwise emits single quoted empty string
if self.event.value == '' and self.event.ctag.handle == '!!':
return None
if self.analysis is None:
self.analysis = self.analyze_scalar(self.event.value)
if self.event.style == '"' or self.canonical:
return '"'
if (
not self.event.style or self.event.style == '?' or self.event.style == '-'
) and (self.event.implicit[0] or not self.event.implicit[2]):
if not (
self.simple_key_context
and (self.analysis.empty or self.analysis.multiline)
) and (
self.flow_level
and self.analysis.allow_flow_plain
or (not self.flow_level and self.analysis.allow_block_plain)
):
return ""
if self.event.style == '-':
return ""
self.analysis.allow_block = True
if self.event.style and self.event.style in '|>':
if (
not self.flow_level
and not self.simple_key_context
and self.analysis.allow_block
):
return self.event.style
if not self.event.style and self.analysis.allow_double_quoted:
if "'" in self.event.value or '\n' in self.event.value:
return '"'
if not self.event.style or self.event.style == "'":
if self.analysis.allow_single_quoted and not (
self.simple_key_context and self.analysis.multiline
):
return "'"
return '"'
def process_scalar(self) -> None:
if self.analysis is None:
self.analysis = self.analyze_scalar(self.event.value)
if self.style is None:
self.style = self.choose_scalar_style()
split = not self.simple_key_context
# if self.analysis.multiline and split \
# and (not self.style or self.style in '\'\"'):
# self.write_indent()
# nprint('xx', self.sequence_context, self.flow_level)
if self.sequence_context and not self.flow_level:
self.write_indent()
if self.style == '"':
self.write_double_quoted(self.analysis.scalar, split)
elif self.style == "'":
self.write_single_quoted(self.analysis.scalar, split)
elif self.style == '>':
try:
cmx = self.event.comment[1][0]
except (IndexError, TypeError) as e: # NOQA
cmx = ""
self.write_folded(self.analysis.scalar, cmx)
if (
self.event.comment
and self.indent is not None
and self.event.comment[0]
and self.event.comment[0].column >= self.indent
):
# comment following a folded scalar must dedent (issue 376)
self.event.comment[0].column = self.indent - 1 # type: ignore
elif self.style == '|':
# self.write_literal(self.analysis.scalar, self.event.comment)
try:
cmx = self.event.comment[1][0]
except (IndexError, TypeError):
cmx = ""
self.write_literal(self.analysis.scalar, cmx)
if (
self.event.comment
and self.indent is not None
and self.event.comment[0]
and self.event.comment[0].column >= self.indent
):
# comment following a literal scalar must dedent (issue 376)
self.event.comment[0].column = self.indent - 1 # type: ignore
else:
self.write_plain(self.analysis.scalar, split)
self.analysis = None
self.style = None
if self.event.comment:
self.write_post_comment(self.event)
# Analyzers.
def prepare_version(self, version: Any) -> Any:
major, minor = version
if major != 1:
raise EmitterError(f'unsupported YAML version: {major:d}.{minor:d}')
return f'{major:d}.{minor:d}'
def prepare_tag_handle(self, handle: Any) -> Any:
if not handle:
raise EmitterError('tag handle must not be empty')
if handle[0] != '!' or handle[-1] != '!':
raise EmitterError(f"tag handle must start and end with '!': {handle!r}")
for ch in handle[1:-1]:
if not (
'0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' or ch in '-_'
):
raise EmitterError(
f'invalid character {ch!r} in the tag handle: {handle!r}'
)
return handle
def prepare_tag_prefix(self, prefix: Any) -> Any:
if not prefix:
raise EmitterError('tag prefix must not be empty')
chunks: List[Any] = []
start = end = 0
if prefix[0] == '!':
end = 1
ch_set = "-;/?:@&=+$,_.~*'()[]"
if self.dumper:
version = getattr(self.dumper, 'version', (1, 2))
if version is None or version >= (1, 2):
ch_set += '#'
while end < len(prefix):
ch = prefix[end]
if '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' or ch in ch_set:
end += 1
else:
if start < end:
chunks.append(prefix[start:end])
start = end = end + 1
data = ch
for ch in data:
chunks.append(f'%{ord(ch):02X}')
if start < end:
chunks.append(prefix[start:end])
return "".join(chunks)
def prepare_tag(self, tag: Any) -> Any:
if not tag:
raise EmitterError('tag must not be empty')
tag = str(tag)
if tag == '!' or tag == '!!':
return tag
handle = None
suffix = tag
prefixes = sorted(self.tag_prefixes.keys())
for prefix in prefixes:
if tag.startswith(prefix) and (prefix == '!' or len(prefix) < len(tag)):
handle = self.tag_prefixes[prefix]
suffix = tag[len(prefix) :]
chunks: List[Any] = []
start = end = 0
ch_set = "-;/?:@&=+$,_.~*'()[]"
if self.dumper:
version = getattr(self.dumper, 'version', (1, 2))
if version is None or version >= (1, 2):
ch_set += '#'
while end < len(suffix):
ch = suffix[end]
if (
'0' <= ch <= '9'
or 'A' <= ch <= 'Z'
or 'a' <= ch <= 'z'
or ch in ch_set
or (ch == '!' and handle != '!')
):
end += 1
else:
if start < end:
chunks.append(suffix[start:end])
start = end = end + 1
data = ch
for ch in data:
chunks.append(f'%{ord(ch):02X}')
if start < end:
chunks.append(suffix[start:end])
suffix_text = "".join(chunks)
if handle:
return f'{handle!s}{suffix_text!s}'
else:
return f'!<{suffix_text!s}>'
def prepare_anchor(self, anchor: Any) -> Any:
if not anchor:
raise EmitterError('anchor must not be empty')
for ch in anchor:
if not check_anchorname_char(ch):
raise EmitterError(
f'invalid character {ch!r} in the anchor: {anchor!r}'
)
return anchor
def analyze_scalar(self, scalar: Any) -> Any:
# Empty scalar is a special case.
if not scalar:
return ScalarAnalysis(
scalar=scalar,
empty=True,
multiline=False,
allow_flow_plain=False,
allow_block_plain=True,
allow_single_quoted=True,
allow_double_quoted=True,
allow_block=False,
)
# Indicators and special characters.
block_indicators = False
flow_indicators = False
line_breaks = False
special_characters = False
# Important whitespace combinations.
leading_space = False
leading_break = False
trailing_space = False
trailing_break = False
break_space = False
space_break = False
# Check document indicators.
if scalar.startswith('---') or scalar.startswith('...'):
block_indicators = True
flow_indicators = True
# First character or preceded by a whitespace.
preceeded_by_whitespace = True
# Last character or followed by a whitespace.
followed_by_whitespace = (
len(scalar) == 1 or scalar[1] in '\0 \t\r\n\x85\u2028\u2029'
)
# The previous character is a space.
previous_space = False
# The previous character is a break.
previous_break = False
index = 0
while index < len(scalar):
ch = scalar[index]
# Check for indicators.
if index == 0:
# Leading indicators are special characters.
if ch in '#,[]{}&*!|>\'"%@`':
flow_indicators = True
block_indicators = True
if ch in '?:': # ToDo
if self.serializer.use_version == (1, 1):
flow_indicators = True
elif len(scalar) == 1: # single character
flow_indicators = True
if followed_by_whitespace:
block_indicators = True
if ch == '-' and followed_by_whitespace:
flow_indicators = True
block_indicators = True
else:
# Some indicators cannot appear within a scalar as well.
if ch in ',[]{}': # http://yaml.org/spec/1.2/spec.html#id2788859
flow_indicators = True
if ch == '?' and self.serializer.use_version == (1, 1):
flow_indicators = True
if ch == ':':
if followed_by_whitespace:
flow_indicators = True
block_indicators = True
if ch == '#' and preceeded_by_whitespace:
flow_indicators = True
block_indicators = True
# Check for line breaks, special, and unicode characters.
if ch in '\n\x85\u2028\u2029':
line_breaks = True
if not (ch == '\n' or '\x20' <= ch <= '\x7E'):
if (
ch == '\x85'
or '\xA0' <= ch <= '\uD7FF'
or '\uE000' <= ch <= '\uFFFD'
or (
self.unicode_supplementary
and ('\U00010000' <= ch <= '\U0010FFFF')
)
) and ch != '\uFEFF':
# unicode_characters = True
if not self.allow_unicode:
special_characters = True
else:
special_characters = True
# Detect important whitespace combinations.
if ch == ' ':
if index == 0:
leading_space = True
if index == len(scalar) - 1:
trailing_space = True
if previous_break:
break_space = True
previous_space = True
previous_break = False
elif ch in '\n\x85\u2028\u2029':
if index == 0:
leading_break = True
if index == len(scalar) - 1:
trailing_break = True
if previous_space:
space_break = True
previous_space = False
previous_break = True
else:
previous_space = False
previous_break = False
# Prepare for the next character.
index += 1
preceeded_by_whitespace = ch in '\0 \t\r\n\x85\u2028\u2029'
followed_by_whitespace = (
index + 1 >= len(scalar)
or scalar[index + 1] in '\0 \t\r\n\x85\u2028\u2029'
)
# Let's decide what styles are allowed.
allow_flow_plain = True
allow_block_plain = True
allow_single_quoted = True
allow_double_quoted = True
allow_block = True
# Leading and trailing whitespaces are bad for plain scalars.
if leading_space or leading_break or trailing_space or trailing_break:
allow_flow_plain = allow_block_plain = False
# We do not permit trailing spaces for block scalars.
if trailing_space:
allow_block = False
# Spaces at the beginning of a new line are only acceptable for block
# scalars.
if break_space:
allow_flow_plain = allow_block_plain = allow_single_quoted = False
# Spaces followed by breaks, as well as special characters, are only
# allowed for double quoted scalars.
if special_characters:
allow_flow_plain = (
allow_block_plain
) = allow_single_quoted = allow_block = False
elif space_break:
allow_flow_plain = allow_block_plain = allow_single_quoted = False
if not self.allow_space_break:
allow_block = False
# Although the plain scalar writer supports breaks, we never emit
# multiline plain scalars.
if line_breaks:
allow_flow_plain = allow_block_plain = False
# Flow indicators are forbidden for flow plain scalars.
if flow_indicators:
allow_flow_plain = False
# Block indicators are forbidden for block plain scalars.
if block_indicators:
allow_block_plain = False
return ScalarAnalysis(
scalar=scalar,
empty=False,
multiline=line_breaks,
allow_flow_plain=allow_flow_plain,
allow_block_plain=allow_block_plain,
allow_single_quoted=allow_single_quoted,
allow_double_quoted=allow_double_quoted,
allow_block=allow_block,
)
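# Illustrative note (not part of the original source): a few examples of how
# the analysis above plays out, assuming default settings:
#
#   'hello world'  -> plain style allowed in both block and flow context
#   '- item'       -> leading '-' followed by a space forbids plain styles
#   'a: b'         -> ': ' inside the scalar forbids plain styles
#   'two\nlines'   -> line breaks force a quoted or block style
#   ' padded'      -> a leading space forbids plain styles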
# Writers.
def flush_stream(self) -> None:
if hasattr(self.stream, 'flush'):
self.stream.flush()
def write_stream_start(self) -> None:
# Write BOM if needed.
if self.encoding and self.encoding.startswith('utf-16'):
self.stream.write('\uFEFF'.encode(self.encoding))
def write_stream_end(self) -> None:
self.flush_stream()
def write_indicator(
self,
indicator: Any,
need_whitespace: Any,
whitespace: bool = False,
indention: bool = False,
) -> None:
if self.whitespace or not need_whitespace:
data = indicator
else:
data = ' ' + indicator
self.whitespace = whitespace
self.indention = self.indention and indention
self.column += len(data)
self.open_ended = False
if bool(self.encoding):
data = data.encode(self.encoding)
self.stream.write(data)
def write_indent(self) -> None:
indent = self.indent or 0
if (
not self.indention
or self.column > indent
or (self.column == indent and not self.whitespace)
):
if bool(self.no_newline):
self.no_newline = False
else:
self.write_line_break()
if self.column < indent:
self.whitespace = True
data = ' ' * (indent - self.column)
self.column = indent
if self.encoding:
data = data.encode(self.encoding) # type: ignore
self.stream.write(data)
def write_line_break(self, data: Any = None) -> None:
if data is None:
data = self.best_line_break
self.whitespace = True
self.indention = True
self.line += 1
self.column = 0
if bool(self.encoding):
data = data.encode(self.encoding)
self.stream.write(data)
def write_version_directive(self, version_text: Any) -> None:
data: Any = f'%YAML {version_text!s}'
if self.encoding:
data = data.encode(self.encoding) # type: ignore
self.stream.write(data)
self.write_line_break()
def write_tag_directive(self, handle_text: Any, prefix_text: Any) -> None:
data: Any = f'%TAG {handle_text!s} {prefix_text!s}'
if self.encoding:
data = data.encode(self.encoding) # type: ignore
self.stream.write(data)
self.write_line_break()
# Scalar streams.
def write_single_quoted(self, text: Any, split: Any = True) -> None:
if self.root_context:
if self.requested_indent is not None:
self.write_line_break()
if self.requested_indent != 0:
self.write_indent()
self.write_indicator("'", True)
spaces = False
breaks = False
start = end = 0
while end <= len(text):
ch = None
if end < len(text):
ch = text[end]
if spaces:
if ch is None or ch != ' ':
if (
start + 1 == end
and self.column > self.best_width
and split
and start != 0
and end != len(text)
):
self.write_indent()
else:
data = text[start:end]
self.column += len(data)
if bool(self.encoding):
data = data.encode(self.encoding)
self.stream.write(data)
start = end
elif breaks:
if ch is None or ch not in '\n\x85\u2028\u2029':
if text[start] == '\n':
self.write_line_break()
for br in text[start:end]:
if br == '\n':
self.write_line_break()
else:
self.write_line_break(br)
self.write_indent()
start = end
else:
if ch is None or ch in ' \n\x85\u2028\u2029' or ch == "'":
if start < end:
data = text[start:end]
self.column += len(data)
if bool(self.encoding):
data = data.encode(self.encoding)
self.stream.write(data)
start = end
if ch == "'":
data = "''"
self.column += 2
if bool(self.encoding):
data = data.encode(self.encoding)
self.stream.write(data)
start = end + 1
if ch is not None:
spaces = ch == ' '
breaks = ch in '\n\x85\u2028\u2029'
end += 1
self.write_indicator("'", False)
ESCAPE_REPLACEMENTS = {
'\0': '0',
'\x07': 'a',
'\x08': 'b',
'\x09': 't',
'\x0A': 'n',
'\x0B': 'v',
'\x0C': 'f',
'\x0D': 'r',
'\x1B': 'e',
'"': '"',
'\\': '\\',
'\x85': 'N',
'\xA0': '_',
'\u2028': 'L',
'\u2029': 'P',
}
def write_double_quoted(self, text: Any, split: Any = True) -> None:
"""
A newline, as written by self.write_indent(), might need to be escaped with a backslash,
because on reading it would otherwise produce a possibly unwanted space.
"""
if self.root_context:
if self.requested_indent is not None:
self.write_line_break()
if self.requested_indent != 0:
self.write_indent()
self.write_indicator('"', True)
start = end = 0
while end <= len(text):
ch = None
if end < len(text):
ch = text[end]
if (
ch is None
or ch in '"\\\x85\u2028\u2029\uFEFF'
or not (
'\x20' <= ch <= '\x7E'
or (
self.allow_unicode
and (
('\xA0' <= ch <= '\uD7FF')
or ('\uE000' <= ch <= '\uFFFD')
or ('\U00010000' <= ch <= '\U0010FFFF')
)
)
)
):
if start < end:
data = text[start:end]
self.column += len(data)
if bool(self.encoding):
data = data.encode(self.encoding)
self.stream.write(data)
start = end
if ch is not None:
if ch in self.ESCAPE_REPLACEMENTS:
data = '\\' + self.ESCAPE_REPLACEMENTS[ch]
elif ch <= '\xFF':
data = '\\x%02X' % ord(ch)
elif ch <= '\uFFFF':
data = '\\u%04X' % ord(ch)
else:
data = '\\U%08X' % ord(ch)
self.column += len(data)
if bool(self.encoding):
data = data.encode(self.encoding)
self.stream.write(data)
start = end + 1
if (
0 < end < len(text) - 1
and (ch == ' ' or start >= end)
and self.column + (end - start) > self.best_width
and split
):
# SO https://stackoverflow.com/a/75634614/1307905
# data = text[start:end] + u'\\' # <<< replaced with following lines
need_backslash = True
if len(text) > end:
try:
space_pos = text.index(' ', end)
try:
space_pos = text.index('\n', end, space_pos)
except (ValueError, IndexError):
pass
# nprint('backslash?', space_pos, repr(text[:space_pos]), repr(text[space_pos:]), (text[space_pos] == '\n' and text[space_pos+1] == ' ')) # NOQA
if text[space_pos] == '\n' and text[space_pos + 1] != ' ':
pass
elif (
'"' not in text[end:space_pos]
and "'" not in text[end:space_pos]
# and text[space_pos + 1] != ' '
and text[space_pos + 1] not in ' \n'
and text[end - 1 : end + 1] != ' '
and start != end
):
need_backslash = False
except (ValueError, IndexError):
pass
data = text[start:end] + ('\\' if need_backslash else '')
if start < end:
start = end
self.column += len(data)
if bool(self.encoding):
data = data.encode(self.encoding)
self.stream.write(data)
self.write_indent()
self.whitespace = False
self.indention = False
if text[start] == ' ':
if not need_backslash:
# remove the leading space; on loading it will come from the folded newline
start += 1
# data = u'\\' # <<< replaced with following line
data = '\\' if need_backslash else ''
self.column += len(data)
if bool(self.encoding):
data = data.encode(self.encoding)
self.stream.write(data)
end += 1
self.write_indicator('"', False)
def determine_block_hints(self, text: Any) -> Any:
indent = 0
indicator = ''
hints = ''
if text:
if text[0] in ' \n\x85\u2028\u2029':
indent = 2
hints += str(indent)
elif self.root_context:
for end in ['\n---', '\n...']:
pos = 0
while True:
pos = text.find(end, pos)
if pos == -1:
break
try:
if text[pos + 4] in ' \r\n':
break
except IndexError:
pass
pos += 1
if pos > -1:
break
if pos > 0:
indent = 2
if text[-1] not in '\n\x85\u2028\u2029':
indicator = '-'
elif len(text) == 1 or text[-2] in '\n\x85\u2028\u2029':
indicator = '+'
hints += indicator
return hints, indent, indicator
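# Illustrative note (not part of the original source): examples of the hints
# computed above, i.e. what ends up right after the '|' or '>' indicator:
#
#   'text\n'      -> ''   (clipping, the default chomping)
#   'text'        -> '-'  (strip indicator: there is no final newline to keep)
#   'text\n\n'    -> '+'  (keep indicator: trailing newlines must be preserved)
#   ' leading\n'  -> '2'  (explicit indentation indicator for a leading space)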
def write_folded(self, text: Any, comment: Any) -> None:
hints, _indent, _indicator = self.determine_block_hints(text)
if not isinstance(comment, str):
comment = ''
self.write_indicator('>' + hints + comment, True)
if _indicator == '+':
self.open_ended = True
self.write_line_break()
leading_space = True
spaces = False
breaks = True
start = end = 0
while end <= len(text):
ch = None
if end < len(text):
ch = text[end]
if breaks:
if ch is None or ch not in '\n\x85\u2028\u2029\a':
if (
not leading_space
and ch is not None
and ch != ' '
and text[start] == '\n'
):
self.write_line_break()
leading_space = ch == ' '
for br in text[start:end]:
if br == '\n':
self.write_line_break()
else:
self.write_line_break(br)
if ch is not None:
self.write_indent()
start = end
elif spaces:
if ch != ' ':
if start + 1 == end and self.column > self.best_width:
self.write_indent()
else:
data = text[start:end]
self.column += len(data)
if bool(self.encoding):
data = data.encode(self.encoding)
self.stream.write(data)
start = end
else:
if ch is None or ch in ' \n\x85\u2028\u2029\a':
data = text[start:end]
self.column += len(data)
if bool(self.encoding):
data = data.encode(self.encoding)
self.stream.write(data)
if ch == '\a':
if end < (len(text) - 1) and not text[end + 2].isspace():
self.write_line_break()
self.write_indent()
end += 2 # \a and the space that is inserted on the fold
else:
raise EmitterError(
'unexpected fold indicator \\a before space'
)
if ch is None:
self.write_line_break()
start = end
if ch is not None:
breaks = ch in '\n\x85\u2028\u2029'
spaces = ch == ' '
end += 1
def write_literal(self, text: Any, comment: Any = None) -> None:
hints, _indent, _indicator = self.determine_block_hints(text)
# if comment is not None:
# try:
# hints += comment[1][0]
# except (TypeError, IndexError) as e:
# pass
if not isinstance(comment, str):
comment = ''
self.write_indicator('|' + hints + comment, True)
# try:
# nprintf('selfev', comment)
# cmx = comment[1][0]
# if cmx:
# self.stream.write(cmx)
# except (TypeError, IndexError) as e:
# pass
if _indicator == '+':
self.open_ended = True
self.write_line_break()
breaks = True
start = end = 0
while end <= len(text):
ch = None
if end < len(text):
ch = text[end]
if breaks:
if ch is None or ch not in '\n\x85\u2028\u2029':
for br in text[start:end]:
if br == '\n':
self.write_line_break()
else:
self.write_line_break(br)
if ch is not None:
if self.root_context:
idnx = self.indent if self.indent is not None else 0
self.stream.write(' ' * (_indent + idnx))
else:
self.write_indent()
start = end
else:
if ch is None or ch in '\n\x85\u2028\u2029':
data = text[start:end]
if bool(self.encoding):
data = data.encode(self.encoding)
self.stream.write(data)
if ch is None:
self.write_line_break()
start = end
if ch is not None:
breaks = ch in '\n\x85\u2028\u2029'
end += 1
def write_plain(self, text: Any, split: Any = True) -> None:
if self.root_context:
if self.requested_indent is not None:
self.write_line_break()
if self.requested_indent != 0:
self.write_indent()
else:
self.open_ended = True
if not text:
return
if not self.whitespace:
data = ' '
self.column += len(data)
if self.encoding:
data = data.encode(self.encoding) # type: ignore
self.stream.write(data)
self.whitespace = False
self.indention = False
spaces = False
breaks = False
start = end = 0
while end <= len(text):
# ToDo: there is an empty space at the end of the wrapped line, if that line
# does not exceed self.best_width, that space is superfluous if wrapping is on
ch = None
if end < len(text):
ch = text[end]
if spaces:
if ch != ' ':
if start + 1 == end and self.column >= self.best_width and split:
self.write_indent()
self.whitespace = False
self.indention = False
else:
data = text[start:end]
self.column += len(data)
if self.encoding:
data = data.encode(self.encoding) # type: ignore
self.stream.write(data)
start = end
elif breaks:
if ch not in '\n\x85\u2028\u2029': # type: ignore
if text[start] == '\n':
self.write_line_break()
for br in text[start:end]:
if br == '\n':
self.write_line_break()
else:
self.write_line_break(br)
self.write_indent()
self.whitespace = False
self.indention = False
start = end
else:
if ch is None or ch in ' \n\x85\u2028\u2029':
data = text[start:end]
if (
(len(data) + self.column) > self.best_width
and self.indent is not None
and self.column > self.indent
):
# words longer than line length get a line of their own
self.write_indent()
self.column += len(data)
if self.encoding:
data = data.encode(self.encoding) # type: ignore
try:
self.stream.write(data)
except: # NOQA
sys.stdout.write(repr(data) + '\n')
raise
start = end
if ch is not None:
spaces = ch == ' '
breaks = ch in '\n\x85\u2028\u2029'
end += 1
def write_comment(self, comment: Any, pre: bool = False) -> None:
value = comment.value
# nprintf(f'{self.column:02d} {comment.start_mark.column:02d} {value!r}')
if not pre and value[-1] == '\n':
value = value[:-1]
try:
# get original column position
col = comment.start_mark.column
if comment.value and comment.value.startswith('\n'):
# never inject extra spaces if the comment starts with a newline
# and is not a real comment (e.g. if you have an empty line following a key-value pair)
col = self.column
elif col < self.column + 1:
raise ValueError
except ValueError:
col = self.column + 1
# nprint('post_comment', self.line, self.column, value)
try:
# at least one space if the current column >= the start column of the comment
# but not at the start of a line
nr_spaces = col - self.column
if self.column and value.strip() and nr_spaces < 1 and value[0] != '\n':
nr_spaces = 1
value = ' ' * nr_spaces + value
try:
if bool(self.encoding):
value = value.encode(self.encoding)
except UnicodeDecodeError:
pass
self.stream.write(value)
except TypeError:
raise
if not pre:
self.write_line_break()
def write_pre_comment(self, event: Any) -> bool:
if event.comment is None:
return False
comments = event.comment[1]
if comments is None:
return False
try:
start_events = (MappingStartEvent, SequenceStartEvent)
for comment in comments:
if isinstance(event, start_events) and getattr(
comment, 'pre_done', None
):
continue
if self.column != 0:
self.write_line_break()
self.write_comment(comment, pre=True)
if isinstance(event, start_events):
comment.pre_done = True
except TypeError:
sys.stdout.write(f'eventtt {type(event)} {event}')
raise
return True
def write_post_comment(self, event: Any) -> bool:
if self.event.comment[0] is None:
return False
comment = event.comment[0]
self.write_comment(comment)
return True
class RoundTripEmitter(Emitter):
def prepare_tag(self, ctag: Any) -> Any:
if not ctag:
raise EmitterError('tag must not be empty')
tag = str(ctag)
if tag == '!' or tag == '!!':
return tag
handle = ctag.handle
suffix = ctag.suffix
prefixes = sorted(self.tag_prefixes.keys())
# print('handling', repr(tag), repr(suffix), repr(handle))
if handle is None:
for prefix in prefixes:
if tag.startswith(prefix) and (prefix == '!' or len(prefix) < len(tag)):
handle = self.tag_prefixes[prefix]
suffix = suffix[len(prefix) :]
if handle:
return f'{handle!s}{suffix!s}'
else:
return f'!<{suffix!s}>'
python-ruyaml-0.92.1/lib/ruyaml/error.py 0000664 0000000 0000000 00000023232 15056754172 0020233 0 ustar 00root root 0000000 0000000 from __future__ import annotations
import warnings
# import textwrap
if False: # MYPY
from typing import Any, Dict, List, Optional, Text # NOQA
__all__ = [
'FileMark',
'StringMark',
'CommentMark',
'YAMLError',
'MarkedYAMLError',
'ReusedAnchorWarning',
'UnsafeLoaderWarning',
'MarkedYAMLWarning',
'MarkedYAMLFutureWarning',
]
class StreamMark:
__slots__ = 'name', 'index', 'line', 'column'
def __init__(self, name: Any, index: int, line: int, column: int) -> None:
self.name = name
self.index = index
self.line = line
self.column = column
def __str__(self) -> Any:
where = (
f' in "{self.name!s}", line {self.line + 1:d}, column {self.column + 1:d}'
)
return where
def __eq__(self, other: Any) -> bool:
if self.line != other.line or self.column != other.column:
return False
if self.name != other.name or self.index != other.index:
return False
return True
def __ne__(self, other: Any) -> bool:
return not self.__eq__(other)
class FileMark(StreamMark):
__slots__ = ()
class StringMark(StreamMark):
__slots__ = 'name', 'index', 'line', 'column', 'buffer', 'pointer'
def __init__(
self,
name: Any,
index: int,
line: int,
column: int,
buffer: Any,
pointer: Any,
) -> None:
StreamMark.__init__(self, name, index, line, column)
self.buffer = buffer
self.pointer = pointer
def get_snippet(self, indent: int = 4, max_length: int = 75) -> Any:
if self.buffer is None: # always False
return None
head = ""
start = self.pointer
while start > 0 and self.buffer[start - 1] not in '\0\r\n\x85\u2028\u2029':
start -= 1
if self.pointer - start > max_length / 2 - 1:
head = ' ... '
start += 5
break
tail = ""
end = self.pointer
while (
end < len(self.buffer) and self.buffer[end] not in '\0\r\n\x85\u2028\u2029'
):
end += 1
if end - self.pointer > max_length / 2 - 1:
tail = ' ... '
end -= 5
break
snippet = self.buffer[start:end]
caret = '^'
caret = f'^ (line: {self.line + 1})'
return (
' ' * indent
+ head
+ snippet
+ tail
+ '\n'
+ ' ' * (indent + self.pointer - start + len(head))
+ caret
)
def __str__(self) -> Any:
snippet = self.get_snippet()
where = (
f' in "{self.name!s}", line {self.line + 1:d}, column {self.column + 1:d}'
)
if snippet is not None:
where += ':\n' + snippet
return where
def __repr__(self) -> Any:
snippet = self.get_snippet()
where = (
f' in "{self.name!s}", line {self.line + 1:d}, column {self.column + 1:d}'
)
if snippet is not None:
where += ':\n' + snippet
return where
class CommentMark:
__slots__ = ('column',)
def __init__(self, column: Any) -> None:
self.column = column
class YAMLError(Exception):
pass
class MarkedYAMLError(YAMLError):
def __init__(
self,
context: Any = None,
context_mark: Any = None,
problem: Any = None,
problem_mark: Any = None,
note: Any = None,
warn: Any = None,
) -> None:
self.context = context
self.context_mark = context_mark
self.problem = problem
self.problem_mark = problem_mark
self.note = note
# warn is ignored
def __str__(self) -> Any:
lines: list[str] = []
if self.context is not None:
lines.append(self.context)
if self.context_mark is not None and (
self.problem is None
or self.problem_mark is None
or self.context_mark.name != self.problem_mark.name
or self.context_mark.line != self.problem_mark.line
or self.context_mark.column != self.problem_mark.column
):
lines.append(str(self.context_mark))
if self.problem is not None:
lines.append(self.problem)
if self.problem_mark is not None:
lines.append(str(self.problem_mark))
# if self.note is not None and self.note:
# note = textwrap.dedent(self.note)
# lines.append(note)
self.check_append(lines, self.note)
return '\n'.join(lines)
def check_append(self, lines: list[str], val: Optional[str]) -> None:
if val is None or not val:
return
import textwrap
note = textwrap.dedent(val)
lines.append(note)
class YAMLStreamError(Exception):
pass
class YAMLWarning(Warning):
pass
class MarkedYAMLWarning(YAMLWarning):
def __init__(
self,
context: Any = None,
context_mark: Any = None,
problem: Any = None,
problem_mark: Any = None,
note: Any = None,
warn: Any = None,
) -> None:
self.context = context
self.context_mark = context_mark
self.problem = problem
self.problem_mark = problem_mark
self.note = note
self.warn = warn
def __str__(self) -> Any:
lines: List[str] = []
if self.context is not None:
lines.append(self.context)
if self.context_mark is not None and (
self.problem is None
or self.problem_mark is None
or self.context_mark.name != self.problem_mark.name
or self.context_mark.line != self.problem_mark.line
or self.context_mark.column != self.problem_mark.column
):
lines.append(str(self.context_mark))
if self.problem is not None:
lines.append(self.problem)
if self.problem_mark is not None:
lines.append(str(self.problem_mark))
# if self.note is not None and self.note:
# note = textwrap.dedent(self.note)
# lines.append(note)
self.check_append(lines, self.note)
# if self.warn is not None and self.warn:
# warn = textwrap.dedent(self.warn)
# lines.append(warn)
self.check_append(lines, self.warn)
return '\n'.join(lines)
def check_append(self, lines: list[str], val: Optional[str]) -> None:
if val is None or not val:
return
import textwrap
note = textwrap.dedent(val)
lines.append(note)
class ReusedAnchorWarning(YAMLWarning):
pass
class UnsafeLoaderWarning(YAMLWarning):
text = """
The default 'Loader' for 'load(stream)' without further arguments can be unsafe.
Use 'load(stream, Loader=ruyaml.Loader)' explicitly if that is OK.
Alternatively include the following in your code:
import warnings
warnings.simplefilter('ignore', ruyaml.error.UnsafeLoaderWarning)
In most other cases you should consider using 'safe_load(stream)'"""
pass
warnings.simplefilter('once', UnsafeLoaderWarning)
class MantissaNoDotYAML1_1Warning(YAMLWarning):
def __init__(self, node: Any, flt_str: Any) -> None:
self.node = node
self.flt = flt_str
def __str__(self) -> Any:
line = self.node.start_mark.line
col = self.node.start_mark.column
return f"""
In YAML 1.1 floating point values should have a dot ('.') in their mantissa.
See the Floating-Point Language-Independent Type for YAML™ Version 1.1 specification
( http://yaml.org/type/float.html ). This dot is required neither for JSON nor for YAML 1.2
Correct your float: "{self.flt}" on line: {line}, column: {col}
or alternatively include the following in your code:
import warnings
warnings.simplefilter('ignore', ruyaml.error.MantissaNoDotYAML1_1Warning)
"""
warnings.simplefilter('once', MantissaNoDotYAML1_1Warning)
class YAMLFutureWarning(Warning):
pass
class MarkedYAMLFutureWarning(YAMLFutureWarning):
def __init__(
self,
context: Any = None,
context_mark: Any = None,
problem: Any = None,
problem_mark: Any = None,
note: Any = None,
warn: Any = None,
) -> None:
self.context = context
self.context_mark = context_mark
self.problem = problem
self.problem_mark = problem_mark
self.note = note
self.warn = warn
def __str__(self) -> Any:
lines: List[str] = []
if self.context is not None:
lines.append(self.context)
if self.context_mark is not None and (
self.problem is None
or self.problem_mark is None
or self.context_mark.name != self.problem_mark.name
or self.context_mark.line != self.problem_mark.line
or self.context_mark.column != self.problem_mark.column
):
lines.append(str(self.context_mark))
if self.problem is not None:
lines.append(self.problem)
if self.problem_mark is not None:
lines.append(str(self.problem_mark))
# if self.note is not None and self.note:
# note = textwrap.dedent(self.note)
# lines.append(note)
self.check_append(lines, self.note)
# if self.warn is not None and self.warn:
# warn = textwrap.dedent(self.warn)
# lines.append(warn)
self.check_append(lines, self.warn)
return '\n'.join(lines)
def check_append(self, lines: list[str], val: Optional[str]) -> None:
if val is None or not val:
return
import textwrap
note = textwrap.dedent(val)
lines.append(note)
python-ruyaml-0.92.1/lib/ruyaml/events.py 0000664 0000000 0000000 00000016333 15056754172 0020412 0 ustar 00root root 0000000 0000000 from __future__ import annotations
# Abstract classes.
if False: # MYPY
from typing import Any, Dict, Optional, List # NOQA
from ruyaml.tag import Tag
SHOW_LINES = False
def CommentCheck() -> None:
pass
class Event:
__slots__ = 'start_mark', 'end_mark', 'comment'
crepr = 'Unspecified Event'
def __init__(
self,
start_mark: Any = None,
end_mark: Any = None,
comment: Any = CommentCheck,
) -> None:
self.start_mark = start_mark
self.end_mark = end_mark
# assert comment is not CommentCheck
if comment is CommentCheck:
comment = None
self.comment = comment
def __repr__(self) -> Any:
if True:
arguments = []
if hasattr(self, 'value'):
# if you use repr(getattr(self, 'value')) then flake8 complains about
# abuse of getattr with a constant. When you change to self.value
# then mypy throws an error
arguments.append(repr(self.value))
for key in ['anchor', 'tag', 'implicit', 'flow_style', 'style']:
v = getattr(self, key, None)
if v is not None:
arguments.append(f'{key!s}={v!r}')
if self.comment not in [None, CommentCheck]:
arguments.append(f'comment={self.comment!r}')
if SHOW_LINES:
arguments.append(
f'({self.start_mark.line}:{self.start_mark.column}/'
f'{self.end_mark.line}:{self.end_mark.column})',
)
arguments = ', '.join(arguments) # type: ignore
else:
attributes = [
key
for key in ['anchor', 'tag', 'implicit', 'value', 'flow_style', 'style']
if hasattr(self, key)
]
arguments = ', '.join(
[f'{key!s}={getattr(self, key)!r}' for key in attributes]
)
if self.comment not in [None, CommentCheck]:
arguments += f', comment={self.comment!r}'
return f'{self.__class__.__name__!s}({arguments!s})'
def compact_repr(self) -> str:
return f'{self.crepr}'
class NodeEvent(Event):
__slots__ = ('anchor',)
def __init__(
self,
anchor: Any,
start_mark: Any = None,
end_mark: Any = None,
comment: Any = None,
) -> None:
Event.__init__(self, start_mark, end_mark, comment)
self.anchor = anchor
class CollectionStartEvent(NodeEvent):
__slots__ = 'ctag', 'implicit', 'flow_style', 'nr_items'
def __init__(
self,
anchor: Any,
tag: Any,
implicit: Any,
start_mark: Any = None,
end_mark: Any = None,
flow_style: Any = None,
comment: Any = None,
nr_items: Optional[int] = None,
) -> None:
NodeEvent.__init__(self, anchor, start_mark, end_mark, comment)
self.ctag = tag
self.implicit = implicit
self.flow_style = flow_style
self.nr_items = nr_items
@property
def tag(self) -> Optional[str]:
return None if self.ctag is None else str(self.ctag)
class CollectionEndEvent(Event):
__slots__ = ()
# Implementations.
class StreamStartEvent(Event):
__slots__ = ('encoding',)
crepr = '+STR'
def __init__(
self,
start_mark: Any = None,
end_mark: Any = None,
encoding: Any = None,
comment: Any = None,
) -> None:
Event.__init__(self, start_mark, end_mark, comment)
self.encoding = encoding
class StreamEndEvent(Event):
__slots__ = ()
crepr = '-STR'
class DocumentStartEvent(Event):
__slots__ = 'explicit', 'version', 'tags'
crepr = '+DOC'
def __init__(
self,
start_mark: Any = None,
end_mark: Any = None,
explicit: Any = None,
version: Any = None,
tags: Any = None,
comment: Any = None,
) -> None:
Event.__init__(self, start_mark, end_mark, comment)
self.explicit = explicit
self.version = version
self.tags = tags
def compact_repr(self) -> str:
start = ' ---' if self.explicit else ''
return f'{self.crepr}{start}'
class DocumentEndEvent(Event):
__slots__ = ('explicit',)
crepr = '-DOC'
def __init__(
self,
start_mark: Any = None,
end_mark: Any = None,
explicit: Any = None,
comment: Any = None,
) -> None:
Event.__init__(self, start_mark, end_mark, comment)
self.explicit = explicit
def compact_repr(self) -> str:
end = ' ...' if self.explicit else ''
return f'{self.crepr}{end}'
class AliasEvent(NodeEvent):
__slots__ = 'style'
crepr = '=ALI'
def __init__(
self,
anchor: Any,
start_mark: Any = None,
end_mark: Any = None,
style: Any = None,
comment: Any = None,
) -> None:
NodeEvent.__init__(self, anchor, start_mark, end_mark, comment)
self.style = style
def compact_repr(self) -> str:
return f'{self.crepr} *{self.anchor}'
class ScalarEvent(NodeEvent):
__slots__ = 'ctag', 'implicit', 'value', 'style'
crepr = '=VAL'
def __init__(
self,
anchor: Any,
tag: Any,
implicit: Any,
value: Any,
start_mark: Any = None,
end_mark: Any = None,
style: Any = None,
comment: Any = None,
) -> None:
NodeEvent.__init__(self, anchor, start_mark, end_mark, comment)
self.ctag = tag
self.implicit = implicit
self.value = value
self.style = style
@property
def tag(self) -> Optional[str]:
return None if self.ctag is None else str(self.ctag)
@tag.setter
def tag(self, val: Any) -> None:
if isinstance(val, str):
val = Tag(suffix=val)
self.ctag = val
def compact_repr(self) -> str:
style = ':' if self.style is None else self.style
anchor = f'&{self.anchor} ' if self.anchor else ''
tag = f'<{self.tag!s}> ' if self.tag else ''
value = self.value
for ch, rep in [
('\\', '\\\\'),
('\t', '\\t'),
('\n', '\\n'),
('\a', ''), # remove from folded
('\r', '\\r'),
('\b', '\\b'),
]:
value = value.replace(ch, rep)
return f'{self.crepr} {anchor}{tag}{style}{value}'
class SequenceStartEvent(CollectionStartEvent):
__slots__ = ()
crepr = '+SEQ'
def compact_repr(self) -> str:
flow = ' []' if self.flow_style else ''
anchor = f' &{self.anchor}' if self.anchor else ''
tag = f' <{self.tag!s}>' if self.tag else ''
return f'{self.crepr}{flow}{anchor}{tag}'
class SequenceEndEvent(CollectionEndEvent):
__slots__ = ()
crepr = '-SEQ'
class MappingStartEvent(CollectionStartEvent):
__slots__ = ()
crepr = '+MAP'
def compact_repr(self) -> str:
flow = ' {}' if self.flow_style else ''
anchor = f' &{self.anchor}' if self.anchor else ''
tag = f' <{self.tag!s}>' if self.tag else ''
return f'{self.crepr}{flow}{anchor}{tag}'
class MappingEndEvent(CollectionEndEvent):
__slots__ = ()
crepr = '-MAP'
python-ruyaml-0.92.1/lib/ruyaml/loader.py 0000664 0000000 0000000 00000006156 15056754172 0020356 0 ustar 00root root 0000000 0000000 from __future__ import annotations
from ruyaml.composer import Composer
from ruyaml.constructor import (
BaseConstructor,
Constructor,
RoundTripConstructor,
SafeConstructor,
)
from ruyaml.parser import Parser, RoundTripParser
from ruyaml.reader import Reader
from ruyaml.resolver import VersionedResolver
from ruyaml.scanner import RoundTripScanner, Scanner
if False: # MYPY
from typing import Any, Dict, List, Optional, Union # NOQA
from ruyaml.compat import StreamTextType, VersionType # NOQA
__all__ = ['BaseLoader', 'SafeLoader', 'Loader', 'RoundTripLoader']
class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, VersionedResolver):
def __init__(
self,
stream: StreamTextType,
version: Optional[VersionType] = None,
preserve_quotes: Optional[bool] = None,
) -> None:
self.comment_handling = None
Reader.__init__(self, stream, loader=self)
Scanner.__init__(self, loader=self)
Parser.__init__(self, loader=self)
Composer.__init__(self, loader=self)
BaseConstructor.__init__(self, loader=self)
VersionedResolver.__init__(self, version, loader=self)
class SafeLoader(Reader, Scanner, Parser, Composer, SafeConstructor, VersionedResolver):
def __init__(
self,
stream: StreamTextType,
version: Optional[VersionType] = None,
preserve_quotes: Optional[bool] = None,
) -> None:
self.comment_handling = None
Reader.__init__(self, stream, loader=self)
Scanner.__init__(self, loader=self)
Parser.__init__(self, loader=self)
Composer.__init__(self, loader=self)
SafeConstructor.__init__(self, loader=self)
VersionedResolver.__init__(self, version, loader=self)
class Loader(Reader, Scanner, Parser, Composer, Constructor, VersionedResolver):
def __init__(
self,
stream: StreamTextType,
version: Optional[VersionType] = None,
preserve_quotes: Optional[bool] = None,
) -> None:
self.comment_handling = None
Reader.__init__(self, stream, loader=self)
Scanner.__init__(self, loader=self)
Parser.__init__(self, loader=self)
Composer.__init__(self, loader=self)
Constructor.__init__(self, loader=self)
VersionedResolver.__init__(self, version, loader=self)
class RoundTripLoader(
Reader,
RoundTripScanner,
RoundTripParser,
Composer,
RoundTripConstructor,
VersionedResolver,
):
def __init__(
self,
stream: StreamTextType,
version: Optional[VersionType] = None,
preserve_quotes: Optional[bool] = None,
) -> None:
# self.reader = Reader.__init__(self, stream)
self.comment_handling = None # issue 385
Reader.__init__(self, stream, loader=self)
RoundTripScanner.__init__(self, loader=self)
RoundTripParser.__init__(self, loader=self)
Composer.__init__(self, loader=self)
RoundTripConstructor.__init__(
self, preserve_quotes=preserve_quotes, loader=self
)
VersionedResolver.__init__(self, version, loader=self)
python-ruyaml-0.92.1/lib/ruyaml/main.py 0000664 0000000 0000000 00000157102 15056754172 0020032 0 ustar 00root root 0000000 0000000 from __future__ import annotations
import glob
import os
import sys
from importlib import import_module
from io import BytesIO, StringIO
from typing import TYPE_CHECKING, Any, List, Optional, Text, Union
import warnings
import ruyaml
from ruyaml.comments import C_PRE, CommentedMap, CommentedSeq
from ruyaml.compat import BytesIO, StringIO, nprint, nprintf, with_metaclass # NOQA
from ruyaml.constructor import (
BaseConstructor,
Constructor,
RoundTripConstructor,
SafeConstructor,
)
from ruyaml.docinfo import DocInfo, Version, version
from ruyaml.dumper import BaseDumper, Dumper, RoundTripDumper, SafeDumper # NOQA
from ruyaml.error import UnsafeLoaderWarning, YAMLError # NOQA
from ruyaml.events import * # NOQA
from ruyaml.loader import BaseLoader # NOQA
from ruyaml.loader import Loader # NOQA
from ruyaml.loader import Loader as UnsafeLoader
from ruyaml.loader import RoundTripLoader, SafeLoader # NOQA
from ruyaml.nodes import * # NOQA
from ruyaml.representer import (
BaseRepresenter,
Representer,
RoundTripRepresenter,
SafeRepresenter,
)
from ruyaml.resolver import Resolver, VersionedResolver # NOQA
from ruyaml.tokens import * # NOQA
if False: # MYPY
from pathlib import Path
from types import TracebackType
from typing import ( # NOQA
Any,
Callable,
Dict,
List,
Optional,
Set,
Text,
Tuple,
Type,
Union,
)
from ruyaml.compat import StreamTextType, StreamType, VersionType # NOQA
# import io
CParser = None
CEmitter = None
# YAML is an acronym, i.e. spoken: rhymes with "camel". And thus a
# subset of abbreviations, which should be all caps according to PEP8
class YAML:
def __init__(
self: Any,
*,
typ: Optional[Union[List[Text], Text]] = None,
pure: Any = False,
output: Any = None,
plug_ins: Any = None,
) -> None: # input=None,
"""
typ: 'rt'/None -> RoundTripLoader/RoundTripDumper, (default)
'safe' -> SafeLoader/SafeDumper,
'unsafe' -> normal/unsafe Loader/Dumper (pending deprecation)
'full' -> full Dumper only, including python built-ins that are
potentially unsafe to load
'base' -> baseloader
pure: if True only use Python modules
input/output: needed to work as context manager
plug_ins: a list of plug-in files
"""
self.typ = ['rt'] if typ is None else (typ if isinstance(typ, list) else [typ])
self.pure = pure
# self._input = input
self._output = output
self._context_manager: Any = None
self.plug_ins: List[Any] = []
for pu in ([] if plug_ins is None else plug_ins) + self.official_plug_ins():
file_name = pu.replace(os.sep, '.')
self.plug_ins.append(import_module(file_name))
self.Resolver: Any = ruyaml.resolver.VersionedResolver
self.allow_unicode = True
self.Reader: Any = None
self.Representer: Any = None
self.Constructor: Any = None
self.Scanner: Any = None
self.Serializer: Any = None
self.default_flow_style: Any = None
self.comment_handling = None
typ_found = 1
setup_rt = False
if 'rt' in self.typ:
setup_rt = True
elif 'safe' in self.typ:
self.Emitter = (
ruyaml.emitter.Emitter if pure or CEmitter is None else CEmitter
)
self.Representer = ruyaml.representer.SafeRepresenter
self.Parser = ruyaml.parser.Parser if pure or CParser is None else CParser
self.Composer = ruyaml.composer.Composer
self.Constructor = ruyaml.constructor.SafeConstructor
elif 'base' in self.typ:
self.Emitter = ruyaml.emitter.Emitter
self.Representer = ruyaml.representer.BaseRepresenter
self.Parser = ruyaml.parser.Parser if pure or CParser is None else CParser
self.Composer = ruyaml.composer.Composer
self.Constructor = ruyaml.constructor.BaseConstructor
elif 'unsafe' in self.typ:
warnings.warn(
"\nyou should no longer specify 'unsafe'.\nFor **dumping only** use yaml=YAML(typ='full')\n", # NOQA
PendingDeprecationWarning,
stacklevel=2,
)
self.Emitter = (
ruyaml.emitter.Emitter if pure or CEmitter is None else CEmitter
)
self.Representer = ruyaml.representer.Representer
self.Parser = ruyaml.parser.Parser if pure or CParser is None else CParser
self.Composer = ruyaml.composer.Composer
self.Constructor = ruyaml.constructor.Constructor
elif 'full' in self.typ:
self.Emitter = (
ruyaml.emitter.Emitter if pure or CEmitter is None else CEmitter
)
self.Representer = ruyaml.representer.Representer
self.Parser = ruyaml.parser.Parser if pure or CParser is None else CParser
# self.Composer = ruyaml.composer.Composer
# self.Constructor = ruyaml.constructor.Constructor
elif 'rtsc' in self.typ:
self.default_flow_style = False
# no optimized rt-dumper yet
self.Emitter = ruyaml.emitter.RoundTripEmitter
self.Serializer = ruyaml.serializer.Serializer
self.Representer = ruyaml.representer.RoundTripRepresenter
self.Scanner = ruyaml.scanner.RoundTripScannerSC
# no optimized rt-parser yet
self.Parser = ruyaml.parser.RoundTripParserSC
self.Composer = ruyaml.composer.Composer
self.Constructor = ruyaml.constructor.RoundTripConstructor
self.comment_handling = C_PRE
else:
setup_rt = True
typ_found = 0
if setup_rt:
self.default_flow_style = False
# no optimized rt-dumper yet
self.Emitter = ruyaml.emitter.RoundTripEmitter
self.Serializer = ruyaml.serializer.Serializer
self.Representer = ruyaml.representer.RoundTripRepresenter
self.Scanner = ruyaml.scanner.RoundTripScanner
# no optimized rt-parser yet
self.Parser = ruyaml.parser.RoundTripParser
self.Composer = ruyaml.composer.Composer
self.Constructor = ruyaml.constructor.RoundTripConstructor
del setup_rt
self.stream = None
self.canonical = None
self.old_indent = None
self.width: Union[int, None] = None
self.line_break = None
self.map_indent: Union[int, None] = None
self.sequence_indent: Union[int, None] = None
self.sequence_dash_offset: int = 0
self.compact_seq_seq = None
self.compact_seq_map = None
self.sort_base_mapping_type_on_output = None # default: sort
self.top_level_colon_align = None
self.prefix_colon = None
self._version: Optional[Any] = None
self.preserve_quotes: Optional[bool] = None
self.allow_duplicate_keys = False # duplicate keys in map, set
self.encoding = 'utf-8'
self.explicit_start: Union[bool, None] = None
self.explicit_end: Union[bool, None] = None
self._tags = None
self.doc_infos: List[DocInfo] = []
self.default_style = None
self.top_level_block_style_scalar_no_indent_error_1_1 = False
# directives end indicator with single scalar document
self.scalar_after_indicator: Optional[bool] = None
# [a, b: 1, c: {d: 2}] vs. [a, {b: 1}, {c: {d: 2}}]
self.brace_single_entry_mapping_in_flow_sequence = False
for module in self.plug_ins:
if getattr(module, 'typ', None) in self.typ:
typ_found += 1
module.init_typ(self)
break
if typ_found == 0:
raise NotImplementedError(
f'typ "{self.typ}" not recognised (need to install plug-in?)',
)
@property
def reader(self) -> Any:
try:
return self._reader # type: ignore
except AttributeError:
self._reader = self.Reader(None, loader=self)
return self._reader
@property
def scanner(self) -> Any:
try:
return self._scanner # type: ignore
except AttributeError:
if self.Scanner is None:
raise
self._scanner = self.Scanner(loader=self)
return self._scanner
@property
def parser(self) -> Any:
attr = '_' + sys._getframe().f_code.co_name
if not hasattr(self, attr):
if self.Parser is not CParser:
setattr(self, attr, self.Parser(loader=self))
else:
if getattr(self, '_stream', None) is None:
# wait for the stream
return None
else:
# if not hasattr(self._stream, 'read') and hasattr(self._stream, 'open'):
# # pathlib.Path() instance
# setattr(self, attr, CParser(self._stream))
# else:
setattr(self, attr, CParser(self._stream))
# self._parser = self._composer = self
# nprint('scanner', self.loader.scanner)
return getattr(self, attr)
@property
def composer(self) -> Any:
attr = '_' + sys._getframe().f_code.co_name
if not hasattr(self, attr):
setattr(self, attr, self.Composer(loader=self))
return getattr(self, attr)
@property
def constructor(self) -> Any:
attr = '_' + sys._getframe().f_code.co_name
if not hasattr(self, attr):
if self.Constructor is None:
if 'full' in self.typ:
raise YAMLError(
"\nyou can only use yaml=YAML(typ='full') for dumping\n", # NOQA
)
cnst = self.Constructor(preserve_quotes=self.preserve_quotes, loader=self) # type: ignore # NOQA
cnst.allow_duplicate_keys = self.allow_duplicate_keys
setattr(self, attr, cnst)
return getattr(self, attr)
@property
def resolver(self) -> Any:
try:
rslvr = self._resolver # type: ignore
except AttributeError:
rslvr = None
if rslvr is None or rslvr._loader_version != self.version:
rslvr = self._resolver = self.Resolver(version=self.version, loader=self)
return rslvr
@property
def emitter(self) -> Any:
attr = '_' + sys._getframe().f_code.co_name
if not hasattr(self, attr):
if self.Emitter is not CEmitter:
_emitter = self.Emitter(
None,
canonical=self.canonical,
indent=self.old_indent,
width=self.width,
allow_unicode=self.allow_unicode,
line_break=self.line_break,
prefix_colon=self.prefix_colon,
brace_single_entry_mapping_in_flow_sequence=self.brace_single_entry_mapping_in_flow_sequence, # NOQA
dumper=self,
)
setattr(self, attr, _emitter)
if self.map_indent is not None:
_emitter.best_map_indent = self.map_indent
if self.sequence_indent is not None:
_emitter.best_sequence_indent = self.sequence_indent
if self.sequence_dash_offset is not None:
_emitter.sequence_dash_offset = self.sequence_dash_offset
# _emitter.block_seq_indent = self.sequence_dash_offset
if self.compact_seq_seq is not None:
_emitter.compact_seq_seq = self.compact_seq_seq
if self.compact_seq_map is not None:
_emitter.compact_seq_map = self.compact_seq_map
else:
if getattr(self, '_stream', None) is None:
# wait for the stream
return None
return None
return getattr(self, attr)
@property
def serializer(self) -> Any:
attr = '_' + sys._getframe().f_code.co_name
if not hasattr(self, attr):
setattr(
self,
attr,
self.Serializer(
encoding=self.encoding,
explicit_start=self.explicit_start,
explicit_end=self.explicit_end,
version=self.version,
tags=self.tags,
dumper=self,
),
)
return getattr(self, attr)
@property
def representer(self) -> Any:
attr = '_' + sys._getframe().f_code.co_name
if not hasattr(self, attr):
repres = self.Representer(
default_style=self.default_style,
default_flow_style=self.default_flow_style,
dumper=self,
)
if self.sort_base_mapping_type_on_output is not None:
repres.sort_base_mapping_type_on_output = (
self.sort_base_mapping_type_on_output
)
setattr(self, attr, repres)
return getattr(self, attr)
def scan(self, stream: StreamTextType) -> Any:
"""
Scan a YAML stream and produce scanning tokens.
"""
if not hasattr(stream, 'read') and hasattr(stream, 'open'):
# pathlib.Path() instance
with stream.open('rb') as fp:
return self.scan(fp)
self.doc_infos.append(DocInfo(requested_version=version(self.version)))
self.tags = {}
_, parser = self.get_constructor_parser(stream)
try:
while self.scanner.check_token():
yield self.scanner.get_token()
finally:
parser.dispose()
for comp in ('reader', 'scanner'):
try:
getattr(getattr(self, '_' + comp), f'reset_{comp}')()
except AttributeError:
pass
def parse(self, stream: StreamTextType) -> Any:
"""
Parse a YAML stream and produce parsing events.
"""
if not hasattr(stream, 'read') and hasattr(stream, 'open'):
# pathlib.Path() instance
with stream.open('rb') as fp:
return self.parse(fp)
self.doc_infos.append(DocInfo(requested_version=version(self.version)))
self.tags = {}
_, parser = self.get_constructor_parser(stream)
try:
while parser.check_event():
yield parser.get_event()
finally:
parser.dispose()
for comp in ('reader', 'scanner'):
try:
getattr(getattr(self, '_' + comp), f'reset_{comp}')()
except AttributeError:
pass
def compose(self, stream: Union[Path, StreamTextType]) -> Any:
"""
Parse the first YAML document in a stream
and produce the corresponding representation tree.
"""
if not hasattr(stream, 'read') and hasattr(stream, 'open'):
# pathlib.Path() instance
with stream.open('rb') as fp:
return self.compose(fp)
self.doc_infos.append(DocInfo(requested_version=version(self.version)))
self.tags = {}
constructor, parser = self.get_constructor_parser(stream)
try:
return constructor.composer.get_single_node()
finally:
parser.dispose()
for comp in ('reader', 'scanner'):
try:
getattr(getattr(self, '_' + comp), f'reset_{comp}')()
except AttributeError:
pass
def compose_all(self, stream: Union[Path, StreamTextType]) -> Any:
"""
Parse all YAML documents in a stream
and produce corresponding representation trees.
"""
self.doc_infos.append(DocInfo(requested_version=version(self.version)))
self.tags = {}
constructor, parser = self.get_constructor_parser(stream)
try:
while constructor.composer.check_node():
yield constructor.composer.get_node()
finally:
parser.dispose()
for comp in ('reader', 'scanner'):
try:
getattr(getattr(self, '_' + comp), f'reset_{comp}')()
except AttributeError:
pass
# separate output resolver?
# def load(self, stream=None):
# if self._context_manager:
# if not self._input:
# raise TypeError("Missing input stream while dumping from context manager")
# for data in self._context_manager.load():
# yield data
# return
# if stream is None:
# raise TypeError("Need a stream argument when not loading from context manager")
# return self.load_one(stream)
def load(self, stream: Union[Path, StreamTextType]) -> Any:
"""
at this point you either have the non-pure Parser (which has its own reader and
scanner) or you have the pure Parser.
If the pure Parser is set, then set the Reader and Scanner, if not already set.
        If either the Scanner or Reader is set, you cannot use the non-pure Parser,
        so reset it to the pure Parser and set the Reader and/or Scanner as necessary.
"""
if not hasattr(stream, 'read') and hasattr(stream, 'open'):
# pathlib.Path() instance
with stream.open('rb') as fp:
return self.load(fp)
self.doc_infos.append(DocInfo(requested_version=version(self.version)))
self.tags = {}
constructor, parser = self.get_constructor_parser(stream)
try:
return constructor.get_single_data()
finally:
parser.dispose()
for comp in ('reader', 'scanner'):
try:
getattr(getattr(self, '_' + comp), f'reset_{comp}')()
except AttributeError:
pass
def load_all(self, stream: Union[Path, StreamTextType]) -> Any: # *, skip=None):
if not hasattr(stream, 'read') and hasattr(stream, 'open'):
# pathlib.Path() instance
with stream.open('r') as fp:
yield from self.load_all(fp)
return
# if skip is None:
# skip = []
# elif isinstance(skip, int):
# skip = [skip]
self.doc_infos.append(DocInfo(requested_version=version(self.version)))
self.tags = {}
constructor, parser = self.get_constructor_parser(stream)
try:
while constructor.check_data():
yield constructor.get_data()
self.doc_infos.append(DocInfo(requested_version=version(self.version)))
finally:
parser.dispose()
for comp in ('reader', 'scanner'):
try:
getattr(getattr(self, '_' + comp), f'reset_{comp}')()
except AttributeError:
pass
def get_constructor_parser(self, stream: StreamTextType) -> Any:
"""
        the old cyaml needs special setup, and therefore needs the stream at construction time
"""
if self.Constructor is None:
if 'full' in self.typ:
raise YAMLError(
"\nyou can only use yaml=YAML(typ='full') for dumping\n", # NOQA
)
if self.Parser is not CParser:
if self.Reader is None:
self.Reader = ruyaml.reader.Reader
if self.Scanner is None:
self.Scanner = ruyaml.scanner.Scanner
self.reader.stream = stream
else:
if self.Reader is not None:
if self.Scanner is None:
self.Scanner = ruyaml.scanner.Scanner
self.Parser = ruyaml.parser.Parser
self.reader.stream = stream
elif self.Scanner is not None:
if self.Reader is None:
self.Reader = ruyaml.reader.Reader
self.Parser = ruyaml.parser.Parser
self.reader.stream = stream
else:
# combined C level reader>scanner>parser
# does some calls to the resolver, e.g. BaseResolver.descend_resolver
# if you just initialise the CParser, too much of resolver.py
# is actually used
rslvr = self.Resolver
# if rslvr is ruyaml.resolver.VersionedResolver:
# rslvr = ruyaml.resolver.Resolver
class XLoader(self.Parser, self.Constructor, rslvr): # type: ignore
def __init__(
selfx,
stream: StreamTextType,
version: Optional[VersionType] = self.version,
preserve_quotes: Optional[bool] = None,
) -> None:
# NOQA
CParser.__init__(selfx, stream)
selfx._parser = selfx._composer = selfx
self.Constructor.__init__(selfx, loader=selfx)
selfx.allow_duplicate_keys = self.allow_duplicate_keys
rslvr.__init__(selfx, version=version, loadumper=selfx)
self._stream = stream
loader = XLoader(stream)
self._scanner = loader
return loader, loader
return self.constructor, self.parser
def emit(self, events: Any, stream: Any) -> None:
"""
Emit YAML parsing events into a stream.
If stream is None, return the produced string instead.
"""
_, _, emitter = self.get_serializer_representer_emitter(stream, None)
try:
for event in events:
emitter.emit(event)
finally:
try:
emitter.dispose()
except AttributeError:
raise
def serialize(self, node: Any, stream: Optional[StreamType]) -> Any:
"""
Serialize a representation tree into a YAML stream.
If stream is None, return the produced string instead.
"""
self.serialize_all([node], stream)
def serialize_all(self, nodes: Any, stream: Optional[StreamType]) -> Any:
"""
Serialize a sequence of representation trees into a YAML stream.
If stream is None, return the produced string instead.
"""
serializer, _, emitter = self.get_serializer_representer_emitter(stream, None)
try:
serializer.open()
for node in nodes:
serializer.serialize(node)
serializer.close()
finally:
try:
emitter.dispose()
except AttributeError:
raise
def dump(
self: Any,
data: Union[Path, StreamType],
stream: Any = None,
*,
transform: Any = None,
) -> Any:
if self._context_manager:
if not self._output:
raise TypeError(
'Missing output stream while dumping from context manager'
)
if transform is not None:
x = self.__class__.__name__
raise TypeError(
f'{x}.dump() in the context manager cannot have transform keyword',
)
self._context_manager.dump(data)
else: # old style
if stream is None:
raise TypeError(
'Need a stream argument when not dumping from context manager'
)
return self.dump_all([data], stream, transform=transform)
def dump_all(
self,
documents: Any,
stream: Union[Path, StreamType],
*,
transform: Any = None,
) -> Any:
if self._context_manager:
raise NotImplementedError
self._output = stream
self._context_manager = YAMLContextManager(self, transform=transform)
for data in documents:
self._context_manager.dump(data)
self._context_manager.teardown_output()
self._output = None
self._context_manager = None
def Xdump_all(self, documents: Any, stream: Any, *, transform: Any = None) -> Any:
"""
Serialize a sequence of Python objects into a YAML stream.
"""
if not hasattr(stream, 'write') and hasattr(stream, 'open'):
# pathlib.Path() instance
with stream.open('w') as fp:
return self.dump_all(documents, fp, transform=transform)
# The stream should have the methods `write` and possibly `flush`.
documents: StreamType = documents # mypy workaround
if self.top_level_colon_align is True:
tlca: Any = max([len(str(x)) for x in documents[0]])
else:
tlca = self.top_level_colon_align
if transform is not None:
fstream = stream
if self.encoding is None:
stream = StringIO()
else:
stream = BytesIO()
serializer, representer, emitter = self.get_serializer_representer_emitter(
stream,
tlca,
)
try:
self.serializer.open()
for data in documents: # NOQA
try:
self.representer.represent(data)
except AttributeError:
# nprint(dir(dumper._representer))
raise
self.serializer.close()
finally:
try:
self.emitter.dispose()
except AttributeError:
raise
# self.dumper.dispose() # cyaml
delattr(self, '_serializer')
delattr(self, '_emitter')
if transform:
val = stream.getvalue() # type: ignore
if self.encoding:
val = val.decode(self.encoding)
if fstream is None:
transform(val)
else:
fstream.write(transform(val)) # type: ignore
return None
def get_serializer_representer_emitter(self, stream: StreamType, tlca: Any) -> Any:
# we have only .Serializer to deal with (vs .Reader & .Scanner), much simpler
if self.Emitter is not CEmitter:
if self.Serializer is None:
self.Serializer = ruyaml.serializer.Serializer
self.emitter.stream = stream
self.emitter.top_level_colon_align = tlca
if self.scalar_after_indicator is not None:
self.emitter.scalar_after_indicator = self.scalar_after_indicator
return self.serializer, self.representer, self.emitter
if self.Serializer is not None:
# cannot set serializer with CEmitter
self.Emitter = ruyaml.emitter.Emitter
self.emitter.stream = stream
self.emitter.top_level_colon_align = tlca
if self.scalar_after_indicator is not None:
self.emitter.scalar_after_indicator = self.scalar_after_indicator
return self.serializer, self.representer, self.emitter
# C routines
rslvr = (
ruyaml.resolver.BaseResolver
if 'base' in self.typ
else ruyaml.resolver.Resolver
)
class XDumper(CEmitter, self.Representer, rslvr): # type: ignore
def __init__(
selfx: StreamType,
stream: Any,
default_style: Any = None,
default_flow_style: Any = None,
canonical: Optional[bool] = None,
indent: Optional[int] = None,
width: Optional[int] = None,
allow_unicode: Optional[bool] = None,
line_break: Any = None,
encoding: Any = None,
explicit_start: Optional[bool] = None,
explicit_end: Optional[bool] = None,
version: Any = None,
tags: Any = None,
block_seq_indent: Any = None,
top_level_colon_align: Any = None,
prefix_colon: Any = None,
) -> None:
# NOQA
CEmitter.__init__(
selfx,
stream,
canonical=canonical,
indent=indent,
width=width,
encoding=encoding,
allow_unicode=allow_unicode,
line_break=line_break,
explicit_start=explicit_start,
explicit_end=explicit_end,
version=version,
tags=tags,
)
selfx._emitter = selfx._serializer = selfx._representer = selfx
self.Representer.__init__(
selfx,
default_style=default_style,
default_flow_style=default_flow_style,
)
rslvr.__init__(selfx)
self._stream = stream
dumper = XDumper(
stream,
default_style=self.default_style,
default_flow_style=self.default_flow_style,
canonical=self.canonical,
indent=self.old_indent,
width=self.width,
allow_unicode=self.allow_unicode,
line_break=self.line_break,
encoding=self.encoding,
explicit_start=self.explicit_start,
explicit_end=self.explicit_end,
version=self.version,
tags=self.tags,
)
self._emitter = self._serializer = dumper
return dumper, dumper, dumper
# basic types
def map(self, **kw: Any) -> Any:
if 'rt' in self.typ:
return CommentedMap(**kw)
else:
return dict(**kw)
def seq(self, *args: Any) -> Any:
if 'rt' in self.typ:
return CommentedSeq(*args)
else:
return list(*args)
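    # Example for the two helpers above: with typ='rt' (the default) yaml.map(a=1)
    # returns a CommentedMap and yaml.seq([1, 2]) a CommentedSeq; for other typs they
    # fall back to plain dict/list.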
# helpers
def official_plug_ins(self) -> Any:
"""search for list of subdirs that are plug-ins, if __file__ is not available, e.g.
single file installers that are not properly emulating a file-system (issue 324)
no plug-ins will be found. If any are packaged, you know which file that are
and you can explicitly provide it during instantiation:
yaml = ruyaml.YAML(plug_ins=['ruyaml/jinja2/__plug_in__'])
"""
try:
bd = os.path.dirname(__file__)
except NameError:
return []
gpbd = os.path.dirname(os.path.dirname(bd))
res = [x.replace(gpbd, "")[1:-3] for x in glob.glob(bd + '/*/__plug_in__.py')]
return res
def register_class(self, cls: Any) -> Any:
"""
register a class for dumping/loading
        - if it has an attribute yaml_tag, use that to register, else use '!' + the class name
        - if it has methods to_yaml/from_yaml, use those to dump/load, else dump its
          attributes as a mapping
"""
tag = getattr(cls, 'yaml_tag', '!' + cls.__name__)
try:
self.representer.add_representer(cls, cls.to_yaml)
except AttributeError:
def t_y(representer: Any, data: Any) -> Any:
return representer.represent_yaml_object(
tag,
data,
cls,
flow_style=representer.default_flow_style,
)
self.representer.add_representer(cls, t_y)
try:
self.constructor.add_constructor(tag, cls.from_yaml)
except AttributeError:
def f_y(constructor: Any, node: Any) -> Any:
return constructor.construct_yaml_object(node, cls)
self.constructor.add_constructor(tag, f_y)
return cls
# ### context manager
def __enter__(self) -> Any:
self._context_manager = YAMLContextManager(self)
return self
def __exit__(
self,
typ: Optional[Type[BaseException]],
value: Optional[BaseException],
traceback: Optional[TracebackType],
) -> None:
if typ:
nprint('typ', typ)
self._context_manager.teardown_output()
# self._context_manager.teardown_input()
self._context_manager = None
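    # Illustrative sketch of the context-manager form (doc1/doc2 are placeholders):
    # the output stream is given at construction and each dump() emits one document.
    #     with YAML(output=sys.stdout) as yaml:
    #         yaml.dump(doc1)
    #         yaml.dump(doc2)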
# ### backwards compatibility
def _indent(
self, mapping: Any = None, sequence: Any = None, offset: Any = None
) -> None:
if mapping is not None:
self.map_indent = mapping
if sequence is not None:
self.sequence_indent = sequence
if offset is not None:
self.sequence_dash_offset = offset
@property
def version(self) -> Optional[Tuple[int, int]]:
return self._version
@version.setter
def version(self, val: VersionType) -> None:
if val is None:
self._version = val
return
elif isinstance(val, str):
sval = tuple(int(x) for x in val.split('.'))
elif isinstance(val, (list, tuple)):
sval = tuple(int(x) for x in val)
elif isinstance(val, Version):
sval = (val.major, val.minor)
else:
raise TypeError(f'unknown version type {type(val)}')
assert len(sval) == 2, f'version can only have major.minor, got {val}'
assert sval[0] == 1, f'version major part can only be 1, got {val}'
assert sval[1] in [1, 2], f'version minor part can only be 2 or 1, got {val}'
self._version = sval
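    # e.g. yaml.version = '1.2', yaml.version = (1, 2) or a docinfo.Version instance
    # are all accepted above and normalised to a (major, minor) tuple.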
@property
def tags(self) -> Any:
return self._tags
@tags.setter
def tags(self, val: Any) -> None:
self._tags = val
@property
def indent(self) -> Any:
return self._indent
@indent.setter
def indent(self, val: Any) -> None:
self.old_indent = val
@property
def block_seq_indent(self) -> Any:
return self.sequence_dash_offset
@block_seq_indent.setter
def block_seq_indent(self, val: Any) -> None:
self.sequence_dash_offset = val
def compact(self, seq_seq: Any = None, seq_map: Any = None) -> None:
self.compact_seq_seq = seq_seq
self.compact_seq_map = seq_map
class YAMLContextManager:
def __init__(self, yaml: Any, transform: Any = None) -> None:
# used to be: (Any, Optional[Callable]) -> None
self._yaml = yaml
self._output_inited = False
self._output_path = None
self._output = self._yaml._output
self._transform = transform
# self._input_inited = False
# self._input = input
# self._input_path = None
# self._transform = yaml.transform
# self._fstream = None
if not hasattr(self._output, 'write') and hasattr(self._output, 'open'):
# pathlib.Path() instance, open with the same mode
self._output_path = self._output
self._output = self._output_path.open('w')
# if not hasattr(self._stream, 'write') and hasattr(stream, 'open'):
# if not hasattr(self._input, 'read') and hasattr(self._input, 'open'):
# # pathlib.Path() instance, open with the same mode
# self._input_path = self._input
# self._input = self._input_path.open('r')
if self._transform is not None:
self._fstream = self._output
if self._yaml.encoding is None:
self._output = StringIO()
else:
self._output = BytesIO()
def teardown_output(self) -> None:
if self._output_inited:
self._yaml.serializer.close()
else:
return
try:
self._yaml.emitter.dispose()
except AttributeError:
raise
# self.dumper.dispose() # cyaml
try:
delattr(self._yaml, '_serializer')
delattr(self._yaml, '_emitter')
except AttributeError:
raise
if self._transform:
val = self._output.getvalue()
if self._yaml.encoding:
val = val.decode(self._yaml.encoding)
if self._fstream is None:
self._transform(val)
else:
self._fstream.write(self._transform(val))
self._fstream.flush()
self._output = self._fstream # maybe not necessary
if self._output_path is not None:
self._output.close()
def init_output(self, first_data: Any) -> None:
if self._yaml.top_level_colon_align is True:
tlca: Any = max([len(str(x)) for x in first_data])
else:
tlca = self._yaml.top_level_colon_align
self._yaml.get_serializer_representer_emitter(self._output, tlca)
self._yaml.serializer.open()
self._output_inited = True
def dump(self, data: Any) -> None:
if not self._output_inited:
self.init_output(data)
try:
self._yaml.representer.represent(data)
except AttributeError:
# nprint(dir(dumper._representer))
raise
# def teardown_input(self):
# pass
#
# def init_input(self):
# # set the constructor and parser on YAML() instance
# self._yaml.get_constructor_parser(stream)
#
# def load(self):
# if not self._input_inited:
# self.init_input()
# try:
# while self._yaml.constructor.check_data():
# yield self._yaml.constructor.get_data()
# finally:
# parser.dispose()
# try:
# self._reader.reset_reader() # type: ignore
# except AttributeError:
# pass
# try:
# self._scanner.reset_scanner() # type: ignore
# except AttributeError:
# pass
def yaml_object(yml: Any) -> Any:
"""decorator for classes that needs to dump/load objects
The tag for such objects is taken from the class attribute yaml_tag (or the
class name in lowercase in case unavailable)
If methods to_yaml and/or from_yaml are available, these are called for dumping resp.
loading, default routines (dumping a mapping of the attributes) used otherwise.
"""
def yo_deco(cls: Any) -> Any:
tag = getattr(cls, 'yaml_tag', '!' + cls.__name__)
try:
yml.representer.add_representer(cls, cls.to_yaml)
except AttributeError:
def t_y(representer: Any, data: Any) -> Any:
return representer.represent_yaml_object(
tag,
data,
cls,
flow_style=representer.default_flow_style,
)
yml.representer.add_representer(cls, t_y)
try:
yml.constructor.add_constructor(tag, cls.from_yaml)
except AttributeError:
def f_y(constructor: Any, node: Any) -> Any:
return constructor.construct_yaml_object(node, cls)
yml.constructor.add_constructor(tag, f_y)
return cls
return yo_deco
########################################################################################
def warn_deprecation(fun: Any, method: Any, arg: str = '') -> None:
warnings.warn(
f'\n{fun} will be removed, use\n\n yaml=YAML({arg})\n yaml.{method}(...)\n\ninstead', # NOQA
PendingDeprecationWarning, # this will show when testing with pytest/tox
stacklevel=3,
)
def error_deprecation(
fun: Any, method: Any, arg: str = '', comment: str = 'instead of'
) -> None: # NOQA
import inspect
s = f'\n"{fun}()" has been removed, use\n\n yaml = YAML({arg})\n yaml.{method}(...)\n\n{comment}' # NOQA
try:
info = inspect.getframeinfo(inspect.stack()[2][0])
context = '' if info.code_context is None else "".join(info.code_context)
s += f' file "{info.filename}", line {info.lineno}\n\n{context}'
except Exception as e:
_ = e
s += '\n'
if sys.version_info < (3, 10):
raise AttributeError(s)
else:
raise AttributeError(s, name=None)
_error_dep_arg = "typ='rt'"
_error_dep_comment = "and register any classes that you use, or check the tag attribute on the loaded data,\ninstead of" # NOQA
########################################################################################
def scan(stream: StreamTextType, Loader: Any = Loader) -> Any:
"""
Scan a YAML stream and produce scanning tokens.
"""
error_deprecation('scan', 'scan', arg=_error_dep_arg, comment=_error_dep_comment)
def parse(stream: StreamTextType, Loader: Any = Loader) -> Any:
"""
Parse a YAML stream and produce parsing events.
"""
error_deprecation('parse', 'parse', arg=_error_dep_arg, comment=_error_dep_comment)
def compose(stream: StreamTextType, Loader: Any = Loader) -> Any:
"""
Parse the first YAML document in a stream
and produce the corresponding representation tree.
"""
error_deprecation(
'compose', 'compose', arg=_error_dep_arg, comment=_error_dep_comment
)
def compose_all(stream: StreamTextType, Loader: Any = Loader) -> Any:
"""
Parse all YAML documents in a stream
and produce corresponding representation trees.
"""
error_deprecation(
        'compose_all', 'compose_all', arg=_error_dep_arg, comment=_error_dep_comment
)
def load(
stream: Any,
Loader: Any = None,
version: Any = None,
preserve_quotes: Any = None,
) -> Any:
"""
Parse the first YAML document in a stream
and produce the corresponding Python object.
"""
error_deprecation('load', 'load', arg=_error_dep_arg, comment=_error_dep_comment)
def load_all(
stream: Any,
Loader: Any = None,
version: Any = None,
preserve_quotes: Any = None,
) -> Any:
# NOQA
"""
Parse all YAML documents in a stream
and produce corresponding Python objects.
"""
error_deprecation(
'load_all', 'load_all', arg=_error_dep_arg, comment=_error_dep_comment
)
def safe_load(stream: StreamTextType, version: Optional[VersionType] = None) -> Any:
"""
Parse the first YAML document in a stream
and produce the corresponding Python object.
Resolve only basic YAML tags.
"""
error_deprecation('safe_load', 'load', arg="typ='safe', pure=True")
def safe_load_all(stream: StreamTextType, version: Optional[VersionType] = None) -> Any:
"""
Parse all YAML documents in a stream
and produce corresponding Python objects.
Resolve only basic YAML tags.
"""
error_deprecation('safe_load_all', 'load_all', arg="typ='safe', pure=True")
def round_trip_load(
stream: StreamTextType,
version: Optional[VersionType] = None,
preserve_quotes: Optional[bool] = None,
) -> Any:
"""
Parse the first YAML document in a stream
and produce the corresponding Python object.
Resolve only basic YAML tags.
"""
    error_deprecation('round_trip_load', 'load')
def round_trip_load_all(
stream: StreamTextType,
version: Optional[VersionType] = None,
preserve_quotes: Optional[bool] = None,
) -> Any:
"""
Parse all YAML documents in a stream
and produce corresponding Python objects.
Resolve only basic YAML tags.
"""
error_deprecation('round_trip_load_all', 'load_all')
def emit(
events: Any,
stream: Optional[StreamType] = None,
Dumper: Any = Dumper,
canonical: Optional[bool] = None,
indent: Union[int, None] = None,
width: Optional[int] = None,
allow_unicode: Optional[bool] = None,
line_break: Any = None,
) -> Any:
# NOQA
"""
Emit YAML parsing events into a stream.
If stream is None, return the produced string instead.
"""
error_deprecation('emit', 'emit', arg="typ='safe', pure=True")
enc = None
def serialize_all(
nodes: Any,
stream: Optional[StreamType] = None,
Dumper: Any = Dumper,
canonical: Any = None,
indent: Optional[int] = None,
width: Optional[int] = None,
allow_unicode: Optional[bool] = None,
line_break: Any = None,
encoding: Any = enc,
explicit_start: Optional[bool] = None,
explicit_end: Optional[bool] = None,
version: Optional[VersionType] = None,
tags: Any = None,
) -> Any:
# NOQA
"""
Serialize a sequence of representation trees into a YAML stream.
If stream is None, return the produced string instead.
"""
error_deprecation('serialize_all', 'serialize_all', arg="typ='safe', pure=True")
def serialize(
node: Any,
stream: Optional[StreamType] = None,
Dumper: Any = Dumper,
**kwds: Any,
) -> Any:
"""
Serialize a representation tree into a YAML stream.
If stream is None, return the produced string instead.
"""
error_deprecation('serialize', 'serialize', arg="typ='safe', pure=True")
def dump_all(
documents: Any,
stream: Optional[StreamType] = None,
Dumper: Any = Dumper,
default_style: Any = None,
default_flow_style: Any = None,
canonical: Optional[bool] = None,
indent: Optional[int] = None,
width: Optional[int] = None,
allow_unicode: Optional[bool] = None,
line_break: Any = None,
encoding: Any = enc,
explicit_start: Optional[bool] = None,
explicit_end: Optional[bool] = None,
version: Any = None,
tags: Any = None,
block_seq_indent: Any = None,
top_level_colon_align: Any = None,
prefix_colon: Any = None,
) -> Any:
# NOQA
"""
Serialize a sequence of Python objects into a YAML stream.
If stream is None, return the produced string instead.
"""
error_deprecation('dump_all', 'dump_all', arg="typ='unsafe', pure=True")
def dump(
data: Any,
stream: Optional[StreamType] = None,
Dumper: Any = Dumper,
default_style: Any = None,
default_flow_style: Any = None,
canonical: Optional[bool] = None,
indent: Optional[int] = None,
width: Optional[int] = None,
allow_unicode: Optional[bool] = None,
line_break: Any = None,
encoding: Any = enc,
explicit_start: Optional[bool] = None,
explicit_end: Optional[bool] = None,
version: Optional[VersionType] = None,
tags: Any = None,
block_seq_indent: Any = None,
) -> Any:
# NOQA
"""
Serialize a Python object into a YAML stream.
If stream is None, return the produced string instead.
default_style ∈ None, '', '"', "'", '|', '>'
"""
error_deprecation('dump', 'dump', arg="typ='unsafe', pure=True")
def safe_dump(data: Any, stream: Optional[StreamType] = None, **kwds: Any) -> Any:
"""
Serialize a Python object into a YAML stream.
Produce only basic YAML tags.
If stream is None, return the produced string instead.
"""
error_deprecation('safe_dump', 'dump', arg="typ='safe', pure=True")
def round_trip_dump(
data: Any,
stream: Optional[StreamType] = None,
Dumper: Any = RoundTripDumper,
default_style: Any = None,
default_flow_style: Any = None,
canonical: Optional[bool] = None,
indent: Optional[int] = None,
width: Optional[int] = None,
allow_unicode: Optional[bool] = None,
line_break: Any = None,
encoding: Any = enc,
explicit_start: Optional[bool] = None,
explicit_end: Optional[bool] = None,
version: Optional[VersionType] = None,
tags: Any = None,
block_seq_indent: Any = None,
top_level_colon_align: Any = None,
prefix_colon: Any = None,
) -> Any:
allow_unicode = True if allow_unicode is None else allow_unicode
error_deprecation('round_trip_dump', 'dump')
# Loader/Dumper are no longer composites, to get to the associated
# Resolver()/Representer(), etc., you need to instantiate the class
def add_implicit_resolver(
tag: Any,
regexp: Any,
first: Any = None,
Loader: Any = None,
Dumper: Any = None,
resolver: Any = Resolver,
) -> None:
"""
Add an implicit scalar detector.
If an implicit scalar value matches the given regexp,
the corresponding tag is assigned to the scalar.
first is a sequence of possible initial characters or None.
"""
if Loader is None and Dumper is None:
resolver.add_implicit_resolver(tag, regexp, first)
return
if Loader:
if hasattr(Loader, 'add_implicit_resolver'):
Loader.add_implicit_resolver(tag, regexp, first)
elif issubclass(
Loader,
(BaseLoader, SafeLoader, ruyaml.loader.Loader, RoundTripLoader),
):
Resolver.add_implicit_resolver(tag, regexp, first)
else:
raise NotImplementedError
if Dumper:
if hasattr(Dumper, 'add_implicit_resolver'):
Dumper.add_implicit_resolver(tag, regexp, first)
elif issubclass(
Dumper,
(BaseDumper, SafeDumper, ruyaml.dumper.Dumper, RoundTripDumper),
):
Resolver.add_implicit_resolver(tag, regexp, first)
else:
raise NotImplementedError
# this code currently not tested
def add_path_resolver(
tag: Any,
path: Any,
kind: Any = None,
Loader: Any = None,
Dumper: Any = None,
resolver: Any = Resolver,
) -> None:
"""
Add a path based resolver for the given tag.
A path is a list of keys that forms a path
to a node in the representation tree.
Keys can be string values, integers, or None.
"""
if Loader is None and Dumper is None:
resolver.add_path_resolver(tag, path, kind)
return
if Loader:
if hasattr(Loader, 'add_path_resolver'):
Loader.add_path_resolver(tag, path, kind)
elif issubclass(
Loader,
(BaseLoader, SafeLoader, ruyaml.loader.Loader, RoundTripLoader),
):
Resolver.add_path_resolver(tag, path, kind)
else:
raise NotImplementedError
if Dumper:
if hasattr(Dumper, 'add_path_resolver'):
Dumper.add_path_resolver(tag, path, kind)
elif issubclass(
Dumper,
(BaseDumper, SafeDumper, ruyaml.dumper.Dumper, RoundTripDumper),
):
Resolver.add_path_resolver(tag, path, kind)
else:
raise NotImplementedError
def add_constructor(
tag: Any,
object_constructor: Any,
Loader: Any = None,
constructor: Any = Constructor,
) -> None:
"""
Add an object constructor for the given tag.
    object_constructor is a function that accepts a Loader instance
and a node object and produces the corresponding Python object.
"""
if Loader is None:
constructor.add_constructor(tag, object_constructor)
else:
if hasattr(Loader, 'add_constructor'):
Loader.add_constructor(tag, object_constructor)
return
if issubclass(Loader, BaseLoader):
BaseConstructor.add_constructor(tag, object_constructor)
elif issubclass(Loader, SafeLoader):
SafeConstructor.add_constructor(tag, object_constructor)
        elif issubclass(Loader, ruyaml.loader.Loader):
Constructor.add_constructor(tag, object_constructor)
elif issubclass(Loader, RoundTripLoader):
RoundTripConstructor.add_constructor(tag, object_constructor)
else:
raise NotImplementedError
def add_multi_constructor(
tag_prefix: Any,
multi_constructor: Any,
Loader: Any = None,
constructor: Any = Constructor, # NOQA
) -> None:
"""
Add a multi-constructor for the given tag prefix.
Multi-constructor is called for a node if its tag starts with tag_prefix.
Multi-constructor accepts a Loader instance, a tag suffix,
and a node object and produces the corresponding Python object.
"""
if Loader is None:
constructor.add_multi_constructor(tag_prefix, multi_constructor)
else:
if False and hasattr(Loader, 'add_multi_constructor'):
Loader.add_multi_constructor(tag_prefix, constructor)
return
if issubclass(Loader, BaseLoader):
BaseConstructor.add_multi_constructor(tag_prefix, multi_constructor)
elif issubclass(Loader, SafeLoader):
SafeConstructor.add_multi_constructor(tag_prefix, multi_constructor)
elif issubclass(Loader, ruyaml.loader.Loader):
Constructor.add_multi_constructor(tag_prefix, multi_constructor)
elif issubclass(Loader, RoundTripLoader):
RoundTripConstructor.add_multi_constructor(tag_prefix, multi_constructor)
else:
raise NotImplementedError
def add_representer(
data_type: Any,
object_representer: Any,
Dumper: Any = None,
representer: Any = Representer, # NOQA
) -> None:
"""
Add a representer for the given type.
object_representer is a function accepting a Dumper instance
and an instance of the given data type
and producing the corresponding representation node.
"""
if Dumper is None:
representer.add_representer(data_type, object_representer)
else:
if hasattr(Dumper, 'add_representer'):
Dumper.add_representer(data_type, object_representer)
return
if issubclass(Dumper, BaseDumper):
BaseRepresenter.add_representer(data_type, object_representer)
elif issubclass(Dumper, SafeDumper):
SafeRepresenter.add_representer(data_type, object_representer)
        elif issubclass(Dumper, ruyaml.dumper.Dumper):
Representer.add_representer(data_type, object_representer)
elif issubclass(Dumper, RoundTripDumper):
RoundTripRepresenter.add_representer(data_type, object_representer)
else:
raise NotImplementedError
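# Illustrative usage sketch for add_representer (the '!point' tag and Point
# class are hypothetical, not part of the library):
#
#   def represent_point(dumper, data):
#       return dumper.represent_sequence('!point', [data.x, data.y])
#
#   add_representer(Point, represent_point, Dumper=SafeDumper)
#
# Dumping Point(1, 2) then emits a '!point' tagged sequence.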
# this code is currently not tested
def add_multi_representer(
data_type: Any,
multi_representer: Any,
Dumper: Any = None,
representer: Any = Representer,
) -> None:
"""
Add a representer for the given type.
multi_representer is a function accepting a Dumper instance
and an instance of the given data type or subtype
and producing the corresponding representation node.
"""
if Dumper is None:
representer.add_multi_representer(data_type, multi_representer)
else:
if hasattr(Dumper, 'add_multi_representer'):
Dumper.add_multi_representer(data_type, multi_representer)
return
if issubclass(Dumper, BaseDumper):
BaseRepresenter.add_multi_representer(data_type, multi_representer)
elif issubclass(Dumper, SafeDumper):
SafeRepresenter.add_multi_representer(data_type, multi_representer)
        elif issubclass(Dumper, ruyaml.dumper.Dumper):
Representer.add_multi_representer(data_type, multi_representer)
elif issubclass(Dumper, RoundTripDumper):
RoundTripRepresenter.add_multi_representer(data_type, multi_representer)
else:
raise NotImplementedError
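# Illustrative usage sketch for add_multi_representer (the Shape base class and
# '!shape' tag are hypothetical, not part of the library): unlike
# add_representer, the registration also applies to subclasses of the given
# type.
#
#   def represent_shape(dumper, data):
#       return dumper.represent_mapping('!shape', data.__dict__)
#
#   add_multi_representer(Shape, represent_shape, Dumper=SafeDumper)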
class YAMLObjectMetaclass(type):
"""
The metaclass for YAMLObject.
"""
def __init__(cls, name: Any, bases: Any, kwds: Any) -> None:
super().__init__(name, bases, kwds)
if 'yaml_tag' in kwds and kwds['yaml_tag'] is not None:
cls.yaml_constructor.add_constructor(cls.yaml_tag, cls.from_yaml) # type: ignore
cls.yaml_representer.add_representer(cls, cls.to_yaml) # type: ignore
class YAMLObject(metaclass=YAMLObjectMetaclass): # type: ignore
"""
An object that can dump itself to a YAML stream
and load itself from a YAML stream.
"""
__slots__ = () # no direct instantiation, so allow immutable subclasses
yaml_constructor = Constructor
yaml_representer = Representer
yaml_tag: Any = None
yaml_flow_style: Any = None
@classmethod
def from_yaml(cls, constructor: Any, node: Any) -> Any:
"""
Convert a representation node to a Python object.
"""
return constructor.construct_yaml_object(node, cls)
@classmethod
def to_yaml(cls, representer: Any, data: Any) -> Any:
"""
Convert a Python object to a representation node.
"""
return representer.represent_yaml_object(
cls.yaml_tag,
data,
cls,
flow_style=cls.yaml_flow_style,
)
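# Illustrative sketch of a YAMLObject subclass (the Monster class and '!Monster'
# tag are hypothetical, not part of the library). Setting yaml_tag is enough for
# YAMLObjectMetaclass to register both from_yaml and to_yaml:
#
#   class Monster(YAMLObject):
#       yaml_tag = '!Monster'
#
#       def __init__(self, name, hp):
#           self.name = name
#           self.hp = hp
#
# Loading '!Monster {name: Dragon, hp: 50}' with a loader that uses the
# registered constructor then yields a Monster instance, and dumping one emits
# the same tagged mapping.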
python-ruyaml-0.92.1/lib/ruyaml/mergevalue.py 0000664 0000000 0000000 00000001727 15056754172 0021243 0 ustar 00root root 0000000 0000000 from __future__ import annotations
if False: # MYPY
from typing import Any, Dict, Iterator, List, Optional, Union # NOQA
merge_attrib = '_yaml_merge'
class MergeValue:
attrib = merge_attrib
def __init__(self) -> None:
self.value: List[Any] = []
self.sequence = None
self.merge_pos: Optional[int] = None # position of merge in the mapping
def __getitem__(self, index: Any) -> Any:
return self.value[index]
def __setitem__(self, index: Any, val: Any) -> None:
self.value[index] = val
def __repr__(self) -> Any:
return f'MergeValue({self.value!r})'
def __len__(self) -> Any:
return len(self.value)
def append(self, elem: Any) -> Any:
self.value.append(elem)
def extend(self, elements: Any) -> None:
self.value.extend(elements)
def set_sequence(self, seq: Any) -> None:
# print('mergevalue.set_sequence node', node.anchor)
self.sequence = seq
python-ruyaml-0.92.1/lib/ruyaml/nodes.py 0000664 0000000 0000000 00000010203 15056754172 0020204 0 ustar 00root root 0000000 0000000 from __future__ import annotations
import sys
if False: # MYPY
from typing import Dict, Any, Text, Optional # NOQA
from ruyaml.tag import Tag
class Node:
__slots__ = 'ctag', 'value', 'start_mark', 'end_mark', 'comment', 'anchor'
def __init__(
self,
tag: Any,
value: Any,
start_mark: Any,
end_mark: Any,
comment: Any = None,
anchor: Any = None,
) -> None:
# you can still get a string from the serializer
self.ctag = tag if isinstance(tag, Tag) else Tag(suffix=tag)
self.value = value
self.start_mark = start_mark
self.end_mark = end_mark
self.comment = comment
self.anchor = anchor
@property
def tag(self) -> Optional[str]:
return None if self.ctag is None else str(self.ctag)
@tag.setter
def tag(self, val: Any) -> None:
if isinstance(val, str):
val = Tag(suffix=val)
self.ctag = val
def __repr__(self) -> Any:
value = self.value
# if isinstance(value, list):
# if len(value) == 0:
# value = ''
# elif len(value) == 1:
# value = '<1 item>'
# else:
# value = f'<{len(value)} items>'
# else:
# if len(value) > 75:
# value = repr(value[:70]+' ... ')
# else:
# value = repr(value)
value = repr(value)
if self.anchor is not None:
return f'{self.__class__.__name__!s}(tag={self.tag!r}, anchor={self.anchor!r}, value={value!s})' # NOQA
return f'{self.__class__.__name__!s}(tag={self.tag!r}, value={value!s})'
def dump(self, indent: int = 0) -> None:
xx = self.__class__.__name__
xi = ' ' * indent
if isinstance(self.value, str):
sys.stdout.write(f'{xi}{xx}(tag={self.tag!r}, value={self.value!r})\n')
if self.comment:
sys.stdout.write(f' {xi}comment: {self.comment})\n')
return
sys.stdout.write(f'{xi}{xx}(tag={self.tag!r})\n')
if self.comment:
sys.stdout.write(f' {xi}comment: {self.comment})\n')
for v in self.value:
if isinstance(v, tuple):
for v1 in v:
v1.dump(indent + 1)
elif isinstance(v, Node):
v.dump(indent + 1)
else:
sys.stdout.write(f'Node value type? {type(v)}\n')
class ScalarNode(Node):
"""
styles:
? -> set() ? key, no value
- -> suppressable null value in set
" -> double quoted
' -> single quoted
| -> literal style
> -> folding style
"""
__slots__ = ('style',)
id = 'scalar'
def __init__(
self,
tag: Any,
value: Any,
start_mark: Any = None,
end_mark: Any = None,
style: Any = None,
comment: Any = None,
anchor: Any = None,
) -> None:
Node.__init__(
self, tag, value, start_mark, end_mark, comment=comment, anchor=anchor
)
self.style = style
class CollectionNode(Node):
__slots__ = ('flow_style',)
def __init__(
self,
tag: Any,
value: Any,
start_mark: Any = None,
end_mark: Any = None,
flow_style: Any = None,
comment: Any = None,
anchor: Any = None,
) -> None:
Node.__init__(self, tag, value, start_mark, end_mark, comment=comment)
self.flow_style = flow_style
self.anchor = anchor
class SequenceNode(CollectionNode):
__slots__ = ()
id = 'sequence'
class MappingNode(CollectionNode):
__slots__ = ('merge',)
id = 'mapping'
def __init__(
self,
tag: Any,
value: Any,
start_mark: Any = None,
end_mark: Any = None,
flow_style: Any = None,
comment: Any = None,
anchor: Any = None,
) -> None:
CollectionNode.__init__(
self,
tag,
value,
start_mark,
end_mark,
flow_style,
comment,
anchor,
)
self.merge = None
python-ruyaml-0.92.1/lib/ruyaml/parser.py 0000664 0000000 0000000 00000111055 15056754172 0020377 0 ustar 00root root 0000000 0000000 from __future__ import annotations
from ruyaml.comments import C_POST, C_PRE, C_SPLIT_ON_FIRST_BLANK
from ruyaml.compat import nprint, nprintf # NOQA
from ruyaml.error import MarkedYAMLError
from ruyaml.events import * # NOQA
from ruyaml.scanner import ( # NOQA
BlankLineComment,
RoundTripScanner,
Scanner,
ScannerError,
)
from ruyaml.tag import Tag
from ruyaml.tokens import * # NOQA
# The following YAML grammar is LL(1) and is parsed by a recursive descent
# parser.
#
# stream ::= STREAM-START implicit_document? explicit_document*
# STREAM-END
# implicit_document ::= block_node DOCUMENT-END*
# explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
# block_node_or_indentless_sequence ::=
# ALIAS
# | properties (block_content |
# indentless_block_sequence)?
# | block_content
# | indentless_block_sequence
# block_node ::= ALIAS
# | properties block_content?
# | block_content
# flow_node ::= ALIAS
# | properties flow_content?
# | flow_content
# properties ::= TAG ANCHOR? | ANCHOR TAG?
# block_content ::= block_collection | flow_collection | SCALAR
# flow_content ::= flow_collection | SCALAR
# block_collection ::= block_sequence | block_mapping
# flow_collection ::= flow_sequence | flow_mapping
# block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)*
# BLOCK-END
# indentless_sequence ::= (BLOCK-ENTRY block_node?)+
# block_mapping ::= BLOCK-MAPPING_START
# ((KEY block_node_or_indentless_sequence?)?
# (VALUE block_node_or_indentless_sequence?)?)*
# BLOCK-END
# flow_sequence ::= FLOW-SEQUENCE-START
# (flow_sequence_entry FLOW-ENTRY)*
# flow_sequence_entry?
# FLOW-SEQUENCE-END
# flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
# flow_mapping ::= FLOW-MAPPING-START
# (flow_mapping_entry FLOW-ENTRY)*
# flow_mapping_entry?
# FLOW-MAPPING-END
# flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
#
# FIRST sets:
#
# stream: { STREAM-START }
# explicit_document: { DIRECTIVE DOCUMENT-START }
# implicit_document: FIRST(block_node)
# block_node: { ALIAS TAG ANCHOR SCALAR BLOCK-SEQUENCE-START
# BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START }
# flow_node: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START }
# block_content: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START
# FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
# flow_content: { FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
# block_collection: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START }
# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
# block_sequence: { BLOCK-SEQUENCE-START }
# block_mapping: { BLOCK-MAPPING-START }
# block_node_or_indentless_sequence: { ALIAS ANCHOR TAG SCALAR
# BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START
# FLOW-MAPPING-START BLOCK-ENTRY }
# indentless_sequence: { ENTRY }
# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
# flow_sequence: { FLOW-SEQUENCE-START }
# flow_mapping: { FLOW-MAPPING-START }
# flow_sequence_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START
# FLOW-MAPPING-START KEY }
# flow_mapping_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START
# FLOW-MAPPING-START KEY }
# need to have full path with import, as pkg_resources tries to load parser.py in __init__.py
# only to not do anything with the package afterwards
# and for Jython too
if False: # MYPY
from typing import Any, Dict, List, Optional # NOQA
__all__ = ['Parser', 'RoundTripParser', 'ParserError']
def xprintf(*args: Any, **kw: Any) -> Any:
return nprintf(*args, **kw)
class ParserError(MarkedYAMLError):
pass
class Parser:
# Since writing a recursive-descendant parser is a straightforward task, we
# do not give many comments here.
DEFAULT_TAGS = {'!': '!', '!!': 'tag:yaml.org,2002:'}
def __init__(self, loader: Any) -> None:
self.loader = loader
if self.loader is not None and getattr(self.loader, '_parser', None) is None:
self.loader._parser = self
self.reset_parser()
def reset_parser(self) -> None:
# Reset the state attributes (to clear self-references)
self.current_event = self.last_event = None
self.tag_handles: Dict[Any, Any] = {}
self.states: List[Any] = []
self.marks: List[Any] = []
self.state: Any = self.parse_stream_start
def dispose(self) -> None:
self.reset_parser()
@property
def scanner(self) -> Any:
if hasattr(self.loader, 'typ'):
return self.loader.scanner
return self.loader._scanner
@property
def resolver(self) -> Any:
if hasattr(self.loader, 'typ'):
return self.loader.resolver
return self.loader._resolver
def check_event(self, *choices: Any) -> bool:
# Check the type of the next event.
if self.current_event is None:
if self.state:
self.current_event = self.state()
if self.current_event is not None:
if not choices:
return True
for choice in choices:
if isinstance(self.current_event, choice):
return True
return False
def peek_event(self) -> Any:
# Get the next event.
if self.current_event is None:
if self.state:
self.current_event = self.state()
return self.current_event
def get_event(self) -> Any:
# Get the next event and proceed further.
if self.current_event is None:
if self.state:
self.current_event = self.state()
# assert self.current_event is not None
# if self.current_event.end_mark.line != self.peek_event().start_mark.line:
# xprintf('get_event', repr(self.current_event), self.peek_event().start_mark.line)
self.last_event = value = self.current_event
self.current_event = None
return value
# stream ::= STREAM-START implicit_document? explicit_document*
# STREAM-END
# implicit_document ::= block_node DOCUMENT-END*
# explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
def parse_stream_start(self) -> Any:
# Parse the stream start.
token = self.scanner.get_token()
self.move_token_comment(token)
event = StreamStartEvent(
token.start_mark, token.end_mark, encoding=token.encoding
)
# Prepare the next state.
self.state = self.parse_implicit_document_start
return event
def parse_implicit_document_start(self) -> Any:
# Parse an implicit document.
if not self.scanner.check_token(
DirectiveToken, DocumentStartToken, StreamEndToken
):
# don't need copy, as an implicit tag doesn't add tag_handles
self.tag_handles = self.DEFAULT_TAGS
token = self.scanner.peek_token()
start_mark = end_mark = token.start_mark
event = DocumentStartEvent(start_mark, end_mark, explicit=False)
# Prepare the next state.
self.states.append(self.parse_document_end)
self.state = self.parse_block_node
return event
else:
return self.parse_document_start()
def parse_document_start(self) -> Any:
# Parse any extra document end indicators.
while self.scanner.check_token(DocumentEndToken):
self.scanner.get_token()
# Parse an explicit document.
if not self.scanner.check_token(StreamEndToken):
version, tags = self.process_directives()
if not self.scanner.check_token(DocumentStartToken):
raise ParserError(
None,
None,
"expected '', "
f'but found {self.scanner.peek_token().id !r}',
self.scanner.peek_token().start_mark,
)
token = self.scanner.get_token()
start_mark = token.start_mark
end_mark = token.end_mark
# if self.loader is not None and \
# end_mark.line != self.scanner.peek_token().start_mark.line:
# self.loader.scalar_after_indicator = False
event: Any = DocumentStartEvent(
start_mark,
end_mark,
explicit=True,
version=version,
tags=tags,
comment=token.comment,
)
self.states.append(self.parse_document_end)
self.state = self.parse_document_content
else:
# Parse the end of the stream.
token = self.scanner.get_token()
event = StreamEndEvent(
token.start_mark, token.end_mark, comment=token.comment
)
assert not self.states
assert not self.marks
self.state = None
return event
def parse_document_end(self) -> Any:
# Parse the document end.
token = self.scanner.peek_token()
start_mark = end_mark = token.start_mark
explicit = False
if self.scanner.check_token(DocumentEndToken):
token = self.scanner.get_token()
# if token.end_mark.line != self.peek_event().start_mark.line:
pt = self.scanner.peek_token()
if not isinstance(pt, StreamEndToken) and (
token.end_mark.line == pt.start_mark.line
):
raise ParserError(
None,
None,
'found non-comment content after document end marker, '
f'{self.scanner.peek_token().id !r}',
self.scanner.peek_token().start_mark,
)
end_mark = token.end_mark
explicit = True
event = DocumentEndEvent(start_mark, end_mark, explicit=explicit)
# Prepare the next state.
if self.resolver.processing_version == (1, 1):
self.state = self.parse_document_start
else:
if explicit:
# found a document end marker, can be followed by implicit document
self.state = self.parse_implicit_document_start
else:
self.state = self.parse_document_start
return event
def parse_document_content(self) -> Any:
if self.scanner.check_token(
DirectiveToken,
DocumentStartToken,
DocumentEndToken,
StreamEndToken,
):
event = self.process_empty_scalar(self.scanner.peek_token().start_mark)
self.state = self.states.pop()
return event
else:
return self.parse_block_node()
def process_directives(self) -> Any:
yaml_version = None
self.tag_handles = {}
while self.scanner.check_token(DirectiveToken):
token = self.scanner.get_token()
if token.name == 'YAML':
if yaml_version is not None:
raise ParserError(
None,
None,
'found duplicate YAML directive',
token.start_mark,
)
major, minor = token.value
if major != 1:
raise ParserError(
None,
None,
'found incompatible YAML document (version 1.* is required)',
token.start_mark,
)
yaml_version = token.value
elif token.name == 'TAG':
handle, prefix = token.value
if handle in self.tag_handles:
raise ParserError(
None,
None,
f'duplicate tag handle {handle!r}',
token.start_mark,
)
self.tag_handles[handle] = prefix
if bool(self.tag_handles):
value: Any = (yaml_version, self.tag_handles.copy())
else:
value = yaml_version, None
if self.loader is not None and hasattr(self.loader, 'tags'):
# ToDo: this is used to keep a single loaded file from losing its version
# info, but it affects following versions that have no explicit directive
self.loader.version = yaml_version
if self.loader.tags is None:
self.loader.tags = {}
for k in self.tag_handles:
self.loader.tags[k] = self.tag_handles[k]
self.loader.doc_infos[-1].tags.append((k, self.tag_handles[k]))
for key in self.DEFAULT_TAGS:
if key not in self.tag_handles:
self.tag_handles[key] = self.DEFAULT_TAGS[key]
return value
# block_node_or_indentless_sequence ::= ALIAS
# | properties (block_content | indentless_block_sequence)?
# | block_content
# | indentless_block_sequence
# block_node ::= ALIAS
# | properties block_content?
# | block_content
# flow_node ::= ALIAS
# | properties flow_content?
# | flow_content
# properties ::= TAG ANCHOR? | ANCHOR TAG?
# block_content ::= block_collection | flow_collection | SCALAR
# flow_content ::= flow_collection | SCALAR
# block_collection ::= block_sequence | block_mapping
# flow_collection ::= flow_sequence | flow_mapping
def parse_block_node(self) -> Any:
return self.parse_node(block=True)
def parse_flow_node(self) -> Any:
return self.parse_node()
def parse_block_node_or_indentless_sequence(self) -> Any:
return self.parse_node(block=True, indentless_sequence=True)
# def transform_tag(self, handle: Any, suffix: Any) -> Any:
# return self.tag_handles[handle] + suffix
def select_tag_transform(self, tag: Tag) -> None:
if tag is None:
return
tag.select_transform(False)
def parse_node(self, block: bool = False, indentless_sequence: bool = False) -> Any:
if self.scanner.check_token(AliasToken):
token = self.scanner.get_token()
event: Any = AliasEvent(token.value, token.start_mark, token.end_mark)
self.state = self.states.pop()
return event
anchor = None
tag = None
start_mark = end_mark = tag_mark = None
if self.scanner.check_token(AnchorToken):
token = self.scanner.get_token()
self.move_token_comment(token)
start_mark = token.start_mark
end_mark = token.end_mark
anchor = token.value
if self.scanner.check_token(TagToken):
token = self.scanner.get_token()
tag_mark = token.start_mark
end_mark = token.end_mark
# tag = token.value
tag = Tag(
handle=token.value[0],
suffix=token.value[1],
handles=self.tag_handles,
)
elif self.scanner.check_token(TagToken):
token = self.scanner.get_token()
try:
self.move_token_comment(token)
except NotImplementedError:
pass
start_mark = tag_mark = token.start_mark
end_mark = token.end_mark
# tag = token.value
tag = Tag(
handle=token.value[0], suffix=token.value[1], handles=self.tag_handles
)
if self.scanner.check_token(AnchorToken):
token = self.scanner.get_token()
start_mark = tag_mark = token.start_mark
end_mark = token.end_mark
anchor = token.value
if tag is not None:
self.select_tag_transform(tag)
if tag.check_handle():
raise ParserError(
'while parsing a node',
start_mark,
f'found undefined tag handle {tag.handle!r}',
tag_mark,
)
if start_mark is None:
start_mark = end_mark = self.scanner.peek_token().start_mark
event = None
implicit = tag is None or str(tag) == '!'
if indentless_sequence and self.scanner.check_token(BlockEntryToken):
comment = None
pt = self.scanner.peek_token()
if self.loader and self.loader.comment_handling is None:
if pt.comment and pt.comment[0]:
comment = [pt.comment[0], []]
pt.comment[0] = None
elif pt.comment and pt.comment[0] is None and pt.comment[1]:
comment = [None, pt.comment[1]]
pt.comment[1] = None
elif self.loader:
if pt.comment:
comment = pt.comment
end_mark = self.scanner.peek_token().end_mark
event = SequenceStartEvent(
anchor,
tag,
implicit,
start_mark,
end_mark,
flow_style=False,
comment=comment,
)
self.state = self.parse_indentless_sequence_entry
return event
if self.scanner.check_token(ScalarToken):
token = self.scanner.get_token()
# self.scanner.peek_token_same_line_comment(token)
end_mark = token.end_mark
if (token.plain and tag is None) or str(tag) == '!':
dimplicit = (True, False)
elif tag is None:
dimplicit = (False, True)
else:
dimplicit = (False, False)
event = ScalarEvent(
anchor,
tag,
dimplicit,
token.value,
start_mark,
end_mark,
style=token.style,
comment=token.comment,
)
self.state = self.states.pop()
elif self.scanner.check_token(FlowSequenceStartToken):
pt = self.scanner.peek_token()
end_mark = pt.end_mark
event = SequenceStartEvent(
anchor,
tag,
implicit,
start_mark,
end_mark,
flow_style=True,
comment=pt.comment,
)
self.state = self.parse_flow_sequence_first_entry
elif self.scanner.check_token(FlowMappingStartToken):
pt = self.scanner.peek_token()
end_mark = pt.end_mark
event = MappingStartEvent(
anchor,
tag,
implicit,
start_mark,
end_mark,
flow_style=True,
comment=pt.comment,
)
self.state = self.parse_flow_mapping_first_key
elif block and self.scanner.check_token(BlockSequenceStartToken):
end_mark = self.scanner.peek_token().start_mark
# should inserting the comment be dependent on the
# indentation?
pt = self.scanner.peek_token()
comment = pt.comment
# nprint('pt0', type(pt))
if comment is None or comment[1] is None:
comment = pt.split_old_comment()
# nprint('pt1', comment)
event = SequenceStartEvent(
anchor,
tag,
implicit,
start_mark,
end_mark,
flow_style=False,
comment=comment,
)
self.state = self.parse_block_sequence_first_entry
elif block and self.scanner.check_token(BlockMappingStartToken):
end_mark = self.scanner.peek_token().start_mark
comment = self.scanner.peek_token().comment
event = MappingStartEvent(
anchor,
tag,
implicit,
start_mark,
end_mark,
flow_style=False,
comment=comment,
)
self.state = self.parse_block_mapping_first_key
elif anchor is not None or tag is not None:
# Empty scalars are allowed even if a tag or an anchor is
# specified.
event = ScalarEvent(
anchor, tag, (implicit, False), "", start_mark, end_mark
)
self.state = self.states.pop()
else:
if block:
node = 'block'
else:
node = 'flow'
token = self.scanner.peek_token()
raise ParserError(
f'while parsing a {node!s} node',
start_mark,
f'expected the node content, but found {token.id!r}',
token.start_mark,
)
return event
# block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)*
# BLOCK-END
def parse_block_sequence_first_entry(self) -> Any:
token = self.scanner.get_token()
# move any comment from start token
# self.move_token_comment(token)
self.marks.append(token.start_mark)
return self.parse_block_sequence_entry()
def parse_block_sequence_entry(self) -> Any:
if self.scanner.check_token(BlockEntryToken):
token = self.scanner.get_token()
self.move_token_comment(token)
if not self.scanner.check_token(BlockEntryToken, BlockEndToken):
self.states.append(self.parse_block_sequence_entry)
return self.parse_block_node()
else:
self.state = self.parse_block_sequence_entry
return self.process_empty_scalar(token.end_mark)
if not self.scanner.check_token(BlockEndToken):
token = self.scanner.peek_token()
raise ParserError(
'while parsing a block collection',
self.marks[-1],
                f'expected <block end>, but found {token.id!r}',
token.start_mark,
)
token = self.scanner.get_token() # BlockEndToken
event = SequenceEndEvent(
token.start_mark, token.end_mark, comment=token.comment
)
self.state = self.states.pop()
self.marks.pop()
return event
# indentless_sequence ::= (BLOCK-ENTRY block_node?)+
# indentless_sequence?
# sequence:
# - entry
# - nested
def parse_indentless_sequence_entry(self) -> Any:
if self.scanner.check_token(BlockEntryToken):
token = self.scanner.get_token()
self.move_token_comment(token)
if not self.scanner.check_token(
BlockEntryToken,
KeyToken,
ValueToken,
BlockEndToken,
):
self.states.append(self.parse_indentless_sequence_entry)
return self.parse_block_node()
else:
self.state = self.parse_indentless_sequence_entry
return self.process_empty_scalar(token.end_mark)
token = self.scanner.peek_token()
c = None
if self.loader and self.loader.comment_handling is None:
c = token.comment
start_mark = token.start_mark
else:
start_mark = self.last_event.end_mark # type: ignore
c = self.distribute_comment(token.comment, start_mark.line) # type: ignore
event = SequenceEndEvent(start_mark, start_mark, comment=c)
self.state = self.states.pop()
return event
# block_mapping ::= BLOCK-MAPPING_START
# ((KEY block_node_or_indentless_sequence?)?
# (VALUE block_node_or_indentless_sequence?)?)*
# BLOCK-END
def parse_block_mapping_first_key(self) -> Any:
token = self.scanner.get_token()
self.marks.append(token.start_mark)
return self.parse_block_mapping_key()
def parse_block_mapping_key(self) -> Any:
if self.scanner.check_token(KeyToken):
token = self.scanner.get_token()
self.move_token_comment(token)
if not self.scanner.check_token(KeyToken, ValueToken, BlockEndToken):
self.states.append(self.parse_block_mapping_value)
return self.parse_block_node_or_indentless_sequence()
else:
self.state = self.parse_block_mapping_value
return self.process_empty_scalar(token.end_mark)
if self.resolver.processing_version > (1, 1) and self.scanner.check_token(
ValueToken
):
self.state = self.parse_block_mapping_value
return self.process_empty_scalar(self.scanner.peek_token().start_mark)
if not self.scanner.check_token(BlockEndToken):
token = self.scanner.peek_token()
raise ParserError(
'while parsing a block mapping',
self.marks[-1],
                f'expected <block end>, but found {token.id!r}',
token.start_mark,
)
token = self.scanner.get_token()
self.move_token_comment(token)
event = MappingEndEvent(token.start_mark, token.end_mark, comment=token.comment)
self.state = self.states.pop()
self.marks.pop()
return event
def parse_block_mapping_value(self) -> Any:
if self.scanner.check_token(ValueToken):
token = self.scanner.get_token()
# value token might have post comment move it to e.g. block
if self.scanner.check_token(ValueToken):
self.move_token_comment(token)
else:
if not self.scanner.check_token(KeyToken):
self.move_token_comment(token, empty=True)
# else: empty value for this key cannot move token.comment
if not self.scanner.check_token(KeyToken, ValueToken, BlockEndToken):
self.states.append(self.parse_block_mapping_key)
return self.parse_block_node_or_indentless_sequence()
else:
self.state = self.parse_block_mapping_key
comment = token.comment
if comment is None:
token = self.scanner.peek_token()
comment = token.comment
if comment:
token._comment = [None, comment[1]]
comment = [comment[0], None]
return self.process_empty_scalar(token.end_mark, comment=comment)
else:
self.state = self.parse_block_mapping_key
token = self.scanner.peek_token()
return self.process_empty_scalar(token.start_mark)
# flow_sequence ::= FLOW-SEQUENCE-START
# (flow_sequence_entry FLOW-ENTRY)*
# flow_sequence_entry?
# FLOW-SEQUENCE-END
# flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
#
# Note that while production rules for both flow_sequence_entry and
# flow_mapping_entry are equal, their interpretations are different.
# For `flow_sequence_entry`, the part `KEY flow_node? (VALUE flow_node?)?`
    # generates an inline mapping (set syntax).
def parse_flow_sequence_first_entry(self) -> Any:
token = self.scanner.get_token()
self.marks.append(token.start_mark)
return self.parse_flow_sequence_entry(first=True)
def parse_flow_sequence_entry(self, first: bool = False) -> Any:
if not self.scanner.check_token(FlowSequenceEndToken):
if not first:
if self.scanner.check_token(FlowEntryToken):
self.scanner.get_token()
else:
token = self.scanner.peek_token()
raise ParserError(
'while parsing a flow sequence',
self.marks[-1],
f"expected ',' or ']', but got {token.id!r}",
token.start_mark,
)
if self.scanner.check_token(KeyToken):
token = self.scanner.peek_token()
event: Any = MappingStartEvent(
None,
None,
True,
token.start_mark,
token.end_mark,
flow_style=True,
)
self.state = self.parse_flow_sequence_entry_mapping_key
return event
elif not self.scanner.check_token(FlowSequenceEndToken):
self.states.append(self.parse_flow_sequence_entry)
return self.parse_flow_node()
token = self.scanner.get_token()
event = SequenceEndEvent(
token.start_mark, token.end_mark, comment=token.comment
)
self.state = self.states.pop()
self.marks.pop()
return event
def parse_flow_sequence_entry_mapping_key(self) -> Any:
token = self.scanner.get_token()
if not self.scanner.check_token(
ValueToken, FlowEntryToken, FlowSequenceEndToken
):
self.states.append(self.parse_flow_sequence_entry_mapping_value)
return self.parse_flow_node()
else:
self.state = self.parse_flow_sequence_entry_mapping_value
return self.process_empty_scalar(token.end_mark)
def parse_flow_sequence_entry_mapping_value(self) -> Any:
if self.scanner.check_token(ValueToken):
token = self.scanner.get_token()
if not self.scanner.check_token(FlowEntryToken, FlowSequenceEndToken):
self.states.append(self.parse_flow_sequence_entry_mapping_end)
return self.parse_flow_node()
else:
self.state = self.parse_flow_sequence_entry_mapping_end
return self.process_empty_scalar(token.end_mark)
else:
self.state = self.parse_flow_sequence_entry_mapping_end
token = self.scanner.peek_token()
return self.process_empty_scalar(token.start_mark)
def parse_flow_sequence_entry_mapping_end(self) -> Any:
self.state = self.parse_flow_sequence_entry
token = self.scanner.peek_token()
return MappingEndEvent(token.start_mark, token.start_mark)
# flow_mapping ::= FLOW-MAPPING-START
# (flow_mapping_entry FLOW-ENTRY)*
# flow_mapping_entry?
# FLOW-MAPPING-END
# flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
def parse_flow_mapping_first_key(self) -> Any:
token = self.scanner.get_token()
self.marks.append(token.start_mark)
return self.parse_flow_mapping_key(first=True)
def parse_flow_mapping_key(self, first: Any = False) -> Any:
if not self.scanner.check_token(FlowMappingEndToken):
if not first:
if self.scanner.check_token(FlowEntryToken):
self.scanner.get_token()
else:
token = self.scanner.peek_token()
raise ParserError(
'while parsing a flow mapping',
self.marks[-1],
f"expected ',' or '}}', but got {token.id!r}",
token.start_mark,
)
if self.scanner.check_token(KeyToken):
token = self.scanner.get_token()
if not self.scanner.check_token(
ValueToken,
FlowEntryToken,
FlowMappingEndToken,
):
self.states.append(self.parse_flow_mapping_value)
return self.parse_flow_node()
else:
self.state = self.parse_flow_mapping_value
return self.process_empty_scalar(token.end_mark)
elif self.resolver.processing_version > (1, 1) and self.scanner.check_token(
ValueToken,
):
self.state = self.parse_flow_mapping_value
return self.process_empty_scalar(self.scanner.peek_token().end_mark)
elif not self.scanner.check_token(FlowMappingEndToken):
self.states.append(self.parse_flow_mapping_empty_value)
return self.parse_flow_node()
token = self.scanner.get_token()
event = MappingEndEvent(token.start_mark, token.end_mark, comment=token.comment)
self.state = self.states.pop()
self.marks.pop()
return event
def parse_flow_mapping_value(self) -> Any:
if self.scanner.check_token(ValueToken):
token = self.scanner.get_token()
if not self.scanner.check_token(FlowEntryToken, FlowMappingEndToken):
self.states.append(self.parse_flow_mapping_key)
return self.parse_flow_node()
else:
self.state = self.parse_flow_mapping_key
return self.process_empty_scalar(token.end_mark)
else:
self.state = self.parse_flow_mapping_key
token = self.scanner.peek_token()
return self.process_empty_scalar(token.start_mark)
def parse_flow_mapping_empty_value(self) -> Any:
self.state = self.parse_flow_mapping_key
return self.process_empty_scalar(self.scanner.peek_token().start_mark)
def process_empty_scalar(self, mark: Any, comment: Any = None) -> Any:
return ScalarEvent(None, None, (True, False), "", mark, mark, comment=comment)
def move_token_comment(
self,
token: Any,
nt: Optional[Any] = None,
empty: Optional[bool] = False,
) -> Any:
pass
class RoundTripParser(Parser):
"""roundtrip is a safe loader, that wants to see the unmangled tag"""
def select_tag_transform(self, tag: Tag) -> None:
if tag is None:
return
tag.select_transform(True)
def move_token_comment(
self,
token: Any,
nt: Optional[Any] = None,
empty: Optional[bool] = False,
) -> Any:
token.move_old_comment(
self.scanner.peek_token() if nt is None else nt, empty=empty
)
class RoundTripParserSC(RoundTripParser):
"""roundtrip is a safe loader, that wants to see the unmangled tag"""
# some of the differences are based on the superclass testing
# if self.loader.comment_handling is not None
def move_token_comment(
self: Any,
token: Any,
nt: Any = None,
empty: Optional[bool] = False,
) -> None:
token.move_new_comment(
self.scanner.peek_token() if nt is None else nt, empty=empty
)
def distribute_comment(self, comment: Any, line: Any) -> Any:
# ToDo, look at indentation of the comment to determine attachment
if comment is None:
return None
if not comment[0]:
return None
# if comment[0][0] != line + 1:
# nprintf('>>>dcxxx', comment, line)
assert comment[0][0] == line + 1
# if comment[0] - line > 1:
# return
typ = self.loader.comment_handling & 0b11
# nprintf('>>>dca', comment, line, typ)
if typ == C_POST:
return None
if typ == C_PRE:
c = [None, None, comment[0]]
comment[0] = None
return c
# nprintf('>>>dcb', comment[0])
for _idx, cmntidx in enumerate(comment[0]):
# nprintf('>>>dcb', cmntidx)
if isinstance(self.scanner.comments[cmntidx], BlankLineComment):
break
else:
return None # no space found
if _idx == 0:
return None # first line was blank
# nprintf('>>>dcc', idx)
if typ == C_SPLIT_ON_FIRST_BLANK:
c = [None, None, comment[0][:_idx]]
comment[0] = comment[0][_idx:]
return c
raise NotImplementedError # reserved
python-ruyaml-0.92.1/lib/ruyaml/py.typed 0000664 0000000 0000000 00000000000 15056754172 0020213 0 ustar 00root root 0000000 0000000 python-ruyaml-0.92.1/lib/ruyaml/reader.py 0000664 0000000 0000000 00000024156 15056754172 0020352 0 ustar 00root root 0000000 0000000 from __future__ import annotations
import codecs
from typing import Any, Optional, Text, Tuple
from ruyaml.error import FileMark, StringMark, YAMLError, YAMLStreamError
from ruyaml.util import RegExp
# This module contains abstractions for the input stream. You don't have to
# look further, there is no pretty code.
#
# We define two classes here.
#
# Mark(source, line, column)
# It's just a record and its only use is producing nice error messages.
# Parser does not use it for any other purposes.
#
# Reader(source, data)
# Reader determines the encoding of `data` and converts it to unicode.
# Reader provides the following methods and attributes:
# reader.peek(length=1) - return the next `length` characters
# reader.forward(length=1) - move the current position forward by `length`
# characters.
# reader.index - the number of the current character.
# reader.line, reader.column - the line and the column of the current
# character.
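# A minimal usage sketch (illustrative only):
#
#   r = Reader('key: value\n')
#   r.peek()                   # -> 'k' (does not advance)
#   r.prefix(4)                # -> 'key:'
#   r.forward(4)               # consume 'key:'
#   r.index, r.line, r.column  # -> (4, 0, 4)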
if False: # MYPY
from typing import Any, Dict, List, Optional, Text, Tuple, Union # NOQA
# from ruyaml.compat import StreamTextType # NOQA
__all__ = ['Reader', 'ReaderError']
class ReaderError(YAMLError):
def __init__(
self,
name: Any,
position: Any,
character: Any,
encoding: Any,
reason: Any,
) -> None:
self.name = name
self.character = character
self.position = position
self.encoding = encoding
self.reason = reason
def __str__(self) -> Any:
if isinstance(self.character, bytes):
return (
f"'{self.encoding!s}' codec can't decode byte #x{ord(self.character):02x}: "
f'{self.reason!s}\n'
f' in "{self.name!s}", position {self.position:d}'
)
else:
return (
f'unacceptable character #x{self.character:04x}: {self.reason!s}\n'
f' in "{self.name!s}", position {self.position:d}'
)
class Reader:
# Reader:
# - determines the data encoding and converts it to a unicode string,
# - checks if characters are in allowed range,
# - adds '\0' to the end.
# Reader accepts
# - a `bytes` object,
# - a `str` object,
# - a file-like object with its `read` method returning `str`,
# - a file-like object with its `read` method returning `unicode`.
# Yeah, it's ugly and slow.
def __init__(self, stream: Any, loader: Any = None) -> None:
self.loader = loader
if self.loader is not None and getattr(self.loader, '_reader', None) is None:
self.loader._reader = self
self.reset_reader()
self.stream: Any = stream # as .read is called
def reset_reader(self) -> None:
self.name: Any = None
self.stream_pointer = 0
self.eof = True
self.buffer = ""
self.pointer = 0
self.raw_buffer: Any = None
self.raw_decode = None
self.encoding: Optional[Text] = None
self.index = 0
self.line = 0
self.column = 0
@property
def stream(self) -> Any:
try:
return self._stream
except AttributeError:
raise YAMLStreamError('input stream needs to be specified')
@stream.setter
def stream(self, val: Any) -> None:
if val is None:
return
self._stream = None
if isinstance(val, str):
self.name = ''
self.check_printable(val)
self.buffer = val + '\0'
elif isinstance(val, bytes):
self.name = ''
self.raw_buffer = val
self.determine_encoding()
else:
if not hasattr(val, 'read'):
raise YAMLStreamError('stream argument needs to have a read() method')
self._stream = val
self.name = getattr(self.stream, 'name', '')
self.eof = False
self.raw_buffer = None
self.determine_encoding()
def peek(self, index: int = 0) -> Text:
try:
return self.buffer[self.pointer + index]
except IndexError:
self.update(index + 1)
return self.buffer[self.pointer + index]
def prefix(self, length: int = 1) -> Any:
if self.pointer + length >= len(self.buffer):
self.update(length)
return self.buffer[self.pointer : self.pointer + length]
def forward_1_1(self, length: int = 1) -> None:
if self.pointer + length + 1 >= len(self.buffer):
self.update(length + 1)
while length != 0:
ch = self.buffer[self.pointer]
self.pointer += 1
self.index += 1
if ch in '\n\x85\u2028\u2029' or (
ch == '\r' and self.buffer[self.pointer] != '\n'
):
self.line += 1
self.column = 0
elif ch != '\uFEFF':
self.column += 1
length -= 1
def forward(self, length: int = 1) -> None:
if self.pointer + length + 1 >= len(self.buffer):
self.update(length + 1)
while length != 0:
ch = self.buffer[self.pointer]
self.pointer += 1
self.index += 1
if ch == '\n' or (ch == '\r' and self.buffer[self.pointer] != '\n'):
self.line += 1
self.column = 0
elif ch != '\uFEFF':
self.column += 1
length -= 1
def get_mark(self) -> Any:
if self.stream is None:
return StringMark(
self.name,
self.index,
self.line,
self.column,
self.buffer,
self.pointer,
)
else:
return FileMark(self.name, self.index, self.line, self.column)
def determine_encoding(self) -> None:
while not self.eof and (self.raw_buffer is None or len(self.raw_buffer) < 2):
self.update_raw()
if isinstance(self.raw_buffer, bytes):
if self.raw_buffer.startswith(codecs.BOM_UTF16_LE):
self.raw_decode = codecs.utf_16_le_decode # type: ignore
self.encoding = 'utf-16-le'
elif self.raw_buffer.startswith(codecs.BOM_UTF16_BE):
self.raw_decode = codecs.utf_16_be_decode # type: ignore
self.encoding = 'utf-16-be'
else:
self.raw_decode = codecs.utf_8_decode # type: ignore
self.encoding = 'utf-8'
self.update(1)
NON_PRINTABLE = RegExp(
'[^\x09\x0A\x0D\x20-\x7E\x85'
'\xA0-\uD7FF'
'\uE000-\uFFFD'
'\U00010000-\U0010FFFF'
']' # NOQA
)
_printable_ascii = ('\x09\x0A\x0D' + "".join(map(chr, range(0x20, 0x7F)))).encode(
'ascii'
)
@classmethod
def _get_non_printable_ascii(cls: Text, data: bytes) -> Optional[Tuple[int, Text]]: # type: ignore # NOQA
ascii_bytes = data.encode('ascii') # type: ignore
non_printables = ascii_bytes.translate(None, cls._printable_ascii) # type: ignore
if not non_printables:
return None
non_printable = non_printables[:1]
return ascii_bytes.index(non_printable), non_printable.decode('ascii')
@classmethod
def _get_non_printable_regex(cls, data: Text) -> Optional[Tuple[int, Text]]:
match = cls.NON_PRINTABLE.search(data)
if not bool(match):
return None
return match.start(), match.group()
@classmethod
def _get_non_printable(cls, data: Text) -> Optional[Tuple[int, Text]]:
try:
return cls._get_non_printable_ascii(data) # type: ignore
except UnicodeEncodeError:
return cls._get_non_printable_regex(data)
def check_printable(self, data: Any) -> None:
non_printable_match = self._get_non_printable(data)
if non_printable_match is not None:
start, character = non_printable_match
position = self.index + (len(self.buffer) - self.pointer) + start
raise ReaderError(
self.name,
position,
ord(character),
'unicode',
'special characters are not allowed',
)
def update(self, length: int) -> None:
if self.raw_buffer is None:
return
self.buffer = self.buffer[self.pointer :]
self.pointer = 0
while len(self.buffer) < length:
if not self.eof:
self.update_raw()
if self.raw_decode is not None:
try:
data, converted = self.raw_decode(
self.raw_buffer, 'strict', self.eof
)
except UnicodeDecodeError as exc:
character = self.raw_buffer[exc.start]
                    if self.stream is not None:
                        position = (
                            self.stream_pointer - len(self.raw_buffer) + exc.start
                        )
else:
position = exc.start
raise ReaderError(
self.name, position, character, exc.encoding, exc.reason
)
else:
data = self.raw_buffer
converted = len(data)
self.check_printable(data)
self.buffer += data
self.raw_buffer = self.raw_buffer[converted:]
if self.eof:
self.buffer += '\0'
self.raw_buffer = None
break
def update_raw(self, size: Optional[int] = None) -> None:
if size is None:
size = 4096
data = self.stream.read(size)
if self.raw_buffer is None:
self.raw_buffer = data
else:
self.raw_buffer += data
self.stream_pointer += len(data)
if not data:
self.eof = True
# try:
# import psyco
# psyco.bind(Reader)
# except ImportError:
# pass
python-ruyaml-0.92.1/lib/ruyaml/representer.py 0000664 0000000 0000000 00000131004 15056754172 0021435 0 ustar 00root root 0000000 0000000 from __future__ import annotations
import base64
import collections
import copyreg
import datetime
import types
from ruyaml.anchor import Anchor
from ruyaml.comments import (
CommentedKeyMap,
CommentedKeySeq,
CommentedMap,
CommentedOrderedMap,
CommentedSeq,
CommentedSet,
TaggedScalar,
comment_attrib,
merge_attrib,
)
from ruyaml.compat import nprint, nprintf, ordereddict # NOQA
from ruyaml.error import * # NOQA
from ruyaml.nodes import * # NOQA
from ruyaml.scalarbool import ScalarBoolean
from ruyaml.scalarfloat import ScalarFloat
from ruyaml.scalarint import BinaryInt, HexCapsInt, HexInt, OctalInt, ScalarInt
from ruyaml.scalarstring import (
DoubleQuotedScalarString,
FoldedScalarString,
LiteralScalarString,
PlainScalarString,
SingleQuotedScalarString,
)
from ruyaml.timestamp import TimeStamp
if False: # MYPY
from typing import Any, Dict, List, Optional, Text, Union # NOQA
# fmt: off
__all__ = ['BaseRepresenter', 'SafeRepresenter', 'Representer',
'RepresenterError', 'RoundTripRepresenter']
# fmt: on
class RepresenterError(YAMLError):
pass
class BaseRepresenter:
yaml_representers: Dict[Any, Any] = {}
yaml_multi_representers: Dict[Any, Any] = {}
def __init__(
self: Any,
default_style: Any = None,
default_flow_style: Any = None,
dumper: Any = None,
) -> None:
self.dumper = dumper
if self.dumper is not None:
self.dumper._representer = self
self.default_style = default_style
self.default_flow_style = default_flow_style
self.represented_objects: Dict[Any, Any] = {}
self.object_keeper: List[Any] = []
self.alias_key: Optional[int] = None
self.sort_base_mapping_type_on_output = True
@property
def serializer(self) -> Any:
try:
if hasattr(self.dumper, 'typ'):
return self.dumper.serializer # type: ignore
return self.dumper._serializer # type: ignore
except AttributeError:
return self # cyaml
def represent(self, data: Any) -> None:
node = self.represent_data(data)
self.serializer.serialize(node)
self.represented_objects = {}
self.object_keeper = []
self.alias_key = None
def represent_data(self, data: Any) -> Any:
if self.ignore_aliases(data):
self.alias_key = None
else:
self.alias_key = id(data)
if self.alias_key is not None:
if self.alias_key in self.represented_objects:
node = self.represented_objects[self.alias_key]
# if node is None:
# raise RepresenterError(
# f"recursive objects are not allowed: {data!r}")
return node
# self.represented_objects[alias_key] = None
self.object_keeper.append(data)
data_types = type(data).__mro__
if data_types[0] in self.yaml_representers:
node = self.yaml_representers[data_types[0]](self, data)
else:
for data_type in data_types:
if data_type in self.yaml_multi_representers:
node = self.yaml_multi_representers[data_type](self, data)
break
else:
if None in self.yaml_multi_representers:
node = self.yaml_multi_representers[None](self, data)
elif None in self.yaml_representers:
node = self.yaml_representers[None](self, data)
else:
node = ScalarNode(None, str(data))
# if alias_key is not None:
# self.represented_objects[alias_key] = node
return node
def represent_key(self, data: Any) -> Any:
"""
David Fraser: Extract a method to represent keys in mappings, so that
a subclass can choose not to quote them (for example)
used in represent_mapping
https://bitbucket.org/davidfraser/pyyaml/commits/d81df6eb95f20cac4a79eed95ae553b5c6f77b8c
"""
return self.represent_data(data)
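    # Illustrative override sketch (PlainKeyRepresenter is hypothetical, not
    # part of the library): a subclass can force string keys to be emitted
    # unquoted by representing them with the plain style.
    #
    #   class PlainKeyRepresenter(RoundTripRepresenter):
    #       def represent_key(self, data):
    #           if isinstance(data, str):
    #               return self.represent_scalar('tag:yaml.org,2002:str', data, style='')
    #           return super().represent_key(data)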
@classmethod
def add_representer(cls, data_type: Any, representer: Any) -> None:
if 'yaml_representers' not in cls.__dict__:
cls.yaml_representers = cls.yaml_representers.copy()
cls.yaml_representers[data_type] = representer
@classmethod
def add_multi_representer(cls, data_type: Any, representer: Any) -> None:
if 'yaml_multi_representers' not in cls.__dict__:
cls.yaml_multi_representers = cls.yaml_multi_representers.copy()
cls.yaml_multi_representers[data_type] = representer
def represent_scalar(
self,
tag: Any,
value: Any,
style: Any = None,
anchor: Any = None,
) -> ScalarNode:
if style is None:
style = self.default_style
comment = None
if style and style[0] in '|>':
comment = getattr(value, 'comment', None)
if comment:
comment = [None, [comment]]
if isinstance(tag, str):
tag = Tag(suffix=tag)
node = ScalarNode(tag, value, style=style, comment=comment, anchor=anchor)
if self.alias_key is not None:
self.represented_objects[self.alias_key] = node
return node
def represent_sequence(
self,
tag: Any,
sequence: Any,
flow_style: Any = None,
) -> SequenceNode:
value: List[Any] = []
if isinstance(tag, str):
tag = Tag(suffix=tag)
node = SequenceNode(tag, value, flow_style=flow_style)
if self.alias_key is not None:
self.represented_objects[self.alias_key] = node
best_style = True
for item in sequence:
node_item = self.represent_data(item)
if not (isinstance(node_item, ScalarNode) and not node_item.style):
best_style = False
value.append(node_item)
if flow_style is None:
if self.default_flow_style is not None:
node.flow_style = self.default_flow_style
else:
node.flow_style = best_style
return node
def represent_omap(
self, tag: Any, omap: Any, flow_style: Any = None
) -> SequenceNode:
value: List[Any] = []
if isinstance(tag, str):
tag = Tag(suffix=tag)
node = SequenceNode(tag, value, flow_style=flow_style)
if self.alias_key is not None:
self.represented_objects[self.alias_key] = node
best_style = True
for item_key in omap:
item_val = omap[item_key]
node_item = self.represent_data({item_key: item_val})
# if not (isinstance(node_item, ScalarNode) \
# and not node_item.style):
# best_style = False
value.append(node_item)
if flow_style is None:
if self.default_flow_style is not None:
node.flow_style = self.default_flow_style
else:
node.flow_style = best_style
return node
def represent_mapping(
self, tag: Any, mapping: Any, flow_style: Any = None
) -> MappingNode:
value: List[Any] = []
if isinstance(tag, str):
tag = Tag(suffix=tag)
node = MappingNode(tag, value, flow_style=flow_style)
if self.alias_key is not None:
self.represented_objects[self.alias_key] = node
best_style = True
if hasattr(mapping, 'items'):
mapping = list(mapping.items())
if self.sort_base_mapping_type_on_output:
try:
mapping = sorted(mapping)
except TypeError:
pass
for item_key, item_value in mapping:
node_key = self.represent_key(item_key)
node_value = self.represent_data(item_value)
if not (isinstance(node_key, ScalarNode) and not node_key.style):
best_style = False
if not (isinstance(node_value, ScalarNode) and not node_value.style):
best_style = False
value.append((node_key, node_value))
if flow_style is None:
if self.default_flow_style is not None:
node.flow_style = self.default_flow_style
else:
node.flow_style = best_style
return node
def ignore_aliases(self, data: Any) -> bool:
return False
class SafeRepresenter(BaseRepresenter):
def ignore_aliases(self, data: Any) -> bool:
# https://docs.python.org/3/reference/expressions.html#parenthesized-forms :
# "i.e. two occurrences of the empty tuple may or may not yield the same object"
# so "data is ()" should not be used
if data is None or (isinstance(data, tuple) and data == ()):
return True
if isinstance(data, (bytes, str, bool, int, float)):
return True
return False
def represent_none(self, data: Any) -> ScalarNode:
return self.represent_scalar('tag:yaml.org,2002:null', 'null')
def represent_str(self, data: Any) -> Any:
return self.represent_scalar('tag:yaml.org,2002:str', data)
def represent_binary(self, data: Any) -> ScalarNode:
if hasattr(base64, 'encodebytes'):
data = base64.encodebytes(data).decode('ascii')
else:
# check py2 only?
data = base64.encodestring(data).decode('ascii') # type: ignore
return self.represent_scalar('tag:yaml.org,2002:binary', data, style='|')
def represent_bool(self, data: Any, anchor: Optional[Any] = None) -> ScalarNode:
try:
value = self.dumper.boolean_representation[bool(data)] # type: ignore
except AttributeError:
if data:
value = 'true'
else:
value = 'false'
return self.represent_scalar('tag:yaml.org,2002:bool', value, anchor=anchor)
def represent_int(self, data: Any) -> ScalarNode:
return self.represent_scalar('tag:yaml.org,2002:int', str(data))
inf_value = 1e300
while repr(inf_value) != repr(inf_value * inf_value):
inf_value *= inf_value
def represent_float(self, data: Any) -> ScalarNode:
if data != data or (data == 0.0 and data == 1.0):
value = '.nan'
elif data == self.inf_value:
value = '.inf'
elif data == -self.inf_value:
value = '-.inf'
else:
value = repr(data).lower()
if getattr(self.serializer, 'use_version', None) == (1, 1):
if '.' not in value and 'e' in value:
# Note that in some cases `repr(data)` represents a float number
# without the decimal parts. For instance:
# >>> repr(1e17)
# '1e17'
# Unfortunately, this is not a valid float representation according
# to the definition of the `!!float` tag in YAML 1.1. We fix
# this by adding '.0' before the 'e' symbol.
value = value.replace('e', '.0e', 1)
return self.represent_scalar('tag:yaml.org,2002:float', value)
def represent_list(self, data: Any) -> SequenceNode:
# pairs = (len(data) > 0 and isinstance(data, list))
# if pairs:
# for item in data:
# if not isinstance(item, tuple) or len(item) != 2:
# pairs = False
# break
# if not pairs:
return self.represent_sequence('tag:yaml.org,2002:seq', data)
# value = []
# for item_key, item_value in data:
# value.append(self.represent_mapping('tag:yaml.org,2002:map',
# [(item_key, item_value)]))
# return SequenceNode('tag:yaml.org,2002:pairs', value)
def represent_dict(self, data: Any) -> MappingNode:
return self.represent_mapping('tag:yaml.org,2002:map', data)
def represent_ordereddict(self, data: Any) -> SequenceNode:
return self.represent_omap('tag:yaml.org,2002:omap', data)
def represent_set(self, data: Any) -> MappingNode:
value: Dict[Any, None] = {}
for key in data:
value[key] = None
return self.represent_mapping('tag:yaml.org,2002:set', value)
def represent_date(self, data: Any) -> ScalarNode:
value = data.isoformat()
return self.represent_scalar('tag:yaml.org,2002:timestamp', value)
def represent_datetime(self, data: Any) -> ScalarNode:
value = data.isoformat(' ')
return self.represent_scalar('tag:yaml.org,2002:timestamp', value)
def represent_yaml_object(
self,
tag: Any,
data: Any,
cls: Any,
flow_style: Any = None,
) -> MappingNode:
if hasattr(data, '__getstate__'):
state = data.__getstate__()
else:
state = data.__dict__.copy()
return self.represent_mapping(tag, state, flow_style=flow_style)
def represent_undefined(self, data: Any) -> None:
raise RepresenterError(
f'cannot represent {data.__class__.__name__!r}: {data!s}'
)
SafeRepresenter.add_representer(type(None), SafeRepresenter.represent_none)
SafeRepresenter.add_representer(str, SafeRepresenter.represent_str)
SafeRepresenter.add_representer(bytes, SafeRepresenter.represent_binary)
SafeRepresenter.add_representer(bool, SafeRepresenter.represent_bool)
SafeRepresenter.add_representer(int, SafeRepresenter.represent_int)
SafeRepresenter.add_representer(float, SafeRepresenter.represent_float)
SafeRepresenter.add_representer(list, SafeRepresenter.represent_list)
SafeRepresenter.add_representer(tuple, SafeRepresenter.represent_list)
SafeRepresenter.add_representer(dict, SafeRepresenter.represent_dict)
SafeRepresenter.add_representer(set, SafeRepresenter.represent_set)
SafeRepresenter.add_representer(ordereddict, SafeRepresenter.represent_ordereddict)
SafeRepresenter.add_representer(
collections.OrderedDict,
SafeRepresenter.represent_ordereddict,
)
SafeRepresenter.add_representer(datetime.date, SafeRepresenter.represent_date)
SafeRepresenter.add_representer(datetime.datetime, SafeRepresenter.represent_datetime)
SafeRepresenter.add_representer(None, SafeRepresenter.represent_undefined)
class Representer(SafeRepresenter):
def represent_complex(self, data: Any) -> Any:
if data.imag == 0.0:
data = repr(data.real)
elif data.real == 0.0:
data = f'{data.imag!r}j'
elif data.imag > 0:
data = f'{data.real!r}+{data.imag!r}j'
else:
data = f'{data.real!r}{data.imag!r}j'
return self.represent_scalar('tag:yaml.org,2002:python/complex', data)
def represent_tuple(self, data: Any) -> SequenceNode:
return self.represent_sequence('tag:yaml.org,2002:python/tuple', data)
def represent_name(self, data: Any) -> ScalarNode:
try:
name = f'{data.__module__!s}.{data.__qualname__!s}'
except AttributeError:
# ToDo: check if this can be reached in Py3
name = f'{data.__module__!s}.{data.__name__!s}'
return self.represent_scalar('tag:yaml.org,2002:python/name:' + name, "")
def represent_module(self, data: Any) -> ScalarNode:
return self.represent_scalar(
'tag:yaml.org,2002:python/module:' + data.__name__, ""
)
def represent_object(self, data: Any) -> Union[SequenceNode, MappingNode]:
# We use __reduce__ API to save the data. data.__reduce__ returns
# a tuple of length 2-5:
# (function, args, state, listitems, dictitems)
# For reconstructing, we calls function(*args), then set its state,
# listitems, and dictitems if they are not None.
# A special case is when function.__name__ == '__newobj__'. In this
# case we create the object with args[0].__new__(*args).
# Another special case is when __reduce__ returns a string - we don't
# support it.
# We produce a !!python/object, !!python/object/new or
# !!python/object/apply node.
cls = type(data)
if cls in copyreg.dispatch_table:
reduce: Any = copyreg.dispatch_table[cls](data)
elif hasattr(data, '__reduce_ex__'):
reduce = data.__reduce_ex__(2)
elif hasattr(data, '__reduce__'):
reduce = data.__reduce__()
else:
raise RepresenterError(f'cannot represent object: {data!r}')
reduce = (list(reduce) + [None] * 5)[:5]
function, args, state, listitems, dictitems = reduce
args = list(args)
if state is None:
state = {}
if listitems is not None:
listitems = list(listitems)
if dictitems is not None:
dictitems = dict(dictitems)
if function.__name__ == '__newobj__':
function = args[0]
args = args[1:]
tag = 'tag:yaml.org,2002:python/object/new:'
newobj = True
else:
tag = 'tag:yaml.org,2002:python/object/apply:'
newobj = False
try:
function_name = f'{function.__module__!s}.{function.__qualname__!s}'
except AttributeError:
# ToDo: check if this can be reached in Py3
function_name = f'{function.__module__!s}.{function.__name__!s}'
if (
not args
and not listitems
and not dictitems
and isinstance(state, dict)
and newobj
):
return self.represent_mapping(
'tag:yaml.org,2002:python/object:' + function_name,
state,
)
if not listitems and not dictitems and isinstance(state, dict) and not state:
return self.represent_sequence(tag + function_name, args)
value = {}
if args:
value['args'] = args
if state or not isinstance(state, dict):
value['state'] = state
if listitems:
value['listitems'] = listitems
if dictitems:
value['dictitems'] = dictitems
return self.represent_mapping(tag + function_name, value)
Representer.add_representer(complex, Representer.represent_complex)
Representer.add_representer(tuple, Representer.represent_tuple)
Representer.add_representer(type, Representer.represent_name)
Representer.add_representer(types.FunctionType, Representer.represent_name)
Representer.add_representer(types.BuiltinFunctionType, Representer.represent_name)
Representer.add_representer(types.ModuleType, Representer.represent_module)
Representer.add_multi_representer(object, Representer.represent_object)
Representer.add_multi_representer(type, Representer.represent_name)
class RoundTripRepresenter(SafeRepresenter):
# need to add type here and write out the .comment
# in serializer and emitter
def __init__(
self,
default_style: Any = None,
default_flow_style: Any = None,
dumper: Any = None,
) -> None:
if not hasattr(dumper, 'typ') and default_flow_style is None:
default_flow_style = False
SafeRepresenter.__init__(
self,
default_style=default_style,
default_flow_style=default_flow_style,
dumper=dumper,
)
def ignore_aliases(self, data: Any) -> bool:
try:
if data.anchor is not None and data.anchor.value is not None:
return False
except AttributeError:
pass
return SafeRepresenter.ignore_aliases(self, data)
def represent_none(self, data: Any) -> ScalarNode:
if (
len(self.represented_objects) == 0
and not self.serializer.use_explicit_start
):
# this will be open ended (although it is not yet)
return self.represent_scalar('tag:yaml.org,2002:null', 'null')
return self.represent_scalar('tag:yaml.org,2002:null', "")
def represent_literal_scalarstring(self, data: Any) -> ScalarNode:
tag = None
style = '|'
anchor = data.yaml_anchor(any=True)
tag = 'tag:yaml.org,2002:str'
return self.represent_scalar(tag, data, style=style, anchor=anchor)
represent_preserved_scalarstring = represent_literal_scalarstring
def represent_folded_scalarstring(self, data: Any) -> ScalarNode:
tag = None
style = '>'
anchor = data.yaml_anchor(any=True)
for fold_pos in reversed(getattr(data, 'fold_pos', [])):
if (
data[fold_pos] == ' '
and (fold_pos > 0 and not data[fold_pos - 1].isspace())
and (fold_pos < len(data) and not data[fold_pos + 1].isspace())
):
data = data[:fold_pos] + '\a' + data[fold_pos:]
tag = 'tag:yaml.org,2002:str'
return self.represent_scalar(tag, data, style=style, anchor=anchor)
def represent_single_quoted_scalarstring(self, data: Any) -> ScalarNode:
tag = None
style = "'"
anchor = data.yaml_anchor(any=True)
tag = 'tag:yaml.org,2002:str'
return self.represent_scalar(tag, data, style=style, anchor=anchor)
def represent_double_quoted_scalarstring(self, data: Any) -> ScalarNode:
tag = None
style = '"'
anchor = data.yaml_anchor(any=True)
tag = 'tag:yaml.org,2002:str'
return self.represent_scalar(tag, data, style=style, anchor=anchor)
def represent_plain_scalarstring(self, data: Any) -> ScalarNode:
tag = None
style = ''
anchor = data.yaml_anchor(any=True)
tag = 'tag:yaml.org,2002:str'
return self.represent_scalar(tag, data, style=style, anchor=anchor)
def insert_underscore(
self,
prefix: Any,
s: Any,
underscore: Any,
anchor: Any = None,
) -> ScalarNode:
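# Illustrative example (not from the original source): with prefix='' and
# underscore == [3, False, False], s == '1234567' is re-grouped to '1_234_567';
# underscore[1] / underscore[2] add a leading / trailing underscore respectively.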
if underscore is None:
return self.represent_scalar(
'tag:yaml.org,2002:int', prefix + s, anchor=anchor
)
if underscore[0]:
sl = list(s)
pos = len(s) - underscore[0]
while pos > 0:
sl.insert(pos, '_')
pos -= underscore[0]
s = "".join(sl)
if underscore[1]:
s = '_' + s
if underscore[2]:
s += '_'
return self.represent_scalar('tag:yaml.org,2002:int', prefix + s, anchor=anchor)
def represent_scalar_int(self, data: Any) -> ScalarNode:
if data._width is not None:
s = f'{data:0{data._width}d}'
else:
s = format(data, 'd')
anchor = data.yaml_anchor(any=True)
return self.insert_underscore("", s, data._underscore, anchor=anchor)
def represent_binary_int(self, data: Any) -> ScalarNode:
if data._width is not None:
# cannot use '{:#0{}b}', that strips the zeros
s = f'{data:0{data._width}b}'
else:
s = format(data, 'b')
anchor = data.yaml_anchor(any=True)
return self.insert_underscore('0b', s, data._underscore, anchor=anchor)
def represent_octal_int(self, data: Any) -> ScalarNode:
if data._width is not None:
# cannot use '{:#0{}o}', that strips the zeros
s = f'{data:0{data._width}o}'
else:
s = format(data, 'o')
anchor = data.yaml_anchor(any=True)
prefix = '0o'
if getattr(self.serializer, 'use_version', None) == (1, 1):
prefix = '0'
return self.insert_underscore(prefix, s, data._underscore, anchor=anchor)
def represent_hex_int(self, data: Any) -> ScalarNode:
if data._width is not None:
# cannot use '{:#0{}x}', that strips the zeros
s = f'{data:0{data._width}x}'
else:
s = format(data, 'x')
anchor = data.yaml_anchor(any=True)
return self.insert_underscore('0x', s, data._underscore, anchor=anchor)
def represent_hex_caps_int(self, data: Any) -> ScalarNode:
if data._width is not None:
# cannot use '{:#0{}X}', that strips the zeros
s = f'{data:0{data._width}X}'
else:
s = format(data, 'X')
anchor = data.yaml_anchor(any=True)
return self.insert_underscore('0x', s, data._underscore, anchor=anchor)
def represent_scalar_float(self, data: Any) -> ScalarNode:
"""this is way more complicated"""
value = None
anchor = data.yaml_anchor(any=True)
if data != data or (data == 0.0 and data == 1.0):
value = '.nan'
elif data == self.inf_value:
value = '.inf'
elif data == -self.inf_value:
value = '-.inf'
if value:
return self.represent_scalar(
'tag:yaml.org,2002:float', value, anchor=anchor
)
if data._exp is None and data._prec > 0 and data._prec == data._width - 1:
# no exponent, but trailing dot
value = f'{data._m_sign if data._m_sign else ""}{abs(int(data)):d}.'
elif data._exp is None:
# no exponent, "normal" dot
prec = data._prec
ms = data._m_sign if data._m_sign else ""
if prec < 0:
value = f'{ms}{abs(int(data)):0{data._width - len(ms)}d}'
else:
# -1 for the dot
value = f'{ms}{abs(data):0{data._width - len(ms)}.{data._width - prec - 1}f}'
if prec == 0 or (prec == 1 and ms != ""):
value = value.replace('0.', '.')
while len(value) < data._width:
value += '0'
else:
# exponent
(
m,
es,
) = f'{data:{data._width}.{data._width + (1 if data._m_sign else 0)}e}'.split(
'e'
)
w = data._width if data._prec > 0 else (data._width + 1)
if data < 0:
w += 1
m = m[:w]
e = int(es)
m1, m2 = m.split('.') # always second?
while len(m1) + len(m2) < data._width - (1 if data._prec >= 0 else 0):
m2 += '0'
if data._m_sign and data > 0:
m1 = '+' + m1
esgn = '+' if data._e_sign else ""
if data._prec < 0: # mantissa without dot
if m2 != '0':
e -= len(m2)
else:
m2 = ""
while (len(m1) + len(m2) - (1 if data._m_sign else 0)) < data._width:
m2 += '0'
e -= 1
value = m1 + m2 + data._exp + f'{e:{esgn}0{data._e_width}d}'
elif data._prec == 0: # mantissa with trailing dot
e -= len(m2)
value = m1 + m2 + '.' + data._exp + f'{e:{esgn}0{data._e_width}d}'
else:
if data._m_lead0 > 0:
m2 = '0' * (data._m_lead0 - 1) + m1 + m2
m1 = '0'
m2 = m2[: -data._m_lead0] # these should be zeros
e += data._m_lead0
while len(m1) < data._prec:
m1 += m2[0]
m2 = m2[1:]
e -= 1
value = m1 + '.' + m2 + data._exp + f'{e:{esgn}0{data._e_width}d}'
if value is None:
value = repr(data).lower()
return self.represent_scalar('tag:yaml.org,2002:float', value, anchor=anchor)
def represent_sequence(
self,
tag: Any,
sequence: Any,
flow_style: Any = None,
) -> SequenceNode:
value: List[Any] = []
# if flow_style is None, the flow style explicitly tacked on to the
# object is used. If that is None as well, the default flow style
# applies.
try:
flow_style = sequence.fa.flow_style(flow_style)
except AttributeError:
flow_style = flow_style
try:
anchor = sequence.yaml_anchor()
except AttributeError:
anchor = None
if isinstance(tag, str):
tag = Tag(suffix=tag)
node = SequenceNode(tag, value, flow_style=flow_style, anchor=anchor)
if self.alias_key is not None:
self.represented_objects[self.alias_key] = node
best_style = True
try:
comment = getattr(sequence, comment_attrib)
node.comment = comment.comment
# reset any comment already printed information
if node.comment and node.comment[1]:
for ct in node.comment[1]:
ct.reset()
item_comments = comment.items
for v in item_comments.values():
if v and v[1]:
for ct in v[1]:
ct.reset()
item_comments = comment.items
if node.comment is None:
node.comment = comment.comment
else:
# as we are potentially going to extend this, make a new list
node.comment = comment.comment[:]
try:
node.comment.append(comment.end)
except AttributeError:
pass
except AttributeError:
item_comments = {}
for idx, item in enumerate(sequence):
node_item = self.represent_data(item)
self.merge_comments(node_item, item_comments.get(idx))
if not (isinstance(node_item, ScalarNode) and not node_item.style):
best_style = False
value.append(node_item)
if flow_style is None:
if len(sequence) != 0 and self.default_flow_style is not None:
node.flow_style = self.default_flow_style
else:
node.flow_style = best_style
return node
def merge_comments(self, node: Any, comments: Any) -> Any:
if comments is None:
assert hasattr(node, 'comment')
return node
if getattr(node, 'comment', None) is not None:
for idx, val in enumerate(comments):
if idx >= len(node.comment):
continue
nc = node.comment[idx]
if nc is not None:
assert val is None or val == nc
comments[idx] = nc
node.comment = comments
return node
def represent_key(self, data: Any) -> Any:
if isinstance(data, CommentedKeySeq):
self.alias_key = None
return self.represent_sequence(
'tag:yaml.org,2002:seq', data, flow_style=True
)
if isinstance(data, CommentedKeyMap):
self.alias_key = None
return self.represent_mapping(
'tag:yaml.org,2002:map', data, flow_style=True
)
return SafeRepresenter.represent_key(self, data)
def represent_mapping(
self, tag: Any, mapping: Any, flow_style: Any = None
) -> MappingNode:
value: List[Any] = []
try:
flow_style = mapping.fa.flow_style(flow_style)
except AttributeError:
flow_style = flow_style
try:
anchor = mapping.yaml_anchor()
except AttributeError:
anchor = None
if isinstance(tag, str):
tag = Tag(suffix=tag)
node = MappingNode(tag, value, flow_style=flow_style, anchor=anchor)
if self.alias_key is not None:
self.represented_objects[self.alias_key] = node
best_style = True
# no sorting! !!
try:
comment = getattr(mapping, comment_attrib)
if node.comment is None:
node.comment = comment.comment
else:
# as we are potentially going to extend this, make a new list
node.comment = comment.comment[:]
if node.comment and node.comment[1]:
for ct in node.comment[1]:
ct.reset()
item_comments = comment.items
if self.dumper.comment_handling is None: # type: ignore
for v in item_comments.values():
if v and v[1]:
for ct in v[1]:
ct.reset()
try:
node.comment.append(comment.end)
except AttributeError:
pass
else:
# NEWCMNT
pass
except AttributeError:
item_comments = {}
merge_value = getattr(mapping, merge_attrib, [])
# merge_list = [m[1] for m in merge_value]
# merge_list = [m for m in merge_value]
try:
# merge_pos = getattr(mapping, merge_attrib, [[0]])[0][0]
# print('merge_pos', merge_pos, merge_value.merge_pos)
merge_pos = merge_value.merge_pos # type: ignore
except (AttributeError, IndexError):
merge_pos = 0
item_count = 0
# if bool(merge_list):
if len(merge_value) > 0:
items = mapping.non_merged_items()
else:
items = mapping.items()
for item_key, item_value in items:
item_count += 1
node_key = self.represent_key(item_key)
node_value = self.represent_data(item_value)
item_comment = item_comments.get(item_key)
if item_comment:
# assert getattr(node_key, 'comment', None) is None
# issue 351 did throw this because the comment from the list item was
# moved to the dict
node_key.comment = item_comment[:2]
nvc = getattr(node_value, 'comment', None)
if nvc is not None: # end comment already there
nvc[0] = item_comment[2]
nvc[1] = item_comment[3]
else:
node_value.comment = item_comment[2:]
if not (isinstance(node_key, ScalarNode) and not node_key.style):
best_style = False
if not (isinstance(node_value, ScalarNode) and not node_value.style):
best_style = False
value.append((node_key, node_value))
if flow_style is None:
if (
(item_count != 0) or (len(merge_value) > 0)
) and self.default_flow_style is not None: # NOQA
node.flow_style = self.default_flow_style
else:
node.flow_style = best_style
if len(merge_value) > 0:
# because of the call to represent_data here, the anchors
# are marked as being used and thereby created
# if len(merge_list) == 1:
if merge_value.sequence is None: # type: ignore
arg = self.represent_data(merge_value[0])
else:
# arg = self.represent_data(merge_list)
# arg.flow_style = True
arg = self.represent_data(merge_value.sequence) # type: ignore
value.insert(
merge_pos,
(ScalarNode(Tag(suffix='tag:yaml.org,2002:merge'), '<<'), arg),
)
return node
def represent_omap(
self, tag: Any, omap: Any, flow_style: Any = None
) -> SequenceNode:
value: List[Any] = []
try:
flow_style = omap.fa.flow_style(flow_style)
except AttributeError:
flow_style = flow_style
try:
anchor = omap.yaml_anchor()
except AttributeError:
anchor = None
if isinstance(tag, str):
tag = Tag(suffix=tag)
node = SequenceNode(tag, value, flow_style=flow_style, anchor=anchor)
if self.alias_key is not None:
self.represented_objects[self.alias_key] = node
best_style = True
try:
comment = getattr(omap, comment_attrib)
if node.comment is None:
node.comment = comment.comment
else:
# as we are potentially going to extend this, make a new list
node.comment = comment.comment[:]
if node.comment and node.comment[1]:
for ct in node.comment[1]:
ct.reset()
item_comments = comment.items
for v in item_comments.values():
if v and v[1]:
for ct in v[1]:
ct.reset()
try:
node.comment.append(comment.end)
except AttributeError:
pass
except AttributeError:
item_comments = {}
for item_key in omap:
item_val = omap[item_key]
node_item = self.represent_data({item_key: item_val})
# node_item.flow_style = False
# node item has two scalars in value: node_key and node_value
item_comment = item_comments.get(item_key)
if item_comment:
if item_comment[1]:
node_item.comment = [None, item_comment[1]]
assert getattr(node_item.value[0][0], 'comment', None) is None
node_item.value[0][0].comment = [item_comment[0], None]
nvc = getattr(node_item.value[0][1], 'comment', None)
if nvc is not None: # end comment already there
nvc[0] = item_comment[2]
nvc[1] = item_comment[3]
else:
node_item.value[0][1].comment = item_comment[2:]
# if not (isinstance(node_item, ScalarNode) \
# and not node_item.style):
# best_style = False
value.append(node_item)
if flow_style is None:
if self.default_flow_style is not None:
node.flow_style = self.default_flow_style
else:
node.flow_style = best_style
return node
def represent_set(self, setting: Any) -> MappingNode:
flow_style = False
tag = Tag(suffix='tag:yaml.org,2002:set')
# return self.represent_mapping(tag, value)
value: List[Any] = []
flow_style = setting.fa.flow_style(flow_style)
try:
anchor = setting.yaml_anchor()
except AttributeError:
anchor = None
node = MappingNode(tag, value, flow_style=flow_style, anchor=anchor)
if self.alias_key is not None:
self.represented_objects[self.alias_key] = node
best_style = True
# no sorting! !!
try:
comment = getattr(setting, comment_attrib)
if node.comment is None:
node.comment = comment.comment
else:
# as we are potentially going to extend this, make a new list
node.comment = comment.comment[:]
if node.comment and node.comment[1]:
for ct in node.comment[1]:
ct.reset()
item_comments = comment.items
for v in item_comments.values():
if v and v[1]:
for ct in v[1]:
ct.reset()
try:
node.comment.append(comment.end)
except AttributeError:
pass
except AttributeError:
item_comments = {}
for item_key in setting.odict:
node_key = self.represent_key(item_key)
node_value = self.represent_data(None)
item_comment = item_comments.get(item_key)
if item_comment:
assert getattr(node_key, 'comment', None) is None
node_key.comment = item_comment[:2]
node_key.style = '?'
node_value.style = '-' if flow_style else '?'
if not (isinstance(node_key, ScalarNode) and not node_key.style):
best_style = False
if not (isinstance(node_value, ScalarNode) and not node_value.style):
best_style = False
value.append((node_key, node_value))
best_style = best_style
return node
def represent_dict(self, data: Any) -> MappingNode:
"""write out tag if saved on loading"""
try:
_ = data.tag
except AttributeError:
tag = Tag(suffix='tag:yaml.org,2002:map')
else:
if data.tag.trval:
if data.tag.startswith('!!'):
tag = Tag(suffix='tag:yaml.org,2002:' + data.tag.trval[2:])
else:
tag = data.tag
else:
tag = Tag(suffix='tag:yaml.org,2002:map')
return self.represent_mapping(tag, data)
def represent_list(self, data: Any) -> SequenceNode:
try:
_ = data.tag
except AttributeError:
tag = Tag(suffix='tag:yaml.org,2002:seq')
else:
if data.tag.trval:
if data.tag.startswith('!!'):
tag = Tag(suffix='tag:yaml.org,2002:' + data.tag.trval[2:])
else:
tag = data.tag
else:
tag = Tag(suffix='tag:yaml.org,2002:seq')
return self.represent_sequence(tag, data)
def represent_datetime(self, data: Any) -> ScalarNode:
inter = 'T' if data._yaml['t'] else ' '
_yaml = data._yaml
if False and _yaml['delta']:
data += _yaml['delta']
value = data.isoformat(inter)
else:
value = data.isoformat(inter).strip()
if False and _yaml['tz']:
value += _yaml['tz']
if data.tzinfo and str(data.tzinfo):
if value[-6] in '+-':
value = value[:-6] + str(data.tzinfo)
return self.represent_scalar('tag:yaml.org,2002:timestamp', value)
def represent_tagged_scalar(self, data: Any) -> ScalarNode:
try:
if data.tag.handle == '!!':
tag = f'{data.tag.handle} {data.tag.suffix}'
else:
tag = data.tag
except AttributeError:
tag = None
try:
anchor = data.yaml_anchor()
except AttributeError:
anchor = None
return self.represent_scalar(tag, data.value, style=data.style, anchor=anchor)
def represent_scalar_bool(self, data: Any) -> ScalarNode:
try:
anchor = data.yaml_anchor()
except AttributeError:
anchor = None
return SafeRepresenter.represent_bool(self, data, anchor=anchor)
def represent_yaml_object(
self,
tag: Any,
data: Any,
cls: Any,
flow_style: Optional[Any] = None,
) -> MappingNode:
if hasattr(data, '__getstate__'):
state = data.__getstate__()
else:
state = data.__dict__.copy()
anchor = state.pop(Anchor.attrib, None)
res = self.represent_mapping(tag, state, flow_style=flow_style)
if anchor is not None:
res.anchor = anchor
return res
RoundTripRepresenter.add_representer(type(None), RoundTripRepresenter.represent_none)
RoundTripRepresenter.add_representer(
LiteralScalarString,
RoundTripRepresenter.represent_literal_scalarstring,
)
RoundTripRepresenter.add_representer(
FoldedScalarString,
RoundTripRepresenter.represent_folded_scalarstring,
)
RoundTripRepresenter.add_representer(
SingleQuotedScalarString,
RoundTripRepresenter.represent_single_quoted_scalarstring,
)
RoundTripRepresenter.add_representer(
DoubleQuotedScalarString,
RoundTripRepresenter.represent_double_quoted_scalarstring,
)
RoundTripRepresenter.add_representer(
PlainScalarString,
RoundTripRepresenter.represent_plain_scalarstring,
)
# RoundTripRepresenter.add_representer(tuple, Representer.represent_tuple)
RoundTripRepresenter.add_representer(
ScalarInt, RoundTripRepresenter.represent_scalar_int
)
RoundTripRepresenter.add_representer(
BinaryInt, RoundTripRepresenter.represent_binary_int
)
RoundTripRepresenter.add_representer(OctalInt, RoundTripRepresenter.represent_octal_int)
RoundTripRepresenter.add_representer(HexInt, RoundTripRepresenter.represent_hex_int)
RoundTripRepresenter.add_representer(
HexCapsInt, RoundTripRepresenter.represent_hex_caps_int
)
RoundTripRepresenter.add_representer(
ScalarFloat, RoundTripRepresenter.represent_scalar_float
)
RoundTripRepresenter.add_representer(
ScalarBoolean, RoundTripRepresenter.represent_scalar_bool
)
RoundTripRepresenter.add_representer(CommentedSeq, RoundTripRepresenter.represent_list)
RoundTripRepresenter.add_representer(CommentedMap, RoundTripRepresenter.represent_dict)
RoundTripRepresenter.add_representer(
CommentedOrderedMap,
RoundTripRepresenter.represent_ordereddict,
)
RoundTripRepresenter.add_representer(
collections.OrderedDict,
RoundTripRepresenter.represent_ordereddict,
)
RoundTripRepresenter.add_representer(CommentedSet, RoundTripRepresenter.represent_set)
RoundTripRepresenter.add_representer(
TaggedScalar,
RoundTripRepresenter.represent_tagged_scalar,
)
RoundTripRepresenter.add_representer(TimeStamp, RoundTripRepresenter.represent_datetime)
python-ruyaml-0.92.1/lib/ruyaml/resolver.py 0000664 0000000 0000000 00000036475 15056754172 0020760 0 ustar 00root root 0000000 0000000 from __future__ import annotations
import re
from typing import Any, Dict, List, Optional, Text, Union # NOQA
if False: # MYPY
from typing import Any, Dict, List, Union, Text, Optional # NOQA
from ruyaml.compat import VersionType # NOQA
from ruyaml.compat import _DEFAULT_YAML_VERSION # NOQA
from ruyaml.error import * # NOQA
from ruyaml.nodes import MappingNode, ScalarNode, SequenceNode # NOQA
from ruyaml.tag import Tag
from ruyaml.util import RegExp # NOQA
__all__ = ['BaseResolver', 'Resolver', 'VersionedResolver']
# fmt: off
# resolvers consist of
# - a list of applicable versions
# - a tag
# - a regexp
# - a list of first characters to match
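# For example (illustrative): under YAML 1.1 the plain scalars yes/No/ON match the
# bool entry below, while the 1.2 bool entry only accepts true/True/TRUE and
# false/False/FALSE.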
implicit_resolvers = [
([(1, 2)],
'tag:yaml.org,2002:bool',
RegExp('''^(?:true|True|TRUE|false|False|FALSE)$''', re.X),
list('tTfF')),
([(1, 1)],
'tag:yaml.org,2002:bool',
RegExp('''^(?:y|Y|yes|Yes|YES|n|N|no|No|NO
|true|True|TRUE|false|False|FALSE
|on|On|ON|off|Off|OFF)$''', re.X),
list('yYnNtTfFoO')),
([(1, 2)],
'tag:yaml.org,2002:float',
RegExp('''^(?:
[-+]?(?:[0-9][0-9_]*)\\.[0-9_]*(?:[eE][-+]?[0-9]+)?
|[-+]?(?:[0-9][0-9_]*)(?:[eE][-+]?[0-9]+)
|[-+]?\\.[0-9_]+(?:[eE][-+][0-9]+)?
|[-+]?\\.(?:inf|Inf|INF)
|\\.(?:nan|NaN|NAN))$''', re.X),
list('-+0123456789.')),
([(1, 1)],
'tag:yaml.org,2002:float',
RegExp('''^(?:
[-+]?(?:[0-9][0-9_]*)\\.[0-9_]*(?:[eE][-+]?[0-9]+)?
|[-+]?(?:[0-9][0-9_]*)(?:[eE][-+]?[0-9]+)
|\\.[0-9_]+(?:[eE][-+][0-9]+)?
|[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\\.[0-9_]* # sexagesimal float
|[-+]?\\.(?:inf|Inf|INF)
|\\.(?:nan|NaN|NAN))$''', re.X),
list('-+0123456789.')),
([(1, 2)],
'tag:yaml.org,2002:int',
RegExp('''^(?:[-+]?0b[0-1_]+
|[-+]?0o?[0-7_]+
|[-+]?[0-9_]+
|[-+]?0x[0-9a-fA-F_]+)$''', re.X),
list('-+0123456789')),
([(1, 1)],
'tag:yaml.org,2002:int',
RegExp('''^(?:[-+]?0b[0-1_]+
|[-+]?0?[0-7_]+
|[-+]?(?:0|[1-9][0-9_]*)
|[-+]?0x[0-9a-fA-F_]+
|[-+]?[1-9][0-9_]*(?::[0-5]?[0-9])+)$''', re.X), # sexagesimal int
list('-+0123456789')),
([(1, 2), (1, 1)],
'tag:yaml.org,2002:merge',
RegExp('^(?:<<)$'),
['<']),
([(1, 2), (1, 1)],
'tag:yaml.org,2002:null',
RegExp('''^(?: ~
|null|Null|NULL
| )$''', re.X),
['~', 'n', 'N', '']),
([(1, 2), (1, 1)],
'tag:yaml.org,2002:timestamp',
RegExp('''^(?:[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]
|[0-9][0-9][0-9][0-9] -[0-9][0-9]? -[0-9][0-9]?
(?:[Tt]|[ \\t]+)[0-9][0-9]?
:[0-9][0-9] :[0-9][0-9] (?:\\.[0-9]*)?
(?:[ \\t]*(?:Z|[-+][0-9][0-9]?(?::[0-9][0-9])?))?)$''', re.X),
list('0123456789')),
([(1, 2), (1, 1)],
'tag:yaml.org,2002:value',
RegExp('^(?:=)$'),
['=']),
# The following resolver is only for documentation purposes. It cannot work
# because plain scalars cannot start with '!', '&', or '*'.
([(1, 2), (1, 1)],
'tag:yaml.org,2002:yaml',
RegExp('^(?:!|&|\\*)$'),
list('!&*')),
]
# fmt: on
class ResolverError(YAMLError):
pass
class BaseResolver:
DEFAULT_SCALAR_TAG = Tag(suffix='tag:yaml.org,2002:str')
DEFAULT_SEQUENCE_TAG = Tag(suffix='tag:yaml.org,2002:seq')
DEFAULT_MAPPING_TAG = Tag(suffix='tag:yaml.org,2002:map')
yaml_implicit_resolvers: Dict[Any, Any] = {}
yaml_path_resolvers: Dict[Any, Any] = {}
def __init__(self: Any, loadumper: Any = None) -> None:
self.loadumper = loadumper
if (
self.loadumper is not None
and getattr(self.loadumper, '_resolver', None) is None
):
self.loadumper._resolver = self.loadumper
self._loader_version: Any = None
self.resolver_exact_paths: List[Any] = []
self.resolver_prefix_paths: List[Any] = []
@property
def parser(self) -> Any:
if self.loadumper is not None:
if hasattr(self.loadumper, 'typ'):
return self.loadumper.parser
return self.loadumper._parser
return None
@classmethod
def add_implicit_resolver_base(cls, tag: Any, regexp: Any, first: Any) -> None:
if 'yaml_implicit_resolvers' not in cls.__dict__:
# deepcopy doesn't work here
cls.yaml_implicit_resolvers = {
k: cls.yaml_implicit_resolvers[k][:]
for k in cls.yaml_implicit_resolvers
}
if first is None:
first = [None]
for ch in first:
cls.yaml_implicit_resolvers.setdefault(ch, []).append((tag, regexp))
@classmethod
def add_implicit_resolver(cls, tag: Any, regexp: Any, first: Any) -> None:
if 'yaml_implicit_resolvers' not in cls.__dict__:
# deepcopy doesn't work here
cls.yaml_implicit_resolvers = {
k: cls.yaml_implicit_resolvers[k][:]
for k in cls.yaml_implicit_resolvers
}
if first is None:
first = [None]
for ch in first:
cls.yaml_implicit_resolvers.setdefault(ch, []).append((tag, regexp))
implicit_resolvers.append(([(1, 2), (1, 1)], tag, regexp, first))
# @classmethod
# def add_implicit_resolver(cls, tag, regexp, first):
@classmethod
def add_path_resolver(cls, tag: Any, path: Any, kind: Any = None) -> None:
# Note: `add_path_resolver` is experimental. The API could be changed.
# `path` is a pattern that is matched against the path from the
# root to the node that is being considered. `path` elements are
# tuples `(node_check, index_check)`. `node_check` is a node class:
# `ScalarNode`, `SequenceNode`, `MappingNode` or `None`. `None`
# matches any kind of a node. `index_check` could be `None`, a boolean
# value, a string value, or a number. `None` and `False` match against
# any _value_ of sequence and mapping nodes. `True` matches against
# any _key_ of a mapping node. A string `index_check` matches against
# a mapping value that corresponds to a scalar key which content is
# equal to the `index_check` value. An integer `index_check` matches
# against a sequence value with the index equal to `index_check`.
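# Hypothetical usage matching the semantics described above (not part of the
# original source):
#   SomeResolver.add_path_resolver('!settings', ['settings'], dict)
# would give the mapping found under the top-level 'settings' key the
# '!settings' tag.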
if 'yaml_path_resolvers' not in cls.__dict__:
cls.yaml_path_resolvers = cls.yaml_path_resolvers.copy()
new_path: List[Any] = []
for element in path:
if isinstance(element, (list, tuple)):
if len(element) == 2:
node_check, index_check = element
elif len(element) == 1:
node_check = element[0]
index_check = True
else:
raise ResolverError(f'Invalid path element: {element!s}')
else:
node_check = None
index_check = element
if node_check is str:
node_check = ScalarNode
elif node_check is list:
node_check = SequenceNode
elif node_check is dict:
node_check = MappingNode
elif (
node_check not in [ScalarNode, SequenceNode, MappingNode]
and not isinstance(node_check, str)
and node_check is not None
):
raise ResolverError(f'Invalid node checker: {node_check!s}')
if not isinstance(index_check, (str, int)) and index_check is not None:
raise ResolverError(f'Invalid index checker: {index_check!s}')
new_path.append((node_check, index_check))
if kind is str:
kind = ScalarNode
elif kind is list:
kind = SequenceNode
elif kind is dict:
kind = MappingNode
elif kind not in [ScalarNode, SequenceNode, MappingNode] and kind is not None:
raise ResolverError(f'Invalid node kind: {kind!s}')
cls.yaml_path_resolvers[tuple(new_path), kind] = tag
def descend_resolver(self, current_node: Any, current_index: Any) -> None:
if not self.yaml_path_resolvers:
return
exact_paths = {}
prefix_paths = []
if current_node:
depth = len(self.resolver_prefix_paths)
for path, kind in self.resolver_prefix_paths[-1]:
if self.check_resolver_prefix(
depth, path, kind, current_node, current_index
):
if len(path) > depth:
prefix_paths.append((path, kind))
else:
exact_paths[kind] = self.yaml_path_resolvers[path, kind]
else:
for path, kind in self.yaml_path_resolvers:
if not path:
exact_paths[kind] = self.yaml_path_resolvers[path, kind]
else:
prefix_paths.append((path, kind))
self.resolver_exact_paths.append(exact_paths)
self.resolver_prefix_paths.append(prefix_paths)
def ascend_resolver(self) -> None:
if not self.yaml_path_resolvers:
return
self.resolver_exact_paths.pop()
self.resolver_prefix_paths.pop()
def check_resolver_prefix(
self,
depth: int,
path: Any,
kind: Any,
current_node: Any,
current_index: Any,
) -> bool:
node_check, index_check = path[depth - 1]
if isinstance(node_check, str):
if current_node.tag != node_check:
return False
elif node_check is not None: # type: ignore
if not isinstance(current_node, node_check): # type: ignore
return False
if index_check is True and current_index is not None: # type: ignore
return False
if (
index_check is False or index_check is None # type: ignore
) and current_index is None: # type: ignore
return False
if isinstance(index_check, str):
if not (
isinstance(current_index, ScalarNode)
and index_check == current_index.value # type: ignore
):
return False
elif isinstance(index_check, int) and not isinstance( # type: ignore
index_check, bool # type: ignore
):
if index_check != current_index: # type: ignore
return False
return True
def resolve(self, kind: Any, value: Any, implicit: Any) -> Any:
if kind is ScalarNode and implicit[0]:
if value == "":
resolvers = self.yaml_implicit_resolvers.get("", [])
else:
resolvers = self.yaml_implicit_resolvers.get(value[0], [])
resolvers += self.yaml_implicit_resolvers.get(None, [])
for tag, regexp in resolvers:
if regexp.match(value):
return Tag(suffix=tag)
implicit = implicit[1]
if bool(self.yaml_path_resolvers):
exact_paths = self.resolver_exact_paths[-1]
if kind in exact_paths:
return Tag(suffix=exact_paths[kind])
if None in exact_paths:
return Tag(suffix=exact_paths[None])
if kind is ScalarNode:
return self.DEFAULT_SCALAR_TAG
elif kind is SequenceNode:
return self.DEFAULT_SEQUENCE_TAG
elif kind is MappingNode:
return self.DEFAULT_MAPPING_TAG
@property
def processing_version(self) -> Any:
return None
class Resolver(BaseResolver):
pass
for ir in implicit_resolvers:
if (1, 2) in ir[0]:
Resolver.add_implicit_resolver_base(*ir[1:])
class VersionedResolver(BaseResolver):
"""
contrary to the "normal" resolver, the smart resolver delays loading
the pattern matching rules. That way it can decide to load 1.1 rules
or the (default) 1.2 rules, which no longer support octals without 0o, sexagesimals
and Yes/No/On/Off booleans.
"""
def __init__(
self,
version: Optional[VersionType] = None,
loader: Any = None,
loadumper: Any = None,
) -> None:
if loader is None and loadumper is not None:
loader = loadumper
BaseResolver.__init__(self, loader)
self._loader_version = self.get_loader_version(version)
self._version_implicit_resolver: Dict[Any, Any] = {}
def add_version_implicit_resolver(
self,
version: VersionType,
tag: Any,
regexp: Any,
first: Any,
) -> None:
if first is None:
first = [None]
impl_resolver = self._version_implicit_resolver.setdefault(version, {})
for ch in first:
impl_resolver.setdefault(ch, []).append((tag, regexp))
def get_loader_version(self, version: Optional[VersionType]) -> Any:
if version is None or isinstance(version, tuple):
return version
if isinstance(version, list):
return tuple(version)
# assume string
assert isinstance(version, str)
return tuple(map(int, version.split('.')))
@property
def versioned_resolver(self) -> Any:
"""
select the resolver based on the version we are parsing
"""
version = self.processing_version
if isinstance(version, str):
version = tuple(map(int, version.split('.')))
if version not in self._version_implicit_resolver:
for x in implicit_resolvers:
if version in x[0]:
self.add_version_implicit_resolver(version, x[1], x[2], x[3])
return self._version_implicit_resolver[version]
def resolve(self, kind: Any, value: Any, implicit: Any) -> Any:
if kind is ScalarNode and implicit[0]:
if value == "":
resolvers = self.versioned_resolver.get("", [])
else:
resolvers = self.versioned_resolver.get(value[0], [])
resolvers += self.versioned_resolver.get(None, [])
for tag, regexp in resolvers:
if regexp.match(value):
return Tag(suffix=tag)
implicit = implicit[1]
if bool(self.yaml_path_resolvers):
exact_paths = self.resolver_exact_paths[-1]
if kind in exact_paths:
return Tag(suffix=exact_paths[kind])
if None in exact_paths:
return Tag(suffix=exact_paths[None])
if kind is ScalarNode:
return self.DEFAULT_SCALAR_TAG
elif kind is SequenceNode:
return self.DEFAULT_SEQUENCE_TAG
elif kind is MappingNode:
return self.DEFAULT_MAPPING_TAG
@property
def processing_version(self) -> Any:
try:
version = self.loadumper._scanner.yaml_version # type: ignore
except AttributeError:
try:
if hasattr(self.loadumper, 'typ'):
version = self.loadumper.version # type: ignore
else:
version = self.loadumper._serializer.use_version # type: ignore # dumping
except AttributeError:
version = None
if version is None:
version = self._loader_version
if version is None:
version = _DEFAULT_YAML_VERSION
return version
python-ruyaml-0.92.1/lib/ruyaml/scalarbool.py 0000664 0000000 0000000 00000002460 15056754172 0021223 0 ustar 00root root 0000000 0000000 from __future__ import annotations
"""
You cannot subclass bool, yet a subclass is necessary for round-tripping anchored
bool values (and also if you want to preserve the original way of writing).
bool.__bases__ is type 'int', so that is what is used as the basis for ScalarBoolean as well.
You can use these in an if statement, but not when testing equivalence
"""
from ruyaml.anchor import Anchor
if False: # MYPY
from typing import Any, Dict, List, Text # NOQA
__all__ = ['ScalarBoolean']
class ScalarBoolean(int):
def __new__(cls: Any, *args: Any, **kw: Any) -> Any:
anchor = kw.pop('anchor', None)
b = int.__new__(cls, *args, **kw)
if anchor is not None:
b.yaml_set_anchor(anchor, always_dump=True)
return b
@property
def anchor(self) -> Any:
if not hasattr(self, Anchor.attrib):
setattr(self, Anchor.attrib, Anchor())
return getattr(self, Anchor.attrib)
def yaml_anchor(self, any: bool = False) -> Any:
if not hasattr(self, Anchor.attrib):
return None
if any or self.anchor.always_dump:
return self.anchor
return None
def yaml_set_anchor(self, value: Any, always_dump: bool = False) -> None:
self.anchor.value = value
self.anchor.always_dump = always_dump
python-ruyaml-0.92.1/lib/ruyaml/scalarfloat.py 0000664 0000000 0000000 00000007331 15056754172 0021377 0 ustar 00root root 0000000 0000000 from __future__ import annotations
import sys
from ruyaml.anchor import Anchor
if False: # MYPY
from typing import Any, Dict, List, Text # NOQA
__all__ = ['ScalarFloat', 'ExponentialFloat', 'ExponentialCapsFloat']
class ScalarFloat(float):
def __new__(cls: Any, *args: Any, **kw: Any) -> Any:
width = kw.pop('width', None)
prec = kw.pop('prec', None)
m_sign = kw.pop('m_sign', None)
m_lead0 = kw.pop('m_lead0', 0)
exp = kw.pop('exp', None)
e_width = kw.pop('e_width', None)
e_sign = kw.pop('e_sign', None)
underscore = kw.pop('underscore', None)
anchor = kw.pop('anchor', None)
v = float.__new__(cls, *args, **kw)
v._width = width
v._prec = prec
v._m_sign = m_sign
v._m_lead0 = m_lead0
v._exp = exp
v._e_width = e_width
v._e_sign = e_sign
v._underscore = underscore
if anchor is not None:
v.yaml_set_anchor(anchor, always_dump=True)
return v
def __iadd__(self, a: Any) -> Any: # type: ignore
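# Descriptive note (not in the original source): the early return below falls back
# to a plain float, so the width/underscore-preserving code after it is unreachable;
# the same pattern applies to the other in-place operators in this class.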
return float(self) + a
x = type(self)(self + a)
x._width = self._width
x._underscore = (
self._underscore[:] if self._underscore is not None else None
) # NOQA
return x
def __ifloordiv__(self, a: Any) -> Any: # type: ignore
return float(self) // a
x = type(self)(self // a)
x._width = self._width
x._underscore = (
self._underscore[:] if self._underscore is not None else None
) # NOQA
return x
def __imul__(self, a: Any) -> Any: # type: ignore
return float(self) * a
x = type(self)(self * a)
x._width = self._width
x._underscore = (
self._underscore[:] if self._underscore is not None else None
) # NOQA
x._prec = self._prec # check for others
return x
def __ipow__(self, a: Any) -> Any: # type: ignore
return float(self) ** a
x = type(self)(self**a)
x._width = self._width
x._underscore = (
self._underscore[:] if self._underscore is not None else None
) # NOQA
return x
def __isub__(self, a: Any) -> Any: # type: ignore
return float(self) - a
x = type(self)(self - a)
x._width = self._width
x._underscore = (
self._underscore[:] if self._underscore is not None else None
) # NOQA
return x
@property
def anchor(self) -> Any:
if not hasattr(self, Anchor.attrib):
setattr(self, Anchor.attrib, Anchor())
return getattr(self, Anchor.attrib)
def yaml_anchor(self, any: bool = False) -> Any:
if not hasattr(self, Anchor.attrib):
return None
if any or self.anchor.always_dump:
return self.anchor
return None
def yaml_set_anchor(self, value: Any, always_dump: bool = False) -> None:
self.anchor.value = value
self.anchor.always_dump = always_dump
def dump(self, out: Any = sys.stdout) -> None:
out.write(
f'ScalarFloat({self}| w:{self._width}, p:{self._prec}, ' # type: ignore
f's:{self._m_sign}, lz:{self._m_lead0}, _:{self._underscore}|{self._exp}'
f', w:{self._e_width}, s:{self._e_sign})\n',
)
class ExponentialFloat(ScalarFloat):
def __new__(cls, value: Any, width: Any = None, underscore: Any = None) -> Any:
return ScalarFloat.__new__(cls, value, width=width, underscore=underscore)
class ExponentialCapsFloat(ScalarFloat):
def __new__(cls, value: Any, width: Any = None, underscore: Any = None) -> Any:
return ScalarFloat.__new__(cls, value, width=width, underscore=underscore)
python-ruyaml-0.92.1/lib/ruyaml/scalarint.py 0000664 0000000 0000000 00000010466 15056754172 0021067 0 ustar 00root root 0000000 0000000 from __future__ import annotations
from ruyaml.anchor import Anchor
if False: # MYPY
from typing import Any, Dict, List, Text # NOQA
__all__ = ['ScalarInt', 'BinaryInt', 'OctalInt', 'HexInt', 'HexCapsInt', 'DecimalInt']
class ScalarInt(int):
def __new__(cls: Any, *args: Any, **kw: Any) -> Any:
width = kw.pop('width', None)
underscore = kw.pop('underscore', None)
anchor = kw.pop('anchor', None)
v = int.__new__(cls, *args, **kw)
v._width = width
v._underscore = underscore
if anchor is not None:
v.yaml_set_anchor(anchor, always_dump=True)
return v
def __iadd__(self, a: Any) -> Any: # type: ignore
x = type(self)(self + a)
x._width = self._width # type: ignore
x._underscore = ( # type: ignore
self._underscore[:] if self._underscore is not None else None # type: ignore
) # NOQA
return x
def __ifloordiv__(self, a: Any) -> Any: # type: ignore
x = type(self)(self // a)
x._width = self._width # type: ignore
x._underscore = ( # type: ignore
self._underscore[:] if self._underscore is not None else None # type: ignore
) # NOQA
return x
def __imul__(self, a: Any) -> Any: # type: ignore
x = type(self)(self * a)
x._width = self._width # type: ignore
x._underscore = ( # type: ignore
self._underscore[:] if self._underscore is not None else None # type: ignore
) # NOQA
return x
def __ipow__(self, a: Any) -> Any: # type: ignore
x = type(self)(self**a)
x._width = self._width # type: ignore
x._underscore = ( # type: ignore
self._underscore[:] if self._underscore is not None else None # type: ignore
) # NOQA
return x
def __isub__(self, a: Any) -> Any: # type: ignore
x = type(self)(self - a)
x._width = self._width # type: ignore
x._underscore = ( # type: ignore
self._underscore[:] if self._underscore is not None else None # type: ignore
) # NOQA
return x
@property
def anchor(self) -> Any:
if not hasattr(self, Anchor.attrib):
setattr(self, Anchor.attrib, Anchor())
return getattr(self, Anchor.attrib)
def yaml_anchor(self, any: bool = False) -> Any:
if not hasattr(self, Anchor.attrib):
return None
if any or self.anchor.always_dump:
return self.anchor
return None
def yaml_set_anchor(self, value: Any, always_dump: bool = False) -> None:
self.anchor.value = value
self.anchor.always_dump = always_dump
class BinaryInt(ScalarInt):
def __new__(
cls,
value: Any,
width: Any = None,
underscore: Any = None,
anchor: Any = None,
) -> Any:
return ScalarInt.__new__(
cls, value, width=width, underscore=underscore, anchor=anchor
)
class OctalInt(ScalarInt):
def __new__(
cls,
value: Any,
width: Any = None,
underscore: Any = None,
anchor: Any = None,
) -> Any:
return ScalarInt.__new__(
cls, value, width=width, underscore=underscore, anchor=anchor
)
# mixed casing of A-F is not supported; when loading, the first non-digit
# determines the case
class HexInt(ScalarInt):
"""uses lower case (a-f)"""
def __new__(
cls,
value: Any,
width: Any = None,
underscore: Any = None,
anchor: Any = None,
) -> Any:
return ScalarInt.__new__(
cls, value, width=width, underscore=underscore, anchor=anchor
)
class HexCapsInt(ScalarInt):
"""uses upper case (A-F)"""
def __new__(
cls,
value: Any,
width: Any = None,
underscore: Any = None,
anchor: Any = None,
) -> Any:
return ScalarInt.__new__(
cls, value, width=width, underscore=underscore, anchor=anchor
)
class DecimalInt(ScalarInt):
"""needed if anchor"""
def __new__(
cls,
value: Any,
width: Any = None,
underscore: Any = None,
anchor: Any = None,
) -> Any:
return ScalarInt.__new__(
cls, value, width=width, underscore=underscore, anchor=anchor
)
python-ruyaml-0.92.1/lib/ruyaml/scalarstring.py 0000664 0000000 0000000 00000010050 15056754172 0021570 0 ustar 00root root 0000000 0000000 from __future__ import annotations
from ruyaml.anchor import Anchor
if False: # MYPY
from typing import Any, Dict, List, Text # NOQA
from ruyaml.compat import SupportsIndex
__all__ = [
'ScalarString',
'LiteralScalarString',
'FoldedScalarString',
'SingleQuotedScalarString',
'DoubleQuotedScalarString',
'PlainScalarString',
# PreservedScalarString is the old name, as it was the first to be preserved on
# round-trip; use LiteralScalarString instead
'PreservedScalarString',
]
class ScalarString(str):
__slots__ = Anchor.attrib
def __new__(cls, *args: Any, **kw: Any) -> Any:
anchor = kw.pop('anchor', None)
ret_val = str.__new__(cls, *args, **kw)
if anchor is not None:
ret_val.yaml_set_anchor(anchor, always_dump=True)
return ret_val
def replace(self, old: Any, new: Any, maxreplace: SupportsIndex = -1) -> Any:
return type(self)((str.replace(self, old, new, maxreplace)))
@property
def anchor(self) -> Any:
if not hasattr(self, Anchor.attrib):
setattr(self, Anchor.attrib, Anchor())
return getattr(self, Anchor.attrib)
def yaml_anchor(self, any: bool = False) -> Any:
if not hasattr(self, Anchor.attrib):
return None
if any or self.anchor.always_dump:
return self.anchor
return None
def yaml_set_anchor(self, value: Any, always_dump: bool = False) -> None:
self.anchor.value = value
self.anchor.always_dump = always_dump
class LiteralScalarString(ScalarString):
__slots__ = 'comment' # the comment after the | on the first line
style = '|'
def __new__(cls, value: Text, anchor: Any = None) -> Any:
return ScalarString.__new__(cls, value, anchor=anchor)
PreservedScalarString = LiteralScalarString
class FoldedScalarString(ScalarString):
__slots__ = ('fold_pos', 'comment') # the comment after the > on the first line
style = '>'
def __new__(cls, value: Text, anchor: Any = None) -> Any:
return ScalarString.__new__(cls, value, anchor=anchor)
class SingleQuotedScalarString(ScalarString):
__slots__ = ()
style = "'"
def __new__(cls, value: Text, anchor: Any = None) -> Any:
return ScalarString.__new__(cls, value, anchor=anchor)
class DoubleQuotedScalarString(ScalarString):
__slots__ = ()
style = '"'
def __new__(cls, value: Text, anchor: Any = None) -> Any:
return ScalarString.__new__(cls, value, anchor=anchor)
class PlainScalarString(ScalarString):
__slots__ = ()
style = ''
def __new__(cls, value: Text, anchor: Any = None) -> Any:
return ScalarString.__new__(cls, value, anchor=anchor)
def preserve_literal(s: Text) -> Text:
return LiteralScalarString(s.replace('\r\n', '\n').replace('\r', '\n'))
def walk_tree(base: Any, map: Any = None) -> None:
"""
the routine here walks over a simple yaml tree (recursing in
dict values and list items) and converts strings that
have multiple lines to literal scalars.
You can also provide an explicit (ordered) mapping for multiple transforms
(first of which is executed):
map = ruyaml.compat.ordereddict()
map['\n'] = preserve_literal
map[':'] = SingleQuotedScalarString
walk_tree(data, map=map)
"""
from collections.abc import MutableMapping, MutableSequence
if map is None:
map = {'\n': preserve_literal}
if isinstance(base, MutableMapping):
for k in base:
v: Text = base[k]
if isinstance(v, str):
for ch in map:
if ch in v:
base[k] = map[ch](v)
break
else:
walk_tree(v, map=map)
elif isinstance(base, MutableSequence):
for idx, elem in enumerate(base):
if isinstance(elem, str):
for ch in map:
if ch in elem:
base[idx] = map[ch](elem)
break
else:
walk_tree(elem, map=map)
python-ruyaml-0.92.1/lib/ruyaml/scanner.py 0000664 0000000 0000000 00000257545 15056754172 0020553 0 ustar 00root root 0000000 0000000 from __future__ import annotations
from ruyaml.compat import _debug, check_anchorname_char, nprint, nprintf # NOQA
from ruyaml.docinfo import Tag, Version # NOQA
from ruyaml.error import CommentMark, MarkedYAMLError # NOQA
from ruyaml.tokens import * # NOQA
# Scanner produces tokens of the following types:
# STREAM-START
# STREAM-END
# DIRECTIVE(name, value)
# DOCUMENT-START
# DOCUMENT-END
# BLOCK-SEQUENCE-START
# BLOCK-MAPPING-START
# BLOCK-END
# FLOW-SEQUENCE-START
# FLOW-MAPPING-START
# FLOW-SEQUENCE-END
# FLOW-MAPPING-END
# BLOCK-ENTRY
# FLOW-ENTRY
# KEY
# VALUE
# ALIAS(value)
# ANCHOR(value)
# TAG(value)
# SCALAR(value, plain, style)
#
# RoundTripScanner
# COMMENT(value)
#
# Read comments in the Scanner code for more details.
#
if False: # MYPY
from typing import Any, Dict, List, Optional, Text, Tuple, Union # NOQA
__all__ = ['Scanner', 'RoundTripScanner', 'ScannerError']
_THE_END = '\n\0\r\x85\u2028\u2029'
_THE_END_SPACE_TAB = ' \n\0\t\r\x85\u2028\u2029'
_SPACE_TAB = ' \t'
if _debug != 0:
def xprintf(*args: Any, **kw: Any) -> Any:
return nprintf(*args, **kw)
class ScannerError(MarkedYAMLError):
pass
class SimpleKey:
# See below simple keys treatment.
def __init__(
self,
token_number: Any,
required: Any,
index: int,
line: int,
column: int,
mark: Any,
) -> None:
self.token_number = token_number
self.required = required
self.index = index
self.line = line
self.column = column
self.mark = mark
class Scanner:
def __init__(self, loader: Any = None) -> None:
"""Initialize the scanner."""
# It is assumed that Scanner and Reader will have a common descendant.
# Reader does the dirty work of checking for BOM and converting the
# input data to Unicode. It also adds NUL to the end.
#
# Reader supports the following methods
# self.peek(i=0) # peek the next i-th character
# self.prefix(l=1) # peek the next l characters
# self.forward(l=1) # read the next l characters and move the pointer
self.loader = loader
if self.loader is not None and getattr(self.loader, '_scanner', None) is None:
self.loader._scanner = self
self.reset_scanner()
self.first_time = False
@property
def flow_level(self) -> int:
return len(self.flow_context)
def reset_scanner(self) -> None:
# Have we reached the end of the stream?
self.done = False
# flow_context is an expanding/shrinking list consisting of '{' and '['
# for each unclosed flow context. An empty list means block context.
self.flow_context: List[Text] = []
# List of processed tokens that are not yet emitted.
self.tokens: List[Any] = []
# Add the STREAM-START token.
self.fetch_stream_start()
# Number of tokens that were emitted through the `get_token` method.
self.tokens_taken = 0
# The current indentation level.
self.indent = -1
# Past indentation levels.
self.indents: List[int] = []
# Variables related to simple keys treatment.
# A simple key is a key that is not denoted by the '?' indicator.
# Example of simple keys:
# ---
# block simple key: value
# ? not a simple key:
# : { flow simple key: value }
# We emit the KEY token before all keys, so when we find a potential
# simple key, we try to locate the corresponding ':' indicator.
# Simple keys should be limited to a single line and 1024 characters.
# Can a simple key start at the current position? A simple key may
# start:
# - at the beginning of the line, not counting indentation spaces
# (in block context),
# - after '{', '[', ',' (in the flow context),
# - after '?', ':', '-' (in the block context).
# In the block context, this flag also signifies if a block collection
# may start at the current position.
self.allow_simple_key = True
# Keep track of possible simple keys. This is a dictionary. The key
# is `flow_level`; there can be no more than one possible simple key
# for each level. The value is a SimpleKey record:
# (token_number, required, index, line, column, mark)
# A simple key may start with ALIAS, ANCHOR, TAG, SCALAR(flow),
# '[', or '{' tokens.
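# Illustrative note (not from the original source): while scanning `foo: bar` in
# block context, 'foo' is recorded here as a possible simple key; once the ':' is
# found, a KEY token is inserted back at the recorded token_number.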
self.possible_simple_keys: Dict[Any, Any] = {}
self.yaml_version: Any = None
self.tag_directives: List[Tuple[Any, Any]] = []
@property
def reader(self) -> Any:
try:
return self._scanner_reader # type: ignore
except AttributeError:
if hasattr(self.loader, 'typ'):
self._scanner_reader = self.loader.reader # type: ignore
else:
self._scanner_reader = self.loader._reader # type: ignore
return self._scanner_reader
@property
def scanner_processing_version(self) -> Any: # prefix until un-composited
if hasattr(self.loader, 'typ'):
return self.loader.resolver.processing_version # type: ignore
return self.loader.processing_version # type: ignore
# Public methods.
def check_token(self, *choices: Any) -> bool:
# Check if the next token is one of the given types.
while self.need_more_tokens():
self.fetch_more_tokens()
if len(self.tokens) > 0:
if not choices:
return True
for choice in choices:
if isinstance(self.tokens[0], choice):
return True
return False
def peek_token(self) -> Any:
# Return the next token, but do not delete if from the queue.
while self.need_more_tokens():
self.fetch_more_tokens()
if len(self.tokens) > 0:
return self.tokens[0]
def get_token(self) -> Any:
# Return the next token.
while self.need_more_tokens():
self.fetch_more_tokens()
if len(self.tokens) > 0:
self.tokens_taken += 1
return self.tokens.pop(0)
# Private methods.
def need_more_tokens(self) -> bool:
if self.done:
return False
if len(self.tokens) == 0:
return True
# The current token may be a potential simple key, so we
# need to look further.
self.stale_possible_simple_keys()
if self.next_possible_simple_key() == self.tokens_taken:
return True
return False
def fetch_comment(self, comment: Any) -> None:
raise NotImplementedError
def fetch_more_tokens(self) -> Any:
# Eat whitespaces and comments until we reach the next token.
comment = self.scan_to_next_token()
if comment is not None: # never happens for base scanner
return self.fetch_comment(comment)
# Remove obsolete possible simple keys.
self.stale_possible_simple_keys()
# Compare the current indentation and column. It may add some tokens
# and decrease the current indentation level.
self.unwind_indent(self.reader.column)
# Peek the next character.
ch = self.reader.peek()
# Is it the end of stream?
if ch == '\0':
return self.fetch_stream_end()
# Is it a directive?
if ch == '%' and self.check_directive():
return self.fetch_directive()
# Is it the document start?
if ch == '-' and self.check_document_start():
return self.fetch_document_start()
# Is it the document end?
if ch == '.' and self.check_document_end():
return self.fetch_document_end()
# TODO: support for BOM within a stream.
# if ch == '\uFEFF':
# return self.fetch_bom() <-- issue BOMToken
# Note: the order of the following checks is NOT significant.
# Is it the flow sequence start indicator?
if ch == '[':
return self.fetch_flow_sequence_start()
# Is it the flow mapping start indicator?
if ch == '{':
return self.fetch_flow_mapping_start()
# Is it the flow sequence end indicator?
if ch == ']':
return self.fetch_flow_sequence_end()
# Is it the flow mapping end indicator?
if ch == '}':
return self.fetch_flow_mapping_end()
# Is it the flow entry indicator?
if ch == ',':
return self.fetch_flow_entry()
# Is it the block entry indicator?
if ch == '-' and self.check_block_entry():
return self.fetch_block_entry()
# Is it the key indicator?
if ch == '?' and self.check_key():
return self.fetch_key()
# Is it the value indicator?
if ch == ':' and self.check_value():
return self.fetch_value()
# Is it an alias?
if ch == '*':
return self.fetch_alias()
# Is it an anchor?
if ch == '&':
return self.fetch_anchor()
# Is it a tag?
if ch == '!':
return self.fetch_tag()
# Is it a literal scalar?
if ch == '|' and not self.flow_level:
return self.fetch_literal()
# Is it a folded scalar?
if ch == '>' and not self.flow_level:
return self.fetch_folded()
# Is it a single quoted scalar?
if ch == "'":
return self.fetch_single()
# Is it a double quoted scalar?
if ch == '"':
return self.fetch_double()
# It must be a plain scalar then.
if self.check_plain():
return self.fetch_plain()
# No? It's an error. Let's produce a nice error message.
raise ScannerError(
'while scanning for the next token',
None,
f'found character {ch!r} that cannot start any token',
self.reader.get_mark(),
)
# Simple keys treatment.
def next_possible_simple_key(self) -> Any:
# Return the number of the nearest possible simple key. Actually we
# don't need to loop through the whole dictionary. We may replace it
# with the following code:
# if not self.possible_simple_keys:
# return None
# return self.possible_simple_keys[
# min(self.possible_simple_keys.keys())].token_number
min_token_number = None
for level in self.possible_simple_keys:
key = self.possible_simple_keys[level]
if min_token_number is None or key.token_number < min_token_number:
min_token_number = key.token_number
return min_token_number
def stale_possible_simple_keys(self) -> None:
# Remove entries that are no longer possible simple keys. According to
# the YAML specification, simple keys
# - should be limited to a single line,
# - should be no longer than 1024 characters.
# Disabling this procedure will allow simple keys of any length and
# height (may cause problems if indentation is broken though).
for level in list(self.possible_simple_keys):
key = self.possible_simple_keys[level]
if key.line != self.reader.line or self.reader.index - key.index > 1024:
if key.required:
raise ScannerError(
'while scanning a simple key',
key.mark,
"could not find expected ':'",
self.reader.get_mark(),
)
del self.possible_simple_keys[level]
def save_possible_simple_key(self) -> None:
# The next token may start a simple key. We check if it's possible
# and save its position. This function is called for
# ALIAS, ANCHOR, TAG, SCALAR(flow), '[', and '{'.
# Check if a simple key is required at the current position.
required = not self.flow_level and self.indent == self.reader.column
# The next token might be a simple key. Let's save its number and
# position.
if self.allow_simple_key:
self.remove_possible_simple_key()
token_number = self.tokens_taken + len(self.tokens)
key = SimpleKey(
token_number,
required,
self.reader.index,
self.reader.line,
self.reader.column,
self.reader.get_mark(),
)
self.possible_simple_keys[self.flow_level] = key
def remove_possible_simple_key(self) -> None:
# Remove the saved possible key position at the current flow level.
if self.flow_level in self.possible_simple_keys:
key = self.possible_simple_keys[self.flow_level]
if key.required:
raise ScannerError(
'while scanning a simple key',
key.mark,
"could not find expected ':'",
self.reader.get_mark(),
)
del self.possible_simple_keys[self.flow_level]
# Indentation functions.
def unwind_indent(self, column: Any) -> None:
# In flow context, tokens should respect indentation.
# Actually the condition should be `self.indent >= column` according to
# the spec. But this condition will prohibit intuitively correct
# constructions such as
# key : {
# }
# ####
# if self.flow_level and self.indent > column:
# raise ScannerError(None, None,
# "invalid intendation or unclosed '[' or '{'",
# self.reader.get_mark())
# In the flow context, indentation is ignored. We make the scanner less
# restrictive than the specification requires.
if bool(self.flow_level):
return
# In block context, we may need to issue the BLOCK-END tokens.
while self.indent > column:
mark = self.reader.get_mark()
self.indent = self.indents.pop()
self.tokens.append(BlockEndToken(mark, mark))
def add_indent(self, column: int) -> bool:
# Check if we need to increase indentation.
if self.indent < column:
self.indents.append(self.indent)
self.indent = column
return True
return False
# Fetchers.
def fetch_stream_start(self) -> None:
# We always add STREAM-START as the first token and STREAM-END as the
# last token.
# Read the token.
mark = self.reader.get_mark()
# Add STREAM-START.
self.tokens.append(StreamStartToken(mark, mark, encoding=self.reader.encoding))
def fetch_stream_end(self) -> None:
# Set the current indentation to -1.
self.unwind_indent(-1)
# Reset simple keys.
self.remove_possible_simple_key()
self.allow_simple_key = False
self.possible_simple_keys = {}
# Read the token.
mark = self.reader.get_mark()
# Add STREAM-END.
self.tokens.append(StreamEndToken(mark, mark))
# The stream is finished.
self.done = True
def fetch_directive(self) -> None:
# Set the current indentation to -1.
self.unwind_indent(-1)
# Reset simple keys.
self.remove_possible_simple_key()
self.allow_simple_key = False
# Scan and add DIRECTIVE.
self.tokens.append(self.scan_directive())
def fetch_document_start(self) -> None:
self.fetch_document_indicator(DocumentStartToken)
def fetch_document_end(self) -> None:
self.fetch_document_indicator(DocumentEndToken)
def fetch_document_indicator(self, TokenClass: Any) -> None:
# Set the current indentation to -1.
self.unwind_indent(-1)
# Reset simple keys. Note that there could not be a block collection
# after '---'.
self.remove_possible_simple_key()
self.allow_simple_key = False
# Add DOCUMENT-START or DOCUMENT-END.
start_mark = self.reader.get_mark()
self.reader.forward(3)
end_mark = self.reader.get_mark()
self.tokens.append(TokenClass(start_mark, end_mark))
def fetch_flow_sequence_start(self) -> None:
self.fetch_flow_collection_start(FlowSequenceStartToken, to_push='[')
def fetch_flow_mapping_start(self) -> None:
self.fetch_flow_collection_start(FlowMappingStartToken, to_push='{')
def fetch_flow_collection_start(self, TokenClass: Any, to_push: Text) -> None:
# '[' and '{' may start a simple key.
self.save_possible_simple_key()
# Increase the flow level.
self.flow_context.append(to_push)
# Simple keys are allowed after '[' and '{'.
self.allow_simple_key = True
# Add FLOW-SEQUENCE-START or FLOW-MAPPING-START.
start_mark = self.reader.get_mark()
self.reader.forward()
end_mark = self.reader.get_mark()
self.tokens.append(TokenClass(start_mark, end_mark))
def fetch_flow_sequence_end(self) -> None:
self.fetch_flow_collection_end(FlowSequenceEndToken)
def fetch_flow_mapping_end(self) -> None:
self.fetch_flow_collection_end(FlowMappingEndToken)
def fetch_flow_collection_end(self, TokenClass: Any) -> None:
# Reset possible simple key on the current level.
self.remove_possible_simple_key()
# Decrease the flow level.
try:
popped = self.flow_context.pop() # NOQA
except IndexError:
# We must not be in a list or object.
# Defer error handling to the parser.
pass
# No simple keys after ']' or '}'.
self.allow_simple_key = False
# Add FLOW-SEQUENCE-END or FLOW-MAPPING-END.
start_mark = self.reader.get_mark()
self.reader.forward()
end_mark = self.reader.get_mark()
self.tokens.append(TokenClass(start_mark, end_mark))
def fetch_flow_entry(self) -> None:
# Simple keys are allowed after ','.
self.allow_simple_key = True
# Reset possible simple key on the current level.
self.remove_possible_simple_key()
# Add FLOW-ENTRY.
start_mark = self.reader.get_mark()
self.reader.forward()
end_mark = self.reader.get_mark()
self.tokens.append(FlowEntryToken(start_mark, end_mark))
def fetch_block_entry(self) -> None:
# Block context needs additional checks.
if not self.flow_level:
# Are we allowed to start a new entry?
if not self.allow_simple_key:
raise ScannerError(
None,
None,
'sequence entries are not allowed here',
self.reader.get_mark(),
)
# We may need to add BLOCK-SEQUENCE-START.
if self.add_indent(self.reader.column):
mark = self.reader.get_mark()
self.tokens.append(BlockSequenceStartToken(mark, mark))
# It's an error for the block entry to occur in the flow context,
# but we let the parser detect this.
else:
pass
# Simple keys are allowed after '-'.
self.allow_simple_key = True
# Reset possible simple key on the current level.
self.remove_possible_simple_key()
# Add BLOCK-ENTRY.
start_mark = self.reader.get_mark()
self.reader.forward()
end_mark = self.reader.get_mark()
self.tokens.append(BlockEntryToken(start_mark, end_mark))
def fetch_key(self) -> None:
# Block context needs additional checks.
if not self.flow_level:
# Are we allowed to start a key (not necessarily a simple one)?
if not self.allow_simple_key:
raise ScannerError(
None,
None,
'mapping keys are not allowed here',
self.reader.get_mark(),
)
# We may need to add BLOCK-MAPPING-START.
if self.add_indent(self.reader.column):
mark = self.reader.get_mark()
self.tokens.append(BlockMappingStartToken(mark, mark))
# Simple keys are allowed after '?' in the block context.
self.allow_simple_key = not self.flow_level
# Reset possible simple key on the current level.
self.remove_possible_simple_key()
# Add KEY.
start_mark = self.reader.get_mark()
self.reader.forward()
end_mark = self.reader.get_mark()
self.tokens.append(KeyToken(start_mark, end_mark))
def fetch_value(self) -> None:
# Do we determine a simple key?
if self.flow_level in self.possible_simple_keys:
# Add KEY.
key = self.possible_simple_keys[self.flow_level]
del self.possible_simple_keys[self.flow_level]
self.tokens.insert(
key.token_number - self.tokens_taken,
KeyToken(key.mark, key.mark),
)
# If this key starts a new block mapping, we need to add
# BLOCK-MAPPING-START.
if not self.flow_level:
if self.add_indent(key.column):
self.tokens.insert(
key.token_number - self.tokens_taken,
BlockMappingStartToken(key.mark, key.mark),
)
# There cannot be two simple keys one after another.
self.allow_simple_key = False
# It must be a part of a complex key.
else:
# Block context needs additional checks.
# (Do we really need them? They will be caught by the parser
# anyway.)
if not self.flow_level:
# We are allowed to start a complex value if and only if
# we can start a simple key.
if not self.allow_simple_key:
raise ScannerError(
None,
None,
'mapping values are not allowed here',
self.reader.get_mark(),
)
# If this value starts a new block mapping, we need to add
# BLOCK-MAPPING-START. It will be detected as an error later by
# the parser.
if not self.flow_level:
if self.add_indent(self.reader.column):
mark = self.reader.get_mark()
self.tokens.append(BlockMappingStartToken(mark, mark))
# Simple keys are allowed after ':' in the block context.
self.allow_simple_key = not self.flow_level
# Reset possible simple key on the current level.
self.remove_possible_simple_key()
# Add VALUE.
start_mark = self.reader.get_mark()
self.reader.forward()
end_mark = self.reader.get_mark()
self.tokens.append(ValueToken(start_mark, end_mark))
def fetch_alias(self) -> None:
# ALIAS could be a simple key.
self.save_possible_simple_key()
# No simple keys after ALIAS.
self.allow_simple_key = False
# Scan and add ALIAS.
self.tokens.append(self.scan_anchor(AliasToken))
def fetch_anchor(self) -> None:
# ANCHOR could start a simple key.
self.save_possible_simple_key()
# No simple keys after ANCHOR.
self.allow_simple_key = False
# Scan and add ANCHOR.
self.tokens.append(self.scan_anchor(AnchorToken))
def fetch_tag(self) -> None:
# TAG could start a simple key.
self.save_possible_simple_key()
# No simple keys after TAG.
self.allow_simple_key = False
# Scan and add TAG.
self.tokens.append(self.scan_tag())
def fetch_literal(self) -> None:
self.fetch_block_scalar(style='|')
def fetch_folded(self) -> None:
self.fetch_block_scalar(style='>')
def fetch_block_scalar(self, style: Any) -> None:
# A simple key may follow a block scalar.
self.allow_simple_key = True
# Reset possible simple key on the current level.
self.remove_possible_simple_key()
# Scan and add SCALAR.
self.tokens.append(self.scan_block_scalar(style))
def fetch_single(self) -> None:
self.fetch_flow_scalar(style="'")
def fetch_double(self) -> None:
self.fetch_flow_scalar(style='"')
def fetch_flow_scalar(self, style: Any) -> None:
# A flow scalar could be a simple key.
self.save_possible_simple_key()
# No simple keys after flow scalars.
self.allow_simple_key = False
# Scan and add SCALAR.
self.tokens.append(self.scan_flow_scalar(style))
def fetch_plain(self) -> None:
# A plain scalar could be a simple key.
self.save_possible_simple_key()
# No simple keys after plain scalars. But note that `scan_plain` will
# change this flag if the scan is finished at the beginning of the
# line.
self.allow_simple_key = False
# Scan and add SCALAR. May change `allow_simple_key`.
self.tokens.append(self.scan_plain())
# Checkers.
def check_directive(self) -> Any:
# DIRECTIVE: ^ '%' ...
# The '%' indicator is already checked.
if self.reader.column == 0:
return True
return None
def check_document_start(self) -> Any:
# DOCUMENT-START: ^ '---' (' '|'\n')
if self.reader.column == 0:
if (
self.reader.prefix(3) == '---'
and self.reader.peek(3) in _THE_END_SPACE_TAB
):
return True
return None
def check_document_end(self) -> Any:
# DOCUMENT-END: ^ '...' (' '|'\n')
if self.reader.column == 0:
if (
self.reader.prefix(3) == '...'
and self.reader.peek(3) in _THE_END_SPACE_TAB
):
return True
return None
def check_block_entry(self) -> Any:
# BLOCK-ENTRY: '-' (' '|'\n')
return self.reader.peek(1) in _THE_END_SPACE_TAB
def check_key(self) -> Any:
# KEY(flow context): '?'
if bool(self.flow_level):
return True
# KEY(block context): '?' (' '|'\n')
return self.reader.peek(1) in _THE_END_SPACE_TAB
def check_value(self) -> Any:
# VALUE(flow context): ':'
if self.scanner_processing_version == (1, 1):
if bool(self.flow_level):
return True
else:
if bool(self.flow_level):
if self.flow_context[-1] == '[':
if self.reader.peek(1) not in _THE_END_SPACE_TAB:
return False
elif self.tokens and isinstance(self.tokens[-1], ValueToken):
# mapping flow context scanning a value token
if self.reader.peek(1) not in _THE_END_SPACE_TAB:
return False
return True
# VALUE(block context): ':' (' '|'\n')
return self.reader.peek(1) in _THE_END_SPACE_TAB
def check_plain(self) -> Any:
# A plain scalar may start with any non-space character except:
# '-', '?', ':', ',', '[', ']', '{', '}',
# '#', '&', '*', '!', '|', '>', '\'', '\"',
# '%', '@', '`'.
#
# It may also start with
# '-', '?', ':'
# if it is followed by a non-space character.
#
# Note that we limit the last rule to the block context (except the
# '-' character) because we want the flow context to be space
# independent.
srp = self.reader.peek
ch = srp()
if self.scanner_processing_version == (1, 1):
return ch not in '\0 \t\r\n\x85\u2028\u2029-?:,[]{}#&*!|>\'"%@`' or (
srp(1) not in _THE_END_SPACE_TAB
and (ch == '-' or (not self.flow_level and ch in '?:'))
)
# YAML 1.2
if ch not in '\0 \t\r\n\x85\u2028\u2029-?:,[]{}#&*!|>\'"%@`':
# ################### ^ ???
return True
ch1 = srp(1)
if ch == '-' and ch1 not in _THE_END_SPACE_TAB:
return True
if ch == ':' and bool(self.flow_level) and ch1 not in _SPACE_TAB:
return True
return srp(1) not in _THE_END_SPACE_TAB and (
ch == '-' or (not self.flow_level and ch in '?:')
)
# Scanners.
def scan_to_next_token(self) -> Any:
# We ignore spaces, line breaks and comments.
# If we find a line break in the block context, we set the flag
# `allow_simple_key` on.
# The byte order mark is stripped if it's the first character in the
# stream. We do not yet support BOM inside the stream as the
# specification requires. Any such mark will be considered as a part
# of the document.
#
# TODO: We need to make tab handling rules more sane. A good rule is
# Tabs cannot precede tokens
# BLOCK-SEQUENCE-START, BLOCK-MAPPING-START, BLOCK-END,
# KEY(block), VALUE(block), BLOCK-ENTRY
# So the checking code is
# if <TAB>:
# self.allow_simple_keys = False
# We also need to add the check for `allow_simple_keys == True` to
# `unwind_indent` before issuing BLOCK-END.
# Scanners for block, flow, and plain scalars need to be modified.
srp = self.reader.peek
srf = self.reader.forward
if self.reader.index == 0 and srp() == '\uFEFF':
srf()
found = False
_the_end = _THE_END
white_space = ' \t' if self.flow_level > 0 else ' '
while not found:
while srp() in white_space:
srf()
if srp() == '#':
while srp() not in _the_end:
srf()
if self.scan_line_break():
if not self.flow_level:
self.allow_simple_key = True
else:
found = True
return None
def scan_directive(self) -> Any:
# See the specification for details.
srp = self.reader.peek
srf = self.reader.forward
start_mark = self.reader.get_mark()
srf()
name = self.scan_directive_name(start_mark)
value = None
if name == 'YAML':
value = self.scan_yaml_directive_value(start_mark)
end_mark = self.reader.get_mark()
elif name == 'TAG':
value = self.scan_tag_directive_value(start_mark)
end_mark = self.reader.get_mark()
else:
end_mark = self.reader.get_mark()
while srp() not in _THE_END:
srf()
self.scan_directive_ignored_line(start_mark)
return DirectiveToken(name, value, start_mark, end_mark)
def scan_directive_name(self, start_mark: Any) -> Any:
# See the specification for details.
length = 0
srp = self.reader.peek
ch = srp(length)
while '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' or ch in '-_:.':
length += 1
ch = srp(length)
if not length:
raise ScannerError(
'while scanning a directive',
start_mark,
f'expected alphabetic or numeric character, but found {ch!r}',
self.reader.get_mark(),
)
value = self.reader.prefix(length)
self.reader.forward(length)
ch = srp()
if ch not in '\0 \r\n\x85\u2028\u2029':
raise ScannerError(
'while scanning a directive',
start_mark,
f'expected alphabetic or numeric character, but found {ch!r}',
self.reader.get_mark(),
)
return value
def scan_yaml_directive_value(self, start_mark: Any) -> Any:
# See the specification for details.
srp = self.reader.peek
srf = self.reader.forward
while srp() == ' ':
srf()
major = self.scan_yaml_directive_number(start_mark)
if srp() != '.':
raise ScannerError(
'while scanning a directive',
start_mark,
f"expected a digit or '.', but found {srp()!r}",
self.reader.get_mark(),
)
srf()
minor = self.scan_yaml_directive_number(start_mark)
if srp() not in '\0 \r\n\x85\u2028\u2029':
raise ScannerError(
'while scanning a directive',
start_mark,
f"expected a digit or '.', but found {srp()!r}",
self.reader.get_mark(),
)
self.yaml_version = (major, minor)
self.loader.doc_infos[-1].doc_version = Version(major, minor)
return self.yaml_version
def scan_yaml_directive_number(self, start_mark: Any) -> Any:
# See the specification for details.
srp = self.reader.peek
srf = self.reader.forward
ch = srp()
if not ('0' <= ch <= '9'):
raise ScannerError(
'while scanning a directive',
start_mark,
f'expected a digit, but found {ch!r}',
self.reader.get_mark(),
)
length = 0
while '0' <= srp(length) <= '9':
length += 1
value = int(self.reader.prefix(length))
srf(length)
return value
def scan_tag_directive_value(self, start_mark: Any) -> Any:
# See the specification for details.
srp = self.reader.peek
srf = self.reader.forward
while srp() == ' ':
srf()
handle = self.scan_tag_directive_handle(start_mark)
while srp() == ' ':
srf()
prefix = self.scan_tag_directive_prefix(start_mark)
ret_val = (handle, prefix)
self.tag_directives.append(ret_val)
return ret_val
def scan_tag_directive_handle(self, start_mark: Any) -> Any:
# See the specification for details.
value = self.scan_tag_handle('directive', start_mark)
ch = self.reader.peek()
if ch != ' ':
raise ScannerError(
'while scanning a directive',
start_mark,
f"expected ' ', but found {ch!r}",
self.reader.get_mark(),
)
return value
def scan_tag_directive_prefix(self, start_mark: Any) -> Any:
# See the specification for details.
value = self.scan_tag_uri('directive', start_mark)
ch = self.reader.peek()
if ch not in '\0 \r\n\x85\u2028\u2029':
raise ScannerError(
'while scanning a directive',
start_mark,
f"expected ' ', but found {ch!r}",
self.reader.get_mark(),
)
return value
def scan_directive_ignored_line(self, start_mark: Any) -> None:
# See the specification for details.
srp = self.reader.peek
srf = self.reader.forward
while srp() == ' ':
srf()
if srp() == '#':
while srp() not in _THE_END:
srf()
ch = srp()
if ch not in _THE_END:
raise ScannerError(
'while scanning a directive',
start_mark,
f'expected a comment or a line break, but found {ch!r}',
self.reader.get_mark(),
)
self.scan_line_break()
def scan_anchor(self, TokenClass: Any) -> Any:
# The specification does not restrict characters for anchors and
# aliases. This may lead to problems, for instance, the document:
# [ *alias, value ]
# can be interpreted in two ways, as
# [ "value" ]
# and
# [ *alias , "value" ]
# Therefore we restrict aliases to numbers and ASCII letters.
srp = self.reader.peek
start_mark = self.reader.get_mark()
indicator = srp()
if indicator == '*':
name = 'alias'
else:
name = 'anchor'
self.reader.forward()
length = 0
ch = srp(length)
# while '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \
# or ch in '-_':
while check_anchorname_char(ch):
length += 1
ch = srp(length)
if not length:
raise ScannerError(
f'while scanning an {name!s}',
start_mark,
f'expected alphabetic or numeric character, but found {ch!r}',
self.reader.get_mark(),
)
value = self.reader.prefix(length)
self.reader.forward(length)
# ch1 = ch
# ch = srp() # no need to peek, ch is already set
# assert ch1 == ch
if ch not in '\0 \t\r\n\x85\u2028\u2029?:,[]{}%@`':
raise ScannerError(
f'while scanning an {name!s}',
start_mark,
f'expected alphabetic or numeric character, but found {ch!r}',
self.reader.get_mark(),
)
end_mark = self.reader.get_mark()
return TokenClass(value, start_mark, end_mark)
def scan_tag(self) -> Any:
# See the specification for details.
srp = self.reader.peek
start_mark = self.reader.get_mark()
ch = srp(1)
short_handle = '!'
if ch == '!':
short_handle = '!!'
self.reader.forward()
srp = self.reader.peek
ch = srp(1)
if ch == '<':
handle = None
self.reader.forward(2)
suffix = self.scan_tag_uri('tag', start_mark)
if srp() != '>':
raise ScannerError(
'while parsing a tag',
start_mark,
f"expected '>' but found {srp()!r}",
self.reader.get_mark(),
)
self.reader.forward()
elif ch in _THE_END_SPACE_TAB:
handle = None
suffix = short_handle
self.reader.forward()
else:
length = 1
use_handle = False
while ch not in '\0 \r\n\x85\u2028\u2029':
if ch == '!':
use_handle = True
break
length += 1
ch = srp(length)
handle = short_handle
if use_handle:
handle = self.scan_tag_handle('tag', start_mark)
else:
handle = short_handle
self.reader.forward()
suffix = self.scan_tag_uri('tag', start_mark)
ch = srp()
if ch not in '\0 \r\n\x85\u2028\u2029':
raise ScannerError(
'while scanning a tag',
start_mark,
f"expected ' ', but found {ch!r}",
self.reader.get_mark(),
)
value = (handle, suffix)
end_mark = self.reader.get_mark()
return TagToken(value, start_mark, end_mark)
def scan_block_scalar(self, style: Any, rt: Optional[bool] = False) -> Any:
# See the specification for details.
srp = self.reader.peek
if style == '>':
folded = True
else:
folded = False
chunks: List[Any] = []
start_mark = self.reader.get_mark()
# Scan the header.
self.reader.forward()
chomping, increment = self.scan_block_scalar_indicators(start_mark)
# block scalar comment e.g. : |+ # comment text
block_scalar_comment = self.scan_block_scalar_ignored_line(start_mark)
# Determine the indentation level and go to the first non-empty line.
min_indent = self.indent + 1
if increment is None:
# no increment and top level, min_indent could be 0
if min_indent < 1 and (
style not in '|>'
or (self.scanner_processing_version == (1, 1))
and getattr(
self.loader,
'top_level_block_style_scalar_no_indent_error_1_1',
False,
)
):
min_indent = 1
breaks, max_indent, end_mark = self.scan_block_scalar_indentation()
indent = max(min_indent, max_indent)
else:
if min_indent < 1:
min_indent = 1
indent = min_indent + increment - 1
breaks, end_mark = self.scan_block_scalar_breaks(indent)
line_break = ""
# Scan the inner part of the block scalar.
while self.reader.column == indent and srp() != '\0':
chunks.extend(breaks)
leading_non_space = srp() not in ' \t'
length = 0
while srp(length) not in _THE_END:
length += 1
chunks.append(self.reader.prefix(length))
self.reader.forward(length)
line_break = self.scan_line_break()
breaks, end_mark = self.scan_block_scalar_breaks(indent)
if style in '|>' and min_indent == 0:
# at the beginning of a line, if in block style, check whether this is
# the end of the document or the start of a new document
if self.check_document_start() or self.check_document_end():
break
if self.reader.column == indent and srp() != '\0':
# Unfortunately, folding rules are ambiguous.
#
# This is the folding according to the specification:
if rt and folded and line_break == '\n':
chunks.append('\a')
if (
folded
and line_break == '\n'
and leading_non_space
and srp() not in ' \t'
):
if not breaks:
chunks.append(' ')
else:
chunks.append(line_break)
# This is Clark Evans's interpretation (also in the spec
# examples):
#
# if folded and line_break == '\n':
# if not breaks:
# if srp() not in ' \t':
# chunks.append(' ')
# else:
# chunks.append(line_break)
# else:
# chunks.append(line_break)
else:
break
# Process trailing line breaks. The 'chomping' setting determines
# whether they are included in the value.
trailing: List[Any] = []
if chomping in [None, True]:
chunks.append(line_break)
if chomping is True:
chunks.extend(breaks)
elif chomping in [None, False]:
trailing.extend(breaks)
# We are done.
token = ScalarToken("".join(chunks), False, start_mark, end_mark, style)
if self.loader is not None:
comment_handler = getattr(self.loader, 'comment_handling', False)
if comment_handler is None:
if block_scalar_comment is not None:
token.add_pre_comments([block_scalar_comment])
if len(trailing) > 0:
# Eat whitespaces and comments until we reach the next token.
if self.loader is not None:
comment_handler = getattr(self.loader, 'comment_handling', None)
if comment_handler is not None:
line = end_mark.line - len(trailing)
for x in trailing:
assert x[-1] == '\n'
self.comments.add_blank_line(x, 0, line) # type: ignore
line += 1
comment = self.scan_to_next_token()
while comment:
trailing.append(' ' * comment[1].column + comment[0])
comment = self.scan_to_next_token()
if self.loader is not None:
comment_handler = getattr(self.loader, 'comment_handling', False)
if comment_handler is None:
# Keep track of the trailing whitespace and following comments
# as a comment token, if it isn't all included in the actual value.
comment_end_mark = self.reader.get_mark()
comment = CommentToken(
"".join(trailing), end_mark, comment_end_mark
)
token.add_post_comment(comment)
return token
def scan_block_scalar_indicators(self, start_mark: Any) -> Any:
# See the specification for details.
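# The header may carry a chomping indicator ('+' keep trailing breaks,
# '-' strip them) and/or an explicit indentation indicator (a digit 1-9);
# the two may appear in either order.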
srp = self.reader.peek
chomping = None
increment = None
ch = srp()
if ch in '+-':
if ch == '+':
chomping = True
else:
chomping = False
self.reader.forward()
ch = srp()
if ch in '0123456789':
increment = int(ch)
if increment == 0:
raise ScannerError(
'while scanning a block scalar',
start_mark,
'expected indentation indicator in the range 1-9, '
'but found 0',
self.reader.get_mark(),
)
self.reader.forward()
elif ch in '0123456789':
increment = int(ch)
if increment == 0:
raise ScannerError(
'while scanning a block scalar',
start_mark,
'expected indentation indicator in the range 1-9, ' 'but found 0',
self.reader.get_mark(),
)
self.reader.forward()
ch = srp()
if ch in '+-':
if ch == '+':
chomping = True
else:
chomping = False
self.reader.forward()
ch = srp()
if ch not in '\0 \r\n\x85\u2028\u2029':
raise ScannerError(
'while scanning a block scalar',
start_mark,
f'expected chomping or indentation indicators, but found {ch!r}',
self.reader.get_mark(),
)
return chomping, increment
def scan_block_scalar_ignored_line(self, start_mark: Any) -> Any:
# See the specification for details.
srp = self.reader.peek
srf = self.reader.forward
prefix = ''
comment = None
while srp() == ' ':
prefix += srp()
srf()
if srp() == '#':
comment = prefix
while srp() not in _THE_END:
comment += srp()
srf()
ch = srp()
if ch not in _THE_END:
raise ScannerError(
'while scanning a block scalar',
start_mark,
f'expected a comment or a line break, but found {ch!r}',
self.reader.get_mark(),
)
self.scan_line_break()
return comment
def scan_block_scalar_indentation(self) -> Any:
# See the specification for details.
srp = self.reader.peek
srf = self.reader.forward
chunks = []
first_indent = -1
max_indent = 0
end_mark = self.reader.get_mark()
while srp() in ' \r\n\x85\u2028\u2029':
if srp() != ' ':
if first_indent < 0:
first_indent = self.reader.column
chunks.append(self.scan_line_break())
end_mark = self.reader.get_mark()
else:
srf()
if self.reader.column > max_indent:
max_indent = self.reader.column
if first_indent > 0 and max_indent > first_indent:
start_mark = self.reader.get_mark()
raise ScannerError(
'more indented follow up line than first in a block scalar',
start_mark,
)
return chunks, max_indent, end_mark
def scan_block_scalar_breaks(self, indent: int) -> Any:
# See the specification for details.
chunks = []
srp = self.reader.peek
srf = self.reader.forward
end_mark = self.reader.get_mark()
while self.reader.column < indent and srp() == ' ':
srf()
while srp() in '\r\n\x85\u2028\u2029':
chunks.append(self.scan_line_break())
end_mark = self.reader.get_mark()
while self.reader.column < indent and srp() == ' ':
srf()
return chunks, end_mark
def scan_flow_scalar(self, style: Any) -> Any:
# See the specification for details.
# Note that we loosen indentation rules for quoted scalars. Quoted
# scalars don't need to adhere to indentation because " and ' clearly
# mark the beginning and the end of them. Therefore we are less
# restrictive than the specification requires. We only need to check
# that document separators are not included in scalars.
if style == '"':
double = True
else:
double = False
srp = self.reader.peek
chunks: List[Any] = []
start_mark = self.reader.get_mark()
quote = srp()
self.reader.forward()
chunks.extend(self.scan_flow_scalar_non_spaces(double, start_mark))
while srp() != quote:
chunks.extend(self.scan_flow_scalar_spaces(double, start_mark))
chunks.extend(self.scan_flow_scalar_non_spaces(double, start_mark))
self.reader.forward()
end_mark = self.reader.get_mark()
return ScalarToken("".join(chunks), False, start_mark, end_mark, style)
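# Single-character escape sequences recognized in double-quoted scalars,
# mapping the character after '\' to its replacement
# (e.g. 'n' -> '\n', 'L' -> U+2028 LINE SEPARATOR).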
ESCAPE_REPLACEMENTS = {
'0': '\0',
'a': '\x07',
'b': '\x08',
't': '\x09',
'\t': '\x09',
'n': '\x0A',
'v': '\x0B',
'f': '\x0C',
'r': '\x0D',
'e': '\x1B',
' ': '\x20',
'"': '"',
'/': '/', # as per http://www.json.org/
'\\': '\\',
'N': '\x85',
'_': '\xA0',
'L': '\u2028',
'P': '\u2029',
}
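# Escapes that are followed by a fixed number of hexadecimal digits:
# \xXX (2), \uXXXX (4), \UXXXXXXXX (8).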
ESCAPE_CODES = {'x': 2, 'u': 4, 'U': 8}
def scan_flow_scalar_non_spaces(self, double: Any, start_mark: Any) -> Any:
# See the specification for details.
chunks: List[Any] = []
srp = self.reader.peek
srf = self.reader.forward
while True:
length = 0
while srp(length) not in ' \n\'"\\\0\t\r\x85\u2028\u2029':
length += 1
if length != 0:
chunks.append(self.reader.prefix(length))
srf(length)
ch = srp()
if not double and ch == "'" and srp(1) == "'":
chunks.append("'")
srf(2)
elif (double and ch == "'") or (not double and ch in '"\\'):
chunks.append(ch)
srf()
elif double and ch == '\\':
srf()
ch = srp()
if ch in self.ESCAPE_REPLACEMENTS:
chunks.append(self.ESCAPE_REPLACEMENTS[ch])
srf()
elif ch in self.ESCAPE_CODES:
length = self.ESCAPE_CODES[ch]
srf()
for k in range(length):
if srp(k) not in '0123456789ABCDEFabcdef':
raise ScannerError(
'while scanning a double-quoted scalar',
start_mark,
f'expected escape sequence of {length:d} '
f'hexadecimal numbers, but found {srp(k)!r}',
self.reader.get_mark(),
)
code = int(self.reader.prefix(length), 16)
chunks.append(chr(code))
srf(length)
elif ch in '\n\r\x85\u2028\u2029':
self.scan_line_break()
chunks.extend(self.scan_flow_scalar_breaks(double, start_mark))
else:
raise ScannerError(
'while scanning a double-quoted scalar',
start_mark,
f'found unknown escape character {ch!r}',
self.reader.get_mark(),
)
else:
return chunks
def scan_flow_scalar_spaces(self, double: Any, start_mark: Any) -> Any:
# See the specification for details.
srp = self.reader.peek
chunks = []
length = 0
while srp(length) in ' \t':
length += 1
whitespaces = self.reader.prefix(length)
self.reader.forward(length)
ch = srp()
if ch == '\0':
raise ScannerError(
'while scanning a quoted scalar',
start_mark,
'found unexpected end of stream',
self.reader.get_mark(),
)
elif ch in '\r\n\x85\u2028\u2029':
line_break = self.scan_line_break()
breaks = self.scan_flow_scalar_breaks(double, start_mark)
if line_break != '\n':
chunks.append(line_break)
elif not breaks:
chunks.append(' ')
chunks.extend(breaks)
else:
chunks.append(whitespaces)
return chunks
def scan_flow_scalar_breaks(self, double: Any, start_mark: Any) -> Any:
# See the specification for details.
chunks: List[Any] = []
srp = self.reader.peek
srf = self.reader.forward
while True:
# Instead of checking indentation, we check for document
# separators.
prefix = self.reader.prefix(3)
if (prefix == '---' or prefix == '...') and srp(3) in _THE_END_SPACE_TAB:
raise ScannerError(
'while scanning a quoted scalar',
start_mark,
'found unexpected document separator',
self.reader.get_mark(),
)
while srp() in ' \t':
srf()
if srp() in '\r\n\x85\u2028\u2029':
chunks.append(self.scan_line_break())
else:
return chunks
def scan_plain(self) -> Any:
# See the specification for details.
# We add an additional restriction for the flow context:
# plain scalars in the flow context cannot contain ',', ': ' and '?'.
# We also keep track of the `allow_simple_key` flag here.
# Indentation rules are loosened for the flow context.
srp = self.reader.peek
srf = self.reader.forward
chunks: List[Any] = []
start_mark = self.reader.get_mark()
end_mark = start_mark
indent = self.indent + 1
# We allow zero indentation for scalars, but then we need to check for
# document separators at the beginning of the line.
# if indent == 0:
# indent = 1
spaces: List[Any] = []
while True:
length = 0
if srp() == '#':
break
while True:
ch = srp(length)
if False and ch == ':' and srp(length + 1) == ',':
break
elif ch == ':' and srp(length + 1) not in _THE_END_SPACE_TAB:
pass
elif ch == '?' and self.scanner_processing_version != (1, 1):
pass
elif (
ch in _THE_END_SPACE_TAB
or (
not self.flow_level
and ch == ':'
and srp(length + 1) in _THE_END_SPACE_TAB
)
or (self.flow_level and ch in ',:?[]{}')
):
break
length += 1
# It's not clear what we should do with ':' in the flow context.
if (
self.flow_level
and ch == ':'
and srp(length + 1) not in '\0 \t\r\n\x85\u2028\u2029,[]{}'
):
srf(length)
raise ScannerError(
'while scanning a plain scalar',
start_mark,
"found unexpected ':'",
self.reader.get_mark(),
'Please check '
'http://pyyaml.org/wiki/YAMLColonInFlowContext '
'for details.',
)
if length == 0:
break
self.allow_simple_key = False
chunks.extend(spaces)
chunks.append(self.reader.prefix(length))
srf(length)
end_mark = self.reader.get_mark()
spaces = self.scan_plain_spaces(indent, start_mark)
if (
not spaces
or srp() == '#'
or (not self.flow_level and self.reader.column < indent)
):
break
token = ScalarToken("".join(chunks), True, start_mark, end_mark)
# getattr provides a non-None default so the C loader, which cannot
# handle comments, will not make a CommentToken
if self.loader is not None:
comment_handler = getattr(self.loader, 'comment_handling', False)
if comment_handler is None:
if spaces and spaces[0] == '\n':
# Create a comment token to preserve the trailing line breaks.
comment = CommentToken("".join(spaces) + '\n', start_mark, end_mark)
token.add_post_comment(comment)
elif comment_handler is not False:
line = start_mark.line + 1
for ch in spaces:
if ch == '\n':
self.comments.add_blank_line('\n', 0, line) # type: ignore
line += 1
return token
def scan_plain_spaces(self, indent: Any, start_mark: Any) -> Any:
# See the specification for details.
# The specification is really confusing about tabs in plain scalars.
# We just forbid them completely. Do not use tabs in YAML!
srp = self.reader.peek
srf = self.reader.forward
chunks = []
length = 0
while srp(length) in ' ':
length += 1
whitespaces = self.reader.prefix(length)
self.reader.forward(length)
ch = srp()
if ch in '\r\n\x85\u2028\u2029':
line_break = self.scan_line_break()
self.allow_simple_key = True
prefix = self.reader.prefix(3)
if (prefix == '---' or prefix == '...') and srp(3) in _THE_END_SPACE_TAB:
return
breaks = []
while srp() in ' \r\n\x85\u2028\u2029':
if srp() == ' ':
srf()
else:
breaks.append(self.scan_line_break())
prefix = self.reader.prefix(3)
if (prefix == '---' or prefix == '...') and srp(
3
) in _THE_END_SPACE_TAB:
return
if line_break != '\n':
chunks.append(line_break)
elif not breaks:
chunks.append(' ')
chunks.extend(breaks)
elif whitespaces:
chunks.append(whitespaces)
return chunks
def scan_tag_handle(self, name: Any, start_mark: Any) -> Any:
# See the specification for details.
# For some strange reason, the specification does not allow '_' in
# tag handles. I have allowed it anyway.
srp = self.reader.peek
ch = srp()
if ch != '!':
raise ScannerError(
f'while scanning an {name!s}',
start_mark,
f"expected '!', but found {ch!r}",
self.reader.get_mark(),
)
length = 1
ch = srp(length)
if ch != ' ':
while (
'0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' or ch in '-_'
):
length += 1
ch = srp(length)
if ch != '!':
self.reader.forward(length)
raise ScannerError(
f'while scanning an {name!s}',
start_mark,
f"expected '!' but found {ch!r}",
self.reader.get_mark(),
)
length += 1
value = self.reader.prefix(length)
self.reader.forward(length)
return value
def scan_tag_uri(self, name: Any, start_mark: Any) -> Any:
# See the specification for details.
# Note: we do not check if URI is well-formed.
srp = self.reader.peek
chunks = []
length = 0
ch = srp(length)
while (
'0' <= ch <= '9'
or 'A' <= ch <= 'Z'
or 'a' <= ch <= 'z'
or ch in "-;/?:@&=+$,_.!~*'()[]%"
or ((self.scanner_processing_version > (1, 1)) and ch == '#')
):
if ch == '%':
chunks.append(self.reader.prefix(length))
self.reader.forward(length)
length = 0
chunks.append(self.scan_uri_escapes(name, start_mark))
else:
length += 1
ch = srp(length)
if length != 0:
chunks.append(self.reader.prefix(length))
self.reader.forward(length)
length = 0
if not chunks:
raise ScannerError(
f'while parsing an {name!s}',
start_mark,
f'expected URI, but found {ch!r}',
self.reader.get_mark(),
)
return "".join(chunks)
def scan_uri_escapes(self, name: Any, start_mark: Any) -> Any:
# See the specification for details.
srp = self.reader.peek
srf = self.reader.forward
code_bytes: List[Any] = []
mark = self.reader.get_mark()
while srp() == '%':
srf()
for k in range(2):
if srp(k) not in '0123456789ABCDEFabcdef':
raise ScannerError(
f'while scanning an {name!s}',
start_mark,
f'expected URI escape sequence of 2 hexadecimal numbers, '
f'but found {srp(k)!r}',
self.reader.get_mark(),
)
code_bytes.append(int(self.reader.prefix(2), 16))
srf(2)
try:
value = bytes(code_bytes).decode('utf-8')
except UnicodeDecodeError as exc:
raise ScannerError(
f'while scanning an {name!s}', start_mark, str(exc), mark
)
return value
def scan_line_break(self) -> Any:
# Transforms:
# '\r\n' : '\n'
# '\r' : '\n'
# '\n' : '\n'
# '\x85' : '\n'
# '\u2028' : '\u2028'
# '\u2029' : '\u2029'
# default : ''
ch = self.reader.peek()
if ch in '\r\n\x85':
if self.reader.prefix(2) == '\r\n':
self.reader.forward(2)
else:
self.reader.forward()
return '\n'
elif ch in '\u2028\u2029':
self.reader.forward()
return ch
return ""
class RoundTripScanner(Scanner):
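# A Scanner that preserves comments: CommentTokens are gathered from the
# token stream and attached to neighbouring tokens as pre/post comments
# so they can be round-tripped.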
def check_token(self, *choices: Any) -> bool:
# Check if the next token is one of the given types.
while self.need_more_tokens():
self.fetch_more_tokens()
self._gather_comments()
if len(self.tokens) > 0:
if not choices:
return True
for choice in choices:
if isinstance(self.tokens[0], choice):
return True
return False
def peek_token(self) -> Any:
# Return the next token, but do not remove it from the queue.
while self.need_more_tokens():
self.fetch_more_tokens()
self._gather_comments()
if len(self.tokens) > 0:
return self.tokens[0]
return None
def _gather_comments(self) -> Any:
"""combine multiple comment lines and assign to next non-comment-token"""
comments: List[Any] = []
if not self.tokens:
return comments
if isinstance(self.tokens[0], CommentToken):
comment = self.tokens.pop(0)
self.tokens_taken += 1
comments.append(comment)
while self.need_more_tokens():
self.fetch_more_tokens()
if not self.tokens:
return comments
if isinstance(self.tokens[0], CommentToken):
self.tokens_taken += 1
comment = self.tokens.pop(0)
# nprint('dropping2', comment)
comments.append(comment)
if len(comments) >= 1:
self.tokens[0].add_pre_comments(comments)
# pull in post comment on e.g. ':'
if not self.done and len(self.tokens) < 2:
self.fetch_more_tokens()
def get_token(self) -> Any:
# Return the next token.
while self.need_more_tokens():
self.fetch_more_tokens()
self._gather_comments()
if len(self.tokens) > 0:
# nprint('tk', self.tokens)
# only add a post comment to single-line tokens (scalar, value,
# FlowXEndToken); otherwise hidden stream tokens could get them
# (leave them and they will be pre comments for the next map/seq)
if (
len(self.tokens) > 1
and isinstance(
self.tokens[0],
(
ScalarToken,
ValueToken,
FlowSequenceEndToken,
FlowMappingEndToken,
),
)
and isinstance(self.tokens[1], CommentToken)
and self.tokens[0].end_mark.line == self.tokens[1].start_mark.line
):
self.tokens_taken += 1
c = self.tokens.pop(1)
self.fetch_more_tokens()
while len(self.tokens) > 1 and isinstance(self.tokens[1], CommentToken):
self.tokens_taken += 1
c1 = self.tokens.pop(1)
c.value = c.value + (' ' * c1.start_mark.column) + c1.value
self.fetch_more_tokens()
self.tokens[0].add_post_comment(c)
elif (
len(self.tokens) > 1
and isinstance(self.tokens[0], ScalarToken)
and isinstance(self.tokens[1], CommentToken)
and self.tokens[0].end_mark.line != self.tokens[1].start_mark.line
):
self.tokens_taken += 1
c = self.tokens.pop(1)
c.value = (
'\n' * (c.start_mark.line - self.tokens[0].end_mark.line)
+ (' ' * c.start_mark.column)
+ c.value
)
self.tokens[0].add_post_comment(c)
self.fetch_more_tokens()
while len(self.tokens) > 1 and isinstance(self.tokens[1], CommentToken):
self.tokens_taken += 1
c1 = self.tokens.pop(1)
c.value = c.value + (' ' * c1.start_mark.column) + c1.value
self.fetch_more_tokens()
self.tokens_taken += 1
return self.tokens.pop(0)
return None
def fetch_comment(self, comment: Any) -> None:
value, start_mark, end_mark = comment
while value and value[-1] == ' ':
# empty line within indented key context
# no need to update end-mark, that is not used
value = value[:-1]
self.tokens.append(CommentToken(value, start_mark, end_mark))
# scanner
def scan_to_next_token(self) -> Any:
# We ignore spaces, line breaks and comments.
# If we find a line break in the block context, we set the flag
# `allow_simple_key` on.
# The byte order mark is stripped if it's the first character in the
# stream. We do not yet support BOM inside the stream as the
# specification requires. Any such mark will be considered as a part
# of the document.
#
# TODO: We need to make tab handling rules more sane. A good rule is
# Tabs cannot precede tokens
# BLOCK-SEQUENCE-START, BLOCK-MAPPING-START, BLOCK-END,
# KEY(block), VALUE(block), BLOCK-ENTRY
# So the checking code is
# if <TAB>:
# self.allow_simple_keys = False
# We also need to add the check for `allow_simple_keys == True` to
# `unwind_indent` before issuing BLOCK-END.
# Scanners for block, flow, and plain scalars need to be modified.
srp = self.reader.peek
srf = self.reader.forward
if self.reader.index == 0 and srp() == '\uFEFF':
srf()
found = False
white_space = ' \t' if self.flow_level > 0 else ' '
while not found:
while srp() in white_space:
srf()
ch = srp()
if ch == '#':
start_mark = self.reader.get_mark()
comment = ch
srf()
while ch not in _THE_END:
ch = srp()
if ch == '\0': # don't gobble the end-of-stream character
# but add an explicit newline as "YAML processors should terminate
# the stream with an explicit line break
# https://yaml.org/spec/1.2/spec.html#id2780069
comment += '\n'
break
comment += ch
srf()
# gather any blank lines following the comment
ch = self.scan_line_break()
while len(ch) > 0:
comment += ch
ch = self.scan_line_break()
end_mark = self.reader.get_mark()
if not self.flow_level:
self.allow_simple_key = True
return comment, start_mark, end_mark
if self.scan_line_break() != '':
start_mark = self.reader.get_mark()
if not self.flow_level:
self.allow_simple_key = True
ch = srp()
if ch == '\n': # empty toplevel lines
start_mark = self.reader.get_mark()
comment = ""
while ch:
ch = self.scan_line_break(empty_line=True)
comment += ch
if srp() == '#':
# empty line followed by indented real comment
comment = comment.rsplit('\n', 1)[0] + '\n'
end_mark = self.reader.get_mark()
return comment, start_mark, end_mark
else:
found = True
return None
def scan_line_break(self, empty_line: bool = False) -> Text:
# Transforms:
# '\r\n' : '\n'
# '\r' : '\n'
# '\n' : '\n'
# '\x85' : '\n'
# '\u2028' : '\u2028'
# '\u2029' : '\u2029'
# default : ''
ch: Text = self.reader.peek()
if ch in '\r\n\x85':
if self.reader.prefix(2) == '\r\n':
self.reader.forward(2)
else:
self.reader.forward()
return '\n'
elif ch in '\u2028\u2029':
self.reader.forward()
return ch
elif empty_line and ch in '\t ':
self.reader.forward()
return ch
return ""
def scan_block_scalar(self, style: Any, rt: Optional[bool] = True) -> Any:
return Scanner.scan_block_scalar(self, style, rt=rt)
def scan_uri_escapes(self, name: Any, start_mark: Any) -> Any:
"""
The RoundTripScanner doesn't do URI escaping.
"""
# See the specification for details.
srp = self.reader.peek
srf = self.reader.forward
code_bytes: List[Any] = []
chunk = ''
mark = self.reader.get_mark()
while srp() == '%':
chunk += '%'
srf()
for k in range(2):
if srp(k) not in '0123456789ABCDEFabcdef':
raise ScannerError(
f'while scanning an {name!s}',
start_mark,
f'expected URI escape sequence of 2 hexadecimal numbers, '
f'but found {srp(k)!r}',
self.reader.get_mark(),
)
code_bytes.append(int(self.reader.prefix(2), 16))
chunk += self.reader.prefix(2)
srf(2)
try:
_ = bytes(code_bytes).decode('utf-8')
except UnicodeDecodeError as exc:
raise ScannerError(
f'while scanning an {name!s}', start_mark, str(exc), mark
)
return chunk
# comment handling 2021, differentiation not needed
VALUECMNT = 0
KEYCMNT = 0 # 1
# TAGCMNT = 2
# ANCHORCMNT = 3
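# Comment records used by the split-comment scanner below: each stores the
# comment text, its position, and a marker showing whether it has already
# been assigned to a token.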
class CommentBase:
__slots__ = (
'value',
'line',
'column',
'used',
'function',
'fline',
'ufun',
'uline',
)
def __init__(self, value: Any, line: Any, column: Any) -> None:
self.value = value
self.line = line
self.column = column
self.used = ' '
if _debug != 0:
import inspect
info = inspect.getframeinfo(inspect.stack()[3][0])
self.function = info.function
self.fline = info.lineno
self.ufun = None
self.uline = None
def set_used(self, v: Any = '+') -> None:
self.used = v
if _debug != 0:
import inspect
info = inspect.getframeinfo(inspect.stack()[1][0])
self.ufun = info.function # type: ignore
self.uline = info.lineno # type: ignore
def set_assigned(self) -> None:
self.used = '|'
def __str__(self) -> str:
return f'{self.value}'
def __repr__(self) -> str:
return f'{self.value!r}'
def info(self) -> str:
xv = self.value + '"'
name = self.name # type: ignore
return (
f'{name}{self.used} {self.line:2}:{self.column:<2} "{xv:40s} '
f'{self.function}:{self.fline} {self.ufun}:{self.uline}'
)
class EOLComment(CommentBase):
name = 'EOLC'
def __init__(self, value: Any, line: Any, column: Any) -> None:
super().__init__(value, line, column)
class FullLineComment(CommentBase):
name = 'FULL'
def __init__(self, value: Any, line: Any, column: Any) -> None:
super().__init__(value, line, column)
class BlankLineComment(CommentBase):
name = 'BLNK'
def __init__(self, value: Any, line: Any, column: Any) -> None:
super().__init__(value, line, column)
class ScannedComments:
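# Comments collected while scanning, keyed by line number; `unused` lists
# the lines whose comments have not yet been attached to a token.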
def __init__(self: Any) -> None:
self.comments = {} # type: ignore
self.unused = [] # type: ignore
def add_eol_comment(self, comment: Any, column: Any, line: Any) -> Any:
# info = inspect.getframeinfo(inspect.stack()[1][0])
if comment.count('\n') == 1:
assert comment[-1] == '\n'
else:
assert '\n' not in comment
self.comments[line] = retval = EOLComment(comment[:-1], line, column)
self.unused.append(line)
return retval
def add_blank_line(self, comment: Any, column: Any, line: Any) -> Any:
# info = inspect.getframeinfo(inspect.stack()[1][0])
assert comment.count('\n') == 1 and comment[-1] == '\n'
assert line not in self.comments
self.comments[line] = retval = BlankLineComment(comment[:-1], line, column)
self.unused.append(line)
return retval
def add_full_line_comment(self, comment: Any, column: Any, line: Any) -> Any:
# info = inspect.getframeinfo(inspect.stack()[1][0])
assert comment.count('\n') == 1 and comment[-1] == '\n'
# if comment.startswith('# C12'):
# raise
# this raises in line 2127 fro 330
self.comments[line] = retval = FullLineComment(comment[:-1], line, column)
self.unused.append(line)
return retval
def __getitem__(self, idx: Any) -> Any:
return self.comments[idx]
def __str__(self) -> Any:
return (
'ParsedComments:\n '
+ '\n '.join(
(f'{lineno:2} {x.info()}' for lineno, x in self.comments.items())
)
+ '\n'
)
def last(self) -> str:
lineno, x = list(self.comments.items())[-1]
return f'{lineno:2} {x.info()}\n'
def any_unprocessed(self) -> bool:
# ToDo: might want to differentiate based on lineno
return len(self.unused) > 0
# for lno, comment in reversed(self.comments.items()):
# if comment.used == ' ':
# return True
# return False
def unprocessed(self, use: Any = False) -> Any:
while len(self.unused) > 0:
if _debug != 0:
import inspect
first = self.unused.pop(0) if use else self.unused[0]
info = inspect.getframeinfo(inspect.stack()[1][0])
xprintf(
'using',
first,
self.comments[first].value,
info.function,
info.lineno,
)
yield first, self.comments[first]
if use:
self.comments[first].set_used()
def assign_pre(self, token: Any) -> Any:
token_line = token.start_mark.line
if _debug != 0:
import inspect
info = inspect.getframeinfo(inspect.stack()[1][0])
xprintf('assign_pre', token_line, self.unused, info.function, info.lineno)
gobbled = False
while self.unused and self.unused[0] < token_line:
gobbled = True
first = self.unused.pop(0)
if _debug != 0:
xprintf('assign_pre < ', first)
self.comments[first].set_used()
token.add_comment_pre(first)
return gobbled
def assign_eol(self, tokens: Any) -> Any:
try:
comment_line = self.unused[0]
except IndexError:
return
if not isinstance(self.comments[comment_line], EOLComment):
return
idx = 1
while tokens[-idx].start_mark.line > comment_line or isinstance(
tokens[-idx],
ValueToken,
):
idx += 1
if _debug != 0:
xprintf('idx1', idx)
if (
len(tokens) > idx
and isinstance(tokens[-idx], ScalarToken)
and isinstance(tokens[-(idx + 1)], ScalarToken)
):
return
try:
if isinstance(tokens[-idx], ScalarToken) and isinstance(
tokens[-(idx + 1)],
KeyToken,
):
try:
eol_idx = self.unused.pop(0)
self.comments[eol_idx].set_used()
if _debug != 0:
xprintf('>>>>>a', idx, eol_idx, KEYCMNT)
tokens[-idx].add_comment_eol(eol_idx, KEYCMNT)
except IndexError:
raise NotImplementedError
return
except IndexError:
if _debug != 0:
xprintf('IndexError1')
pass
try:
if isinstance(tokens[-idx], ScalarToken) and isinstance(
tokens[-(idx + 1)],
(ValueToken, BlockEntryToken),
):
try:
eol_idx = self.unused.pop(0)
self.comments[eol_idx].set_used()
tokens[-idx].add_comment_eol(eol_idx, VALUECMNT)
except IndexError:
raise NotImplementedError
return
except IndexError:
if _debug != 0:
xprintf('IndexError2')
pass
for t in tokens:
xprintf('tt-', t)
if _debug != 0:
xprintf('not implemented EOL', type(tokens[-idx]))
import sys
sys.exit(0)
def assign_post(self, token: Any) -> Any:
token_line = token.start_mark.line
if _debug != 0:
import inspect
info = inspect.getframeinfo(inspect.stack()[1][0])
xprintf('assign_post', token_line, self.unused, info.function, info.lineno)
gobbled = False
while self.unused and self.unused[0] < token_line:
gobbled = True
first = self.unused.pop(0)
if _debug != 0:
xprintf('assign_post < ', first)
self.comments[first].set_used()
token.add_comment_post(first)
return gobbled
def str_unprocessed(self) -> Any:
return ''.join(
(
f' {ind:2} {x.info()}\n'
for ind, x in self.comments.items()
if x.used == ' '
),
)
class RoundTripScannerSC(Scanner): # RoundTripScanner Split Comments
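# Scanner variant that keeps comments out of the token stream and stores
# them in a ScannedComments instance instead (self.comments, also exposed
# as loader.parsed_comments).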
def __init__(self, *arg: Any, **kw: Any) -> None:
super().__init__(*arg, **kw)
assert self.loader is not None
# comments is initialised in .need_more_tokens and persists on
# self.loader.parsed_comments
self.comments = None
def get_token(self) -> Any:
# Return the next token.
while self.need_more_tokens():
self.fetch_more_tokens()
if len(self.tokens) > 0:
if isinstance(self.tokens[0], BlockEndToken):
self.comments.assign_post(self.tokens[0]) # type: ignore
else:
self.comments.assign_pre(self.tokens[0]) # type: ignore
self.tokens_taken += 1
return self.tokens.pop(0)
def need_more_tokens(self) -> bool:
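# Keep fetching until at least two tokens are buffered and they do not
# start on the same line, so comments between them can be assigned first.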
if self.comments is None:
self.loader.parsed_comments = self.comments = ScannedComments() # type: ignore
if self.done:
return False
if len(self.tokens) == 0:
return True
# The current token may be a potential simple key, so we
# need to look further.
self.stale_possible_simple_keys()
if self.next_possible_simple_key() == self.tokens_taken:
return True
if len(self.tokens) < 2:
return True
if self.tokens[0].start_mark.line == self.tokens[-1].start_mark.line:
return True
if True:
if _debug != 0:
xprintf('-x--', len(self.tokens))
for t in self.tokens:
xprintf(t)
# xprintf(self.comments.last())
xprintf(self.comments.str_unprocessed()) # type: ignore
self.comments.assign_pre(self.tokens[0]) # type: ignore
self.comments.assign_eol(self.tokens) # type: ignore
return False
def scan_to_next_token(self) -> None:
srp = self.reader.peek
srf = self.reader.forward
if self.reader.index == 0 and srp() == '\uFEFF':
srf()
start_mark = self.reader.get_mark()
# xprintf('current_mark', start_mark.line, start_mark.column)
found = False
while not found:
while srp() == ' ':
srf()
ch = srp()
if ch == '#':
comment_start_mark = self.reader.get_mark()
comment = ch
srf() # skip the '#'
while ch not in _THE_END:
ch = srp()
if ch == '\0': # don't gobble the end-of-stream character
# but add an explicit newline as "YAML processors should terminate
# the stream with an explicit line break
# https://yaml.org/spec/1.2/spec.html#id2780069
comment += '\n'
break
comment += ch
srf()
# we have a comment
if start_mark.column == 0:
self.comments.add_full_line_comment( # type: ignore
comment,
comment_start_mark.column,
comment_start_mark.line,
)
else:
self.comments.add_eol_comment( # type: ignore
comment,
comment_start_mark.column,
comment_start_mark.line,
)
comment = ""
# gather any blank lines or full line comments following the comment as well
self.scan_empty_or_full_line_comments()
if not self.flow_level:
self.allow_simple_key = True
return
if bool(self.scan_line_break()):
# start_mark = self.reader.get_mark()
if not self.flow_level:
self.allow_simple_key = True
self.scan_empty_or_full_line_comments()
return None
ch = srp()
if ch == '\n': # empty toplevel lines
start_mark = self.reader.get_mark()
comment = ""
while ch:
ch = self.scan_line_break(empty_line=True)
comment += ch
if srp() == '#':
# empty line followed by indented real comment
comment = comment.rsplit('\n', 1)[0] + '\n'
_ = self.reader.get_mark() # gobble end_mark
return None
else:
found = True
return None
def scan_empty_or_full_line_comments(self) -> None:
blmark = self.reader.get_mark()
assert blmark.column == 0
blanks = ""
comment = None
mark = None
ch = self.reader.peek()
while True:
# nprint('ch', repr(ch), self.reader.get_mark().column)
if ch in '\r\n\x85\u2028\u2029':
if self.reader.prefix(2) == '\r\n':
self.reader.forward(2)
else:
self.reader.forward()
if comment is not None:
comment += '\n'
self.comments.add_full_line_comment(comment, mark.column, mark.line)
comment = None
else:
blanks += '\n'
self.comments.add_blank_line(blanks, blmark.column, blmark.line) # type: ignore # NOQA
blanks = ""
blmark = self.reader.get_mark()
ch = self.reader.peek()
continue
if comment is None:
if ch in ' \t':
blanks += ch
elif ch == '#':
mark = self.reader.get_mark()
comment = '#'
else:
# xprintf('breaking on', repr(ch))
break
else:
comment += ch
self.reader.forward()
ch = self.reader.peek()
def scan_block_scalar_ignored_line(self, start_mark: Any) -> Any:
# See the specification for details.
srp = self.reader.peek
srf = self.reader.forward
prefix = ''
comment = None
while srp() == ' ':
prefix += srp()
srf()
if srp() == '#':
comment = ''
mark = self.reader.get_mark()
while srp() not in _THE_END:
comment += srp()
srf()
comment += '\n' # type: ignore
ch = srp()
if ch not in _THE_END:
raise ScannerError(
'while scanning a block scalar',
start_mark,
f'expected a comment or a line break, but found {ch!r}',
self.reader.get_mark(),
)
if comment is not None:
self.comments.add_eol_comment(comment, mark.column, mark.line) # type: ignore
self.scan_line_break()
return None
python-ruyaml-0.92.1/lib/ruyaml/serializer.py 0000664 0000000 0000000 00000020574 15056754172 0021261 0 ustar 00root root 0000000 0000000 from __future__ import annotations
from ruyaml.compat import DBG_NODE, dbg, nprint, nprintf # NOQA
from ruyaml.error import YAMLError
from ruyaml.events import (
AliasEvent,
DocumentEndEvent,
DocumentStartEvent,
MappingEndEvent,
MappingStartEvent,
ScalarEvent,
SequenceEndEvent,
SequenceStartEvent,
StreamEndEvent,
StreamStartEvent,
)
from ruyaml.nodes import MappingNode, ScalarNode, SequenceNode
from ruyaml.util import RegExp
if False: # MYPY
from typing import Any, Dict, Optional, Text, Union # NOQA
from ruyaml.compat import VersionType # NOQA
__all__ = ['Serializer', 'SerializerError']
class SerializerError(YAMLError):
pass
class Serializer:
# 'id' and 3+ numbers, but not 000
ANCHOR_TEMPLATE = 'id{:03d}'
ANCHOR_RE = RegExp('id(?!000$)\\d{3,}')
def __init__(
self,
encoding: Any = None,
explicit_start: Optional[bool] = None,
explicit_end: Optional[bool] = None,
version: Optional[VersionType] = None,
tags: Any = None,
dumper: Any = None,
) -> None:
# NOQA
self.dumper = dumper
if self.dumper is not None:
self.dumper._serializer = self
self.use_encoding = encoding
self.use_explicit_start = explicit_start
self.use_explicit_end = explicit_end
if isinstance(version, str):
self.use_version = tuple(map(int, version.split('.')))
else:
self.use_version = version # type: ignore
self.use_tags = tags
self.serialized_nodes: Dict[Any, Any] = {}
self.anchors: Dict[Any, Any] = {}
self.last_anchor_id = 0
self.closed: Optional[bool] = None
self._templated_id = None
@property
def emitter(self) -> Any:
if hasattr(self.dumper, 'typ'):
return self.dumper.emitter # type: ignore
return self.dumper._emitter # type: ignore
@property
def resolver(self) -> Any:
if hasattr(self.dumper, 'typ'):
return self.dumper.resolver # type: ignore
return self.dumper._resolver # type: ignore
def open(self) -> None:
if self.closed is None:
self.emitter.emit(StreamStartEvent(encoding=self.use_encoding))
self.closed = False
elif self.closed:
raise SerializerError('serializer is closed')
else:
raise SerializerError('serializer is already opened')
def close(self) -> None:
if self.closed is None:
raise SerializerError('serializer is not opened')
elif not self.closed:
self.emitter.emit(StreamEndEvent())
self.closed = True
# def __del__(self):
# self.close()
def serialize(self, node: Any) -> None:
if dbg(DBG_NODE):
nprint('Serializing nodes')
node.dump()
if self.closed is None:
raise SerializerError('serializer is not opened')
elif self.closed:
raise SerializerError('serializer is closed')
self.emitter.emit(
DocumentStartEvent(
explicit=self.use_explicit_start,
version=self.use_version,
tags=self.use_tags,
),
)
self.anchor_node(node)
self.serialize_node(node, None, None)
self.emitter.emit(DocumentEndEvent(explicit=self.use_explicit_end))
self.serialized_nodes = {}
self.anchors = {}
self.last_anchor_id = 0
def anchor_node(self, node: Any) -> None:
if node in self.anchors:
if self.anchors[node] is None:
self.anchors[node] = self.generate_anchor(node)
else:
anchor = None
try:
if node.anchor.always_dump:
anchor = node.anchor.value
except: # NOQA
pass
self.anchors[node] = anchor
if isinstance(node, SequenceNode):
for item in node.value:
self.anchor_node(item)
elif isinstance(node, MappingNode):
for key, value in node.value:
self.anchor_node(key)
self.anchor_node(value)
def generate_anchor(self, node: Any) -> Any:
try:
anchor = node.anchor.value
except: # NOQA
anchor = None
if anchor is None:
self.last_anchor_id += 1
return self.ANCHOR_TEMPLATE.format(self.last_anchor_id)
return anchor
def serialize_node(self, node: Any, parent: Any, index: Any) -> None:
alias = self.anchors[node]
if node in self.serialized_nodes:
node_style = getattr(node, 'style', None)
if node_style != '?':
node_style = None
self.emitter.emit(AliasEvent(alias, style=node_style))
else:
self.serialized_nodes[node] = True
self.resolver.descend_resolver(parent, index)
if isinstance(node, ScalarNode):
# here, check if node.tag equals the tag that would result from parsing;
# if they are not equal, quoting is necessary for strings
detected_tag = self.resolver.resolve(
ScalarNode, node.value, (True, False)
)
default_tag = self.resolver.resolve(
ScalarNode, node.value, (False, True)
)
implicit = (
(node.ctag == detected_tag),
(node.ctag == default_tag),
node.tag.startswith('tag:yaml.org,2002:'), # type: ignore
)
self.emitter.emit(
ScalarEvent(
alias,
node.ctag,
implicit,
node.value,
style=node.style,
comment=node.comment,
),
)
elif isinstance(node, SequenceNode):
implicit = node.ctag == self.resolver.resolve(
SequenceNode, node.value, True
)
comment = node.comment
end_comment = None
seq_comment = None
if node.flow_style is True:
if comment: # eol comment on flow style sequence
seq_comment = comment[0]
# comment[0] = None
if comment and len(comment) > 2:
end_comment = comment[2]
else:
end_comment = None
self.emitter.emit(
SequenceStartEvent(
alias,
node.ctag,
implicit,
flow_style=node.flow_style,
comment=node.comment,
),
)
index = 0
for item in node.value:
self.serialize_node(item, node, index)
index += 1
self.emitter.emit(SequenceEndEvent(comment=[seq_comment, end_comment]))
elif isinstance(node, MappingNode):
implicit = node.ctag == self.resolver.resolve(
MappingNode, node.value, True
)
comment = node.comment
end_comment = None
map_comment = None
if node.flow_style is True:
                if comment: # eol comment on flow style mapping
map_comment = comment[0]
# comment[0] = None
if comment and len(comment) > 2:
end_comment = comment[2]
self.emitter.emit(
MappingStartEvent(
alias,
node.ctag,
implicit,
flow_style=node.flow_style,
comment=node.comment,
nr_items=len(node.value),
),
)
for key, value in node.value:
self.serialize_node(key, node, None)
self.serialize_node(value, node, key)
self.emitter.emit(MappingEndEvent(comment=[map_comment, end_comment]))
self.resolver.ascend_resolver()
def templated_id(s: Text) -> Any:
return Serializer.ANCHOR_RE.match(s)
python-ruyaml-0.92.1/lib/ruyaml/tag.py 0000664 0000000 0000000 00000007327 15056754172 0017664 0 ustar 00root root 0000000 0000000 from __future__ import annotations
"""
In round-trip mode the original tag needs to be preserved, but the tag
transformed based on the directives needs to be available as well.
A Tag that is created during loading has a handle and a suffix.
Not all objects loaded currently have a Tag, that .tag attribute can be None
A Tag that is created for dumping only (on an object loaded without a tag) has a suffix
only.
"""
if False: # MYPY
from typing import Any, Dict, Iterator, List, Optional, Union # NOQA
tag_attrib = '_yaml_tag'
class Tag:
"""store original tag information for roundtripping"""
attrib = tag_attrib
def __init__(
self, handle: Any = None, suffix: Any = None, handles: Any = None
) -> None:
self.handle = handle
self.suffix = suffix
self.handles = handles
self._transform_type: Optional[bool] = None
def __repr__(self) -> str:
return f'{self.__class__.__name__}({self.trval!r})'
def __str__(self) -> str:
return f'{self.trval}'
def __hash__(self) -> int:
try:
return self._hash_id # type: ignore
except AttributeError:
self._hash_id = res = hash((self.handle, self.suffix))
return res
def __eq__(self, other: Any) -> bool:
# other should not be a string, but the serializer sometimes provides these
if isinstance(other, str):
return self.trval == other
return bool(self.trval == other.trval)
def startswith(self, x: str) -> bool:
if self.trval is not None:
return self.trval.startswith(x)
return False
@property
def trval(self) -> Optional[str]:
try:
return self._trval
except AttributeError:
pass
if self.handle is None:
self._trval: Optional[str] = self.uri_decoded_suffix
return self._trval
assert self._transform_type is not None
if not self._transform_type:
# the non-round-trip case
self._trval = self.handles[self.handle] + self.uri_decoded_suffix
return self._trval
# round-trip case
if self.handle == '!!' and self.suffix in (
'null',
'bool',
'int',
'float',
'binary',
'timestamp',
'omap',
'pairs',
'set',
'str',
'seq',
'map',
):
self._trval = self.handles[self.handle] + self.uri_decoded_suffix
else:
# self._trval = self.handle + self.suffix
self._trval = self.handles[self.handle] + self.uri_decoded_suffix
return self._trval
value = trval
@property
def uri_decoded_suffix(self) -> Optional[str]:
try:
return self._uri_decoded_suffix
except AttributeError:
pass
if self.suffix is None:
self._uri_decoded_suffix: Optional[str] = None
return None
res = ''
# don't have to check for scanner errors here
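        # e.g. (illustrative): a suffix of 'foo%20bar' decodes to 'foo bar'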
idx = 0
while idx < len(self.suffix):
ch = self.suffix[idx]
idx += 1
if ch != '%':
res += ch
else:
res += chr(int(self.suffix[idx : idx + 2], 16))
idx += 2
self._uri_decoded_suffix = res
return res
def select_transform(self, val: bool) -> None:
"""
val: False -> non-round-trip
True -> round-trip
"""
assert self._transform_type is None
self._transform_type = val
def check_handle(self) -> bool:
if self.handle is None:
return False
return self.handle not in self.handles
python-ruyaml-0.92.1/lib/ruyaml/timestamp.py 0000664 0000000 0000000 00000003633 15056754172 0021110 0 ustar 00root root 0000000 0000000 from __future__ import annotations
import copy
import datetime
# ToDo: you could probably attach the tzinfo correctly to the object
# a more complete datetime might be used by safe loading as well
#
# add type information (iso8601, spaced)
if False: # MYPY
from typing import Any, Dict, List, Optional # NOQA
class TimeStamp(datetime.datetime):
def __init__(self, *args: Any, **kw: Any) -> None:
self._yaml: Dict[str, Any] = dict(t=False, tz=None, delta=0)
def __new__(cls, *args: Any, **kw: Any) -> Any: # datetime is immutable
return datetime.datetime.__new__(cls, *args, **kw)
def __deepcopy__(self, memo: Any) -> Any:
ts = TimeStamp(
self.year, self.month, self.day, self.hour, self.minute, self.second
)
ts._yaml = copy.deepcopy(self._yaml)
return ts
def replace(
self,
year: Any = None,
month: Any = None,
day: Any = None,
hour: Any = None,
minute: Any = None,
second: Any = None,
microsecond: Any = None,
tzinfo: Any = True,
fold: Any = None,
) -> Any:
if year is None:
year = self.year
if month is None:
month = self.month
if day is None:
day = self.day
if hour is None:
hour = self.hour
if minute is None:
minute = self.minute
if second is None:
second = self.second
if microsecond is None:
microsecond = self.microsecond
if tzinfo is True:
tzinfo = self.tzinfo
if fold is None:
fold = self.fold
ts = type(self)(
year, month, day, hour, minute, second, microsecond, tzinfo, fold=fold
)
ts._yaml = copy.deepcopy(self._yaml)
return ts
def __str__(self) -> str:
return self.isoformat('T' if self._yaml['t'] else ' ')
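# Illustrative behaviour (assuming the default flags set in __init__):
#   str(TimeStamp(2001, 12, 14, 21, 59, 43)) == '2001-12-14 21:59:43'
# when the original scalar used a 'T' separator, _yaml['t'] is True and the 'T' is kept.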
python-ruyaml-0.92.1/lib/ruyaml/tokens.py 0000664 0000000 0000000 00000027016 15056754172 0020411 0 ustar 00root root 0000000 0000000 from __future__ import annotations
from ruyaml.compat import nprintf # NOQA
if False: # MYPY
from typing import Text, Any, Dict, Optional, List # NOQA
from .error import StreamMark # NOQA
SHOW_LINES = True
class Token:
__slots__ = 'start_mark', 'end_mark', '_comment'
def __init__(self, start_mark: StreamMark, end_mark: StreamMark) -> None:
self.start_mark = start_mark
self.end_mark = end_mark
def __repr__(self) -> Any:
# attributes = [key for key in self.__slots__ if not key.endswith('_mark') and
# hasattr('self', key)]
attributes = [key for key in self.__slots__ if not key.endswith('_mark')]
attributes.sort()
# arguments = ', '.join(
# [f'{key!s}={getattr(self, key)!r})' for key in attributes]
# )
arguments = [f'{key!s}={getattr(self, key)!r}' for key in attributes]
if SHOW_LINES:
try:
arguments.append('line: ' + str(self.start_mark.line))
except: # NOQA
pass
try:
arguments.append('comment: ' + str(self._comment))
except: # NOQA
pass
return f'{self.__class__.__name__}({", ".join(arguments)})'
@property
def column(self) -> int:
return self.start_mark.column
@column.setter
def column(self, pos: Any) -> None:
self.start_mark.column = pos
# old style ( <= 0.17) is a TWO element list with first being the EOL
# comment concatenated with following FLC/BLNK; and second being a list of FLC/BLNK
# preceding the token
# new style ( >= 0.17 ) is a THREE element list with the first being a list of
# preceding FLC/BLNK, the second EOL and the third following FLC/BLNK
# note that new style has differing order, and does not consist of CommentToken(s)
# but of CommentInfo instances
# any non-assigned values in new style are None, but first and last can be empty list
# new style routines add one comment at a time
# going to be deprecated in favour of add_comment_eol/post
def add_post_comment(self, comment: Any) -> None:
if not hasattr(self, '_comment'):
self._comment = [None, None]
else:
assert len(self._comment) in [2, 5] # make sure it is version 0
# if isinstance(comment, CommentToken):
# if comment.value.startswith('# C09'):
# raise
self._comment[0] = comment
# going to be deprecated in favour of add_comment_pre
def add_pre_comments(self, comments: Any) -> None:
if not hasattr(self, '_comment'):
self._comment = [None, None]
else:
assert len(self._comment) == 2 # make sure it is version 0
assert self._comment[1] is None
self._comment[1] = comments
return
# new style
def add_comment_pre(self, comment: Any) -> None:
if not hasattr(self, '_comment'):
self._comment = [[], None, None] # type: ignore
else:
assert len(self._comment) == 3
if self._comment[0] is None:
self._comment[0] = [] # type: ignore
self._comment[0].append(comment) # type: ignore
def add_comment_eol(self, comment: Any, comment_type: Any) -> None:
if not hasattr(self, '_comment'):
self._comment = [None, None, None]
else:
assert len(self._comment) == 3
assert self._comment[1] is None
if self.comment[1] is None:
self._comment[1] = [] # type: ignore
self._comment[1].extend([None] * (comment_type + 1 - len(self.comment[1]))) # type: ignore # NOQA
# nprintf('commy', self.comment, comment_type)
self._comment[1][comment_type] = comment # type: ignore
def add_comment_post(self, comment: Any) -> None:
if not hasattr(self, '_comment'):
self._comment = [None, None, []] # type: ignore
else:
assert len(self._comment) == 3
if self._comment[2] is None:
self._comment[2] = [] # type: ignore
self._comment[2].append(comment) # type: ignore
# def get_comment(self) -> Any:
# return getattr(self, '_comment', None)
@property
def comment(self) -> Any:
return getattr(self, '_comment', None)
def move_old_comment(self, target: Any, empty: bool = False) -> Any:
"""move a comment from this token to target (normally next token)
used to combine e.g. comments before a BlockEntryToken to the
ScalarToken that follows it
        empty is a special case for empty values -> comment after key
"""
c = self.comment
if c is None:
return
# don't push beyond last element
if isinstance(target, (StreamEndToken, DocumentStartToken)):
return
delattr(self, '_comment')
tc = target.comment
if not tc: # target comment, just insert
# special for empty value in key: value issue 25
if empty:
c = [c[0], c[1], None, None, c[0]]
target._comment = c
# nprint('mco2:', self, target, target.comment, empty)
return self
if c[0] and tc[0] or c[1] and tc[1]:
if isinstance(c[1], list) and isinstance(tc[1], list):
c[1].extend(tc[1])
else:
raise NotImplementedError(f'overlap in comment {c!r} {tc!r}')
if c[0]:
tc[0] = c[0]
if c[1]:
tc[1] = c[1]
return self
def split_old_comment(self) -> Any:
"""split the post part of a comment, and return it
as comment to be added. Delete second part if [None, None]
abc: # this goes to sequence
# this goes to first element
- first element
"""
comment = self.comment
if comment is None or comment[0] is None:
return None # nothing to do
ret_val = [comment[0], None]
if comment[1] is None:
delattr(self, '_comment')
return ret_val
def move_new_comment(self, target: Any, empty: bool = False) -> Any:
"""move a comment from this token to target (normally next token)
used to combine e.g. comments before a BlockEntryToken to the
ScalarToken that follows it
        empty is a special case for empty values -> comment after key
"""
c = self.comment
if c is None:
return
# don't push beyond last element
if isinstance(target, (StreamEndToken, DocumentStartToken)):
return
delattr(self, '_comment')
tc = target.comment
if not tc: # target comment, just insert
# special for empty value in key: value issue 25
if empty:
c = [c[0], c[1], c[2]]
target._comment = c
# nprint('mco2:', self, target, target.comment, empty)
return self
# if self and target have both pre, eol or post comments, something seems wrong
for idx in range(3):
if c[idx] is not None and tc[idx] is not None:
raise NotImplementedError(f'overlap in comment {c!r} {tc!r}')
# move the comment parts
for idx in range(3):
if c[idx]:
tc[idx] = c[idx]
return self
# class BOMToken(Token):
#     id = '<byte order mark>'
class DirectiveToken(Token):
__slots__ = 'name', 'value'
    id = '<directive>'
def __init__(self, name: Any, value: Any, start_mark: Any, end_mark: Any) -> None:
Token.__init__(self, start_mark, end_mark)
self.name = name
self.value = value
class DocumentStartToken(Token):
__slots__ = ()
    id = '<document start>'
class DocumentEndToken(Token):
__slots__ = ()
    id = '<document end>'
class StreamStartToken(Token):
__slots__ = ('encoding',)
    id = '<stream start>'
def __init__(
self,
start_mark: Any = None,
end_mark: Any = None,
encoding: Any = None,
) -> None:
Token.__init__(self, start_mark, end_mark)
self.encoding = encoding
class StreamEndToken(Token):
__slots__ = ()
    id = '<stream end>'
class BlockSequenceStartToken(Token):
__slots__ = ()
    id = '<block sequence start>'
class BlockMappingStartToken(Token):
__slots__ = ()
    id = '<block mapping start>'
class BlockEndToken(Token):
__slots__ = ()
    id = '<block end>'
class FlowSequenceStartToken(Token):
__slots__ = ()
id = '['
class FlowMappingStartToken(Token):
__slots__ = ()
id = '{'
class FlowSequenceEndToken(Token):
__slots__ = ()
id = ']'
class FlowMappingEndToken(Token):
__slots__ = ()
id = '}'
class KeyToken(Token):
__slots__ = ()
id = '?'
# def x__repr__(self):
# return f'KeyToken({self.start_mark.buffer[self.start_mark.index:].split(None, 1)[0]})'
class ValueToken(Token):
__slots__ = ()
id = ':'
class BlockEntryToken(Token):
__slots__ = ()
id = '-'
class FlowEntryToken(Token):
__slots__ = ()
id = ','
class AliasToken(Token):
__slots__ = ('value',)
    id = '<alias>'
def __init__(self, value: Any, start_mark: Any, end_mark: Any) -> None:
Token.__init__(self, start_mark, end_mark)
self.value = value
class AnchorToken(Token):
__slots__ = ('value',)
    id = '<anchor>'
def __init__(self, value: Any, start_mark: Any, end_mark: Any) -> None:
Token.__init__(self, start_mark, end_mark)
self.value = value
class TagToken(Token):
__slots__ = ('value',)
    id = '<tag>'
def __init__(self, value: Any, start_mark: Any, end_mark: Any) -> None:
Token.__init__(self, start_mark, end_mark)
self.value = value
class ScalarToken(Token):
__slots__ = 'value', 'plain', 'style'
    id = '<scalar>'
def __init__(
self,
value: Any,
plain: Any,
start_mark: Any,
end_mark: Any,
style: Any = None,
) -> None:
Token.__init__(self, start_mark, end_mark)
self.value = value
self.plain = plain
self.style = style
class CommentToken(Token):
__slots__ = '_value', '_column', 'pre_done'
    id = '<comment>'
def __init__(
self,
value: Any,
start_mark: Any = None,
end_mark: Any = None,
column: Any = None,
) -> None:
if start_mark is None:
assert column is not None
self._column = column # type: ignore
Token.__init__(self, start_mark, None) # type: ignore
self._value = value
@property
def value(self) -> str:
if isinstance(self._value, str):
return self._value
return "".join(self._value)
@value.setter
def value(self, val: Any) -> None:
self._value = val
def reset(self) -> None:
if hasattr(self, 'pre_done'):
delattr(self, 'pre_done')
def __repr__(self) -> Any:
v = f'{self.value!r}'
if SHOW_LINES:
try:
v += ', line: ' + str(self.start_mark.line)
except: # NOQA
pass
try:
v += ', col: ' + str(self.start_mark.column)
except: # NOQA
pass
return f'CommentToken({v})'
def __eq__(self, other: Any) -> bool:
if self.start_mark != other.start_mark:
return False
if self.end_mark != other.end_mark:
return False
if self.value != other.value:
return False
return True
def __ne__(self, other: Any) -> bool:
return not self.__eq__(other)
python-ruyaml-0.92.1/lib/ruyaml/util.py 0000664 0000000 0000000 00000020436 15056754172 0020062 0 ustar 00root root 0000000 0000000 """
some helper functions that might be generally useful
"""
from __future__ import annotations
import datetime
import re
from functools import partial
from typing import Any
if False: # MYPY
from typing import Any, Callable, Dict, List, Optional, Text, Union # NOQA
from .compat import StreamTextType # NOQA
class LazyEval:
"""
Lightweight wrapper around lazily evaluated func(*args, **kwargs).
func is only evaluated when any attribute of its return value is accessed.
Every attribute access is passed through to the wrapped value.
(This only excludes special cases like method-wrappers, e.g., __hash__.)
    The sole additional attribute is the lazy_self function, which holds the
    return value (or, prior to evaluation, func and its arguments) in its closure.
"""
def __init__(self, func: Callable[..., Any], *args: Any, **kwargs: Any) -> None:
def lazy_self() -> Any:
return_value = func(*args, **kwargs)
object.__setattr__(self, 'lazy_self', lambda: return_value)
return return_value
object.__setattr__(self, 'lazy_self', lazy_self)
def __getattribute__(self, name: str) -> Any:
lazy_self = object.__getattribute__(self, 'lazy_self')
if name == 'lazy_self':
return lazy_self
return getattr(lazy_self(), name)
def __setattr__(self, name: str, value: Any) -> None:
setattr(self.lazy_self(), name, value)
RegExp = partial(LazyEval, re.compile)
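# e.g. (illustrative): RegExp('[0-9]+') compiles nothing yet; the first attribute
# access, such as RegExp('[0-9]+').match('42'), triggers the re.compile call.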
timestamp_regexp = RegExp(
"""^(?P[0-9][0-9][0-9][0-9])
-(?P[0-9][0-9]?)
-(?P[0-9][0-9]?)
(?:((?P[Tt])|[ \\t]+) # explictly not retaining extra spaces
(?P[0-9][0-9]?)
:(?P[0-9][0-9])
:(?P[0-9][0-9])
(?:\\.(?P[0-9]*))?
(?:[ \\t]*(?PZ|(?P[-+])(?P[0-9][0-9]?)
(?::(?P[0-9][0-9]))?))?)?$""",
re.X,
)
def create_timestamp(
year: Any,
month: Any,
day: Any,
t: Any,
hour: Any,
minute: Any,
second: Any,
fraction: Any,
tz: Any,
tz_sign: Any,
tz_hour: Any,
tz_minute: Any,
) -> Union[datetime.datetime, datetime.date]:
# create a timestamp from matching against timestamp_regexp
MAX_FRAC = 999999
year = int(year)
month = int(month)
day = int(day)
if hour is None:
return datetime.date(year, month, day)
hour = int(hour)
minute = int(minute)
second = int(second)
frac = 0
if fraction:
frac_s = fraction[:6]
while len(frac_s) < 6:
frac_s += '0'
frac = int(frac_s)
if len(fraction) > 6 and int(fraction[6]) > 4:
frac += 1
if frac > MAX_FRAC:
fraction = 0
else:
fraction = frac
else:
fraction = 0
tzinfo = None
delta = None
if tz_sign:
tz_hour = int(tz_hour)
tz_minute = int(tz_minute) if tz_minute else 0
td = datetime.timedelta(
hours=tz_hour,
minutes=tz_minute,
)
if tz_sign == '-':
td = -td
tzinfo = datetime.timezone(td, name=tz)
elif tz == 'Z':
tzinfo = datetime.timezone(datetime.timedelta(hours=0), name=tz)
if frac > MAX_FRAC:
delta = -datetime.timedelta(seconds=1)
    # should do something else instead (or hook this up to the preceding if statement
    # in reverse)
    # if delta is None:
    #     return datetime.datetime(year, month, day, hour, minute, second, fraction)
    # return datetime.datetime(year, month, day, hour, minute, second, fraction,
    #                          datetime.timezone.utc)
    # the above is not good enough though: it should provide tzinfo. In Python 3 that is
    # easily doable; drop that kind of support for Python 2, as it has no native tzinfo
data = datetime.datetime(year, month, day, hour, minute, second, fraction, tzinfo)
if delta:
data -= delta
return data
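# Illustrative use (a sketch, not the loaders' exact code):
#   match = timestamp_regexp.match('2001-12-14 21:59:43.10-05:00')
#   create_timestamp(**match.groupdict())
# returns a timezone-aware datetime.datetime (or a datetime.date when no time is given).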
# originally as comment
# https://github.com/pre-commit/pre-commit/pull/211#issuecomment-186466605
# if you use this in your code, I suggest adding a test in your test suite
# that check this routines output against a known piece of your YAML
# before upgrades to this code break your round-tripped YAML
def load_yaml_guess_indent(stream: StreamTextType, **kw: Any) -> Any:
"""guess the indent and block sequence indent of yaml stream/string
returns round_trip_loaded stream, indent level, block sequence indent
- block sequence indent is the number of spaces before a dash relative to previous indent
    - if there are no block sequences, indent is taken from nested mappings; block sequence
      indent is unset (None) in that case
"""
from .main import YAML
# load a YAML document, guess the indentation, if you use TABs you are on your own
def leading_spaces(line: Any) -> int:
idx = 0
while idx < len(line) and line[idx] == ' ':
idx += 1
return idx
if isinstance(stream, str):
yaml_str: Any = stream
elif isinstance(stream, bytes):
# most likely, but the Reader checks BOM for this
yaml_str = stream.decode('utf-8')
else:
yaml_str = stream.read()
map_indent = None
indent = None # default if not found for some reason
block_seq_indent = None
prev_line_key_only = None
key_indent = 0
for line in yaml_str.splitlines():
rline = line.rstrip()
lline = rline.lstrip()
if lline.startswith('- '):
l_s = leading_spaces(line)
block_seq_indent = l_s - key_indent
idx = l_s + 1
while line[idx] == ' ': # this will end as we rstripped
idx += 1
if line[idx] == '#': # comment after -
continue
indent = idx - key_indent
break
if map_indent is None and prev_line_key_only is not None and rline:
idx = 0
while line[idx] in ' -':
idx += 1
if idx > prev_line_key_only:
map_indent = idx - prev_line_key_only
if rline.endswith(':'):
key_indent = leading_spaces(line)
idx = 0
while line[idx] == ' ': # this will end on ':'
idx += 1
prev_line_key_only = idx
continue
prev_line_key_only = None
if indent is None and map_indent is not None:
indent = map_indent
yaml = YAML() if 'yaml' not in kw else kw.pop('yaml')
return yaml.load(yaml_str, **kw), indent, block_seq_indent
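# Example (illustrative): for the document "a:\n  - 1\n  - 2\n" this returns the loaded
# data plus indent == 4 and block_seq_indent == 2 (both depend on the input layout).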
def configobj_walker(cfg: Any) -> Any:
"""
walks over a ConfigObj (INI file with comments) generating
    corresponding YAML output (including comments)
"""
from configobj import ConfigObj # type: ignore
assert isinstance(cfg, ConfigObj)
for c in cfg.initial_comment:
if c.strip():
yield c
for s in _walk_section(cfg):
if s.strip():
yield s
for c in cfg.final_comment:
if c.strip():
yield c
def _walk_section(s: Any, level: int = 0) -> Any:
from configobj import Section
assert isinstance(s, Section)
indent = ' ' * level
for name in s.scalars:
for c in s.comments[name]:
yield indent + c.strip()
x = s[name]
if '\n' in x:
i = indent + ' '
x = '|\n' + i + x.strip().replace('\n', '\n' + i)
elif ':' in x:
x = "'" + x.replace("'", "''") + "'"
line = f'{indent}{name}: {x}'
c = s.inline_comments[name]
if c:
line += ' ' + c
yield line
for name in s.sections:
for c in s.comments[name]:
yield indent + c.strip()
line = f'{indent}{name}:'
c = s.inline_comments[name]
if c:
line += ' ' + c
yield line
for val in _walk_section(s[name], level=level + 1):
yield val
# def config_obj_2_rt_yaml(cfg):
# from .comments import CommentedMap, CommentedSeq
# from configobj import ConfigObj
# assert isinstance(cfg, ConfigObj)
# #for c in cfg.initial_comment:
# # if c.strip():
# # pass
# cm = CommentedMap()
# for name in s.sections:
# cm[name] = d = CommentedMap()
#
#
# #for c in cfg.final_comment:
# # if c.strip():
# # yield c
# return cm
python-ruyaml-0.92.1/pyproject.toml 0000664 0000000 0000000 00000001055 15056754172 0017364 0 ustar 00root root 0000000 0000000 [build-system]
requires = [
"pip >= 19.3.1",
"setuptools >= 42",
"setuptools_scm[toml] >= 3.5.0",
"setuptools_scm_git_archive >= 1.1",
"wheel >= 0.33.6",
]
build-backend = "setuptools.build_meta"
[tool.black]
skip-string-normalization = true
max-line-length = 95
[tool.isort]
profile = "black"
known_first_party = "ruyaml"
[tool.pytest.ini_options]
# ensure we treat warnings as errors
filterwarnings = [
# "error",
"error::DeprecationWarning",
"error::PendingDeprecationWarning"
]
[tool.setuptools_scm]
local_scheme = "no-local-version"
python-ruyaml-0.92.1/setup.cfg 0000664 0000000 0000000 00000003557 15056754172 0016302 0 ustar 00root root 0000000 0000000 [metadata]
name = ruyaml
url = https://github.com/pycontribs/ruyaml
project_urls =
Bug Tracker = https://github.com/pycontribs/ruyaml/issues
Release Management = https://github.com/pycontribs/ruyaml/releases
Source Code = https://github.com/pycontribs/ruyaml
description = ruyaml is a fork of ruamel.yaml
long_description = file: README.rst
long_description_content_type = text/x-rst; charset=UTF-8
history = file: CHANGES
author = ruyaml Contributors
author_email = pycontribs@googlegroups.com
maintainer = Sorin Sbarnea
maintainer_email = sorin.sbarnea@gmail.com
license = MIT license
license_file = LICENSE
classifiers =
Development Status :: 5 - Production/Stable
Environment :: Console
Intended Audience :: Developers
Intended Audience :: Information Technology
Intended Audience :: System Administrators
License :: OSI Approved :: MIT License
Natural Language :: English
Operating System :: OS Independent
Programming Language :: Python :: 3.7
Programming Language :: Python :: 3.8
Programming Language :: Python :: 3.9
Programming Language :: Python :: 3.10
Programming Language :: Python :: 3.11
Topic :: Utilities
keywords =
selinux
virtualenv
[files]
packages =
ruyaml
[options]
use_scm_version = True
python_requires = >=3.7
package_dir =
= lib
packages = find:
include_package_data = True
zip_safe = True
install_requires =
distro>=1.3.0
setuptools>=39.0
[options.extras_require]
docs =
Sphinx
[options.package_data]
ruyaml =
py.typed
[options.packages.find]
where = lib
[flake8]
show-source = True
max-line-length = 95
ignore =
W503,
# duplicate of pylint W0611 (unused-import)
F401,
F405,
E203,
E402,
E124,
# temporary until we have time to address them:
B028
exclude = _test/lib,.eggs,.hg,.git,.tox,dist,.cache,__pycache__,ruyaml.egg-info
python-ruyaml-0.92.1/tox.ini 0000664 0000000 0000000 00000005120 15056754172 0015760 0 ustar 00root root 0000000 0000000 [tox]
minversion = 3.16.1
envlist =
linters
docs
packaging
py
cs
py313
py312
py311
py310
py39
py38
isolated_build = true
requires =
setuptools >= 41.4.0
pip >= 19.3.0
skip_missing_interpreters = False
# toxworkdir = /data1/DATA/tox/ruamel.yaml
[testenv]
description = Unittest using {basepython}
install_command = pip install --disable-pip-version-check {opts} {packages}
commands =
sh -c 'pytest {posargs} _test/test_*.py'
deps =
pytest
setuptools
allowlist_externals =
make
sh
[testenv:cs]
basepython = python3.11
deps =
flake8
flake8-bugbear;python_version>="3.11"
flake8-2020==1.8.1
flake8-commas==2.1.0
flake8-comprehensions==3.14.0
flake8-length==0.3.1
flake8-logging-format==0.9.0
commands =
make singlehtml
changedir = {toxinidir}/_doc
[testenv:pep8]
basepython = python3.11
deps =
pre-commit>=2.8.2
flake8
flake8-bugbear;python_version>="3.11"
flake8-2020==1.8.1
flake8-commas==2.1.0
flake8-comprehensions==3.14.0
flake8-length==0.3.1
flake8-logging-format==0.9.0
commands =
pre-commit run -a
[testenv:docs]
description = Build docs
basepython = python3.8
deps =
--editable .[docs]
commands =
make singlehtml
changedir = {toxinidir}/_doc
[testenv:linters]
description = Linting
basepython = python3.8
deps =
pre-commit>=2.8.2
flake8
flake8-bugbear
commands =
pre-commit run -a
[testenv:packaging]
description =
Do packaging/distribution
# `usedevelop = true` overrides the `skip_install` instruction; it's unwanted here
usedevelop = false
# don't install package itself in this env
skip_install = true
deps =
build >= 0.7.0
twine >= 3.7.0
setenv =
commands =
# build wheel and sdist using PEP-517
{envpython} -c 'import os.path, shutil, sys; \
dist_dir = os.path.join("{toxinidir}", "dist"); \
os.path.isdir(dist_dir) or sys.exit(0); \
print("Removing \{!s\} contents...".format(dist_dir), file=sys.stderr); \
shutil.rmtree(dist_dir)'
{envpython} -m build \
--outdir {toxinidir}/dist/ \
{toxinidir}
# Validate metadata using twine
twine check --strict {toxinidir}/dist/*
# Install the wheel
sh -c "python3 -m pip install --force-reinstall {toxinidir}/dist/*.whl"
[flake8]
show-source = True
max-line-length = 95
ignore = W503,F405,E203,C408,E124
exclude = _test/lib,branch_default,split,jabsy,cmd,bytes,jinja2,clib,pytypes,string,data,base,convert,.hg,.git,.tox,dist,.cache,__pycache__,ruamel.zip2tar.egg-info
[pytest]
filterwarnings =
error::DeprecationWarning
error::PendingDeprecationWarning