pax_global_header00006660000000000000000000000064145366175270014532gustar00rootroot0000000000000052 comment=b40b8947a40ae287ca116a643bf3684a5d57abab cwl-upgrader-1.2.11/000077500000000000000000000000001453661752700142105ustar00rootroot00000000000000cwl-upgrader-1.2.11/.flake8000066400000000000000000000001441453661752700153620ustar00rootroot00000000000000[flake8] ignore = E203, E266, E501, W503, E211, E731 max-line-length = 88 select = B,C,E,F,W,T4,B9 cwl-upgrader-1.2.11/.github/000077500000000000000000000000001453661752700155505ustar00rootroot00000000000000cwl-upgrader-1.2.11/.github/dependabot.yml000066400000000000000000000005001453661752700203730ustar00rootroot00000000000000version: 2 updates: - package-ecosystem: "pip" # See documentation for possible values directory: "/" # Location of package manifests schedule: interval: "daily" # Maintain dependencies for GitHub Actions - package-ecosystem: "github-actions" directory: "/" schedule: interval: "daily" cwl-upgrader-1.2.11/.github/workflows/000077500000000000000000000000001453661752700176055ustar00rootroot00000000000000cwl-upgrader-1.2.11/.github/workflows/ci-tests.yml000066400000000000000000000077261453661752700220770ustar00rootroot00000000000000name: Continuous integration tests on: push: branches: [ main ] pull_request: branches: [ main ] concurrency: group: build-${{ github.event.pull_request.number || github.ref }} cancel-in-progress: true env: TOX_SKIP_MISSING_INTERPRETERS: False # Rich (pip) FORCE_COLOR: 1 # Tox PY_COLORS: 1 # Mypy (see https://github.com/python/mypy/issues/7771) TERM: xterm-color MYPY_FORCE_COLOR: 1 MYPY_FORCE_TERMINAL_WIDTH: 200 # Pytest PYTEST_ADDOPTS: --color=yes jobs: tox: name: CI tests via Tox runs-on: ubuntu-22.04 strategy: matrix: py-ver-major: [3] py-ver-minor: [8, 9, 10, 11, 12] step: [lint, unit, mypy] env: py-semver: ${{ format('{0}.{1}', matrix.py-ver-major, matrix.py-ver-minor) }} TOXENV: ${{ format('py{0}{1}-{2}', matrix.py-ver-major, matrix.py-ver-minor, matrix.step) }} 
steps: - uses: actions/checkout@v4 with: fetch-depth: 0 - name: Set up Python uses: actions/setup-python@v5 with: python-version: ${{ env.py-semver }} allow-prereleases: true cache: pip cache-dependency-path: | requirements.txt test-requirements.txt mypy-requirements.txt - name: Upgrade setuptools and install tox run: | pip install -U pip setuptools wheel pip install tox tox-gh-actions - name: MyPy cache if: ${{ matrix.step == 'mypy' }} uses: actions/cache@v3 with: path: .mypy_cache/${{ env.py-semver }} key: mypy-${{ env.py-semver }} - name: Test with tox run: tox - name: Upload coverage to Codecov if: ${{ matrix.step == 'unit' }} uses: codecov/codecov-action@v3 with: fail_ci_if_error: true token: ${{ secrets.CODECOV_TOKEN }} tox-style: name: CI linters via Tox runs-on: ubuntu-22.04 strategy: matrix: step: [lintreadme, pydocstyle] env: py-semver: 3.12 TOXENV: ${{ format('py312-{0}', matrix.step) }} steps: - uses: actions/checkout@v4 with: fetch-depth: 0 - name: Set up Python uses: actions/setup-python@v5 with: python-version: ${{ env.py-semver }} cache: pip - name: Upgrade setuptools and install tox run: | pip install -U pip setuptools wheel pip install tox tox-gh-actions - if: ${{ matrix.step == 'pydocstyle' && github.event_name == 'pull_request'}} name: Create local branch for diff-quality for PRs run: git branch ${{github.base_ref}} origin/${{github.base_ref}} - name: Test with tox run: tox conformance_tests: name: upgrade & test conformance tests runs-on: ubuntu-22.04 steps: - uses: actions/checkout@v4 - name: Set up Python uses: actions/setup-python@v5 with: python-version: 3.12 cache: pip - name: "Test upgrading CWL conformance tests & running them" run: ./conformance-test.sh release_test: name: cwl-utils release test runs-on: ubuntu-22.04 steps: - uses: actions/checkout@v4 - name: Set up Python uses: actions/setup-python@v5 with: python-version: 3.12 cache: pip cache-dependency-path: | requirements.txt test-requirements.txt mypy-requirements.txt - name: 
Install packages run: | pip install -U pip setuptools wheel pip install virtualenv - name: Release test env: RELEASE_SKIP: head run: ./release-test.sh - name: Publish package if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') uses: pypa/gh-action-pypi-publish@release/v1 with: user: __token__ password: ${{ secrets.PYPI_API_TOKEN }} packages_dir: testenv3_3/dist cwl-upgrader-1.2.11/.github/workflows/codeql-analysis.yml000066400000000000000000000014741453661752700234260ustar00rootroot00000000000000name: "Code scanning - action" on: push: branches: [main] pull_request: branches: [main] schedule: - cron: '0 1 * * 4' concurrency: group: codeql-${{ github.event.pull_request.number || github.ref }} cancel-in-progress: true jobs: CodeQL-Build: runs-on: ubuntu-latest permissions: security-events: write steps: - name: Checkout repository uses: actions/checkout@v4 with: # We must fetch at least the immediate parents so that if this is # a pull request then we can checkout the head. fetch-depth: 2 # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL uses: github/codeql-action/init@v3 with: languages: python - name: Perform CodeQL Analysis uses: github/codeql-action/analyze@v3 cwl-upgrader-1.2.11/.gitignore000066400000000000000000000010021453661752700161710ustar00rootroot00000000000000# Generated during tests pytestdebug.log tmp/ # Python temps __pycache__/ *.py[cod] *$py.class build/ dist/ eggs/ .eggs/ *.egg-info/ *.egg .tox/ .pytest_cache # Editor Temps .*.sw? 
*~ \#*\# .desktop # virtualenv venv/ venv3/ # pycharm .idea/ # local type stubs mypy-stubs/ruamel/yaml #mypy .mypy_cache/ bin/ lib/ # Files generated by Makefile .cache/ cache/ .coverage .coverage.* coverage.xml htmlcov output output.txt pydocstyle_report.txt response.txt test.txt time.txt value .python-version testenv*/ cwl-upgrader-1.2.11/.isort.cfg000066400000000000000000000001611453661752700161050ustar00rootroot00000000000000[settings] multi_line_output=3 include_trailing_comma=True force_grid_wrap=0 use_parentheses=True line_length=88 cwl-upgrader-1.2.11/.pylintrc000066400000000000000000000414211453661752700160570ustar00rootroot00000000000000[MASTER] # A comma-separated list of package or module names from where C extensions may # be loaded. Extensions are loading into the active Python interpreter and may # run arbitrary code. extension-pkg-whitelist= # Add files or directories to the blacklist. They should be base names, not # paths. ignore=CVS # Add files or directories matching the regex patterns to the blacklist. The # regex matches against base names, not paths. ignore-patterns= # Python code to execute, usually for sys.path manipulation such as # pygtk.require(). #init-hook= # Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the # number of processors available to use. jobs=1 # Control the amount of potential inferred values when inferring a single # object. This can help the performance when dealing with large functions or # complex, nested conditions. limit-inference-results=100 # List of plugins (as comma separated values of python modules names) to load, # usually to register additional checkers. load-plugins= # Pickle collected data for later comparisons. persistent=yes # Specify a configuration file. #rcfile= # When enabled, pylint would attempt to guess common misconfiguration and emit # user-friendly hints instead of false-positive error messages. suggestion-mode=yes # Allow loading of arbitrary C extensions. 
Extensions are imported into the # active Python interpreter and may run arbitrary code. unsafe-load-any-extension=no [MESSAGES CONTROL] # Only show warnings with the listed confidence levels. Leave empty to show # all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED. confidence= # Disable the message, report, category or checker with the given id(s). You # can either give multiple identifiers separated by comma (,) or put this # option multiple times (only on the command line, not in the configuration # file where it should appear only once). You can also use "--disable=all" to # disable everything first and then reenable specific checks. For example, if # you want to run only the similarities checker, you can use "--disable=all # --enable=similarities". If you want to run only the classes checker, but have # no Warning level messages displayed, use "--disable=all --enable=classes # --disable=W". disable=print-statement, parameter-unpacking, unpacking-in-except, old-raise-syntax, backtick, long-suffix, old-ne-operator, old-octal-literal, import-star-module-level, non-ascii-bytes-literal, raw-checker-failed, bad-inline-option, locally-disabled, locally-enabled, file-ignored, suppressed-message, useless-suppression, deprecated-pragma, use-symbolic-message-instead, apply-builtin, basestring-builtin, buffer-builtin, cmp-builtin, coerce-builtin, execfile-builtin, file-builtin, long-builtin, raw_input-builtin, reduce-builtin, standarderror-builtin, unicode-builtin, xrange-builtin, coerce-method, delslice-method, getslice-method, setslice-method, no-absolute-import, old-division, dict-iter-method, dict-view-method, next-method-called, metaclass-assignment, indexing-exception, raising-string, reload-builtin, oct-method, hex-method, nonzero-method, cmp-method, input-builtin, round-builtin, intern-builtin, unichr-builtin, map-builtin-not-iterating, zip-builtin-not-iterating, range-builtin-not-iterating, filter-builtin-not-iterating, using-cmp-argument, 
eq-without-hash, div-method, idiv-method, rdiv-method, exception-message-attribute, invalid-str-codec, sys-max-int, bad-python3-import, deprecated-string-function, deprecated-str-translate-call, deprecated-itertools-function, deprecated-types-field, next-method-defined, dict-items-not-iterating, dict-keys-not-iterating, dict-values-not-iterating, deprecated-operator-function, deprecated-urllib-function, xreadlines-attribute, deprecated-sys-function, exception-escape, comprehension-escape, useless-object-inheritance, bad-continuation, bad-whitespace # Enable the message, report, category or checker with the given id(s). You can # either give multiple identifier separated by comma (,) or put this option # multiple time (only on the command line, not in the configuration file where # it should appear only once). See also the "--disable" option for examples. enable=c-extension-no-member [REPORTS] # Python expression which should return a note less than 10 (10 is the highest # note). You have access to the variables errors warning, statement which # respectively contain the number of errors / warnings messages and the total # number of statements analyzed. This is used by the global evaluation report # (RP0004). evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) # Template used to display messages. This is a python new-style format string # used to format the message information. See doc for all details. #msg-template= # Set the output format. Available formats are text, parseable, colorized, json # and msvs (visual studio). You can also give a reporter class, e.g. # mypackage.mymodule.MyReporterClass. output-format=text # Tells whether to display a full report or only the messages. reports=no # Activate the evaluation score. score=yes [REFACTORING] # Maximum number of nested blocks for function / method body max-nested-blocks=5 # Complete name of functions that never returns. 
When checking for # inconsistent-return-statements if a never returning function is called then # it will be considered as an explicit return statement and no message will be # printed. never-returning-functions=sys.exit [SPELLING] # Limits count of emitted suggestions for spelling mistakes. max-spelling-suggestions=4 # Spelling dictionary name. Available dictionaries: none. To make it working # install python-enchant package.. spelling-dict= # List of comma separated words that should not be checked. spelling-ignore-words= # A path to a file that contains private dictionary; one word per line. spelling-private-dict-file= # Tells whether to store unknown words to indicated private dictionary in # --spelling-private-dict-file option instead of raising a message. spelling-store-unknown-words=no [BASIC] # Naming style matching correct argument names. argument-naming-style=snake_case # Regular expression matching correct argument names. Overrides argument- # naming-style. #argument-rgx= # Naming style matching correct attribute names. attr-naming-style=snake_case # Regular expression matching correct attribute names. Overrides attr-naming- # style. #attr-rgx= # Bad variable names which should always be refused, separated by a comma. bad-names=foo, bar, baz, toto, tutu, tata # Naming style matching correct class attribute names. class-attribute-naming-style=any # Regular expression matching correct class attribute names. Overrides class- # attribute-naming-style. #class-attribute-rgx= # Naming style matching correct class names. class-naming-style=PascalCase # Regular expression matching correct class names. Overrides class-naming- # style. #class-rgx= # Naming style matching correct constant names. const-naming-style=UPPER_CASE # Regular expression matching correct constant names. Overrides const-naming- # style. #const-rgx= # Minimum line length for functions/classes that require docstrings, shorter # ones are exempt. 
docstring-min-length=-1 # Naming style matching correct function names. function-naming-style=snake_case # Regular expression matching correct function names. Overrides function- # naming-style. #function-rgx= # Good variable names which should always be accepted, separated by a comma. good-names=i, j, k, ex, Run, _ # Include a hint for the correct naming format with invalid-name. include-naming-hint=no # Naming style matching correct inline iteration names. inlinevar-naming-style=any # Regular expression matching correct inline iteration names. Overrides # inlinevar-naming-style. #inlinevar-rgx= # Naming style matching correct method names. method-naming-style=snake_case # Regular expression matching correct method names. Overrides method-naming- # style. #method-rgx= # Naming style matching correct module names. module-naming-style=snake_case # Regular expression matching correct module names. Overrides module-naming- # style. #module-rgx= # Colon-delimited sets of names that determine each other's naming style when # the name regexes allow several styles. name-group= # Regular expression which should only match function or class names that do # not require a docstring. no-docstring-rgx=^_ # List of decorators that produce properties, such as abc.abstractproperty. Add # to this list to register other decorators that produce valid properties. # These decorators are taken in consideration only for invalid-name. property-classes=abc.abstractproperty # Naming style matching correct variable names. variable-naming-style=snake_case # Regular expression matching correct variable names. Overrides variable- # naming-style. #variable-rgx= [LOGGING] # Logging modules to check that the string format arguments are in logging # function parameter format. logging-modules=logging [VARIABLES] # List of additional names supposed to be defined in builtins. Remember that # you should avoid to define new builtins when possible. 
additional-builtins= # Tells whether unused global variables should be treated as a violation. allow-global-unused-variables=yes # List of strings which can identify a callback function by name. A callback # name must start or end with one of those strings. callbacks=cb_, _cb # A regular expression matching the name of dummy variables (i.e. expected to # not be used). dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ # Argument names that match this expression will be ignored. Default to name # with leading underscore. ignored-argument-names=_.*|^ignored_|^unused_ # Tells whether we should check for unused import in __init__ files. init-import=no # List of qualified module names which can have objects that can redefine # builtins. redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io [SIMILARITIES] # Ignore comments when computing similarities. ignore-comments=yes # Ignore docstrings when computing similarities. ignore-docstrings=yes # Ignore imports when computing similarities. ignore-imports=no # Minimum lines number of a similarity. min-similarity-lines=4 [TYPECHECK] # List of decorators that produce context managers, such as # contextlib.contextmanager. Add to this list to register other decorators that # produce valid context managers. contextmanager-decorators=contextlib.contextmanager # List of members which are set dynamically and missed by pylint inference # system, and so shouldn't trigger E1101 when accessed. Python regular # expressions are accepted. generated-members= # Tells whether missing members accessed in mixin class should be ignored. A # mixin class is detected if its name ends with "mixin" (case insensitive). ignore-mixin-members=yes # Tells whether to warn about missing members when the owner of the attribute # is inferred to be None. ignore-none=yes # This flag controls whether pylint should warn about no-member and similar # checks whenever an opaque object is returned when inferring. 
The inference # can return multiple potential results while evaluating a Python object, but # some branches might not be evaluated, which results in partial inference. In # that case, it might be useful to still emit no-member and other checks for # the rest of the inferred objects. ignore-on-opaque-inference=yes # List of class names for which member attributes should not be checked (useful # for classes with dynamically set attributes). This supports the use of # qualified names. ignored-classes=optparse.Values,thread._local,_thread._local # List of module names for which member attributes should not be checked # (useful for modules/projects where namespaces are manipulated during runtime # and thus existing member attributes cannot be deduced by static analysis. It # supports qualified module names, as well as Unix pattern matching. ignored-modules= # Show a hint with possible names when a member name was not found. The aspect # of finding the hint is based on edit distance. missing-member-hint=yes # The minimum edit distance a name should have in order to be considered a # similar match for a missing member name. missing-member-hint-distance=1 # The total number of similar names that should be taken in consideration when # showing a hint for a missing member. missing-member-max-choices=1 [MISCELLANEOUS] # List of note tags to take in consideration, separated by a comma. notes=FIXME, XXX, TODO [FORMAT] # Expected format of line ending, e.g. empty (any line ending), LF or CRLF. expected-line-ending-format= # Regexp for a line that is allowed to be longer than the limit. ignore-long-lines=^\s*(# )??$ # Number of spaces of indent required inside a hanging or continued line. indent-after-paren=4 # String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 # tab). indent-string=' ' # Maximum number of characters on a single line. max-line-length=100 # Maximum number of lines in a module. 
max-module-lines=1000 # List of optional constructs for which whitespace checking is disabled. `dict- # separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}. # `trailing-comma` allows a space between comma and closing bracket: (a, ). # `empty-line` allows space-only lines. no-space-check=trailing-comma, dict-separator # Allow the body of a class to be on the same line as the declaration if body # contains single statement. single-line-class-stmt=no # Allow the body of an if to be on the same line as the test if there is no # else. single-line-if-stmt=no [IMPORTS] # Allow wildcard imports from modules that define __all__. allow-wildcard-with-all=no # Analyse import fallback blocks. This can be used to support both Python 2 and # 3 compatible code, which means that the block might have code that exists # only in one or another interpreter, leading to false positives when analysed. analyse-fallback-blocks=no # Deprecated modules which should not be used, separated by a comma. deprecated-modules=optparse,tkinter.tix # Create a graph of external dependencies in the given file (report RP0402 must # not be disabled). ext-import-graph= # Create a graph of every (i.e. internal and external) dependencies in the # given file (report RP0402 must not be disabled). import-graph= # Create a graph of internal dependencies in the given file (report RP0402 must # not be disabled). int-import-graph= # Force import order to recognize a module as part of the standard # compatibility libraries. known-standard-library= # Force import order to recognize a module as part of a third party library. known-third-party=enchant [DESIGN] # Maximum number of arguments for function / method. max-args=5 # Maximum number of attributes for a class (see R0902). max-attributes=7 # Maximum number of boolean expressions in an if statement. max-bool-expr=5 # Maximum number of branch for function / method body. max-branches=12 # Maximum number of locals for function / method body. 
max-locals=15 # Maximum number of parents for a class (see R0901). max-parents=7 # Maximum number of public methods for a class (see R0904). max-public-methods=20 # Maximum number of return / yield for function / method body. max-returns=6 # Maximum number of statements in function / method body. max-statements=50 # Minimum number of public methods for a class (see R0903). min-public-methods=2 [CLASSES] # List of method names used to declare (i.e. assign) instance attributes. defining-attr-methods=__init__, __new__, setUp # List of member names, which should be excluded from the protected access # warning. exclude-protected=_asdict, _fields, _replace, _source, _make # List of valid names for the first argument in a class method. valid-classmethod-first-arg=cls # List of valid names for the first argument in a metaclass class method. valid-metaclass-classmethod-first-arg=cls [EXCEPTIONS] # Exceptions that will emit a warning when being caught. Defaults to # "Exception". overgeneral-exceptions=Exception cwl-upgrader-1.2.11/LICENSE.txt000066400000000000000000000261361453661752700160430ustar00rootroot00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. 
"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. 
For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. 
If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. 
You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. 
Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. cwl-upgrader-1.2.11/MANIFEST.in000066400000000000000000000001331453661752700157430ustar00rootroot00000000000000include MANIFEST.in Makefile include cwlupgrader/py.typed recursive-include testdata *.cwl cwl-upgrader-1.2.11/Makefile000066400000000000000000000137041453661752700156550ustar00rootroot00000000000000# This file is part of cwl-upgrader # https://github.com/common-workflow-language/cwl-upgrader/, and is # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # Contact: common-workflow-language@googlegroups.com # make format to fix most python formatting errors # make pylint to check Python code for enhanced compliance including naming # and documentation # make coverage-report to check coverage of the python scripts by the tests MODULE=cwl-upgrader PACKAGE=cwlupgrader EXTRAS= # `SHELL=bash` doesn't work for some, so don't use BASH-isms like # `[[` conditional expressions. 
## clean : clean up all temporary / machine-generated files
clean: FORCE
	rm -f $(MODULE)/*.pyc tests/*.pyc
	rm -Rf .coverage
	rm -f diff-cover.html
diff_pydocstyle_report: pydocstyle_report.txt diff-quality --compare-branch=main --violations=pydocstyle --fail-under=100 $^ ## codespell : check for common misspellings codespell: codespell -w $(shell git ls-files | grep -v mypy-stubs | grep -v gitignore | grep -v EDAM.owl | grep -v pre.yml | grep -v test_schema) ## format : check/fix all code indentation and formatting (runs black) format: black cwlupgrader tests mypy-stubs format-check: black --diff --check cwlupgrader mypy-stubs ## pylint : run static code analysis on Python code pylint: $(PYSOURCES) pylint --msg-template="{path}:{line}: [{msg_id}({symbol}), {obj}] {msg}" \ $^ -j0|| true pylint_report.txt: $(PYSOURCES) pylint --msg-template="{path}:{line}: [{msg_id}({symbol}), {obj}] {msg}" \ $^ -j0> $@ || true diff_pylint_report: pylint_report.txt diff-quality --compare-branch=main --violations=pylint pylint_report.txt .coverage: testcov coverage: .coverage coverage report coverage.xml: .coverage coverage xml coverage.html: htmlcov/index.html htmlcov/index.html: .coverage coverage html @echo Test coverage of the Python code is now in htmlcov/index.html coverage-report: .coverage coverage report diff-cover: coverage.xml diff-cover --compare-branch=main $^ diff-cover.html: coverage.xml diff-cover --compare-branch=main $^ --html-report $@ ## test : run the cwlupgrader test suite test: $(PYSOURCES) python -m pytest -rs ${PYTEST_EXTRA} ## testcov : run the cwlupgrader test suite and collect coverage testcov: $(PYSOURCES) python -m pytest --cov ${PYTEST_EXTRA} sloccount.sc: $(PYSOURCES) Makefile sloccount --duplicates --wide --details $^ > $@ ## sloccount : count lines of code sloccount: $(PYSOURCES) Makefile sloccount $^ list-author-emails: @echo 'name, E-Mail Address' @git log --format='%aN,%aE' | sort -u | grep -v 'root' mypy3: mypy mypy: $(PYSOURCES) MYPYPATH=$$MYPYPATH:mypy-stubs mypy $^ shellcheck: FORCE shellcheck conformance-test.sh release-test.sh pyupgrade: $(PYSOURCES) pyupgrade 
--exit-zero-even-if-changed --py38-plus $^ auto-walrus $^ release-test: FORCE git diff-index --quiet HEAD -- || ( echo You have uncommitted changes, please commit them and try again; false ) ./release-test.sh release: release-test . testenv2/bin/activate && \ pip install build && \ python -m build testenv2/src/${MODULE} && \ pip install twine && \ twine upload testenv2/src/${MODULE}/dist/* && \ git tag v${VERSION} && git push --tags flake8: FORCE flake8 $(PYSOURCES) FORCE: # Use this to print the value of a Makefile variable # Example `make print-VERSION` # From https://www.cmcrossroads.com/article/printing-value-makefile-variable print-% : ; @echo $* = $($*) cwl-upgrader-1.2.11/README.rst000066400000000000000000000017421453661752700157030ustar00rootroot00000000000000===================================================== Common workflow language standalone document upgrader ===================================================== This is a standalone upgrader for Common Workflow Language documents from version ``draft-3``, ``v1.0``, and ``v1.1`` to ``v1.2``. See https://github.com/sbg/sevenbridges-cwl-draft2-upgrader for upgrading from ``sbg:draft-2``. It does not check for correctness of the input document, for that one can use `the CWL reference implementation `_ (``cwltool --validate``). This is written and tested for Python 3.8, 3.9, 3.10, 3.11, and 3.12. Install ------- Installing the official package from PyPI:: pip install cwl-upgrader Or from source:: git clone https://github.com/common-workflow-language/cwl-upgrader.git pip install ./cwl-upgrader/ Run on the command line ----------------------- :: cwl-upgrader path-to-cwl-document [another-path-to-cwl-document ...] cwl-upgrader-1.2.11/conformance-test.sh000077500000000000000000000114411453661752700200170ustar00rootroot00000000000000#!/bin/bash -ex venv() { if ! 
test -d "$1" ; then if command -v virtualenv > /dev/null; then virtualenv -p python3 "$1" else python3 -m venv "$1" fi fi # shellcheck source=/dev/null source "$1"/bin/activate } # Set these variables when running the script, e.g.: # CONTAINER=podman ./conformance_test.sh # Which container runtime to use # Valid options: docker, singularity CONTAINER=${CONTAINER:-docker} # Comma-separated list of test names that should be excluded from execution # Defaults to "docker_entrypoint, inplace_update_on_file_content" # EXCLUDE=${EXCLUDE:-"some_default_test_to_exclude"} # Additional arguments for the pytest command # Defaults to none # PYTEST_EXTRA= # The directory where this script resides SCRIPT_DIRECTORY="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" # Download archive from GitHub rm -Rf main.tar.gz common-workflow-language-main wget "https://github.com/common-workflow-language/common-workflow-language/archive/main.tar.gz" tar xzf "main.tar.gz" if [ "${CONTAINER}" == "docker" ]; then docker pull docker.io/node:slim fi if [ "${CONTAINER}" == "podman" ]; then podman pull docker.io/node:slim fi if [ "${CONTAINER}" == "singularity" ]; then export CWL_SINGULARITY_CACHE="$SCRIPT_DIRECTORY/sifcache" mkdir --parents "${CWL_SINGULARITY_CACHE}" fi # Setup environment venv cwl-conformance-venv pip install -U setuptools wheel pip pip uninstall -y cwl_upgrader pip install cwltool 'cwltest>=2.3' -r "${SCRIPT_DIRECTORY}"/test-requirements.txt "${SCRIPT_DIRECTORY}" python -c 'import ruamel.yaml' CWLTOOL_OPTIONS+=" --parallel" unset exclusions declare -a exclusions if [[ "$CONTAINER" = "singularity" ]]; then CWLTOOL_OPTIONS+=" --singularity" # This test fails because Singularity and Docker have # different views on how to deal with this. exclusions+=(docker_entrypoint) if [[ "${VERSION}" = "v1.1" ]]; then # This fails because of a difference (in Singularity vs Docker) in # the way filehandles are passed to processes in the container and # wc can tell somehow. 
# See issue #1440 exclusions+=(stdin_shorcut) fi elif [[ "$CONTAINER" = "podman" ]]; then CWLTOOL_OPTIONS+=" --podman" fi if [[ -n "${EXCLUDE}" ]] ; then EXCLUDE="${EXCLUDE}," fi if (( "${#exclusions[*]}" > 0 )); then EXCLUDE=${EXCLUDE}$(IFS=,; echo "${exclusions[*]}") fi CONFORMANCE_TEST1="${SCRIPT_DIRECTORY}/common-workflow-language-main/v1.0/conformance_test_v1_0_to_v1_1.yaml" CONFORMANCE_TEST2="${SCRIPT_DIRECTORY}/common-workflow-language-main/v1.0/conformance_test_v1_0_to_v1_2.yaml" pushd "${SCRIPT_DIRECTORY}"/common-workflow-language-main/v1.0 cp -r v1.0 v1.1 cp -r v1.0 v1.2 rm v1.1/*.cwl rm v1.2/*.cwl set +x pushd v1.0 ; cwl-upgrader --v1.1-only --dir ../v1.1 --always-write ./*.cwl; popd pushd v1.0 ; cwl-upgrader --dir ../v1.2 --always-write ./*.cwl; popd set -x cp conformance_test_v1.0.yaml "${CONFORMANCE_TEST1}" cp conformance_test_v1.0.yaml "${CONFORMANCE_TEST2}" sed -i 's=v1.0/=v1.1/=g' "${CONFORMANCE_TEST1}" sed -i 's=v1.0/=v1.2/=g' "${CONFORMANCE_TEST2}" popd cp "${CONFORMANCE_TEST1}" "${CONFORMANCE_TEST1%".yaml"}.cwltest.yaml" CONFORMANCE_TEST1="${CONFORMANCE_TEST1%".yaml"}.cwltest.yaml" cp "${CONFORMANCE_TEST2}" "${CONFORMANCE_TEST2%".yaml"}.cwltest.yaml" CONFORMANCE_TEST2="${CONFORMANCE_TEST2%".yaml"}.cwltest.yaml" export CWLTOOL_OPTIONS echo CWLTOOL_OPTIONS="${CWLTOOL_OPTIONS}" cp "${SCRIPT_DIRECTORY}/tests/cwl-conformance/cwltool-conftest.py" "$(dirname "${CONFORMANCE_TEST1}")/conftest.py" if [[ -n "${EXCLUDE}" ]] ; then EXCLUDE_COMMAND="--cwl-exclude=${EXCLUDE}" fi pushd $(dirname "${CONFORMANCE_TEST1}") set +e python3 -m pytest "${CONFORMANCE_TEST1}" -n auto -rs --junit-xml="${SCRIPT_DIRECTORY}"/cwltool_v1.0_to_v1.1_"${CONTAINER}".xml -o junit_suite_name=cwltool_$(echo "${CWLTOOL_OPTIONS}" | tr "[:blank:]-" _) ${EXCLUDE_COMMAND} ; RETURN_CODE1=$? 
python3 -m pytest "${CONFORMANCE_TEST2}" -n auto -rs --junit-xml="${SCRIPT_DIRECTORY}"/cwltool_v1.0_to_v1.2_"${CONTAINER}".xml -o junit_suite_name=cwltool_$(echo "${CWLTOOL_OPTIONS}" | tr "[:blank:]-" _) ${EXCLUDE_COMMAND} ; RETURN_CODE2=$? set -e popd pushd "${SCRIPT_DIRECTORY}"/common-workflow-language-main/v1.0/ if find v1.1 -type f -print0 | xargs -0 grep cwlVersion | grep -v basename-fields-job.yml | grep --quiet 'v1\.0' then RETURN_CODE3=1 else RETURN_CODE3=0 fi if find v1.2 -type f -print0 | xargs -0 grep cwlVersion | grep -v basename-fields-job.yml | grep --quiet 'v1\.0' then RETURN_CODE4=1 else RETURN_CODE4=0 fi if find v1.2 -type f -print0 | xargs -0 grep cwlVersion | grep -v basename-fields-job.yml | grep --quiet 'v1\.1' then RETURN_CODE5=1 else RETURN_CODE5=0 fi popd # Cleanup deactivate # Exit exit $(( RETURN_CODE1 | RETURN_CODE2 | RETURN_CODE3 | RETURN_CODE4 | RETURN_CODE5 )) cwl-upgrader-1.2.11/cwlupgrader/000077500000000000000000000000001453661752700165275ustar00rootroot00000000000000cwl-upgrader-1.2.11/cwlupgrader/__init__.py000066400000000000000000000001471453661752700206420ustar00rootroot00000000000000"""Transforms draft-3 CWL documents into v1.0+ as idiomatically as possible.""" __version__ = "1.2.11" cwl-upgrader-1.2.11/cwlupgrader/main.cwl000066400000000000000000000005451453661752700201660ustar00rootroot00000000000000cwlVersion: v1.0 class: CommandLineTool doc: Common Workflow Language standalone document upgrader inputs: document: type: File streamable: true doc: CWL document to be upgraded baseCommand: [cwl-upgrader] stdout: revised-document outputs: document: type: File streamable: true outputBinding: glob: revised-document cwl-upgrader-1.2.11/cwlupgrader/main.py000077500000000000000000001032271453661752700200350ustar00rootroot00000000000000#!/usr/bin/env python """Transforms draft-3 CWL documents into v1.0 as idiomatically as possible.""" import argparse import copy import logging import os import os.path import stat import sys from 
def parse_args(args: List[str]) -> argparse.Namespace:
    """Build and run the argument parser for the CWL upgrader CLI."""
    parser = argparse.ArgumentParser(
        description="Tool to upgrade CWL documents from one version to another. "
        "Supports upgrading 'draft-3', 'v1.0', and 'v1.1' to 'v1.2'",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )
    parser.add_argument(
        "--v1-only", help="Don't upgrade past cwlVersion: v1.0", action="store_true"
    )
    parser.add_argument(
        "--v1.1-only",
        dest="v1_1_only",
        help="Don't upgrade past cwlVersion: v1.1",
        action="store_true",
    )
    parser.add_argument(
        "--dir", help="Directory in which to save converted files", default=Path.cwd()
    )
    parser.add_argument(
        "--always-write",
        help="Always write a file, even if no changes were made.",
        action="store_true",
    )
    parser.add_argument(
        "inputs",
        nargs="+",
        help="One or more CWL documents.",
    )
    return parser.parse_args(args)


def main(args: Optional[List[str]] = None) -> int:
    """Hook to set the args."""
    if not args:
        args = sys.argv[1:]
    return run(parse_args(args))


def run(args: argparse.Namespace) -> int:
    """
    Upgrade each input document and write the result to ``args.dir``.

    Documents without a ``cwlVersion`` are skipped with a warning; documents
    already at the requested target version are skipped unless
    ``--always-write`` is set.

    :returns: 0 (errors are logged, not raised).
    """
    imports: Set[str] = set()
    if args.dir and not os.path.exists(args.dir):
        os.makedirs(args.dir)
    for path in args.inputs:
        _logger.info("Processing %s", path)
        document = load_cwl_document(path)
        if "cwlVersion" not in document:
            # Logger.warn() is a deprecated alias of warning().
            _logger.warning("No cwlVersion found in %s, skipping it.", path)
            # BUGFIX: the original fell through after warning, so
            # upgrade_document() raised KeyError on document["cwlVersion"].
            continue
        if document["cwlVersion"] == "v1.0" and args.v1_only:
            _logger.info("Skipping v1.0 document as requested: %s.", path)
            continue
        if document["cwlVersion"] == "v1.1" and args.v1_1_only:
            _logger.info("Skipping v1.1 document as requested: %s.", path)
            continue
        if args.v1_only:
            target_version = "v1.0"
        elif args.v1_1_only:
            target_version = "v1.1"
        else:
            target_version = "latest"
        upgraded_document = upgrade_document(
            document,
            args.dir,
            target_version=target_version,
            imports=imports,
        )
        # BUGFIX: was "is not document or not args.always_write", which
        # inverted the --always-write flag and attempted to write the None
        # returned by upgrade_document() on errors/skips.
        if upgraded_document is not None and (
            upgraded_document is not document or args.always_write
        ):
            write_cwl_document(upgraded_document, Path(path).name, args.dir)
    return 0


def upgrade_document(
    document: Any,
    output_dir: str,
    target_version: Optional[str] = "latest",
    imports: Optional[Set[str]] = None,
) -> Any:
    """
    Dispatch to the correct version-to-version updater for ``document``.

    :param document: a loaded CWL document (must have ``cwlVersion``).
    :param output_dir: directory where upgraded sub-documents are written.
    :param target_version: one of "v1.0", "v1.1", "v1.2", "latest".
    :param imports: set of already-processed ``$import`` targets (shared
        across calls to avoid rewriting the same file twice).
    :returns: the upgraded document, the unchanged document (already at the
        target), or None on error/unsupported combinations.
    """
    if imports is None:
        imports = set()
    supported_versions = ["v1.0", "v1.1", "v1.2", "latest"]
    if target_version not in supported_versions:
        _logger.error(f"Unsupported target cwlVersion: {target_version}")
        return
    version = document["cwlVersion"]
    main_updater = None
    inner_updater = None
    if version == "cwl:draft-3" or version == "draft-3":
        if target_version == "v1.0":
            main_updater = draft3_to_v1_0
            inner_updater = _draft3_to_v1_0
        elif target_version == "v1.1":
            main_updater = draft3_to_v1_1
            inner_updater = _draft3_to_v1_1
        elif target_version == "v1.2":
            main_updater = draft3_to_v1_2
            inner_updater = _draft3_to_v1_2
        elif target_version == "latest":
            main_updater = draft3_to_v1_2
            inner_updater = _draft3_to_v1_2
    elif version == "v1.0":
        if target_version == "v1.0":
            _logger.info("Not upgrading v1.0 document as requested.")
            return
        elif target_version == "v1.1":
            main_updater = v1_0_to_v1_1
            inner_updater = _v1_0_to_v1_1
        elif target_version == "v1.2":
            main_updater = v1_0_to_v1_2
            inner_updater = _v1_0_to_v1_2
        elif target_version == "latest":
            main_updater = v1_0_to_v1_2
            inner_updater = _v1_0_to_v1_2
    elif version == "v1.1":
        if target_version == "v1.1":
            _logger.info("Not upgrading v1.1 document as requested.")
            return
        elif target_version == "v1.2":
            main_updater = v1_1_to_v1_2
            inner_updater = _v1_1_to_v1_2
        elif target_version == "latest":
            main_updater = v1_1_to_v1_2
            inner_updater = _v1_1_to_v1_2
    elif version == "v1.2":
        if target_version == "v1.2":
            _logger.info("Not upgrading v1.2 document as requested.")
            return document
        elif target_version == "latest":
            return document
    else:
        _logger.error(f"Unknown cwlVersion in source document: {version}")
        return
    if main_updater is None or inner_updater is None:
        # Any remaining (version, target) pair would be a downgrade.
        _logger.error(f"Cannot downgrade from cwlVersion {version} to {target_version}")
        return
    # Upgrade any $import'ed sub-documents first, then the document itself.
    process_imports(document, imports, inner_updater, output_dir)
    return main_updater(document, output_dir)


def load_cwl_document(path: str) -> Any:
    """
    Load the given path using the Ruamel YAML round-trip loader.

    Also ensures that the filename is recorded so that SourceLine can
    produce informative error messages.
    """
    with open(path) as entry:
        document = yaml.load(entry)
        add_lc_filename(document, entry.name)
    return document
""" ruamel.yaml.scalarstring.walk_tree(document) path = Path(dirname) / name with open(path, "w") as handle: if "cwlVersion" in document: if not ( document.ca and document.ca.comment and "cwl-runner" in document.ca.comment[1][0].value ): handle.write("#!/usr/bin/env cwl-runner\n") yaml.dump(document, stream=handle) if "cwlVersion" in document: path.chmod(path.stat().st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH) def process_imports( document: Any, imports: Set[str], updater: Callable[[Any, str], Any], outdir: str ) -> None: """Find any '$import's and process them.""" if isinstance(document, CommentedMap): for key, value in document.items(): if key == "$import": if value not in imports: write_cwl_document( updater( load_cwl_document( Path(document.lc.filename).parent / value ), outdir, ), Path(value).name, outdir, ) imports.add(value) else: process_imports(value, imports, updater, outdir) elif isinstance(document, MutableSequence): for entry in document: process_imports(entry, imports, updater, outdir) def v1_0_to_v1_1(document: CommentedMap, outdir: str) -> CommentedMap: """CWL v1.0.x to v1.1 transformation loop.""" _v1_0_to_v1_1(document, outdir) for key, value in document.items(): with SourceLine(document, key, Exception): if isinstance(value, CommentedMap): document[key] = _v1_0_to_v1_1(value, outdir) elif isinstance(value, list): for index, entry in enumerate(value): if isinstance(entry, CommentedMap): value[index] = _v1_0_to_v1_1(entry, outdir) document["cwlVersion"] = "v1.1" return sort_v1_0(document) def v1_0_to_v1_2(document: CommentedMap, outdir: str) -> CommentedMap: """CWL v1.0.x to v1.2 transformation.""" document = v1_0_to_v1_1(document, outdir) document = v1_1_to_v1_2(document, outdir) return document def v1_1_to_v1_2(document: CommentedMap, outdir: str) -> CommentedMap: """CWL v1.1 to v1.2 transformation.""" document = _v1_1_to_v1_2(document, outdir) document["cwlVersion"] = "v1.2" return document def draft3_to_v1_0(document: CommentedMap, 
def draft3_to_v1_1(document: CommentedMap, outdir: str) -> CommentedMap:
    """Upgrade a draft-3 document to v1.1 by chaining the two conversions."""
    intermediate = draft3_to_v1_0(document, outdir)
    return v1_0_to_v1_1(intermediate, outdir)


def draft3_to_v1_2(document: CommentedMap, outdir: str) -> CommentedMap:
    """Upgrade a draft-3 document all the way to v1.2."""
    intermediate = draft3_to_v1_0(document, outdir)
    intermediate = v1_0_to_v1_1(intermediate, outdir)
    return v1_1_to_v1_2(intermediate, outdir)


def _draft3_to_v1_0(document: CommentedMap, outdir: str) -> CommentedMap:
    """Inner loop for transforming draft-3 to v1.0."""
    cwl_class = document.get("class")
    if cwl_class == "Workflow":
        workflow_clean(document)
    elif cwl_class == "File":
        # draft-3 used "path"; v1.0 renamed it to "location".
        document["location"] = document.pop("path")
    elif cwl_class == "CommandLineTool":
        input_output_clean(document)
        hints_and_requirements_clean(document)
        base = document["baseCommand"]
        # Collapse a single-element baseCommand list to a bare string.
        if isinstance(base, list) and len(base) == 1:
            document["baseCommand"] = base[0]
        if "arguments" in document and not document["arguments"]:
            del document["arguments"]
        clean_secondary_files(document)
    if "description" in document:
        # draft-3 "description" became "doc" in v1.0.
        document["doc"] = document.pop("description")
    return document


def _draft3_to_v1_1(document: CommentedMap, outdir: str) -> CommentedMap:
    """Inner draft-3 to v1.1 step: go via v1.0, then apply the v1.1 loop."""
    upgraded = _draft3_to_v1_0(document, outdir)
    return v1_0_to_v1_1(upgraded, outdir)


def _draft3_to_v1_2(document: CommentedMap, outdir: str) -> CommentedMap:
    """Inner draft-3 to v1.2 step; nothing extra is needed beyond v1.1."""
    return _draft3_to_v1_1(document, outdir)
# Template for preserving (as documentation) an inputBinding that v1.1 no
# longer allows on Workflow/ExpressionTool inputs; first slot is the existing
# doc text (if any), second is the serialized discarded field.
WORKFLOW_INPUT_INPUTBINDING = (
    "{}[cwl-upgrader_v1_0_to_v1_1] Original input had the following "
    "(unused) inputBinding element: {}"
)

# Pre-v1.1 extension URIs mapped to their official CWL v1.1 names.
V1_0_TO_V1_1_REWRITE = {
    "http://commonwl.org/cwltool#WorkReuse": "WorkReuse",
    "http://arvados.org/cwl#ReuseRequirement": "WorkReuse",
    "http://commonwl.org/cwltool#TimeLimit": "ToolTimeLimit",
    "http://commonwl.org/cwltool#NetworkAccess": "NetworkAccess",
    "http://commonwl.org/cwltool#InplaceUpdateRequirement": "InplaceUpdateRequirement",
    "http://commonwl.org/cwltool#LoadListingRequirement": "LoadListingRequirement",
}


def _v1_0_to_v1_1(document: CommentedMap, outdir: str) -> CommentedMap:
    """
    Inner loop for transforming v1.0 to v1.1 (mutates ``document`` in place).

    For Workflows: renames extension hints/requirements, hoists
    ``loadContents``, strips deprecated input ``inputBinding``s, and recurses
    into each step's ``run`` (inline processes are upgraded in place;
    file references are upgraded and written to ``outdir``).
    For CommandLineTools: additionally pins the v1.0 default behaviors by
    adding ``NetworkAccess`` and ``LoadListingRequirement: deep_listing``
    requirements when absent.
    """
    if "class" in document:
        if document["class"] == "Workflow":
            upgrade_v1_0_hints_and_reqs(document)
            move_up_loadcontents(document)
            cleanup_v1_0_input_bindings(document)
            steps = document["steps"]
            if isinstance(steps, MutableSequence):
                # List-style steps: each entry is a mapping with its own id.
                for index, entry in enumerate(steps):
                    with SourceLine(steps, index, Exception):
                        upgrade_v1_0_hints_and_reqs(entry)
                        if "run" in entry and isinstance(entry["run"], CommentedMap):
                            process = entry["run"]
                            _v1_0_to_v1_1(process, outdir)
                            if "cwlVersion" in process:
                                # Nested processes inherit the outer version.
                                del process["cwlVersion"]
                        elif isinstance(entry["run"], str) and "#" not in entry["run"]:
                            # Plain path reference: upgrade that file too.
                            path = Path(document.lc.filename).parent / entry["run"]
                            process = v1_0_to_v1_1(load_cwl_document(str(path)), outdir)
                            write_cwl_document(process, path.name, outdir)
            elif isinstance(steps, MutableMapping):
                # Map-style steps: keys are the step names.
                for step_name in steps:
                    with SourceLine(steps, step_name, Exception):
                        entry = steps[step_name]
                        upgrade_v1_0_hints_and_reqs(entry)
                        if "run" in entry:
                            if isinstance(entry["run"], CommentedMap):
                                process = entry["run"]
                                _v1_0_to_v1_1(process, outdir)
                                if "cwlVersion" in process:
                                    del process["cwlVersion"]
                            elif (
                                isinstance(entry["run"], str)
                                and "#" not in entry["run"]
                            ):
                                path = Path(document.lc.filename).parent / entry["run"]
                                process = v1_0_to_v1_1(
                                    load_cwl_document(str(path)), outdir
                                )
                                write_cwl_document(process, path.name, outdir)
                            elif isinstance(entry["run"], str) and "#" in entry["run"]:
                                pass  # reference to $graph entry
                            else:
                                raise Exception(
                                    "'run' entry was neither a CWL Process nor "
                                    "a path to one: %s.",
                                    entry["run"],
                                )
        elif document["class"] == "CommandLineTool":
            upgrade_v1_0_hints_and_reqs(document)
            move_up_loadcontents(document)
            network_access = has_hint_or_req(document, "NetworkAccess")
            listing = has_hint_or_req(document, "LoadListingRequirement")
            # NOTE(review): default {} means a tool without "requirements"
            # gets them added in map form; existing list-form requirements
            # are extended in list form.
            reqs = document.get("requirements", {})
            # TODO: add comments to explain the extra hints
            if isinstance(reqs, MutableSequence):
                if not network_access:
                    reqs.append({"class": "NetworkAccess", "networkAccess": True})
                if not listing:
                    reqs.append(
                        cmap(
                            {
                                "class": "LoadListingRequirement",
                                "loadListing": "deep_listing",
                            }
                        )
                    )
            elif isinstance(reqs, MutableMapping):
                if not network_access:
                    reqs["NetworkAccess"] = {"networkAccess": True}
                if not listing:
                    reqs["LoadListingRequirement"] = cmap(
                        {"loadListing": "deep_listing"}
                    )
            if "requirements" not in document:
                document["requirements"] = reqs
        elif document["class"] == "ExpressionTool":
            move_up_loadcontents(document)
            cleanup_v1_0_input_bindings(document)
    return document


def _v1_0_to_v1_2(document: CommentedMap, outdir: str) -> CommentedMap:
    """Inner v1.0 to v1.2 step: apply the v1.1 then the v1.2 inner loops."""
    document = _v1_0_to_v1_1(document, outdir)
    return _v1_1_to_v1_2(document, outdir)
def cleanup_v1_0_input_bindings(document: Dict[str, Any]) -> None:
    """In v1.1 Workflow or ExpressionTool level inputBindings are deprecated."""

    def cleanup(inp: Dict[str, Any]) -> None:
        """Serialize non loadContents fields and add that to the doc."""
        if "inputBinding" in inp:
            bindings = inp["inputBinding"]
            for field in list(bindings.keys()):
                if field != "loadContents":
                    # Preserve the discarded field as documentation.
                    prefix = "" if "doc" not in inp else "{}\n".format(inp["doc"])
                    inp["doc"] = WORKFLOW_INPUT_INPUTBINDING.format(prefix, field)
                    del bindings[field]
            if not bindings:
                del inp["inputBinding"]

    inputs = document["inputs"]
    if isinstance(inputs, MutableSequence):
        for entry in inputs:
            cleanup(entry)
    elif isinstance(inputs, MutableMapping):
        for input_name in inputs:
            cleanup(inputs[input_name])


def move_up_loadcontents(document: Dict[str, Any]) -> None:
    """Promote 'loadContents' up a level for CWL v1.1."""

    def cleanup(inp: Dict[str, Any]) -> None:
        """Move loadContents to the preferred location."""
        if "inputBinding" in inp:
            bindings = inp["inputBinding"]
            for field in list(bindings.keys()):
                if field == "loadContents":
                    inp[field] = bindings.pop(field)

    inputs = document["inputs"]
    if isinstance(inputs, MutableSequence):
        for entry in inputs:
            cleanup(entry)
    elif isinstance(inputs, MutableMapping):
        for input_name in inputs:
            cleanup(inputs[input_name])


def upgrade_v1_0_hints_and_reqs(document: Dict[str, Any]) -> None:
    """Rename some pre-v1.1 extensions to their official CWL v1.1 names."""
    for extra in ("requirements", "hints"):
        if extra in document:
            with SourceLine(document, extra, Exception):
                if isinstance(document[extra], MutableMapping):
                    # BUGFIX: iterate over a snapshot; the body pops and
                    # re-inserts keys, which is unsafe while iterating the
                    # live dict.
                    for req_name in list(document[extra]):
                        with SourceLine(document[extra], req_name, Exception):
                            if req_name in V1_0_TO_V1_1_REWRITE:
                                document[extra][
                                    V1_0_TO_V1_1_REWRITE[req_name]
                                ] = document[extra].pop(req_name)
                elif isinstance(document[extra], MutableSequence):
                    for index, entry in enumerate(document[extra]):
                        with SourceLine(document[extra], index, Exception):
                            if (
                                isinstance(entry, MutableMapping)
                                and "class" in entry
                                and entry["class"] in V1_0_TO_V1_1_REWRITE
                            ):
                                # BUGFIX: was V1_0_TO_V1_1_REWRITE[entry["id"]],
                                # which raised KeyError (the guard above checks
                                # entry["class"], not entry["id"]).
                                entry["class"] = V1_0_TO_V1_1_REWRITE[entry["class"]]
                else:
                    raise Exception(
                        "{} section must be either a list of dictionaries "
                        "or a dictionary of dictionaries!: {}".format(
                            extra, document[extra]
                        )
                    )


def has_hint_or_req(document: Dict[str, Any], name: str) -> bool:
    """Detects an existing named hint or requirement."""
    for extra in ("requirements", "hints"):
        if extra in document:
            with SourceLine(document, extra, Exception):
                if isinstance(document[extra], MutableMapping):
                    if name in document[extra]:
                        return True
                elif isinstance(document[extra], MutableSequence):
                    for index, entry in enumerate(document[extra]):
                        with SourceLine(document[extra], index, Exception):
                            # BUGFIX: was '"class" == entry', which compares
                            # the string to the dict (always False), so
                            # list-form hints/reqs were never detected.
                            if "class" in entry and entry["class"] == name:
                                return True
    return False
outputs[output_id].pop("source").lstrip("#").replace(".", "/") ) new_steps = CommentedMap() for index, step in enumerate(document["steps"]): with SourceLine(document["steps"], index, Exception): new_step = CommentedMap() new_step.update(step) step = new_step step_id = step.pop("id") step_id_len = len(step_id) + 1 step["out"] = [] for index2, outp in enumerate(step["outputs"]): with SourceLine(step["outputs"], index2, Exception): clean_outp_id = outp["id"] if clean_outp_id.startswith(step_id): clean_outp_id = clean_outp_id[step_id_len:] step["out"].append(clean_outp_id) del step["outputs"] ins = CommentedMap() for index3, inp in enumerate(step["inputs"]): with SourceLine(step["inputs"], index3, Exception): ident = inp["id"] if ident.startswith(step_id): ident = ident[step_id_len:] if "source" in inp: with SourceLine(inp, "source", Exception): if isinstance(inp["source"], str): inp["source"] = ( inp["source"].lstrip("#").replace(".", "/") ) else: for index4, inp_source in enumerate(inp["source"]): with SourceLine(inp["source"], index4, Exception): inp["source"][index4] = inp_source.lstrip( "#" ).replace(".", "/") del inp["id"] if len(inp) > 1: ins[ident] = inp elif len(inp) == 1: if "source" in inp: ins[ident] = inp.popitem()[1] else: ins[ident] = inp else: ins[ident] = {} step["in"] = ins del step["inputs"] if "scatter" in step: with SourceLine(step, "scatter", Exception): if isinstance(step["scatter"], str) == 1: source = step["scatter"] if source.startswith(step_id): source = source[step_id_len:] step["scatter"] = source elif isinstance(step["scatter"], list) and len(step["scatter"]) > 1: step["scatter"] = [] for index4, source in enumerate(step["scatter"]): with SourceLine(step["scatter"], index4, Exception): if source.startswith(step_id): source = source[step_id_len:] step["scatter"].append(source) else: source = step["scatter"][0] if source.startswith(step_id): source = source[step_id_len:] step["scatter"] = source if "description" in step: step["doc"] = 
step.pop("description") new_steps[step_id.lstrip("#")] = step document["steps"] = new_steps def input_output_clean(document: Dict[str, Any]) -> None: """Transform draft-3 style input/output listings into idiomatic v1.0.""" for param_type in ["inputs", "outputs"]: if param_type not in document: break new_section = CommentedMap() meta = False for index, param in enumerate(document[param_type]): with SourceLine(document[param_type], index, Exception): if "$import" in param: meta = True if not meta: for index2, param2 in enumerate(document[param_type]): with SourceLine(document[param_type], index2, Exception): param_id = param2.pop("id").lstrip("#") if "type" in param2: param2["type"] = shorten_type(param2["type"]) array_type_raise_sf(param2) if "description" in param2: param2["doc"] = param2.pop("description") if len(param2) > 1: new_section[param_id] = sort_input_or_output(param2) elif "type" in param2 and isinstance(param2["type"], str): new_section[param_id] = param2.popitem()[1] else: new_section[param_id] = param2 document[param_type] = new_section def array_type_raise_sf(param: MutableMapping[str, Any]) -> None: """Move up draft-3 secondaryFile specs on File members in Arrays.""" typ = param["type"] if isinstance(typ, MutableSequence): for index, param2 in enumerate(typ): with SourceLine(typ, index, Exception): if isinstance(param2, MutableMapping) and "type" in param2: array_type_raise_sf(param2) elif ( isinstance(typ, MutableMapping) and "type" in typ and typ["type"] == "array" and "items" in typ and "File" in typ["items"] and "secondaryFiles" in typ ): param["secondaryFiles"] = typ["secondaryFiles"] del typ["secondaryFiles"] def hints_and_requirements_clean(document: Dict[str, Any]) -> None: """Transform draft-3 style hints/reqs into idiomatic v1.0 hints/reqs.""" for section in ["hints", "requirements"]: if section in document: new_section = {} meta = False for index, entry in enumerate(document[section]): with SourceLine(document[section], index, Exception): 
if isinstance(entry, MutableMapping): if "$import" in entry or "$include" in entry: meta = True for index2, entry2 in enumerate(document[section]): with SourceLine(document[section], index2, Exception): if isinstance(entry2, MutableMapping): if ( "class" in entry2 and entry2["class"] == "CreateFileRequirement" ): entry2["class"] = "InitialWorkDirRequirement" entry2["listing"] = [] for filedef in entry2["fileDef"]: entry2["listing"].append( { "entryname": filedef["filename"], "entry": filedef["fileContent"], } ) del entry2["fileDef"] if not meta: new_section[entry2["class"]] = entry2 del entry2["class"] if not meta: document[section] = new_section def shorten_type(type_obj: Union[str, List[Any]]) -> Union[str, List[Any]]: """Transform draft-3 style type declarations into idiomatic v1.0 types.""" if isinstance(type_obj, str) or not isinstance(type_obj, Sequence): return type_obj new_type = [] # type: List[str] for entry in type_obj: # find arrays that we can shorten and do so if isinstance(entry, Dict): if entry["type"] == "array" and isinstance(entry["items"], str): entry = entry["items"] + "[]" elif entry["type"] == "enum": entry = sort_enum(entry) new_type.extend([entry]) if len(new_type) == 2: if "null" in new_type: type_copy = copy.deepcopy(new_type) type_copy.remove("null") if isinstance(type_copy[0], str): return type_copy[0] + "?" 
if len(new_type) == 1: return new_type[0] return new_type def clean_secondary_files(document: Dict[str, Any]) -> None: """Cleanup for secondaryFiles""" if "secondaryFiles" in document: for i, sfile in enumerate(document["secondaryFiles"]): if "$(" in sfile or "${" in sfile: document["secondaryFiles"][i] = sfile.replace( '"path"', '"location"' ).replace(".path", ".location") def sort_v1_0(document: Dict[str, Any]) -> CommentedMap: """Sort the sections of the CWL document in a more meaningful order.""" keyorder = [ "cwlVersion", "class", "id", "label", "doc", "requirements", "hints", "inputs", "stdin", "baseCommand", "steps", "expression", "arguments", "stderr", "stdout", "outputs", "successCodes", "temporaryFailCodes", "permanentFailCodes", ] return CommentedMap( sorted( document.items(), key=lambda i: keyorder.index(i[0]) if i[0] in keyorder else 100, ) ) def sort_enum(enum: Dict[str, Any]) -> Dict[str, Any]: """Sort the enum type definitions in a more meaningful order.""" keyorder = ["type", "name", "label", "symbols", "inputBinding"] return CommentedMap( sorted( enum.items(), key=lambda i: keyorder.index(i[0]) if i[0] in keyorder else 100, ) ) def sort_input_or_output(io_def: Dict[str, Any]) -> Dict[str, Any]: """Sort the input definitions in a more meaningful order.""" keyorder = [ "label", "doc", "type", "format", "secondaryFiles", "default", "inputBinding", "outputBinding", "streamable", ] return CommentedMap( sorted( io_def.items(), key=lambda i: keyorder.index(i[0]) if i[0] in keyorder else 100, ) ) if __name__ == "__main__": sys.exit(main()) cwl-upgrader-1.2.11/cwlupgrader/py.typed000066400000000000000000000000001453661752700202140ustar00rootroot00000000000000cwl-upgrader-1.2.11/mypy-requirements.txt000066400000000000000000000000351453661752700204660ustar00rootroot00000000000000mypy==1.7.1 types-setuptools 
cwl-upgrader-1.2.11/mypy-stubs/000077500000000000000000000000001453661752700163445ustar00rootroot00000000000000cwl-upgrader-1.2.11/mypy-stubs/ruamel/000077500000000000000000000000001453661752700176315ustar00rootroot00000000000000cwl-upgrader-1.2.11/mypy-stubs/ruamel/__init__.py000066400000000000000000000000001453661752700217300ustar00rootroot00000000000000cwl-upgrader-1.2.11/mypy.ini000066400000000000000000000002101453661752700157000ustar00rootroot00000000000000[mypy] strict = True show_error_context = true show_column_numbers = true show_error_codes = true pretty = true warn_unreachable = True cwl-upgrader-1.2.11/pyproject.toml000066400000000000000000000042561453661752700171330ustar00rootroot00000000000000[build-system] requires = ["setuptools>=61.2"] build-backend = "setuptools.build_meta" [project] name = "cwl-upgrader" authors = [{name = "Common Workflow Language project contributors", email = "common-workflow-language@googlegroups.com"}] license = {text = "Apache 2.0"} keywords = ["cwl", "commonwl", "common-workflow-language"] description = "Upgrade a CWL tool or workflow document from one version to another" readme = "README.rst" classifiers = [ "Development Status :: 5 - Production/Stable", "Environment :: Console", "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Operating System :: POSIX", "Operating System :: MacOS :: MacOS X", "Programming Language :: Python", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: File Formats", "Topic :: Software Development :: Libraries", "Typing :: Typed", ] requires-python = ">=3.8" dependencies = [ "setuptools", "ruamel.yaml >= 0.16.0, < 0.19", "schema_salad", ] dynamic = ["version"] [project.urls] Homepage = "https://www.commonwl.org" Repository = 
"https://github.com/common-workflow-language/cwl-upgrader" Issues = "https://github.com/common-workflow-language/cwl-upgrader/issues" Changelog = "https://github.com/common-workflow-language/cwl-upgrader/releases" "Related Tools" = "https://www.commonwl.org/tools/" [project.scripts] cwl-upgrader = "cwlupgrader.main:main" [project.optional-dependencies] testing = ["pytest < 8"] [tool.aliases] test = "pytest" [tool.setuptools] package-dir = {"cwlupgrader.tests" = "tests"} packages = ["cwlupgrader", "cwlupgrader.tests"] zip-safe = true include-package-data = true license-files = ["LICENSE.txt"] [tool.setuptools.dynamic] version = {attr = "cwlupgrader.__version__"} [tool.setuptools.package-data] "cwlupgrader.tests" = ["../testdata/**/*.cwl"] [tool.isort] multi_line_output = "3" include_trailing_comma = "True" force_grid_wrap = "0" use_parentheses = "True" line_length = "88" cwl-upgrader-1.2.11/release-test.sh000077500000000000000000000066641453661752700171600ustar00rootroot00000000000000#!/bin/bash set -e set -x export LC_ALL=C package=cwl-upgrader module=cwlupgrader extras= if [ "$GITHUB_ACTIONS" = "true" ]; then # We are running as a GH Action repo=${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}.git HEAD=${GITHUB_REF} else repo=https://github.com/common-workflow-language/cwl-upgrader.git HEAD=$(git rev-parse HEAD) fi run_tests="bin/py.test --pyargs ${module}" pipver=23.1 # minimum required version of pip for Python 3.12 setuptoolsver=67.6.1 # required for Python 3.12 DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )" rm -Rf testenv? 
|| /bin/true if [ "${RELEASE_SKIP}" != "head" ] then python3 -m venv testenv1 # First we test the head # shellcheck source=/dev/null source testenv1/bin/activate rm -Rf testenv1/local rm -f testenv1/lib/python-wheels/setuptools* \ && pip install --force-reinstall -U pip==${pipver} \ && pip install setuptools==${setuptoolsver} wheel pip install -rtest-requirements.txt ".${extras}" build make test pip uninstall -y ${package} || true; pip uninstall -y ${package} || true; make install mkdir testenv1/not-${module} # if there is a subdir named '${module}' py.test will execute tests # there instead of the installed module's tests pushd testenv1/not-${module} # shellcheck disable=SC2086 ../${run_tests}; popd fi python3 -m venv testenv2 python3 -m venv testenv3 python3 -m venv testenv4 python3 -m venv testenv5 rm -Rf testenv[2345]/local # Secondly we test via pip pushd testenv2 # shellcheck source=/dev/null source bin/activate rm -f lib/python-wheels/setuptools* \ && pip install --force-reinstall -U pip==${pipver} \ && pip install setuptools==${setuptoolsver} wheel # The following can fail if you haven't pushed your commits to ${repo} pip install -e "git+${repo}@${HEAD}#egg=${package}${extras}" pushd src/${package} pip install -rtest-requirements.txt build make dist make test cp dist/${package}*tar.gz ../../../testenv3/ cp dist/cwl_upgrader*whl ../../../testenv4/ pip uninstall -y ${package} || true; pip uninstall -y ${package} || true; make install popd # ../.. no subdir named ${proj} here, safe for py.testing the installed module # shellcheck disable=SC2086 ${run_tests} popd # Is the source distribution in testenv2 complete enough to build # another functional distribution? pushd testenv3/ # shellcheck source=/dev/null source bin/activate rm -f lib/python-wheels/setuptools* \ && pip install --force-reinstall -U pip==${pipver} \ && pip install setuptools==${setuptoolsver} wheel package_tar=$(find . 
-name "${package}*tar.gz") pip install "-r${DIR}/test-requirements.txt" build pip install "${package_tar}${extras}" mkdir out tar --extract --directory=out -z -f ${package}*.tar.gz pushd out/${package}* make dist make test pip install "-r${DIR}/mypy-requirements.txt" make mypy pip uninstall -y ${package} || true; pip uninstall -y ${package} || true; make install mkdir ../not-${module} pushd ../not-${module} # shellcheck disable=SC2086 ../../${run_tests}; popd popd popd # Is the wheel in testenv2 installable and will it pass the tests pushd testenv4/ # shellcheck source=/dev/null source bin/activate rm -f lib/python-wheels/setuptools* \ && pip install --force-reinstall -U pip==${pipver} \ && pip install setuptools==${setuptoolsver} wheel pip install "$(ls cwl_upgrader*.whl)${extras}" pip install "-r${DIR}/test-requirements.txt" mkdir not-${module} pushd not-${module} # shellcheck disable=SC2086 ../${run_tests}; popd popd cwl-upgrader-1.2.11/requirements.txt000066400000000000000000000000531453661752700174720ustar00rootroot00000000000000ruamel.yaml >= 0.16.0, < 0.19 schema-salad cwl-upgrader-1.2.11/setup.cfg000066400000000000000000000002461453661752700160330ustar00rootroot00000000000000[flake8] ignore = E124,E128,E129,E201,E202,E225,E226,E231,E265,E271,E302,E303,F401,E402,E501,W503,E731,F811,F821,F841 max-line-length = 88 extend-ignore = E203, W503 cwl-upgrader-1.2.11/test-requirements.txt000066400000000000000000000000651453661752700204520ustar00rootroot00000000000000pytest < 7.5.0 pytest-runner pytest-cov pytest-xdist cwl-upgrader-1.2.11/testdata/000077500000000000000000000000001453661752700160215ustar00rootroot00000000000000cwl-upgrader-1.2.11/testdata/draft-3/000077500000000000000000000000001453661752700172615ustar00rootroot00000000000000cwl-upgrader-1.2.11/testdata/draft-3/attributor-prok-cheetah.cwl000066400000000000000000000255361453661752700245520ustar00rootroot00000000000000#!/usr/bin/env cwl-runner # Source: 
https://github.com/jorvis/GALES/blob/cc869204cdb004a7c952900692a97b4edd228e6d/cwl/tools/attributor-prok-cheetah.cwl # Copyright (c) 2016 Joshua Orvis # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
cwlVersion: "cwl:draft-3" class: CommandLineTool baseCommand: attributor requirements: - class: InlineJavascriptRequirement - class: CreateFileRequirement fileDef: - filename: attributor.config fileContent: | general: default_product_name: hypothetical protein allow_attributes_from_multiple_sources: No debugging_polypeptide_limit: 0 indexes: coding_hmm_lib: $(inputs.hmm_attribute_lookup_file.path) uniref100: $(inputs.blast_attribute_lookup_file.path) input: polypeptide_fasta: $(inputs.polypeptide_fasta.path) gff3: $(inputs.source_gff3.path) order: - coding_hmm_lib__equivalog - rapsearch2__trusted_full_full - coding_hmm_lib__equivalog_domain - rapsearch2__trusted_partial_full - coding_hmm_lib__subfamily - coding_hmm_lib__superfamily - coding_hmm_lib__subfamily_domain - coding_hmm_lib__domain - coding_hmm_lib__pfam - rapsearch2__trusted_full_partial - rapsearch2__all_full_full - tmhmm #- lipoprotein_motif - coding_hmm_lib__hypothetical_equivalog evidence: - label: coding_hmm_lib__equivalog type: HMMer3_htab path: ${ var r = ""; for (var i = 0; i < inputs.hmm_files.length; i++) { if (i > 0) { r += ","; } r += inputs.hmm_files[i].path.replace('file://',''); } return r; } class: equivalog index: coding_hmm_lib - label: coding_hmm_lib__equivalog_domain type: HMMer3_htab path: ${ var r = ""; for (var i = 0; i < inputs.hmm_files.length; i++) { if (i > 0) { r += ","; } r += inputs.hmm_files[i].path.replace('file://',''); } return r; } class: equivalog_domain index: coding_hmm_lib - label: coding_hmm_lib__subfamily type: HMMer3_htab path: ${ var r = ""; for (var i = 0; i < inputs.hmm_files.length; i++) { if (i > 0) { r += ","; } r += inputs.hmm_files[i].path.replace('file://',''); } return r; } class: subfamily index: coding_hmm_lib append_text: family protein - label: coding_hmm_lib__superfamily type: HMMer3_htab path: ${ var r = ""; for (var i = 0; i < inputs.hmm_files.length; i++) { if (i > 0) { r += ","; } r += inputs.hmm_files[i].path.replace('file://',''); } return r; 
} class: superfamily index: coding_hmm_lib append_text: family protein - label: coding_hmm_lib__subfamily_domain type: HMMer3_htab path: ${ var r = ""; for (var i = 0; i < inputs.hmm_files.length; i++) { if (i > 0) { r += ","; } r += inputs.hmm_files[i].path.replace('file://',''); } return r; } class: subfamily_domain index: coding_hmm_lib append_text: domain protein - label: coding_hmm_lib__domain type: HMMer3_htab path: ${ var r = ""; for (var i = 0; i < inputs.hmm_files.length; i++) { if (i > 0) { r += ","; } r += inputs.hmm_files[i].path.replace('file://',''); } return r; } class: domain index: coding_hmm_lib append_text: domain protein - label: coding_hmm_lib__pfam type: HMMer3_htab path: ${ var r = ""; for (var i = 0; i < inputs.hmm_files.length; i++) { if (i > 0) { r += ","; } r += inputs.hmm_files[i].path.replace('file://',''); } return r; } class: pfam index: coding_hmm_lib append_text: family protein - label: coding_hmm_lib__hypothetical_equivalog type: HMMer3_htab path: ${ var r = ""; for (var i = 0; i < inputs.hmm_files.length; i++) { if (i > 0) { r += ","; } r += inputs.hmm_files[i].path.replace('file://',''); } return r; } class: hypoth_equivalog index: coding_hmm_lib - label: rapsearch2__trusted_full_full type: RAPSearch2_m8 path: ${ var r = ""; for (var i = 0; i < inputs.m8_files.length; i++) { if (i > 0) { r += ","; } r += inputs.m8_files[i].path.replace('file://',''); } return r; } class: trusted index: uniref100 query_cov: 80% match_cov: 80% percent_identity_cutoff: 50% - label: rapsearch2__trusted_partial_full type: RAPSearch2_m8 path: ${ var r = ""; for (var i = 0; i < inputs.m8_files.length; i++) { if (i > 0) { r += ","; } r += inputs.m8_files[i].path.replace('file://',''); } return r; } class: trusted index: uniref100 match_cov: 80% percent_identity_cutoff: 50% append_text: domain protein - label: rapsearch2__trusted_full_partial type: RAPSearch2_m8 path: ${ var r = ""; for (var i = 0; i < inputs.m8_files.length; i++) { if (i > 0) { r += ","; 
} r += inputs.m8_files[i].path.replace('file://',''); } return r; } class: trusted index: uniref100 query_cov: 80% percent_identity_cutoff: 50% append_text: domain protein - label: rapsearch2__all_full_full type: RAPSearch2_m8 path: ${ var r = ""; for (var i = 0; i < inputs.m8_files.length; i++) { if (i > 0) { r += ","; } r += inputs.m8_files[i].path.replace('file://',''); } return r; } index: uniref100 query_cov: 80% match_cov: 80% percent_identity_cutoff: 50% prepend_text: putative - label: tmhmm type: TMHMM product_name: putative integral membrane protein min_helical_spans: 5 path: ${ var r = ""; for (var i = 0; i < inputs.tmhmm_files.length; i++) { if (i > 0) { r += ","; } r += inputs.tmhmm_files[i].path.replace('file://',''); } return r; } hints: - class: DockerRequirement dockerPull: jorvis/gales-gce inputs: - id: config_file type: string inputBinding: prefix: -c separate: true position: 1 - id: output_base type: string inputBinding: position: 2 prefix: -o separate: true - id: output_format type: string inputBinding: position: 3 prefix: -f separate: true - id: hmm_attribute_lookup_file type: File - id: blast_attribute_lookup_file type: File - id: polypeptide_fasta type: File - id: source_gff3 type: File - id: hmm_files type: type: array items: File - id: m8_files type: type: array items: File - id: tmhmm_files type: type: array items: File outputs: - id: output_files type: type: array items: File outputBinding: glob: $(inputs.output_base + '*') - id: the_config type: File outputBinding: glob: 'attributor.config' cwl-upgrader-1.2.11/testdata/draft-3/wf.cwl000066400000000000000000000011611453661752700204030ustar00rootroot00000000000000class: Workflow cwlVersion: draft-3 inputs: - id: '#input_file' type: ['null', File] outputs: - id: '#validatefiles_report' source: '#validatefiles.report' type: ['null', File] - id: '#md5_report' source: '#md5.report' type: ['null', File] requirements: - {class: InlineJavascriptRequirement} steps: - id: '#md5' inputs: - {id: 
'#md5.input_file', source: '#input_file'} outputs: - {id: '#md5.report'} run: md5.cwl - id: '#validatefiles' inputs: - {id: '#validatefiles.input_file', source: '#input_file'} - {id: '#validatefiles.type'} outputs: - {id: '#validatefiles.report'} run: validate.cwl cwl-upgrader-1.2.11/testdata/v1.0/000077500000000000000000000000001453661752700165055ustar00rootroot00000000000000cwl-upgrader-1.2.11/testdata/v1.0/1st-workflow.cwl000066400000000000000000000006611453661752700215760ustar00rootroot00000000000000#!/usr/bin/env cwl-runner cwlVersion: v1.0 class: Workflow inputs: tarball: File name_of_file_to_extract: string outputs: compiled_class: type: File outputSource: compile/classfile steps: untar: run: tar-param.cwl in: tarfile: tarball extractfile: name_of_file_to_extract out: [extracted_file] compile: run: arguments.cwl in: src: untar/extracted_file out: [classfile] cwl-upgrader-1.2.11/testdata/v1.0/arguments.cwl000066400000000000000000000006011453661752700212160ustar00rootroot00000000000000#!/usr/bin/env cwl-runner cwlVersion: v1.0 class: CommandLineTool label: Example trivial wrapper for Java 9 compiler hints: DockerRequirement: dockerPull: openjdk:9.0.1-11-slim baseCommand: javac arguments: ["-d", $(runtime.outdir)] inputs: src: type: File inputBinding: position: 1 outputs: classfile: type: File outputBinding: glob: "*.class" cwl-upgrader-1.2.11/testdata/v1.0/attributor-prok-cheetah.cwl000077500000000000000000000212131453661752700237650ustar00rootroot00000000000000#!/usr/bin/env cwl-runner cwlVersion: "v1.0" class: CommandLineTool requirements: InlineJavascriptRequirement: {} InitialWorkDirRequirement: listing: - entryname: attributor.config entry: | general: default_product_name: hypothetical protein allow_attributes_from_multiple_sources: No debugging_polypeptide_limit: 0 indexes: coding_hmm_lib: $(inputs.hmm_attribute_lookup_file.path) uniref100: $(inputs.blast_attribute_lookup_file.path) input: polypeptide_fasta: $(inputs.polypeptide_fasta.path) gff3: 
$(inputs.source_gff3.path) order: - coding_hmm_lib__equivalog - rapsearch2__trusted_full_full - coding_hmm_lib__equivalog_domain - rapsearch2__trusted_partial_full - coding_hmm_lib__subfamily - coding_hmm_lib__superfamily - coding_hmm_lib__subfamily_domain - coding_hmm_lib__domain - coding_hmm_lib__pfam - rapsearch2__trusted_full_partial - rapsearch2__all_full_full - tmhmm #- lipoprotein_motif - coding_hmm_lib__hypothetical_equivalog evidence: - label: coding_hmm_lib__equivalog type: HMMer3_htab path: ${ var r = ""; for (var i = 0; i < inputs.hmm_files.length; i++) { if (i > 0) { r += ","; } r += inputs.hmm_files[i].path.replace('file://',''); } return r; } class: equivalog index: coding_hmm_lib - label: coding_hmm_lib__equivalog_domain type: HMMer3_htab path: ${ var r = ""; for (var i = 0; i < inputs.hmm_files.length; i++) { if (i > 0) { r += ","; } r += inputs.hmm_files[i].path.replace('file://',''); } return r; } class: equivalog_domain index: coding_hmm_lib - label: coding_hmm_lib__subfamily type: HMMer3_htab path: ${ var r = ""; for (var i = 0; i < inputs.hmm_files.length; i++) { if (i > 0) { r += ","; } r += inputs.hmm_files[i].path.replace('file://',''); } return r; } class: subfamily index: coding_hmm_lib append_text: family protein - label: coding_hmm_lib__superfamily type: HMMer3_htab path: ${ var r = ""; for (var i = 0; i < inputs.hmm_files.length; i++) { if (i > 0) { r += ","; } r += inputs.hmm_files[i].path.replace('file://',''); } return r; } class: superfamily index: coding_hmm_lib append_text: family protein - label: coding_hmm_lib__subfamily_domain type: HMMer3_htab path: ${ var r = ""; for (var i = 0; i < inputs.hmm_files.length; i++) { if (i > 0) { r += ","; } r += inputs.hmm_files[i].path.replace('file://',''); } return r; } class: subfamily_domain index: coding_hmm_lib append_text: domain protein - label: coding_hmm_lib__domain type: HMMer3_htab path: ${ var r = ""; for (var i = 0; i < inputs.hmm_files.length; i++) { if (i > 0) { r += ","; } r 
+= inputs.hmm_files[i].path.replace('file://',''); } return r; } class: domain index: coding_hmm_lib append_text: domain protein - label: coding_hmm_lib__pfam type: HMMer3_htab path: ${ var r = ""; for (var i = 0; i < inputs.hmm_files.length; i++) { if (i > 0) { r += ","; } r += inputs.hmm_files[i].path.replace('file://',''); } return r; } class: pfam index: coding_hmm_lib append_text: family protein - label: coding_hmm_lib__hypothetical_equivalog type: HMMer3_htab path: ${ var r = ""; for (var i = 0; i < inputs.hmm_files.length; i++) { if (i > 0) { r += ","; } r += inputs.hmm_files[i].path.replace('file://',''); } return r; } class: hypoth_equivalog index: coding_hmm_lib - label: rapsearch2__trusted_full_full type: RAPSearch2_m8 path: ${ var r = ""; for (var i = 0; i < inputs.m8_files.length; i++) { if (i > 0) { r += ","; } r += inputs.m8_files[i].path.replace('file://',''); } return r; } class: trusted index: uniref100 query_cov: 80% match_cov: 80% percent_identity_cutoff: 50% - label: rapsearch2__trusted_partial_full type: RAPSearch2_m8 path: ${ var r = ""; for (var i = 0; i < inputs.m8_files.length; i++) { if (i > 0) { r += ","; } r += inputs.m8_files[i].path.replace('file://',''); } return r; } class: trusted index: uniref100 match_cov: 80% percent_identity_cutoff: 50% append_text: domain protein - label: rapsearch2__trusted_full_partial type: RAPSearch2_m8 path: ${ var r = ""; for (var i = 0; i < inputs.m8_files.length; i++) { if (i > 0) { r += ","; } r += inputs.m8_files[i].path.replace('file://',''); } return r; } class: trusted index: uniref100 query_cov: 80% percent_identity_cutoff: 50% append_text: domain protein - label: rapsearch2__all_full_full type: RAPSearch2_m8 path: ${ var r = ""; for (var i = 0; i < inputs.m8_files.length; i++) { if (i > 0) { r += ","; } r += inputs.m8_files[i].path.replace('file://',''); } return r; } index: uniref100 query_cov: 80% match_cov: 80% percent_identity_cutoff: 50% prepend_text: putative - label: tmhmm type: TMHMM 
product_name: putative integral membrane protein min_helical_spans: 5 path: ${ var r = ""; for (var i = 0; i < inputs.tmhmm_files.length; i++) { if (i > 0) { r += ","; } r += inputs.tmhmm_files[i].path.replace('file://',''); } return r; } hints: DockerRequirement: dockerPull: jorvis/gales-gce inputs: config_file: type: string inputBinding: prefix: -c separate: true position: 1 output_base: type: string inputBinding: position: 2 prefix: -o separate: true output_format: type: string inputBinding: position: 3 prefix: -f separate: true hmm_attribute_lookup_file: File blast_attribute_lookup_file: File polypeptide_fasta: File source_gff3: File hmm_files: type: type: array items: File m8_files: type: type: array items: File tmhmm_files: type: type: array items: File baseCommand: attributor outputs: output_files: type: type: array items: File outputBinding: glob: $(inputs.output_base + '*') the_config: type: File outputBinding: glob: 'attributor.config' cwl-upgrader-1.2.11/testdata/v1.0/conflict-wf.cwl000066400000000000000000000022051453661752700214260ustar00rootroot00000000000000cwlVersion: v1.0 $graph: - id: echo class: CommandLineTool hints: ResourceRequirement: ramMin: 8 inputs: text: type: string inputBinding: {} outputs: fileout: type: File outputBinding: glob: out.txt baseCommand: echo stdout: out.txt - id: cat class: CommandLineTool hints: ResourceRequirement: ramMin: 8 inputs: file1: type: File inputBinding: position: 1 file2: type: File inputBinding: position: 2 outputs: fileout: type: File outputBinding: glob: out.txt baseCommand: cat stdout: out.txt - class: Workflow id: collision inputs: input_1: string input_2: string outputs: fileout: type: File outputSource: cat_step/fileout steps: echo_1: run: "#echo" in: text: input_1 out: [fileout] echo_2: run: "#echo" in: text: input_2 out: [fileout] cat_step: run: "#cat" in: file1: source: echo_1/fileout file2: source: echo_2/fileout out: [fileout] 
cwl-upgrader-1.2.11/testdata/v1.0/listing_deep1-arr.cwl000077500000000000000000000004701453661752700225310ustar00rootroot00000000000000#!/usr/bin/env cwl-runner class: CommandLineTool cwlVersion: v1.0 requirements: - class: InlineJavascriptRequirement inputs: d: Directory outputs: out: type: boolean outputBinding: outputEval: '$(inputs.d.listing.length === 1 && inputs.d.listing[0].listing.length === 1)' baseCommand: "true" cwl-upgrader-1.2.11/testdata/v1.0/listing_deep1.cwl000077500000000000000000000004631453661752700217510ustar00rootroot00000000000000#!/usr/bin/env cwl-runner class: CommandLineTool cwlVersion: v1.0 requirements: InlineJavascriptRequirement: {} inputs: d: Directory outputs: out: type: boolean outputBinding: outputEval: '$(inputs.d.listing.length === 1 && inputs.d.listing[0].listing.length === 1)' baseCommand: "true" cwl-upgrader-1.2.11/testdata/v1.0/networkaccess.cwl000066400000000000000000000003331453661752700220660ustar00rootroot00000000000000class: CommandLineTool cwlVersion: v1.0 inputs: [] outputs: [] baseCommand: python arguments: - "-c" - valueFrom: | import urllib.request assert(urllib.request.urlopen("http://commonwl.org").code == 200)cwl-upgrader-1.2.11/testdata/v1.0/tar-param.cwl000066400000000000000000000005151453661752700211010ustar00rootroot00000000000000#!/usr/bin/env cwl-runner cwlVersion: v1.0 class: CommandLineTool baseCommand: [tar, --extract] inputs: tarfile: type: File inputBinding: prefix: --file extractfile: type: string inputBinding: position: 1 outputs: extracted_file: type: File outputBinding: glob: $(inputs.extractfile) cwl-upgrader-1.2.11/testdata/v1.0/wf.cwl000066400000000000000000000007511453661752700176330ustar00rootroot00000000000000#!/usr/bin/env cwl-runner cwlVersion: v1.0 class: Workflow requirements: InlineJavascriptRequirement: {} inputs: input_file: File? 
steps: md5: run: md5.cwl out: - report in: input_file: 'input_file' validatefiles: run: validate.cwl out: - report in: input_file: 'input_file' type: {} outputs: validatefiles_report: type: File? outputSource: validatefiles/report md5_report: type: File? outputSource: md5/report cwl-upgrader-1.2.11/testdata/v1.1/000077500000000000000000000000001453661752700165065ustar00rootroot00000000000000cwl-upgrader-1.2.11/testdata/v1.1/conflict-wf.cwl000077500000000000000000000026211453661752700214340ustar00rootroot00000000000000#!/usr/bin/env cwl-runner cwlVersion: v1.1 $graph: - id: echo class: CommandLineTool hints: ResourceRequirement: ramMin: 8 inputs: text: type: string inputBinding: {} outputs: fileout: type: File outputBinding: glob: out.txt baseCommand: echo stdout: out.txt requirements: NetworkAccess: networkAccess: true LoadListingRequirement: loadListing: deep_listing - id: cat class: CommandLineTool hints: ResourceRequirement: ramMin: 8 inputs: file1: type: File inputBinding: position: 1 file2: type: File inputBinding: position: 2 outputs: fileout: type: File outputBinding: glob: out.txt baseCommand: cat stdout: out.txt requirements: NetworkAccess: networkAccess: true LoadListingRequirement: loadListing: deep_listing - class: Workflow id: collision inputs: input_1: string input_2: string outputs: fileout: type: File outputSource: cat_step/fileout steps: echo_1: run: "#echo" in: text: input_1 out: [fileout] echo_2: run: "#echo" in: text: input_2 out: [fileout] cat_step: run: "#cat" in: file1: source: echo_1/fileout file2: source: echo_2/fileout out: [fileout] cwl-upgrader-1.2.11/testdata/v1.1/listing_deep1-arr.cwl000077500000000000000000000006511453661752700225330ustar00rootroot00000000000000#!/usr/bin/env cwl-runner class: CommandLineTool cwlVersion: v1.1 requirements: - class: InlineJavascriptRequirement - class: NetworkAccess networkAccess: true - class: LoadListingRequirement loadListing: deep_listing inputs: d: Directory outputs: out: type: boolean 
outputBinding: outputEval: '$(inputs.d.listing.length === 1 && inputs.d.listing[0].listing.length === 1)' baseCommand: "true" cwl-upgrader-1.2.11/testdata/v1.1/listing_deep1.cwl000077500000000000000000000006241453661752700217510ustar00rootroot00000000000000#!/usr/bin/env cwl-runner class: CommandLineTool cwlVersion: v1.1 requirements: LoadListingRequirement: loadListing: deep_listing InlineJavascriptRequirement: {} NetworkAccess: networkAccess: true inputs: d: Directory outputs: out: type: boolean outputBinding: outputEval: '$(inputs.d.listing.length === 1 && inputs.d.listing[0].listing.length === 1)' baseCommand: "true" cwl-upgrader-1.2.11/testdata/v1.1/networkaccess.cwl000066400000000000000000000005121453661752700220660ustar00rootroot00000000000000class: CommandLineTool cwlVersion: v1.1 requirements: NetworkAccess: networkAccess: true LoadListingRequirement: loadListing: deep_listing inputs: [] outputs: [] baseCommand: python arguments: - "-c" - valueFrom: | import urllib.request assert(urllib.request.urlopen("http://commonwl.org").code == 200)cwl-upgrader-1.2.11/testdata/v1.2/000077500000000000000000000000001453661752700165075ustar00rootroot00000000000000cwl-upgrader-1.2.11/testdata/v1.2/1st-workflow.cwl000077500000000000000000000006601453661752700216020ustar00rootroot00000000000000#!/usr/bin/env cwl-runner cwlVersion: v1.2 class: Workflow inputs: tarball: File name_of_file_to_extract: string steps: untar: run: tar-param.cwl in: tarfile: tarball extractfile: name_of_file_to_extract out: [extracted_file] compile: run: arguments.cwl in: src: untar/extracted_file out: [classfile] outputs: compiled_class: type: File outputSource: compile/classfile cwl-upgrader-1.2.11/testdata/v1.2/arguments.cwl000077500000000000000000000007571453661752700212370ustar00rootroot00000000000000#!/usr/bin/env cwl-runner cwlVersion: v1.2 class: CommandLineTool label: Example trivial wrapper for Java 9 compiler requirements: NetworkAccess: networkAccess: true LoadListingRequirement: 
loadListing: deep_listing hints: DockerRequirement: dockerPull: openjdk:9.0.1-11-slim inputs: src: type: File inputBinding: position: 1 baseCommand: javac arguments: ["-d", $(runtime.outdir)] outputs: classfile: type: File outputBinding: glob: "*.class" cwl-upgrader-1.2.11/testdata/v1.2/listing_deep1.cwl000077500000000000000000000006241453661752700217520ustar00rootroot00000000000000#!/usr/bin/env cwl-runner class: CommandLineTool cwlVersion: v1.2 requirements: LoadListingRequirement: loadListing: deep_listing InlineJavascriptRequirement: {} NetworkAccess: networkAccess: true inputs: d: Directory outputs: out: type: boolean outputBinding: outputEval: '$(inputs.d.listing.length === 1 && inputs.d.listing[0].listing.length === 1)' baseCommand: "true" cwl-upgrader-1.2.11/testdata/v1.2/networkaccess.cwl000066400000000000000000000005121453661752700220670ustar00rootroot00000000000000class: CommandLineTool cwlVersion: v1.2 requirements: NetworkAccess: networkAccess: true LoadListingRequirement: loadListing: deep_listing inputs: [] outputs: [] baseCommand: python arguments: - "-c" - valueFrom: | import urllib.request assert(urllib.request.urlopen("http://commonwl.org").code == 200)cwl-upgrader-1.2.11/testdata/v1.2/tar-param.cwl000077500000000000000000000006731453661752700211130ustar00rootroot00000000000000#!/usr/bin/env cwl-runner cwlVersion: v1.2 class: CommandLineTool requirements: NetworkAccess: networkAccess: true LoadListingRequirement: loadListing: deep_listing inputs: tarfile: type: File inputBinding: prefix: --file extractfile: type: string inputBinding: position: 1 baseCommand: [tar, --extract] outputs: extracted_file: type: File outputBinding: glob: $(inputs.extractfile) 
cwl-upgrader-1.2.11/tests/000077500000000000000000000000001453661752700153525ustar00rootroot00000000000000cwl-upgrader-1.2.11/tests/__init__.py000066400000000000000000000000001453661752700174510ustar00rootroot00000000000000cwl-upgrader-1.2.11/tests/cwl-conformance/000077500000000000000000000000001453661752700204275ustar00rootroot00000000000000cwl-upgrader-1.2.11/tests/cwl-conformance/cwltool-conftest.py000066400000000000000000000020471453661752700243120ustar00rootroot00000000000000""" Example configuration for pytest + cwltest plugin using cwltool directly. Calls cwltool via Python, instead of a subprocess via `--cwl-runner cwltool`. """ import json from io import StringIO from typing import Any, Dict, List, Optional, Tuple from cwltest import utils def pytest_cwl_execute_test( config: utils.CWLTestConfig, processfile: str, jobfile: Optional[str] ) -> Tuple[int, Optional[Dict[str, Any]]]: """Use the CWL reference runner (cwltool) to execute tests.""" from cwltool import main from cwltool.errors import WorkflowException stdout = StringIO() argsl: List[str] = [f"--outdir={config.outdir}"] if config.runner_quiet: argsl.append("--quiet") elif config.verbose: argsl.append("--debug") argsl.extend(config.args) argsl.append(processfile) if jobfile: argsl.append(jobfile) try: result = main.main(argsl=argsl, stdout=stdout) except WorkflowException: return 1, {} out = stdout.getvalue() return result, json.loads(out) if out else {} cwl-upgrader-1.2.11/tests/test_complete.py000066400000000000000000000101521453661752700205720ustar00rootroot00000000000000import filecmp from pathlib import Path from cwlupgrader.main import load_cwl_document, main, upgrade_document from .util import get_data def test_draft3_workflow(tmp_path: Path) -> None: """Basic draft3 to CWL v1.1 test.""" main([f"--dir={tmp_path}", "--v1-only", get_data("testdata/draft-3/wf.cwl")]) result = filecmp.cmp( get_data("testdata/v1.0/wf.cwl"), tmp_path / "wf.cwl", shallow=False, ) assert result def 
test_draft3_tool_long_form_arrays(tmp_path: Path) -> None: """Draft-3 document with long form array inputs.""" main( [ f"--dir={tmp_path}", "--v1-only", get_data("testdata/draft-3/attributor-prok-cheetah.cwl"), ] ) result = filecmp.cmp( get_data("testdata/v1.0/attributor-prok-cheetah.cwl"), tmp_path / "attributor-prok-cheetah.cwl", shallow=False, ) assert result def test_invalid_target(tmp_path: Path) -> None: """Test for invalid target version""" doc = load_cwl_document(get_data("testdata/v1.0/listing_deep1.cwl")) result = upgrade_document(doc, str(tmp_path), "invalid-version") assert result is None def test_v1_0_to_v1_1_load_listing(tmp_path: Path) -> None: """Basic CWL v1.0 to CWL v1.1 test with LoadListingRequirement (map notation).""" doc = load_cwl_document(get_data("testdata/v1.0/listing_deep1.cwl")) upgraded = upgrade_document(doc, str(tmp_path), "v1.1") expected = load_cwl_document(get_data("testdata/v1.1/listing_deep1.cwl")) assert upgraded == expected def test_v1_0_to_v1_1_load_listing_arr(tmp_path: Path) -> None: """Basic CWL v1.0 to CWL v1.1 test with LoadListingRequirement (array notation).""" doc = load_cwl_document(get_data("testdata/v1.0/listing_deep1-arr.cwl")) upgraded = upgrade_document(doc, str(tmp_path), "v1.1") expected = load_cwl_document(get_data("testdata/v1.1/listing_deep1-arr.cwl")) assert upgraded == expected def test_v1_0_to_v1_1_network_access(tmp_path: Path) -> None: """Basic CWL v1.0 to CWL v1.1 test with NetworkAccess.""" doc = load_cwl_document(get_data("testdata/v1.0/networkaccess.cwl")) upgraded = upgrade_document(doc, str(tmp_path), "v1.1") expected = load_cwl_document(get_data("testdata/v1.1/networkaccess.cwl")) assert upgraded == expected def test_v1_1_to_v1_2(tmp_path: Path) -> None: """Basic CWL v1.1 to CWL v1.2 test.""" doc = load_cwl_document(get_data("testdata/v1.1/listing_deep1.cwl")) upgraded = upgrade_document(doc, str(tmp_path), "v1.2") expected = load_cwl_document(get_data("testdata/v1.2/listing_deep1.cwl")) assert 
upgraded == expected def test_v1_2_to_v1_2(tmp_path: Path) -> None: """CWL v1.2 to CWL v1.2 no change test.""" doc = load_cwl_document(get_data("testdata/v1.2/networkaccess.cwl")) upgraded = upgrade_document(doc, str(tmp_path), "v1.2") expected = load_cwl_document(get_data("testdata/v1.2/networkaccess.cwl")) assert upgraded == expected def test_v1_2_to_latest(tmp_path: Path) -> None: """CWL v1.2 to latest no change test.""" doc = load_cwl_document(get_data("testdata/v1.2/networkaccess.cwl")) upgraded = upgrade_document(doc, str(tmp_path), "latest") expected = load_cwl_document(get_data("testdata/v1.2/networkaccess.cwl")) assert upgraded == expected def test_packed_graph(tmp_path: Path) -> None: """Test packed document with $graph.""" main( [f"--dir={tmp_path}", "--v1.1-only", get_data("testdata/v1.0/conflict-wf.cwl")] ) assert filecmp.cmp( get_data("testdata/v1.1/conflict-wf.cwl"), tmp_path / "conflict-wf.cwl", shallow=False, ) def test_multi_version_upgrade_external_steps(tmp_path: Path) -> None: """Test 1.0 to 1.2 upgrade of Workflow with external steps.""" main([f"--dir={tmp_path}", get_data("testdata/v1.0/1st-workflow.cwl")]) assert filecmp.cmp( get_data("testdata/v1.2/arguments.cwl"), tmp_path / "arguments.cwl", shallow=False, ) assert filecmp.cmp( get_data("testdata/v1.2/tar-param.cwl"), tmp_path / "tar-param.cwl", shallow=False, ) cwl-upgrader-1.2.11/tests/test_output_dir.py000066400000000000000000000010311453661752700211540ustar00rootroot00000000000000"""Tests related to the --dir command line option.""" import filecmp from pathlib import Path from cwlupgrader.main import main from .util import get_data def test_draft3_workflow(tmp_path: Path) -> None: """Confirm that --dir works when the directory doesn't exist yet.""" out_dir = tmp_path / "new" main([f"--dir={out_dir}", "--v1-only", get_data("testdata/draft-3/wf.cwl")]) result = filecmp.cmp( get_data("testdata/v1.0/wf.cwl"), out_dir / "wf.cwl", shallow=False, ) assert result 
cwl-upgrader-1.2.11/tests/util.py000066400000000000000000000014231453661752700167010ustar00rootroot00000000000000import os from pkg_resources import Requirement, ResolutionError, resource_filename def get_data(filename: str) -> str: filename = os.path.normpath(filename) # normalizing path depending on OS or else it will cause problem when # joining path filepath = None try: filepath = resource_filename(Requirement.parse("cwlupgrader"), filename) except ResolutionError: pass if not filepath or not os.path.isfile(filepath): filepath = os.path.join(os.path.dirname(__file__), os.pardir, filename) # warning, __file__ is all lowercase on Windows systems, this can # sometimes conflict with docker toolkit. Workaround: pip install . # and run the tests elsewhere via python -m pytest --pyarg cwltool return filepath cwl-upgrader-1.2.11/tox.ini000066400000000000000000000032771453661752700155340ustar00rootroot00000000000000[tox] envlist = py3{8,9,10,11,12}-lint, py3{8,9,10,11,12}-unit, py3{8,9,10,11,12}-mypy, py312-lintreadme, py312-pydocstyle isolated_build = True skip_missing_interpreters = True [pytest] addopts=-n auto [gh-actions] python = 3.8: py38 3.9: py39 3.10: py310 3.11: py311 3.12: py312 [testenv] description = py3{8,9,10,11,12}-unit: Run the unit tests py3{8,9,10,11,12}-lint: Lint the Python code py3{8,9,10,11,12}-mypy: Check for type safety py312-pydocstyle: docstring style checker py312-lintreadme: Lint the README.rst->.md conversion passenv = CI GITHUB_* deps = py3{8,9,10,11,12}-{unit,mypy}: -rrequirements.txt py3{8,9,10,11,12}-{unit,mypy}: -rtest-requirements.txt py3{8,9,10,11,12}-lint: flake8-bugbear py3{8,9,10,11,12}-lint: black py3{8,9,10,11,12}-mypy: -rmypy-requirements.txt setenv = py3{8,9,10,11,12}-unit: LC_ALL = C.UTF-8 commands = py3{8,9,10,11,12}-unit: python -m pip install -U pip setuptools wheel py3{8,9,10,11,12}-unit: make coverage-report coverage.xml PYTEST_EXTRA="{posargs}" py3{8,9,10,11,12}-lint: make flake8 py3{8,9,10,11,12}-lint: make 
format-check py3{8,9,10,11,12}-mypy: make mypy allowlist_externals = py3{8,9,10,11,12}-lint: flake8 py3{8,9,10,11,12}-lint: black py3{8,9,10,11,12}-{mypy,shellcheck,lint,unit}: make skip_install = py3{8,9,10,11,12}-lint: true [testenv:py312-pydocstyle] allowlist_externals = make commands = make diff_pydocstyle_report deps = pydocstyle diff-cover [testenv:py312-lintreadme] description = Lint the README.rst->.md conversion allowlist_externals = make commands = make clean dist twine check dist/cwl[-_]upgrader* deps = twine wheel build readme_renderer[rst]