bioblend-1.2.0/.git-blame-ignore-revs
# Format Python code with black and isort
7bcd07db8392ac790d1b0b92f4a377945197e43d
bioblend-1.2.0/.github/workflows/deploy.yaml
name: Deploy
on: [push, pull_request]
jobs:
  deploy:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: '3.11'
      - name: Install dependencies
        run: |
          python3 -m pip install --upgrade pip setuptools
          python3 -m pip install --upgrade build twine
      - name: Create and check sdist and wheel packages
        run: |
          python3 -m build
          twine check dist/*
      - name: Publish to PyPI
        if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') && github.repository_owner == 'galaxyproject'
        uses: pypa/gh-action-pypi-publish@release/v1
        with:
          user: __token__
          password: ${{ secrets.PYPI_PASSWORD }}
bioblend-1.2.0/.github/workflows/lint.yaml
name: Lint
on: [push, pull_request]
concurrency:
  group: lint-${{ github.ref }}
  cancel-in-progress: true
jobs:
  lint:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ['3.7', '3.11']
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install tox
        run: python -m pip install 'tox>=1.8.0'
      - name: Lint
        run: tox -e lint
bioblend-1.2.0/.github/workflows/test.yaml
name: Tests
on:
  push:
  pull_request:
  schedule:
    # Run at midnight UTC every Tuesday
    - cron: '0 0 * * 2'
concurrency:
  group: test-${{ github.ref }}
  cancel-in-progress: true
jobs:
  test:
    if: github.event_name != 'schedule' || github.repository_owner == 'galaxyproject'
    runs-on: ${{ matrix.os }}
    services:
      postgres:
        image: postgres
        # Provide the password for postgres
        env:
          POSTGRES_PASSWORD: postgres
        # Set health checks to wait until postgres has started
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        ports:
          - 5432:5432
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest]
        tox_env: [py37]
        galaxy_version:
          - dev
          - release_23.1
          - release_23.0
          - release_22.05
          - release_22.01
          - release_21.09
          - release_21.05
          - release_21.01
          - release_20.09
          - release_20.05
          - release_20.01
          - release_19.09
          - release_19.05
        include:
          - os: ubuntu-latest
            tox_env: py311
            galaxy_version: dev
          # Cannot test on macOS because service containers are not supported
          # yet: https://github.community/t/github-actions-services-available-on-others-vms/16916
          # - os: macos-latest
          #   tox_env: py37
          #   galaxy_version: dev
    steps:
      - uses: actions/checkout@v3
      - name: Cache pip dir
        uses: actions/cache@v3
        with:
          path: ~/.cache/pip
          key: pip-cache-${{ matrix.tox_env }}-${{ matrix.galaxy_version }}
      - name: Calculate Python version for BioBlend from tox_env
        id: get_bioblend_python_version
        run: echo "bioblend_python_version=$(echo "${{ matrix.tox_env }}" | sed -e 's/^py\([3-9]\)\([0-9]\+\)/\1.\2/')" >> $GITHUB_OUTPUT
      - name: Set up Python for BioBlend
        uses: actions/setup-python@v4
        with:
          python-version: ${{ steps.get_bioblend_python_version.outputs.bioblend_python_version }}
      - name: Install tox
        run: |
          python3 -m pip install --upgrade pip setuptools
          python3 -m pip install 'tox>=1.8.0' 'virtualenv>=20.0.14'
      - name: Determine Python version for Galaxy
        id: get_galaxy_python_version
        run: |
          case ${{ matrix.galaxy_version }} in
            release_19.05 | release_19.09 | release_20.0* )
              # The minimum Python version supported by the 19.05 and 19.09
              # releases is 2.7, but virtualenv dropped support for creating
              # Python <3.7 environments in v20.22.0 .
              # The minimum Python version supported by the 20.0* releases is
              # 3.5, but the setup-python GitHub action dropped support for
              # Python 3.5 and 3.6 on Ubuntu 22.04, see
              # https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
              galaxy_python_version=3.7
              ;;
            release_21.0* )
              # The minimum Python version supported by these releases is 3.6,
              # but same as above.
              galaxy_python_version=3.7
              ;;
            release_22.0* | release_23.* | dev )
              galaxy_python_version=3.7
          esac
          echo "galaxy_python_version=$galaxy_python_version" >> $GITHUB_OUTPUT
      - name: Set up Python for Galaxy
        uses: actions/setup-python@v4
        with:
          python-version: ${{ steps.get_galaxy_python_version.outputs.galaxy_python_version }}
      - name: Run tests
        env:
          PGPASSWORD: postgres
          PGPORT: 5432
          PGHOST: localhost
        run: |
          # Create a PostgreSQL database for Galaxy. The default SQLite3 database makes tests fail randomly with "database locked" errors.
          createdb -U postgres galaxy
          # Run ToolShed tests only once per Python version
          if [ "${{ matrix.galaxy_version }}" = 'dev' ]; then
            export BIOBLEND_TOOLSHED_URL=https://testtoolshed.g2.bx.psu.edu/
          fi
          # Install Galaxy
          GALAXY_DIR=galaxy-${{ matrix.galaxy_version }}
          git clone --depth=1 -b ${{ matrix.galaxy_version }} https://github.com/galaxyproject/galaxy $GALAXY_DIR
          export DATABASE_CONNECTION=postgresql://postgres:@localhost/galaxy
          ./run_bioblend_tests.sh -g $GALAXY_DIR -v python${{ steps.get_galaxy_python_version.outputs.galaxy_python_version }} -e ${{ matrix.tox_env }}
      - name: The job has failed
        if: ${{ failure() }}
        run: |
          cat galaxy-${{ matrix.galaxy_version }}/*.log
bioblend-1.2.0/.gitignore
*.py[co]
*~
# Packages
*.egg
*.egg-info
dist
build
eggs
parts
bin
var
sdist
develop-eggs
.installed.cfg
.eggs
# Installer logs
pip-log.txt
# Unit test / coverage reports
.coverage
.tox
#Translations
*.mo
#Mr Developer
.mr.developer.cfg
#Vim
*.swp
#Code coverage
cover
#eclipse/pydev
.project
.pydevproject
.idea
# compiled docs
docs/_build
# Python virtualenv
.venv
bioblend-1.2.0/.isort.cfg
[settings]
force_alphabetical_sort_within_sections=true
# Override force_grid_wrap value from profile=black, but black is still happy
force_grid_wrap=2
# Same line length as for black
line_length=120
no_lines_before=LOCALFOLDER
profile=black
reverse_relative=true
skip_gitignore=true
bioblend-1.2.0/.readthedocs.yaml
# .readthedocs.yaml
# Read the Docs configuration file
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
# Required
version: 2
# Set the OS, Python version and other tools you might need
build:
  os: ubuntu-22.04
  tools:
    python: "3.11"

# Build documentation in the docs/ directory with Sphinx
sphinx:
  configuration: docs/conf.py

# Optionally build your docs in additional formats such as PDF and ePub
formats:
  - pdf

# Optional but recommended, declare the Python requirements required
# to build your documentation
# See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html
python:
  install:
    - path: .
    - requirements: docs/requirements.txt
bioblend-1.2.0/ABOUT.rst
`BioBlend <https://bioblend.readthedocs.io/>`_ is a Python library for
interacting with the `Galaxy`_ API.
BioBlend is supported and tested on:
- Python 3.7 - 3.11
- Galaxy release 19.05 and later.
BioBlend's goal is to make it easier to script and automate the running of
Galaxy analyses and administering of a Galaxy server.
In practice, it makes it possible to do things like this:
- Interact with Galaxy via a straightforward API::
    from bioblend.galaxy import GalaxyInstance
    gi = GalaxyInstance('<Galaxy IP>', key='your API key')
    libs = gi.libraries.get_libraries()
    gi.workflows.show_workflow('workflow ID')
    wf_invocation = gi.workflows.invoke_workflow('workflow ID', inputs)
- Interact with Galaxy via an object-oriented API::
    from bioblend.galaxy.objects import GalaxyInstance
    gi = GalaxyInstance("URL", "API_KEY")
    wf = gi.workflows.list()[0]
    hist = gi.histories.list()[0]
    inputs = hist.get_datasets()[:2]
    input_map = dict(zip(wf.input_labels, inputs))
    params = {"Paste1": {"delimiter": "U"}}
    wf_invocation = wf.invoke(input_map, params=params)
About the library name
~~~~~~~~~~~~~~~~~~~~~~
The library was originally called just ``Blend`` but we
`renamed it `_
to reflect more of its domain and to make it a bit more unique so it is easier to find.
The name was intended to be short and easily pronounceable. In its original
implementation, the goal was to provide a lot more support for `CloudMan`_
and other integration capabilities, allowing them to be *blended* together
via code. ``BioBlend`` fitted the bill.
.. References/hyperlinks used above
.. _CloudMan: https://galaxyproject.org/cloudman/
.. _Galaxy: https://galaxyproject.org/
bioblend-1.2.0/CHANGELOG.md
### BioBlend v1.2.0 - 2023-06-30
* Dropped support for Galaxy releases 17.09-19.01. Added support for Galaxy
release 23.1.
* Added a new ``container_resolution`` attribute to ``GalaxyInstance`` objects,
which is an instance of the new ``ContainerResolutionClient``. This new module
can be used to list container resolvers, and to resolve (and install) tool
requirements against specified container resolvers (thanks to
[cat-bro](https://github.com/cat-bro) and
[Matthias Bernt](https://github.com/bernt-matthias)).
* Added ``reload_toolbox()`` method to ``ConfigClient`` (thanks to
[Anthony Bretaudeau](https://github.com/abretaud)).
* Added ``delete_unused_dependency_paths()`` and ``unused_dependency_paths()``
methods to ``ToolDependenciesClient`` (thanks to
[Matthias Bernt](https://github.com/bernt-matthias)).
* Added ``data_manager_mode`` parameter to ``ToolClient.run_tool()`` method
(thanks to [Marius van den Beek](https://github.com/mvdbeek)).
* Added ``user_data`` parameter to ``UserClient.update_user()`` method
(thanks to [Uwe Winter](https://github.com/uwwint)).
* Fixed bug in ``DatasetClient.download_dataset()`` and BioBlend.objects
``HistoryDatasetAssociation.get_stream()`` where the wrong download URL was
generated if the Galaxy instance is served at a subdirectory (reported by
[Anil Thanki](https://github.com/anilthanki)).
* Improvements to tests and documentation (thanks to
[kxk302](https://github.com/kxk302) and
[Simon Bray](https://github.com/simonbray)).
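
A minimal sketch of two of the additions above; `reload_toolbox()` is exercised by the test suite in this release, while the exact argument layout of the `ToolDependenciesClient` methods is an assumption (URL and key are placeholders):

```python
from bioblend.galaxy import GalaxyInstance

gi = GalaxyInstance("https://galaxy.example.org", key="admin API key")

# Reload the server's tool panel (requires an admin API key)
gi.config.reload_toolbox()

# List tool dependency paths that are no longer used, then remove them
# (argument layout of delete_unused_dependency_paths() is an assumption)
unused = gi.tool_dependencies.unused_dependency_paths()
gi.tool_dependencies.delete_unused_dependency_paths(paths=unused)
```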
### BioBlend v1.1.1 - 2023-02-21
* Same as BioBlend v1.1.0, briefly released with wrong version number 1.0.1 on
GitHub, PyPI and Bioconda.
* Added support for Python 3.11. Added support for Galaxy release 23.0.
* Using the deprecated ``folder_id`` parameter of the
``LibraryClient.get_folders()`` method now raises a ``ValueError`` exception.
* Using the deprecated ``library_id`` parameter of the
``LibraryClient.get_libraries()`` method now raises a ``ValueError``
exception.
* Using the deprecated ``tool_id`` parameter of the ``ToolClient.get_tools()``
method now raises a ``ValueError`` exception.
* Using the deprecated ``workflow_id`` parameter of the
``WorkflowClient.get_workflows()`` method now raises a ``ValueError``
exception.
* Modified ``delete_workflow()`` method of ``WorkflowClient`` to return
``None`` instead of a string.
* Added ``py.typed`` marker file to distributed packages (as per PEP 561) to
declare type checking support.
* Improvements to tests and documentation.
### BioBlend v1.0.0 - 2022-10-13
* Dropped support for deprecated CloudMan, see
https://galaxyproject.org/blog/2021-10-sunsetting-cloudlaunch/
* Added dependency on ``typing-extensions`` package, removed dependencies on
``boto`` and ``pyyaml``.
* Deprecated ``max_get_retries()``, ``set_max_get_retries()``,
``get_retry_delay()`` and ``set_get_retry_delay()`` methods of ``Client``.
* Moved ``max_get_attempts`` and ``get_retry_delay`` properties from
``GalaxyInstance`` to ``GalaxyClient``, so they are also available in
``ToolshedInstance``.
* Added ``get_or_create_user_apikey()`` method to ``UserClient``.
* Added ``all`` parameter to ``HistoryClient.get_histories()`` method (thanks to
[Paprikant](https://github.com/Paprikant)).
* Added ``require_exact_tool_versions`` parameter to
``WorkflowClient.invoke_workflow()`` method (thanks to
[cat-bro](https://github.com/cat-bro)).
* Added ``name`` and ``owner`` parameters to
``ToolShedRepositoryClient.get_repositories()``.
* Remove unused methods from ``bioblend.config.Config``. If needed, use the
methods inherited from `configparser.ConfigParser` instead.
* Allowed any 2XX HTTP response status code in ``Client._delete()`` to correctly
support history purging via Celery (thanks to
[Nolan Woods](https://github.com/innovate-invent)).
* Fixed bug in ``FormsClient.create_form()`` where the ``form_xml_text``
argument was not passed correctly to the Galaxy API.
* Fixed bug in ``HistoryClient.show_dataset_provenance()`` where the ``follow``
argument was not passed to the Galaxy API.
* BioBlend.objects: Added ``delete()`` abstract method to ``DatasetContainer``
class.
* Added Code of Conduct for the project.
* Finished the full type annotation of the library (thanks to
[cat-bro](https://github.com/cat-bro),
[Fabio Cumbo](https://github.com/cumbof),
[Jayadev Joshi](https://github.com/jaidevjoshi83),
[thepineapplepirate](https://github.com/thepineapplepirate)).
* Improvements to tests and documentation.
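
A minimal sketch of the new `UserClient` and `HistoryClient` additions above (the URL, key and e-mail are placeholders, and the argument of `get_or_create_user_apikey()` is assumed to be the user ID):

```python
from bioblend.galaxy import GalaxyInstance

gi = GalaxyInstance("https://galaxy.example.org", key="admin API key")

# Fetch a user's existing API key, or create one if none exists yet
user = gi.users.get_users(f_email="someone@example.org")[0]
apikey = gi.users.get_or_create_user_apikey(user["id"])

# List the histories of all users (requires an admin API key)
all_histories = gi.histories.get_histories(all=True)
```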
### BioBlend v0.18.0 - 2022-07-07
* Added support for Galaxy release 22.05.
* Added tus support to ``ToolClient.upload_file()`` (thanks to
[Nate Coraor](https://github.com/natefoo)).
* Formatted Python code with black and isort.
* Improvements to type annotations, tests and documentation.
### BioBlend v0.17.0 - 2022-05-09
* Dropped support for Python 3.6. Added support for Python 3.10. Added support
for Galaxy releases 21.09 and 22.01.
* Removed deprecated ``run_workflow()`` method of ``WorkflowClient``.
* Using the deprecated ``history_id`` parameter of the
``HistoryClient.get_histories()`` method now raises a ``ValueError``
exception.
* Made ``tool_inputs_update`` parameter of ``JobsClient.rerun_job()`` more
flexible.
* Added ``whoami()`` method to ``ConfigClient`` (thanks to
[cat-bro](https://github.com/cat-bro)).
* Added ``get_extra_files()`` method to ``HistoryClient``.
* Added ``build()`` and ``reload()`` methods to ``ToolClient`` (thanks to
[Jayadev Joshi](https://github.com/jaidevjoshi83) and
[cat-bro](https://github.com/cat-bro) respectively).
* Added ``get_repositories()`` method to ``ToolShedCategoryClient`` (thanks to
[cat-bro](https://github.com/cat-bro)).
* Added ``update_repository_metadata()`` method to ``ToolShedRepositoryClient``.
* Added ``order_by`` parameter to ``JobsClient.get_jobs()`` method.
* BioBlend.objects: Removed deprecated ``run()`` method of ``Workflow``.
* BioBlend.objects: Fail if multiple libraries/histories/workflows match when
deleting by name, instead of deleting them all.
* BioBlend.objects: in ``HistoryDatasetAssociation.get_stream()``, wait for
the dataset to be ready.
* BioBlend.objects: in ``Workflow.invoke()``, check that the workflow is mapped
and runnable before invoking, allow the ``inputs`` parameter to be an instance
of a ``Dataset`` subclass, and allow the ``history`` parameter to be the name
of a new history.
* BioBlend.objects: Added new ``datasets`` and ``dataset_collections``
attributes to ``GalaxyInstance`` objects, which are instances of the new
``ObjDatasetClient`` and ``ObjDatasetCollectionClient`` respectively.
* BioBlend.objects: Added ``refresh()``, ``get_outputs()`` and
``get_output_collections()`` methods to ``InvocationStep``.
* Fixed [error](https://github.com/galaxyproject/bioblend/issues/398) when
instantiating ``GalaxyInstance`` with ``email`` and ``password`` (reported by
[Peter Briggs](https://github.com/pjbriggs)).
* Fixed parameter validation errors for POST requests with attached files on
upcoming Galaxy 22.05.
* Code cleanups (thanks to [Martmists](https://github.com/Martmists-GH)).
* Improvements to type annotations, tests and documentation.
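
For example, the new `ConfigClient.whoami()` method can be used to check which user an API key belongs to (a minimal sketch; URL and key are placeholders):

```python
from bioblend.galaxy import GalaxyInstance

gi = GalaxyInstance("https://galaxy.example.org", key="your API key")

# Returns a dictionary describing the user associated with the API key
me = gi.config.whoami()
print(me["username"])
```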
### BioBlend v0.16.0 - 2021-06-13
* Added support for Galaxy release 21.05.
* Replaced the ``job_info`` parameter with separate ``tool_id``, ``inputs`` and
``state`` parameters in ``JobsClient.search_jobs()`` (thanks to
[rikeshi](https://github.com/rikeshi)).
* Pass the API key for all requests as the ``x-api-key`` header instead of as a
parameter (thanks to [rikeshi](https://github.com/rikeshi)).
* Try prepending "https://" and "http://" if the scheme is missing in the
``url`` argument passed to ``GalaxyClient``, i.e. when initialising a Galaxy
or ToolShed instance.
* Added a new ``dataset_collections`` attribute to ``GalaxyInstance`` objects,
which is an instance of the new ``DatasetCollectionClient``. This new module
can be used to get details of a dataset collection, wait until elements of a
dataset collection are in a terminal state, and download a history dataset
collection as an archive (thanks to [rikeshi](https://github.com/rikeshi)).
* Added a new ``tool_dependencies`` attribute to ``GalaxyInstance`` objects,
which is an instance of the new ``ToolDependenciesClient``. This new module
can be used to summarize requirements across the toolbox (thanks to
[cat-bro](https://github.com/cat-bro)).
* Added ``publish_dataset()``, ``update_permissions()`` and
``wait_for_dataset()`` methods to ``DatasetClient``.
* Added ``get_invocation_biocompute_object()``, ``get_invocation_report_pdf()``,
``get_invocation_step_jobs_summary()``, ``rerun_invocation()`` and
``wait_for_invocation()`` methods to ``InvocationClient`` (thanks to
[rikeshi](https://github.com/rikeshi)).
* Added ``cancel_job()``, ``get_common_problems()``,
``get_destination_params()``, ``get_inputs()``, ``get_outputs()``,
``resume_job()``, ``show_job_lock()``, ``update_job_lock()`` and
``wait_for_job()`` methods to ``JobsClient`` (thanks to
[Andrew Mcgregor](https://github.com/Mcgregor381) and
[rikeshi](https://github.com/rikeshi)).
* Added ``get_citations()`` and ``uninstall_dependencies()`` methods to
``ToolClient`` (thanks to [rikeshi](https://github.com/rikeshi)).
* Added ``extract_workflow_from_history()``, ``refactor_workflow()`` and
``show_versions()`` methods to ``WorkflowClient`` (thanks to
[rikeshi](https://github.com/rikeshi)).
* Added several parameters to ``DatasetClient.get_datasets()`` method (thanks to
[rikeshi](https://github.com/rikeshi)).
* Added several parameters to ``InvocationClient.get_invocations()`` method
(thanks to [Nolan Woods](https://github.com/innovate-invent) and
[rikeshi](https://github.com/rikeshi)).
* Added several parameters to ``JobsClient.get_jobs()`` method (thanks to
[rikeshi](https://github.com/rikeshi)).
* Added ``parameters_normalized`` parameter to
``WorkflowClient.invoke_workflow()`` method (thanks to
[rikeshi](https://github.com/rikeshi)).
* Deprecated ``folder_id`` parameter of ``LibraryClient.get_folders()`` method.
* Deprecated ``library_id`` parameter of ``LibraryClient.get_libraries()``
method.
* Deprecated ``tool_id`` parameter of ``ToolClient.get_tools()`` method.
* Deprecated ``workflow_id`` parameter of ``WorkflowClient.get_workflows()``
method.
* BioBlend.objects: Removed deprecated ``container_id`` property of ``Dataset``
and ``Folder`` objects.
* BioBlend.objects: Removed ``Preview`` abstract class.
* BioBlend.objects: Added ``invoke()`` method to ``Workflow``. Added
``ObjInvocationClient``, and ``Invocation`` and ``InvocationPreview`` wrappers
(thanks to [rikeshi](https://github.com/rikeshi)).
* BioBlend.objects: Added ``latest_workflow_uuid`` property to ``Workflow``
objects. Added ``deleted``, ``latest_workflow_uuid``, ``number_of_steps``,
``owner`` and ``show_in_tool_panel`` properties to ``WorkflowPreview`` (thanks
to [Nolan Woods](https://github.com/innovate-invent)).
* BioBlend.objects: Deprecated ``run()`` method of ``Workflow``.
* Added ``use_ssl``, ``verify`` and ``authuser`` parameters to
``CloudManInstance.__init__()`` (thanks to
[Nathan Edwards](https://github.com/edwardsnj)).
* Improvements to type annotations, tests and documentation (thanks to
[rikeshi](https://github.com/rikeshi)).
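
A minimal sketch of the new `wait_for_*` helpers listed above (the IDs are placeholders, and keyword arguments such as `maxwait` are omitted):

```python
from bioblend.galaxy import GalaxyInstance

gi = GalaxyInstance("https://galaxy.example.org", key="your API key")

# Block until a dataset, a workflow invocation and a job reach a terminal state
gi.datasets.wait_for_dataset("dataset_id")
gi.invocations.wait_for_invocation("invocation_id")
gi.jobs.wait_for_job("job_id")
```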
### BioBlend v0.15.0 - 2021-02-10
* Dropped support for Python 3.5. Added support for Python 3.9. Added support
for Galaxy releases 20.09 and 21.01.
* Changed the return value of ``RolesClient.create_role()`` method from a
1-element list containing a dict to a dict.
* Removed deprecated ``download_dataset()`` and ``get_current_history``
methods of ``HistoryClient``.
* Removed deprecated ``export_workflow_json()`` and ``import_workflow_json``
methods of ``WorkflowClient``.
* Added ``copy_content()``, ``get_published_histories()`` and ``open_history()``
methods to ``HistoryClient``.
* Added ``rerun_job()`` method to ``JobsClient``.
* Added ``requirements()`` method to ``ToolClient`` (thanks to
[cat-bro](https://github.com/cat-bro)).
* Added ``published`` and ``slug`` parameters to
``HistoryClient.get_histories()``.
* Added ``require_state_ok`` parameter to ``DatasetClient.download_dataset()``.
* Added ``input_format`` parameter to ``ToolClient.run_tool()``.
* Deprecated ``history_id`` parameter of ``HistoryClient.get_histories()``
method.
* BioBlend.objects: Added ``owner`` property to ``Workflow`` objects. Added
``annotation``, ``published`` and ``purged`` properties to ``HistoryPreview``
objects.
* Fixed issue where specifying the Galaxy URL with "http://" instead of
"https://" when creating a ``GalaxyInstance`` made the subsequent non-GET
requests silently fail.
* Moved the Continuous Integration (CI) from TravisCI to GitHub workflows
(thanks to [Oleg Zharkov](https://github.com/OlegZharkov)).
* Improvements to tests and documentation (thanks to
[Gianmauro Cuccuru](https://github.com/gmauro)).
### BioBlend v0.14.0 - 2020-07-04
* Dropped support for Python 2.7. Dropped support for Galaxy releases
14.10-17.05. Added support for Python 3.8. Added support for Galaxy releases
19.09, 20.01 and 20.05.
* Added a new ``invocations`` attribute to ``GalaxyInstance`` objects, which is
an instance of the new ``InvocationClient`` class. This new module can be used
to get all workflow invocations, show or cancel an invocation, show or pause
an invocation step, get a summary or a report for an invocation (thanks to
[Simon Bray](https://github.com/simonbray)).
* Added ``get_datasets()`` method to ``DatasetClient`` (thanks to
[Simon Bray](https://github.com/simonbray)).
* Added ``import_history()`` method to ``HistoryClient`` (thanks to
[David Christiany](https://github.com/davidchristiany) and
[Marius van den Beek](https://github.com/mvdbeek)).
* Added ``copy_dataset()`` method to ``HistoryClient`` (thanks to
[Simon Bray](https://github.com/simonbray)).
* Added ``get_metrics()`` method to ``JobsClient`` (thanks to
[Andreas Skorczyk](https://github.com/AndreasSko)).
* Added ``report_error()`` method to ``JobsClient`` (thanks to
[Peter Selten](https://github.com/selten)).
* Added ``get_dataset_permissions()`` and ``set_dataset_permissions()`` methods
to ``LibraryClient`` (thanks to
[Frederic Sapet](https://github.com/FredericBGA)).
* Added ``update_user()`` method to ``UserClient`` (thanks to
[Anthony Bretaudeau](https://github.com/abretaud)).
* Added ``update_workflow()`` method to ``WorkflowClient``.
* Added ``tags`` parameter to ``upload_file_from_url()``,
``upload_file_contents()``, ``upload_file_from_local_path()``,
``upload_file_from_server()`` and ``upload_from_galaxy_filesystem()`` methods
of ``LibraryClient`` (thanks to
[Anthony Bretaudeau](https://github.com/abretaud)).
* Changed the default for the ``tag_using_filenames`` parameter of
``upload_file_from_server()`` and ``upload_from_galaxy_filesystem()`` methods
of ``LibraryClient`` from ``True`` to ``False`` (thanks to
[Anthony Bretaudeau](https://github.com/abretaud)).
* Added ``version`` parameter to ``show_workflow()`` and
``export_workflow_dict()`` methods of ``WorkflowClient``.
* Added ``inputs_by`` option to ``invoke_workflow()`` method of
``WorkflowClient`` (thanks to
[Marius van den Beek](https://github.com/mvdbeek)).
* Removed deprecated ``show_stderr()`` and ``show_stdout`` methods of
``DatasetClient``.
* BioBlend.objects: Allowed workflow steps of type ``parameter_input`` and
``subworkflow``. Added ``parameter_input_ids`` property to ``Workflow``
objects (reported by [Nolan Woods](https://github.com/innovate-invent)).
* Fixed ``HistoryClient.export_history(..., wait=False, maxwait=None)``
(reported by [David Christiany](https://github.com/davidchristiany)).
* Moved internal ``_make_url()`` method from ``GalaxyClient`` to ``Client``
class.
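
A minimal sketch of the new `InvocationClient` described above (IDs are placeholders; the method names follow the description in this entry):

```python
from bioblend.galaxy import GalaxyInstance

gi = GalaxyInstance("https://galaxy.example.org", key="your API key")

# List all workflow invocations visible to the user
invocations = gi.invocations.get_invocations()

# Show details of the first invocation and, if needed, cancel it
if invocations:
    details = gi.invocations.show_invocation(invocations[0]["id"])
    gi.invocations.cancel_invocation(invocations[0]["id"])
```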
### BioBlend v0.13.0 - 2019-08-09
* Dropped support for Python 3.4. Added support for Galaxy releases 19.01 and
19.05.
* Updated ``requests-toolbelt`` requirement to ``>=0.5.1`` to prevent failing of
uploads to Galaxy (reported by [m93](https://github.com/mmeier93)).
* Added ``toolshed`` attribute to ``GalaxyInstance`` and made ``toolShed`` an
alias to it (reported by [Miriam Payá](https://github.com/mpaya)).
* Added ``uninstall_repository_revision()`` method to ``ToolShedClient`` (thanks
to [Helena Rasche](https://github.com/erasche), reported by
[Alexander Lenail](https://github.com/alexlenail)).
* Added ``maxwait`` parameter to ``HistoryClient.export_history()`` and
``History.export()`` methods.
* Fixed handling of ``type`` argument in ``HistoryClient.show_history()``
(thanks to [Marius van den Beek](https://github.com/mvdbeek)).
* Fixed handling of ``deleted`` argument in ``LibraryClient.get_libraries()``
(thanks to [Luke Sargent](https://github.com/luke-c-sargent), reported by
[Katie](https://github.com/emartchenko)).
* Fixed ``LibraryClient.wait_for_dataset()`` when ``maxwait`` or ``interval``
arguments are of type ``float``.
* Unify JSON-encoding of non-file parameters of POST requests inside
``GalaxyClient.make_post_request()``.
* Improvements to tests and documentation (thanks to
[Helena Rasche](https://github.com/erasche),
[Peter Selten](https://github.com/selten) and
[Pablo Moreno](https://github.com/pcm32)).
### BioBlend v0.12.0 - 2018-12-17
* Added support for Python 3.7. Added support for Galaxy releases 18.05 and
18.09.
* Added ``update_library_dataset()`` method to ``LibraryClient`` (thanks to
[Anthony Bretaudeau](https://github.com/abretaud)).
* Added ``preserve_dirs`` and ``tag_using_filenames`` parameters to
``upload_file_from_server()`` and ``upload_from_galaxy_filesystem()`` methods
of ``LibraryClient`` (thanks to
[Anthony Bretaudeau](https://github.com/abretaud)).
* Removed deprecated ``wait_for_completion`` parameter of
``DatasetClient.download_dataset()`` method.
* BioBlend.objects: added ``genome_build`` and ``misc_info`` attributes to
``Dataset`` objects. Moved ``deleted`` attribute from ``Dataset`` to
``HistoryDatasetAssociation`` and ``LibraryDatasetDatasetAssociation``
objects. Moved ``purged`` attribute from ``Dataset`` to
``HistoryDatasetAssociation`` objects.
* BioBlend.objects: added ``update()`` method to ``LibraryDataset`` (thanks to
[Anthony Bretaudeau](https://github.com/abretaud)).
* Run tests with pytest instead of nose.
### BioBlend v0.11.0 - 2018-04-18
* Dropped support for Python 3.3. Added support for Galaxy release 18.01.
* Always wait for terminal state when downloading a dataset.
* Deprecated ``wait_for_completion`` parameter of
``DatasetClient.download_dataset()`` method.
* Fixed downloading of datasets receiving a HTTP 500 status code (thanks to
[Helena Rasche](https://github.com/erasche)).
* Added ``wait_for_dataset()`` method to ``LibraryClient``.
* Added ``verify`` parameter to ``GalaxyInstance.__init__()`` method (thanks to
Devon Ryan).
* Improvements to tests and documentation.
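
For example, the new `verify` parameter makes it possible to connect to a Galaxy server that uses a self-signed certificate (a minimal sketch; URL, key and IDs are placeholders, and the argument order of `wait_for_dataset()` is assumed to be library ID then dataset ID):

```python
from bioblend.galaxy import GalaxyInstance

# Disable SSL certificate verification for this instance only
gi = GalaxyInstance("https://galaxy.example.org", key="your API key", verify=False)

# Wait for a library dataset to reach a terminal state before using it
gi.libraries.wait_for_dataset("library_id", "dataset_id")
```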
### BioBlend v0.10.0 - 2017-09-26
* Dropped support for Python 2.6. Added support for Galaxy release 17.09.
* Added ``contents`` parameter to ``FoldersClient.show_folder()`` method
(thanks to [Helena Rasche](https://github.com/erasche)).
* Exposed the `verify` attribute of `GalaxyInstance` and `ToolShedInstance`
objects as an `__init__()` parameter.
* Added ``create_role()`` method to ``RolesClient`` (thanks to Ashok
Varadharajan).
* Added ``timeout`` parameter to ``GalaxyClient.__init__()`` method.
* Added ``publish`` parameter to ``import_workflow_dict()`` and
``import_workflow_from_local_path()`` methods of ``WorkflowClient`` (thanks to
Marco Enrico Piras).
* BioBlend.objects: added ``publish`` parameter to
``ObjWorkflowClient.import_new()`` method (thanks to Marco Enrico Piras).
* Do not check for mismatching content size when streaming a dataset to file
(reported by Jorrit Boekel).
* Fixed delete requests when Galaxy uses external authentication (thanks to
[Helena Rasche](https://github.com/erasche)).
* Fixed retrieval of the API key when a ``GalaxyClient`` object is initialised
with email and password on Python 3 (thanks to
[Marius van den Beek](https://github.com/mvdbeek)).
* Documentation improvements.
### BioBlend v0.9.0 - 2017-05-25
* Dropped support for Galaxy releases 14.02, 14.04, 14.06 and 14.08. Added
support for Python 3.5 and 3.6, and Galaxy releases 16.07, 16.10, 17.01 and
17.05.
* Deprecated ``import_workflow_json()`` and ``export_workflow_json()`` methods
of ``WorkflowClient`` in favor of ``import_workflow_dict()`` and
``export_workflow_dict()`` (reported by @manabuishii).
* Deprecated ``show_stderr()`` and ``show_stdout()`` methods of
``DatasetClient`` in favour of ``JobsClient.show_job()`` with
``full_details=True``.
* Added ``install_dependencies()`` method to ``ToolClient`` (thanks to
[Marius van den Beek](https://github.com/mvdbeek)).
* Added ``reload_data_table()`` method to ``ToolDataClient`` (thanks to
[Marius van den Beek](https://github.com/mvdbeek)).
* Added ``create_folder()``, ``update_folder()``, ``get_permissions()``,
``set_permissions()`` methods to ``FoldersClient`` (thanks to
[Helena Rasche](https://github.com/erasche)).
* Added ``get_version()`` method to ``ConfigClient`` (thanks to
[Helena Rasche](https://github.com/erasche)).
* Added ``get_user_apikey()`` method to ``UserClient`` (thanks to
[Helena Rasche](https://github.com/erasche)).
* Added ``create_quota()``, ``update_quota()``, ``delete_quota()`` and
``undelete_quota()`` methods to ``QuotaClient`` (thanks to
[Helena Rasche](https://github.com/erasche)).
* Added ``purge`` parameter to ``HistoryClient.delete_dataset()`` method.
* Added ``f_email``, ``f_name``, and ``f_any`` parameters to
``UserClient.get_users()`` method (thanks to
[Helena Rasche](https://github.com/erasche)).
* Updated ``WorkflowClient.import_shared_workflow()`` method to use the newer
Galaxy API request (thanks to @DamCorreia).
* Fixed ``HistoryClient.update_history()`` and ``History.update()`` methods
when ``name`` argument is not specified.
* Added warning if content size differs from content-length header in
``DatasetClient.download_dataset()``.
* BioBlend.objects: added ``purge`` parameter to
``HistoryDatasetAssociation.delete()`` method.
* BioBlend.objects: added ``purged`` attribute to ``Dataset`` objects.
* BioBlend.objects: added ``published`` attribute to ``History`` objects.
* Code refactoring, added tests and documentation improvements.
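
A minimal sketch of two of the additions above, the `purge` parameter of `HistoryClient.delete_dataset()` and the new user filters (URL, key, IDs and e-mail are placeholders):

```python
from bioblend.galaxy import GalaxyInstance

gi = GalaxyInstance("https://galaxy.example.org", key="your API key")

# Permanently remove a dataset instead of just marking it deleted
gi.histories.delete_dataset("history_id", "dataset_id", purge=True)

# Look up users by e-mail address (requires suitable permissions)
users = gi.users.get_users(f_email="someone@example.org")
```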
### BioBlend v0.8.0 - 2016-08-11
* Removed deprecated ``create_user()`` method of ``UserClient``.
* Deprecated ``HistoryClient.download_dataset()`` in favor of
``DatasetClient.download_dataset()``.
* Modified ``update_dataset()``, ``update_dataset_collection()`` and
``update_history()`` methods of ``HistoryClient`` to return the details
instead of the status code.
* Modified ``GalaxyClient.make_put_request()`` to return the decoded response
content.
* Added ``install_resolver_dependencies`` parameter to
``ToolShedClient.install_repository_revision()``, applicable for Galaxy
release 16.07 and later (thanks to
[Marius van den Beek](https://github.com/mvdbeek)).
* Improve ``DatasetClient.download_dataset()`` by downloading the dataset in
chunks when saving to file (thanks to Jorrit Boekel).
* Added ``bioblend.toolshed.categories.ToolShedCategoryClient``;
renamed ``bioblend.toolshed.repositories.ToolShedClient`` class to
``bioblend.toolshed.repositories.ToolShedRepositoryClient``;
renamed ``bioblend.toolshed.tools.ToolShedClient`` class to
``bioblend.toolshed.tools.ToolShedToolClient``.
* Added ``delete_user()`` method to ``UserClient``.
* BioBlend.objects: added ``update()`` method to ``HistoryDatasetAssociation``.
* BioBlend.objects: added ``annotation`` and ``genome_build`` attributes to
``HistoryDatasetAssociation`` objects.
* BioBlend.objects: added ability to create and delete dataset collections
(thanks to Alex MacLean).
* BioBlend.objects: added dataset collections to the outputs of
``Workflow.run()``.
* Added ability to launch Galaxy CloudMan instances into AWS VPC.
* A number of testing tweaks, documentation improvements and minor fixes.
### BioBlend v0.7.0 - 2015-11-02
* BioBlend.objects: enabled import of workflows containing dataset collection
inputs.
* Implemented support for the modern Galaxy workflow APIs (i.e. delayed scheduling).
* Implemented APIs to search Tool Shed repositories and tools.
* Added support for uploading (importing) from FTP (thanks to
[Helena Rasche](https://github.com/erasche)).
* Added ``to_posix_lines`` and ``space_to_tab`` params to ``upload_file()``,
``upload_from_ftp()`` and ``paste_content()`` methods of ``ToolClient``.
* BioBlend.objects: added ``upload_from_ftp()`` method to ``History``.
* Updated the testing framework to work with Galaxy wheels; use TravisCI's
container infrastructure; test Galaxy release 15.07.
* Updated CloudmanLauncher's ``launch`` method to accept ``subnet_id``
parameter, for VPC support (thanks to Matthew Ralston).
* Properly pass extra arguments to cloud instance userdata.
* Updated placement finding methods and the `get_clusters_pd` method to return a
dict instead of lists so error messages can be included.
* A number of documentation improvements and minor updates/fixes (see individual
commits).
### BioBlend v0.6.1 - 2015-07-27
* BioBlend.objects: renamed ``ObjDatasetClient`` abstract class to
``ObjDatasetContainerClient``.
* BioBlend.objects: added ``ABCMeta`` metaclass and ``list()`` method to
``ObjClient``.
* BioBlend.objects: added ``io_details`` and ``link_details`` parameters to
``ObjToolClient.get()`` method.
* Open port 8800 when launching cloud instances for use by NodeJS proxy for
Galaxy IPython Interactive Environments.
* When launching cloud instances, propagate error messages back to the caller.
The return types of the ``create_cm_security_group()`` and ``create_key_pair()``
methods in the ``CloudManLauncher`` class have changed as a result.
### BioBlend v0.6.0 - 2015-06-30
* Added support for Python >= 3.3.
* Added ``get_library_permissions()`` method to ``LibraryClient``.
* Added ``update_group()``, ``get_group_users()``, ``get_group_roles()``,
``add_group_user()``, ``add_group_role()``, ``delete_group_user()`` and
``delete_group_role()`` methods to ``GroupsClient``.
* Added ``full_details`` parameter to ``JobsClient.show_job()`` (thanks to
Rossano Atzeni).
* BioBlend.objects: added ``ObjJobClient`` and ``Job`` wrapper (thanks to
Rossano Atzeni).
* BioBlend.objects: added check to verify that all tools in a workflow are
installed on the Galaxy instance (thanks to
[Gianmauro Cuccuru](https://github.com/gmauro)).
* Removed several deprecated parameters: see commits [19e168f](https://github.com/galaxyproject/bioblend/commit/19e168f5342f4c791d37694d7039a85f2669df71)
and [442ae98](https://github.com/galaxyproject/bioblend/commit/442ae98037be7455d57be15542553dc848d99431).
* Verify SSL certificates by default.
* Added documentation about the Tool Shed and properly link all the docs on
ReadTheDocs.
* Solidified automated testing by using [tox](https://tox.readthedocs.org/) and
[flake8](https://gitlab.com/pycqa/flake8).
### BioBlend v0.5.3 - 2015-03-18
* Project source moved to new URL - https://github.com/galaxyproject/bioblend
* Huge improvements to automated testing, tests now run against Galaxy
release 14.02 and all later versions to ensure backward compatibility
(see `.travis.yml` for details).
* Many documentation improvements (thanks to
[Helena Rasche](https://github.com/erasche)).
* Added Galaxy clients for the tool data tables, the roles, and library
folders (thanks to [Anthony Bretaudeau](https://github.com/abretaud)).
* Added method to get the standard error and standard output for the
job corresponding to a Galaxy dataset (thanks to
[Anthony Bretaudeau](https://github.com/abretaud)).
* Added ``get_state()`` method to ``JobsClient``.
* Added ``copy_from_dataset()`` method to ``LibraryClient``.
* Added ``create_repository()`` method to ``ToolShedRepositoryClient`` (thanks
to [Helena Rasche](https://github.com/erasche)).
* Fixed ``DatasetClient.download_dataset()`` for certain proxied Galaxy
deployments.
* Made ``LibraryClient._get_root_folder_id()`` method safer and faster for
Galaxy release 13.06 and later.
* Deprecate and ignore invalid ``deleted`` parameter to
``WorkflowClient.get_workflows()``.
* CloudMan: added method to fetch instance types.
* CloudMan: updated cluster options to reflect change to SLURM.
* BioBlend.objects: deprecate and ignore invalid ``deleted`` parameter
to ``ObjWorkflowClient.list()``.
* BioBlend.objects: added ``paste_content()`` method to ``History`` objects.
* BioBlend.objects: added ``copy_from_dataset()`` method and ``root_folder``
property to ``Library`` objects.
* BioBlend.objects: added ``container`` and ``deleted`` attributes to ``Folder``
objects.
* BioBlend.objects: the ``parent`` property of a ``Folder`` object is now set to
its parent folder object (thanks to John M. Eppley).
* BioBlend.objects: added ``deleted`` parameter to ``list()`` method
of libraries and histories.
* BioBlend.objects: added ``state`` and ``state_details`` attributes to
``History`` objects (thanks to [Gianmauro Cuccuru](https://github.com/gmauro)).
* BioBlend.objects: renamed ``upload_dataset()`` method to ``upload_file()``
for ``History`` objects.
* BioBlend.objects: renamed ``input_ids`` and ``output_ids`` attributes of
``Workflow`` objects to ``source_ids`` and ``sink_ids`` respectively.
* Add ``run_bioblend_tests.sh`` script (useful for Continuous Integration
testing).
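
For example, the new `JobsClient.get_state()` method allows simple polling of a job (a minimal sketch; URL, key and job ID are placeholders):

```python
import time

from bioblend.galaxy import GalaxyInstance

gi = GalaxyInstance("https://galaxy.example.org", key="your API key")

# Poll until the job reaches a terminal state
while gi.jobs.get_state("job_id") not in ("ok", "error"):
    time.sleep(10)
```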
### BioBlend v0.5.2 - 2014-10-17
* BioBlend.objects: enabled email & password authentication
* Enabled Tool Shed tar ball uploads
* BioBlend.objects: implemented deletion of history and library datasets
* BioBlend.objects: fixed library dataset downloads
* Fixed the Tool Shed tool installation method
* Added 'deleted' attribute to DatasetContainer
* Handle `data_type` changes in the Oct 2014 Galaxy release
* Renamed `get_current_history()` to `get_most_recently_used_history()`
* A number of documentation improvements and other small fixes (see
the commit messages for more details)
### BioBlend v0.5.1 - 2014-08-19
* Fixed url joining problem described in issue #82
* Enabled Travis Continuous Integration testing
* Added script to create a user and get its API key
* Deprecated ``create_user()`` method in favor of clearer
``create_remote_user()``. Added ``create_local_user()``.
* Skip instead of fail tests when ``BIOBLEND_GALAXY_URL`` and
``BIOBLEND_GALAXY_API_KEY`` environment variables are not defined.
* Added export and download to objects API
* Added export/download history
* GalaxyClient: changed ``make_put_request()`` to return whole ``requests``
response object
* Added Tool wrapper to *BioBlend.objects* plus methods to list tools and get
one.
* Added ``show_tool()`` method to ``ToolClient`` class
* Added ``name``, ``in_panel`` and ``trackster`` filters to ``get_tools()``
* Added ``upload_dataset()`` method to ``History`` class.
* Removed ``DataInput`` and ``Tool`` classes for workflow steps. ``Tool`` is to
be used for running single tools.
bioblend-1.2.0/CITATION
If you use BioBlend in your published work, please cite the following article:
- Clare Sloggett, Nuwan Goonasekera, Enis Afgan "BioBlend: automating pipeline
analyses within Galaxy and CloudMan"
Bioinformatics (2013) 29(13):1685-1686 doi:10.1093/bioinformatics/btt199
BibTeX format:
@article{10.1093/bioinformatics/btt199,
author = {Sloggett, Clare and Goonasekera, Nuwan and Afgan, Enis},
doi = {10.1093/bioinformatics/btt199},
journal = {Bioinformatics},
number = {13},
pages = {1685-1686},
title = {{BioBlend: automating pipeline analyses within Galaxy and CloudMan}},
url = {https://doi.org/10.1093/bioinformatics/btt199},
volume = {29},
year = {2013},
}
If you use BioBlend.objects in your published work, please cite the following
article:
- Simone Leo, Luca Pireddu, Gianmauro Cuccuru, Luca Lianas, Nicola Soranzo, Enis
Afgan, Gianluigi Zanetti "BioBlend.objects: metacomputing with Galaxy"
Bioinformatics (2014) 30(19):2816-2817 doi:10.1093/bioinformatics/btu386
BibTeX format:
@article{10.1093/bioinformatics/btu386,
author = {Leo, Simone and Pireddu, Luca and Cuccuru, Gianmauro and Lianas, Luca and Soranzo, Nicola and Afgan, Enis and Zanetti, Gianluigi},
doi = {10.1093/bioinformatics/btu386},
journal = {Bioinformatics},
number = {19},
pages = {2816-2817},
title = {{BioBlend.objects: metacomputing with Galaxy}},
url = {https://doi.org/10.1093/bioinformatics/btu386},
volume = {30},
year = {2014},
}
bioblend-1.2.0/CODE_OF_CONDUCT.md
Code of Conduct
===============
As part of the Galaxy Community, this project is committed to providing a
welcoming and harassment-free experience for everyone. We therefore expect
participants to abide by our Code of Conduct, which can be found at:
https://galaxyproject.org/community/coc/
bioblend-1.2.0/CONTRIBUTING.md
Making a new release
--------------------
1. For a new major release, remove stuff (e.g. parameters, methods) deprecated in the previous cycle.
2. Update the `__version__` string in `bioblend/__init__.py` .
3. Update `CHANGELOG.md` .
4. Commit the changes above, push to GitHub, and wait for Continuous Integration (CI) tests to pass.
5. Make a new release through the GitHub interface. A CI job will automatically upload the packages to PyPI.
6. Check and merge the automatic pull request to update the [Bioconda package](https://github.com/bioconda/bioconda-recipes/blob/master/recipes/bioblend/meta.yaml).
How to run BioBlend tests
-------------------------
1. Clone Galaxy to a directory outside of BioBlend source directory via `git clone https://github.com/galaxyproject/galaxy.git`
2. Change directory to your BioBlend source and run the tests via `./run_bioblend_tests.sh -g GALAXY_PATH [-r GALAXY_REV] [-e TOX_ENV]` where `GALAXY_PATH` is the directory where the galaxy repository was cloned, `GALAXY_REV` is the branch or commit of Galaxy that you would like to test against (if different from the current state of your galaxy clone), and `TOX_ENV` is used to specify the Python version to use for BioBlend, e.g. `py37` for Python 3.7.
You can also add `2>&1 | tee log.txt` to the command above to view the test output and save it to the `log.txt` file at the same time.
3. If needed, you can temporarily increase the Galaxy job timeout used by BioBlend tests with e.g. `export BIOBLEND_TEST_JOB_TIMEOUT=100`, and re-run the tests.
bioblend-1.2.0/LICENSE
MIT License
Copyright (c) 2012-2023 Galaxy Project
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
bioblend-1.2.0/MANIFEST.in
# Add non-Python files
graft bioblend/_tests
# Add documentation
graft docs
global-exclude *.swp *.pyc .gitignore
include *.rst CITATION LICENSE
bioblend-1.2.0/Makefile
IN_VENV=. .venv/bin/activate
.PHONY: clean release venv
all:
	@echo "This makefile is used for the release process. A sensible all target is not implemented."

clean:
	rm -rf bioblend.egg-info/ build/ dist/
	make -C docs/ clean

venv:
	# Create and activate a virtual environment
	[ -f .venv/bin/activate ] || virtualenv -p python3 .venv
	( $(IN_VENV) && \
	  # Install latest versions of pip and setuptools \
	  python3 -m pip install --upgrade pip setuptools && \
	  # Install latest versions of other needed packages in the virtualenv \
	  python3 -m pip install --upgrade twine wheel \
	)

release: clean venv
	( $(IN_VENV) && \
	  # Create files in dist/ \
	  python3 setup.py sdist bdist_wheel && \
	  twine check dist/* && \
	  twine upload dist/* \
	)
bioblend-1.2.0/README.rst
.. image:: https://img.shields.io/pypi/v/bioblend.svg
   :target: https://pypi.org/project/bioblend/
   :alt: latest version available on PyPI

.. image:: https://readthedocs.org/projects/bioblend/badge/
   :alt: Documentation Status
   :target: https://bioblend.readthedocs.io/

.. image:: https://badges.gitter.im/galaxyproject/bioblend.svg
   :alt: Join the chat at https://gitter.im/galaxyproject/bioblend
   :target: https://gitter.im/galaxyproject/bioblend?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge
BioBlend is a Python library for interacting with the `Galaxy`_ API.
BioBlend is supported and tested on:
- Python 3.7 - 3.11
- Galaxy release 19.05 and later.
Full docs are available at https://bioblend.readthedocs.io/ with a quick library
overview also available in `ABOUT.rst <./ABOUT.rst>`_.
.. References/hyperlinks used above
.. _Galaxy: https://galaxyproject.org/
bioblend-1.2.0/bioblend/__init__.py
import contextlib
import logging
import logging.config
import os
from typing import (
    Optional,
    Union,
)

from bioblend.config import (
    BioBlendConfigLocations,
    Config,
)

# Current version of the library
__version__ = "1.2.0"

# default chunk size (in bytes) for reading remote data
try:
    import resource

    CHUNK_SIZE = resource.getpagesize()
except Exception:
    CHUNK_SIZE = 4096

config = Config()


def get_version() -> str:
    """
    Returns a string with the current version of the library (e.g., "0.2.0")
    """
    return __version__


def init_logging() -> None:
    """
    Initialize BioBlend's logging from a configuration file.
    """
    for config_file in BioBlendConfigLocations:
        with contextlib.suppress(Exception):
            logging.config.fileConfig(os.path.expanduser(config_file))


class NullHandler(logging.Handler):
    def emit(self, record: logging.LogRecord) -> None:
        pass


# By default, do not force any logging by the library. If you want to see the
# log messages in your scripts, add the following to the top of your script:
# import logging
# logging.basicConfig(filename="bioblend.log", level=logging.DEBUG)

default_format_string = "%(asctime)s %(name)s [%(levelname)s]: %(message)s"

log = logging.getLogger("bioblend")
log.addHandler(NullHandler())
init_logging()

# Convenience functions to set logging to a particular file or stream
# To enable either of these, simply add the following at the top of a
# bioblend module:
# import bioblend
# bioblend.set_stream_logger(__name__)


def set_file_logger(
    name: str, filepath: str, level: Union[int, str] = logging.INFO, format_string: Optional[str] = None
) -> None:
    global log
    if not format_string:
        format_string = default_format_string
    logger = logging.getLogger(name)
    logger.setLevel(level)
    fh = logging.FileHandler(filepath)
    fh.setLevel(level)
    formatter = logging.Formatter(format_string)
    fh.setFormatter(formatter)
    logger.addHandler(fh)
    log = logger


def set_stream_logger(name: str, level: Union[int, str] = logging.DEBUG, format_string: Optional[str] = None) -> None:
    global log
    if not format_string:
        format_string = default_format_string
    logger = logging.getLogger(name)
    logger.setLevel(level)
    fh = logging.StreamHandler()
    fh.setLevel(level)
    formatter = logging.Formatter(format_string)
    fh.setFormatter(formatter)
    logger.addHandler(fh)
    log = logger


class ConnectionError(Exception):
    """
    An exception class that is raised when unexpected HTTP responses come back.

    Should make it easier to debug when strange HTTP things happen such as a
    proxy server getting in the way of the request etc.

    @see: body attribute to see the content of the http response
    """

    def __init__(
        self, message: str, body: Optional[Union[bytes, str]] = None, status_code: Optional[int] = None
    ) -> None:
        super().__init__(message)
        self.body = body
        self.status_code = status_code

    def __str__(self) -> str:
        return f"{self.args[0]}: {self.body!s}"


class TimeoutException(Exception):
    pass
bioblend-1.2.0/bioblend/_tests/GalaxyTestBase.py
import os
import unittest
from typing import (
    Any,
    Dict,
)

from typing_extensions import Literal

import bioblend
from bioblend.galaxy import GalaxyInstance

from . import test_util

bioblend.set_stream_logger("test", level="INFO")

BIOBLEND_TEST_JOB_TIMEOUT = int(os.environ.get("BIOBLEND_TEST_JOB_TIMEOUT", "60"))


@test_util.skip_unless_galaxy()
class GalaxyTestBase(unittest.TestCase):
    gi: GalaxyInstance

    @classmethod
    def setUpClass(cls):
        galaxy_key = os.environ["BIOBLEND_GALAXY_API_KEY"]
        galaxy_url = os.environ["BIOBLEND_GALAXY_URL"]
        cls.gi = GalaxyInstance(url=galaxy_url, key=galaxy_key)

    def _test_dataset(self, history_id: str, contents: str = "1\t2\t3", **kwargs: Any) -> str:
        tool_output = self.gi.tools.paste_content(contents, history_id, **kwargs)
        return tool_output["outputs"][0]["id"]

    def _wait_and_verify_dataset(
        self, dataset_id: str, expected_contents: bytes, timeout_seconds: float = BIOBLEND_TEST_JOB_TIMEOUT
    ) -> None:
        dataset_contents = self.gi.datasets.download_dataset(dataset_id, maxwait=timeout_seconds)
        assert dataset_contents == expected_contents

    def _run_random_lines1(
        self, history_id: str, dataset_id: str, input_format: Literal["21.01", "legacy"] = "legacy"
    ) -> Dict[str, Any]:
        tool_inputs = {
            "num_lines": "1",
            "input": {"src": "hda", "id": dataset_id},
        }
        if input_format == "21.01":
            tool_inputs.update({"seed_source": {"seed_source_selector": "set_seed", "seed": "asdf"}})
        else:
            # legacy format
            tool_inputs.update({"seed_source|seed_source_selector": "set_seed", "seed_source|seed": "asdf"})
        return self.gi.tools.run_tool(
            history_id=history_id, tool_id="random_lines1", tool_inputs=tool_inputs, input_format=input_format
        )
bioblend-1.2.0/bioblend/_tests/README.TXT
To run Galaxy tests, the following environment variables must be set:
BIOBLEND_GALAXY_API_KEY =
BIOBLEND_GALAXY_URL =
To run ToolShed tests, the following environment variable must be set:
BIOBLEND_TOOLSHED_URL =
If you wish to run the entire suite, set all of the above. The integration
tests can subsequently be run by invoking `pytest` from the command line.
pytest should be invoked from the project root folder, and not the tests
child folder, since the test data is resolved relative to the bioblend folder.
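
For reference, a minimal sketch of connecting to the same Galaxy instance the
tests will use, reading the environment variables above (the URL and key are
whatever values you exported):

    import os
    from bioblend.galaxy import GalaxyInstance

    gi = GalaxyInstance(
        url=os.environ["BIOBLEND_GALAXY_URL"],
        key=os.environ["BIOBLEND_GALAXY_API_KEY"],
    )
    print(gi.config.get_version())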
bioblend-1.2.0/bioblend/_tests/TestGalaxyConfig.py
from . import GalaxyTestBase
class TestGalaxyConfig(GalaxyTestBase.GalaxyTestBase):
    def test_get_config(self):
        response = self.gi.config.get_config()
        assert isinstance(response, dict)
        assert "brand" in response.keys()

    def test_get_version(self):
        response = self.gi.config.get_version()
        assert isinstance(response, dict)
        assert "version_major" in response.keys()

    def test_whoami(self):
        response = self.gi.config.whoami()
        assert isinstance(response, dict)
        assert "username" in response.keys()

    def test_reload_toolbox(self):
        response = self.gi.config.reload_toolbox()
        assert response is None
bioblend-1.2.0/bioblend/_tests/TestGalaxyDatasetCollections.py
import os
import tarfile
import tempfile
from inspect import signature
from typing import (
Any,
Dict,
Union,
)
from zipfile import ZipFile
from bioblend.galaxy import dataset_collections
from . import GalaxyTestBase
class TestGalaxyDatasetCollections(GalaxyTestBase.GalaxyTestBase):
def test_create_list_in_history(self):
history_id = self.gi.histories.create_history(name="TestDSListCreate")["id"]
dataset1_id = self._test_dataset(history_id)
dataset2_id = self._test_dataset(history_id)
dataset3_id = self._test_dataset(history_id)
collection_response = self.gi.histories.create_dataset_collection(
history_id=history_id,
collection_description=dataset_collections.CollectionDescription(
name="MyDatasetList",
elements=[
dataset_collections.HistoryDatasetElement(name="sample1", id=dataset1_id),
dataset_collections.HistoryDatasetElement(name="sample2", id=dataset2_id),
dataset_collections.HistoryDatasetElement(name="sample3", id=dataset3_id),
],
),
)
assert collection_response["name"] == "MyDatasetList"
assert collection_response["collection_type"] == "list"
elements = collection_response["elements"]
assert len(elements) == 3
assert elements[0]["element_index"] == 0
assert elements[0]["object"]["id"] == dataset1_id
assert elements[1]["object"]["id"] == dataset2_id
assert elements[2]["object"]["id"] == dataset3_id
assert elements[2]["element_identifier"] == "sample3"
def test_create_list_of_paired_datasets_in_history(self):
history_id = self.gi.histories.create_history(name="TestDSListCreate")["id"]
dataset1_id = self._test_dataset(history_id)
dataset2_id = self._test_dataset(history_id)
dataset3_id = self._test_dataset(history_id)
dataset4_id = self._test_dataset(history_id)
collection_response = self.gi.histories.create_dataset_collection(
history_id=history_id,
collection_description=dataset_collections.CollectionDescription(
name="MyListOfPairedDatasets",
type="list:paired",
elements=[
dataset_collections.CollectionElement(
name="sample1",
type="paired",
elements=[
dataset_collections.HistoryDatasetElement(name="forward", id=dataset1_id),
dataset_collections.HistoryDatasetElement(name="reverse", id=dataset2_id),
],
),
dataset_collections.CollectionElement(
name="sample2",
type="paired",
elements=[
dataset_collections.HistoryDatasetElement(name="forward", id=dataset3_id),
dataset_collections.HistoryDatasetElement(name="reverse", id=dataset4_id),
],
),
],
),
)
assert collection_response["name"] == "MyListOfPairedDatasets"
assert collection_response["collection_type"] == "list:paired"
elements = collection_response["elements"]
assert len(elements) == 2
assert elements[0]["element_index"] == 0
created_pair1 = elements[0]["object"]
assert created_pair1["collection_type"] == "paired"
assert len(created_pair1["elements"]) == 2
forward_element1 = created_pair1["elements"][0]
assert forward_element1["element_identifier"] == "forward"
assert forward_element1["element_index"] == 0
forward_dataset1 = forward_element1["object"]
assert forward_dataset1["id"] == dataset1_id
assert elements[1]["element_index"] == 1
created_pair2 = elements[1]["object"]
assert created_pair2["collection_type"] == "paired"
assert len(created_pair2["elements"]) == 2
reverse_element2 = created_pair2["elements"][1]
reverse_dataset2 = reverse_element2["object"]
assert reverse_element2["element_identifier"] == "reverse"
assert reverse_element2["element_index"] == 1
assert reverse_dataset2["id"] == dataset4_id
def test_collections_in_history_index(self):
history_id = self.gi.histories.create_history(name="TestHistoryDSIndex")["id"]
history_dataset_collection = self._create_pair_in_history(history_id)
contents = self.gi.histories.show_history(history_id, contents=True)
assert len(contents) == 3
assert contents[2]["id"] == history_dataset_collection["id"]
assert contents[2]["name"] == "MyTestPair"
assert contents[2]["collection_type"] == "paired"
def test_show_history_dataset_collection(self):
history_id = self.gi.histories.create_history(name="TestHistoryDSIndexShow")["id"]
history_dataset_collection = self._create_pair_in_history(history_id)
show_response = self.gi.histories.show_dataset_collection(history_id, history_dataset_collection["id"])
for key in ["collection_type", "elements", "name", "deleted", "visible"]:
assert key in show_response
assert not show_response["deleted"]
assert show_response["visible"]
def test_delete_history_dataset_collection(self):
history_id = self.gi.histories.create_history(name="TestHistoryDSDelete")["id"]
history_dataset_collection = self._create_pair_in_history(history_id)
self.gi.histories.delete_dataset_collection(history_id, history_dataset_collection["id"])
show_response = self.gi.histories.show_dataset_collection(history_id, history_dataset_collection["id"])
assert show_response["deleted"]
def test_update_history_dataset_collection(self):
history_id = self.gi.histories.create_history(name="TestHistoryDSDelete")["id"]
history_dataset_collection = self._create_pair_in_history(history_id)
self.gi.histories.update_dataset_collection(history_id, history_dataset_collection["id"], visible=False)
show_response = self.gi.histories.show_dataset_collection(history_id, history_dataset_collection["id"])
assert not show_response["visible"]
def test_show_dataset_collection(self):
history_id = self.gi.histories.create_history(name="TestDatasetCollectionShow")["id"]
dataset_collection1 = self._create_pair_in_history(history_id)
dataset_collection2 = self.gi.dataset_collections.show_dataset_collection(dataset_collection1["id"])
for key in (
"collection_type",
"deleted",
"id",
"hid",
"history_content_type",
"history_id",
"name",
"url",
"visible",
):
assert dataset_collection1[key] == dataset_collection2[key]
for element1, element2 in zip(dataset_collection1["elements"], dataset_collection2["elements"]):
assert element1["id"] == element2["id"]
assert element1.keys() == element2.keys()
for key in element1["object"].keys():
assert key in element2["object"].keys()
def test_download_dataset_collection(self):
history_id = self.gi.histories.create_history(name="TestDatasetCollectionDownload")["id"]
dataset_collection_id = self._create_pair_in_history(history_id)["id"]
self.gi.dataset_collections.wait_for_dataset_collection(dataset_collection_id)
tempdir = tempfile.mkdtemp(prefix="bioblend_test_dataset_collection_download_")
archive_path = os.path.join(tempdir, "dataset_collection")
archive_type = self.gi.dataset_collections.download_dataset_collection(
dataset_collection_id, file_path=archive_path
)["archive_type"]
expected_contents = signature(self._test_dataset).parameters["contents"].default + "\n"
extract_dir_path = os.path.join(tempdir, "extracted_files")
os.mkdir(extract_dir_path)
if archive_type == "zip":
archive: Union[ZipFile, tarfile.TarFile] = ZipFile(archive_path)
elif archive_type == "tgz":
archive = tarfile.open(archive_path)
archive.extractall(extract_dir_path)
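# Each dataset is extracted into its own subdirectory; every extracted file should have the expected contents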
for fname in os.listdir(extract_dir_path):
dataset_dir_path = os.path.join(extract_dir_path, fname)
file_path = os.path.join(dataset_dir_path, os.listdir(dataset_dir_path)[0])
with open(file_path) as f:
assert expected_contents == f.read()
archive.close()
def test_wait_for_dataset_collection(self):
history_id = self.gi.histories.create_history(name="TestDatasetCollectionWait")["id"]
dataset_collection_id = self._create_pair_in_history(history_id)["id"]
dataset_collection = self.gi.dataset_collections.wait_for_dataset_collection(dataset_collection_id)
for element in dataset_collection["elements"]:
assert element["object"]["state"] == "ok"
def _create_pair_in_history(self, history_id: str) -> Dict[str, Any]:
dataset1_id = self._test_dataset(history_id)
dataset2_id = self._test_dataset(history_id)
collection_response = self.gi.histories.create_dataset_collection(
history_id=history_id,
collection_description=dataset_collections.CollectionDescription(
name="MyTestPair",
type="paired",
elements=[
dataset_collections.HistoryDatasetElement(name="forward", id=dataset1_id),
dataset_collections.HistoryDatasetElement(name="reverse", id=dataset2_id),
],
),
)
return collection_response
bioblend-1.2.0/bioblend/_tests/TestGalaxyDatasets.py 0000664 0000000 0000000 00000024700 14447617043 0022514 0 ustar 00root root 0000000 0000000 import shutil
import tempfile
import pytest
from bioblend import (
ConnectionError,
galaxy,
)
from . import (
GalaxyTestBase,
test_util,
)
class TestGalaxyDatasets(GalaxyTestBase.GalaxyTestBase):
def setUp(self):
super().setUp()
self.history_id = self.gi.histories.create_history(name="TestDataset")["id"]
self.dataset_contents = "line 1\nline 2\rline 3\r\nline 4"
self.dataset_id = self._test_dataset(self.history_id, contents=self.dataset_contents)
self.gi.datasets.wait_for_dataset(self.dataset_id)
def tearDown(self):
self.gi.histories.delete_history(self.history_id, purge=True)
@test_util.skip_unless_galaxy("release_19.05")
def test_show_nonexistent_dataset(self):
with pytest.raises(ConnectionError):
self.gi.datasets.show_dataset("nonexistent_id")
def test_show_dataset(self):
self.gi.datasets.show_dataset(self.dataset_id)
def test_download_dataset(self):
with pytest.raises((TypeError, ConnectionError)):
self.gi.datasets.download_dataset(None) # type: ignore[call-overload]
expected_contents = ("\n".join(self.dataset_contents.splitlines()) + "\n").encode()
# download_dataset() with file_path=None is already tested in TestGalaxyTools.test_paste_content()
# self._wait_and_verify_dataset(self.dataset_id, expected_contents)
tempdir = tempfile.mkdtemp(prefix="bioblend_test_")
try:
downloaded_dataset = self.gi.datasets.download_dataset(
self.dataset_id, file_path=tempdir, maxwait=GalaxyTestBase.BIOBLEND_TEST_JOB_TIMEOUT * 2
)
assert downloaded_dataset.startswith(tempdir)
with open(downloaded_dataset, "rb") as f:
assert f.read() == expected_contents
finally:
shutil.rmtree(tempdir)
with tempfile.NamedTemporaryFile(prefix="bioblend_test_") as f:
download_filename = self.gi.datasets.download_dataset(
self.dataset_id,
file_path=f.name,
use_default_filename=False,
maxwait=GalaxyTestBase.BIOBLEND_TEST_JOB_TIMEOUT,
)
assert download_filename == f.name
f.flush()
assert f.read() == expected_contents
@test_util.skip_unless_galaxy("release_19.05")
def test_get_datasets(self):
datasets = self.gi.datasets.get_datasets()
dataset_ids = [dataset["id"] for dataset in datasets]
assert self.dataset_id in dataset_ids
@test_util.skip_unless_galaxy("release_19.05")
def test_get_datasets_history(self):
datasets = self.gi.datasets.get_datasets(history_id=self.history_id)
assert len(datasets) == 1
@test_util.skip_unless_galaxy("release_19.05")
def test_get_datasets_limit_offset(self):
datasets = self.gi.datasets.get_datasets(limit=1)
assert len(datasets) == 1
datasets = self.gi.datasets.get_datasets(history_id=self.history_id, offset=1)
assert datasets == []
@test_util.skip_unless_galaxy("release_19.05")
def test_get_datasets_name(self):
datasets = self.gi.datasets.get_datasets(history_id=self.history_id, name="Pasted Entry")
assert len(datasets) == 1
datasets = self.gi.datasets.get_datasets(history_id=self.history_id, name="Wrong Name")
assert datasets == []
@test_util.skip_unless_galaxy("release_20.05")
def test_get_datasets_time(self):
dataset = self.gi.datasets.show_dataset(self.dataset_id)
ct = dataset["create_time"]
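# The dataset's own create/update times (used as inclusive min or max) should match it; a far-future minimum or far-past maximum should match nothing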
datasets = self.gi.datasets.get_datasets(history_id=self.history_id, create_time_min=ct)
assert len(datasets) == 1
datasets = self.gi.datasets.get_datasets(history_id=self.history_id, create_time_max=ct)
assert len(datasets) == 1
datasets = self.gi.datasets.get_datasets(history_id=self.history_id, create_time_min="2100-01-01T00:00:00")
assert datasets == []
datasets = self.gi.datasets.get_datasets(history_id=self.history_id, create_time_max="2000-01-01T00:00:00")
assert datasets == []
ut = dataset["update_time"]
datasets = self.gi.datasets.get_datasets(history_id=self.history_id, update_time_min=ut)
assert len(datasets) == 1
datasets = self.gi.datasets.get_datasets(history_id=self.history_id, update_time_max=ut)
assert len(datasets) == 1
datasets = self.gi.datasets.get_datasets(history_id=self.history_id, update_time_min="2100-01-01T00:00:00")
assert datasets == []
datasets = self.gi.datasets.get_datasets(history_id=self.history_id, update_time_max="2000-01-01T00:00:00")
assert datasets == []
@test_util.skip_unless_galaxy("release_20.05")
def test_get_datasets_extension(self):
datasets = self.gi.datasets.get_datasets(history_id=self.history_id, extension="txt")
assert len(datasets) == 1
datasets = self.gi.datasets.get_datasets(history_id=self.history_id, extension="bam")
assert datasets == []
@test_util.skip_unless_galaxy("release_22.01")
def test_get_datasets_extension_list(self):
datasets = self.gi.datasets.get_datasets(history_id=self.history_id, extension=["bam", "txt"])
assert len(datasets) == 1
@test_util.skip_unless_galaxy("release_20.05")
def test_get_datasets_state(self):
datasets = self.gi.datasets.get_datasets(history_id=self.history_id, state="ok")
assert len(datasets) == 1
datasets = self.gi.datasets.get_datasets(history_id=self.history_id, state="queued")
assert datasets == []
with pytest.raises(ConnectionError):
self.gi.datasets.get_datasets(history_id=self.history_id, state="nonexistent_state")
datasets = self.gi.datasets.get_datasets(history_id=self.history_id, state=["ok", "queued"])
assert len(datasets) == 1
@test_util.skip_unless_galaxy("release_20.05")
def test_get_datasets_visible(self):
datasets = self.gi.datasets.get_datasets(history_id=self.history_id, visible=True)
assert len(datasets) == 1
datasets = self.gi.datasets.get_datasets(history_id=self.history_id, visible=False)
assert len(datasets) == 0
@test_util.skip_unless_galaxy("release_19.05")
def test_get_datasets_ordering(self):
self.dataset_id2 = self._test_dataset(self.history_id, contents=self.dataset_contents)
self.gi.datasets.wait_for_dataset(self.dataset_id2)
datasets = self.gi.datasets.get_datasets(history_id=self.history_id, order="create_time-dsc")
assert datasets[0]["id"] == self.dataset_id2
datasets = self.gi.datasets.get_datasets(history_id=self.history_id, order="create_time-asc")
assert datasets[0]["id"] == self.dataset_id
datasets = self.gi.datasets.get_datasets(history_id=self.history_id, order="hid-dsc")
assert datasets[0]["id"] == self.dataset_id2
datasets = self.gi.datasets.get_datasets(history_id=self.history_id, order="hid-asc")
assert datasets[0]["id"] == self.dataset_id
@test_util.skip_unless_galaxy("release_19.05")
def test_get_datasets_deleted(self):
deleted_datasets = self.gi.datasets.get_datasets(history_id=self.history_id, deleted=True)
assert deleted_datasets == []
self.gi.histories.delete_dataset(self.history_id, self.dataset_id)
deleted_datasets = self.gi.datasets.get_datasets(history_id=self.history_id, deleted=True)
assert len(deleted_datasets) == 1
purged_datasets = self.gi.datasets.get_datasets(history_id=self.history_id, purged=True)
assert purged_datasets == []
self.gi.histories.delete_dataset(self.history_id, self.dataset_id, purge=True)
purged_datasets = self.gi.datasets.get_datasets(history_id=self.history_id, purged=True)
assert len(purged_datasets) == 1
@test_util.skip_unless_galaxy("release_19.05")
def test_get_datasets_tool_id_and_tag(self):
cat1_datasets = self.gi.datasets.get_datasets(history_id=self.history_id, tool_id="cat1")
assert cat1_datasets == []
upload1_datasets = self.gi.datasets.get_datasets(history_id=self.history_id, tool_id="upload1")
assert len(upload1_datasets) == 1
self.gi.histories.update_dataset(self.history_id, self.dataset_id, tags=["test"])
tagged_datasets = self.gi.datasets.get_datasets(history_id=self.history_id, tag="test")
assert len(tagged_datasets) == 1
def test_wait_for_dataset(self):
history_id = self.gi.histories.create_history(name="TestWaitForDataset")["id"]
dataset_contents = "line 1\nline 2\rline 3\r\nline 4"
dataset_id = self._test_dataset(history_id, contents=dataset_contents)
dataset = self.gi.datasets.wait_for_dataset(dataset_id)
assert dataset["state"] == "ok"
self.gi.histories.delete_history(history_id, purge=True)
@test_util.skip_unless_galaxy("release_19.05")
def test_dataset_permissions(self):
admin_user_id = self.gi.users.get_current_user()["id"]
username = test_util.random_string()
user_id = self.gi.users.create_local_user(username, f"{username}@example.org", test_util.random_string(20))[
"id"
]
user_api_key = self.gi.users.create_user_apikey(user_id)
anonymous_gi = galaxy.GalaxyInstance(url=self.gi.base_url, key=None)
user_gi = galaxy.GalaxyInstance(url=self.gi.base_url, key=user_api_key)
sharing_role = self.gi.roles.create_role("sharing_role", "sharing_role", [user_id, admin_user_id])["id"]
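# Make the dataset private: anonymous users should then get an error when trying to access it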
self.gi.datasets.publish_dataset(self.dataset_id, published=False)
with pytest.raises(ConnectionError):
anonymous_gi.datasets.show_dataset(self.dataset_id)
self.gi.datasets.publish_dataset(self.dataset_id, published=True)
# now dataset is public, i.e. accessible to anonymous users
assert anonymous_gi.datasets.show_dataset(self.dataset_id)["id"] == self.dataset_id
self.gi.datasets.publish_dataset(self.dataset_id, published=False)
with pytest.raises(ConnectionError):
user_gi.datasets.show_dataset(self.dataset_id)
self.gi.datasets.update_permissions(self.dataset_id, access_ids=[sharing_role], manage_ids=[sharing_role])
assert user_gi.datasets.show_dataset(self.dataset_id)["id"] == self.dataset_id
# anonymous access now fails because the dataset is only shared with the specific user role
with pytest.raises(ConnectionError):
anonymous_gi.datasets.show_dataset(self.dataset_id)
bioblend-1.2.0/bioblend/_tests/TestGalaxyFolders.py 0000664 0000000 0000000 00000004574 14447617043 0022351 0 ustar 00root root 0000000 0000000 from typing import (
Dict,
List,
)
from . import GalaxyTestBase
FOO_DATA = "foo\nbar\n"
class TestGalaxyFolders(GalaxyTestBase.GalaxyTestBase):
def setUp(self):
super().setUp()
self.name = "automated test folder"
self.library = self.gi.libraries.create_library(
self.name, description="automated test", synopsis="automated test synopsis"
)
self.folder = self.gi.folders.create_folder(
self.library["root_folder_id"], self.name, description="automatically created folder"
)
def tearDown(self):
self.gi.libraries.delete_library(self.library["id"])
def test_create_folder(self):
assert self.folder["name"] == self.name
assert self.folder["description"] == "automatically created folder"
def test_show_folder(self):
f2 = self.gi.folders.show_folder(self.folder["id"])
assert f2["id"] == self.folder["id"]
def test_show_folder_contents(self):
f2 = self.gi.folders.show_folder(self.folder["id"], contents=True)
assert "folder_contents" in f2
assert "metadata" in f2
assert self.name == f2["metadata"]["folder_name"]
def test_delete_folder(self):
self.sub_folder = self.gi.folders.create_folder(self.folder["id"], self.name)
self.gi.folders.delete_folder(self.sub_folder["id"])
def test_update_folder(self):
self.folder = self.gi.folders.update_folder(self.folder["id"], "new-name", "new-description")
assert self.folder["name"] == "new-name"
assert self.folder["description"] == "new-description"
def test_get_set_permissions(self):
empty_permission: Dict[str, List] = {
"add_library_item_role_list": [],
"modify_folder_role_list": [],
"manage_folder_role_list": [],
}
# They should be empty to start with
assert self.gi.folders.get_permissions(self.folder["id"], scope="current") == empty_permission
assert self.gi.folders.get_permissions(self.folder["id"], scope="available") == empty_permission
# Then we'll add a role
role = self.gi.roles.get_roles()[0]
self.gi.folders.set_permissions(self.folder["id"], add_ids=[role["id"]])
assert (
role["id"]
in self.gi.folders.get_permissions(self.folder["id"], scope="available")["add_library_item_role_list"][0]
)
bioblend-1.2.0/bioblend/_tests/TestGalaxyGroups.py 0000664 0000000 0000000 00000004704 14447617043 0022225 0 ustar 00root root 0000000 0000000 """
WARNING: only admins can operate on groups!
"""
import uuid
from . import GalaxyTestBase
class TestGalaxyGroups(GalaxyTestBase.GalaxyTestBase):
def setUp(self):
super().setUp()
self.name = f"test_{uuid.uuid4().hex}"
self.group = self.gi.groups.create_group(self.name)[0]
def tearDown(self):
# As of 2015/04/13, deleting a group is not possible through the API
pass
def test_create_group(self):
assert self.group["name"] == self.name
assert self.group["id"] is not None
def test_get_groups(self):
groups = self.gi.groups.get_groups()
for group in groups:
assert group["id"] is not None
assert group["name"] is not None
def test_show_group(self):
group_data = self.gi.groups.show_group(self.group["id"])
assert self.group["id"] == group_data["id"]
assert self.group["name"] == group_data["name"]
def test_get_group_users(self):
group_users = self.gi.groups.get_group_users(self.group["id"])
assert group_users == []
def test_get_group_roles(self):
group_roles = self.gi.groups.get_group_roles(self.group["id"])
assert group_roles == []
def test_update_group(self):
new_name = f"test_{uuid.uuid4().hex}"
new_users = [self.gi.users.get_current_user()["id"]]
self.gi.groups.update_group(self.group["id"], new_name, user_ids=new_users)
updated_group = self.gi.groups.show_group(self.group["id"])
assert self.group["id"] == updated_group["id"]
assert updated_group["name"] == new_name
updated_group_users = [_["id"] for _ in self.gi.groups.get_group_users(self.group["id"])]
assert set(updated_group_users) == set(new_users)
updated_group_roles = [_["id"] for _ in self.gi.groups.get_group_roles(self.group["id"])]
assert set(updated_group_roles) == set()
def test_add_delete_group_user(self):
new_user = self.gi.users.get_current_user()["id"]
ret = self.gi.groups.add_group_user(self.group["id"], new_user)
assert ret["id"] == new_user
updated_group_users = [_["id"] for _ in self.gi.groups.get_group_users(self.group["id"])]
assert new_user in updated_group_users
self.gi.groups.delete_group_user(self.group["id"], new_user)
updated_group_users = [_["id"] for _ in self.gi.groups.get_group_users(self.group["id"])]
assert new_user not in updated_group_users
bioblend-1.2.0/bioblend/_tests/TestGalaxyHistories.py 0000664 0000000 0000000 00000030526 14447617043 0022720 0 ustar 00root root 0000000 0000000 """
"""
import os
import shutil
import tarfile
import tempfile
import pytest
from bioblend import (
ConnectionError,
galaxy,
)
from . import (
GalaxyTestBase,
test_util,
)
class TestGalaxyHistories(GalaxyTestBase.GalaxyTestBase):
def setUp(self):
super().setUp()
self.default_history_name = "buildbot - automated test"
self.history = self.gi.histories.create_history(name=self.default_history_name)
def test_create_history(self):
history_name = "another buildbot - automated test"
new_history = self.gi.histories.create_history(name=history_name)
assert new_history["id"] is not None
assert new_history["name"] == history_name
assert new_history["url"] is not None
def test_update_history(self):
new_name = "buildbot - automated test renamed"
new_annotation = f"Annotation for {new_name}"
new_tags = ["tag1", "tag2"]
updated_hist = self.gi.histories.update_history(
self.history["id"], name=new_name, annotation=new_annotation, tags=new_tags
)
if "id" not in updated_hist:
updated_hist = self.gi.histories.show_history(self.history["id"])
assert self.history["id"] == updated_hist["id"]
assert updated_hist["name"] == new_name
assert updated_hist["annotation"] == new_annotation
assert updated_hist["tags"] == new_tags
def test_publish_history(self):
# Verify that searching for published histories does not return the test history
published_histories = self.gi.histories.get_histories(published=True)
assert not any(h["id"] == self.history["id"] for h in published_histories)
updated_hist = self.gi.histories.update_history(self.history["id"], published=True)
if "id" not in updated_hist:
updated_hist = self.gi.histories.show_history(self.history["id"])
assert self.history["id"] == updated_hist["id"]
assert updated_hist["published"]
# Verify that searching for published histories now returns the test history
published_histories = self.gi.histories.get_histories(published=True)
assert any(h["id"] == self.history["id"] for h in published_histories)
# Verify that get_published_histories as an anonymous user also returns the test history
anonymous_gi = galaxy.GalaxyInstance(url=self.gi.base_url, key=None)
published_histories = anonymous_gi.histories.get_published_histories()
assert any(h["id"] == self.history["id"] for h in published_histories)
history_from_slug = anonymous_gi.histories.get_published_histories(slug=updated_hist["slug"])
assert len(history_from_slug) == 1
assert self.history["id"] == history_from_slug[0]["id"]
def test_get_histories(self):
# Make sure there's at least one value - the one we created
all_histories = self.gi.histories.get_histories()
assert len(all_histories) > 0
# Check whether id is present, when searched by name
histories = self.gi.histories.get_histories(name=self.default_history_name)
assert len([h for h in histories if h["id"] == self.history["id"]]) == 1
# TODO: check whether deleted history is returned correctly
# At the moment, get_histories() returns only not-deleted histories
# and get_histories(deleted=True) returns only deleted histories,
# so they are not comparable.
# In the future, according to https://trello.com/c/MoilsmVv/1673-api-incoherent-and-buggy-indexing-of-deleted-entities ,
# get_histories() will return both not-deleted and deleted histories
# and we can uncomment the following test.
# deleted_history = self.gi.histories.get_histories(deleted=True)
# assert len(all_histories) >= len(deleted_history)
@test_util.skip_unless_galaxy("release_20.01")
def test_other_users_histories(self):
username = test_util.random_string()
user_id = self.gi.users.create_local_user(username, f"{username}@example.org", test_util.random_string(20))[
"id"
]
user_api_key = self.gi.users.create_user_apikey(user_id)
user_gi = galaxy.GalaxyInstance(url=self.gi.base_url, key=user_api_key)
# Normal users cannot use the `all` parameter
with pytest.raises(ConnectionError):
other_user_histories = user_gi.histories.get_histories(all=True)
user_history_id = user_gi.histories.create_history(name=f"History for {username}")["id"]
# Get all users' histories from an admin account
other_user_histories = self.gi.histories.get_histories(all=True)
assert user_history_id in [h["id"] for h in other_user_histories]
def test_show_history(self):
history_data = self.gi.histories.show_history(self.history["id"])
assert self.history["id"] == history_data["id"]
assert self.history["name"] == history_data["name"]
assert "new" == history_data["state"]
def test_show_history_with_contents(self):
history_id = self.history["id"]
contents = self.gi.histories.show_history(history_id, contents=True)
# Empty history has no datasets, content length should be 0
assert len(contents) == 0
self._test_dataset(history_id)
contents = self.gi.histories.show_history(history_id, contents=True)
# history has 1 dataset, content length should be 1
assert len(contents) == 1
contents = self.gi.histories.show_history(history_id, contents=True, types=["dataset"])
# filtering for dataset, content length should still be 1
assert len(contents) == 1
contents = self.gi.histories.show_history(history_id, contents=True, types=["dataset_collection"])
# filtering for dataset collection but there's no collection in the history
assert len(contents) == 0
contents = self.gi.histories.show_history(history_id, contents=True, types=["dataset", "dataset_collection"])
assert len(contents) == 1
def test_create_history_tag(self):
new_tag = "tag1"
self.gi.histories.create_history_tag(self.history["id"], new_tag)
updated_hist = self.gi.histories.show_history(self.history["id"])
assert self.history["id"] == updated_hist["id"]
assert new_tag in updated_hist["tags"]
def test_show_dataset(self):
history_id = self.history["id"]
dataset1_id = self._test_dataset(history_id)
dataset = self.gi.histories.show_dataset(history_id, dataset1_id)
for key in ["name", "hid", "id", "deleted", "history_id", "visible"]:
assert key in dataset
assert dataset["history_id"] == history_id
assert dataset["hid"] == 1
assert dataset["id"] == dataset1_id
assert not dataset["deleted"]
assert dataset["visible"]
@test_util.skip_unless_galaxy("release_22.01")
def test_show_dataset_provenance(self) -> None:
MINIMAL_PROV_KEYS = ("id", "uuid")
OTHER_PROV_KEYS = ("job_id", "parameters", "stderr", "stdout", "tool_id")
ALL_PROV_KEYS = MINIMAL_PROV_KEYS + OTHER_PROV_KEYS
history_id = self.history["id"]
dataset1_id = self._test_dataset(history_id)
dataset2_id = self._run_random_lines1(history_id, dataset1_id)["outputs"][0]["id"]
prov = self.gi.histories.show_dataset_provenance(history_id, dataset2_id)
for key in ALL_PROV_KEYS:
assert key in prov
for key in MINIMAL_PROV_KEYS:
assert key in prov["parameters"]["input"]
for key in OTHER_PROV_KEYS:
assert key not in prov["parameters"]["input"]
recursive_prov = self.gi.histories.show_dataset_provenance(history_id, dataset2_id, follow=True)
for key in ALL_PROV_KEYS:
assert key in recursive_prov
for key in ALL_PROV_KEYS:
assert key in recursive_prov["parameters"]["input"]
def test_delete_dataset(self):
history_id = self.history["id"]
dataset1_id = self._test_dataset(history_id)
self.gi.histories.delete_dataset(history_id, dataset1_id)
dataset = self.gi.histories.show_dataset(history_id, dataset1_id)
assert dataset["deleted"]
assert not dataset["purged"]
def test_purge_dataset(self):
history_id = self.history["id"]
dataset1_id = self._test_dataset(history_id)
self.gi.histories.delete_dataset(history_id, dataset1_id, purge=True)
dataset = self.gi.histories.show_dataset(history_id, dataset1_id)
assert dataset["deleted"]
assert dataset["purged"]
def test_update_dataset(self):
history_id = self.history["id"]
dataset1_id = self._test_dataset(history_id)
updated_dataset = self.gi.histories.update_dataset(history_id, dataset1_id, visible=False)
if "id" not in updated_dataset:
updated_dataset = self.gi.histories.show_dataset(history_id, dataset1_id)
assert not updated_dataset["visible"]
def test_upload_dataset_from_library(self):
pass
# download_dataset() is already tested in TestGalaxyDatasets
def test_delete_history(self):
result = self.gi.histories.delete_history(self.history["id"])
assert result["deleted"]
all_histories = self.gi.histories.get_histories()
assert not any(d["id"] == self.history["id"] for d in all_histories)
def test_undelete_history(self):
self.gi.histories.delete_history(self.history["id"])
self.gi.histories.undelete_history(self.history["id"])
all_histories = self.gi.histories.get_histories()
assert any(d["id"] == self.history["id"] for d in all_histories)
def test_get_status(self):
state = self.gi.histories.get_status(self.history["id"])
assert "new" == state["state"]
def test_get_most_recently_used_history(self):
most_recently_used_history = self.gi.histories.get_most_recently_used_history()
# if the user has been created via the API, it does not have
# a session, therefore no history
if most_recently_used_history is not None:
assert most_recently_used_history["id"] is not None
assert most_recently_used_history["name"] is not None
assert most_recently_used_history["state"] is not None
def test_download_history(self):
jeha_id = self.gi.histories.export_history(self.history["id"], wait=True, maxwait=60)
assert jeha_id
tempdir = tempfile.mkdtemp(prefix="bioblend_test_")
temp_fn = os.path.join(tempdir, "export.tar.gz")
try:
with open(temp_fn, "wb") as fo:
self.gi.histories.download_history(self.history["id"], jeha_id, fo)
assert tarfile.is_tarfile(temp_fn)
finally:
shutil.rmtree(tempdir)
def test_import_history(self):
path = test_util.get_abspath(os.path.join("data", "Galaxy-History-Test-history-for-export.tar.gz"))
self.gi.histories.import_history(file_path=path)
def test_copy_dataset(self):
history_id = self.history["id"]
contents = "1\t2\t3"
dataset1_id = self._test_dataset(history_id, contents=contents)
self.history_id2 = self.gi.histories.create_history("TestCopyDataset")["id"]
copied_dataset = self.gi.histories.copy_dataset(self.history_id2, dataset1_id)
expected_contents = ("\n".join(contents.splitlines()) + "\n").encode()
self._wait_and_verify_dataset(copied_dataset["id"], expected_contents)
self.gi.histories.delete_history(self.history_id2, purge=True)
@test_util.skip_unless_galaxy("release_20.09")
def test_update_dataset_datatype(self):
history_id = self.history["id"]
dataset1_id = self._test_dataset(history_id)
self._wait_and_verify_dataset(dataset1_id, b"1\t2\t3\n")
original_hda = self.gi.datasets.show_dataset(dataset1_id)
assert original_hda["extension"] == "bed"
self.gi.histories.update_dataset(history_id, dataset1_id, datatype="tabular")
updated_hda = self.gi.datasets.show_dataset(dataset1_id)
assert updated_hda["extension"] == "tabular"
def test_get_extra_files(self):
history_id = self.history["id"]
dataset_id = self._test_dataset(history_id)
extra_files = self.gi.histories.get_extra_files(history_id, dataset_id)
assert extra_files == []
def tearDown(self):
self.gi.histories.delete_history(self.history["id"], purge=True)
bioblend-1.2.0/bioblend/_tests/TestGalaxyInstance.py 0000664 0000000 0000000 00000004074 14447617043 0022512 0 ustar 00root root 0000000 0000000 """
Tests on the GalaxyInstance object itself.
"""
import os
import time
import unittest
import pytest
from bioblend import ConnectionError
from bioblend.galaxy import GalaxyInstance
from . import test_util
class TestGalaxyInstance(unittest.TestCase):
def setUp(self):
# "connect" to a fake Galaxy instance
self.gi = GalaxyInstance("http://localhost:56789", key="whatever")
def test_url_attribute(self):
assert self.gi.base_url == "http://localhost:56789"
assert self.gi.url == "http://localhost:56789/api"
# Test instance served at a subdirectory
gi = GalaxyInstance("http://localhost:56789/galaxy/", key="whatever")
assert gi.base_url == "http://localhost:56789/galaxy"
assert gi.url == "http://localhost:56789/galaxy/api"
def test_set_max_get_attempts(self):
self.gi.max_get_attempts = 3
assert 3 == self.gi.max_get_attempts
def test_set_retry_delay(self):
self.gi.get_retry_delay = 5.0
assert 5.0 == self.gi.get_retry_delay
def test_get_retry(self):
# We set the client to try 3 times, with a delay of 2 seconds between
# attempts. So, we expect the call to take at least
# get_retry_delay * (max_get_attempts - 1) = 4 seconds before failing.
self.gi.max_get_attempts = 3
self.gi.get_retry_delay = 2
start = time.time()
with pytest.raises(ConnectionError):
self.gi.libraries.get_libraries()
end = time.time()
duration = end - start
assert duration > self.gi.get_retry_delay * (self.gi.max_get_attempts - 1), "Didn't seem to retry long enough"
def test_missing_scheme_fake_url(self):
with pytest.raises(ValueError):
GalaxyInstance("localhost:56789", key="whatever")
@test_util.skip_unless_galaxy()
def test_missing_scheme_real_url(self):
galaxy_url = os.environ["BIOBLEND_GALAXY_URL"]
# Strip the scheme from galaxy_url
scheme_sep = "://"
if scheme_sep in galaxy_url:
galaxy_url = galaxy_url.partition(scheme_sep)[2]
GalaxyInstance(url=galaxy_url)
bioblend-1.2.0/bioblend/_tests/TestGalaxyInvocations.py 0000664 0000000 0000000 00000015326 14447617043 0023244 0 ustar 00root root 0000000 0000000 import contextlib
import os
import time
from typing import (
Any,
Dict,
)
from . import (
GalaxyTestBase,
test_util,
)
class TestGalaxyInvocations(GalaxyTestBase.GalaxyTestBase):
def setUp(self):
super().setUp()
path = test_util.get_abspath(os.path.join("data", "paste_columns.ga"))
self.workflow_id = self.gi.workflows.import_workflow_from_local_path(path)["id"]
self.history_id = self.gi.histories.create_history(name="TestGalaxyInvocations")["id"]
self.dataset_id = self._test_dataset(self.history_id)
def tearDown(self):
self.gi.histories.delete_history(self.history_id, purge=True)
@test_util.skip_unless_galaxy("release_19.09")
def test_cancel_invocation(self):
invocation = self._invoke_workflow()
invocation_id = invocation["id"]
invocations = self.gi.invocations.get_invocations()
assert len(invocations) == 1
assert invocations[0]["id"] == invocation_id
self.gi.invocations.cancel_invocation(invocation_id)
invocation = self.gi.invocations.show_invocation(invocation_id)
assert invocation["state"] == "cancelled"
@test_util.skip_unless_galaxy("release_20.01")
def test_get_invocations(self):
invoc1 = self._invoke_workflow()
# Run the first workflow on another history
dataset = {"src": "hda", "id": self.dataset_id}
hist2_id = self.gi.histories.create_history("hist2")["id"]
invoc2 = self.gi.workflows.invoke_workflow(
self.workflow_id, history_id=hist2_id, inputs={"Input 1": dataset, "Input 2": dataset}, inputs_by="name"
)
# Run another workflow on the 2nd history
path = test_util.get_abspath(os.path.join("data", "paste_columns.ga"))
workflow2_id = self.gi.workflows.import_workflow_from_local_path(path)["id"]
invoc3 = self.gi.workflows.invoke_workflow(
workflow2_id, history_id=hist2_id, inputs={"Input 1": dataset, "Input 2": dataset}, inputs_by="name"
)
for invoc in (invoc1, invoc2, invoc3):
self.gi.invocations.wait_for_invocation(invoc["id"])
# Test filtering by workflow ID
for wf_id, expected_invoc_num in {self.workflow_id: 2, workflow2_id: 1}.items():
invocs = self.gi.invocations.get_invocations(workflow_id=wf_id)
assert len(invocs) == expected_invoc_num
for invoc in invocs:
assert invoc["workflow_id"] == wf_id
# Test filtering by history ID
for hist_id, expected_invoc_num in {self.history_id: 1, hist2_id: 2}.items():
invocs = self.gi.invocations.get_invocations(history_id=hist_id)
assert len(invocs) == expected_invoc_num
for invoc in invocs:
assert invoc["history_id"] == hist_id
# Test limiting
limit_invocs = self.gi.invocations.get_invocations(limit=2)
assert len(limit_invocs) == 2
self.gi.histories.delete_history(hist2_id, purge=True)
@test_util.skip_unless_galaxy("release_19.09")
def test_get_invocation_report(self):
invocation = self._invoke_workflow()
invocation_id = invocation["id"]
workflow_id = invocation["workflow_id"]
report = self.gi.invocations.get_invocation_report(invocation_id)
assert report["workflows"] == {workflow_id: {"name": "paste_columns"}}
with contextlib.suppress(Exception):
# This can fail if dependencies such as weasyprint are not installed on the Galaxy server
ret = self.gi.invocations.get_invocation_report_pdf(invocation_id, "report.pdf")
assert ret is None
@test_util.skip_unless_galaxy("release_20.09")
def test_get_invocation_biocompute_object(self):
invocation = self._invoke_workflow()
self.gi.invocations.wait_for_invocation(invocation["id"])
biocompute_object = self.gi.invocations.get_invocation_biocompute_object(invocation["id"])
assert len(biocompute_object["description_domain"]["pipeline_steps"]) == 1
@test_util.skip_unless_galaxy("release_19.09")
def test_get_invocation_jobs_summary(self):
invocation = self._invoke_workflow()
self.gi.invocations.wait_for_invocation(invocation["id"])
jobs_summary = self.gi.invocations.get_invocation_summary(invocation["id"])
assert jobs_summary["populated_state"] == "ok"
step_jobs_summary = self.gi.invocations.get_invocation_step_jobs_summary(invocation["id"])
assert len(step_jobs_summary) == 1
assert step_jobs_summary[0]["populated_state"] == "ok"
@test_util.skip_unless_galaxy("release_19.09")
@test_util.skip_unless_tool("cat1")
@test_util.skip_unless_tool("cat")
def test_workflow_scheduling(self):
path = test_util.get_abspath(os.path.join("data", "test_workflow_pause.ga"))
workflow = self.gi.workflows.import_workflow_from_local_path(path)
invocation = self.gi.workflows.invoke_workflow(
workflow["id"],
inputs={"0": {"src": "hda", "id": self.dataset_id}},
history_id=self.history_id,
)
invocation_id = invocation["id"]
def invocation_steps_by_order_index() -> Dict[int, Dict[str, Any]]:
invocation = self.gi.invocations.show_invocation(invocation_id)
return {s["order_index"]: s for s in invocation["steps"]}
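# Poll for up to 10 seconds (20 x 0.5 s) until the pause step (order_index 2) appears among the invocation steps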
for _ in range(20):
if 2 in invocation_steps_by_order_index():
break
time.sleep(0.5)
steps = invocation_steps_by_order_index()
pause_step = steps[2]
assert self.gi.invocations.show_invocation_step(invocation_id, pause_step["id"])["action"] is None
self.gi.invocations.run_invocation_step_action(invocation_id, pause_step["id"], action=True)
assert self.gi.invocations.show_invocation_step(invocation_id, pause_step["id"])["action"]
self.gi.invocations.wait_for_invocation(invocation["id"])
@test_util.skip_unless_galaxy("release_21.01")
def test_rerun_invocation(self):
invocation = self._invoke_workflow()
self.gi.invocations.wait_for_invocation(invocation["id"])
rerun_invocation = self.gi.invocations.rerun_invocation(invocation["id"], import_inputs_to_history=True)
self.gi.invocations.wait_for_invocation(rerun_invocation["id"])
history = self.gi.histories.show_history(rerun_invocation["history_id"], contents=True)
assert len(history) == 3
def _invoke_workflow(self) -> Dict[str, Any]:
dataset = {"src": "hda", "id": self.dataset_id}
return self.gi.workflows.invoke_workflow(
self.workflow_id,
inputs={"Input 1": dataset, "Input 2": dataset},
history_id=self.history_id,
inputs_by="name",
)
bioblend-1.2.0/bioblend/_tests/TestGalaxyJobs.py 0000664 0000000 0000000 00000024124 14447617043 0021641 0 ustar 00root root 0000000 0000000 import os
from datetime import (
datetime,
timedelta,
)
from operator import itemgetter
from typing_extensions import Literal
from bioblend.galaxy.tools.inputs import (
dataset,
inputs,
)
from . import (
GalaxyTestBase,
test_util,
)
class TestGalaxyJobs(GalaxyTestBase.GalaxyTestBase):
def setUp(self):
super().setUp()
self.history_id = self.gi.histories.create_history(name="TestGalaxyJobs")["id"]
self.dataset_contents = "line 1\nline 2\rline 3\r\nline 4"
self.dataset_id = self._test_dataset(self.history_id, contents=self.dataset_contents)
def tearDown(self):
self.gi.histories.delete_history(self.history_id, purge=True)
@test_util.skip_unless_tool("cat1")
def test_wait_for_job(self):
tool_inputs = inputs().set("input1", dataset(self.dataset_id))
tool_output = self.gi.tools.run_tool(history_id=self.history_id, tool_id="cat1", tool_inputs=tool_inputs)
job_id = tool_output["jobs"][0]["id"]
job = self.gi.jobs.wait_for_job(job_id)
assert job["state"] == "ok"
@test_util.skip_unless_tool("random_lines1")
def test_get_jobs(self):
self._run_tool()
self._run_tool()
jobs = self.gi.jobs.get_jobs(tool_id="random_lines1", history_id=self.history_id)
assert len(jobs) == 2
jobs = self.gi.jobs.get_jobs(history_id=self.history_id, state="failed")
assert len(jobs) == 0
yesterday = datetime.today() - timedelta(days=1)
jobs = self.gi.jobs.get_jobs(date_range_max=yesterday.strftime("%Y-%m-%d"), history_id=self.history_id)
assert len(jobs) == 0
tomorrow = datetime.today() + timedelta(days=1)
jobs = self.gi.jobs.get_jobs(date_range_min=tomorrow.strftime("%Y-%m-%d"))
assert len(jobs) == 0
jobs = self.gi.jobs.get_jobs(date_range_min=datetime.today().strftime("%Y-%m-%d"), history_id=self.history_id)
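# 3 jobs expected: the 2 random_lines1 jobs above plus the dataset upload job from setUp()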
assert len(jobs) == 3
@test_util.skip_unless_galaxy("release_21.05")
def test_get_jobs_with_filtering(self):
path = test_util.get_abspath(os.path.join("data", "paste_columns.ga"))
workflow_id = self.gi.workflows.import_workflow_from_local_path(path)["id"]
dataset = {"src": "hda", "id": self.dataset_id}
invocation1 = self.gi.workflows.invoke_workflow(
workflow_id,
inputs={"Input 1": dataset, "Input 2": dataset},
history_id=self.history_id,
inputs_by="name",
)
invocation2 = self.gi.workflows.invoke_workflow(
workflow_id,
inputs={"Input 1": dataset, "Input 2": dataset},
history_id=self.history_id,
inputs_by="name",
)
self.gi.invocations.wait_for_invocation(invocation1["id"])
self.gi.invocations.wait_for_invocation(invocation2["id"])
all_jobs = self.gi.jobs.get_jobs(history_id=self.history_id, order_by="create_time")
assert len(all_jobs) == 3
job1_id = all_jobs[1]["id"]
jobs = self.gi.jobs.get_jobs(history_id=self.history_id, limit=1, offset=1, order_by="create_time")
assert len(jobs) == 1
assert jobs[0]["id"] == job1_id
jobs = self.gi.jobs.get_jobs(invocation_id=invocation1["id"])
assert len(jobs) == 1
job_id_inv = jobs[0]["id"]
jobs = self.gi.jobs.get_jobs(workflow_id=workflow_id)
assert len(jobs) == 2
assert job_id_inv in [job["id"] for job in jobs]
@test_util.skip_unless_galaxy("release_21.01")
@test_util.skip_unless_tool("random_lines1")
def test_run_and_rerun_random_lines(self):
original_output = self._run_tool(input_format="21.01")
original_job_id = original_output["jobs"][0]["id"]
rerun_output = self.gi.jobs.rerun_job(original_job_id)
original_output_content = self.gi.datasets.download_dataset(original_output["outputs"][0]["id"])
rerun_output_content = self.gi.datasets.download_dataset(rerun_output["outputs"][0]["id"])
assert rerun_output_content == original_output_content
@test_util.skip_unless_galaxy("release_21.01")
@test_util.skip_unless_tool("Show beginning1")
def test_rerun_and_remap(self):
path = test_util.get_abspath(os.path.join("data", "select_first.ga"))
wf = self.gi.workflows.import_workflow_from_local_path(path)
wf_inputs = {
"0": {"src": "hda", "id": self.dataset_id},
"1": "-1",
}
invocation_id = self.gi.workflows.invoke_workflow(wf["id"], inputs=wf_inputs, history_id=self.history_id)["id"]
invocation = self.gi.invocations.wait_for_invocation(invocation_id)
job_steps = [step for step in invocation["steps"] if step["job_id"]]
job_steps.sort(key=itemgetter("order_index"))
try:
self.gi.jobs.wait_for_job(job_steps[0]["job_id"])
except Exception:
pass # indicates the job failed as expected
else:
raise Exception("The job should have failed")
history_contents = self.gi.histories.show_history(self.history_id, contents=True)
assert len(history_contents) == 3
assert history_contents[1]["state"] == "error"
assert history_contents[2]["state"] == "paused"
# resume the paused step job
resumed_outputs = self.gi.jobs.resume_job(job_steps[-1]["job_id"])
assert resumed_outputs[0]["name"] == "out_file1"
# the following does not pass stably - the job goes back to paused too quickly
# history_contents_resumed = self.gi.histories.show_history(self.history_id, contents=True)
# assert history_contents_resumed[2]["state"] != "paused"
# now rerun and remap with correct input param
failed_job_id = self.gi.datasets.show_dataset(history_contents[1]["id"])["creating_job"]
tool_inputs_update = {"lineNum": "1"}
rerun_job = self.gi.jobs.rerun_job(failed_job_id, remap=True, tool_inputs_update=tool_inputs_update)
new_job_id = rerun_job["jobs"][0]["id"]
# Wait for the last dataset in the history to be unpaused and complete
last_dataset = self.gi.histories.show_history(self.history_id, contents=True)[-1]
last_job_id = self.gi.datasets.show_dataset(last_dataset["id"])["creating_job"]
self.gi.jobs.wait_for_job(new_job_id)
self.gi.jobs.resume_job(last_job_id) # last_job can get stuck in the paused state - resume it just in case
self.gi.jobs.wait_for_job(last_job_id)
assert last_dataset["hid"] == 3
assert last_dataset["id"] == history_contents[2]["id"]
self._wait_and_verify_dataset(last_dataset["id"], b"line 1\tline 1\n")
@test_util.skip_unless_galaxy("release_19.05")
@test_util.skip_unless_tool("random_lines1")
def test_get_common_problems(self):
job_id = self._run_tool()["jobs"][0]["id"]
response = self.gi.jobs.get_common_problems(job_id)
assert response == {"has_duplicate_inputs": False, "has_empty_inputs": True}
@test_util.skip_unless_tool("random_lines1")
def test_get_inputs(self):
job_id = self._run_tool()["jobs"][0]["id"]
response = self.gi.jobs.get_inputs(job_id)
assert response == [{"name": "input", "dataset": {"src": "hda", "id": self.dataset_id}}]
@test_util.skip_unless_tool("random_lines1")
def test_get_outputs(self):
output = self._run_tool()
job_id, output_id = output["jobs"][0]["id"], output["outputs"][0]["id"]
response = self.gi.jobs.get_outputs(job_id)
assert response == [{"name": "out_file1", "dataset": {"src": "hda", "id": output_id}}]
@test_util.skip_unless_galaxy("release_20.05")
@test_util.skip_unless_tool("random_lines1")
def test_get_destination_params(self):
job_id = self._run_tool()["jobs"][0]["id"]
# In Galaxy 20.05 and 20.09 we need to wait for the job, otherwise
# `get_destination_params()` receives a 500 error code. Fixed upstream
# in https://github.com/galaxyproject/galaxy/commit/3e7f03cd1f229b8c9421ade02002728a33e131d8
self.gi.jobs.wait_for_job(job_id)
response = self.gi.jobs.get_destination_params(job_id)
assert "Runner" in response
assert "Runner Job ID" in response
assert "Handler" in response
@test_util.skip_unless_tool("random_lines1")
def test_search_jobs(self):
job_id = self._run_tool()["jobs"][0]["id"]
inputs = {
"num_lines": "1",
"input": {"src": "hda", "id": self.dataset_id},
"seed_source|seed_source_selector": "set_seed",
"seed_source|seed": "asdf",
}
response = self.gi.jobs.search_jobs("random_lines1", inputs)
assert job_id in [job["id"] for job in response]
@test_util.skip_unless_galaxy("release_20.01")
@test_util.skip_unless_tool("random_lines1")
def test_report_error(self):
output = self._run_tool()
job_id, output_id = output["jobs"][0]["id"], output["outputs"][0]["id"]
response = self.gi.jobs.report_error(job_id, output_id, "Test error")
# expected response when the Galaxy server does not have mail configured
assert response == {
"messages": [
[
"An error occurred sending the report by email: Mail is not configured for this Galaxy instance",
"danger",
]
]
}
@test_util.skip_unless_galaxy("release_20.05")
def test_show_job_lock(self):
status = self.gi.jobs.show_job_lock()
assert not status
@test_util.skip_unless_galaxy("release_20.05")
def test_update_job_lock(self):
status = self.gi.jobs.update_job_lock(active=True)
assert status
status = self.gi.jobs.update_job_lock(active=False)
assert not status
def test_cancel_job(self):
job_id = self._run_tool()["jobs"][0]["id"]
self.gi.jobs.cancel_job(job_id)
job = self.gi.jobs.wait_for_job(job_id, check=False)
assert job["state"] in ("deleted", "deleting")
def _run_tool(self, input_format: Literal["21.01", "legacy"] = "legacy") -> dict:
return super()._run_random_lines1(self.history_id, self.dataset_id, input_format=input_format)
bioblend-1.2.0/bioblend/_tests/TestGalaxyLibraries.py 0000664 0000000 0000000 00000017721 14447617043 0022665 0 ustar 00root root 0000000 0000000 import os
import tempfile
from . import (
GalaxyTestBase,
test_util,
)
FOO_DATA = "foo\nbar\n"
class TestGalaxyLibraries(GalaxyTestBase.GalaxyTestBase):
def setUp(self):
super().setUp()
self.name = "automated test library"
self.library = self.gi.libraries.create_library(
self.name, description="automated test", synopsis="automated test synopsis"
)
def tearDown(self):
self.gi.libraries.delete_library(self.library["id"])
def test_create_library(self):
assert self.library["name"] == self.name
assert self.library["id"] is not None
def test_get_libraries(self):
libraries_with_name = self.gi.libraries.get_libraries(name=self.name)
assert len([lib for lib in libraries_with_name if lib["id"] == self.library["id"]]) == 1
deleted_name = "deleted test library"
deleted_library_id = self.gi.libraries.create_library(
deleted_name, description="a deleted library", synopsis="automated test synopsis"
)["id"]
self.gi.libraries.delete_library(deleted_library_id)
deleted_libraries_with_name = self.gi.libraries.get_libraries(name=deleted_name, deleted=True)
assert len([lib for lib in deleted_libraries_with_name if lib["id"] == deleted_library_id]) == 1
all_non_deleted_libraries = self.gi.libraries.get_libraries(deleted=False)
assert len([lib for lib in all_non_deleted_libraries if lib["id"] == self.library["id"]]) == 1
assert [lib for lib in all_non_deleted_libraries if lib["id"] == deleted_library_id] == []
all_deleted_libraries = self.gi.libraries.get_libraries(deleted=True)
assert [lib for lib in all_deleted_libraries if lib["id"] == self.library["id"]] == []
assert len([lib for lib in all_deleted_libraries if lib["id"] == deleted_library_id]) == 1
all_libraries = self.gi.libraries.get_libraries(deleted=None)
assert len([lib for lib in all_libraries if lib["id"] == self.library["id"]]) == 1
assert len([lib for lib in all_libraries if lib["id"] == deleted_library_id]) == 1
def test_show_library(self):
library_data = self.gi.libraries.show_library(self.library["id"])
assert self.library["id"] == library_data["id"]
assert self.library["name"] == library_data["name"]
def test_upload_file_from_url(self):
url = "https://zenodo.org/record/582600/files/wildtype.fna?download=1"
ret = self.gi.libraries.upload_file_from_url(self.library["id"], url)
assert len(ret) == 1
ldda_dict = ret[0]
assert ldda_dict["name"] == url
def test_upload_file_contents(self):
ret = self.gi.libraries.upload_file_contents(self.library["id"], FOO_DATA)
assert len(ret) == 1
ldda_dict = ret[0]
assert ldda_dict["name"] == "Pasted Entry"
def test_upload_file_from_local_path(self):
with tempfile.NamedTemporaryFile(mode="w", prefix="bioblend_test_") as f:
f.write(FOO_DATA)
f.flush()
filename = f.name
ret = self.gi.libraries.upload_file_from_local_path(self.library["id"], filename)
assert len(ret) == 1
ldda_dict = ret[0]
assert ldda_dict["name"] == os.path.basename(filename)
# def test_upload_file_from_server(self):
# pass
def test_upload_from_galaxy_filesystem(self):
bnames = [f"f{i}.txt" for i in range(2)]
with tempfile.TemporaryDirectory() as tempdir:
fnames = [os.path.join(tempdir, _) for _ in bnames]
for fn in fnames:
with open(fn, "w") as f:
f.write(FOO_DATA)
filesystem_paths = "\n".join(fnames)
ret = self.gi.libraries.upload_from_galaxy_filesystem(self.library["id"], filesystem_paths)
for fn, dataset_dict in zip(fnames, ret):
dataset = self.gi.libraries.wait_for_dataset(self.library["id"], dataset_dict["id"])
assert dataset["state"] == "ok"
assert dataset["name"] == os.path.basename(fn)
ret = self.gi.libraries.upload_from_galaxy_filesystem(
self.library["id"], filesystem_paths, link_data_only="link_to_files"
)
for fn, dataset_dict in zip(fnames, ret):
dataset = self.gi.libraries.wait_for_dataset(self.library["id"], dataset_dict["id"])
assert dataset["state"] == "ok"
assert dataset["name"] == os.path.basename(fn)
def test_copy_from_dataset(self):
history = self.gi.histories.create_history()
dataset_id = self._test_dataset(history["id"])
self.gi.libraries.copy_from_dataset(self.library["id"], dataset_id, message="Copied from dataset")
def test_update_dataset(self):
library_id = self.library["id"]
dataset1 = self.gi.libraries.upload_file_contents(library_id, FOO_DATA)
updated_dataset = self.gi.libraries.update_library_dataset(
dataset1[0]["id"], name="Modified name", misc_info="Modified the name succesfully"
)
assert updated_dataset["name"] == "Modified name"
assert updated_dataset["misc_info"] == "Modified the name succesfully"
def test_library_permissions(self):
current_user = self.gi.users.get_current_user()
user_id_list_new = [current_user["id"]]
self.gi.libraries.set_library_permissions(
self.library["id"],
access_in=user_id_list_new,
modify_in=user_id_list_new,
add_in=user_id_list_new,
manage_in=user_id_list_new,
)
ret = self.gi.libraries.get_library_permissions(self.library["id"])
assert {_[1] for _ in ret["access_library_role_list"]} == set(user_id_list_new)
assert {_[1] for _ in ret["modify_library_role_list"]} == set(user_id_list_new)
assert {_[1] for _ in ret["add_library_item_role_list"]} == set(user_id_list_new)
assert {_[1] for _ in ret["manage_library_role_list"]} == set(user_id_list_new)
def test_dataset_permissions(self):
current_user = self.gi.users.get_current_user()
user_id_list_new = [current_user["id"]]
library_id = self.library["id"]
dataset1 = self.gi.libraries.upload_file_contents(library_id, FOO_DATA)
ret = self.gi.libraries.set_dataset_permissions(
dataset1[0]["id"], access_in=user_id_list_new, modify_in=user_id_list_new, manage_in=user_id_list_new
)
assert {_[1] for _ in ret["access_dataset_roles"]} == set(user_id_list_new)
assert {_[1] for _ in ret["modify_item_roles"]} == set(user_id_list_new)
assert {_[1] for _ in ret["manage_dataset_roles"]} == set(user_id_list_new)
# test get_dataset_permissions
ret_get = self.gi.libraries.get_dataset_permissions(dataset1[0]["id"])
assert {_[1] for _ in ret_get["access_dataset_roles"]} == set(user_id_list_new)
assert {_[1] for _ in ret_get["modify_item_roles"]} == set(user_id_list_new)
assert {_[1] for _ in ret_get["manage_dataset_roles"]} == set(user_id_list_new)
@test_util.skip_unless_galaxy("release_19.09")
def test_upload_file_contents_with_tags(self):
datasets = self.gi.libraries.upload_file_contents(self.library["id"], FOO_DATA, tags=["name:foobar", "barfoo"])
dataset_show = self.gi.libraries.show_dataset(self.library["id"], datasets[0]["id"])
assert dataset_show["tags"] == "name:foobar, barfoo"
@test_util.skip_unless_galaxy("release_19.09")
def test_update_dataset_tags(self):
datasets = self.gi.libraries.upload_file_contents(self.library["id"], FOO_DATA)
dataset_show = self.gi.libraries.show_dataset(self.library["id"], datasets[0]["id"])
assert dataset_show["tags"] == ""
updated_dataset = self.gi.libraries.update_library_dataset(datasets[0]["id"], tags=["name:foobar", "barfoo"])
dataset_show = self.gi.libraries.show_dataset(self.library["id"], updated_dataset["id"])
assert dataset_show["tags"] == "name:foobar, barfoo"
bioblend-1.2.0/bioblend/_tests/TestGalaxyObjects.py 0000664 0000000 0000000 00000115740 14447617043 0022342 0 ustar 00root root 0000000 0000000 # pylint: disable=C0103,E1101
import json
import os
import shutil
import socket
import sys
import tarfile
import tempfile
import unittest
import uuid
from ssl import SSLError
from typing import (
Any,
Callable,
Collection,
Dict,
Iterable,
List,
Set,
Tuple,
Union,
)
from urllib.error import URLError
from urllib.request import urlopen
import pytest
from typing_extensions import Literal
import bioblend
from bioblend.galaxy import dataset_collections
from bioblend.galaxy.objects import (
galaxy_instance,
wrappers,
)
from . import test_util
bioblend.set_stream_logger("test", level="INFO")
socket.setdefaulttimeout(10.0)
SAMPLE_FN = test_util.get_abspath(os.path.join("data", "paste_columns.ga"))
SAMPLE_WF_COLL_FN = test_util.get_abspath(os.path.join("data", "paste_columns_collections.ga"))
SAMPLE_WF_PARAMETER_INPUT_FN = test_util.get_abspath(os.path.join("data", "workflow_with_parameter_input.ga"))
FOO_DATA = "foo\nbar\n"
FOO_DATA_2 = "foo2\nbar2\n"
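# Static workflow dictionary used to construct a wrappers.Workflow without contacting Galaxy: two data inputs (steps 571 and 572) feeding the Paste1 tool (step 573)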
SAMPLE_WF_DICT = {
"deleted": False,
"id": "9005c5112febe774",
"inputs": {
"571": {"label": "Input Dataset", "value": ""},
"572": {"label": "Input Dataset", "value": ""},
},
"model_class": "StoredWorkflow",
"name": "paste_columns",
"owner": "user_foo",
"published": False,
"steps": {
"571": {
"id": 571,
"input_steps": {},
"tool_id": None,
"tool_inputs": {"name": "Input Dataset"},
"tool_version": None,
"type": "data_input",
},
"572": {
"id": 572,
"input_steps": {},
"tool_id": None,
"tool_inputs": {"name": "Input Dataset"},
"tool_version": None,
"type": "data_input",
},
"573": {
"id": 573,
"input_steps": {
"input1": {"source_step": 571, "step_output": "output"},
"input2": {"source_step": 572, "step_output": "output"},
},
"tool_id": "Paste1",
"tool_inputs": {
"delimiter": '"T"',
"input1": "null",
"input2": "null",
},
"tool_version": "1.0.0",
"type": "tool",
},
},
"tags": [],
"url": "/api/workflows/9005c5112febe774",
}
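# Static workflow invocation dictionary used as a test fixture (two invocation steps, the second one associated with a job)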
SAMPLE_INV_DICT: Dict[str, Any] = {
"history_id": "2f94e8ae9edff68a",
"id": "df7a1f0c02a5b08e",
"inputs": {"0": {"id": "a7db2fac67043c7e", "src": "hda", "uuid": "7932ffe0-2340-4952-8857-dbaa50f1f46a"}},
"model_class": "WorkflowInvocation",
"state": "ready",
"steps": [
{
"action": None,
"id": "d413a19dec13d11e",
"job_id": None,
"model_class": "WorkflowInvocationStep",
"order_index": 0,
"state": None,
"update_time": "2015-10-31T22:00:26",
"workflow_step_id": "cbbbf59e8f08c98c",
"workflow_step_label": None,
"workflow_step_uuid": "b81250fd-3278-4e6a-b269-56a1f01ef485",
},
{
"action": None,
"id": "2f94e8ae9edff68a",
"job_id": "e89067bb68bee7a0",
"model_class": "WorkflowInvocationStep",
"order_index": 1,
"state": "new",
"update_time": "2015-10-31T22:00:26",
"workflow_step_id": "964b37715ec9bd22",
"workflow_step_label": None,
"workflow_step_uuid": "e62440b8-e911-408b-b124-e05435d3125e",
},
],
"update_time": "2015-10-31T22:00:26",
"uuid": "c8aa2b1c-801a-11e5-a9e5-8ca98228593c",
"workflow_id": "03501d7626bd192f",
}
def is_reachable(url: str) -> bool:
res = None
try:
res = urlopen(url, timeout=5)
except (SSLError, URLError, socket.timeout):
return False
if res is not None:
res.close()
return True
def upload_from_fs(
lib: wrappers.Library, bnames: Iterable[str], **kwargs: Any
) -> Tuple[List[wrappers.LibraryDataset], List[str]]:
tempdir = tempfile.mkdtemp(prefix="bioblend_test_")
try:
fnames = [os.path.join(tempdir, _) for _ in bnames]
for fn in fnames:
with open(fn, "w") as f:
f.write(FOO_DATA)
dss = lib.upload_from_galaxy_fs(fnames, **kwargs)
finally:
shutil.rmtree(tempdir)
return dss, fnames
class MockWrapper(wrappers.Wrapper):
BASE_ATTRS = ("a", "b")
a: int
b: List[int]
def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, **kwargs)
class TestWrapper(unittest.TestCase):
def setUp(self):
self.d: Dict[str, Any] = {"a": 1, "b": [2, 3], "c": {"x": 4}}
with pytest.raises(TypeError):
wrappers.Wrapper(self.d)
self.w = MockWrapper(self.d)
def test_initialize(self):
for k in MockWrapper.BASE_ATTRS:
assert getattr(self.w, k) == self.d[k]
self.w.a = 222
self.w.b[0] = 222
assert self.w.a == 222
assert self.w.b[0] == 222
assert self.d["a"] == 1
assert self.d["b"][0] == 2
with pytest.raises(AttributeError):
_ = self.w.foo # type: ignore[attr-defined]
with pytest.raises(AttributeError):
self.w.foo = 0 # type: ignore[assignment]
def test_taint(self):
assert not self.w.is_modified
self.w.a = 111 # pylint: disable=W0201
assert self.w.is_modified
def test_serialize(self):
w = MockWrapper.from_json(self.w.to_json())
assert w.wrapped == self.w.wrapped
def test_clone(self):
w = self.w.clone()
assert w.wrapped == self.w.wrapped
w.b[0] = 111
assert self.w.b[0] == 2
def test_kwargs(self):
parent = MockWrapper({"a": 10})
w = MockWrapper(self.d, parent=parent)
assert w.parent is parent
with pytest.raises(AttributeError):
w.parent = 0 # type: ignore[assignment,misc]
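# Illustrative recap (not part of the original test suite): the tests above pin
# down the Wrapper contract -- only names listed in BASE_ATTRS become
# attributes, the wrapped dict is copied rather than aliased, and attribute
# assignment marks the wrapper as modified. A minimal standalone sketch using
# the MockWrapper defined above:
def _demo_wrapper_contract() -> None:
    source = {"a": 1, "b": [2, 3]}
    w = MockWrapper(source)
    w.a = 42  # flips w.is_modified
    w.b[0] = 99  # mutates only the wrapper's own copy
    assert w.is_modified
    assert source == {"a": 1, "b": [2, 3]}  # the original dict is untouched
    assert MockWrapper.from_json(w.to_json()).wrapped == w.wrapped  # JSON round-trip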
@test_util.skip_unless_galaxy()
class GalaxyObjectsTestBase(unittest.TestCase):
gi: galaxy_instance.GalaxyInstance
@classmethod
def setUpClass(cls) -> None:
galaxy_key = os.environ["BIOBLEND_GALAXY_API_KEY"]
galaxy_url = os.environ["BIOBLEND_GALAXY_URL"]
cls.gi = galaxy_instance.GalaxyInstance(galaxy_url, galaxy_key)
class TestWorkflow(GalaxyObjectsTestBase):
def setUp(self):
self.wf = wrappers.Workflow(SAMPLE_WF_DICT)
def test_initialize(self):
assert self.wf.id == "9005c5112febe774"
assert self.wf.name == "paste_columns"
assert not self.wf.deleted
assert self.wf.owner == "user_foo"
assert not self.wf.published
assert self.wf.tags == []
assert self.wf.input_labels_to_ids == {"Input Dataset": {"571", "572"}}
assert self.wf.tool_labels_to_ids == {"Paste1": {"573"}}
assert self.wf.data_input_ids == {"571", "572"}
assert self.wf.source_ids == {"571", "572"}
assert self.wf.sink_ids == {"573"}
def test_dag(self):
inv_dag: Dict[str, Set[str]] = {}
for h, tails in self.wf.dag.items():
for t in tails:
inv_dag.setdefault(str(t), set()).add(h)
assert self.wf.inv_dag == inv_dag
heads = set(self.wf.dag)
assert heads == set.union(*self.wf.inv_dag.values())
tails = set(self.wf.inv_dag)
assert tails == set.union(*self.wf.dag.values())
ids = self.wf.sorted_step_ids()
assert set(ids) == heads | tails
for h, tails in self.wf.dag.items():
for t in tails:
assert ids.index(h) < ids.index(t)
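# Worked example (illustrative): in SAMPLE_WF_DICT above, steps 571 and 572
# both feed step 573, so the shapes checked by this test should be
#     dag     == {"571": {"573"}, "572": {"573"}}
#     inv_dag == {"573": {"571", "572"}}
# A generic inversion mirroring the loop in this test:
def _invert_dag(dag: Dict[str, Set[str]]) -> Dict[str, Set[str]]:
    inv: Dict[str, Set[str]] = {}
    for head, tails in dag.items():
        for tail in tails:
            inv.setdefault(tail, set()).add(head)
    return inv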
def test_steps(self):
steps = SAMPLE_WF_DICT["steps"]
assert isinstance(steps, dict)
for sid, s in self.wf.steps.items():
assert isinstance(s, wrappers.Step)
assert s.id == sid
assert sid in steps
assert s.parent is self.wf
assert self.wf.data_input_ids == {"571", "572"}
assert self.wf.tool_ids == {"573"}
def test_taint(self):
assert not self.wf.is_modified
self.wf.steps["571"].tool_id = "foo"
assert self.wf.is_modified
def test_input_map(self):
history = wrappers.History({}, gi=self.gi)
library = wrappers.Library({}, gi=self.gi)
hda = wrappers.HistoryDatasetAssociation({"id": "hda_id"}, container=history, gi=self.gi)
ldda = wrappers.LibraryDatasetDatasetAssociation({"id": "ldda_id"}, container=library, gi=self.gi)
input_map = self.wf._convert_input_map({"0": hda, "1": ldda, "2": {"id": "hda2_id", "src": "hda"}})
assert input_map == {
"0": {"id": "hda_id", "src": "hda"},
"1": {"id": "ldda_id", "src": "ldda"},
"2": {"id": "hda2_id", "src": "hda"},
}
@test_util.skip_unless_galaxy("release_19.09")
class TestInvocation(GalaxyObjectsTestBase):
dataset: wrappers.HistoryDatasetAssociation
history: wrappers.History
inv: wrappers.Invocation
workflow: wrappers.Workflow
workflow_pause: wrappers.Workflow
@classmethod
def setUpClass(cls) -> None:
super().setUpClass()
cls.inv = wrappers.Invocation(SAMPLE_INV_DICT, gi=cls.gi)
with open(SAMPLE_FN) as f:
cls.workflow = cls.gi.workflows.import_new(f.read())
path_pause = test_util.get_abspath(os.path.join("data", "test_workflow_pause.ga"))
with open(path_pause) as f:
cls.workflow_pause = cls.gi.workflows.import_new(f.read())
cls.history = cls.gi.histories.create(name="TestInvocation")
cls.dataset = cls.history.paste_content("1\t2\t3")
@classmethod
def tearDownClass(cls):
cls.history.delete(purge=True)
def test_initialize(self):
assert self.inv.workflow_id == "03501d7626bd192f"
assert self.inv.history_id == "2f94e8ae9edff68a"
assert self.inv.id == "df7a1f0c02a5b08e"
assert self.inv.state == "ready"
assert self.inv.update_time == "2015-10-31T22:00:26"
assert self.inv.uuid == "c8aa2b1c-801a-11e5-a9e5-8ca98228593c"
def test_initialize_steps(self):
for step, step_dict in zip(self.inv.steps, SAMPLE_INV_DICT["steps"]):
assert isinstance(step_dict, dict)
assert isinstance(step, wrappers.InvocationStep)
assert step.parent is self.inv
assert step.id == step_dict["id"]
assert step.job_id == step_dict["job_id"]
assert step.order_index == step_dict["order_index"]
assert step.state == step_dict["state"]
assert step.update_time == step_dict["update_time"]
assert step.workflow_step_id == step_dict["workflow_step_id"]
assert step.workflow_step_label == step_dict["workflow_step_label"]
assert step.workflow_step_uuid == step_dict["workflow_step_uuid"]
def test_initialize_inputs(self):
for i, input in enumerate(self.inv.inputs):
assert input == {**SAMPLE_INV_DICT["inputs"][str(i)], "label": str(i)}
def test_sorted_step_ids(self):
assert self.inv.sorted_step_ids() == ["d413a19dec13d11e", "2f94e8ae9edff68a"]
def test_step_states(self):
assert self.inv.step_states() == {None, "new"}
def test_number_of_steps(self):
assert self.inv.number_of_steps() == 2
def test_sorted_steps_by(self):
assert len(self.inv.sorted_steps_by()) == 2
steps = self.inv.sorted_steps_by(step_ids={"2f94e8ae9edff68a"})
assert len(steps) == 1
assert steps[0].id == "2f94e8ae9edff68a"
assert self.inv.sorted_steps_by(step_ids={"unmatched_id"}) == []
steps = self.inv.sorted_steps_by(states={"new"})
assert len(steps) == 1
assert steps[0].state == "new"
assert self.inv.sorted_steps_by(states={"unmatched_state"}) == []
steps = self.inv.sorted_steps_by(indices={0}, states={None, "new"})
assert len(steps) == 1
assert steps[0].order_index == 0
assert self.inv.sorted_steps_by(indices={2}) == []
def test_cancel(self):
inv = self._obj_invoke_workflow()
inv.cancel()
assert inv.state == "cancelled"
def test_wait(self):
inv = self._obj_invoke_workflow()
inv.wait()
assert inv.state == "scheduled"
def test_refresh(self):
inv = self._obj_invoke_workflow()
inv.state = "placeholder"
# use wait_for_invocation() directly, because inv.wait() will update inv automatically
self.gi.gi.invocations.wait_for_invocation(inv.id)
inv.refresh()
assert inv.state == "scheduled"
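# Illustrative contrast (sketch) between the two waiting styles used in this class:
#     inv.wait()  # blocks until scheduled and refreshes the wrapper in place
#     self.gi.gi.invocations.wait_for_invocation(inv.id)  # blocks, returns a plain dict,
#                                                         # leaves the wrapper stale until inv.refresh()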
def test_run_step_actions(self):
inv = self.workflow_pause.invoke(
inputs={"0": self.dataset},
history=self.history,
)
for _ in range(20):
with pytest.raises(bioblend.TimeoutException):
inv.wait(maxwait=0.5, interval=0.5)
inv.refresh()
if len(inv.steps) >= 3:
break
assert inv.steps[2].action is None
inv.run_step_actions([inv.steps[2]], [True])
assert inv.steps[2].action is True
def test_summary(self):
inv = self._obj_invoke_workflow()
inv.wait()
summary = inv.summary()
assert summary["populated_state"] == "ok"
def test_step_jobs_summary(self):
inv = self._obj_invoke_workflow()
inv.wait()
step_jobs_summary = inv.step_jobs_summary()
assert len(step_jobs_summary) == 1
assert step_jobs_summary[0]["populated_state"] == "ok"
def test_report(self):
inv = self._obj_invoke_workflow()
report = inv.report()
assert report["workflows"] == {self.workflow.id: {"name": "paste_columns"}}
@test_util.skip_unless_galaxy("release_20.09")
def test_biocompute_object(self):
inv = self._obj_invoke_workflow()
inv.wait()
biocompute_object = inv.biocompute_object()
assert len(biocompute_object["description_domain"]["pipeline_steps"]) == 1
def _obj_invoke_workflow(self) -> wrappers.Invocation:
return self.workflow.invoke(
inputs={"Input 1": self.dataset, "Input 2": self.dataset},
history=self.history,
inputs_by="name",
)
@test_util.skip_unless_galaxy("release_19.09")
class TestObjInvocationClient(GalaxyObjectsTestBase):
history: wrappers.History
inv: wrappers.Invocation
workflow: wrappers.Workflow
@classmethod
def setUpClass(cls) -> None:
super().setUpClass()
with open(SAMPLE_FN) as f:
cls.workflow = cls.gi.workflows.import_new(f.read())
cls.history = cls.gi.histories.create(name="TestGalaxyObjInvocationClient")
dataset = cls.history.paste_content("1\t2\t3")
cls.inv = cls.workflow.invoke(
inputs={"Input 1": dataset, "Input 2": dataset},
history=cls.history,
inputs_by="name",
)
cls.inv.wait()
@classmethod
def tearDownClass(cls):
cls.history.delete(purge=True)
def test_get(self):
inv = self.gi.invocations.get(self.inv.id)
assert inv.id == self.inv.id
assert inv.workflow_id == self.workflow.id
assert inv.history_id == self.history.id
assert inv.state == "scheduled"
assert inv.update_time == self.inv.update_time
assert inv.uuid == self.inv.uuid
def test_get_previews(self):
previews = self.gi.invocations.get_previews()
assert {type(preview) for preview in previews} == {wrappers.InvocationPreview}
inv_preview = next(p for p in previews if p.id == self.inv.id)
assert inv_preview.id == self.inv.id
assert inv_preview.workflow_id == self.workflow.id
assert inv_preview.history_id == self.history.id
assert inv_preview.state == "scheduled"
assert inv_preview.update_time == self.inv.update_time
assert inv_preview.uuid == self.inv.uuid
def test_list(self):
invs = self.gi.invocations.list()
inv = next(i for i in invs if i.id == self.inv.id)
assert inv.id == self.inv.id
assert inv.workflow_id == self.workflow.id
assert inv.history_id == self.history.id
assert inv.state == "scheduled"
assert inv.update_time == self.inv.update_time
assert inv.uuid == self.inv.uuid
assert len(self.inv.steps) > 0
history = self.gi.histories.create(name="TestGalaxyObjInvocationClientList")
assert self.gi.invocations.list(history=history) == []
history.delete(purge=True)
class TestGalaxyInstance(GalaxyObjectsTestBase):
def test_library(self):
name = f"test_{uuid.uuid4().hex}"
description, synopsis = "D", "S"
lib = self.gi.libraries.create(name, description=description, synopsis=synopsis)
assert lib.name == name
assert lib.description == description
assert lib.synopsis == synopsis
assert len(lib.content_infos) == 1 # root folder
assert len(lib.folder_ids) == 1
assert len(lib.dataset_ids) == 0
assert lib.id in [_.id for _ in self.gi.libraries.list()]
lib.delete()
assert not lib.is_mapped
def test_workflow_from_str(self):
with open(SAMPLE_FN) as f:
wf = self.gi.workflows.import_new(f.read())
self._check_and_del_workflow(wf)
def test_workflow_collections_from_str(self):
with open(SAMPLE_WF_COLL_FN) as f:
wf = self.gi.workflows.import_new(f.read())
self._check_and_del_workflow(wf)
def test_workflow_parameter_input(self):
with open(SAMPLE_WF_PARAMETER_INPUT_FN) as f:
self.gi.workflows.import_new(f.read())
def test_workflow_from_dict(self):
with open(SAMPLE_FN) as f:
wf = self.gi.workflows.import_new(json.load(f))
self._check_and_del_workflow(wf)
def test_workflow_publish_from_dict(self):
with open(SAMPLE_FN) as f:
wf = self.gi.workflows.import_new(json.load(f), publish=True)
self._check_and_del_workflow(wf, check_is_public=True)
def test_workflow_missing_tools(self):
with open(SAMPLE_FN) as f:
wf_dump = json.load(f)
wf_info = self.gi.gi.workflows.import_workflow_dict(wf_dump)
wf_dict = self.gi.gi.workflows.show_workflow(wf_info["id"])
for id_, step in wf_dict["steps"].items():
if step["type"] == "tool":
for k in "tool_inputs", "tool_version":
wf_dict["steps"][id_][k] = None
wf = wrappers.Workflow(wf_dict, gi=self.gi)
assert not wf.is_runnable
with pytest.raises(RuntimeError):
wf.invoke()
wf.delete()
def test_workflow_export(self):
with open(SAMPLE_FN) as f:
wf1 = self.gi.workflows.import_new(f.read())
wf2 = self.gi.workflows.import_new(wf1.export())
assert wf1.id != wf2.id
for wf in wf1, wf2:
self._check_and_del_workflow(wf)
def _check_and_del_workflow(self, wf: wrappers.Workflow, check_is_public: bool = False) -> None:
# Galaxy appends additional text to imported workflow names
assert wf.name.startswith("paste_columns")
assert len(wf.steps) == 3
for step_id, step in wf.steps.items():
assert isinstance(step, wrappers.Step)
assert step_id == step.id
assert isinstance(step.tool_inputs, dict)
if step.type == "tool":
assert step.tool_id is not None
assert step.tool_version is not None
assert isinstance(step.input_steps, dict)
elif step.type in ("data_collection_input", "data_input"):
assert step.tool_id is None
assert step.tool_version is None
assert step.input_steps == {}
wf_ids = {_.id for _ in self.gi.workflows.list()}
assert wf.id in wf_ids
if check_is_public:
assert wf.published
wf.delete()
# This test is not very accurate, because:
# * we can't publish a workflow from the API
# * we can't directly get another user's workflow
def test_workflow_from_shared(self):
all_prevs = {_.id: _ for _ in self.gi.workflows.get_previews(published=True)}
pub_only_ids = set(all_prevs).difference(_.id for _ in self.gi.workflows.get_previews())
if pub_only_ids:
wf_id = pub_only_ids.pop()
imported = self.gi.workflows.import_shared(wf_id)
assert isinstance(imported, wrappers.Workflow)
imported.delete()
else:
self.skipTest("no published workflows, manually publish a workflow to run this test")
def test_get_libraries(self):
self._test_multi_get("libraries")
def test_get_histories(self):
self._test_multi_get("histories")
def test_get_workflows(self):
self._test_multi_get("workflows")
def _normalized_functions(
self, obj_type: Literal["histories", "libraries", "workflows"]
) -> Tuple[Callable, Dict[str, Any]]:
if obj_type == "libraries":
create: Callable = self.gi.libraries.create
del_kwargs = {}
elif obj_type == "histories":
create = self.gi.histories.create
del_kwargs = {"purge": True}
elif obj_type == "workflows":
def create(name):
with open(SAMPLE_FN) as f:
d = json.load(f)
d["name"] = name
return self.gi.workflows.import_new(d)
del_kwargs = {}
return create, del_kwargs
def _test_multi_get(self, obj_type: Literal["histories", "libraries", "workflows"]) -> None:
obj_gi_client = getattr(self.gi, obj_type)
create, del_kwargs = self._normalized_functions(obj_type)
def ids(seq: Iterable[wrappers.Wrapper]) -> Set[str]:
return {_.id for _ in seq}
names = [f"test_{uuid.uuid4().hex}" for _ in range(2)]
objs = []
try:
objs = [create(_) for _ in names]
assert ids(objs) <= ids(obj_gi_client.list())
if obj_type != "workflows":
filtered = obj_gi_client.list(name=names[0])
assert len(filtered) == 1
assert filtered[0].id == objs[0].id
del_id = objs[-1].id
objs.pop().delete(**del_kwargs)
assert del_id in ids(obj_gi_client.get_previews(deleted=True))
else:
# Galaxy appends info strings to imported workflow names
prev = obj_gi_client.get_previews()[0]
filtered = obj_gi_client.list(name=prev.name)
assert len(filtered) == 1
assert filtered[0].id == prev.id
finally:
for o in objs:
o.delete(**del_kwargs)
def test_delete_libraries_by_name(self):
self._test_delete_by_name("libraries")
self._test_delete_by_ambiguous_name("libraries")
def test_delete_histories_by_name(self):
self._test_delete_by_name("histories")
self._test_delete_by_ambiguous_name("histories")
def test_delete_workflows_by_name(self):
self._test_delete_by_name("workflows")
self._test_delete_by_ambiguous_name("workflows")
def _test_delete_by_name(self, obj_type: Literal["histories", "libraries", "workflows"]) -> None:
obj_gi_client = getattr(self.gi, obj_type)
create, del_kwargs = self._normalized_functions(obj_type)
name = f"test_{uuid.uuid4().hex}"
create(name)
prevs = [_ for _ in obj_gi_client.get_previews(name=name) if not _.deleted]
assert len(prevs) == 1
del_kwargs["name"] = name
obj_gi_client.delete(**del_kwargs)
prevs = [_ for _ in obj_gi_client.get_previews(name=name) if not _.deleted]
assert len(prevs) == 0
def _test_delete_by_ambiguous_name(self, obj_type: Literal["histories", "libraries", "workflows"]) -> None:
obj_gi_client = getattr(self.gi, obj_type)
create, del_kwargs = self._normalized_functions(obj_type)
name = f"test_{uuid.uuid4().hex}"
objs = [create(name) for _ in range(2)]
prevs = [_ for _ in obj_gi_client.get_previews(name=name) if not _.deleted]
assert len(prevs) == len(objs)
del_kwargs["name"] = name
with pytest.raises(ValueError):
obj_gi_client.delete(**del_kwargs)
# Cleanup
del del_kwargs["name"]
for prev in prevs:
del_kwargs["id_"] = prev.id
obj_gi_client.delete(**del_kwargs)
class TestLibrary(GalaxyObjectsTestBase):
# just a URL that can be expected to always be up
DS_URL = "https://tools.ietf.org/rfc/rfc1866.txt"
def setUp(self):
super().setUp()
self.lib = self.gi.libraries.create(f"test_{uuid.uuid4().hex}")
def tearDown(self):
self.lib.delete()
def test_root_folder(self):
r = self.lib.root_folder
assert r.parent is None
def test_folder(self):
name = f"test_{uuid.uuid4().hex}"
desc = "D"
folder = self.lib.create_folder(name, description=desc)
assert folder.name == name
assert folder.description == desc
assert folder.container is self.lib
assert folder.parent is not None
assert folder.parent.id == self.lib.root_folder.id
assert len(self.lib.content_infos) == 2
assert len(self.lib.folder_ids) == 2
assert folder.id in self.lib.folder_ids
retrieved = self.lib.get_folder(folder.id)
assert folder.id == retrieved.id
def _check_datasets(self, dss: Collection[wrappers.LibraryDataset]) -> None:
assert len(dss) == len(self.lib.dataset_ids)
assert {_.id for _ in dss} == set(self.lib.dataset_ids)
for ds in dss:
assert ds.container is self.lib
def test_dataset(self):
folder = self.lib.create_folder(f"test_{uuid.uuid4().hex}")
ds = self.lib.upload_data(FOO_DATA, folder=folder)
assert len(self.lib.content_infos) == 3
assert len(self.lib.folder_ids) == 2
self._check_datasets([ds])
def test_dataset_from_url(self):
if is_reachable(self.DS_URL):
ds = self.lib.upload_from_url(self.DS_URL)
self._check_datasets([ds])
else:
self.skipTest(f"{self.DS_URL} not reachable")
def test_dataset_from_local(self):
with tempfile.NamedTemporaryFile(mode="w", prefix="bioblend_test_") as f:
f.write(FOO_DATA)
f.flush()
ds = self.lib.upload_from_local(f.name)
self._check_datasets([ds])
def test_datasets_from_fs(self):
bnames = [f"f{i}.txt" for i in range(2)]
dss, fnames = upload_from_fs(self.lib, bnames)
self._check_datasets(dss)
dss, fnames = upload_from_fs(self.lib, bnames, link_data_only="link_to_files")
for ds, fn in zip(dss, fnames):
assert ds.file_name == fn
def test_copy_from_dataset(self):
hist = self.gi.histories.create(f"test_{uuid.uuid4().hex}")
try:
hda = hist.paste_content(FOO_DATA)
ds = self.lib.copy_from_dataset(hda)
finally:
hist.delete(purge=True)
self._check_datasets([ds])
def test_get_dataset(self):
ds = self.lib.upload_data(FOO_DATA)
retrieved = self.lib.get_dataset(ds.id)
assert ds.id == retrieved.id
def test_get_datasets(self):
bnames = [f"f{i}.txt" for i in range(2)]
dss, _ = upload_from_fs(self.lib, bnames)
retrieved = self.lib.get_datasets()
assert len(dss) == len(retrieved)
assert {_.id for _ in dss} == {_.id for _ in retrieved}
name = f"/{bnames[0]}"
selected = self.lib.get_datasets(name=name)
assert len(selected) == 1
assert selected[0].name == bnames[0]
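# Note (illustrative, based on the lookup above): library datasets are matched
# by their full folder path, so a file uploaded to the root folder is
# addressed with a leading slash, e.g.:
#     selected = lib.get_datasets(name="/f0.txt")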
class TestLDContents(GalaxyObjectsTestBase):
def setUp(self):
super().setUp()
self.lib = self.gi.libraries.create(f"test_{uuid.uuid4().hex}")
self.ds = self.lib.upload_data(FOO_DATA)
self.ds.wait()
def tearDown(self):
self.lib.delete()
def test_dataset_get_stream(self):
for idx, c in enumerate(self.ds.get_stream(chunk_size=1)):
assert FOO_DATA[idx].encode() == c
def test_dataset_peek(self):
fetched_data = self.ds.peek(chunk_size=4)
assert FOO_DATA[0:4].encode() == fetched_data
def test_dataset_download(self):
with tempfile.TemporaryFile() as f:
self.ds.download(f)
f.seek(0)
assert FOO_DATA.encode() == f.read()
def test_dataset_get_contents(self):
assert FOO_DATA.encode() == self.ds.get_contents()
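# Summary (illustrative) of the four read paths exercised above for a library
# dataset ds, all of which deal in bytes:
#     ds.get_stream(chunk_size=n)  # iterator over byte chunks
#     ds.peek(chunk_size=n)        # only the first n bytes
#     ds.download(file_object)     # stream into an open binary file object
#     ds.get_contents()            # the whole content at once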
def test_dataset_delete(self):
self.ds.delete()
# Cannot test this yet because the 'deleted' attribute is not exported
# by the API at the moment
# assert self.ds.deleted
def test_dataset_update(self):
new_name = f"test_{uuid.uuid4().hex}"
new_misc_info = f"Annotation for {new_name}"
new_genome_build = "hg19"
updated_ldda = self.ds.update(name=new_name, misc_info=new_misc_info, genome_build=new_genome_build)
assert self.ds.id == updated_ldda.id
assert self.ds.name == new_name
assert self.ds.misc_info == new_misc_info
assert self.ds.genome_build == new_genome_build
class TestHistory(GalaxyObjectsTestBase):
def setUp(self):
super().setUp()
self.hist = self.gi.histories.create(f"test_{uuid.uuid4().hex}")
def tearDown(self):
self.hist.delete(purge=True)
def test_create_delete(self):
name = f"test_{uuid.uuid4().hex}"
hist = self.gi.histories.create(name)
assert hist.name == name
hist_id = hist.id
assert hist_id in [_.id for _ in self.gi.histories.list()]
hist.delete(purge=True)
assert not hist.is_mapped
h = self.gi.histories.get(hist_id)
assert h.deleted
def _check_dataset(self, hda: wrappers.HistoryDatasetAssociation) -> None:
assert isinstance(hda, wrappers.HistoryDatasetAssociation)
assert hda.container is self.hist
assert len(self.hist.dataset_ids) == 1
assert self.hist.dataset_ids[0] == hda.id
def test_import_dataset(self):
lib = self.gi.libraries.create(f"test_{uuid.uuid4().hex}")
lds = lib.upload_data(FOO_DATA)
assert len(self.hist.dataset_ids) == 0
hda = self.hist.import_dataset(lds)
lib.delete()
self._check_dataset(hda)
def test_upload_file(self):
with tempfile.NamedTemporaryFile(mode="w", prefix="bioblend_test_") as f:
f.write(FOO_DATA)
f.flush()
hda = self.hist.upload_file(f.name)
self._check_dataset(hda)
def test_paste_content(self):
hda = self.hist.paste_content(FOO_DATA)
self._check_dataset(hda)
def test_get_dataset(self):
hda = self.hist.paste_content(FOO_DATA)
retrieved = self.hist.get_dataset(hda.id)
assert hda.id == retrieved.id
def test_get_datasets(self):
bnames = [f"f{i}.txt" for i in range(2)]
lib = self.gi.libraries.create(f"test_{uuid.uuid4().hex}")
lds = upload_from_fs(lib, bnames)[0]
hdas = [self.hist.import_dataset(_) for _ in lds]
lib.delete()
retrieved = self.hist.get_datasets()
assert len(hdas) == len(retrieved)
assert {_.id for _ in hdas} == {_.id for _ in retrieved}
selected = self.hist.get_datasets(name=bnames[0])
assert len(selected) == 1
assert selected[0].name == bnames[0]
def test_export_and_download(self):
jeha_id = self.hist.export(wait=True, maxwait=60)
assert jeha_id
tempdir = tempfile.mkdtemp(prefix="bioblend_test_")
temp_fn = os.path.join(tempdir, "export.tar.gz")
try:
with open(temp_fn, "wb") as fo:
self.hist.download(jeha_id, fo)
assert tarfile.is_tarfile(temp_fn)
finally:
shutil.rmtree(tempdir)
def test_update(self):
new_name = f"test_{uuid.uuid4().hex}"
new_annotation = f"Annotation for {new_name}"
new_tags = ["tag1", "tag2"]
updated_hist = self.hist.update(name=new_name, annotation=new_annotation, tags=new_tags)
assert self.hist.id == updated_hist.id
assert self.hist.name == new_name
assert self.hist.annotation == new_annotation
assert self.hist.tags == new_tags
updated_hist = self.hist.update(published=True)
assert self.hist.id == updated_hist.id
assert self.hist.published
def test_create_dataset_collection(self):
self._create_collection_description()
hdca = self.hist.create_dataset_collection(self.collection_description)
assert isinstance(hdca, wrappers.HistoryDatasetCollectionAssociation)
assert hdca.collection_type == "list"
assert hdca.container is self.hist
assert len(hdca.elements) == 2
assert self.dataset1.id == hdca.elements[0]["object"]["id"]
assert self.dataset2.id == hdca.elements[1]["object"]["id"]
def test_delete_dataset_collection(self):
self._create_collection_description()
hdca = self.hist.create_dataset_collection(self.collection_description)
hdca.delete()
assert hdca.deleted
def _create_collection_description(self) -> None:
self.dataset1 = self.hist.paste_content(FOO_DATA)
self.dataset2 = self.hist.paste_content(FOO_DATA_2)
self.collection_description = dataset_collections.CollectionDescription(
name="MyDatasetList",
elements=[
dataset_collections.HistoryDatasetElement(name="sample1", id=self.dataset1.id),
dataset_collections.HistoryDatasetElement(name="sample2", id=self.dataset2.id),
],
)
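# Sketch (an assumption, not asserted by these tests): the description built
# above is expected to serialize to roughly the following payload when
# create_dataset_collection() posts it to Galaxy:
#     {
#         "name": "MyDatasetList",
#         "collection_type": "list",
#         "element_identifiers": [
#             {"name": "sample1", "id": "<dataset1 id>", "src": "hda"},
#             {"name": "sample2", "id": "<dataset2 id>", "src": "hda"},
#         ],
#     }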
class TestHDAContents(GalaxyObjectsTestBase):
def setUp(self):
super().setUp()
self.hist = self.gi.histories.create(f"test_{uuid.uuid4().hex}")
self.ds = self.hist.paste_content(FOO_DATA)
def tearDown(self):
self.hist.delete(purge=True)
def test_dataset_get_stream(self):
for idx, c in enumerate(self.ds.get_stream(chunk_size=1)):
assert FOO_DATA[idx].encode() == c
def test_dataset_peek(self):
fetched_data = self.ds.peek(chunk_size=4)
assert FOO_DATA[0:4].encode() == fetched_data
def test_dataset_download(self):
with tempfile.TemporaryFile() as f:
self.ds.download(f)
f.seek(0)
assert FOO_DATA.encode() == f.read()
def test_dataset_get_contents(self):
assert FOO_DATA.encode() == self.ds.get_contents()
def test_dataset_update(self):
new_name = f"test_{uuid.uuid4().hex}"
new_annotation = f"Annotation for {new_name}"
new_genome_build = "hg19"
updated_hda = self.ds.update(name=new_name, annotation=new_annotation, genome_build=new_genome_build)
assert self.ds.id == updated_hda.id
assert self.ds.name == new_name
assert self.ds.annotation == new_annotation
assert self.ds.genome_build == new_genome_build
def test_dataset_delete(self):
self.ds.delete()
assert self.ds.deleted
assert not self.ds.purged
def test_dataset_purge(self):
self.ds.delete(purge=True)
assert self.ds.deleted
assert self.ds.purged
@test_util.skip_unless_galaxy("release_19.09")
class TestRunWorkflow(GalaxyObjectsTestBase):
def setUp(self):
super().setUp()
self.lib = self.gi.libraries.create(f"test_{uuid.uuid4().hex}")
with open(SAMPLE_FN) as f:
self.wf = self.gi.workflows.import_new(f.read())
self.contents = ["one\ntwo\n", "1\n2\n"]
self.inputs = [self.lib.upload_data(_) for _ in self.contents]
def tearDown(self):
self.wf.delete()
self.lib.delete()
def _test(self, existing_hist: bool = False, pass_params: bool = False) -> None:
hist_name = f"test_{uuid.uuid4().hex}"
if existing_hist:
hist: Union[str, wrappers.History] = self.gi.histories.create(hist_name)
else:
hist = hist_name
if pass_params:
params = {"Paste1": {"delimiter": "U"}}
sep = "_" # 'U' maps to '_' in the paste tool
else:
params = None
sep = "\t" # default
input_map = {"Input 1": self.inputs[0], "Input 2": self.inputs[1]}
sys.stderr.write(os.linesep)
inv = self.wf.invoke(inputs=input_map, params=params, history=hist, inputs_by="name")
out_hist = self.gi.histories.get(inv.history_id)
inv.wait()
last_step = inv.sorted_steps_by()[-1]
out_ds = last_step.get_outputs()["out_file1"]
assert out_ds.container.id == out_hist.id
res = out_ds.get_contents()
exp_rows = zip(*(_.splitlines() for _ in self.contents))
exp_res = ("\n".join(sep.join(t) for t in exp_rows) + "\n").encode()
assert res == exp_res
if isinstance(hist, wrappers.History): # i.e. existing_hist == True
assert out_hist.id == hist.id
out_hist.delete(purge=True)
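# Worked example (illustrative): with self.contents == ["one\ntwo\n", "1\n2\n"]
# the zip above pairs ("one", "1") and ("two", "2"), so with the default tab
# separator exp_res is b"one\t1\ntwo\t2\n", while pass_params=True selects the
# Paste1 underscore delimiter and yields b"one_1\ntwo_2\n".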
def test_existing_history(self) -> None:
self._test(existing_hist=True)
def test_new_history(self) -> None:
self._test(existing_hist=False)
def test_params(self) -> None:
self._test(pass_params=True)
@test_util.skip_unless_galaxy("release_19.09")
class TestRunDatasetCollectionWorkflow(GalaxyObjectsTestBase):
def setUp(self):
super().setUp()
with open(SAMPLE_WF_COLL_FN) as f:
self.wf = self.gi.workflows.import_new(f.read())
self.hist = self.gi.histories.create(f"test_{uuid.uuid4().hex}")
def tearDown(self):
self.wf.delete()
self.hist.delete(purge=True)
def test_run_workflow_with_dataset_collection(self):
dataset1 = self.hist.paste_content(FOO_DATA)
dataset2 = self.hist.paste_content(FOO_DATA_2)
collection_description = dataset_collections.CollectionDescription(
name="MyDatasetList",
elements=[
dataset_collections.HistoryDatasetElement(name="sample1", id=dataset1.id),
dataset_collections.HistoryDatasetElement(name="sample2", id=dataset2.id),
],
)
dataset_collection = self.hist.create_dataset_collection(collection_description)
assert len(self.hist.content_infos) == 3
input_map = {"0": dataset_collection, "1": dataset1}
inv = self.wf.invoke(input_map, history=self.hist)
inv.wait()
self.hist.refresh()
assert len(self.hist.content_infos) == 6
last_step = inv.sorted_steps_by()[-1]
out_hdca = last_step.get_output_collections()["out_file1"]
assert out_hdca.collection_type == "list"
assert len(out_hdca.elements) == 2
assert out_hdca.container.id == self.hist.id
class TestJob(GalaxyObjectsTestBase):
def test_get(self):
job_prevs = self.gi.jobs.get_previews()
if len(job_prevs) > 0:
job_prev = job_prevs[0]
assert isinstance(job_prev, wrappers.JobPreview)
job = self.gi.jobs.get(job_prev.id)
assert isinstance(job, wrappers.Job)
assert job.id == job_prev.id
for job in self.gi.jobs.list():
assert isinstance(job, wrappers.Job)
bioblend-1.2.0/bioblend/_tests/TestGalaxyQuotas.py 0000664 0000000 0000000 00000004070 14447617043 0022216 0 ustar 00root root 0000000 0000000 import uuid
from . import GalaxyTestBase
class TestGalaxyQuotas(GalaxyTestBase.GalaxyTestBase):
def setUp(self):
super().setUp()
# Quota names must be unique, and they're impossible to delete
# without accessing the database.
self.quota_name = f"BioBlend-Test-Quota-{uuid.uuid4().hex}"
self.quota = self.gi.quotas.create_quota(self.quota_name, "testing", "100 GB", "=", default="registered")
def tearDown(self):
self.gi.quotas.update_quota(self.quota["id"], default="registered")
self.gi.quotas.update_quota(self.quota["id"], default="no")
self.gi.quotas.delete_quota(self.quota["id"])
def test_create_quota(self):
quota = self.gi.quotas.show_quota(self.quota["id"])
assert quota["name"] == self.quota_name
assert quota["bytes"] == 107374182400
assert quota["operation"] == "="
assert quota["description"] == "testing"
def test_get_quotas(self):
quotas = self.gi.quotas.get_quotas()
assert self.quota["id"] in [quota["id"] for quota in quotas]
def test_update_quota(self):
response = self.gi.quotas.update_quota(
self.quota["id"],
name=self.quota_name + "-new",
description="asdf",
default="registered",
operation="-",
amount=".01 TB",
)
assert f"""Quota '{self.quota_name}' has been renamed to '{self.quota_name}-new'""" in response
quota = self.gi.quotas.show_quota(self.quota["id"])
assert quota["name"] == self.quota_name + "-new"
assert quota["bytes"] == 10995116277
assert quota["operation"] == "-"
assert quota["description"] == "asdf"
def test_delete_undelete_quota(self):
self.gi.quotas.update_quota(self.quota["id"], default="no")
response = self.gi.quotas.delete_quota(self.quota["id"])
assert response == "Deleted 1 quotas: " + self.quota_name
response = self.gi.quotas.undelete_quota(self.quota["id"])
assert response == "Undeleted 1 quotas: " + self.quota_name
bioblend-1.2.0/bioblend/_tests/TestGalaxyRoles.py 0000664 0000000 0000000 00000001434 14447617043 0022027 0 ustar 00root root 0000000 0000000 import uuid
from . import GalaxyTestBase
class TestGalaxyRoles(GalaxyTestBase.GalaxyTestBase):
def setUp(self):
super().setUp()
self.name = f"test_{uuid.uuid4().hex}"
self.description = "automated test role"
self.role = self.gi.roles.create_role(self.name, self.description)
def tearDown(self):
# As of 2017/07/26, deleting a role is not possible through the API
pass
def test_get_roles(self):
roles = self.gi.roles.get_roles()
for role in roles:
assert role["id"] is not None
assert role["name"] is not None
def test_create_role(self):
assert self.role["name"] == self.name
assert self.role["description"] == self.description
assert self.role["id"] is not None
bioblend-1.2.0/bioblend/_tests/TestGalaxyToolContainerResolution.py 0000664 0000000 0000000 00000011076 14447617043 0025612 0 ustar 00root root 0000000 0000000 """
Test functions in bioblend.galaxy.container_resolution
"""
from . import (
GalaxyTestBase,
test_util,
)
class TestGalaxyContainerResolution(GalaxyTestBase.GalaxyTestBase):
@test_util.skip_unless_galaxy("release_22.05")
def test_get_container_resolvers(self):
container_resolvers = self.gi.container_resolution.get_container_resolvers()
assert isinstance(container_resolvers, list)
assert len(container_resolvers) > 0
assert isinstance(container_resolvers[0], dict)
assert container_resolvers[0]["model_class"] == "ExplicitContainerResolver"
assert container_resolvers[0]["resolver_type"] == "explicit"
assert container_resolvers[0]["can_uninstall_dependencies"] is False
assert container_resolvers[0]["builds_on_resolution"] is False
@test_util.skip_unless_galaxy("release_22.05")
def test_show_container_resolver(self):
container_resolver = self.gi.container_resolution.show_container_resolver(0)
print(container_resolver)
assert isinstance(container_resolver, dict)
assert container_resolver["model_class"] == "ExplicitContainerResolver"
assert container_resolver["resolver_type"] == "explicit"
assert container_resolver["can_uninstall_dependencies"] is False
assert container_resolver["builds_on_resolution"] is False
@test_util.skip_unless_galaxy("release_22.05")
def test_resolve(self):
tool = self.gi.container_resolution.resolve(tool_id="CONVERTER_parquet_to_csv")
print(tool)
assert isinstance(tool, dict)
tool_requirements_only = self.gi.container_resolution.resolve(
tool_id="CONVERTER_parquet_to_csv", requirements_only=True
)
assert isinstance(tool_requirements_only, dict)
@test_util.skip_unless_galaxy("release_22.05")
def test_resolve_toolbox(self):
toolbox = self.gi.container_resolution.resolve_toolbox()
assert isinstance(toolbox, list)
assert len(toolbox) > 0
assert isinstance(toolbox[0], dict)
toolbox_by_tool_ids = self.gi.container_resolution.resolve_toolbox(tool_ids=[toolbox[0]["tool_id"]])
assert isinstance(toolbox_by_tool_ids, list)
assert len(toolbox_by_tool_ids) == 1
assert isinstance(toolbox_by_tool_ids[0], dict)
toolbox_by_resolver_type = self.gi.container_resolution.resolve_toolbox(resolver_type="mulled")
assert isinstance(toolbox_by_resolver_type, list)
assert len(toolbox_by_resolver_type) > 0
assert isinstance(toolbox_by_resolver_type[0], dict)
assert len(toolbox) == len(toolbox_by_resolver_type)
for tool in toolbox_by_resolver_type:
print(tool)
assert (
tool["status"]["dependency_type"] is None
or tool["status"]["container_resolver"]["resolver_type"] == "mulled"
)
toolbox_by_container_type = self.gi.container_resolution.resolve_toolbox(container_type="docker")
assert isinstance(toolbox_by_container_type, list)
assert len(toolbox_by_container_type) > 0
assert isinstance(toolbox_by_container_type[0], dict)
assert len(toolbox) == len(toolbox_by_container_type)
for tool in toolbox_by_container_type:
assert tool["status"]["dependency_type"] is None or tool["status"]["dependency_type"] == "docker"
assert (
tool["status"]["dependency_type"] is None or tool["status"]["container_description"]["type"] == "docker"
)
toolbox_requirements_only = self.gi.container_resolution.resolve_toolbox(requirements_only=True)
assert isinstance(toolbox_requirements_only, list)
assert len(toolbox_requirements_only) > 0
assert isinstance(toolbox_requirements_only[0], dict)
assert len(toolbox) == len(toolbox_requirements_only)
# TODO: unless containers are available this may fall back to conda by default?
# depending on Galaxy's config
# toolbox_by_index = self.gi.container_resolution.resolve_toolbox(tool_ids=[toolbox[0]['tool_id']], index=0, install=True)
# assert isinstance(toolbox_by_index, list)
# assert len(toolbox_by_index) > 0
# assert isinstance(toolbox_by_index[0], dict)
# TODO: unless containers are available this may fall back to conda by default?
# depending on Galaxy's config
# def test_resolve_toolbox_with_install(self):
# toolbox = self.gi.container_resolution.resolve_toolbox_with_install(tool_ids=[])
# assert isinstance(toolbox, list)
# assert len(toolbox) == 0
bioblend-1.2.0/bioblend/_tests/TestGalaxyToolData.py 0000664 0000000 0000000 00000001032 14447617043 0022444 0 ustar 00root root 0000000 0000000 from . import GalaxyTestBase
class TestGalaxyToolData(GalaxyTestBase.GalaxyTestBase):
def test_get_data_tables(self):
tables = self.gi.tool_data.get_data_tables()
for table in tables:
assert table["name"] is not None
def test_show_data_table(self):
tables = self.gi.tool_data.get_data_tables()
table = self.gi.tool_data.show_data_table(tables[0]["name"])
assert table["columns"] is not None
assert table["fields"] is not None
assert table["name"] is not None
bioblend-1.2.0/bioblend/_tests/TestGalaxyToolDependencies.py 0000664 0000000 0000000 00000003124 14447617043 0024165 0 ustar 00root root 0000000 0000000 """
Test functions in bioblend.galaxy.tool_dependencies
"""
from . import (
GalaxyTestBase,
test_util,
)
class TestGalaxyToolDependencies(GalaxyTestBase.GalaxyTestBase):
@test_util.skip_unless_galaxy("release_20.01")
def test_summarize_toolbox(self):
toolbox_summary = self.gi.tool_dependencies.summarize_toolbox()
assert isinstance(toolbox_summary, list)
assert len(toolbox_summary) > 0
toolbox_summary_by_tool = self.gi.tool_dependencies.summarize_toolbox(index_by="tools")
assert isinstance(toolbox_summary_by_tool, list)
assert len(toolbox_summary_by_tool) > 0
assert isinstance(toolbox_summary_by_tool[0], dict)
assert "tool_ids" in toolbox_summary_by_tool[0]
assert isinstance(toolbox_summary_by_tool[0]["tool_ids"], list)
tool_id = toolbox_summary_by_tool[0]["tool_ids"][0]
toolbox_summary_select_tool_ids = self.gi.tool_dependencies.summarize_toolbox(
index_by="tools", tool_ids=[tool_id]
)
assert isinstance(toolbox_summary_select_tool_ids, list)
assert len(toolbox_summary_select_tool_ids) == 1
assert toolbox_summary_select_tool_ids[0]["tool_ids"][0] == tool_id
@test_util.skip_unless_galaxy("release_20.01")
def test_unused_dependency_paths(self):
unused_paths = self.gi.tool_dependencies.unused_dependency_paths()
assert isinstance(unused_paths, list)
@test_util.skip_unless_galaxy("release_20.01")
def test_delete_unused_dependency_paths(self):
self.gi.tool_dependencies.delete_unused_dependency_paths(paths=[])
bioblend-1.2.0/bioblend/_tests/TestGalaxyToolInputs.py 0000664 0000000 0000000 00000002476 14447617043 0023072 0 ustar 00root root 0000000 0000000 from bioblend.galaxy.tools.inputs import (
conditional,
dataset,
inputs,
repeat,
)
def test_conditional():
# Build up example inputs for random_lines1
as_dict = (
inputs()
.set("num_lines", 5)
.set("input", dataset("encoded1"))
.set("seed_source", conditional().set("seed_source_selector", "set_seed").set("seed", "asdf"))
.to_dict()
)
assert as_dict["num_lines"] == 5
assert as_dict["input"]["src"] == "hda"
assert as_dict["input"]["id"] == "encoded1"
assert as_dict["seed_source|seed_source_selector"] == "set_seed"
assert as_dict["seed_source|seed"] == "asdf"
def test_repeat():
# Build up inputs for cat1
as_dict = (
inputs()
.set("input1", dataset("encoded1"))
.set(
"queries",
repeat()
.instance(inputs().set_dataset_param("input2", "encoded2"))
.instance(inputs().set_dataset_param("input2", "encoded3")),
)
.to_dict()
)
assert as_dict["input1"]["src"] == "hda"
assert as_dict["input1"]["id"] == "encoded1"
assert as_dict["queries_0|input2"]["src"] == "hda"
assert as_dict["queries_0|input2"]["id"] == "encoded2"
assert as_dict["queries_1|input2"]["src"] == "hda"
assert as_dict["queries_1|input2"]["id"] == "encoded3"
bioblend-1.2.0/bioblend/_tests/TestGalaxyTools.py 0000664 0000000 0000000 00000016405 14447617043 0022047 0 ustar 00root root 0000000 0000000 """
"""
import os
from typing import (
Any,
Dict,
)
from bioblend.galaxy.tools.inputs import (
conditional,
dataset,
inputs,
repeat,
)
from . import (
GalaxyTestBase,
test_util,
)
class TestGalaxyTools(GalaxyTestBase.GalaxyTestBase):
def test_get_tools(self):
# Test requires target Galaxy is configured with at least one tool.
tools = self.gi.tools.get_tools()
assert len(tools) > 0
assert all(map(self._assert_is_tool_rep, tools))
def test_get_tool_panel(self):
# Test requires target Galaxy is configured with at least one tool
# section.
tool_panel = self.gi.tools.get_tool_panel()
sections = [s for s in tool_panel if "elems" in s]
assert len(sections) > 0
assert all(map(self._assert_is_tool_rep, sections[0]["elems"]))
def _assert_is_tool_rep(self, data):
assert data["model_class"].endswith("Tool")
# Special tools like SetMetadataTool may have a model_class different from
# Tool, but they all seem to end in "Tool".
for key in ["name", "id", "version"]:
assert key in data
return True
def test_paste_content(self):
history = self.gi.histories.create_history(name="test_paste_data history")
paste_text = "line 1\nline 2\rline 3\r\nline 4"
tool_output = self.gi.tools.paste_content(paste_text, history["id"])
assert len(tool_output["outputs"]) == 1
# All lines in the resulting dataset should end with "\n"
expected_contents = ("\n".join(paste_text.splitlines()) + "\n").encode()
self._wait_and_verify_dataset(tool_output["outputs"][0]["id"], expected_contents)
# Same with space_to_tab=True
tool_output = self.gi.tools.paste_content(paste_text, history["id"], space_to_tab=True)
assert len(tool_output["outputs"]) == 1
expected_contents = ("\n".join("\t".join(_.split()) for _ in paste_text.splitlines()) + "\n").encode()
self._wait_and_verify_dataset(tool_output["outputs"][0]["id"], expected_contents)
def test_upload_file(self):
history = self.gi.histories.create_history(name="test_upload_file history")
fn = test_util.get_abspath("test_util.py")
file_name = "test1"
tool_output = self.gi.tools.upload_file(
fn,
# The first parameter could also be a plain file path, of course
history_id=history["id"],
file_name=file_name,
dbkey="?",
file_type="txt",
)
self._wait_for_and_verify_upload(tool_output, file_name, fn, expected_dbkey="?")
def test_upload_file_dbkey(self):
history = self.gi.histories.create_history(name="test_upload_file history")
fn = test_util.get_abspath("test_util.py")
file_name = "test1"
dbkey = "hg19"
tool_output = self.gi.tools.upload_file(
fn,
history_id=history["id"],
file_name=file_name,
dbkey=dbkey,
file_type="txt",
)
self._wait_for_and_verify_upload(tool_output, file_name, fn, expected_dbkey=dbkey)
@test_util.skip_unless_tool("random_lines1")
def test_run_random_lines(self):
# Run second test case from randomlines.xml
history_id = self.gi.histories.create_history(name="test_run_random_lines history")["id"]
with open(test_util.get_abspath(os.path.join("data", "1.bed"))) as f:
contents = f.read()
dataset_id = self._test_dataset(history_id, contents=contents)
tool_inputs = (
inputs()
.set("num_lines", "1")
.set("input", dataset(dataset_id))
.set("seed_source", conditional().set("seed_source_selector", "set_seed").set("seed", "asdf"))
)
tool_output = self.gi.tools.run_tool(history_id=history_id, tool_id="random_lines1", tool_inputs=tool_inputs)
assert len(tool_output["outputs"]) == 1
# TODO: Wait for results and verify has 1 line and is
# chr5 131424298 131424460 CCDS4149.1_cds_0_0_chr5_131424299_f 0 +
@test_util.skip_unless_tool("cat1")
def test_run_cat1(self):
history_id = self.gi.histories.create_history(name="test_run_cat1 history")["id"]
dataset1_id = self._test_dataset(history_id, contents="1 2 3")
dataset2_id = self._test_dataset(history_id, contents="4 5 6")
dataset3_id = self._test_dataset(history_id, contents="7 8 9")
tool_inputs = (
inputs()
.set("input1", dataset(dataset1_id))
.set(
"queries",
repeat()
.instance(inputs().set("input2", dataset(dataset2_id)))
.instance(inputs().set("input2", dataset(dataset3_id))),
)
)
tool_output = self.gi.tools.run_tool(history_id=history_id, tool_id="cat1", tool_inputs=tool_inputs)
assert len(tool_output["outputs"]) == 1
# TODO: Wait for results and verify it has 3 lines - 1 2 3, 4 5 6,
# and 7 8 9.
@test_util.skip_unless_galaxy("release_19.05")
@test_util.skip_unless_tool("CONVERTER_fasta_to_bowtie_color_index")
def test_tool_dependency_install(self):
installed_dependencies = self.gi.tools.install_dependencies("CONVERTER_fasta_to_bowtie_color_index")
assert any(
True for d in installed_dependencies if d.get("name") == "bowtie" and d.get("dependency_type") == "conda"
), f"installed_dependencies is {installed_dependencies}"
status = self.gi.tools.uninstall_dependencies("CONVERTER_fasta_to_bowtie_color_index")
assert status[0]["model_class"] == "NullDependency", status
@test_util.skip_unless_tool("CONVERTER_fasta_to_bowtie_color_index")
def test_tool_requirements(self):
tool_requirements = self.gi.tools.requirements("CONVERTER_fasta_to_bowtie_color_index")
assert any(
True
for tr in tool_requirements
if {"dependency_type", "version"} <= set(tr.keys()) and tr.get("name") == "bowtie"
), f"tool_requirements is {tool_requirements}"
@test_util.skip_unless_tool("CONVERTER_fasta_to_bowtie_color_index")
def test_reload(self):
response = self.gi.tools.reload("CONVERTER_fasta_to_bowtie_color_index")
assert isinstance(response, dict)
assert "message" in response
assert "id" in response["message"]
@test_util.skip_unless_tool("sra_source")
def test_get_citations(self):
citations = self.gi.tools.get_citations("sra_source")
assert len(citations) == 2
def _wait_for_and_verify_upload(
self, tool_output: Dict[str, Any], file_name: str, fn: str, expected_dbkey: str = "?"
) -> None:
assert len(tool_output["outputs"]) == 1
output = tool_output["outputs"][0]
assert output["name"] == file_name
with open(fn, "rb") as f:
    expected_contents = f.read()
self._wait_and_verify_dataset(output["id"], expected_contents)
assert output["genome_build"] == expected_dbkey
@test_util.skip_unless_tool("random_lines1")
def test_get_tool_model(self):
history_id = self.gi.histories.create_history(name="test_run_random_lines history")["id"]
tool_model = self.gi.tools.build(tool_id="random_lines1", history_id=history_id)
assert len(tool_model["inputs"]) == 3
bioblend-1.2.0/bioblend/_tests/TestGalaxyUsers.py 0000664 0000000 0000000 00000014341 14447617043 0022045 0 ustar 00root root 0000000 0000000 import bioblend.galaxy
from . import (
GalaxyTestBase,
test_util,
)
class TestGalaxyUsers(GalaxyTestBase.GalaxyTestBase):
def test_get_users(self):
users = self.gi.users.get_users()
for user in users:
assert user["id"] is not None
assert user["email"] is not None
def test_show_user(self):
current_user = self.gi.users.get_current_user()
user = self.gi.users.show_user(current_user["id"])
assert user["id"] == current_user["id"]
assert user["username"] == current_user["username"]
assert user["email"] == current_user["email"]
# The following 2 assertions randomly fail
# assert user["nice_total_disk_usage"] == current_user["nice_total_disk_usage"]
# assert user["total_disk_usage"] == current_user["total_disk_usage"]
def test_create_remote_user(self):
# WARNING: only admins can create users!
# WARNING: Users cannot be purged through the Galaxy API, so execute
# this test only on a disposable Galaxy instance!
if not self.gi.config.get_config()["use_remote_user"]:
self.skipTest("This Galaxy instance is not configured to use remote users")
new_user_email = "newuser@example.org"
user = self.gi.users.create_remote_user(new_user_email)
assert user["email"] == new_user_email
if self.gi.config.get_config()["allow_user_deletion"]:
deleted_user = self.gi.users.delete_user(user["id"])
assert deleted_user["email"] == new_user_email
assert deleted_user["deleted"]
def test_create_local_user(self):
# WARNING: only admins can create users!
# WARNING: Users cannot be purged through the Galaxy API, so execute
# this test only on a disposable Galaxy instance!
if self.gi.config.get_config()["use_remote_user"]:
self.skipTest("This Galaxy instance is not configured to use local users")
new_username = test_util.random_string()
new_user_email = f"{new_username}@example.org"
password = test_util.random_string(20)
new_user = self.gi.users.create_local_user(new_username, new_user_email, password)
assert new_user["username"] == new_username
assert new_user["email"] == new_user_email
# test a BioBlend GalaxyInstance can be created using username+password
user_gi = bioblend.galaxy.GalaxyInstance(url=self.gi.base_url, email=new_user_email, password=password)
assert user_gi.users.get_current_user()["email"] == new_user_email
# test deletion
if self.gi.config.get_config()["allow_user_deletion"]:
deleted_user = self.gi.users.delete_user(new_user["id"])
assert deleted_user["email"] == new_user_email
assert deleted_user["deleted"]
def test_get_current_user(self):
user = self.gi.users.get_current_user()
assert user["id"] is not None
assert user["username"] is not None
assert user["email"] is not None
assert user["nice_total_disk_usage"] is not None
assert user["total_disk_usage"] is not None
def test_update_user(self):
# WARNING: only admins can create users!
# WARNING: Users cannot be purged through the Galaxy API, so execute
# this test only on a disposable Galaxy instance!
if self.gi.config.get_config()["use_remote_user"]:
self.skipTest("This Galaxy instance is not configured to use local users")
new_username = test_util.random_string()
new_user = self.gi.users.create_local_user(
new_username, f"{new_username}@example.org", test_util.random_string(20)
)
new_user_id = new_user["id"]
updated_username = test_util.random_string()
updated_user_email = f"{updated_username}@example.org"
self.gi.users.update_user(new_user_id, username=updated_username, email=updated_user_email)
updated_user = self.gi.users.show_user(new_user_id)
assert updated_user["username"] == updated_username
assert updated_user["email"] == updated_user_email
if self.gi.config.get_config()["allow_user_deletion"]:
self.gi.users.delete_user(new_user_id)
def test_get_user_apikey(self):
# Test getting the API key of the current user, which surely has one
user_id = self.gi.users.get_current_user()["id"]
apikey = self.gi.users.get_user_apikey(user_id)
assert apikey and apikey != "Not available."
# Test getting the API key of a new user, which doesn't have one
new_username = test_util.random_string()
new_user_id = self.gi.users.create_local_user(
new_username, f"{new_username}@example.org", test_util.random_string(20)
)["id"]
assert self.gi.users.get_user_apikey(new_user_id) == "Not available."
@test_util.skip_unless_galaxy("release_21.01")
def test_get_or_create_user_apikey(self):
# Check that get_or_create_user_apikey() doesn't regenerate an existing API key
user_id = self.gi.users.get_current_user()["id"]
apikey = self.gi.users.get_user_apikey(user_id)
assert self.gi.users.get_or_create_user_apikey(user_id) == apikey
# Check that get_or_create_user_apikey() generates an API key for a new user
new_username = test_util.random_string()
new_user_id = self.gi.users.create_local_user(
new_username, f"{new_username}@example.org", test_util.random_string(20)
)["id"]
new_apikey = self.gi.users.get_or_create_user_apikey(new_user_id)
assert new_apikey and new_apikey != "Not available."
def test_create_user_apikey(self):
# Test creating an API key for a new user
new_username = test_util.random_string()
new_user_id = self.gi.users.create_local_user(
new_username, f"{new_username}@example.org", test_util.random_string(20)
)["id"]
new_apikey = self.gi.users.create_user_apikey(new_user_id)
assert new_apikey and new_apikey != "Not available."
# Test regenerating an API key for a user that already has one
regenerated_apikey = self.gi.users.create_user_apikey(new_user_id)
assert regenerated_apikey and regenerated_apikey not in (new_apikey, "Not available.")
bioblend-1.2.0/bioblend/_tests/TestGalaxyWorkflows.py 0000664 0000000 0000000 00000032442 14447617043 0022743 0 ustar 00root root 0000000 0000000 import json
import os
import shutil
import tempfile
import time
from typing import (
Any,
Dict,
List,
)
import pytest
from bioblend import ConnectionError
from . import (
GalaxyTestBase,
test_util,
)
class TestGalaxyWorkflows(GalaxyTestBase.GalaxyTestBase):
@test_util.skip_unless_tool("cat1")
@test_util.skip_unless_tool("cat")
def test_workflow_scheduling(self):
path = test_util.get_abspath(os.path.join("data", "test_workflow_pause.ga"))
workflow = self.gi.workflows.import_workflow_from_local_path(path)
workflow_id = workflow["id"]
history_id = self.gi.histories.create_history(name="TestWorkflowState")["id"]
invocations = self.gi.workflows.get_invocations(workflow_id)
assert len(invocations) == 0
# Try invalid invocation (no input)
with pytest.raises(ConnectionError):
self.gi.workflows.invoke_workflow(workflow["id"])
dataset1_id = self._test_dataset(history_id)
invocation = self.gi.workflows.invoke_workflow(
workflow["id"],
inputs={"0": {"src": "hda", "id": dataset1_id}},
)
assert invocation["state"] == "new"
invocation_id = invocation["id"]
invocations = self.gi.workflows.get_invocations(workflow_id)
assert len(invocations) == 1
assert invocations[0]["id"] == invocation_id
def invocation_steps_by_order_index() -> Dict[int, Dict[str, Any]]:
invocation = self.gi.workflows.show_invocation(workflow_id, invocation_id)
return {s["order_index"]: s for s in invocation["steps"]}
for _ in range(20):
if 2 in invocation_steps_by_order_index():
break
time.sleep(0.5)
invocation = self.gi.workflows.show_invocation(workflow_id, invocation_id)
assert invocation["state"] == "ready"
steps = invocation_steps_by_order_index()
pause_step = steps[2]
assert self.gi.workflows.show_invocation_step(workflow_id, invocation_id, pause_step["id"])["action"] is None
self.gi.workflows.run_invocation_step_action(workflow_id, invocation_id, pause_step["id"], action=True)
assert self.gi.workflows.show_invocation_step(workflow_id, invocation_id, pause_step["id"])["action"]
for _ in range(20):
invocation = self.gi.workflows.show_invocation(workflow_id, invocation_id)
if invocation["state"] == "scheduled":
break
time.sleep(0.5)
invocation = self.gi.workflows.show_invocation(workflow_id, invocation_id)
assert invocation["state"] == "scheduled"
def test_invoke_workflow_parameters_normalized(self):
path = test_util.get_abspath(os.path.join("data", "paste_columns_subworkflow.ga"))
workflow_id = self.gi.workflows.import_workflow_from_local_path(path)["id"]
history_id = self.gi.histories.create_history(name="TestWorkflowInvokeParametersNormalized")["id"]
dataset_id = self._test_dataset(history_id)
with pytest.raises(ConnectionError):
self.gi.workflows.invoke_workflow(
workflow_id, inputs={"0": {"src": "hda", "id": dataset_id}}, params={"1": {"1|2": "comma"}}
)
self.gi.workflows.invoke_workflow(
workflow_id,
inputs={"0": {"src": "hda", "id": dataset_id}},
params={"1": {"1|2": "comma"}},
parameters_normalized=True,
)
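# Note (illustrative, based on the two calls above): "1|2" is a parameter key
# already flattened into Galaxy's "<conditional>|<param>" form for step 1;
# params shaped like
#     {"1": {"1|2": "comma"}}
# are only accepted when parameters_normalized=True is passed, otherwise the
# invocation fails with a ConnectionError as asserted above.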
@test_util.skip_unless_galaxy("release_19.09")
@test_util.skip_unless_tool("cat1")
@test_util.skip_unless_tool("cat")
def test_cancelling_workflow_scheduling(self):
path = test_util.get_abspath(os.path.join("data", "test_workflow_pause.ga"))
workflow = self.gi.workflows.import_workflow_from_local_path(path)
workflow_id = workflow["id"]
history_id = self.gi.histories.create_history(name="TestWorkflowState")["id"]
dataset1_id = self._test_dataset(history_id)
invocations = self.gi.workflows.get_invocations(workflow_id)
assert len(invocations) == 0
invocation = self.gi.workflows.invoke_workflow(
workflow["id"],
inputs={"0": {"src": "hda", "id": dataset1_id}},
)
invocation_id = invocation["id"]
invocations = self.gi.workflows.get_invocations(workflow_id)
assert len(invocations) == 1
assert invocations[0]["id"] == invocation_id
invocation = self.gi.workflows.show_invocation(workflow_id, invocation_id)
assert invocation["state"] in ["new", "ready"]
self.gi.workflows.cancel_invocation(workflow_id, invocation_id)
invocation = self.gi.invocations.wait_for_invocation(invocation_id, check=False)
assert invocation["state"] == "cancelled"
def test_import_export_workflow_from_local_path(self):
with pytest.raises(TypeError):
self.gi.workflows.import_workflow_from_local_path(None) # type: ignore[arg-type]
path = test_util.get_abspath(os.path.join("data", "paste_columns.ga"))
imported_wf = self.gi.workflows.import_workflow_from_local_path(path)
assert isinstance(imported_wf, dict)
assert imported_wf["name"] == "paste_columns"
assert imported_wf["url"].startswith("/api/workflows/")
assert not imported_wf["deleted"]
assert not imported_wf["published"]
with pytest.raises(TypeError):
self.gi.workflows.export_workflow_to_local_path(None, None, None) # type: ignore[arg-type]
export_dir = tempfile.mkdtemp(prefix="bioblend_test_")
try:
self.gi.workflows.export_workflow_to_local_path(imported_wf["id"], export_dir)
dir_contents = os.listdir(export_dir)
assert len(dir_contents) == 1
export_path = os.path.join(export_dir, dir_contents[0])
with open(export_path) as f:
exported_wf_dict = json.load(f)
finally:
shutil.rmtree(export_dir)
assert isinstance(exported_wf_dict, dict)
def test_import_publish_workflow_from_local_path(self):
path = test_util.get_abspath(os.path.join("data", "paste_columns.ga"))
imported_wf = self.gi.workflows.import_workflow_from_local_path(path, publish=True)
assert isinstance(imported_wf, dict)
assert not imported_wf["deleted"]
assert imported_wf["published"]
def test_import_export_workflow_dict(self):
path = test_util.get_abspath(os.path.join("data", "paste_columns.ga"))
with open(path) as f:
wf_dict = json.load(f)
imported_wf = self.gi.workflows.import_workflow_dict(wf_dict)
assert isinstance(imported_wf, dict)
assert imported_wf["name"] == "paste_columns"
assert imported_wf["url"].startswith("/api/workflows/")
assert not imported_wf["deleted"]
assert not imported_wf["published"]
exported_wf_dict = self.gi.workflows.export_workflow_dict(imported_wf["id"])
assert isinstance(exported_wf_dict, dict)
def test_import_publish_workflow_dict(self):
path = test_util.get_abspath(os.path.join("data", "paste_columns.ga"))
with open(path) as f:
wf_dict = json.load(f)
imported_wf = self.gi.workflows.import_workflow_dict(wf_dict, publish=True)
assert isinstance(imported_wf, dict)
assert not imported_wf["deleted"]
assert imported_wf["published"]
def test_get_workflows(self):
path = test_util.get_abspath(os.path.join("data", "paste_columns.ga"))
workflow = self.gi.workflows.import_workflow_from_local_path(path)
all_wfs = self.gi.workflows.get_workflows()
assert len(all_wfs) > 0
wfs_with_name = self.gi.workflows.get_workflows(name=workflow["name"])
wf_list = [w for w in wfs_with_name if w["id"] == workflow["id"]]
assert len(wf_list) == 1
wf_data = wf_list[0]
if "create_time" in workflow: # Galaxy >= 20.01
assert wf_data["create_time"] == workflow["create_time"]
else: # Galaxy < 20.01
assert wf_data["url"] == workflow["url"]
def test_show_workflow(self):
path = test_util.get_abspath(os.path.join("data", "paste_columns.ga"))
wf = self.gi.workflows.import_workflow_from_local_path(path)
wf_data = self.gi.workflows.show_workflow(wf["id"])
assert wf_data["id"] == wf["id"]
assert wf_data["name"] == wf["name"]
assert wf_data["url"] == wf["url"]
assert len(wf_data["steps"]) == 3
assert wf_data["inputs"] is not None
def test_update_workflow_name(self):
path = test_util.get_abspath(os.path.join("data", "paste_columns.ga"))
wf = self.gi.workflows.import_workflow_from_local_path(path)
new_name = "new name"
updated_wf = self.gi.workflows.update_workflow(wf["id"], name=new_name)
assert updated_wf["name"] == new_name
@test_util.skip_unless_galaxy("release_21.01")
def test_update_workflow_published(self):
path = test_util.get_abspath(os.path.join("data", "paste_columns.ga"))
wf = self.gi.workflows.import_workflow_from_local_path(path)
assert not wf["published"]
updated_wf = self.gi.workflows.update_workflow(wf["id"], published=True)
assert updated_wf["published"]
updated_wf = self.gi.workflows.update_workflow(wf["id"], published=False)
assert not updated_wf["published"]
@test_util.skip_unless_galaxy(
"release_19.09"
) # due to Galaxy bug fixed in https://github.com/galaxyproject/galaxy/pull/9014
def test_show_workflow_versions(self):
path = test_util.get_abspath(os.path.join("data", "paste_columns.ga"))
wf = self.gi.workflows.import_workflow_from_local_path(path)
wf_data = self.gi.workflows.show_workflow(wf["id"])
assert wf_data["version"] == 0
new_name = "new name"
self.gi.workflows.update_workflow(wf["id"], name=new_name)
updated_wf = self.gi.workflows.show_workflow(wf["id"])
assert updated_wf["name"] == new_name
assert updated_wf["version"] == 1
updated_wf = self.gi.workflows.show_workflow(wf["id"], version=0)
assert updated_wf["name"] == "paste_columns"
assert updated_wf["version"] == 0
updated_wf = self.gi.workflows.show_workflow(wf["id"], version=1)
assert updated_wf["name"] == new_name
assert updated_wf["version"] == 1
@test_util.skip_unless_galaxy("release_19.09")
def test_extract_workflow_from_history(self):
path = test_util.get_abspath(os.path.join("data", "paste_columns.ga"))
wf = self.gi.workflows.import_workflow_from_local_path(path)
history_id = self.gi.histories.create_history(name="test_wf_invocation")["id"]
dataset1_id = self._test_dataset(history_id)
dataset = {"src": "hda", "id": dataset1_id}
invocation_id = self.gi.workflows.invoke_workflow(
wf["id"],
inputs={"Input 1": dataset, "Input 2": dataset},
history_id=history_id,
inputs_by="name",
)["id"]
invocation = self.gi.invocations.wait_for_invocation(invocation_id)
wf1 = self.gi.workflows.show_workflow(invocation["workflow_id"])
datasets = self.gi.histories.show_history(invocation["history_id"], contents=True)
dataset_hids = [dataset["hid"] for dataset in datasets]
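# Wait for all jobs of the invocation so their outputs exist before extracting a new workflow from the history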
job_ids = [step["job_id"] for step in invocation["steps"] if step["job_id"]]
for job_id in job_ids:
self.gi.jobs.wait_for_job(job_id)
new_workflow_name = "My new workflow!"
wf2 = self.gi.workflows.extract_workflow_from_history(
history_id=invocation["history_id"],
workflow_name=new_workflow_name,
job_ids=job_ids,
dataset_hids=dataset_hids,
)
wf2 = self.gi.workflows.show_workflow(wf2["id"])
assert wf2["name"] == new_workflow_name
assert len(wf1["steps"]) == len(wf2["steps"])
for i in range(len(wf1["steps"])):
assert wf1["steps"][str(i)]["type"] == wf2["steps"][str(i)]["type"]
assert wf1["steps"][str(i)]["tool_id"] == wf2["steps"][str(i)]["tool_id"]
def test_show_versions(self):
path = test_util.get_abspath(os.path.join("data", "paste_columns.ga"))
wf = self.gi.workflows.import_workflow_from_local_path(path)
versions = self.gi.workflows.show_versions(wf["id"])
assert len(versions) == 1
version = versions[0]
assert version["version"] == 0
assert "update_time" in version
assert "steps" in version
@test_util.skip_unless_galaxy("release_21.01")
def test_refactor_workflow(self):
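# Two refactoring actions: add a new data input labelled "foo", then rename that step's label to "bar"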
actions: List[Dict[str, Any]] = [
{"action_type": "add_input", "type": "data", "label": "foo"},
{"action_type": "update_step_label", "label": "bar", "step": {"label": "foo"}},
]
path = test_util.get_abspath(os.path.join("data", "paste_columns.ga"))
wf = self.gi.workflows.import_workflow_from_local_path(path)
response = self.gi.workflows.refactor_workflow(wf["id"], actions, dry_run=True)
assert len(response["action_executions"]) == len(actions)
assert response["dry_run"] is True
updated_steps = response["workflow"]["steps"]
assert len(updated_steps) == 4
assert {step["label"] for step in updated_steps.values()} == {"bar", None, "Input 1", "Input 2"}
bioblend-1.2.0/bioblend/_tests/TestToolshed.py 0000664 0000000 0000000 00000006625 14447617043 0021365 0 ustar 00root root 0000000 0000000 import os
import unittest
import bioblend
import bioblend.toolshed
from . import test_util
@test_util.skip_unless_toolshed()
class TestToolshed(unittest.TestCase):
def setUp(self):
toolshed_url = os.environ["BIOBLEND_TOOLSHED_URL"]
self.ts = bioblend.toolshed.ToolShedInstance(url=toolshed_url)
def test_categories_client(self):
# get_categories
categories = self.ts.categories.get_categories()
assert "Assembly" in [c["name"] for c in categories]
# we cannot test get_categories with deleted=True as it requires administrator status
# show_category
visualization_category_id = [c for c in categories if c["name"] == "Visualization"][0]["id"]
visualization_category = self.ts.categories.show_category(visualization_category_id)
assert visualization_category["description"] == "Tools for visualizing data"
# get_repositories
repositories = self.ts.categories.get_repositories(visualization_category_id)
repositories_reversed = self.ts.categories.get_repositories(visualization_category_id, sort_order="desc")
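# Ascending (default) and descending listings should contain the same repositories in mirrored order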
assert repositories["repositories"][0]["model_class"] == "Repository"
assert len(repositories["repositories"]) > 200
assert repositories["repositories"][0] == repositories_reversed["repositories"][-1]
def test_repositories_client(self):
# get_repositories
repositories = self.ts.repositories.get_repositories()
assert len(repositories) > 5000
assert repositories[0]["model_class"] == "Repository"
repositories = self.ts.repositories.get_repositories(name="bam_to_sam", owner="devteam")
assert len(repositories) == 1
bam_to_sam_repo = repositories[0]
assert bam_to_sam_repo["name"] == "bam_to_sam"
assert bam_to_sam_repo["owner"] == "devteam"
# search_repositories
samtools_search = self.ts.repositories.search_repositories("samtools", page_size=5)
assert int(samtools_search["total_results"]) > 20
assert len(samtools_search["hits"]) == 5
# show_repository
show_bam_to_sam_repo = self.ts.repositories.show_repository(bam_to_sam_repo["id"])
assert "SAM" in show_bam_to_sam_repo["long_description"]
# test_create_repository
# need to provide an API key to test this
# test_update_repository
# need to provide an API key to test this
def test_repositories_revisions(self):
# get_ordered_installable_revisions
bam_to_sam_revisions = self.ts.repositories.get_ordered_installable_revisions("bam_to_sam", "devteam")
assert len(bam_to_sam_revisions) >= 4
# get_repository_revision_install_info
bam_to_sam_revision_install_info = self.ts.repositories.get_repository_revision_install_info(
"bam_to_sam", "devteam", bam_to_sam_revisions[0]
)
assert len(bam_to_sam_revision_install_info) == 3
assert bam_to_sam_revision_install_info[0].get("model_class") == "Repository"
assert bam_to_sam_revision_install_info[1].get("model_class") == "RepositoryMetadata"
assert bam_to_sam_revision_install_info[2].get("model_class") is None
def test_tools_client(self):
# search_tools
samtools_search = self.ts.tools.search_tools("samtools", page_size=5)
assert int(samtools_search["total_results"]) > 2000
assert len(samtools_search["hits"]) == 5
bioblend-1.2.0/bioblend/_tests/__init__.py 0000664 0000000 0000000 00000000000 14447617043 0020460 0 ustar 00root root 0000000 0000000 bioblend-1.2.0/bioblend/_tests/data/ 0000775 0000000 0000000 00000000000 14447617043 0017272 5 ustar 00root root 0000000 0000000 bioblend-1.2.0/bioblend/_tests/data/1.bed 0000664 0000000 0000000 00000010152 14447617043 0020105 0 ustar 00root root 0000000 0000000 chr1 147962192 147962580 CCDS989.1_cds_0_0_chr1_147962193_r 0 -
chr1 147984545 147984630 CCDS990.1_cds_0_0_chr1_147984546_f 0 +
chr1 148078400 148078582 CCDS993.1_cds_0_0_chr1_148078401_r 0 -
chr1 148185136 148185276 CCDS996.1_cds_0_0_chr1_148185137_f 0 +
chr10 55251623 55253124 CCDS7248.1_cds_0_0_chr10_55251624_r 0 -
chr11 116124407 116124501 CCDS8374.1_cds_0_0_chr11_116124408_r 0 -
chr11 116206508 116206563 CCDS8377.1_cds_0_0_chr11_116206509_f 0 +
chr11 116211733 116212337 CCDS8378.1_cds_0_0_chr11_116211734_r 0 -
chr11 1812377 1812407 CCDS7726.1_cds_0_0_chr11_1812378_f 0 +
chr12 38440094 38440321 CCDS8736.1_cds_0_0_chr12_38440095_r 0 -
chr13 112381694 112381953 CCDS9526.1_cds_0_0_chr13_112381695_f 0 +
chr14 98710240 98712285 CCDS9949.1_cds_0_0_chr14_98710241_r 0 -
chr15 41486872 41487060 CCDS10096.1_cds_0_0_chr15_41486873_r 0 -
chr15 41673708 41673857 CCDS10097.1_cds_0_0_chr15_41673709_f 0 +
chr15 41679161 41679250 CCDS10098.1_cds_0_0_chr15_41679162_r 0 -
chr15 41826029 41826196 CCDS10101.1_cds_0_0_chr15_41826030_f 0 +
chr16 142908 143003 CCDS10397.1_cds_0_0_chr16_142909_f 0 +
chr16 179963 180135 CCDS10401.1_cds_0_0_chr16_179964_r 0 -
chr16 244413 244681 CCDS10402.1_cds_0_0_chr16_244414_f 0 +
chr16 259268 259383 CCDS10403.1_cds_0_0_chr16_259269_r 0 -
chr18 23786114 23786321 CCDS11891.1_cds_0_0_chr18_23786115_r 0 -
chr18 59406881 59407046 CCDS11985.1_cds_0_0_chr18_59406882_f 0 +
chr18 59455932 59456337 CCDS11986.1_cds_0_0_chr18_59455933_r 0 -
chr18 59600586 59600754 CCDS11988.1_cds_0_0_chr18_59600587_f 0 +
chr19 59068595 59069564 CCDS12866.1_cds_0_0_chr19_59068596_f 0 +
chr19 59236026 59236146 CCDS12872.1_cds_0_0_chr19_59236027_r 0 -
chr19 59297998 59298008 CCDS12877.1_cds_0_0_chr19_59297999_f 0 +
chr19 59302168 59302288 CCDS12878.1_cds_0_0_chr19_59302169_r 0 -
chr2 118288583 118288668 CCDS2120.1_cds_0_0_chr2_118288584_f 0 +
chr2 118394148 118394202 CCDS2121.1_cds_0_0_chr2_118394149_r 0 -
chr2 220190202 220190242 CCDS2441.1_cds_0_0_chr2_220190203_f 0 +
chr2 220229609 220230869 CCDS2443.1_cds_0_0_chr2_220229610_r 0 -
chr20 33330413 33330423 CCDS13249.1_cds_0_0_chr20_33330414_r 0 -
chr20 33513606 33513792 CCDS13255.1_cds_0_0_chr20_33513607_f 0 +
chr20 33579500 33579527 CCDS13256.1_cds_0_0_chr20_33579501_r 0 -
chr20 33593260 33593348 CCDS13257.1_cds_0_0_chr20_33593261_f 0 +
chr21 32707032 32707192 CCDS13614.1_cds_0_0_chr21_32707033_f 0 +
chr21 32869641 32870022 CCDS13615.1_cds_0_0_chr21_32869642_r 0 -
chr21 33321040 33322012 CCDS13620.1_cds_0_0_chr21_33321041_f 0 +
chr21 33744994 33745040 CCDS13625.1_cds_0_0_chr21_33744995_r 0 -
chr22 30120223 30120265 CCDS13897.1_cds_0_0_chr22_30120224_f 0 +
chr22 30160419 30160661 CCDS13898.1_cds_0_0_chr22_30160420_r 0 -
chr22 30665273 30665360 CCDS13901.1_cds_0_0_chr22_30665274_f 0 +
chr22 30939054 30939266 CCDS13903.1_cds_0_0_chr22_30939055_r 0 -
chr5 131424298 131424460 CCDS4149.1_cds_0_0_chr5_131424299_f 0 +
chr5 131556601 131556672 CCDS4151.1_cds_0_0_chr5_131556602_r 0 -
chr5 131621326 131621419 CCDS4152.1_cds_0_0_chr5_131621327_f 0 +
chr5 131847541 131847666 CCDS4155.1_cds_0_0_chr5_131847542_r 0 -
chr6 108299600 108299744 CCDS5061.1_cds_0_0_chr6_108299601_r 0 -
chr6 108594662 108594687 CCDS5063.1_cds_0_0_chr6_108594663_f 0 +
chr6 108640045 108640151 CCDS5064.1_cds_0_0_chr6_108640046_r 0 -
chr6 108722976 108723115 CCDS5067.1_cds_0_0_chr6_108722977_f 0 +
chr7 113660517 113660685 CCDS5760.1_cds_0_0_chr7_113660518_f 0 +
chr7 116512159 116512389 CCDS5771.1_cds_0_0_chr7_116512160_r 0 -
chr7 116714099 116714152 CCDS5773.1_cds_0_0_chr7_116714100_f 0 +
chr7 116945541 116945787 CCDS5774.1_cds_0_0_chr7_116945542_r 0 -
chr8 118881131 118881317 CCDS6324.1_cds_0_0_chr8_118881132_r 0 -
chr9 128764156 128764189 CCDS6914.1_cds_0_0_chr9_128764157_f 0 +
chr9 128787519 128789136 CCDS6915.1_cds_0_0_chr9_128787520_r 0 -
chr9 128882427 128882523 CCDS6917.1_cds_0_0_chr9_128882428_f 0 +
chr9 128937229 128937445 CCDS6919.1_cds_0_0_chr9_128937230_r 0 -
chrX 122745047 122745924 CCDS14606.1_cds_0_0_chrX_122745048_f 0 +
chrX 152648964 152649196 CCDS14733.1_cds_0_0_chrX_152648965_r 0 -
chrX 152691446 152691471 CCDS14735.1_cds_0_0_chrX_152691447_f 0 +
chrX 152694029 152694263 CCDS14736.1_cds_0_0_chrX_152694030_r 0 -
bioblend-1.2.0/bioblend/_tests/data/Galaxy-History-Test-history-for-export.tar.gz 0000664 0000000 0000000 00000006647 14447617043 0030031 0 ustar 00root root 0000000 0000000 [binary gzipped history-export archive omitted]