pax_global_header00006660000000000000000000000064140006103710014502gustar00rootroot0000000000000052 comment=0af50375a364288388f3a875f3c8254ae58366b4 datalad-container-1.1.2/000077500000000000000000000000001400061037100150555ustar00rootroot00000000000000datalad-container-1.1.2/.gitignore000066400000000000000000000001331400061037100170420ustar00rootroot00000000000000.pybuild/ .coverage /.tox *.egg-info *.py[coe] .#* .*.swp docs/build docs/source/generated datalad-container-1.1.2/.travis.yml000066400000000000000000000053551400061037100171760ustar00rootroot00000000000000# vim ft=yaml # travis-ci.org definition for DataLad build language: python services: - docker git: # we use submodules to be managed with datalad submodules: false python: - 3.5 - 3.6 - 3.7 - 3.8 cache: - apt env: global: # will be used in the matrix, where neither other variable is used - BOTO_CONFIG=/tmp/nowhere - DATALAD_TESTS_SSH=1 - DATALAD_LOG_CMD_ENV=GIT_SSH_COMMAND - TESTS_TO_PERFORM= - NOSE_OPTS=-s - NOSE_SELECTION_OP="not " # so it would be "not (integration or usecase)" # Special settings/helper for combined coverage from special remotes execution - COVERAGE=coverage - DATALAD_DATASETS_TOPURL=http://datasets-tests.datalad.org matrix: - DATALAD_REPO_VERSION=5 - DATALAD_REPO_VERSION=6 before_install: # Just in case we need to check if nfs is there etc - sudo lsmod # The ultimate one-liner setup for NeuroDebian repository - bash <(wget -q -O- http://neuro.debian.net/_files/neurodebian-travis.sh) - travis_retry sudo apt-get update -qq - travis_retry sudo apt-get install eatmydata # to speedup some installations - sudo eatmydata tools/ci/prep-travis-forssh-sudo.sh - tools/ci/prep-travis-forssh.sh # install git-annex with the relevant bits # no recommends to avoid inheriting the entire multimedia stack - travis_retry sudo eatmydata apt-get install --no-install-recommends git-annex-standalone aria2 git-remote-gcrypt lsof gnupg nocache # chroot/container stuff - travis_retry sudo eatmydata apt-get install xz-utils singularity-container - pip install --upgrade pip install: # Install standalone build of git-annex for the recent enough version - travis_retry sudo eatmydata apt-get install zip pandoc # for metadata support - git config --global user.email "test@travis.land" - git config --global user.name "Travis Almighty" - cd ..; pip install -q codecov; cd - - pip install -r requirements-devel.txt script: # Verify that setup.py build doesn't puke - python setup.py build # Test installation system-wide - sudo `which pip` install . # Run tests - http_proxy= PATH=$PWD/tools/coverage-bin:$PATH $NOSE_WRAPPER python -m nose $NOSE_OPTS -v -A "$NOSE_SELECTION_OP(integration or usecase or slow)" --with-doctest --with-cov --cover-package datalad_container --logging-level=INFO $TESTS_TO_PERFORM # Run doc examples # NOTE: We don't have a run_doc_examples ATM # - $NOSE_WRAPPER tools/testing/run_doc_examples - if [ ! "${DATALAD_LOG_LEVEL:-}" = 2 ]; then PYTHONPATH=$PWD $NOSE_WRAPPER make -C docs html doctest; fi # Report WTF information using system wide installed version - datalad wtf after_success: - coverage combine -a /tmp/.coverage-entrypoints-* - codecov datalad-container-1.1.2/CHANGELOG.md000066400000000000000000000134461400061037100166760ustar00rootroot00000000000000 ____ _ _ _ | _ \ __ _ | |_ __ _ | | __ _ __| | | | | | / _` || __| / _` || | / _` | / _` | | |_| || (_| || |_ | (_| || |___ | (_| || (_| | |____/ \__,_| \__| \__,_||_____| \__,_| \__,_| Container This is a high level and scarce summary of the changes between releases. 
We would recommend to consult log of the [DataLad git repository](http://github.com/datalad/datalad-container) for more details. ## 1.1.2 (January 16, 2021) -- - Replace use of `mock` with `unittest.mock` as we do no longer support Python 2 ## 1.1.1 (January 03, 2021) -- - Drop use of `Runner` (to be removed in datalad 0.14.0) in favor of `WitlessRunner` ## 1.1.0 (October 30, 2020) -- - Datalad version 0.13.0 or later is now required. - In the upcoming 0.14.0 release of DataLad, the datalad special remote will have built-in support for "shub://" URLs. If `containers-add` detects support for this feature, it will now add the "shub://" URL as is rather than resolving the URL itself. This avoids registering short-lived URLs, allowing the image to be retrieved later with `datalad get`. - `containers-run` learned to install necessary subdatasets when asked to execute a container from underneath an uninstalled subdataset. ## 1.0.1 (June 23, 2020) -- - Prefer `datalad.core.local.run` to `datalad.interface.run`. The latter has been marked as obsolete since DataLad v0.12 (our minimum requirement) and will be removed in DataLad's next feature release. ## 1.0.0 (Feb 23, 2020) -- not-as-a-shy-one Extension is pretty stable so releasing as 1. MAJOR release, so we could start tracking API breakages and enhancements properly. - Drops support for Python 2 and DataLad prior 0.12 ## 0.5.2 (Nov 12, 2019) -- ### Fixes - The Docker adapter unconditionally called `docker run` with `--interactive` and `--tty` even when stdin was not attached to a TTY, leading to an error. ## 0.5.1 (Nov 08, 2019) -- ### Fixes - The Docker adapter, which is used for the "dhub://" URL scheme, assumed the Python executable was spelled "python". - A call to DataLad's `resolve_path` helper assumed a string return value, which isn't true as of the latest DataLad release candidate, 0.12.0rc6. ## 0.5.0 (Jul 12, 2019) -- damn-you-malicious-users ### New features - The default result renderer for `containers-list` is now a custom renderer that includes the container name in the output. ### Fixes - Temporarily skip two tests relying on SingularityHub -- it is down. ## 0.4.0 (May 29, 2019) -- run-baby-run The minimum required DataLad version is now 0.11.5. ### New features - The call format gained the "{img_dspath}" placeholder, which expands to the relative path of the dataset that contains the image. This is useful for pointing to a wrapper script that is bundled in the same subdataset as a container. - `containers-run` now passes the container image to `run` via its `extra_inputs` argument so that a run command's "{inputs}" field is restricted to inputs that the caller explicitly specified. - During execution, `containers-run` now sets the environment variable `DATALAD_CONTAINER_NAME` to the name of the container. ### Fixes - `containers-run` mishandled paths when called from a subdirectory. - `containers-run` didn't provide an informative error message when `cmdexec` contained an unknown placeholder. - `containers-add` ignores the `--update` flag when the container doesn't yet exist, but it confusingly still used the word "update" in the commit message. ## 0.3.1 (Mar 05, 2019) -- Upgrayeddd ### Fixes - `containers-list` recursion actually does recursion. ## 0.3.0 (Mar 05, 2019) -- Upgrayedd ### API changes - `containers-list` no longer lists containers from subdatasets by default. Specify `--recursive` to do so. 
- `containers-run` no longer considers subdataset containers in its automatic selection of a container name when no name is specified. If the current dataset has one container, that container is selected. Subdataset containers must always be explicitly specified. ### New features - `containers-add` learned to update a previous container when passed `--update`. - `containers-add` now supports Singularity's "docker://" scheme in the URL. - To avoid unnecessary recursion into subdatasets, `containers-run` now decides to look for containers in subdatasets based on whether the name has a slash (which is true of all subdataset containers). ## 0.2.2 (Dec 19, 2018) -- The more the merrier - list/use containers recursively from installed subdatasets - Allow to specify container by path rather than just by name - Adding a container from local filesystem will copy it now ## 0.2.1 (Jul 14, 2018) -- Explicit lyrics - Add support `datalad run --explicit`. ## 0.2 (Jun 08, 2018) -- Docker - Initial support for adding and running Docker containers. - Add support `datalad run --sidecar`. - Simplify storage of `call_fmt` arguments in the Git config, by benefiting from `datalad run` being able to work with single-string compound commands. ## 0.1.2 (May 28, 2018) -- The docs - Basic beginner documentation ## 0.1.1 (May 22, 2018) -- The fixes ### New features - Add container images straight from singularity-hub, no need to manually specify `--call-fmt` arguments. ### API changes - Use "name" instead of "label" for referring to a container (e.g. `containers-run -n ...` instead of `containers-run -l`. ### Fixes - Pass relative container path to `datalad run`. - `containers-run` no longer hides `datalad run` failures. ## 0.1 (May 19, 2018) -- The Release - Initial release with basic functionality to add, remove, and list containers in a dataset, plus a `run` command wrapper that injects the container image as an input dependency of a command call. datalad-container-1.1.2/CONTRIBUTORS000066400000000000000000000001661400061037100167400ustar00rootroot00000000000000The following people have contributed to this project: Benjamin Poldrack Kyle Meyer Michael Hanke Yaroslav Halchenko datalad-container-1.1.2/COPYING000066400000000000000000000024241400061037100161120ustar00rootroot00000000000000# Main Copyright/License DataLad, including all examples, code snippets and attached documentation is covered by the MIT license. The MIT License Copyright (c) 2018- DataLad Team Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
See CONTRIBUTORS file for a full list of contributors. datalad-container-1.1.2/Makefile000066400000000000000000000023741400061037100165230ustar00rootroot00000000000000# simple makefile to simplify repetetive build env management tasks under posix # Ideas borrowed from scikit-learn's and PyMVPA Makefiles -- thanks! PYTHON ?= python NOSETESTS ?= nosetests MODULE ?= datalad all: clean test clean: $(PYTHON) setup.py clean rm -rf dist build bin docs/build docs/source/generated -find . -name '*.pyc' -delete -find . -name '__pycache__' -type d -delete bin: mkdir -p $@ PYTHONPATH=bin:$(PYTHONPATH) python setup.py develop --install-dir $@ test-code: bin PATH=bin:$(PATH) PYTHONPATH=bin:$(PYTHONPATH) $(NOSETESTS) -s -v $(MODULE) test-coverage: rm -rf coverage .coverage $(NOSETESTS) -s -v --with-coverage $(MODULE) test: test-code trailing-spaces: find $(MODULE) -name "*.py" -exec perl -pi -e 's/[ \t]*$$//' {} \; code-analysis: flake8 $(MODULE) | grep -v __init__ | grep -v external pylint -E -i y $(MODULE)/ # -d E1103,E0611,E1101 update-changelog: @echo ".. This file is auto-converted from CHANGELOG.md (make update-changelog) -- do not edit\n\nChange log\n**********" > docs/source/changelog.rst pandoc -t rst CHANGELOG.md >> docs/source/changelog.rst release-pypi: update-changelog # better safe than sorry test ! -e dist python setup.py sdist python setup.py bdist_wheel --universal twine upload dist/* datalad-container-1.1.2/README.md000066400000000000000000000076431400061037100163460ustar00rootroot00000000000000 ____ _ _ _ | _ \ __ _ | |_ __ _ | | __ _ __| | | | | | / _` || __| / _` || | / _` | / _` | | |_| || (_| || |_ | (_| || |___ | (_| || (_| | |____/ \__,_| \__| \__,_||_____| \__,_| \__,_| Container [![Travis tests status](https://secure.travis-ci.org/datalad/datalad-container.png?branch=master)](https://travis-ci.org/datalad/datalad-container) [![codecov.io](https://codecov.io/github/datalad/datalad-container/coverage.svg?branch=master)](https://codecov.io/github/datalad/datalad-container?branch=master) [![Documentation](https://readthedocs.org/projects/datalad-container/badge/?version=latest)](http://datalad-container.rtfd.org) [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) [![GitHub release](https://img.shields.io/github/release/datalad/datalad-container.svg)](https://GitHub.com/datalad/datalad-container/releases/) [![PyPI version fury.io](https://badge.fury.io/py/datalad-container.svg)](https://pypi.python.org/pypi/datalad-container/) [![Average time to resolve an issue](http://isitmaintained.com/badge/resolution/datalad/datalad-container.svg)](http://isitmaintained.com/project/datalad/datalad-container "Average time to resolve an issue") [![Percentage of issues still open](http://isitmaintained.com/badge/open/datalad/datalad-container.svg)](http://isitmaintained.com/project/datalad/datalad-container "Percentage of issues still open") [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.3368666.svg)](https://doi.org/10.5281/zenodo.3368666) ![Conda](https://anaconda.org/conda-forge/datalad-container/badges/version.svg) This extension enhances DataLad (http://datalad.org) for working with computational containers. Please see the [extension documentation](http://datalad-container.rtfd.org) for a description on additional commands and functionality. For general information on how to use or contribute to DataLad (and this extension), please see the [DataLad website](http://datalad.org) or the [main GitHub project page](http://datalad.org). 
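## Quick start

Once the extension is installed (see below), containers can be registered and
used either via the `datalad containers-*` commands or through DataLad's
Python API. A minimal sketch of a typical workflow (the dataset path and the
image URL are placeholders only):

    from datalad.api import Dataset

    ds = Dataset("/tmp/my-analysis").create()
    # register a container image under a name (here from Singularity Hub)
    ds.containers_add("my-env", url="shub://some-user/some-image")
    # show containers known to the dataset
    ds.containers_list()
    # run a command inside the registered container; the image is recorded
    # as an input dependency of the resulting run record
    ds.containers_run("ls", container_name="my-env")
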
## Installation Before you install this package, please make sure that you [install a recent version of git-annex](https://git-annex.branchable.com/install). Afterwards, install the latest version of `datalad-container` from [PyPi](https://pypi.org/project/datalad-container). It is recommended to use a dedicated [virtualenv](https://virtualenv.pypa.io): # create and enter a new virtual environment (optional) virtualenv --system-site-packages --python=python3 ~/env/datalad . ~/env/datalad/bin/activate # install from PyPi pip install datalad_container ## Support The documentation of this project is found here: http://docs.datalad.org/projects/container All bugs, concerns and enhancement requests for this software can be submitted here: https://github.com/datalad/datalad-container/issues If you have a problem or would like to ask a question about how to use DataLad, please [submit a question to NeuroStars.org](https://neurostars.org/tags/datalad) with a ``datalad`` tag. NeuroStars.org is a platform similar to StackOverflow but dedicated to neuroinformatics. All previous DataLad questions are available here: http://neurostars.org/tags/datalad/ ## Acknowledgements DataLad development is supported by a US-German collaboration in computational neuroscience (CRCNS) project "DataGit: converging catalogues, warehouses, and deployment logistics into a federated 'data distribution'" (Halchenko/Hanke), co-funded by the US National Science Foundation (NSF 1429999) and the German Federal Ministry of Education and Research (BMBF 01GQ1411). Additional support is provided by the German federal state of Saxony-Anhalt and the European Regional Development Fund (ERDF), Project: Center for Behavioral Brain Sciences, Imaging Platform. This work is further facilitated by the ReproNim project (NIH 1P41EB019936-01A1). 
datalad-container-1.1.2/datalad_container/000077500000000000000000000000001400061037100205115ustar00rootroot00000000000000datalad-container-1.1.2/datalad_container/__init__.py000066400000000000000000000026421400061037100226260ustar00rootroot00000000000000"""DataLad container extension""" __docformat__ = 'restructuredtext' from .version import __version__ # defines a datalad command suite # this symbold must be indentified as a setuptools entrypoint # to be found by datalad command_suite = ( # description of the command suite, displayed in cmdline help "Containerized environments", [ # specification of a command, any number of commands can be defined ( # importable module that contains the command implementation 'datalad_container.containers_list', # name of the command class implementation in above module 'ContainersList', 'containers-list', 'containers_list', ), ( 'datalad_container.containers_remove', # name of the command class implementation in above module 'ContainersRemove', 'containers-remove', 'containers_remove', ), ( 'datalad_container.containers_add', # name of the command class implementation in above module 'ContainersAdd', 'containers-add', 'containers_add', ), ( 'datalad_container.containers_run', 'ContainersRun', 'containers-run', 'containers_run', ) ] ) from datalad import setup_package from datalad import teardown_package datalad-container-1.1.2/datalad_container/adapters/000077500000000000000000000000001400061037100223145ustar00rootroot00000000000000datalad-container-1.1.2/datalad_container/adapters/__init__.py000066400000000000000000000000001400061037100244130ustar00rootroot00000000000000datalad-container-1.1.2/datalad_container/adapters/docker.py000066400000000000000000000147201400061037100241410ustar00rootroot00000000000000"""Work with Docker images as local paths. This module provides support for saving a Docker image in a local directory and then loading it on-the-fly before calling `docker run ...`. The motivation for this is that it allows the components of an image to be tracked as objects in a DataLad dataset. Run `python -m datalad_container.adapters.docker --help` for details about the command-line interface. """ from glob import glob import hashlib import os import os.path as op import subprocess as sp import sys import tarfile import tempfile import logging from datalad.utils import ( on_windows, ) lgr = logging.getLogger("datalad.containers.adapters.docker") # Note: A dockerpy dependency probably isn't worth it in the current # state but is worth thinking about if this module gets more # complicated. # FIXME: These functions assume that there is a "docker" on the path # that can be managed by a non-root user. At the least, this should # be documented somewhere. def save(image, path): """Save and extract a docker image to a directory. Parameters ---------- image : str A unique identifier for a docker image. path : str A directory to extract the image to. """ # Use a temporary file because docker save (or actually tar underneath) # complains that stdout needs to be redirected if we use Popen and PIPE. 
with tempfile.NamedTemporaryFile() as stream: # Windows can't write to an already opened file stream.close() sp.check_call(["docker", "save", "-o", stream.name, image]) with tarfile.open(stream.name, mode="r:") as tar: if not op.exists(path): lgr.debug("Creating new directory at %s", path) os.makedirs(path) elif os.listdir(path): raise OSError("Directory {} is not empty".format(path)) tar.extractall(path=path) lgr.info("Saved %s to %s", image, path) def _list_images(): out = sp.check_output( ["docker", "images", "--all", "--quiet", "--no-trunc"]) return out.decode().splitlines() def get_image(path): """Return the image ID of the image extracted at `path`. """ jsons = [j for j in glob(op.join(path, "*.json")) if not j.endswith(op.sep + "manifest.json")] if len(jsons) != 1: raise ValueError("Could not find a unique JSON configuration object " "in {}".format(path)) with open(jsons[0], "rb") as stream: return hashlib.sha256(stream.read()).hexdigest() def load(path): """Load the Docker image from `path`. Parameters ---------- path : str A directory with an extracted tar archive. Returns ------- The image ID (str) """ # FIXME: If we load a dataset, it may overwrite the current tag. Say that # (1) a dataset has a saved neurodebian:latest from a month ago, (2) a # newer neurodebian:latest has been pulled, and (3) the old image have been # deleted (e.g., with 'docker image prune --all'). Given all three of these # things, loading the image from the dataset will tag the old neurodebian # image as the latest. image_id = "sha256:" + get_image(path) if image_id not in _list_images(): lgr.debug("Loading %s", image_id) cmd = ["docker", "load"] p = sp.Popen(cmd, stdin=sp.PIPE, stdout=sp.PIPE, stderr=sp.PIPE) with tarfile.open(fileobj=p.stdin, mode="w|", dereference=True) as tar: tar.add(path, arcname="") out, err = p.communicate() return_code = p.poll() if return_code: lgr.warning("Running %r failed: %s", cmd, err.decode()) raise sp.CalledProcessError(return_code, cmd, output=out) else: lgr.debug("Image %s is already present", image_id) if image_id not in _list_images(): raise RuntimeError( "docker image {} was not successfully loaded".format(image_id)) return image_id # Command-line def cli_save(namespace): save(namespace.image, namespace.path) def cli_run(namespace): image_id = load(namespace.path) prefix = ["docker", "run", # FIXME: The -v/-w settings are convenient for testing, but they # should be configurable. "-v", "{}:/tmp".format(os.getcwd()), "-w", "/tmp", "--rm", "--interactive"] if not on_windows: # Make it possible for the output files to be added to the # dataset without the user needing to manually adjust the # permissions. prefix.extend(["-u", "{}:{}".format(os.getuid(), os.getgid())]) if sys.stdin.isatty(): prefix.append("--tty") prefix.append(image_id) cmd = prefix + namespace.cmd lgr.debug("Running %r", cmd) sp.check_call(cmd) def main(args): import argparse parser = argparse.ArgumentParser( prog="python -m datalad_container.adapters.docker", description="Work with Docker images as local paths") parser.add_argument( "-v", "--verbose", action="store_true") subparsers = parser.add_subparsers(title="subcommands") # Don't continue without a subcommand. 
subparsers.required = True subparsers.dest = "command" parser_save = subparsers.add_parser( "save", help="save and extract a Docker image to a directory") parser_save.add_argument( "image", metavar="NAME", help="image to save") parser_save.add_argument( "path", metavar="PATH", help="directory to save image in") parser_save.set_defaults(func=cli_save) # TODO: Add command for updating an archive directory. parser_run = subparsers.add_parser( "run", help="run a command with a directory's image") parser_run.add_argument( "path", metavar="PATH", help="run the image in this directory") parser_run.add_argument( "cmd", metavar="CMD", nargs=argparse.REMAINDER, help="command to execute") parser_run.set_defaults(func=cli_run) namespace = parser.parse_args(args[1:]) logging.basicConfig( level=logging.DEBUG if namespace.verbose else logging.INFO, format="%(message)s") namespace.func(namespace) if __name__ == "__main__": try: main(sys.argv) except Exception as exc: lgr.exception("Failed to execute %s", sys.argv) if isinstance(exc, sp.CalledProcessError): excode = exc.returncode else: excode = 1 sys.exit(excode) datalad-container-1.1.2/datalad_container/adapters/tests/000077500000000000000000000000001400061037100234565ustar00rootroot00000000000000datalad-container-1.1.2/datalad_container/adapters/tests/__init__.py000066400000000000000000000000001400061037100255550ustar00rootroot00000000000000datalad-container-1.1.2/datalad_container/adapters/tests/test_docker.py000066400000000000000000000063541400061037100263460ustar00rootroot00000000000000from distutils.spawn import find_executable import os.path as op import sys import datalad_container.adapters.docker as da from datalad.cmd import ( StdOutCapture, WitlessRunner, ) from datalad.support.exceptions import CommandError from datalad.tests.utils import ( SkipTest, assert_in, assert_raises, eq_, ok_exists, with_tempfile, with_tree, ) if not find_executable("docker"): raise SkipTest("'docker' not found on path") def call(args, **kwds): return WitlessRunner().run( [sys.executable, "-m", "datalad_container.adapters.docker"] + args, **kwds) def list_images(args): cmd = ["docker", "images", "--quiet", "--no-trunc"] + args res = WitlessRunner().run(cmd, protocol=StdOutCapture) return res["stdout"].strip().split() def images_exist(args): return bool(list_images(args)) @with_tempfile def test_docker_save_doesnt_exist(path): image_name = "idonotexistsurely" if images_exist([image_name]): raise SkipTest("Image wasn't supposed to exist, but does: {}" .format(image_name)) with assert_raises(CommandError): call(["save", image_name, path]) class TestAdapterBusyBox(object): @classmethod def setup_class(cls): cls.image_name = "busybox:latest" if images_exist([cls.image_name]): cls.image_existed = True else: cls.image_existed = False WitlessRunner().run(["docker", "pull", cls.image_name]) @classmethod def teardown_class(cls): if not cls.image_existed and images_exist([cls.image_name]): WitlessRunner().run(["docker", "rmi", cls.image_name]) @with_tempfile(mkdir=True) def test_save_and_run(self, path): image_dir = op.join(path, "image") call(["save", self.image_name, image_dir]) ok_exists(op.join(image_dir, "manifest.json")) img_ids = list_images([self.image_name]) assert len(img_ids) == 1 eq_("sha256:" + da.get_image(image_dir), img_ids[0]) if not self.image_existed: WitlessRunner().run(["docker", "rmi", self.image_name]) out = call(["run", image_dir, "ls"], cwd=path, protocol=StdOutCapture) assert images_exist([self.image_name]) assert_in("image", out["stdout"]) 
@with_tree({"foo": "content"}) def test_containers_run(self, path): if self.image_existed: raise SkipTest( "Not pulling with containers-run due to existing image: {}" .format(self.image_name)) from datalad.api import Dataset ds = Dataset(path).create(force=True) ds.save(path="foo") ds.containers_add("bb", url="dhub://" + self.image_name) out = WitlessRunner(cwd=ds.path).run( ["datalad", "containers-run", "-n", "bb", "cat foo"], protocol=StdOutCapture) assert_in("content", out["stdout"]) # Data can be received on stdin. with (ds.pathobj / "foo").open() as ifh: out = WitlessRunner(cwd=ds.path).run( ["datalad", "containers-run", "-n", "bb", "cat"], protocol=StdOutCapture, stdin=ifh) assert_in("content", out["stdout"]) datalad-container-1.1.2/datalad_container/containers_add.py000066400000000000000000000321261400061037100240440ustar00rootroot00000000000000"""Add a container environment to a dataset""" __docformat__ = 'restructuredtext' import re import logging import os import os.path as op import sys from shutil import copyfile from simplejson import loads from datalad.cmd import WitlessRunner from datalad.interface.base import Interface from datalad.interface.base import build_doc from datalad.support.param import Parameter from datalad.distribution.dataset import datasetmethod, EnsureDataset from datalad.distribution.dataset import require_dataset from datalad.interface.utils import eval_results from datalad.support.constraints import EnsureStr from datalad.support.constraints import EnsureNone from datalad.support.exceptions import InsufficientArgumentsError from datalad.interface.results import get_status_dict from .definitions import definitions lgr = logging.getLogger("datalad.containers.containers_add") # The DataLad special remote has built-in support for Singularity Hub URLs. Let # it handle shub:// URLs if it's available. _HAS_SHUB_DOWNLOADER = True try: import datalad.downloaders.shub except ImportError: lgr.debug("DataLad's shub downloader not found. " "Custom handling for shub:// will be used") _HAS_SHUB_DOWNLOADER = False def _resolve_img_url(url): """Takes a URL and tries to resolve it to an actual download URL that `annex addurl` can handle""" if not _HAS_SHUB_DOWNLOADER and url.startswith('shub://'): # TODO: Remove this handling once the minimum DataLad version is at # least 0.14. lgr.debug('Query singularity-hub for image download URL') import requests req = requests.get( 'https://www.singularity-hub.org/api/container/{}'.format( url[7:])) shub_info = loads(req.text) url = shub_info['image'] return url def _guess_call_fmt(ds, name, url): """Helper to guess a container exec setup based on - a name (to be able to look up more config - a plain url to make inference based on the source location Should return `None` is no guess can be made. 
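
    For example (mirroring the logic below), 'shub://' and 'docker://' URLs
    map to 'singularity exec {img} {cmd}', while 'dhub://' URLs are executed
    through the bundled Docker adapter.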
""" if url is None: return None elif url.startswith('shub://') or url.startswith('docker://'): return 'singularity exec {img} {cmd}' elif url.startswith('dhub://'): return op.basename(sys.executable) + ' -m datalad_container.adapters.docker run {img} {cmd}' def _ensure_datalad_remote(repo): """Initialize and enable datalad special remote if it isn't already.""" dl_remote = None for info in repo.get_special_remotes().values(): if info["externaltype"] == "datalad": dl_remote = info["name"] break if not dl_remote: from datalad.consts import DATALAD_SPECIAL_REMOTE from datalad.customremotes.base import init_datalad_remote init_datalad_remote(repo, DATALAD_SPECIAL_REMOTE, autoenable=True) elif repo.is_special_annex_remote(dl_remote, check_if_known=False): lgr.debug("datalad special remote '%s' is already enabled", dl_remote) else: lgr.debug("datalad special remote '%s' found. Enabling", dl_remote) repo.enable_remote(dl_remote) @build_doc # all commands must be derived from Interface class ContainersAdd(Interface): # first docstring line is used a short description in the cmdline help # the rest is put in the verbose help and manpage """Add a container to a dataset """ # parameters of the command, must be exhaustive _params_ = dict( dataset=Parameter( args=("-d", "--dataset"), doc="""specify the dataset to add the container to. If no dataset is given, an attempt is made to identify the dataset based on the current working directory""", constraints=EnsureDataset() | EnsureNone() ), name=Parameter( args=("name",), doc="""The name to register the container under. This also determines the default location of the container image within the dataset.""", metavar="NAME", constraints=EnsureStr(), ), url=Parameter( args=("-u", "--url"), doc="""A URL (or local path) to get the container image from. If the URL scheme is one recognized by Singularity, 'shub://' or 'docker://', the command format string will be auto-guessed when [CMD: --call-fmt CMD][PY: call_fmt PY] is not specified. For the scheme 'dhub://', the rest of the URL will be interpreted as the argument to 'docker pull', the image will be saved to the location specified by `name`, and the call format will be auto-guessed if not given.""", metavar="URL", constraints=EnsureStr() | EnsureNone(), ), # TODO: The "prepared command stuff should ultimately go somewhere else # (probably datalad-run). But first figure out, how exactly to address # container datasets call_fmt=Parameter( args=("--call-fmt",), doc="""Command format string indicating how to execute a command in this container, e.g. "singularity exec {img} {cmd}". Where '{img}' is a placeholder for the path to the container image and '{cmd}' is replaced with the desired command. Additional placeholders: '{img_dspath}' is relative path to the dataset containing the image. """, metavar="FORMAT", constraints=EnsureStr() | EnsureNone(), ), image=Parameter( args=("-i", "--image"), doc="""Relative path of the container image within the dataset. If not given, a default location will be determined using the `name` argument.""", metavar="IMAGE", constraints=EnsureStr() | EnsureNone(), ), update=Parameter( args=("--update",), action="store_true", doc="""Update the existing container for `name`. If no other options are specified, URL will be set to 'updateurl', if configured. 
If a container with `name` does not already exist, this option is ignored.""" ) ) @staticmethod @datasetmethod(name='containers_add') @eval_results def __call__(name, url=None, dataset=None, call_fmt=None, image=None, update=False): if not name: raise InsufficientArgumentsError("`name` argument is required") ds = require_dataset(dataset, check_installed=True, purpose='add container') runner = WitlessRunner() # prevent madness in the config file if not re.match(r'^[0-9a-zA-Z-]+$', name): raise ValueError( "Container names can only contain alphanumeric characters " "and '-', got: '{}'".format(name)) cfgbasevar = "datalad.containers.{}".format(name) if cfgbasevar + ".image" in ds.config: if not update: yield get_status_dict( action="containers_add", ds=ds, logger=lgr, status="impossible", message=("Container named %r already exists. " "Use --update to reconfigure.", name)) return if not (url or image or call_fmt): # No updated values were provided. See if an update url is # configured (currently relevant only for Singularity Hub). url = ds.config.get(cfgbasevar + ".updateurl") if not url: yield get_status_dict( action="containers_add", ds=ds, logger=lgr, status="impossible", message="No values to update specified") return call_fmt = call_fmt or ds.config.get(cfgbasevar + ".cmdexec") image = image or ds.config.get(cfgbasevar + ".image") if not image: loc_cfg_var = "datalad.containers.location" # TODO: We should provide an entry point (or sth similar) for extensions # to get config definitions into the ConfigManager. In other words an # easy way to extend definitions in datalad's common_cfgs.py. container_loc = \ ds.config.obtain( loc_cfg_var, where=definitions[loc_cfg_var]['destination'], # if not False it would actually modify the # dataset config file -- undesirable store=False, default=definitions[loc_cfg_var]['default'], dialog_type=definitions[loc_cfg_var]['ui'][0], valtype=definitions[loc_cfg_var]['type'], **definitions[loc_cfg_var]['ui'][1] ) image = op.join(ds.path, container_loc, name, 'image') else: image = op.join(ds.path, image) result = get_status_dict( action="containers_add", path=image, type="file", logger=lgr, ) if call_fmt is None: # maybe built in knowledge can help call_fmt = _guess_call_fmt(ds, name, url) # collect bits for a final and single save() call to_save = [] imgurl = url was_updated = False if url: if update and op.lexists(image): was_updated = True # XXX: check=False is used to avoid dropping the image. It # should use drop=False if remove() gets such an option (see # DataLad's gh-2673). 
for r in ds.remove(image, save=False, check=False, return_type="generator"): yield r imgurl = _resolve_img_url(url) lgr.debug('Attempt to obtain container image from: %s', imgurl) if url.startswith("dhub://"): from .adapters import docker docker_image = url[len("dhub://"):] lgr.debug( "Running 'docker pull %s and saving image to %s", docker_image, image) runner.run(["docker", "pull", docker_image]) docker.save(docker_image, image) elif url.startswith("docker://"): image_dir, image_basename = op.split(image) if not image_basename: raise ValueError("No basename in path {}".format(image)) if image_dir and not op.exists(image_dir): os.makedirs(image_dir) lgr.info("Building Singularity image for %s " "(this may take some time)", url) runner.run(["singularity", "build", image_basename, url], cwd=image_dir or None) elif op.exists(url): lgr.info("Copying local file %s to %s", url, image) image_dir = op.dirname(image) if image_dir and not op.exists(image_dir): os.makedirs(image_dir) copyfile(url, image) else: if _HAS_SHUB_DOWNLOADER and url.startswith('shub://'): _ensure_datalad_remote(ds.repo) try: ds.repo.add_url_to_file(image, imgurl) except Exception as e: result["status"] = "error" result["message"] = str(e) yield result # TODO do we have to take care of making the image executable # if --call_fmt is not provided? to_save.append(image) # continue despite a remote access failure, the following config # setting will enable running the command again with just the name # given to ease a re-run if not op.lexists(image): result["status"] = "error" result["message"] = ('no image at %s', image) yield result return # store configs if imgurl != url: # store originally given URL, as it resolves to something # different and maybe can be used to update the container # at a later point in time ds.config.set("{}.updateurl".format(cfgbasevar), url) # force store the image, and prevent multiple entries ds.config.set( "{}.image".format(cfgbasevar), op.relpath(image, start=ds.path), force=True) if call_fmt: ds.config.set( "{}.cmdexec".format(cfgbasevar), call_fmt, force=True) # store changes to_save.append(op.join(".datalad", "config")) for r in ds.save( path=to_save, message="[DATALAD] {do} containerized environment '{name}'".format( do="Update" if was_updated else "Configure", name=name)): yield r result["status"] = "ok" yield result datalad-container-1.1.2/datalad_container/containers_list.py000066400000000000000000000110031400061037100242560ustar00rootroot00000000000000"""List known container environments of a dataset""" __docformat__ = 'restructuredtext' import logging import os.path as op from datalad.interface.base import Interface from datalad.interface.base import build_doc from datalad.interface.common_opts import recursion_flag from datalad.support.param import Parameter from datalad.distribution.dataset import datasetmethod, EnsureDataset, Dataset from datalad.distribution.dataset import require_dataset from datalad.interface.utils import default_result_renderer from datalad.interface.utils import eval_results from datalad.support.constraints import EnsureNone import datalad.support.ansi_colors as ac from datalad.interface.results import get_status_dict from datalad.coreapi import subdatasets from datalad.ui import ui lgr = logging.getLogger("datalad.containers.containers_list") @build_doc # all commands must be derived from Interface class ContainersList(Interface): # first docstring line is used a short description in the cmdline help # the rest is put in the verbose help and manpage """List 
containers known to a dataset """ result_renderer = 'tailored' # parameters of the command, must be exhaustive _params_ = dict( dataset=Parameter( args=("-d", "--dataset"), doc="""specify the dataset to query. If no dataset is given, an attempt is made to identify the dataset based on the current working directory""", constraints=EnsureDataset() | EnsureNone()), contains=Parameter( args=('--contains',), metavar='PATH', action='append', doc="""when operating recursively, restrict the reported containers to those from subdatasets that contain the given path (i.e. the subdatasets that are reported by :command:`datalad subdatasets --contains=PATH`). Top-level containers are always reported."""), recursive=recursion_flag, ) @staticmethod @datasetmethod(name='containers_list') @eval_results def __call__(dataset=None, recursive=False, contains=None): ds = require_dataset(dataset, check_installed=True, purpose='list containers') refds = ds.path if recursive: for sub in ds.subdatasets( contains=contains, on_failure='ignore', return_type='generator'): subds = Dataset(sub['path']) if subds.is_installed(): for c in subds.containers_list(recursive=recursive, return_type='generator', on_failure='ignore', result_filter=None, result_renderer=None, result_xfm=None): c['name'] = sub['gitmodule_name'] + '/' + c['name'] c['refds'] = refds yield c # all info is in the dataset config! var_prefix = 'datalad.containers.' containers = {} for var, value in ds.config.items(): if not var.startswith(var_prefix): # not an interesting variable continue var_comps = var[len(var_prefix):].split('.') cname = var_comps[0] ccfgname = '.'.join(var_comps[1:]) if not ccfgname: continue cinfo = containers.get(cname, {}) cinfo[ccfgname] = value containers[cname] = cinfo for k, v in containers.items(): if 'image' not in v: # there is no container location configured continue res = get_status_dict( status='ok', action='containers', name=k, type='file', path=op.join(ds.path, v.pop('image')), refds=refds, parentds=ds.path, # TODO #state='absent' if ... else 'present' **v) yield res @staticmethod def custom_result_renderer(res, **kwargs): if res["action"] != "containers": default_result_renderer(res) else: ui.message( "{name} -> {path}" .format(name=ac.color_word(res["name"], ac.MAGENTA), path=op.relpath(res["path"], res["refds"]))) datalad-container-1.1.2/datalad_container/containers_remove.py000066400000000000000000000064551400061037100246170ustar00rootroot00000000000000"""Remove a container environment from a dataset""" __docformat__ = 'restructuredtext' import logging import os.path as op from datalad.interface.base import Interface from datalad.interface.base import build_doc from datalad.support.param import Parameter from datalad.distribution.dataset import datasetmethod, EnsureDataset from datalad.distribution.dataset import require_dataset from datalad.interface.utils import eval_results from datalad.support.constraints import EnsureNone from datalad.support.constraints import EnsureStr from datalad.interface.results import get_status_dict lgr = logging.getLogger("datalad.containers.containers_remove") @build_doc # all commands must be derived from Interface class ContainersRemove(Interface): # first docstring line is used a short description in the cmdline help # the rest is put in the verbose help and manpage """Remove a known container from a dataset """ # parameters of the command, must be exhaustive _params_ = dict( dataset=Parameter( args=("-d", "--dataset"), doc="""specify the dataset to query. 
If no dataset is given, an attempt is made to identify the dataset based on the current working directory""", constraints=EnsureDataset() | EnsureNone()), name=Parameter( args=("name",), doc="""name of the container to remove""", metavar="NAME", constraints=EnsureStr(), ), remove_image=Parameter( args=("-i", "--remove-image",), doc="""if set, remove container image as well""", action="store_true", ), ) @staticmethod @datasetmethod(name='containers_remove') @eval_results def __call__(name, dataset=None, remove_image=False): ds = require_dataset(dataset, check_installed=True, purpose='remove a container') res = get_status_dict( ds=ds, action='containers_remove', logger=lgr) section = 'datalad.containers.{}'.format(name) imagecfg = '{}.image'.format(section) to_save = [] if remove_image and imagecfg in ds.config: imagepath = ds.config.get(imagecfg) if op.lexists(op.join(ds.path, imagepath)): for r in ds.remove( path=imagepath, # XXX shortcomming: this is the only way to say: # don't drop check=False, # config setting might be outdated and image no longer # there -> no reason to fail, just report on_failure='ignore', save=False): yield r to_save.append(imagepath) if section in ds.config.sections(): ds.config.remove_section( section, where='dataset', reload=True) res['status'] = 'ok' to_save.append(op.join('.datalad', 'config')) else: res['status'] = 'notneeded' if to_save: for r in ds.save( path=to_save, message='[DATALAD] Remove container {}'.format(name)): yield r yield res datalad-container-1.1.2/datalad_container/containers_run.py000066400000000000000000000127521400061037100241230ustar00rootroot00000000000000"""Drop-in replacement for `datalad run` for command execution in a container""" __docformat__ = 'restructuredtext' import logging import os.path as op from datalad.interface.base import Interface from datalad.interface.base import build_doc from datalad.support.param import Parameter from datalad.distribution.dataset import datasetmethod from datalad.distribution.dataset import require_dataset from datalad.interface.utils import eval_results from datalad.interface.results import get_status_dict from datalad.core.local.run import ( Run, get_command_pwds, normalize_command, run_command, ) from datalad_container.find_container import find_container_ lgr = logging.getLogger("datalad.containers.containers_run") # Environment variable to be set during execution to possibly # inform underlying shim scripts about the original name of # the container CONTAINER_NAME_ENVVAR = 'DATALAD_CONTAINER_NAME' _run_params = dict( Run._params_, container_name=Parameter( args=('-n', '--container-name',), metavar="NAME", doc="""Specify the name of or a path to a known container to use for execution, in case multiple containers are configured."""), ) @build_doc # all commands must be derived from Interface class ContainersRun(Interface): # first docstring line is used a short description in the cmdline help # the rest is put in the verbose help and manpage """Drop-in replacement of 'run' to perform containerized command execution Container(s) need to be configured beforehand (see containers-add). If no container is specified and only one container is configured in the current dataset, it will be selected automatically. If more than one container is registered in the current dataset or to access containers from subdatasets, the container has to be specified. 
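
    A container provided by a subdataset is addressed by the slash-separated
    name that containers-list reports when called recursively, for example
    'subds/containername'.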
A command is generated based on the input arguments such that the container image itself will be recorded as an input dependency of the command execution in the `run` record in the git history. During execution the environment variable {name_envvar} is set to the name of the used container. """ _docs_ = dict( name_envvar=CONTAINER_NAME_ENVVAR ) _params_ = _run_params @staticmethod @datasetmethod(name='containers_run') @eval_results def __call__(cmd, container_name=None, dataset=None, inputs=None, outputs=None, message=None, expand=None, explicit=False, sidecar=None): from unittest.mock import patch # delayed, since takes long (~600ms for yoh) pwd, _ = get_command_pwds(dataset) ds = require_dataset(dataset, check_installed=True, purpose='run a containerized command execution') container = None for res in find_container_(ds, container_name): if res.get("action") == "containers": container = res else: yield res assert container, "bug: container should always be defined here" image_path = op.relpath(container["path"], pwd) # container record would contain path to the (sub)dataset containing # it. If not - take current dataset, as it must be coming from it image_dspath = op.relpath(container.get('parentds', ds.path), pwd) # sure we could check whether the container image is present, # but it might live in a subdataset that isn't even installed yet # let's leave all this business to `get` that is called by `run` cmd = normalize_command(cmd) # expand the command with container execution if 'cmdexec' in container: callspec = container['cmdexec'] # Temporary kludge to give a more helpful message if callspec.startswith("["): import simplejson try: simplejson.loads(callspec) except simplejson.errors.JSONDecodeError: pass # Never mind, false positive. else: raise ValueError( 'cmdexe {!r} is in an old, unsupported format. ' 'Convert it to a plain string.'.format(callspec)) try: cmd_kwargs = dict( img=image_path, cmd=cmd, img_dspath=image_dspath, ) cmd = callspec.format(**cmd_kwargs) except KeyError as exc: yield get_status_dict( 'run', ds=ds, status='error', message=( 'Unrecognized cmdexec placeholder: %s. ' 'See containers-add for information on known ones: %s', exc, ", ".join(cmd_kwargs))) return else: # just prepend and pray cmd = container['path'] + ' ' + cmd with patch.dict('os.environ', {CONTAINER_NAME_ENVVAR: container['name']}): # fire! for r in run_command( cmd=cmd, dataset=dataset or (ds if ds.path == pwd else None), inputs=inputs, extra_inputs=[image_path], outputs=outputs, message=message, expand=expand, explicit=explicit, sidecar=sidecar): yield r datalad-container-1.1.2/datalad_container/definitions.py000066400000000000000000000006171400061037100234020ustar00rootroot00000000000000import os.path as op from datalad.support.constraints import EnsureStr definitions = { 'datalad.containers.location': { 'ui': ('question', { 'title': 'Container location', 'text': 'path within the dataset where to store containers'}), 'default': op.join(".datalad", "environments"), 'type': EnsureStr(), 'destination': 'dataset' }, } datalad-container-1.1.2/datalad_container/find_container.py000066400000000000000000000151261400061037100240520ustar00rootroot00000000000000"""Support module for selecting a container from a dataset and its subdatasets. 
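
The selection logic, in order: a name containing a slash is resolved against
the corresponding subdataset (installing it on demand); with no name given, a
sole configured container is picked automatically; otherwise the name is
looked up among configured containers, and finally interpreted as an image
path.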
""" import logging from datalad.distribution.dataset import Dataset from datalad.utils import Path from datalad_container.containers_list import ContainersList lgr = logging.getLogger("datalad_container.find_container") def _list_containers(dataset, recursive, contains=None): return {c['name']: c for c in ContainersList.__call__(dataset=dataset, recursive=recursive, contains=contains, return_type='generator', on_failure='ignore', result_filter=None, result_renderer=None, result_xfm=None)} def _get_subdataset_container(ds, container_name): """Try to get subdataset container matching `container_name`. This is the primary function tried by find_container_() when the container name looks like it is from a subdataset (i.e. has a slash). Parameters ---------- ds : Dataset container_name : str Yields ------- Result records for any installed subdatasets and a containers-list record for the container, if any, found for `container_name`. """ name_parts = container_name.split('/') subds_names = name_parts[:-1] if Dataset(ds.pathobj / Path(*subds_names)).is_installed(): # This avoids unnecessary work in the common case, but it can result in # not installing the necessary subdatasets in the rare case that chain # of submodule names point to a subdataset path that is installed while # the actual submodule paths contains uninstalled parts. lgr.debug( "Subdataset for %s is probably installed. Skipping install logic", container_name) return curds = ds for name in subds_names: for sub in curds.subdatasets(return_type='generator'): if sub['gitmodule_name'] == name: path = sub['path'] yield from curds.get( path, get_data=False, on_failure='ignore', return_type='generator') curds = Dataset(path) break else: # There wasn't a submodule name chain that matched container_name. # Aside from an invalid name, the main case where this can happen # is when an image path is given for the container name. lgr.debug("Did not find submodule name %s in %s", name, curds) return containers = _list_containers(dataset=ds, recursive=True, contains=curds.path) res = containers.get(container_name) if res: yield res # Fallback functions tried by find_container_. These are called with the # current dataset, the container name, and a dictionary mapping the container # name to a record (as returned by containers-list). def _get_the_one_and_only(_, name, containers): if name is None: if len(containers) == 1: # no questions asked, take container and run return list(containers.values())[0] else: raise ValueError("Must explicitly specify container" " (known containers are: {})" .format(', '.join(containers))) def _get_container_by_name(_, name, containers): return containers.get(name) def _get_container_by_path(ds, name, containers): from datalad.distribution.dataset import resolve_path # Note: since datalad0.12.0rc6 resolve_path returns a Path object here, # which then fails to equal c['path'] below as this is taken from # config as a string container_path = str(resolve_path(name, ds)) container = [c for c in containers.values() if c['path'] == container_path] if len(container) == 1: return container[0] # Entry points def find_container_(ds, container_name=None): """Find the container in dataset `ds` specified by `container_name`. Parameters ---------- ds : Dataset Dataset to query. container_name : str or None Name in the form of how `containers-list -d ds -r` would report it (e.g., "s0/s1/cname"). Yields ------ The container record, as returned by containers-list. 
Before that record, it may yield records of other action types, in particular "install" records for subdatasets that were installed to try to get access to a subdataset container. Raises ------ ValueError if a uniquely matching container cannot be found. """ recurse = container_name and "/" in container_name if recurse: for res in _get_subdataset_container(ds, container_name): # Before the container record, the results may include install # records. Don't relay "notneeded" results to avoid noise. Also, # don't propagate install failures, which may be due to an image # path being given or a non-existent container, both cases that are # handled downstream. if res.get("status") == "ok": yield res if res.get("action") == "containers": return containers = _list_containers(dataset=ds, recursive=recurse) if not containers: raise ValueError("No known containers. Use containers-add") fns = [ _get_the_one_and_only, _get_container_by_name, _get_container_by_path, ] for fn in fns: lgr.debug("Trying to find container with %s", fn) container = fn(ds, container_name, containers) if container: yield container return raise ValueError( 'Container selection impossible: not specified, ambiguous ' 'or unknown (known containers are: {})' .format(', '.join(containers)) ) def find_container(ds, container_name=None): """Like `find_container_`, but just return the container record. """ # Note: This function was once used directly by containers_run(), but that # now uses the find_container_() generator function directly. Now # find_container() exists for compatibility with third-party tools # (reproman) and the test_find.py tests. for res in find_container_(ds, container_name): if res.get("action") == "containers": return res raise RuntimeError( "bug: find_container_() should return container or raise exception") datalad-container-1.1.2/datalad_container/tests/000077500000000000000000000000001400061037100216535ustar00rootroot00000000000000datalad-container-1.1.2/datalad_container/tests/__init__.py000066400000000000000000000000001400061037100237520ustar00rootroot00000000000000datalad-container-1.1.2/datalad_container/tests/test_add.py000066400000000000000000000026651400061037100240250ustar00rootroot00000000000000from datalad.api import Dataset from datalad.api import clone from datalad.consts import DATALAD_SPECIAL_REMOTE from datalad.customremotes.base import init_datalad_remote from datalad.tests.utils import assert_false from datalad.tests.utils import assert_in from datalad.tests.utils import assert_not_in from datalad.tests.utils import with_tempfile from datalad.utils import Path from datalad_container.containers_add import _ensure_datalad_remote # NOTE: At the moment, testing of the containers-add itself happens implicitly # via use in other tests. 
@with_tempfile def test_ensure_datalad_remote_init_and_enable_needed(path): ds = Dataset(path).create(force=True) repo = ds.repo assert_false(repo.get_remotes()) _ensure_datalad_remote(repo) assert_in("datalad", repo.get_remotes()) @with_tempfile def check_ensure_datalad_remote_maybe_enable(autoenable, path): path = Path(path) ds_a = Dataset(path / "a").create(force=True) init_datalad_remote(ds_a.repo, DATALAD_SPECIAL_REMOTE, autoenable=autoenable) ds_b = clone(source=ds_a.path, path=path / "b") repo = ds_b.repo if not autoenable: assert_not_in("datalad", repo.get_remotes()) _ensure_datalad_remote(repo) assert_in("datalad", repo.get_remotes()) def test_ensure_datalad_remote_maybe_enable(): yield check_ensure_datalad_remote_maybe_enable, False yield check_ensure_datalad_remote_maybe_enable, True datalad-container-1.1.2/datalad_container/tests/test_containers.py000066400000000000000000000244471400061037100254440ustar00rootroot00000000000000import os.path as op from datalad.api import Dataset from datalad.api import install from datalad.api import containers_add from datalad.api import containers_remove from datalad.api import containers_list from datalad.utils import swallow_outputs from datalad.tests.utils import SkipTest from datalad.tests.utils import ok_clean_git from datalad.tests.utils import with_tree from datalad.tests.utils import ok_ from datalad.tests.utils import ok_file_has_content from datalad.tests.utils import assert_equal from datalad.tests.utils import assert_status from datalad.tests.utils import assert_raises from datalad.tests.utils import assert_result_count from datalad.tests.utils import assert_in from datalad.tests.utils import assert_in_results from datalad.tests.utils import assert_not_in from datalad.tests.utils import assert_re_in from datalad.tests.utils import with_tempfile from datalad.tests.utils import serve_path_via_http from datalad.support.network import get_local_file_url from datalad_container.tests.utils import add_pyscript_image @with_tempfile def test_add_noop(path): ds = Dataset(path).create() ok_clean_git(ds.path) assert_raises(TypeError, ds.containers_add) # fails when there is no image assert_status('error', ds.containers_add('name', on_failure='ignore')) # no config change ok_clean_git(ds.path) # place a dummy "image" file with open(op.join(ds.path, 'dummy'), 'w') as f: f.write('some') ds.save('dummy') ok_clean_git(ds.path) # config will be added, as long as there is a file, even when URL access # fails res = ds.containers_add( 'broken', url='bogus-protocol://bogus-server', image='dummy', on_failure='ignore' ) assert_status('ok', res) assert_result_count(res, 1, action='save', status='ok') @with_tempfile @with_tree(tree={"foo.img": "doesn't matter 0", "bar.img": "doesn't matter 1"}) def test_add_local_path(path, local_file): ds = Dataset(path).create() res = ds.containers_add(name="foobert", url=op.join(local_file, "foo.img")) foo_target = op.join(path, ".datalad", "environments", "foobert", "image") assert_result_count(res, 1, status="ok", type="file", path=foo_target, action="containers_add") # We've just copied and added the file. assert_not_in(ds.repo.WEB_UUID, ds.repo.whereis(foo_target)) # We can force the URL to be added. (Note: This works because datalad # overrides 'annex.security.allowed-url-schemes' in its tests.) 
ds.containers_add(name="barry", url=get_local_file_url(op.join(local_file, "bar.img"))) bar_target = op.join(path, ".datalad", "environments", "barry", "image") assert_in(ds.repo.WEB_UUID, ds.repo.whereis(bar_target)) RAW_KWDS = dict(return_type='list', result_filter=None, result_renderer=None, result_xfm=None) @with_tempfile @with_tree(tree={'some_container.img': "doesn't matter"}) @serve_path_via_http def test_container_files(ds_path, local_file, url): # setup things to add # # Note: Since "adding" as a container doesn't actually call anything or use # the container in some way, but simply registers it, for testing any file # is sufficient. local_file = get_local_file_url(op.join(local_file, 'some_container.img')) # prepare dataset: ds = Dataset(ds_path).create() # non-default location: ds.config.add("datalad.containers.location", value=op.join(".datalad", "test-environments"), where='dataset') ds.save(message="Configure container mountpoint") # no containers yet: res = ds.containers_list(**RAW_KWDS) assert_result_count(res, 0) # add first "image": must end up at the configured default location target_path = op.join( ds.path, ".datalad", "test-environments", "first", "image") res = ds.containers_add(name="first", url=local_file) ok_clean_git(ds.repo) assert_result_count(res, 1, status="ok", type="file", path=target_path, action="containers_add") ok_(op.lexists(target_path)) res = ds.containers_list(**RAW_KWDS) assert_result_count(res, 1) assert_result_count( res, 1, name='first', type='file', action='containers', status='ok', path=target_path) # and kill it again # but needs name assert_raises(TypeError, ds.containers_remove) res = ds.containers_remove('first', remove_image=True) assert_status('ok', res) assert_result_count(ds.containers_list(**RAW_KWDS), 0) # image removed assert(not op.lexists(target_path)) @with_tempfile @with_tree(tree={'foo.img': "foo", 'bar.img': "bar"}) @serve_path_via_http def test_container_update(ds_path, local_file, url): url_foo = get_local_file_url(op.join(local_file, 'foo.img')) url_bar = get_local_file_url(op.join(local_file, 'bar.img')) img = op.join(".datalad", "environments", "foo", "image") ds = Dataset(ds_path).create() ds.containers_add(name="foo", call_fmt="call-fmt1", url=url_foo) # Abort without --update flag. res = ds.containers_add(name="foo", on_failure="ignore") assert_result_count(res, 1, action="containers_add", status="impossible") # Abort if nothing to update is specified. res = ds.containers_add(name="foo", update=True, on_failure="ignore") assert_result_count(res, 1, action="containers_add", status="impossible", message="No values to update specified") # Update call format. ds.containers_add(name="foo", update=True, call_fmt="call-fmt2") assert_equal(ds.config.get("datalad.containers.foo.cmdexec"), "call-fmt2") ok_file_has_content(op.join(ds.path, img), "foo") # Update URL/image. ds.drop(img) # Make sure it works even with absent content. 
res = ds.containers_add(name="foo", update=True, url=url_bar) assert_result_count(res, 1, action="remove", status="ok", path=img) assert_result_count(res, 1, action="save", status="ok") ok_file_has_content(op.join(ds.path, img), "bar") # Test commit message # In the above case it was updating existing image so should have "Update " get_commit_msg = lambda *args: ds.repo.format_commit('%B') assert_in("Update ", get_commit_msg()) # If we add a new image with update=True should say Configure res = ds.containers_add(name="foo2", update=True, url=url_bar) assert_in("Configure ", get_commit_msg()) @with_tempfile @with_tempfile @with_tree(tree={'some_container.img': "doesn't matter"}) def test_container_from_subdataset(ds_path, src_subds_path, local_file): # prepare a to-be subdataset with a registered container src_subds = Dataset(src_subds_path).create() src_subds.containers_add(name="first", url=get_local_file_url(op.join(local_file, 'some_container.img')) ) # add it as subdataset to a super ds: ds = Dataset(ds_path).create() subds = ds.install("sub", source=src_subds_path) # add it again one level down to see actual recursion: subds.install("subsub", source=src_subds_path) # We come up empty without recursive: res = ds.containers_list(recursive=False, **RAW_KWDS) assert_result_count(res, 0) # query available containers from within super: res = ds.containers_list(recursive=True, **RAW_KWDS) assert_result_count(res, 2) assert_in_results(res, action="containers", refds=ds.path) # default location within the subdataset: target_path = op.join(subds.path, '.datalad', 'environments', 'first', 'image') assert_result_count( res, 1, name='sub/first', type='file', action='containers', status='ok', path=target_path, parentds=subds.path ) # not installed subdataset doesn't pose an issue: sub2 = ds.create("sub2") assert_result_count(ds.subdatasets(), 2, type="dataset") ds.uninstall("sub2") from datalad.tests.utils import assert_false assert_false(sub2.is_installed()) # same results as before, not crashing or somehow confused by a not present # subds: res = ds.containers_list(recursive=True, **RAW_KWDS) assert_result_count(res, 2) assert_result_count( res, 1, name='sub/first', type='file', action='containers', status='ok', path=target_path, parentds=subds.path ) # The default renderer includes the image names. with swallow_outputs() as out: ds.containers_list(recursive=True) lines = out.out.splitlines() assert_re_in("sub/first", lines) assert_re_in("sub/subsub/first", lines) # But we are careful not to render partial names from subdataset traversals # (i.e. we recurse with containers_list(..., result_renderer=None)). 
with assert_raises(AssertionError): assert_re_in("subsub/first", lines) @with_tempfile def test_list_contains(path): ds = Dataset(path).create() subds_a = ds.create("a") subds_b = ds.create("b") subds_a_c = subds_a.create("c") add_pyscript_image(subds_a_c, "in-c", "img") add_pyscript_image(subds_a, "in-a", "img") add_pyscript_image(subds_b, "in-b", "img") add_pyscript_image(ds, "in-top", "img") ds.save(recursive=True) assert_result_count(ds.containers_list(recursive=True, **RAW_KWDS), 4) assert_result_count( ds.containers_list(contains=["nowhere"], recursive=True, **RAW_KWDS), 1, name="in-top", action='containers') res = ds.containers_list(contains=[subds_a.path], recursive=True, **RAW_KWDS) assert_result_count(res, 3) assert_in_results(res, name="in-top") assert_in_results(res, name="a/in-a") assert_in_results(res, name="a/c/in-c") res = ds.containers_list(contains=[subds_a_c.path], recursive=True, **RAW_KWDS) assert_result_count(res, 3) assert_in_results(res, name="in-top") assert_in_results(res, name="a/in-a") assert_in_results(res, name="a/c/in-c") res = ds.containers_list(contains=[subds_b.path], recursive=True, **RAW_KWDS) assert_result_count(res, 2) assert_in_results(res, name="in-top") assert_in_results(res, name="b/in-b") datalad-container-1.1.2/datalad_container/tests/test_find.py000066400000000000000000000023171400061037100242070ustar00rootroot00000000000000import os.path as op from datalad.api import Dataset from datalad.tests.utils import ( ok_clean_git, assert_in, assert_is_instance, assert_in_results, assert_result_count, assert_raises ) from datalad.tests.utils import with_tree from datalad_container.find_container import find_container @with_tree(tree={"sub": {"i.img": "doesn't matter"}}) def test_find_containers(path): ds = Dataset(path).create(force=True) ds.save(path=[op.join('sub', 'i.img')], message="dummy container") ds.containers_add("i", image=op.join('sub', 'i.img')) ok_clean_git(path) # find the only one res = find_container(ds) assert_is_instance(res, dict) assert_result_count([res], 1, status="ok", path=op.join(ds.path, "sub", "i.img")) # find by name res = find_container(ds, "i") assert_is_instance(res, dict) assert_result_count([res], 1, status="ok", path=op.join(ds.path, "sub", "i.img")) # find by path res = find_container(ds, op.join("sub", "i.img")) assert_is_instance(res, dict) assert_result_count([res], 1, status="ok", path=op.join(ds.path, "sub", "i.img")) # don't find another thing assert_raises(ValueError, find_container, ds, "nothere") datalad-container-1.1.2/datalad_container/tests/test_register.py000066400000000000000000000002731400061037100251120ustar00rootroot00000000000000from datalad.tests.utils import assert_result_count def test_register(): import datalad.api as da assert hasattr(da, 'containers_list') assert hasattr(da, 'containers_add') datalad-container-1.1.2/datalad_container/tests/test_run.py000066400000000000000000000260661400061037100241020ustar00rootroot00000000000000import os import os.path as op from six import text_type from datalad.api import Dataset from datalad.api import clone from datalad.api import create from datalad.api import containers_add from datalad.api import containers_run from datalad.api import containers_list from datalad.utils import Path from datalad.tests.utils import ok_ from datalad.tests.utils import ok_clean_git from datalad.tests.utils import assert_false from datalad.tests.utils import assert_not_in_results from datalad.tests.utils import assert_in from datalad.tests.utils import assert_result_count from 
datalad.tests.utils import assert_raises from datalad.tests.utils import ok_file_has_content from datalad.tests.utils import with_tempfile from datalad.tests.utils import with_tree from datalad.tests.utils import skip_if_no_network from datalad.tests.utils import SkipTest from datalad.utils import ( chpwd, on_windows, ) from datalad.support.network import get_local_file_url from datalad.cmd import ( StdOutCapture, WitlessRunner, ) from datalad_container.tests.utils import add_pyscript_image testimg_url = 'shub://datalad/datalad-container:testhelper' @with_tree(tree={"dummy0.img": "doesn't matter 0", "dummy1.img": "doesn't matter 1"}) def test_run_mispecified(path): ds = Dataset(path).create(force=True) ds.save(path=["dummy0.img", "dummy1.img"]) ok_clean_git(path) # Abort if no containers exist. with assert_raises(ValueError) as cm: ds.containers_run("doesn't matter") assert_in("No known containers", text_type(cm.exception)) # Abort if more than one container exists but no container name is # specified. ds.containers_add("d0", image="dummy0.img") ds.containers_add("d1", image="dummy0.img") with assert_raises(ValueError) as cm: ds.containers_run("doesn't matter") assert_in("explicitly specify container", text_type(cm.exception)) # Abort if unknown container is specified. with assert_raises(ValueError) as cm: ds.containers_run("doesn't matter", container_name="ghost") assert_in("Container selection impossible", text_type(cm.exception)) @with_tree(tree={"i.img": "doesn't matter"}) def test_run_unknown_cmdexec_placeholder(path): ds = Dataset(path).create(force=True) ds.containers_add("i", image="i.img", call_fmt="{youdontknowme}") assert_result_count( ds.containers_run("doesn't matter", on_failure="ignore"), 1, path=ds.path, action="run", status="error") @skip_if_no_network @with_tempfile @with_tempfile def test_container_files(path, super_path): raise SkipTest('SingularityHub is gone for now') ds = Dataset(path).create() cmd = ['dir'] if on_windows else ['ls'] # plug in a proper singularity image ds.containers_add( 'mycontainer', url=testimg_url, image='righthere', # the next one is auto-guessed #call_fmt='singularity exec {img} {cmd}' ) assert_result_count( ds.containers_list(), 1, path=op.join(ds.path, 'righthere'), name='mycontainer') ok_clean_git(path) def assert_no_change(res, path): # this command changed nothing # # Avoid specifying the action because it will change from "add" to # "save" in DataLad v0.12. 
assert_result_count( res, 1, status='notneeded', path=path, type='dataset') # now we can run stuff in the container # and because there is just one, we don't even have to name the container res = ds.containers_run(cmd) # container becomes an 'input' for `run` -> get request, but "notneeded" assert_result_count( res, 1, action='get', status='notneeded', path=op.join(ds.path, 'righthere'), type='file') assert_no_change(res, ds.path) # same thing as we specify the container by its name: res = ds.containers_run(cmd, container_name='mycontainer') # container becomes an 'input' for `run` -> get request, but "notneeded" assert_result_count( res, 1, action='get', status='notneeded', path=op.join(ds.path, 'righthere'), type='file') assert_no_change(res, ds.path) # we can also specify the container by its path: res = ds.containers_run(cmd, container_name=op.join(ds.path, 'righthere')) # container becomes an 'input' for `run` -> get request, but "notneeded" assert_result_count( res, 1, action='get', status='notneeded', path=op.join(ds.path, 'righthere'), type='file') assert_no_change(res, ds.path) # Now, test the same thing, but with this dataset being a subdataset of # another one: super_ds = Dataset(super_path).create() super_ds.install("sub", source=path) # When running, we don't discover containers in subdatasets with assert_raises(ValueError) as cm: super_ds.containers_run(cmd) assert_in("No known containers", text_type(cm.exception)) # ... unless we need to specify the name res = super_ds.containers_run(cmd, container_name="sub/mycontainer") # container becomes an 'input' for `run` -> get request (needed this time) assert_result_count( res, 1, action='get', status='ok', path=op.join(super_ds.path, 'sub', 'righthere'), type='file') assert_no_change(res, super_ds.path) @with_tempfile @with_tree(tree={'some_container.img': "doesn't matter"}) def test_custom_call_fmt(path, local_file): ds = Dataset(path).create() subds = ds.create('sub') # plug in a proper singularity image subds.containers_add( 'mycontainer', url=get_local_file_url(op.join(local_file, 'some_container.img')), image='righthere', call_fmt='echo image={img} cmd={cmd} img_dspath={img_dspath} ' # and environment variable being set/propagated by default 'name=$DATALAD_CONTAINER_NAME' ) ds.save() # record the effect in super-dataset # Running should work fine either withing sub or within super out = WitlessRunner(cwd=subds.path).run( ['datalad', 'containers-run', '-n', 'mycontainer', 'XXX'], protocol=StdOutCapture) assert_in('image=righthere cmd=XXX img_dspath=. name=mycontainer', out['stdout']) out = WitlessRunner(cwd=ds.path).run( ['datalad', 'containers-run', '-n', 'sub/mycontainer', 'XXX'], protocol=StdOutCapture) assert_in('image=sub/righthere cmd=XXX img_dspath=sub', out['stdout']) # Test within subdirectory of the super-dataset subdir = op.join(ds.path, 'subdir') os.mkdir(subdir) out = WitlessRunner(cwd=subdir).run( ['datalad', 'containers-run', '-n', 'sub/mycontainer', 'XXX'], protocol=StdOutCapture) assert_in('image=../sub/righthere cmd=XXX img_dspath=../sub', out['stdout']) @skip_if_no_network @with_tree(tree={"subdir": {"in": "innards"}}) def test_run_no_explicit_dataset(path): raise SkipTest('SingularityHub is gone for now') ds = Dataset(path).create(force=True) ds.save() ds.containers_add("deb", url=testimg_url, call_fmt="singularity exec {img} {cmd}") # When no explicit dataset is given, paths are interpreted as relative to # the current working directory. # From top-level directory. 
with chpwd(path): containers_run("cat {inputs[0]} {inputs[0]} >doubled", inputs=[op.join("subdir", "in")], outputs=["doubled"]) ok_file_has_content(op.join(path, "doubled"), "innardsinnards") # From under a subdirectory. subdir = op.join(ds.path, "subdir") with chpwd(subdir): containers_run("cat {inputs[0]} {inputs[0]} >doubled", inputs=["in"], outputs=["doubled"]) ok_file_has_content(op.join(subdir, "doubled"), "innardsinnards") @with_tempfile def test_run_subdataset_install(path): path = Path(path) ds_src = Dataset(path / "src").create() # Repository setup # # . # |-- a/ # | |-- a2/ # | | `-- img # | `-- img # |-- b/ / module name: b-name / # | `-- b2/ # | `-- img # |-- c/ # | `-- c2/ # | `-- img # `-- d/ / module name: d-name / # `-- d2/ # `-- img ds_src_a = ds_src.create("a") ds_src_a2 = ds_src_a.create("a2") ds_src_b = Dataset(ds_src.pathobj / "b").create() ds_src_b2 = ds_src_b.create("b2") ds_src_c = ds_src.create("c") ds_src_c2 = ds_src_c.create("c2") ds_src_d = Dataset(ds_src.pathobj / "d").create() ds_src_d2 = ds_src_d.create("d2") ds_src.repo.add_submodule("b", name="b-name") ds_src.repo.add_submodule("d", name="d-name") ds_src.save() add_pyscript_image(ds_src_a, "in-a", "img") add_pyscript_image(ds_src_a2, "in-a2", "img") add_pyscript_image(ds_src_b2, "in-b2", "img") add_pyscript_image(ds_src_c2, "in-c2", "img") add_pyscript_image(ds_src_d2, "in-d2", "img") ds_src.save(recursive=True) ds_dest = clone(ds_src.path, str(path / "dest")) ds_dest_a2 = Dataset(ds_dest.pathobj / "a" / "a2") ds_dest_b2 = Dataset(ds_dest.pathobj / "b" / "b2") ds_dest_c2 = Dataset(ds_dest.pathobj / "c" / "c2") ds_dest_d2 = Dataset(ds_dest.pathobj / "d" / "d2") assert_false(ds_dest_a2.is_installed()) assert_false(ds_dest_b2.is_installed()) assert_false(ds_dest_c2.is_installed()) assert_false(ds_dest_d2.is_installed()) # Needed subdatasets are installed if container name is given... res = ds_dest.containers_run(["arg"], container_name="a/a2/in-a2") assert_result_count( res, 1, action="install", status="ok", path=ds_dest_a2.path) assert_result_count( res, 1, action="get", status="ok", path=str(ds_dest_a2.pathobj / "img")) ok_(ds_dest_a2.is_installed()) # ... even if the name and path do not match. res = ds_dest.containers_run(["arg"], container_name="b-name/b2/in-b2") assert_result_count( res, 1, action="install", status="ok", path=ds_dest_b2.path) assert_result_count( res, 1, action="get", status="ok", path=str(ds_dest_b2.pathobj / "img")) ok_(ds_dest_b2.is_installed()) # Subdatasets will also be installed if given an image path... res = ds_dest.containers_run(["arg"], container_name=str(Path("c/c2/img"))) assert_result_count( res, 1, action="install", status="ok", path=ds_dest_c2.path) assert_result_count( res, 1, action="get", status="ok", path=str(ds_dest_c2.pathobj / "img")) ok_(ds_dest_c2.is_installed()) # ... unless the module name chain doesn't match the subdataset path. In # that case, the caller needs to install the subdatasets beforehand. with assert_raises(ValueError): ds_dest.containers_run(["arg"], container_name=str(Path("d/d2/img"))) ds_dest.get(ds_dest_d2.path, recursive=True, get_data=False) ds_dest.containers_run(["arg"], container_name=str(Path("d/d2/img"))) # There's no install record if subdataset is already present. 
res = ds_dest.containers_run(["arg"], container_name="a/a2/in-a2") assert_not_in_results(res, action="install") datalad-container-1.1.2/datalad_container/tests/test_schemes.py000066400000000000000000000021111400061037100247060ustar00rootroot00000000000000import os.path as op from datalad.api import Dataset from datalad.api import create from datalad.api import containers_add from datalad.api import containers_list from datalad.api import containers_run from datalad.cmd import ( StdOutCapture, WitlessRunner, ) from datalad.tests.utils import ok_clean_git from datalad.tests.utils import ok_file_has_content from datalad.tests.utils import assert_result_count from datalad.tests.utils import with_tempfile from datalad.tests.utils import skip_if_no_network @skip_if_no_network @with_tempfile def test_docker(path): # Singularity's "docker://" scheme. ds = Dataset(path).create() ds.containers_add( "bb", url=("docker://busybox@sha256:" "7964ad52e396a6e045c39b5a44438424ac52e12e4d5a25d94895f2058cb863a0")) img = op.join(ds.path, ".datalad", "environments", "bb", "image") assert_result_count(ds.containers_list(), 1, path=img, name="bb") ok_clean_git(path) WitlessRunner(cwd=ds.path).run( ["datalad", "containers-run", "ls", "/singularity"], protocol=StdOutCapture) datalad-container-1.1.2/datalad_container/tests/utils.py000066400000000000000000000024051400061037100233660ustar00rootroot00000000000000import os import os.path as op import sys from datalad.api import containers_add from datalad.utils import chpwd from datalad.tests.utils import SkipTest from datalad.interface.common_cfg import dirs as appdirs def add_pyscript_image(ds, container_name, file_name): """Set up simple Python script as image. Parameters ---------- ds : Dataset container_name : str Add container with this name. file_name : str Write script to this file and use it as the image. """ ds_file = (ds.pathobj / file_name) ds_file.write_text("import sys\nprint(sys.argv)\n") ds.save(ds_file, message="Add dummy container") containers_add(container_name, image=str(ds_file), call_fmt=sys.executable + " {img} {cmd}", dataset=ds) def get_singularity_image(): imgname = 'datalad_container_singularity_testimg.simg' targetpath = op.join( appdirs.user_cache_dir, imgname) if op.exists(targetpath): return targetpath with chpwd(appdirs.user_cache_dir): os.system( 'singularity pull --name "{}" shub://datalad/datalad-container:testhelper'.format( imgname)) if op.exists(targetpath): return targetpath raise SkipTest datalad-container-1.1.2/datalad_container/version.py000066400000000000000000000000261400061037100225460ustar00rootroot00000000000000__version__ = '1.1.2' datalad-container-1.1.2/docs/000077500000000000000000000000001400061037100160055ustar00rootroot00000000000000datalad-container-1.1.2/docs/Makefile000066400000000000000000000165251400061037100174560ustar00rootroot00000000000000# Makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = -W SPHINXBUILD = sphinx-build PAPER = BUILDDIR = build # User-friendly check for sphinx-build ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) endif # Internal variables. 
PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source # the i18n builder cannot share the environment and doctrees with the others I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest coverage gettext help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " singlehtml to make a single large HTML file" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " applehelp to make an Apple Help Book" @echo " devhelp to make HTML files and a Devhelp project" @echo " epub to make an epub" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " latexpdf to make LaTeX files and run them through pdflatex" @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" @echo " text to make text files" @echo " man to make manual pages" @echo " texinfo to make Texinfo files" @echo " info to make Texinfo files and run them through makeinfo" @echo " gettext to make PO message catalogs" @echo " changes to make an overview of all changed/added/deprecated items" @echo " xml to make Docutils-native XML files" @echo " pseudoxml to make pseudoxml-XML files for display purposes" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" @echo " coverage to run coverage check of the documentation (if enabled)" clean: rm -rf $(BUILDDIR)/* source/generated source/_extras/schema.json html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." singlehtml: $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml @echo @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/datalad_container.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/datalad_container.qhc" applehelp: $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp @echo @echo "Build finished. The help book is in $(BUILDDIR)/applehelp." @echo "N.B. You won't be able to view it unless you put it in" \ "~/Library/Documentation/Help or install it in your application" \ "bundle." 
devhelp: $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp @echo @echo "Build finished." @echo "To view the help file:" @echo "# mkdir -p $$HOME/.local/share/devhelp/datalad_container" @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/datalad_container" @echo "# devhelp" epub: $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub @echo @echo "Build finished. The epub file is in $(BUILDDIR)/epub." latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Run \`make' in that directory to run these through (pdf)latex" \ "(use \`make latexpdf' here to do that automatically)." latexpdf: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through pdflatex..." $(MAKE) -C $(BUILDDIR)/latex all-pdf @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." latexpdfja: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through platex and dvipdfmx..." $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." text: $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text @echo @echo "Build finished. The text files are in $(BUILDDIR)/text." man: $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man @echo @echo "Build finished. The manual pages are in $(BUILDDIR)/man." texinfo: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." @echo "Run \`make' in that directory to run these through makeinfo" \ "(use \`make info' here to do that automatically)." info: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo "Running Texinfo files through makeinfo..." make -C $(BUILDDIR)/texinfo info @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." gettext: $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale @echo @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." coverage: $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage @echo "Testing of coverage in the sources finished, look at the " \ "results in $(BUILDDIR)/coverage/python.txt." xml: $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml @echo @echo "Build finished. The XML files are in $(BUILDDIR)/xml." pseudoxml: $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml @echo @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 
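# Usage sketch (an assumption, not part of the generated Makefile: it presumes
# Sphinx and the documentation requirements, e.g. the sphinx_rtd_theme package
# referenced in docs/source/conf.py, are installed):
#
#   make html        # build the HTML docs into build/html
#   make linkcheck   # check external links for integrity
#   make clean html  # start from a clean build tree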
datalad-container-1.1.2/docs/examples/000077500000000000000000000000001400061037100176235ustar00rootroot00000000000000datalad-container-1.1.2/docs/examples/basic_demo.sh000066400000000000000000000034741400061037100222540ustar00rootroot00000000000000#!/bin/sh # SKIP_IN_V6 set -e OLD_PWD=$PWD # BOILERPLATE #% EXAMPLE START # # Getting started # *************** # # The Datalad container extension provides a few commands to register # containers with a dataset and use them for execution of arbitray # commands. In order to get going quickly, we only need a dataset # and a ready-made container. For this demo we will start with a # fresh dataset and a demo container from Singularity-Hub. #% # fresh dataset datalad create demo cd demo # register container straight from Singularity-Hub datalad containers-add my1st --url shub://datalad/datalad-container:testhelper #% # This will download the container image, add it to the dataset, and record # basic information on the container under its name "my1st" in the dataset's # configuration at ``.datalad/config``. # # Now we are all set to use this container for command execution. All it needs # is to swap the command `datalad run` with `datalad containers-run`. The # command is automatically executed in the registered container and the results # (if there are any) will be added to the dataset: #% datalad containers-run cp /etc/debian_version proof.txt #% # If there is more than one container registered, the desired container needs # to be specifed via the ``--name`` option. Containers do not need to come from # Singularity-Hub, but can be local images too. Via the ``containers-add # --call-fmt`` option it is possible to configure how exactly a container # is being executed, or which local directories shall be made available to # a container. # # At the moment there is built-in support for Singularity and Docker, but other # container execution systems can be used together with custom helper scripts. #% EXAMPLE END testEquality() { assertEquals 1 1 } cd "$OLD_PWD" [ -n "$DATALAD_TESTS_RUNCMDLINE" ] && . shunit2 || true datalad-container-1.1.2/docs/source/000077500000000000000000000000001400061037100173055ustar00rootroot00000000000000datalad-container-1.1.2/docs/source/_static/000077500000000000000000000000001400061037100207335ustar00rootroot00000000000000datalad-container-1.1.2/docs/source/_static/datalad_logo.png000066400000000000000000000016761400061037100240650ustar00rootroot00000000000000PNG  IHDRddGPA@ncO gw iYAJ# a 0H!S X* Hhm#A,v6,]i&RFkV2Lijd ʰʏ6 >}]SD| ~p~mX/OTD~ Up4adQ/™ǣ%x!Ex4ʵꈾd8uّ@h[i(mBB!㠽PH2?c!)^U͙pw~?gGx =l9u9aJ0$O8xh$pIENDB`datalad-container-1.1.2/docs/source/_templates/000077500000000000000000000000001400061037100214425ustar00rootroot00000000000000datalad-container-1.1.2/docs/source/_templates/autosummary/000077500000000000000000000000001400061037100240305ustar00rootroot00000000000000datalad-container-1.1.2/docs/source/_templates/autosummary/module.rst000066400000000000000000000006641400061037100260550ustar00rootroot00000000000000{% if fullname == 'datalad.api' -%} `{{ name }}` =={%- for c in name %}={%- endfor %} .. automodule:: datalad.api .. currentmodule:: datalad.api {% for item in members if not item.startswith('_') %} `{{ item }}` --{%- for c in item %}-{%- endfor %} .. autofunction:: {{ item }} {% endfor %} {% else -%} {{ fullname }} {{ underline }} .. 
automodule:: {{ fullname }} :members: :undoc-members: :show-inheritance: {% endif %} datalad-container-1.1.2/docs/source/acknowledgements.rst000066400000000000000000000021321400061037100233670ustar00rootroot00000000000000Acknowledgments *************** DataLad development is being performed as part of a US-German collaboration in computational neuroscience (CRCNS) project "DataGit: converging catalogues, warehouses, and deployment logistics into a federated 'data distribution'" (Halchenko_/Hanke_), co-funded by the US National Science Foundation (`NSF 1429999`_) and the German Federal Ministry of Education and Research (`BMBF 01GQ1411`_). Additional support is provided by the German federal state of Saxony-Anhalt and the European Regional Development Fund (ERDF), Project: `Center for Behavioral Brain Sciences`_, Imaging Platform DataLad is built atop the git-annex_ software that is being developed and maintained by `Joey Hess`_. .. _Halchenko: http://haxbylab.dartmouth.edu/ppl/yarik.html .. _Hanke: http://www.psychoinformatics.de .. _NSF 1429999: http://www.nsf.gov/awardsearch/showAward?AWD_ID=1429999 .. _BMBF 01GQ1411: http://www.gesundheitsforschung-bmbf.de/de/2550.php .. _Center for Behavioral Brain Sciences: http://cbbs.eu/en/ .. _git-annex: http://git-annex.branchable.com .. _Joey Hess: https://joeyh.name datalad-container-1.1.2/docs/source/changelog.rst000066400000000000000000000155011400061037100217700ustar00rootroot00000000000000.. This file is auto-converted from CHANGELOG.md (make update-changelog) -- do not edit Change log ********** :: ____ _ _ _ | _ \ __ _ | |_ __ _ | | __ _ __| | | | | | / _` || __| / _` || | / _` | / _` | | |_| || (_| || |_ | (_| || |___ | (_| || (_| | |____/ \__,_| \__| \__,_||_____| \__,_| \__,_| Container This is a high level and scarce summary of the changes between releases. We would recommend to consult log of the `DataLad git repository `__ for more details. 1.1.2 (January 16, 2021) – -------------------------- - Replace use of ``mock`` with ``unittest.mock`` as we do no longer support Python 2 1.1.1 (January 03, 2021) – -------------------------- - Drop use of ``Runner`` (to be removed in datalad 0.14.0) in favor of ``WitlessRunner`` 1.1.0 (October 30, 2020) – -------------------------- - Datalad version 0.13.0 or later is now required. - In the upcoming 0.14.0 release of DataLad, the datalad special remote will have built-in support for “shub://” URLs. If ``containers-add`` detects support for this feature, it will now add the “shub://” URL as is rather than resolving the URL itself. This avoids registering short-lived URLs, allowing the image to be retrieved later with ``datalad get``. - ``containers-run`` learned to install necessary subdatasets when asked to execute a container from underneath an uninstalled subdataset. 1.0.1 (June 23, 2020) – ----------------------- - Prefer ``datalad.core.local.run`` to ``datalad.interface.run``. The latter has been marked as obsolete since DataLad v0.12 (our minimum requirement) and will be removed in DataLad’s next feature release. 1.0.0 (Feb 23, 2020) – not-as-a-shy-one --------------------------------------- Extension is pretty stable so releasing as 1. MAJOR release, so we could start tracking API breakages and enhancements properly. - Drops support for Python 2 and DataLad prior 0.12 0.5.2 (Nov 12, 2019) – ---------------------- Fixes ~~~~~ - The Docker adapter unconditionally called ``docker run`` with ``--interactive`` and ``--tty`` even when stdin was not attached to a TTY, leading to an error. 
0.5.1 (Nov 08, 2019) – ---------------------- .. _fixes-1: Fixes ~~~~~ - The Docker adapter, which is used for the “dhub://” URL scheme, assumed the Python executable was spelled “python”. - A call to DataLad’s ``resolve_path`` helper assumed a string return value, which isn’t true as of the latest DataLad release candidate, 0.12.0rc6. 0.5.0 (Jul 12, 2019) – damn-you-malicious-users ----------------------------------------------- New features ~~~~~~~~~~~~ - The default result renderer for ``containers-list`` is now a custom renderer that includes the container name in the output. .. _fixes-2: Fixes ~~~~~ - Temporarily skip two tests relying on SingularityHub – it is down. 0.4.0 (May 29, 2019) – run-baby-run ----------------------------------- The minimum required DataLad version is now 0.11.5. .. _new-features-1: New features ~~~~~~~~~~~~ - The call format gained the “{img_dspath}” placeholder, which expands to the relative path of the dataset that contains the image. This is useful for pointing to a wrapper script that is bundled in the same subdataset as a container. - ``containers-run`` now passes the container image to ``run`` via its ``extra_inputs`` argument so that a run command’s “{inputs}” field is restricted to inputs that the caller explicitly specified. - During execution, ``containers-run`` now sets the environment variable ``DATALAD_CONTAINER_NAME`` to the name of the container. .. _fixes-3: Fixes ~~~~~ - ``containers-run`` mishandled paths when called from a subdirectory. - ``containers-run`` didn’t provide an informative error message when ``cmdexec`` contained an unknown placeholder. - ``containers-add`` ignores the ``--update`` flag when the container doesn’t yet exist, but it confusingly still used the word “update” in the commit message. 0.3.1 (Mar 05, 2019) – Upgrayeddd --------------------------------- .. _fixes-4: Fixes ~~~~~ - ``containers-list`` recursion actually does recursion. 0.3.0 (Mar 05, 2019) – Upgrayedd -------------------------------- API changes ~~~~~~~~~~~ - ``containers-list`` no longer lists containers from subdatasets by default. Specify ``--recursive`` to do so. - ``containers-run`` no longer considers subdataset containers in its automatic selection of a container name when no name is specified. If the current dataset has one container, that container is selected. Subdataset containers must always be explicitly specified. .. _new-features-2: New features ~~~~~~~~~~~~ - ``containers-add`` learned to update a previous container when passed ``--update``. - ``containers-add`` now supports Singularity’s “docker://” scheme in the URL. - To avoid unnecessary recursion into subdatasets, ``containers-run`` now decides to look for containers in subdatasets based on whether the name has a slash (which is true of all subdataset containers). 0.2.2 (Dec 19, 2018) – The more the merrier ------------------------------------------- - list/use containers recursively from installed subdatasets - Allow to specify container by path rather than just by name - Adding a container from local filesystem will copy it now 0.2.1 (Jul 14, 2018) – Explicit lyrics -------------------------------------- - Add support ``datalad run --explicit``. 0.2 (Jun 08, 2018) – Docker --------------------------- - Initial support for adding and running Docker containers. - Add support ``datalad run --sidecar``. - Simplify storage of ``call_fmt`` arguments in the Git config, by benefiting from ``datalad run`` being able to work with single-string compound commands. 
0.1.2 (May 28, 2018) – The docs ------------------------------- - Basic beginner documentation 0.1.1 (May 22, 2018) – The fixes -------------------------------- .. _new-features-3: New features ~~~~~~~~~~~~ - Add container images straight from singularity-hub, no need to manually specify ``--call-fmt`` arguments. .. _api-changes-1: API changes ~~~~~~~~~~~ - Use “name” instead of “label” for referring to a container (e.g. ``containers-run -n ...`` instead of ``containers-run -l``. .. _fixes-5: Fixes ~~~~~ - Pass relative container path to ``datalad run``. - ``containers-run`` no longer hides ``datalad run`` failures. 0.1 (May 19, 2018) – The Release -------------------------------- - Initial release with basic functionality to add, remove, and list containers in a dataset, plus a ``run`` command wrapper that injects the container image as an input dependency of a command call. datalad-container-1.1.2/docs/source/conf.py000066400000000000000000000245031400061037100206100ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # datalad_container documentation build configuration file, created by # sphinx-quickstart on Tue Oct 13 08:41:19 2015. # # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import sys import os from os.path import join as opj, exists from os.path import dirname from os import pardir def setup(sphinx): sys.path.insert(0, os.path.abspath('utils')) # travis sys.path.insert(0, os.path.abspath(opj(pardir, 'utils'))) # RTD from pygments_ansi_color import AnsiColorLexer sphinx.add_lexer("ansi-color", AnsiColorLexer()) # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. #sys.path.insert(0, os.path.abspath('.')) # generate missing pieces for setup_py_path in (opj(pardir, 'setup.py'), # travis opj(pardir, pardir, 'setup.py')): # RTD if exists(setup_py_path): sys.path.insert(0, os.path.abspath(dirname(setup_py_path))) try: for cmd in 'manpage', 'examples': os.system('{} build_{}'.format(setup_py_path, cmd)) except: # shut up and do your best pass # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.autosummary', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.mathjax', 'sphinx.ext.ifconfig', 'sphinx.ext.inheritance_diagram', 'sphinx.ext.viewcode', 'sphinx.ext.napoleon', ] # for the module reference autosummary_generate = True # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # source_suffix = ['.rst', '.md'] source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. 
project = u'Datalad for containerized environments' copyright = u'2018, DataLad team' author = u'DataLad team' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = '1.0' # The full version, including alpha/beta/rc tags. release = '1.1.0' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = [] # The reST default role (used for this markup: `text`) to use for all # documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. #keep_warnings = False # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = True # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. #html_theme = 'alabaster' html_theme = 'sphinx_rtd_theme' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. #html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. html_logo = '_static/datalad_logo.png' # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. #html_extra_path = ['_extras'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. 
#html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. html_split_index = True # If true, links to the reST sources are added to the pages. html_show_sourcelink = False # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Language to be used for generating the HTML full-text search index. # Sphinx supports the following languages: # 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' # 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' #html_search_language = 'en' # A dictionary with options for the search language support, empty by default. # Now only 'ja' uses this config value #html_search_options = {'type': 'default'} # The name of a javascript file (relative to the configuration directory) that # implements a search results scorer. If empty, the default will be used. #html_search_scorer = 'scorer.js' # Output file base name for HTML help builder. htmlhelp_basename = 'datalad_containerdoc' # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. #'preamble': '', # Latex figure (float) alignment #'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ (master_doc, 'datalad_container.tex', u'datalad_container Documentation', u'DataLad team', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ (master_doc, 'datalad', u'datalad Documentation', [author], 1) ] # If true, show URL addresses after external links. #man_show_urls = False # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. 
List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ (master_doc, 'datalad', u'datalad Documentation', author, 'datalad', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. #texinfo_appendices = [] # If false, no module index is generated. #texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. #texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. #texinfo_no_detailmenu = False # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = {'https://docs.python.org/': None} datalad-container-1.1.2/docs/source/index.rst000066400000000000000000000023211400061037100211440ustar00rootroot00000000000000DataLad extension for containerized environments ************************************************ This extension equips DataLad's `run/rerun `_ functionality with the ability to transparently execute commands in containerized computational environments. On re-run, DataLad will automatically obtain any required container at the correct version prior execution. Documentation ============= * :ref:`Documentation index ` * `Getting started`_ * `API reference`_ .. toctree:: :maxdepth: 1 changelog acknowledgements Getting started --------------- .. toctree:: :hidden: generated/examples/basic_demo .. include:: generated/examples/basic_demo.rst :start-after: *************** API Reference ============= Command manuals --------------- .. toctree:: :maxdepth: 1 generated/man/datalad-containers-add generated/man/datalad-containers-remove generated/man/datalad-containers-list generated/man/datalad-containers-run Python API ---------- .. currentmodule:: datalad_container .. autosummary:: :toctree: generated containers_add containers_remove containers_list containers_run .. |---| unicode:: U+02014 .. em dash datalad-container-1.1.2/docs/utils/000077500000000000000000000000001400061037100171455ustar00rootroot00000000000000datalad-container-1.1.2/docs/utils/pygments_ansi_color.py000066400000000000000000000140241400061037100235760ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Pygments lexer for text containing ANSI color codes.""" from __future__ import absolute_import from __future__ import unicode_literals import itertools import re import pygments.lexer import pygments.token Color = pygments.token.Token.Color _ansi_code_to_color = { 0: 'Black', 1: 'Red', 2: 'Green', 3: 'Yellow', 4: 'Blue', 5: 'Magenta', 6: 'Cyan', 7: 'White', } def _token_from_lexer_state(bold, fg_color, bg_color): """Construct a token given the current lexer state. We can only emit one token even though we have a multiple-tuple state. To do work around this, we construct tokens like "BoldRed". """ token_name = '' if bold: token_name += 'Bold' if fg_color: token_name += fg_color if bg_color: token_name += 'BG' + bg_color if token_name == '': return pygments.token.Text else: return getattr(Color, token_name) def color_tokens(fg_colors, bg_colors): """Return color tokens for a given set of colors. Pygments doesn't have a generic "color" token; instead everything is contextual (e.g. "comment" or "variable"). That doesn't make sense for us, where the colors actually *are* what we care about. This function will register combinations of tokens (things like "Red" or "BoldRedBGGreen") based on the colors passed in. 
You can also define the tokens yourself, but note that the token names are *not* currently guaranteed to be stable between releases as I'm not really happy with this approach. Usage: fg_colors = bg_colors = { 'Black': '#000000', 'Red': '#EF2929', 'Green': '#8AE234', 'Yellow': '#FCE94F', 'Blue': '#3465A4', 'Magenta': '#c509c5', 'Cyan': '#34E2E2', 'White': '#ffffff', } class MyStyle(pygments.styles.SomeStyle): styles = dict(pygments.styles.SomeStyle.styles) styles.update(color_tokens(fg_colors, bg_colors)) """ styles = {} for bold, fg_color, bg_color in itertools.product( (False, True), {None} | set(fg_colors), {None} | set(bg_colors), ): token = _token_from_lexer_state(bold, fg_color, bg_color) if token is not pygments.token.Text: value = [] if bold: value.append('bold') if fg_color: value.append(fg_colors[fg_color]) if bg_color: value.append('bg:' + bg_colors[bg_color]) styles[token] = ' '.join(value) return styles class AnsiColorLexer(pygments.lexer.RegexLexer): name = 'ANSI Color' aliases = ('ansi-color', 'ansi', 'ansi-terminal') flags = re.DOTALL | re.MULTILINE def __init__(self, *args, **kwargs): super(AnsiColorLexer, self).__init__(*args, **kwargs) self.reset_state() def reset_state(self): self.bold = False self.fg_color = None self.bg_color = None @property def current_token(self): return _token_from_lexer_state( self.bold, self.fg_color, self.bg_color, ) def process(self, match): """Produce the next token and bit of text. Interprets the ANSI code (which may be a color code or some other code), changing the lexer state and producing a new token. If it's not a color code, we just strip it out and move on. Some useful reference for ANSI codes: * http://ascii-table.com/ansi-escape-sequences.php """ # "after_escape" contains everything after the start of the escape # sequence, up to the next escape sequence. We still need to separate # the content from the end of the escape sequence. after_escape = match.group(1) # TODO: this doesn't handle the case where the values are non-numeric. # This is rare but can happen for keyboard remapping, e.g. # '\x1b[0;59;"A"p' parsed = re.match( r'([0-9;=]*?)?([a-zA-Z])(.*)$', after_escape, re.DOTALL | re.MULTILINE, ) if parsed is None: # This shouldn't ever happen if we're given valid text + ANSI, but # people can provide us with utter junk, and we should tolerate it. text = after_escape else: value, code, text = parsed.groups() if code == 'm': # "m" is "Set Graphics Mode" # Special case \x1b[m is a reset code if value == '': self.reset_state() else: values = value.split(';') for value in values: try: value = int(value) except ValueError: # Shouldn't ever happen, but could with invalid # ANSI. continue else: fg_color = _ansi_code_to_color.get(value - 30) bg_color = _ansi_code_to_color.get(value - 40) if fg_color: self.fg_color = fg_color elif bg_color: self.bg_color = bg_color elif value == 1: self.bold = True elif value == 22: self.bold = False elif value == 39: self.fg_color = None elif value == 49: self.bg_color = None elif value == 0: self.reset_state() yield match.start(), self.current_token, text tokens = { # states have to be native strings str('root'): [ (r'\x1b\[([^\x1b]*)', process), (r'[^\x1b]+', pygments.token.Text), ], } datalad-container-1.1.2/formatters.py000066400000000000000000000243161400061037100176230ustar00rootroot00000000000000# ## ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## # # See COPYING file distributed along with the DataLad package for the # copyright and license terms. 
# # ## ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## import argparse import datetime import re class ManPageFormatter(argparse.HelpFormatter): # This code was originally distributed # under the same License of Python # Copyright (c) 2014 Oz Nahum Tiram def __init__(self, prog, indent_increment=2, max_help_position=4, width=1000000, section=1, ext_sections=None, authors=None, version=None ): super(ManPageFormatter, self).__init__( prog, indent_increment=indent_increment, max_help_position=max_help_position, width=width) self._prog = prog self._section = 1 self._today = datetime.date.today().strftime('%Y\\-%m\\-%d') self._ext_sections = ext_sections self._version = version def _get_formatter(self, **kwargs): return self.formatter_class(prog=self.prog, **kwargs) def _markup(self, txt): return txt.replace('-', '\\-') def _underline(self, string): return "\\fI\\s-1" + string + "\\s0\\fR" def _bold(self, string): if not string.strip().startswith('\\fB'): string = '\\fB' + string if not string.strip().endswith('\\fR'): string = string + '\\fR' return string def _mk_synopsis(self, parser): self.add_usage(parser.usage, parser._actions, parser._mutually_exclusive_groups, prefix='') usage = self._format_usage(None, parser._actions, parser._mutually_exclusive_groups, '') # replace too long list of commands with a single placeholder usage = re.sub(r'{[^]]*?create,.*?}', ' COMMAND ', usage, flags=re.MULTILINE) # take care of proper wrapping usage = re.sub(r'\[([-a-zA-Z0-9]*)\s([a-zA-Z0-9{}|_]*)\]', r'[\1\~\2]', usage) usage = usage.replace('%s ' % self._prog, '') usage = '.SH SYNOPSIS\n.nh\n.HP\n\\fB%s\\fR %s\n.hy\n' % (self._markup(self._prog), usage) return usage def _mk_title(self, prog): name_version = "\"{0} {1}\"".format(prog, self._version) return '.TH {0} {1} {2} {3}\n'.format(prog, self._section, self._today, name_version) def _make_name(self, parser): """ this method is in consitent with others ... 
it relies on distribution """ return '.SH NAME\n%s \\- %s\n' % (parser.prog, parser.description) def _mk_description(self, parser): desc = parser.description if not desc: return '' desc = desc.replace('\n\n', '\n.PP\n') # sub-section headings desc = re.sub(r'^\*(.*)\*$', r'.SS \1', desc, flags=re.MULTILINE) # italic commands desc = re.sub(r'^ ([-a-z]*)$', r'.TP\n\\fI\1\\fR', desc, flags=re.MULTILINE) # deindent body text, leave to troff viewer desc = re.sub(r'^ (\S.*)\n', '\\1\n', desc, flags=re.MULTILINE) # format NOTEs as indented paragraphs desc = re.sub(r'^NOTE\n', '.TP\nNOTE\n', desc, flags=re.MULTILINE) # deindent indented paragraphs after heading setup desc = re.sub(r'^ (.*)$', '\\1', desc, flags=re.MULTILINE) return '.SH DESCRIPTION\n%s\n' % self._markup(desc) def _mk_footer(self, sections): if not hasattr(sections, '__iter__'): return '' footer = [] for section, value in sections.items(): part = ".SH {}\n {}".format(section.upper(), value) footer.append(part) return '\n'.join(footer) def format_man_page(self, parser): page = [] page.append(self._mk_title(self._prog)) page.append(self._mk_synopsis(parser)) page.append(self._mk_description(parser)) page.append(self._mk_options(parser)) page.append(self._mk_footer(self._ext_sections)) return ''.join(page) def _mk_options(self, parser): formatter = parser._get_formatter() # positionals, optionals and user-defined groups for action_group in parser._action_groups: formatter.start_section(None) formatter.add_text(None) formatter.add_arguments(action_group._group_actions) formatter.end_section() # epilog formatter.add_text(parser.epilog) # determine help from format above help = formatter.format_help() # add spaces after comma delimiters for easier reformatting help = re.sub(r'([a-z]),([a-z])', '\\1, \\2', help) # get proper indentation for argument items help = re.sub(r'^ (\S.*)\n', '.TP\n\\1\n', help, flags=re.MULTILINE) # deindent body text, leave to troff viewer help = re.sub(r'^ (\S.*)\n', '\\1\n', help, flags=re.MULTILINE) return '.SH OPTIONS\n' + help def _format_action_invocation(self, action): if not action.option_strings: metavar, = self._metavar_formatter(action, action.dest)(1) return metavar else: parts = [] # if the Optional doesn't take a value, format is: # -s, --long if action.nargs == 0: parts.extend([self._bold(action_str) for action_str in action.option_strings]) # if the Optional takes a value, format is: # -s ARGS, --long ARGS else: default = self._underline(action.dest.upper()) args_string = self._format_args(action, default) for option_string in action.option_strings: parts.append('%s %s' % (self._bold(option_string), args_string)) return ', '.join(parts) class RSTManPageFormatter(ManPageFormatter): def _get_formatter(self, **kwargs): return self.formatter_class(prog=self.prog, **kwargs) def _markup(self, txt): # put general tune-ups here return txt def _underline(self, string): return "*{0}*".format(string) def _bold(self, string): return "**{0}**".format(string) def _mk_synopsis(self, parser): self.add_usage(parser.usage, parser._actions, parser._mutually_exclusive_groups, prefix='') usage = self._format_usage(None, parser._actions, parser._mutually_exclusive_groups, '') usage = usage.replace('%s ' % self._prog, '') usage = 'Synopsis\n--------\n::\n\n %s %s\n' \ % (self._markup(self._prog), usage) return usage def _mk_title(self, prog): # and an easy to use reference point title = ".. 
_man_%s:\n\n" % prog title += "{0}".format(prog) title += '\n{0}\n\n'.format('=' * len(prog)) return title def _make_name(self, parser): return '' def _mk_description(self, parser): desc = parser.description if not desc: return '' return 'Description\n-----------\n%s\n' % self._markup(desc) def _mk_footer(self, sections): if not hasattr(sections, '__iter__'): return '' footer = [] for section, value in sections.items(): part = "\n{0}\n{1}\n{2}\n".format( section, '-' * len(section), value) footer.append(part) return '\n'.join(footer) def _mk_options(self, parser): # this non-obvious maneuver is really necessary! formatter = self.__class__(self._prog) # positionals, optionals and user-defined groups for action_group in parser._action_groups: formatter.start_section(None) formatter.add_text(None) formatter.add_arguments(action_group._group_actions) formatter.end_section() # epilog formatter.add_text(parser.epilog) # determine help from format above option_sec = formatter.format_help() return '\n\nOptions\n-------\n{0}'.format(option_sec) def _format_action(self, action): # determine the required width and the entry label action_header = self._format_action_invocation(action) if action.help: help_text = self._expand_help(action) help_lines = self._split_lines(help_text, 80) help = ' '.join(help_lines) else: help = '' # return a single string return '{0}\n{1}\n{2}\n\n'.format( action_header, '~' * len(action_header), help) def cmdline_example_to_rst(src, out=None, ref=None): if out is None: from six.moves import StringIO out = StringIO() # place header out.write('.. AUTO-GENERATED FILE -- DO NOT EDIT!\n\n') if ref: # place cross-ref target out.write('.. {0}:\n\n'.format(ref)) # parser status vars inexample = False incodeblock = False for line in src: if line.startswith('#% EXAMPLE START'): inexample = True incodeblock = False continue if not inexample: continue if line.startswith('#% EXAMPLE END'): break if not inexample: continue if line.startswith('#%'): incodeblock = not incodeblock if incodeblock: out.write('\n.. 
code-block:: sh\n\n')
            continue
        if not incodeblock and line.startswith('#'):
            out.write(line[(min(2, len(line) - 1)):])
            continue
        if incodeblock:
            if not line.rstrip().endswith('#% SKIP'):
                out.write(' %s' % line)
            continue
        if not len(line.strip()):
            continue
        else:
            raise RuntimeError("this should not happen")
    return out

datalad-container-1.1.2/requirements-devel.txt

-e .[devel]

datalad-container-1.1.2/requirements.txt

# If you want to develop, use requirements-devel.txt
# git+https://github.com/datalad/datalad.git

datalad-container-1.1.2/setup.py

#!/usr/bin/env python

from setuptools import setup
from setuptools import find_packages
from setuptools import findall
from os.path import join as opj
from os.path import sep as pathsep
from os.path import splitext
from os.path import dirname

from setup_support import BuildManPage
from setup_support import BuildRSTExamplesFromScripts
from setup_support import get_version


def findsome(subdir, extensions):
    """Find files under subdir having specified extensions

    Leading directory (datalad) gets stripped
    """
    return [
        f.split(pathsep, 1)[1] for f in findall(opj('datalad_container', subdir))
        if splitext(f)[-1].lstrip('.') in extensions
    ]

# extension version
version = get_version()

cmdclass = {
    'build_manpage': BuildManPage,
    'build_examples': BuildRSTExamplesFromScripts,
}

# PyPI doesn't render markdown yet. Workaround for a sane appearance
# https://github.com/pypa/pypi-legacy/issues/148#issuecomment-227757822
README = opj(dirname(__file__), 'README.md')
try:
    import pypandoc
    long_description = pypandoc.convert(README, 'rst')
except (ImportError, OSError) as exc:
    # attempting to install pandoc via brew on OSX currently hangs and
    # pypandoc imports but throws OSError demanding pandoc
    print(
        "WARNING: pypandoc failed to import or threw an error while converting"
        " README.md to RST: %r .md version will be used as is" % exc
    )
    long_description = open(README).read()

requires = {
    'core': [
        'datalad>=0.13',
        'requests>=1.2',  # to talk to Singularity-hub
    ],
    'devel-docs': [
        # used for converting README.md -> .rst for long_description
        'pypandoc',
        # Documentation
        'sphinx>=1.6.2',
        'sphinx-rtd-theme',
    ],
    'tests': [
        'nose>=1.3.4',
    ],
}
requires['devel'] = sum(list(requires.values()), [])


setup(
    # basic project properties can be set arbitrarily
    name="datalad_container",
    author="The DataLad Team and Contributors",
    author_email="team@datalad.org",
    version=version,
    description="DataLad extension package for working with containerized environments",
    long_description=long_description,
    packages=[pkg for pkg in find_packages('.') if pkg.startswith('datalad')],
    # datalad command suite specs from here
    install_requires=requires['core'],
    extras_require=requires,
    cmdclass=cmdclass,
    entry_points={
        # 'datalad.extensions' is THE entrypoint inspected by the datalad API builders
        'datalad.extensions': [
            # the label in front of '=' is the command suite label
            # the entrypoint can point to any symbol of any name, as long as it is a
            # valid datalad interface specification (see demo in this extension)
            'container=datalad_container:command_suite',
        ],
        'datalad.tests': [
            'container=datalad_container',
        ],
    },
)

datalad-container-1.1.2/setup_support.py

# ## ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
#
#   See COPYING file distributed along with the DataLad package for the
#   copyright and license terms.
#
# ## ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##

import os
from os.path import dirname, join as opj

from distutils.core import Command
from distutils.errors import DistutilsOptionError
import datetime

import formatters as fmt

cmdline_command_names = (
    'containers-list',
    'containers-add',
    'containers-remove',
    'containers-run',
)


def _path_rel2file(p):
    return opj(dirname(__file__), p)


def get_version():
    """Load the version from datalad_container/version.py without entailing any imports
    """
    # This might entail lots of imports which might not yet be available
    # so let's do ad-hoc parsing of the version.py
    with open(opj(dirname(__file__), 'datalad_container', 'version.py')) as f:
        version_lines = list(filter(lambda x: x.startswith('__version__'), f))
    assert (len(version_lines) == 1)
    return version_lines[0].split('=')[1].strip(" '\"\t\n")


class BuildManPage(Command):
    # The BuildManPage code was originally distributed
    # under the same License of Python
    # Copyright (c) 2014 Oz Nahum Tiram

    description = 'Generate man page from an ArgumentParser instance.'

    user_options = [
        ('manpath=', None, 'output path for manpages'),
        ('rstpath=', None, 'output path for RST files'),
        ('parser=', None, 'module path to an ArgumentParser instance '
         '(e.g. mymod:func, where func is a method or function which returns '
         'a dict with one or more argparse.ArgumentParser instances).'),
    ]

    def initialize_options(self):
        self.manpath = opj('build', 'man')
        self.rstpath = opj('docs', 'source', 'generated', 'man')
        self.parser = 'datalad.cmdline.main:setup_parser'

    def finalize_options(self):
        if self.manpath is None:
            raise DistutilsOptionError('\'manpath\' option is required')
        if self.rstpath is None:
            raise DistutilsOptionError('\'rstpath\' option is required')
        if self.parser is None:
            raise DistutilsOptionError('\'parser\' option is required')
        self.manpath = _path_rel2file(self.manpath)
        self.rstpath = _path_rel2file(self.rstpath)
        mod_name, func_name = self.parser.split(':')
        fromlist = mod_name.split('.')
        try:
            mod = __import__(mod_name, fromlist=fromlist)
            self._parser = getattr(mod, func_name)(
                ['datalad'],
                formatter_class=fmt.ManPageFormatter,
                return_subparsers=True)
        except ImportError as err:
            raise err

        self.announce('Writing man page(s) to %s' % self.manpath)
        self._today = datetime.date.today()

    def run(self):

        dist = self.distribution
        #homepage = dist.get_url()
        #appname = self._parser.prog
        appname = 'datalad'

        sections = {
            'Authors': """{0} is developed by {1} <{2}>.""".format(
                appname, dist.get_author(), dist.get_author_email()),
        }

        dist = self.distribution
        for cls, opath, ext in ((fmt.ManPageFormatter, self.manpath, '1'),
                                (fmt.RSTManPageFormatter, self.rstpath, 'rst')):
            if not os.path.exists(opath):
                os.makedirs(opath)
            for cmdname in cmdline_command_names:
                p = self._parser[cmdname]
                cmdname = "{0}{1}".format(
                    'datalad-' if cmdname != 'datalad' else '',
                    cmdname)
                format = cls(
                    cmdname,
                    ext_sections=sections,
                    version=get_version())
                formatted = format.format_man_page(p)
                with open(opj(opath, '{0}.{1}'.format(
                        cmdname,
                        ext)), 'w') as f:
                    f.write(formatted)


class BuildRSTExamplesFromScripts(Command):

    description = 'Generate RST variants of example shell scripts.'
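    # Note added for illustration (not in the original source): this Command is
    # registered in setup.py's cmdclass under the name 'build_examples', so a
    # typical invocation from the top of the source tree would be
    #     python setup.py build_examples
    # which converts the annotated docs/examples/*.sh scripts into
    # reStructuredText pages under docs/source/generated/examples/.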
    user_options = [
        ('expath=', None, 'path to look for example scripts'),
        ('rstpath=', None, 'output path for RST files'),
    ]

    def initialize_options(self):
        self.expath = opj('docs', 'examples')
        self.rstpath = opj('docs', 'source', 'generated', 'examples')

    def finalize_options(self):
        if self.expath is None:
            raise DistutilsOptionError('\'expath\' option is required')
        if self.rstpath is None:
            raise DistutilsOptionError('\'rstpath\' option is required')
        self.expath = _path_rel2file(self.expath)
        self.rstpath = _path_rel2file(self.rstpath)
        self.announce('Converting example scripts')

    def run(self):
        opath = self.rstpath
        if not os.path.exists(opath):
            os.makedirs(opath)

        from glob import glob
        for example in glob(opj(self.expath, '*.sh')):
            exname = os.path.basename(example)[:-3]
            with open(opj(opath, '{0}.rst'.format(exname)), 'w') as out:
                fmt.cmdline_example_to_rst(
                    open(example),
                    out=out,
                    ref='_example_{0}'.format(exname))

datalad-container-1.1.2/tools/Singularity.testhelper

#
# This produces a minimal image that can be used for testing the
# extension itself.
#
Bootstrap:docker
From:debian:stable-slim

datalad-container-1.1.2/tools/ci/prep-travis-forssh-sudo.sh

#!/usr/bin/env bash
echo "127.0.0.1 datalad-test" >> /etc/hosts
apt-get install openssh-client

datalad-container-1.1.2/tools/ci/prep-travis-forssh.sh

#!/bin/bash

mkdir -p ~/.ssh
echo -e "Host localhost\n\tStrictHostKeyChecking no\n\tIdentityFile /tmp/dl-test-ssh-id\n" >> ~/.ssh/config
echo -e "Host datalad-test\n\tStrictHostKeyChecking no\n\tIdentityFile /tmp/dl-test-ssh-id\n" >> ~/.ssh/config
ssh-keygen -f /tmp/dl-test-ssh-id -N ""
cat /tmp/dl-test-ssh-id.pub >> ~/.ssh/authorized_keys
eval $(ssh-agent)
ssh-add /tmp/dl-test-ssh-id

echo "DEBUG: test connection to localhost ..."
ssh -v localhost exit
echo "DEBUG: test connection to datalad-test ..."
ssh -v datalad-test exit

# tmp: don't run the actual tests:
# exit 1

datalad-container-1.1.2/tools/mk_minimal_chroot.sh

#!/bin/bash
#
# bootstrap a tiny chroot (26MB compressed)
#
# run with sudo

set -e -u

chrootdir=$(mktemp -d)
echo "Working in $chrootdir"

debootstrap --variant=minbase --no-check-gpg stretch "$chrootdir"

find "$chrootdir"/var/cache/apt/archives -type f -delete
find "$chrootdir"/var/lib/apt/lists/ -type f -delete
rm -rf "$chrootdir"/usr/share/doc/*
rm -rf "$chrootdir"/usr/share/man

tar --show-transformed-names --transform=s,^.*$(basename $chrootdir),minichroot, -cvjf minichroot.tar.xz "$chrootdir"

echo "chroot tarball at minichroot.tar.xz"
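For orientation, here is a minimal sketch of how the cmdline_example_to_rst helper from
formatters.py can be driven directly, the same way BuildRSTExamplesFromScripts.run() does
for every docs/examples/*.sh file. It assumes it is run from the top of the source tree;
the input file docs/examples/demo.sh, the output name demo.rst, and the ref label are
illustrative assumptions, not files shipped with the package.

# Hypothetical driver script (assumption: an annotated example script
# docs/examples/demo.sh exists and uses the '#% ...' markers described below).
import formatters as fmt

with open('docs/examples/demo.sh') as src, \
        open('demo.rst', 'w') as out:
    # '#% EXAMPLE START' / '#% EXAMPLE END' delimit the converted region,
    # '#%' toggles shell code blocks, plain '#' lines become prose, and
    # lines ending in '#% SKIP' are omitted from the generated RST.
    fmt.cmdline_example_to_rst(src, out=out, ref='_example_demo')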