pax_global_header00006660000000000000000000000064146341105530014514gustar00rootroot0000000000000052 comment=ae1ccb7935f4b8a6397b5708d53bf0d7aca3afb3 subprocess-tee-0.4.2/000077500000000000000000000000001463411055300144625ustar00rootroot00000000000000subprocess-tee-0.4.2/.config/000077500000000000000000000000001463411055300160055ustar00rootroot00000000000000subprocess-tee-0.4.2/.config/subprocess-tee.spec000066400000000000000000000026461463411055300216340ustar00rootroot00000000000000# spell-checker:ignore bcond pkgversion buildrequires autosetup PYTHONPATH noarch buildroot bindir sitelib numprocesses clib %bcond_with check %bcond_with privileged_tests #global source_date_epoch_from_changelog 0 #global clamp_mtime_to_source_date_epoch 1 Name: subprocess-tee Version: VERSION_PLACEHOLDER Release: 1%{?dist} Summary: subprocess-tee License: MIT URL: https://github.com/pycontribs/subprocess-tee Source0: %{pypi_source} BuildArch: noarch BuildRequires: pyproject-rpm-macros BuildRequires: python%{python3_pkgversion}-build BuildRequires: python%{python3_pkgversion}-pip BuildRequires: python%{python3_pkgversion}-setuptools BuildRequires: python%{python3_pkgversion}-setuptools_scm BuildRequires: python%{python3_pkgversion}-wheel BuildRequires: python%{python3_pkgversion}-devel %if %{with check} # These are required for tests: BuildRequires: python%{python3_pkgversion}-pytest BuildRequires: python%{python3_pkgversion}-pytest-xdist BuildRequires: git-core %endif %description subprocess-tee %prep %autosetup %build %pyproject_wheel %generate_buildrequires %pyproject_buildrequires %install %pyproject_install %pyproject_save_files subprocess_tee %check %if %{with check} pytest \ -v \ --disable-pytest-warnings \ --numprocesses=auto \ test %endif %files -f %{pyproject_files} %license LICENSE %doc README.md %changelog subprocess-tee-0.4.2/.dockerignore000066400000000000000000000000411463411055300171310ustar00rootroot00000000000000.tox **/*.pyc .github build dist subprocess-tee-0.4.2/.flake8000066400000000000000000000000361463411055300156340ustar00rootroot00000000000000[flake8] max-line-length = 88 subprocess-tee-0.4.2/.github/000077500000000000000000000000001463411055300160225ustar00rootroot00000000000000subprocess-tee-0.4.2/.github/CODEOWNERS000066400000000000000000000000221463411055300174070ustar00rootroot00000000000000* @ssbarnea subprocess-tee-0.4.2/.github/FUNDING.yml000066400000000000000000000000231463411055300176320ustar00rootroot00000000000000github: [ssbarnea] subprocess-tee-0.4.2/.github/dependabot.yml000066400000000000000000000005411463411055300206520ustar00rootroot00000000000000--- version: 2 updates: - package-ecosystem: pip directory: / schedule: interval: daily labels: - skip-changelog versioning-strategy: lockfile-only open-pull-requests-limit: 0 # neutered - package-ecosystem: "github-actions" directory: "/" schedule: interval: daily labels: - "skip-changelog" subprocess-tee-0.4.2/.github/release-drafter.yml000066400000000000000000000001231463411055300216060ustar00rootroot00000000000000--- # see https://github.com/ansible/team-devtools _extends: ansible/team-devtools subprocess-tee-0.4.2/.github/workflows/000077500000000000000000000000001463411055300200575ustar00rootroot00000000000000subprocess-tee-0.4.2/.github/workflows/ack.yml000066400000000000000000000003761463411055300213460ustar00rootroot00000000000000--- # See https://github.com/ansible/team-devtools/blob/main/.github/workflows/ack.yml name: ack on: pull_request_target: types: [opened, labeled, unlabeled, synchronize] jobs: ack: uses: 
ansible/team-devtools/.github/workflows/ack.yml@main subprocess-tee-0.4.2/.github/workflows/push.yml000066400000000000000000000004021463411055300215550ustar00rootroot00000000000000--- # See https://github.com/ansible/team-devtools/blob/main/.github/workflows/push.yml name: push on: push: branches: - main - "releases/**" - "stable/**" jobs: ack: uses: ansible/team-devtools/.github/workflows/push.yml@main subprocess-tee-0.4.2/.github/workflows/release.yml000066400000000000000000000016571463411055300222330ustar00rootroot00000000000000--- name: release on: release: types: [published] jobs: pypi: name: Publish to PyPI registry environment: release runs-on: ubuntu-20.04 env: FORCE_COLOR: 1 PY_COLORS: 1 TOXENV: pkg TOX_PARALLEL_NO_SPINNER: 1 steps: - name: Switch to using Python 3.8 by default uses: actions/setup-python@v5 with: python-version: 3.8 - name: Install tox run: python3 -m pip install --user tox - name: Check out src from Git uses: actions/checkout@v4 with: fetch-depth: 0 # needed by setuptools-scm - name: Build dists run: python -m tox - name: Publish to pypi.org if: >- # "create" workflows run separately from "push" & "pull_request" github.event_name == 'release' uses: pypa/gh-action-pypi-publish@release/v1 with: password: ${{ secrets.pypi_password }} subprocess-tee-0.4.2/.github/workflows/tox.yml000066400000000000000000000043301463411055300214140ustar00rootroot00000000000000name: tox on: push: # only publishes pushes to the main branch to TestPyPI branches: # any integration branch but not tag - main tags-ignore: - "**" pull_request: jobs: pre: name: pre runs-on: ubuntu-22.04 outputs: matrix: ${{ steps.generate_matrix.outputs.matrix }} steps: - name: Determine matrix id: generate_matrix uses: coactions/dynamic-matrix@v1 with: other_names: | lint docs pkg alpine: name: alpine runs-on: ubuntu-22.04 steps: - name: Check out code uses: actions/checkout@v4 with: fetch-depth: 0 - name: Run docker build run: docker build . 
unit: name: ${{ matrix.name }} runs-on: ${{ matrix.os || 'ubuntu-22.04' }} needs: pre strategy: matrix: ${{ fromJson(needs.pre.outputs.matrix) }} steps: - uses: actions/checkout@main with: fetch-depth: 0 - name: Set up stock Python ${{ matrix.python_version }} from GitHub uses: actions/setup-python@v5 with: python-version: ${{ matrix.python_version }} - name: Log the currently selected Python version info (${{ matrix.python_version }}) run: | python --version --version which python - name: Pip cache uses: actions/cache@v4 with: path: ~/.cache/pip key: ${{ runner.os }}-pip-${{ env.PY_SHA256 }}-${{ hashFiles('setup.cfg') }}-${{ hashFiles('tox.ini') }}-${{ hashFiles('pyproject.toml') }}-${{ hashFiles('.pre-commit-config.yaml') }}-${{ hashFiles('pytest.ini') }} restore-keys: | ${{ runner.os }}-pip- ${{ runner.os }}- - name: Install tox run: | python -m pip install -U pip pip install tox - name: Run tox run -e ${{ matrix.passed_name }} run: tox run -e ${{ matrix.passed_name }} check: # This job does nothing and is only used for the branch protection if: always() needs: - unit - alpine runs-on: ubuntu-22.04 steps: - name: Decide whether the needed jobs succeeded or failed uses: re-actors/alls-green@release/v1 with: jobs: ${{ toJSON(needs) }} subprocess-tee-0.4.2/.gitignore000066400000000000000000000034011463411055300164500ustar00rootroot00000000000000# Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] *$py.class # C extensions *.so # Distribution / packaging .Python build/ develop-eggs/ dist/ downloads/ eggs/ .eggs/ parts/ sdist/ var/ wheels/ pip-wheel-metadata/ share/python-wheels/ *.egg-info/ .installed.cfg *.egg MANIFEST # PyInstaller # Usually these files are written by a python script from a template # before PyInstaller builds the exe, so as to inject date/other infos into it. *.manifest *.spec # Installer logs pip-log.txt pip-delete-this-directory.txt # Unit test / coverage reports htmlcov/ .tox/ .nox/ .coverage .coverage.* .cache nosetests.xml coverage.xml *.cover *.py,cover .hypothesis/ .pytest_cache/ # Translations *.mo *.pot # Django stuff: *.log local_settings.py db.sqlite3 db.sqlite3-journal # Flask stuff: instance/ .webassets-cache # Scrapy stuff: .scrapy # Sphinx documentation docs/_build/ # PyBuilder target/ # Jupyter Notebook .ipynb_checkpoints # IPython profile_default/ ipython_config.py # pyenv .python-version # pipenv # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. # However, in case of collaboration, if having platform-specific dependencies or dependencies # having no cross-platform support, pipenv may install dependencies that don't work, or not # install all needed dependencies. #Pipfile.lock # PEP 582; used by e.g. github.com/David-OConnor/pyflow __pypackages__/ # Celery stuff celerybeat-schedule celerybeat.pid # SageMath parsed files *.sage.py # Environments .env .venv env/ venv/ ENV/ env.bak/ venv.bak/ # Spyder project settings .spyderproject .spyproject # Rope project settings .ropeproject # mkdocs documentation /site # mypy .mypy_cache/ .dmypy.json dmypy.json # Pyre type checker .pyre/ *.rpm subprocess-tee-0.4.2/.packit.yaml000066400000000000000000000024301463411055300166760ustar00rootroot00000000000000--- # spell-checker:ignore packit specfile copr epel # https://packit.dev/docs/configuration/ # TODO(ssbarnea): Is there a way to template the package name in order to make # it easier to sync this config between projects? 
specfile_path: dist/subprocess-tee.spec actions: create-archive: - python3 -m build --sdist --outdir dist - sh -c "ls dist/subprocess-tee-*.tar.gz" get-current-version: - python3 -m setuptools_scm --strip-dev post-upstream-clone: - bash -c "rm -f *.src.rpm && rm -rf dist/*.*" - mkdir -p dist - bash -c 'sed -e "s/VERSION_PLACEHOLDER/$(python3 -m setuptools_scm --strip-dev)/" .config/subprocess-tee.spec > dist/subprocess-tee.spec' srpm_build_deps: - python3-build - python3-setuptools_scm jobs: - job: copr_build targets: # See https://packit.dev/docs/configuration/#aliases # API to get available targets: https://api.dev.testing-farm.io/v0.1/composes/public # Fedora 37 is the first version having a setuptools(-scm) that supports our PEP-621 metadata - fedora-37 - fedora-rawhide trigger: pull_request # - job: tests # trigger: pull_request # metadata: # targets: # - fedora-all # - job: propose_downstream # trigger: release # metadata: # dist-git-branch: master subprocess-tee-0.4.2/.pre-commit-config.yaml000066400000000000000000000031571463411055300207510ustar00rootroot00000000000000--- exclude: | (?x)( ^docs/conf.py$ ) repos: - repo: https://github.com/PyCQA/isort rev: 5.13.2 hooks: - id: isort - repo: https://github.com/psf/black rev: 24.4.2 hooks: - id: black language_version: python3 - repo: https://github.com/pre-commit/pre-commit-hooks.git rev: v4.6.0 hooks: - id: end-of-file-fixer - id: trailing-whitespace - id: mixed-line-ending - id: check-byte-order-marker - id: check-executables-have-shebangs - id: check-merge-conflict - id: debug-statements language_version: python3 - repo: https://github.com/pycqa/flake8.git rev: 7.1.0 hooks: - id: flake8 additional_dependencies: - flake8-absolute-import - flake8-black>=0.1.1 - flake8-docstrings>=1.5.0 - pydocstyle>=5.1.1 language_version: python3 - repo: https://github.com/adrienverge/yamllint.git rev: v1.35.1 hooks: - id: yamllint files: \.(yaml|yml)$ types: [file, yaml] entry: yamllint --strict - repo: https://github.com/pre-commit/mirrors-mypy rev: v1.10.0 hooks: - id: mypy # empty args needed in order to match mypy cli behavior args: [] entry: mypy src/ pass_filenames: false additional_dependencies: - pytest>=6.1.2 - enrich>=1.2.5 - repo: https://github.com/PyCQA/pylint rev: v3.2.3 hooks: - id: pylint additional_dependencies: - pytest>=6.1.2 - enrich>=1.2.5 - typing - typing-extensions subprocess-tee-0.4.2/.yamllint000066400000000000000000000011261463411055300163140ustar00rootroot00000000000000--- # Based on ansible-lint config extends: default rules: braces: max-spaces-inside: 1 level: error brackets: max-spaces-inside: 1 level: error colons: max-spaces-after: -1 level: error commas: max-spaces-after: -1 level: error comments: disable comments-indentation: disable document-start: disable empty-lines: max: 3 level: error hyphens: level: error indentation: disable key-duplicates: enable line-length: disable new-line-at-end-of-file: disable new-lines: type: unix trailing-spaces: disable truthy: disable subprocess-tee-0.4.2/Dockerfile000066400000000000000000000010141463411055300164500ustar00rootroot00000000000000FROM alpine:latest # Alpine is used on purpose because it does not come with bash, and we # want to test that subprocess-tee works even on systems without bash shell. ENV BUILD_DEPS="\ ansible-core \ gcc \ git \ libffi-dev \ make \ musl-dev \ python3 \ python3-dev \ py3-pip \ py3-ruamel.yaml \ " RUN \ apk add --update --no-cache \ ${BUILD_DEPS} COPY . /root/code/ WORKDIR /root/code/ RUN \ python3 --version && \ python3 -m venv venv && \ . 
venv/bin/activate && \
    python3 -m pip install ".[test]" && \
    python3 -m pytest
subprocess-tee-0.4.2/LICENSE000066400000000000000000000020621463411055300154670ustar00rootroot00000000000000The MIT License

Copyright (c) 2020 Sorin Sbarnea

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
subprocess-tee-0.4.2/README.md000066400000000000000000000017121463411055300157420ustar00rootroot00000000000000# subprocess-tee

This package provides a drop-in alternative to `subprocess.run` that captures the output while still printing it in **real-time**, just the way `tee` does.

Printing output in real-time while still capturing it is valuable for any tool that executes long-running child processes, because you want to give instant feedback (progress) about what is happening.

```python
# from subprocess import run
from subprocess_tee import run

result = run("echo 123")
result.stdout == "123\n"
```

You can add `tee=False` to disable the tee functionality; this is a much shorter alternative than adding the well-known `stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL`.

Keep in mind that `universal_newlines=True` is implied, as text processing is expected; this is a divergence from the original `subprocess.run`.

You can still use `check=True` to make it raise a `CalledProcessError` exception when the return code is not zero.
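For illustration, here is a minimal sketch combining the `echo`, `quiet`, `tee` and `check` options described above; the values in the comments are what that behavior implies under a POSIX shell, not output copied from the project documentation:

```python
import subprocess

from subprocess_tee import run

# echo=True prints "COMMAND: ..." before execution, while quiet=True skips the
# live (tee) output but still captures it into result.stdout / result.stderr.
result = run("echo hello && >&2 echo oops", echo=True, quiet=True)
assert result.returncode == 0
assert result.stdout == "hello\n"
assert result.stderr == "oops\n"

# check=True mirrors subprocess.run(): a non-zero exit status raises
# subprocess.CalledProcessError; tee=False silences the live output.
try:
    run("exit 3", check=True, tee=False)
except subprocess.CalledProcessError as exc:
    assert exc.returncode == 3
```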
subprocess-tee-0.4.2/docs/000077500000000000000000000000001463411055300154125ustar00rootroot00000000000000subprocess-tee-0.4.2/docs/index.md000066400000000000000000000000211463411055300170340ustar00rootroot00000000000000{!../README.md!} subprocess-tee-0.4.2/mkdocs.yml000066400000000000000000000017001463411055300164630ustar00rootroot00000000000000site_name: subprocess-tee site_url: https://subprocess-tee.readthedocs.io/ repo_url: https://github.com/pycontribs/subprocess-tee theme: name: "material" features: - content.code.copy - content.action.edit extra: social: - icon: fontawesome/brands/github-alt link: https://github.com/squidfunk nav: - home: index.md - github: https://github.com/pycontribs/subprocess-tee plugins: - search - social - mkdocstrings - git-revision-date-localized: enable_creation_date: true markdown_extensions: - pymdownx.highlight: anchor_linenums: true - pymdownx.inlinehilite - pymdownx.snippets - pymdownx.superfences - pymdownx.magiclink: repo_url_shortener: true repo_url_shorthand: true social_url_shorthand: true social_url_shortener: true user: facelessuser repo: pymdown-extensions normalize_issue_symbols: true - markdown_include.include: base_path: docs subprocess-tee-0.4.2/molecule/000077500000000000000000000000001463411055300162675ustar00rootroot00000000000000subprocess-tee-0.4.2/molecule/default/000077500000000000000000000000001463411055300177135ustar00rootroot00000000000000subprocess-tee-0.4.2/molecule/default/converge.yml000066400000000000000000000002251463411055300222450ustar00rootroot00000000000000--- - name: Converge hosts: localhost gather_facts: false tasks: - name: "Test" debug: msg: "Past glories are poor feeding." subprocess-tee-0.4.2/molecule/default/molecule.yml000066400000000000000000000002141463411055300222400ustar00rootroot00000000000000--- dependency: name: galaxy driver: name: default platforms: - name: instance provisioner: name: ansible verifier: name: ansible subprocess-tee-0.4.2/pyproject.toml000066400000000000000000000047401463411055300174030ustar00rootroot00000000000000[build-system] requires = [ # Needed for PEP 621 support "setuptools >= 61.0", "setuptools_scm[toml] >= 7.0.0", ] build-backend = "setuptools.build_meta" [project] # https://peps.python.org/pep-0621/#readme requires-python = ">=3.8" dynamic = ["version"] name = "subprocess-tee" description = "subprocess-tee" readme = "README.md" authors = [ {"name"="Sorin Sbarnea", "email"="sorin.sbarnea@gmail.com"} ] maintainers = [ {"name"="Sorin Sbarnea", "email"="sorin.sbarnea@gmail.com"} ] license = {text = "MIT"} classifiers = [ # https://pypi.org/classifiers/ "Development Status :: 5 - Production/Stable", "Environment :: Console", "Intended Audience :: Developers", "Intended Audience :: Information Technology", "Intended Audience :: System Administrators", "License :: OSI Approved :: MIT License", "Operating System :: MacOS", "Operating System :: POSIX :: Linux", "Operating System :: POSIX", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python", "Topic :: System :: Systems Administration", "Topic :: Utilities", ] keywords = ["subprocess", "asyncio"] [project.urls] homepage = "https://github.com/pycontribs/subprocess-tee" documentation = "https://subprocess-tee.readthedocs.io" repository = "https://github.com/pycontribs/subprocess-tee" changelog = 
"https://github.com/pycontribs/subprocess-tee/releases" [project.optional-dependencies] docs = [ "argparse-manpage", "cairosvg", "markdown-include", "mkdocs", "mkdocs-git-revision-date-localized-plugin", "mkdocs-material", "mkdocs-material-extensions", "mkdocstrings", "mkdocstrings-python", "pillow", "pymdown-extensions", ] test =[ "enrich>=1.2.6", "molecule>=3.4.0", # ansible is needed but no direct imports are made "pytest-cov>=2.12.1", "pytest-plus>=0.2", "pytest-xdist>=2.3.0", "pytest>=6.2.5", ] [tool.isort] profile = "black" known_first_party = "subprocess_tee" [tool.mypy] python_version = 3.8 color_output = true error_summary = true disallow_any_generics = true disallow_any_unimported = true disallow_untyped_calls = true disallow_untyped_defs = true warn_redundant_casts = true warn_return_any = true warn_unused_configs = true [tool.setuptools_scm] local_scheme = "no-local-version" subprocess-tee-0.4.2/readthedocs.yml000066400000000000000000000003001463411055300174630ustar00rootroot00000000000000version: 2 mkdocs: fail_on_warning: true build: os: ubuntu-22.04 tools: python: "3.11" python: install: - method: pip path: . extra_requirements: - docs subprocess-tee-0.4.2/src/000077500000000000000000000000001463411055300152515ustar00rootroot00000000000000subprocess-tee-0.4.2/src/subprocess_tee/000077500000000000000000000000001463411055300202765ustar00rootroot00000000000000subprocess-tee-0.4.2/src/subprocess_tee/__init__.py000066400000000000000000000125251463411055300224140ustar00rootroot00000000000000"""tee like run implementation.""" import asyncio import os import platform import subprocess import sys from asyncio import StreamReader from importlib.metadata import PackageNotFoundError, version # type: ignore from shlex import join from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Union try: __version__ = version("subprocess-tee") except PackageNotFoundError: # pragma: no branch __version__ = "0.1.dev1" __all__ = ["run", "CompletedProcess", "__version__"] if TYPE_CHECKING: CompletedProcess = subprocess.CompletedProcess[Any] # pylint: disable=E1136 else: CompletedProcess = subprocess.CompletedProcess STREAM_LIMIT = 2**23 # 8MB instead of default 64kb, override it if you need async def _read_stream(stream: StreamReader, callback: Callable[..., Any]) -> None: while True: line = await stream.readline() if line: callback(line) else: break async def _stream_subprocess( args: Union[str, List[str]], **kwargs: Any ) -> CompletedProcess: platform_settings: Dict[str, Any] = {} if platform.system() == "Windows": platform_settings["env"] = os.environ # this part keeps behavior backwards compatible with subprocess.run tee = kwargs.get("tee", True) stdout = kwargs.get("stdout", sys.stdout) with open(os.devnull, "w", encoding="UTF-8") as devnull: if stdout == subprocess.DEVNULL or not tee: stdout = devnull stderr = kwargs.get("stderr", sys.stderr) if stderr == subprocess.DEVNULL or not tee: stderr = devnull # We need to tell subprocess which shell to use when running shell-like # commands. 
# * SHELL is not always defined # * /bin/bash does not exit on alpine, /bin/sh seems bit more portable if "executable" not in kwargs and isinstance(args, str) and " " in args: platform_settings["executable"] = os.environ.get("SHELL", "/bin/sh") # pass kwargs we know to be supported for arg in ["cwd", "env"]: if arg in kwargs: platform_settings[arg] = kwargs[arg] # Some users are reporting that default (undocumented) limit 64k is too # low if isinstance(args, str): process = await asyncio.create_subprocess_shell( args, limit=STREAM_LIMIT, stdin=kwargs.get("stdin", False), stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE, **platform_settings, ) else: process = await asyncio.create_subprocess_exec( *args, limit=STREAM_LIMIT, stdin=kwargs.get("stdin", False), stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE, **platform_settings, ) out: List[str] = [] err: List[str] = [] def tee_func(line: bytes, sink: List[str], pipe: Optional[Any]) -> None: line_str = line.decode("utf-8").rstrip() sink.append(line_str) if not kwargs.get("quiet", False): if pipe and hasattr(pipe, "write"): print(line_str, file=pipe) else: print(line_str) loop = asyncio.get_running_loop() tasks = [] if process.stdout: tasks.append( loop.create_task( _read_stream(process.stdout, lambda x: tee_func(x, out, stdout)) ) ) if process.stderr: tasks.append( loop.create_task( _read_stream(process.stderr, lambda x: tee_func(x, err, stderr)) ) ) await asyncio.wait(set(tasks)) # We need to be sure we keep the stdout/stderr output identical with # the ones procued by subprocess.run(), at least when in text mode. check = kwargs.get("check", False) stdout = None if check else "" stderr = None if check else "" if out: stdout = os.linesep.join(out) + os.linesep if err: stderr = os.linesep.join(err) + os.linesep return CompletedProcess( args=args, returncode=await process.wait(), stdout=stdout, stderr=stderr, ) def run(args: Union[str, List[str]], **kwargs: Any) -> CompletedProcess: """Drop-in replacement for subprocess.run that behaves like tee. Extra arguments added by our version: echo: False - Prints command before executing it. 
quiet: False - Avoid printing output """ if isinstance(args, str): cmd = args else: # run was called with a list instead of a single item but asyncio # create_subprocess_shell requires command as a single string, so # we need to convert it to string cmd = join(args) check = kwargs.get("check", False) if kwargs.get("echo", False): print(f"COMMAND: {cmd}") result = asyncio.run(_stream_subprocess(args, **kwargs)) # we restore original args to mimic subprocess.run() result.args = args if check and result.returncode != 0: raise subprocess.CalledProcessError( result.returncode, args, output=result.stdout, stderr=result.stderr ) return result subprocess-tee-0.4.2/src/subprocess_tee/py.typed000066400000000000000000000000001463411055300217630ustar00rootroot00000000000000subprocess-tee-0.4.2/test/000077500000000000000000000000001463411055300154415ustar00rootroot00000000000000subprocess-tee-0.4.2/test/__init__.py000066400000000000000000000000201463411055300175420ustar00rootroot00000000000000"""Unittests.""" subprocess-tee-0.4.2/test/test_func.py000066400000000000000000000011131463411055300200010ustar00rootroot00000000000000"""Functional tests for subprocess-tee library.""" import subprocess import sys import pytest @pytest.mark.skipif( sys.version_info < (3, 9), reason="molecule test requires python 3.9+" ) def test_molecule() -> None: """Ensures molecule does display output of its subprocesses.""" result = subprocess.run( ["molecule", "test"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True, check=False, ) # type: ignore assert result.returncode == 0 assert "Past glories are poor feeding." in result.stdout subprocess-tee-0.4.2/test/test_rich.py000066400000000000000000000030671463411055300200050ustar00rootroot00000000000000"""Tests for rich module.""" import sys import pytest from enrich.console import Console from subprocess_tee import run @pytest.mark.xfail(reason="Bug https://github.com/pycontribs/subprocess-tee/issues/58") def test_rich_console_ex() -> None: """Validate that ConsoleEx can capture output from print() calls.""" console = Console(record=True, redirect=True) console.print("alpha") print("beta") sys.stdout.write("gamma\n") sys.stderr.write("delta\n") # While not supposed to happen, we want to be sure that this will not raise # an exception. Some libraries may still sometimes send bytes to the # streams, a notable example being click.
# sys.stdout.write(b"epsilon\n") # type: ignore proc = run("echo 123") assert proc.stdout == "123\n" text = console.export_text() assert text == "alpha\nbeta\ngamma\ndelta\n123\n" def test_rich_console_ex_ansi() -> None: """Validate that ANSI sent to sys.stdout does not become garbage in record.""" print() console = Console(force_terminal=True, record=True, redirect=True) console.print("[green]this from Console.print()[/green]", style="red") proc = run(r'echo -e "\033[31mred\033[0m"') assert proc.returncode == 0 assert "red" in proc.stdout # validate that what rich recorded is the same as what the subprocess produced text = console.export_text(clear=False) assert "red" in text # validate that html export also contains at least the "red" text html = console.export_html(clear=False) assert 'red' in html subprocess-tee-0.4.2/test/test_unit.py000066400000000000000000000113361463411055300200350ustar00rootroot00000000000000"""Unittests.""" import subprocess import sys from typing import Dict import pytest from _pytest.capture import CaptureFixture from subprocess_tee import run def test_run_string() -> None: """Valida run() called with a single string command.""" cmd = "echo 111 && >&2 echo 222" old_result = subprocess.run( cmd, shell=True, universal_newlines=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=False, ) result = run(cmd) assert result.returncode == old_result.returncode assert result.stdout == old_result.stdout assert result.stderr == old_result.stderr def test_run_list() -> None: """Validate run call with a command made of list of strings.""" # NOTICE: subprocess.run() does fail to capture any output when cmd is # a list and you specific shell=True. Still, when not mentioning shell, # it does work. cmd = [sys.executable, "--version"] old_result = subprocess.run( cmd, # shell=True, universal_newlines=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=False, ) result = run(cmd) assert result.returncode == old_result.returncode assert result.stdout == old_result.stdout assert result.stderr == old_result.stderr def test_run_echo(capsys: CaptureFixture[str]) -> None: """Validate run call with echo dumps command.""" cmd = [sys.executable, "--version"] old_result = subprocess.run( cmd, # shell=True, universal_newlines=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=False, ) result = run(cmd, echo=True) assert result.returncode == old_result.returncode assert result.stdout == old_result.stdout assert result.stderr == old_result.stderr out, err = capsys.readouterr() assert out.startswith("COMMAND:") assert err == "" @pytest.mark.parametrize( "env", [{}, {"SHELL": "/bin/sh"}, {"SHELL": "/bin/bash"}, {"SHELL": "/bin/zsh"}], ids=["auto", "sh", "bash", "zsh"], ) def test_run_with_env(env: Dict[str, str]) -> None: """Validate that passing custom env to run() works.""" env["FOO"] = "BAR" result = run("echo $FOO", env=env, echo=True) assert result.stdout == "BAR\n" def test_run_shell() -> None: """Validate run call with multiple shell commands works.""" cmd = "echo a && echo b && false || exit 4" # "python --version" result = run(cmd, echo=True) assert result.returncode == 4 assert result.stdout == "a\nb\n" def test_run_shell_undefined() -> None: """Validate run call with multiple shell commands works.""" cmd = "echo a && echo b && false || exit 4" # "python --version" result = run(cmd, echo=True, env={}) assert result.returncode == 4 assert result.stdout == "a\nb\n" def test_run_cwd() -> None: """Validate that run accepts cwd and respects it.""" cmd = "pwd" result 
= run(cmd, echo=True, cwd="/") assert result.returncode == 0 assert result.stdout == "/\n" def test_run_with_check_raise() -> None: """Assure compatibility with subprocess.run when using check (return 1).""" with pytest.raises(subprocess.CalledProcessError) as ours: run("false", check=True) with pytest.raises(subprocess.CalledProcessError) as original: subprocess.run("false", check=True, universal_newlines=True) assert ours.value.returncode == original.value.returncode assert ours.value.cmd == original.value.cmd assert ours.value.output == original.value.output assert ours.value.stdout == original.value.stdout assert ours.value.stderr == original.value.stderr def test_run_with_check_pass() -> None: """Assure compatibility with subprocess.run when using check (return 0).""" ours = run("true", check=True) original = subprocess.run("true", check=True, universal_newlines=True) assert ours.returncode == original.returncode assert ours.args == original.args assert ours.stdout == original.stdout assert ours.stderr == original.stderr def test_run_compat() -> None: """Assure compatibility with subprocess.run().""" cmd = ["seq", "10"] ours = run(cmd) original = subprocess.run( cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True, check=False, ) assert ours.returncode == original.returncode assert ours.stdout == original.stdout assert ours.stderr == original.stderr assert ours.args == original.args def test_run_waits_for_completion(tmp_path): """run() should always wait for the process to complete.""" tmpfile = tmp_path / "output.txt" run(f"sleep 0.1 && echo 42 > {str(tmpfile)}") assert tmpfile.read_text() == "42\n" subprocess-tee-0.4.2/tox.ini000066400000000000000000000026431463411055300160020ustar00rootroot00000000000000[tox] minversion = 4.0 envlist = lint pkg deps py isolated_build = True requires = tox>=4.0.12 [testenv] usedevelop = True # do not put * in passenv as it may break builds due to reduced isolation passenv = CI GITHUB_* HOME PIP_* PUBLISH PYTEST_* SSH_AUTH_SOCK TERM setenv = PIP_DISABLE_VERSION_CHECK=1 PYTEST_REQPASS=16 py38: PYTEST_REQPASS=15 PYTHONDONTWRITEBYTECODE=1 PYTHONUNBUFFERED=1 commands = python -m pytest deps = ansible-core extras = test allowlist_externals = find rm sh changedir = {toxinidir} [testenv:docs] description = Build docs extras = docs commands = mkdocs build --strict [testenv:lint] description = Runs all linting tasks commands = # to run a single linter you can do "pre-commit run flake8" python -m pre_commit run {posargs:--all} deps = pre-commit>=1.18.1 extras = skip_install = true usedevelop = false [testenv:pkg] description = Do packaging and distribution tests, PEP-440,PEP-517,PEP-621 # `usedevelop = true` overrides `skip_install` instruction, it's unwanted usedevelop = false # don't install package in this env skip_install = true deps = build >= 0.9.0 twine >= 4.0.1 setenv = commands = rm -rfv {toxinidir}/dist/ python -m build \ --outdir {toxinidir}/dist/ \ {toxinidir} # metadata validation python -m twine check --strict {toxinidir}/dist/*