pytest-forked-0.2/0000775000175000017500000000000013141141777015631 5ustar rpfannscrpfannsc00000000000000pytest-forked-0.2/.travis.yml0000644000175000017500000000250212777240621017741 0ustar rpfannscrpfannsc00000000000000sudo: false language: python python: - '2.7' - '3.4' - '3.5' cache: pip install: pip install -U tox setuptools_scm env: matrix: - TOXENV=py-pytest27 - TOXENV=py-pytest28 - TOXENV=py-pytest29 - TOXENV=py-pytest30 matrix: include: - python: '2.7' env: TOXENV=flakes - python: '2.7' env: TOXENV=readme script: tox notifications: irc: channels: - chat.freenode.net#pytest on_success: change on_failure: change skip_join: true email: - pytest-commit@python.org deploy: provider: pypi user: ronny password: secure: cxmSDho5d+PYKEM4ZCg8ms1P4lzhYkrw6fEOm2HtTcsuCyY6aZMSgImWAnEYbJHSkdzgcxlXK9UKJ9B0YenXmBCkAr7UjdnpNXNmkySr0sYzlH/sfqt/dDATCHFaRKxnkOSOVywaDYhT9n8YudbXI77pXwD12i/CeSSJDbHhsu0JYUfAcb+D6YjRYoA2SEGCnzSzg+gDDfwXZx4ZiODCGLVwieNp1klCg88YROUE1BaYYNuUOONvfXX8+TWowbCF6ChH1WL/bZ49OStEYQNuYxZQZr4yClIqu9VJbchrU8j860K9ott2kkGTgfB/dDrQB/XncBubyIX9ikzCQAmmBXWAI3eyvWLPDk2Jz7kW2l2RT7syct80tCq3JhvQ1qdwr5ap7siocTLgnBW0tF4tkHSTFN3510fkc43npnp6FThebESQpnI24vqpwJ9hI/kW5mYi014Og2E/cpCXnz2XO8iZPDbqAMQpDsqEQoyhfGNgPTGp4K30TxRtwZBI5hHhDKnnR16fXtRgt1gYPvz/peUQvvpOm4JzIzGXPzluuutpnCBy75v5+oiwT3YRrLL/Meims9FtDDXL3qQubAE/ezIOOpm0N5XXV8DxIom8EN71yq5ab1tqhM+tBX7owRjy4FR4If2Q8feBdmTuh26DIQt/y+qSG8VkB9Sw/JCjc7c= on: tags: true distributions: sdist bdist_wheel repo: pytest-dev/pytest-boxed pytest-forked-0.2/testing/0000775000175000017500000000000013141141777017306 5ustar rpfannscrpfannsc00000000000000pytest-forked-0.2/testing/conftest.py0000644000175000017500000000044512777236004021510 0ustar rpfannscrpfannsc00000000000000import pytest pytest_plugins = "pytester" @pytest.fixture(autouse=True) def _divert_atexit(request, monkeypatch): import atexit l = [] def finish(): while l: l.pop()() monkeypatch.setattr(atexit, "register", l.append) request.addfinalizer(finish) pytest-forked-0.2/testing/test_boxed.py0000644000175000017500000000234613141133566022020 0ustar rpfannscrpfannsc00000000000000import pytest import os needsfork = pytest.mark.skipif(not hasattr(os, "fork"), reason="os.fork required") @needsfork def test_functional_boxed(testdir): p1 = testdir.makepyfile(""" import os def test_function(): os.kill(os.getpid(), 15) """) result = testdir.runpytest(p1, "--forked") result.stdout.fnmatch_lines([ "*CRASHED*", "*1 failed*" ]) @needsfork @pytest.mark.parametrize("capmode", [ "no", pytest.mark.xfail("sys", reason="capture cleanup needed"), pytest.mark.xfail("fd", reason="capture cleanup needed")]) def test_functional_boxed_capturing(testdir, capmode): p1 = testdir.makepyfile(""" import os import sys def test_function(): sys.stdout.write("hello\\n") sys.stderr.write("world\\n") os.kill(os.getpid(), 15) """) result = testdir.runpytest(p1, "--forked", "--capture=%s" % capmode) result.stdout.fnmatch_lines(""" *CRASHED* *stdout* hello *stderr* world *1 failed* """) def test_is_not_boxed_by_default(testdir): config = testdir.parseconfig(testdir.tmpdir) assert not config.option.forked pytest-forked-0.2/setup.py0000644000175000017500000000225513141136302017331 0ustar rpfannscrpfannsc00000000000000from setuptools import setup setup( name="pytest-forked", use_scm_version=True, description='run tests in isolated forked subprocesses', long_description=open('README.rst').read(), license='MIT', author='pytest-dev', author_email='pytest-dev@python.org', url='https://github.com/pytest-dev/pytest-forked', platforms=['linux', 
'osx'], packages=['pytest_forked'], package_dir={'': 'src'}, entry_points={ 'pytest11': [ 'pytest_forked = pytest_forked', ], }, zip_safe=False, install_requires=['pytest>=2.6.0'], setup_requires=['setuptools_scm'], classifiers=[ 'Development Status :: 7 - Inactive', 'Framework :: Pytest', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: POSIX', 'Operating System :: MacOS :: MacOS X', 'Topic :: Software Development :: Testing', 'Topic :: Software Development :: Quality Assurance', 'Topic :: Utilities', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 3', ], ) pytest-forked-0.2/setup.cfg0000644000175000017500000000010313141141777017442 0ustar rpfannscrpfannsc00000000000000[bdist_wheel] universal = 1 [egg_info] tag_build = tag_date = 0 pytest-forked-0.2/MANIFEST.in0000644000175000017500000000016012777232135017365 0ustar rpfannscrpfannsc00000000000000include CHANGELOG include LICENSE include README.txt include setup.py include tox.ini graft testing prune .git pytest-forked-0.2/tox.ini0000644000175000017500000000105413136644634017146 0ustar rpfannscrpfannsc00000000000000[tox] # if you change the envlist, please update .travis.yml file as well envlist= py{27,35,36}-pytest{29,30,31} flakes readme [testenv] deps = pycmd # to avoid .eggs setuptools_scm pytest29: pytest~=2.9.1 pytest30: pytest~=3.0.1 pytest31: pytest~=3.1.1 platform=linux|darwin commands= py.test {posargs} [testenv:flakes] changedir= deps = flake8 commands = flake8 setup.py testing xdist [testenv:readme] changedir = deps = readme skip_install = true commands = python setup.py check -r -s [pytest] addopts = -rsfxX ;; hello pytest-forked-0.2/README.rst0000644000175000017500000000236313141141417017311 0ustar rpfannscrpfannsc00000000000000pytest-forked: run each test in a forked subprocess ==================================================== .. warning:: this is a extraction of the xdist --forked module, future maintenance beyond the bare minimum is not plannend until a new maintainer is found * ``--forked``: (not available on Windows) run each test in a forked subprocess to survive ``SEGFAULTS`` or otherwise dying processes Installation ----------------------- Install the plugin with:: pip install pytest-forked or use the package in develope/in-place mode with a checkout of the `pytest-forked repository`_ :: pip install -e . Usage examples --------------------- If you have tests involving C or C++ libraries you might have to deal with tests crashing the process. For this case you may use the boxing options:: py.test --forked which will run each test in a subprocess and will report if a test crashed the process. You can also combine this option with running multiple processes via pytest-xdist to speed up the test run and use your CPU cores:: py.test -n3 --forked this would run 3 testing subprocesses in parallel which each create new forked subprocesses for each test. .. _`pytest-forked repository`: https://github.com/pytest-dev/pytest-forkedpytest-forked-0.2/PKG-INFO0000664000175000017500000000463713141141777016740 0ustar rpfannscrpfannsc00000000000000Metadata-Version: 1.1 Name: pytest-forked Version: 0.2 Summary: run tests in isolated forked subprocesses Home-page: https://github.com/pytest-dev/pytest-forked Author: pytest-dev Author-email: pytest-dev@python.org License: MIT Description: pytest-forked: run each test in a forked subprocess ==================================================== .. 
warning:: this is a extraction of the xdist --forked module, future maintenance beyond the bare minimum is not plannend until a new maintainer is found * ``--forked``: (not available on Windows) run each test in a forked subprocess to survive ``SEGFAULTS`` or otherwise dying processes Installation ----------------------- Install the plugin with:: pip install pytest-forked or use the package in develope/in-place mode with a checkout of the `pytest-forked repository`_ :: pip install -e . Usage examples --------------------- If you have tests involving C or C++ libraries you might have to deal with tests crashing the process. For this case you may use the boxing options:: py.test --forked which will run each test in a subprocess and will report if a test crashed the process. You can also combine this option with running multiple processes via pytest-xdist to speed up the test run and use your CPU cores:: py.test -n3 --forked this would run 3 testing subprocesses in parallel which each create new forked subprocesses for each test. .. _`pytest-forked repository`: https://github.com/pytest-dev/pytest-forked Platform: linux Platform: osx Classifier: Development Status :: 7 - Inactive Classifier: Framework :: Pytest Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: MIT License Classifier: Operating System :: POSIX Classifier: Operating System :: MacOS :: MacOS X Classifier: Topic :: Software Development :: Testing Classifier: Topic :: Software Development :: Quality Assurance Classifier: Topic :: Utilities Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 2 Classifier: Programming Language :: Python :: 3 pytest-forked-0.2/LICENSE0000644000175000017500000000204512777232135016640 0ustar rpfannscrpfannsc00000000000000 Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
pytest-forked-0.2/src/0000775000175000017500000000000013141141777016420 5ustar rpfannscrpfannsc00000000000000pytest-forked-0.2/src/pytest_forked/0000775000175000017500000000000013141141777021302 5ustar rpfannscrpfannsc00000000000000pytest-forked-0.2/src/pytest_forked/__init__.py0000644000175000017500000000463313141133512023403 0ustar rpfannscrpfannsc00000000000000 import py # we know this bit is bad, but we cant help it with the current pytest setup from _pytest import runner import pytest # copied from xdist remote def serialize_report(rep): import py d = rep.__dict__.copy() if hasattr(rep.longrepr, 'toterminal'): d['longrepr'] = str(rep.longrepr) else: d['longrepr'] = rep.longrepr for name in d: if isinstance(d[name], py.path.local): d[name] = str(d[name]) elif name == "result": d[name] = None # for now return d def pytest_addoption(parser): group = parser.getgroup("forked", "forked subprocess test execution") group.addoption( '--forked', action="store_true", dest="forked", default=False, help="box each test run in a separate process (unix)") @pytest.mark.tryfirst def pytest_runtest_protocol(item): if item.config.getvalue("forked"): reports = forked_run_report(item) for rep in reports: item.ihook.pytest_runtest_logreport(report=rep) return True def forked_run_report(item): # for now, we run setup/teardown in the subprocess # XXX optionally allow sharing of setup/teardown from _pytest.runner import runtestprotocol EXITSTATUS_TESTEXIT = 4 import marshal def runforked(): try: reports = runtestprotocol(item, log=False) except KeyboardInterrupt: py.std.os._exit(EXITSTATUS_TESTEXIT) return marshal.dumps([serialize_report(x) for x in reports]) ff = py.process.ForkedFunc(runforked) result = ff.waitfinish() if result.retval is not None: report_dumps = marshal.loads(result.retval) return [runner.TestReport(**x) for x in report_dumps] else: if result.exitstatus == EXITSTATUS_TESTEXIT: py.test.exit("forked test item %s raised Exit" % (item,)) return [report_process_crash(item, result)] def report_process_crash(item, result): path, lineno = item._getfslineno() info = ("%s:%s: running the test CRASHED with signal %d" % (path, lineno, result.signal)) from _pytest import runner call = runner.CallInfo(lambda: 0/0, "???") call.excinfo = info rep = runner.pytest_runtest_makereport(item, call) if result.out: rep.sections.append(("captured stdout", result.out)) if result.err: rep.sections.append(("captured stderr", result.err)) return rep pytest-forked-0.2/example/0000775000175000017500000000000013141141777017264 5ustar rpfannscrpfannsc00000000000000pytest-forked-0.2/example/boxed.txt0000644000175000017500000000573513141136100021116 0ustar rpfannscrpfannsc00000000000000 If your testing involves C or C++ libraries you might have to deal with crashing processes. The xdist-plugin provides the ``--boxed`` option to run each test in a controlled subprocess. Here is a basic example:: # content of test_module.py import pytest import os import time # run test function 50 times with different argument @pytest.mark.parametrize("arg", range(50)) def test_func(arg): time.sleep(0.05) # each tests takes a while if arg % 19 == 0: os.kill(os.getpid(), 15) If you run this with:: $ py.test --forked =========================== test session starts ============================ platform linux2 -- Python 2.7.3 -- pytest-2.3.0.dev8 plugins: xdist, bugzilla, cache, oejskit, cli, pep8, cov collecting ... collected 50 items test_module.py f..................f..................f........... 
================================= FAILURES ================================= _______________________________ test_func[0] _______________________________ /home/hpk/tmp/doc-exec-420/test_module.py:6: running the test CRASHED with signal 15 ______________________________ test_func[19] _______________________________ /home/hpk/tmp/doc-exec-420/test_module.py:6: running the test CRASHED with signal 15 ______________________________ test_func[38] _______________________________ /home/hpk/tmp/doc-exec-420/test_module.py:6: running the test CRASHED with signal 15 =================== 3 failed, 47 passed in 3.41 seconds ==================== You'll see that a couple of tests are reported as crashing, indicated by lower-case ``f`` and the respective failure summary. You can also use the xdist-provided parallelization feature to speed up your testing:: $ py.test --forked -n3 =========================== test session starts ============================ platform linux2 -- Python 2.7.3 -- pytest-2.3.0.dev8 plugins: xdist, bugzilla, cache, oejskit, cli, pep8, cov gw0 I / gw1 I / gw2 I gw0 [50] / gw1 [50] / gw2 [50] scheduling tests via LoadScheduling ..f...............f..................f............ ================================= FAILURES ================================= _______________________________ test_func[0] _______________________________ [gw0] linux2 -- Python 2.7.3 /home/hpk/venv/1/bin/python /home/hpk/tmp/doc-exec-420/test_module.py:6: running the test CRASHED with signal 15 ______________________________ test_func[19] _______________________________ [gw2] linux2 -- Python 2.7.3 /home/hpk/venv/1/bin/python /home/hpk/tmp/doc-exec-420/test_module.py:6: running the test CRASHED with signal 15 ______________________________ test_func[38] _______________________________ [gw2] linux2 -- Python 2.7.3 /home/hpk/venv/1/bin/python /home/hpk/tmp/doc-exec-420/test_module.py:6: running the test CRASHED with signal 15 =================== 3 failed, 47 passed in 2.03 seconds ==================== pytest-forked-0.2/CHANGELOG0000644000175000017500000000005412777232171017043 0ustar rpfannscrpfannsc00000000000000v1.0 ===== * just a takeout of pytest-xdistpytest-forked-0.2/appveyor.yml0000644000175000017500000000026712777232135020227 0ustar rpfannscrpfannsc00000000000000install: - C:\Python35\python -m pip install tox setuptools_scm build: false # Not a C# project, build stuff at the test step instead. test_script: - C:\Python35\python -m tox
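
The core isolation pattern implemented in ``src/pytest_forked/__init__.py`` above can be exercised on its own. Below is a minimal sketch, assuming a Unix platform (``os.fork`` available) and the installed ``py`` library that the plugin itself depends on: a callable is run in a forked child via ``py.process.ForkedFunc`` and its result is marshalled back to the parent, mirroring how the plugin ships test reports out of the forked process. The helper name ``run_in_fork`` and the sample payload are made up for illustration, not part of the plugin::

    import marshal

    import py  # provides py.process.ForkedFunc, the same dependency the plugin uses


    def run_in_fork(fn):
        """Run ``fn`` in a forked child and return its (marshalable) result."""

        def child():
            # like the plugin's ``runforked``: turn the result into a
            # marshalable blob before handing it back to the parent
            return marshal.dumps(fn())

        ff = py.process.ForkedFunc(child)
        result = ff.waitfinish()
        if result.retval is not None:
            return marshal.loads(result.retval)
        # the child never returned a value -- it died from a signal,
        # just as a crashing test would under ``--forked``
        raise RuntimeError("forked call CRASHED with signal %d" % result.signal)


    if __name__ == "__main__":
        print(run_in_fork(lambda: [1, 2, 3]))

Running this prints the list computed in the child process. If the callable instead killed its own process (for example with ``os.kill(os.getpid(), 15)``), the parent would see ``result.retval`` as ``None`` and ``result.signal`` as ``15`` -- the situation that ``report_process_crash`` in the plugin turns into a "CRASHED" test failure.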