pax_global_header00006660000000000000000000000064146656047070014531gustar00rootroot0000000000000052 comment=7f1e5ad77198761971f0a85782daf451e906e119 spyder-unittest-0.7.0/000077500000000000000000000000001466560470700147205ustar00rootroot00000000000000spyder-unittest-0.7.0/.ciocheck000066400000000000000000000071561466560470700165020ustar00rootroot00000000000000# ----------------------------------------------------------------------------- # ciocheck # https://github.com/ContinuumIO/ciocheck # ----------------------------------------------------------------------------- [ciocheck] branch = origin/master diff_mode = commited file_mode = all check = pep8,pydocstyle,flake8,isort,yapf,coverage,pytest enforce = pep8,pydocstyle,flake8,isort,yapf,coverage,pytest # Python (pyformat) header = # -*- coding: utf-8 -*- copyright_file = .ciocopyright add_copyright = true add_header = true add_init = true # ----------------------------------------------------------------------------- # pep8 # https://pep8.readthedocs.io/en/release-1.7.x/intro.html#configuration # ----------------------------------------------------------------------------- [pep8] exclude = */tests/* ignore = E126,W503 max-line-length = 79 # ----------------------------------------------------------------------------- # pydocstyle # http://www.pydocstyle.org/en/latest/usage.html#example # ----------------------------------------------------------------------------- [pydocstyle] add-ignore = D203, inherit = false match = .*/(?!test)[^/]*.py # ----------------------------------------------------------------------------- # Flake 8 # http://flake8.readthedocs.io/en/latest/config.html # ----------------------------------------------------------------------------- [flake8] exclude = */tests/* ignore = E126,W503 max-line-length = 79 max-complexity = 64 # ----------------------------------------------------------------------------- # pylint # https://pylint.readthedocs.io/en/latest/ # ----------------------------------------------------------------------------- #[pylint:messages] # ----------------------------------------------------------------------------- # isort # https://github.com/timothycrosley/isort/wiki/isort-Settings # ----------------------------------------------------------------------------- [isort] from_first = true import_heading_stdlib = Standard library imports import_heading_thirdparty = Third party imports import_heading_firstparty = Local imports import_heading_localfolder = Local imports indent = ' ' known_first_party = spyder_unittest known_third_party = spyder,pytest,pytestqt,nose line_length = 79 sections = FUTURE,STDLIB,THIRDPARTY,FIRSTPARTY,LOCALFOLDER # ----------------------------------------------------------------------------- # YAPF # https://github.com/google/yapf#formatting-style # ----------------------------------------------------------------------------- [yapf:style] based_on_style = pep8 column_limit = 79 spaces_before_comment = 2 split_before_named_assigned = False # ----------------------------------------------------------------------------- # autopep8 # http://pep8.readthedocs.io/en/latest/intro.html#configuration # ----------------------------------------------------------------------------- [autopep8] exclude = */tests/* ignore = E126, max-line-length = 99 aggressive = 0 # ----------------------------------------------------------------------------- # Coverage # http://coverage.readthedocs.io/en/latest/config.html # ----------------------------------------------------------------------------- 
[coverage:run] omit = */tests/* [coverage:report] fail_under = 0 show_missing = true skip_covered = true exclude_lines = pragma: no cover def test(): if __name__ == .__main__.: # ----------------------------------------------------------------------------- # pytest # http://doc.pytest.org/en/latest/usage.html # ----------------------------------------------------------------------------- [pytest] addopts = -rfew --durations=10 python_functions = test_* python_files = test_*.py spyder-unittest-0.7.0/.coveragerc000066400000000000000000000002471466560470700170440ustar00rootroot00000000000000[run] omit = */tests/* [report] fail_under = 0 show_missing = true skip_covered = true exclude_lines = pragma: no cover def test(): if __name__ == .__main__.: spyder-unittest-0.7.0/.gitattributes000066400000000000000000000001251466560470700176110ustar00rootroot00000000000000# Set the default behavior, in case people don't have core.autocrlf set. * text=auto spyder-unittest-0.7.0/.github/000077500000000000000000000000001466560470700162605ustar00rootroot00000000000000spyder-unittest-0.7.0/.github/FUNDING.yml000066400000000000000000000000301466560470700200660ustar00rootroot00000000000000open_collective: spyder spyder-unittest-0.7.0/.github/issue_template.md000066400000000000000000000014731466560470700216320ustar00rootroot00000000000000 ## Description of your problem ## What steps will reproduce the problem? 1. 2. 3. ## What is the expected output? What do you see instead? ### Paste traceback/error below (if applicable) *(Copy from error dialog or `View > Panes > Internal Console`)* ```python-traceback ``` ## Key versions and other information: * Spyder version: * Version of spyder-unittest plugin: * Installation method for Spyder and the unittest plugin: Anaconda / pip / ... 
* Python version: * Testing framework used: nose2 / pytest / unittest * Testing framework version: * Operating system: spyder-unittest-0.7.0/.github/scripts/000077500000000000000000000000001466560470700177475ustar00rootroot00000000000000spyder-unittest-0.7.0/.github/scripts/generate-without-spyder.py000066400000000000000000000012201466560470700251130ustar00rootroot00000000000000#!/usr/bin/env python # # Copyright (c) Spyder Project Contributors # Licensed under the terms of the MIT License # (see LICENSE.txt for details) """Script to generate requirements/without-spyder.txt""" import re from pathlib import Path rootdir = Path(__file__).parents[2] input_filename = rootdir / 'requirements' / 'conda.txt' output_filename = rootdir / 'requirements' / 'without-spyder.txt' with open(input_filename) as infile: with open(output_filename, 'w') as outfile: for line in infile: package_name = re.match('[-a-z0-9_]*', line).group(0) if package_name != 'spyder': outfile.write(line) spyder-unittest-0.7.0/.github/workflows/000077500000000000000000000000001466560470700203155ustar00rootroot00000000000000spyder-unittest-0.7.0/.github/workflows/run-tests.yml000066400000000000000000000073541466560470700230150ustar00rootroot00000000000000name: Run tests on: push: branches: - master pull_request: branches: - master jobs: main: strategy: fail-fast: false matrix: OS: ['ubuntu', 'macos', 'windows'] PYTHON_VERSION: ['3.9', '3.10', '3.11'] SPYDER_SOURCE: ['conda', 'git'] name: ${{ matrix.OS }} py${{ matrix.PYTHON_VERSION }} spyder-from-${{ matrix.SPYDER_SOURCE }} runs-on: ${{ matrix.OS }}-latest env: CI: True PYTHON_VERSION: ${{ matrix.PYTHON_VERSION }} steps: - name: Checkout branch uses: actions/checkout@v4 with: path: 'spyder-unittest' - name: Install System Packages if: matrix.OS == 'ubuntu' run: | sudo apt-get update --fix-missing sudo apt-get install -qq pyqt5-dev-tools libxcb-xinerama0 xterm --fix-missing - name: Install Conda uses: conda-incubator/setup-miniconda@v3 with: miniforge-version: latest auto-update-conda: true python-version: ${{ matrix.PYTHON_VERSION }} - name: Checkout Spyder from git if: matrix.SPYDER_SOURCE == 'git' uses: actions/checkout@v4 with: repository: 'spyder-ide/spyder' path: 'spyder' - name: Install Spyder's dependencies (main) if: matrix.SPYDER_SOURCE == 'git' shell: bash -l {0} run: mamba env update --file spyder/requirements/main.yml - name: Install Spyder's dependencies (Linux) if: matrix.SPYDER_SOURCE == 'git' && matrix.OS == 'ubuntu' shell: bash -l {0} run: mamba env update --file spyder/requirements/linux.yml - name: Install Spyder's dependencies (Mac / Windows) if: matrix.SPYDER_SOURCE == 'git' && matrix.OS != 'ubuntu' shell: bash -l {0} run: mamba env update --file spyder/requirements/${{ matrix.OS }}.yml - name: Install Spyder from source if: matrix.SPYDER_SOURCE == 'git' shell: bash -l {0} run: pip install --no-deps -e spyder - name: Install plugin dependencies (without Spyder) if: matrix.SPYDER_SOURCE == 'git' shell: bash -l {0} run: | python spyder-unittest/.github/scripts/generate-without-spyder.py mamba install --file spyder-unittest/requirements/without-spyder.txt -y - name: Install plugin dependencies if: matrix.SPYDER_SOURCE == 'conda' shell: bash -l {0} run: mamba install --file spyder-unittest/requirements/conda.txt -y - name: Install test dependencies shell: bash -l {0} run: | mamba install nomkl -y -q mamba install --file spyder-unittest/requirements/tests.txt -y - name: Install plugin shell: bash -l {0} run: pip install --no-deps -e spyder-unittest - name: Show 
environment information shell: bash -l {0} run: | mamba info mamba list - name: Run tests (Linux) if: matrix.OS == 'ubuntu' uses: nick-fields/retry@v3 with: timeout_minutes: 10 max_attempts: 3 shell: bash command: | . ~/.profile xvfb-run --auto-servernum pytest spyder-unittest/spyder_unittest -vv - name: Run tests (MacOS) if: matrix.OS == 'macos' uses: nick-fields/retry@v3 with: timeout_minutes: 10 max_attempts: 3 shell: bash command: | . ~/.profile pytest spyder-unittest/spyder_unittest -vv - name: Run tests (Windows) if: matrix.OS == 'windows' uses: nick-fields/retry@v3 with: timeout_minutes: 10 max_attempts: 3 command: pytest spyder-unittest/spyder_unittest -vv spyder-unittest-0.7.0/.gitignore000066400000000000000000000004761466560470700167170ustar00rootroot00000000000000*.py[cod] # C extensions *.so # Packages *.egg *.egg-info dist build eggs parts bin var sdist develop-eggs .installed.cfg lib lib64 __pycache__ .cache # Installer logs pip-log.txt # Unit test / coverage reports .coverage .tox nosetests.xml # Mr Developer .mr.developer.cfg .project .pydevproject # OSX .DS_Store spyder-unittest-0.7.0/AUTHORS000066400000000000000000000004671466560470700157770ustar00rootroot00000000000000The Spyder Project Contributors are composed of: * Pierre Raybaut (Original Spyder author). * Carlos Cordoba (Current maintainer). * All other developers who have contributed to this repository and/or the precursor in the main spyder-ide/spyder repository. spyder-unittest-0.7.0/CHANGELOG.md000066400000000000000000000526501466560470700165410ustar00rootroot00000000000000# History of changes ## Version 0.7.0 (2024/09/03) This release updates the plugin to be used with Spyder 6 and fixes two bugs. ### Bug fixes * Save command-line arguments ([Issue 216](https://github.com/spyder-ide/spyder-unittest/issues/216), [PR 217](https://github.com/spyder-ide/spyder-unittest/pull/217) by [@abdullahkhalids](https://github.com/abdullahkhalids)) * Thanks to [Abdullah Khalid](https://github.com/abdullahkhalids) for this contribution! 
* Update installation instructions to point to conda-forge ([Issue 220](https://github.com/spyder-ide/spyder-unittest/issues/220), [PR 223](https://github.com/spyder-ide/spyder-unittest/pull/223)) ### Maintenance * Make plugin compatible with Spyder 6 ([Issue 198](https://github.com/spyder-ide/spyder-unittest/issues/198), [Issue 210](https://github.com/spyder-ide/spyder-unittest/issues/210), [Issue 215](https://github.com/spyder-ide/spyder-unittest/issues/215), [Issue 221](https://github.com/spyder-ide/spyder-unittest/issues/221), [PR 223](https://github.com/spyder-ide/spyder-unittest/pull/223), [PR 222](https://github.com/spyder-ide/spyder-unittest/pull/222), [PR 218](https://github.com/spyder-ide/spyder-unittest/pull/218)) ## Version 0.6.0 (2023-07-02) ### New Features * Support nose2 and drop support for nose ([Issue 178](https://github.com/spyder-ide/spyder-unittest/issues/178), [PR 200](https://github.com/spyder-ide/spyder-unittest/pull/200)) * New menu item for running only a single test ([Issue 88](https://github.com/spyder-ide/spyder-unittest/issues/88), [PR 211](https://github.com/spyder-ide/spyder-unittest/pull/211)) * New configuration option for adding extra command-line arguments when running tests ([Issue 199](https://github.com/spyder-ide/spyder-unittest/issues/199), [PR 204](https://github.com/spyder-ide/spyder-unittest/pull/204)) * New configuration option to disable or enable abbreviating the test name ([Issue 122](https://github.com/spyder-ide/spyder-unittest/issues/122), [PR 208](https://github.com/spyder-ide/spyder-unittest/pull/208)) ### Bug Fixes * Execute `unittest` tests programmatically for robustness ([Issue 73](https://github.com/spyder-ide/spyder-unittest/issues/73), [Issue 76](https://github.com/spyder-ide/spyder-unittest/issues/76), [Issue 160](https://github.com/spyder-ide/spyder-unittest/issues/160), [PR 202](https://github.com/spyder-ide/spyder-unittest/pull/202)) * Support changed format of `unittest` output in Python 3.11 ([Issue 193](https://github.com/spyder-ide/spyder-unittest/issues/193), [PR 190](https://github.com/spyder-ide/spyder-unittest/pull/190), [PR 194](https://github.com/spyder-ide/spyder-unittest/pull/194), by [@juliangilbey](https://github.com/juliangilbey)) * Fix keyboard shortcut for running tests ([Issue 172](https://github.com/spyder-ide/spyder-unittest/issues/172), [PR 203](https://github.com/spyder-ide/spyder-unittest/pull/203)) * Use colours from Spyder's standard palette to get a uniform UI ([Issue 186](https://github.com/spyder-ide/spyder-unittest/issues/186), [PR 187](https://github.com/spyder-ide/spyder-unittest/pull/187)) ### Maintenance * Keep plugin up-to-date with latest changes in Spyder 5 ([Issue 195](https://github.com/spyder-ide/spyder-unittest/issues/195), [Issue 206](https://github.com/spyder-ide/spyder-unittest/issues/206), [Issue 209](https://github.com/spyder-ide/spyder-unittest/issues/209), [PR 197](https://github.com/spyder-ide/spyder-unittest/pull/197), [PR 207](https://github.com/spyder-ide/spyder-unittest/pull/207), [PR 214](https://github.com/spyder-ide/spyder-unittest/pull/214)) * Update translations ([PR 212](https://github.com/spyder-ide/spyder-unittest/pull/212)) * Fix integration tests for the plugin ([Issue 167](https://github.com/spyder-ide/spyder-unittest/issues/167), [PR 197](https://github.com/spyder-ide/spyder-unittest/pull/197)) * Update GitHub workflow for running tests ([PR 192](https://github.com/spyder-ide/spyder-unittest/pull/192), [PR 
196](https://github.com/spyder-ide/spyder-unittest/pull/196), [PR 201](https://github.com/spyder-ide/spyder-unittest/pull/201)) ## Version 0.5.1 (2022/09/03) ### New Features * Tests are executed using the Python interpreter set in Preferences (instead of the interpreter that Spyder runs under), by [@stevetracvc](https://github.com/stevetracvc) ([Issue 65](https://github.com/spyder-ide/spyder-unittest/issues/65), [PR 174](https://github.com/spyder-ide/spyder-unittest/pull/174)) * You can display test coverage, though only for pytest; by [@stevetracvc](https://github.com/stevetracvc) ([Issue 33](https://github.com/spyder-ide/spyder-unittest/issues/33), [PR 175](https://github.com/spyder-ide/spyder-unittest/pull/175)) ### Bug Fixes and Maintenance * Use the correct environment when checking which testing frameworks are installed ([Issue 177](https://github.com/spyder-ide/spyder-unittest/issues/177), [PR 182](https://github.com/spyder-ide/spyder-unittest/pull/182)) * A message is shown if pytest exits abnormally ([Issue 176](https://github.com/spyder-ide/spyder-unittest/issues/176), [PR 180](https://github.com/spyder-ide/spyder-unittest/pull/180)) * The plugin no longer supports Python 2 ([Issue 156](https://github.com/spyder-ide/spyder-unittest/issues/156), [PR 179](https://github.com/spyder-ide/spyder-unittest/pull/179)) * Resolve warnings emitted by test suite for spyder-unittest ([Issue 173](https://github.com/spyder-ide/spyder-unittest/issues/173), [PR 181](https://github.com/spyder-ide/spyder-unittest/pull/181)) ## Version 0.5.0 (2022/01/20) * Update plugin for Spyder 5.2 ([Issue 163](https://github.com/spyder-ide/spyder-unittest/issues/163), [PR 166](https://github.com/spyder-ide/spyder-unittest/pull/166)). No version of the plugin is compatible with Spyder 5.0 or 5.1. * Add partial translations for Brazilian Portuguese, French and German ([Issue 30](https://github.com/spyder-ide/spyder-unittest/issues/30), [PR 168](https://github.com/spyder-ide/spyder-unittest/pull/168), [PR 169](https://github.com/spyder-ide/spyder-unittest/pull/169), [PR 170](https://github.com/spyder-ide/spyder-unittest/pull/170)). ## Version 0.4.1 (2020/05/23) This release fixes several bugs and other issues, allowing the plugin to be used with Spyder 4.1. This release can not be used with Python 2. ### Issues Closed * [Issue 154](https://github.com/spyder-ide/spyder-unittest/issues/154) - Make plugin depend on Python 3 ([PR 155](https://github.com/spyder-ide/spyder-unittest/pull/155)) * [Issue 145](https://github.com/spyder-ide/spyder-unittest/issues/145) - Go to test definition only works when run from root dir ([PR 149](https://github.com/spyder-ide/spyder-unittest/pull/149)) * [Issue 138](https://github.com/spyder-ide/spyder-unittest/issues/138) - Move CI to github actions ([PR 143](https://github.com/spyder-ide/spyder-unittest/pull/143)) * [Issue 127](https://github.com/spyder-ide/spyder-unittest/issues/127) - Teardown function's logs not captured ([PR 151](https://github.com/spyder-ide/spyder-unittest/pull/151)) * [Issue 115](https://github.com/spyder-ide/spyder-unittest/issues/115) - Report pytest plugins used while running a test suite ([PR 146](https://github.com/spyder-ide/spyder-unittest/pull/146)) * [Issue 47](https://github.com/spyder-ide/spyder-unittest/issues/47) - pytest statuses "expected-fail" and "unexpectedly passing" not yet reflected in Category ([PR 151](https://github.com/spyder-ide/spyder-unittest/pull/151)) In this release 6 issues were closed. 
### Pull Requests Merged * [PR 155](https://github.com/spyder-ide/spyder-unittest/pull/155) - PR: Require Python 3.5 or later ([154](https://github.com/spyder-ide/spyder-unittest/issues/154)) * [PR 153](https://github.com/spyder-ide/spyder-unittest/pull/153) - Fix tests that could never fail * [PR 152](https://github.com/spyder-ide/spyder-unittest/pull/152) - Fix pytest output processing * [PR 151](https://github.com/spyder-ide/spyder-unittest/pull/151) - Fix pytest backend ([47](https://github.com/spyder-ide/spyder-unittest/issues/47), [127](https://github.com/spyder-ide/spyder-unittest/issues/127)) * [PR 150](https://github.com/spyder-ide/spyder-unittest/pull/150) - Fix test_pytestrunner_start * [PR 149](https://github.com/spyder-ide/spyder-unittest/pull/149) - Fix pytest test filename path resolution ([145](https://github.com/spyder-ide/spyder-unittest/issues/145)) * [PR 148](https://github.com/spyder-ide/spyder-unittest/pull/148) - Use set_status_label function * [PR 147](https://github.com/spyder-ide/spyder-unittest/pull/147) - Fix abbreviator if name has parameters with dots * [PR 146](https://github.com/spyder-ide/spyder-unittest/pull/146) - Show version info of test installed frameworks and their plugins ([115](https://github.com/spyder-ide/spyder-unittest/issues/115)) * [PR 144](https://github.com/spyder-ide/spyder-unittest/pull/144) - Dynamic sizing of text editor window ([12202](https://github.com/spyder-ide/spyder/issues/12202)) * [PR 143](https://github.com/spyder-ide/spyder-unittest/pull/143) - Move CI to GitHub actions ([138](https://github.com/spyder-ide/spyder-unittest/issues/138)) * [PR 141](https://github.com/spyder-ide/spyder-unittest/pull/141) - Fix status label * [PR 139](https://github.com/spyder-ide/spyder-unittest/pull/139) - Fix TextEditor constructor In this release 13 pull requests were closed. ## Version 0.4.0 (2020/01/07) This release updates the plugin to be used with Spyder 4 and fixes some bugs. ### Issues Closed * [Issue 133](https://github.com/spyder-ide/spyder-unittest/issues/133) - Colours make text hard to read when run in dark mode ([PR 135](https://github.com/spyder-ide/spyder-unittest/pull/135)) * [Issue 129](https://github.com/spyder-ide/spyder-unittest/issues/129) - Docstrings in test functions confuse unittest's output parser ([PR 134](https://github.com/spyder-ide/spyder-unittest/pull/134)) * [Issue 128](https://github.com/spyder-ide/spyder-unittest/issues/128) - KeyError: 'test not found' ([PR 132](https://github.com/spyder-ide/spyder-unittest/pull/132)) In this release 3 issues were closed. ### Pull Requests Merged * [PR 135](https://github.com/spyder-ide/spyder-unittest/pull/135) - PR: Use appropriate colours when Spyder is in dark mode ([133](https://github.com/spyder-ide/spyder-unittest/issues/133)) * [PR 134](https://github.com/spyder-ide/spyder-unittest/pull/134) - PR: Allow for unittest tests to have docstrings ([129](https://github.com/spyder-ide/spyder-unittest/issues/129)) * [PR 132](https://github.com/spyder-ide/spyder-unittest/pull/132) - PR: Use nodeid provided by pytest in itemcollected hook ([128](https://github.com/spyder-ide/spyder-unittest/issues/128)) * [PR 131](https://github.com/spyder-ide/spyder-unittest/pull/131) - PR: Compatibility fixes for Spyder 4 In this release 4 pull requests were closed. ## Version 0.3.1 (2018/06/15) This version fixes some bugs and also includes some cosmetic changes. 
### Issues Closed * [Issue 117](https://github.com/spyder-ide/spyder-unittest/issues/117) - Rename "py.test" to "pytest" throughout ([PR 119](https://github.com/spyder-ide/spyder-unittest/pull/119)) * [Issue 113](https://github.com/spyder-ide/spyder-unittest/issues/113) - NameError in test file causes internal error ([PR 118](https://github.com/spyder-ide/spyder-unittest/pull/118)) * [Issue 112](https://github.com/spyder-ide/spyder-unittest/issues/112) - Plugin confused by tests writing to `sys.__stdout__` ([PR 114](https://github.com/spyder-ide/spyder-unittest/pull/114)) In this release 3 issues were closed. ### Pull Requests Merged * [PR 121](https://github.com/spyder-ide/spyder-unittest/pull/121) - PR: Update readme to remove funding appeal, harmonize with other readmes and minor fixes * [PR 120](https://github.com/spyder-ide/spyder-unittest/pull/120) - Remove unused variables when initializing localization * [PR 119](https://github.com/spyder-ide/spyder-unittest/pull/119) - Replace 'py.test' by 'pytest' ([117](https://github.com/spyder-ide/spyder-unittest/issues/117)) * [PR 118](https://github.com/spyder-ide/spyder-unittest/pull/118) - Use str() to convert pytest's longrepr to a string ([113](https://github.com/spyder-ide/spyder-unittest/issues/113)) * [PR 114](https://github.com/spyder-ide/spyder-unittest/pull/114) - Use ZMQ sockets to communicate results of pytest run ([112](https://github.com/spyder-ide/spyder-unittest/issues/112)) In this release 5 pull requests were closed. ## Version 0.3.0 (2018/02/16) This version includes improved support of `py.test` (test results are displayed as they come in, double clicking on a test result opens the test in the editor) as well as various other improvements. ### Issues Closed * [Issue 106](https://github.com/spyder-ide/spyder-unittest/issues/106) - After sorting, test details are lost ([PR 110](https://github.com/spyder-ide/spyder-unittest/pull/110)) * [Issue 103](https://github.com/spyder-ide/spyder-unittest/issues/103) - "Go to" not working unless working directory is correctly set ([PR 109](https://github.com/spyder-ide/spyder-unittest/pull/109)) * [Issue 98](https://github.com/spyder-ide/spyder-unittest/issues/98) - Running unittest tests within py.test results in error ([PR 102](https://github.com/spyder-ide/spyder-unittest/pull/102)) * [Issue 96](https://github.com/spyder-ide/spyder-unittest/issues/96) - Use new colors for passed and failed tests ([PR 108](https://github.com/spyder-ide/spyder-unittest/pull/108)) * [Issue 94](https://github.com/spyder-ide/spyder-unittest/issues/94) - Enable sorting in table of test results ([PR 104](https://github.com/spyder-ide/spyder-unittest/pull/104)) * [Issue 93](https://github.com/spyder-ide/spyder-unittest/issues/93) - Handle errors in py.test's collection phase ([PR 99](https://github.com/spyder-ide/spyder-unittest/pull/99)) * [Issue 92](https://github.com/spyder-ide/spyder-unittest/issues/92) - Retitle "Kill" (tests) button to "Stop" ([PR 107](https://github.com/spyder-ide/spyder-unittest/pull/107)) * [Issue 89](https://github.com/spyder-ide/spyder-unittest/issues/89) - Write tests for UnitTestPlugin ([PR 95](https://github.com/spyder-ide/spyder-unittest/pull/95)) * [Issue 87](https://github.com/spyder-ide/spyder-unittest/issues/87) - Don't display test time when using unittest ([PR 105](https://github.com/spyder-ide/spyder-unittest/pull/105)) * [Issue 86](https://github.com/spyder-ide/spyder-unittest/issues/86) - Use sensible precision when displaying test times ([PR 
105](https://github.com/spyder-ide/spyder-unittest/pull/105)) * [Issue 83](https://github.com/spyder-ide/spyder-unittest/issues/83) - Changes for compatibility with new undocking behavior of Spyder ([PR 84](https://github.com/spyder-ide/spyder-unittest/pull/84)) * [Issue 77](https://github.com/spyder-ide/spyder-unittest/issues/77) - Be smarter about abbreviating test names * [Issue 71](https://github.com/spyder-ide/spyder-unittest/issues/71) - Save before running tests (?) ([PR 101](https://github.com/spyder-ide/spyder-unittest/pull/101)) * [Issue 50](https://github.com/spyder-ide/spyder-unittest/issues/50) - Use py.test's API to run tests ([PR 91](https://github.com/spyder-ide/spyder-unittest/pull/91)) * [Issue 43](https://github.com/spyder-ide/spyder-unittest/issues/43) - Save selected test framework ([PR 90](https://github.com/spyder-ide/spyder-unittest/pull/90)) * [Issue 31](https://github.com/spyder-ide/spyder-unittest/issues/31) - Add issues/PRs templates ([PR 111](https://github.com/spyder-ide/spyder-unittest/pull/111)) * [Issue 13](https://github.com/spyder-ide/spyder-unittest/issues/13) - Display test results as they come in ([PR 91](https://github.com/spyder-ide/spyder-unittest/pull/91)) * [Issue 12](https://github.com/spyder-ide/spyder-unittest/issues/12) - Double clicking on test name should take you somewhere useful ([PR 100](https://github.com/spyder-ide/spyder-unittest/pull/100)) In this release 18 issues were closed. ### Pull Requests Merged * [PR 111](https://github.com/spyder-ide/spyder-unittest/pull/111) - Update docs for new release ([31](https://github.com/spyder-ide/spyder-unittest/issues/31)) * [PR 110](https://github.com/spyder-ide/spyder-unittest/pull/110) - Emit modelReset after sorting test results ([106](https://github.com/spyder-ide/spyder-unittest/issues/106)) * [PR 109](https://github.com/spyder-ide/spyder-unittest/pull/109) - Store full path to file containing test in TestResult ([103](https://github.com/spyder-ide/spyder-unittest/issues/103)) * [PR 108](https://github.com/spyder-ide/spyder-unittest/pull/108) - Use paler shade of red as background for failing tests ([96](https://github.com/spyder-ide/spyder-unittest/issues/96)) * [PR 107](https://github.com/spyder-ide/spyder-unittest/pull/107) - Relabel 'Kill' button ([92](https://github.com/spyder-ide/spyder-unittest/issues/92)) * [PR 105](https://github.com/spyder-ide/spyder-unittest/pull/105) - Improve display of test times ([87](https://github.com/spyder-ide/spyder-unittest/issues/87), [86](https://github.com/spyder-ide/spyder-unittest/issues/86)) * [PR 104](https://github.com/spyder-ide/spyder-unittest/pull/104) - Allow user to sort tests ([94](https://github.com/spyder-ide/spyder-unittest/issues/94)) * [PR 102](https://github.com/spyder-ide/spyder-unittest/pull/102) - Use nodeid when collecting tests using py.test ([98](https://github.com/spyder-ide/spyder-unittest/issues/98)) * [PR 101](https://github.com/spyder-ide/spyder-unittest/pull/101) - Save all files before running tests ([71](https://github.com/spyder-ide/spyder-unittest/issues/71)) * [PR 100](https://github.com/spyder-ide/spyder-unittest/pull/100) - Implement go to test definition for py.test ([12](https://github.com/spyder-ide/spyder-unittest/issues/12)) * [PR 99](https://github.com/spyder-ide/spyder-unittest/pull/99) - Handle errors encountered when py.test collect tests ([93](https://github.com/spyder-ide/spyder-unittest/issues/93)) * [PR 97](https://github.com/spyder-ide/spyder-unittest/pull/97) - Abbreviate module names when displaying 
test names * [PR 95](https://github.com/spyder-ide/spyder-unittest/pull/95) - Add unit tests for plugin ([89](https://github.com/spyder-ide/spyder-unittest/issues/89)) * [PR 91](https://github.com/spyder-ide/spyder-unittest/pull/91) - Display py.test results as they come in ([50](https://github.com/spyder-ide/spyder-unittest/issues/50), [13](https://github.com/spyder-ide/spyder-unittest/issues/13)) * [PR 90](https://github.com/spyder-ide/spyder-unittest/pull/90) - Load and save configuration for tests ([43](https://github.com/spyder-ide/spyder-unittest/issues/43)) * [PR 85](https://github.com/spyder-ide/spyder-unittest/pull/85) - Remove PySide from CI scripts and remove Scrutinizer * [PR 84](https://github.com/spyder-ide/spyder-unittest/pull/84) - PR: Show undock action ([83](https://github.com/spyder-ide/spyder-unittest/issues/83)) In this release 17 pull requests were closed. ## Version 0.2.0 (2017/08/20) The main change in this version is that it adds support for tests written using the `unittest` framework available in the standard Python library. ### Issues Closed * [Issue 79](https://github.com/spyder-ide/spyder-unittest/issues/79) - Remove QuantifiedCode * [Issue 74](https://github.com/spyder-ide/spyder-unittest/issues/74) - Also test against spyder's master branch in CI * [Issue 70](https://github.com/spyder-ide/spyder-unittest/issues/70) - Point contributors to ciocheck * [Issue 41](https://github.com/spyder-ide/spyder-unittest/issues/41) - Add function for registering test frameworks * [Issue 15](https://github.com/spyder-ide/spyder-unittest/issues/15) - Check whether test framework is installed * [Issue 11](https://github.com/spyder-ide/spyder-unittest/issues/11) - Abbreviate test names * [Issue 4](https://github.com/spyder-ide/spyder-unittest/issues/4) - Add unittest support In this release 7 issues were closed. ### Pull Requests Merged * [PR 82](https://github.com/spyder-ide/spyder-unittest/pull/82) - Enable Scrutinizer * [PR 81](https://github.com/spyder-ide/spyder-unittest/pull/81) - Update README.md * [PR 80](https://github.com/spyder-ide/spyder-unittest/pull/80) - Install Spyder from github 3.x branch when testing on Circle * [PR 78](https://github.com/spyder-ide/spyder-unittest/pull/78) - Properly handle test frameworks which are not installed * [PR 75](https://github.com/spyder-ide/spyder-unittest/pull/75) - Shorten test name displayed in widget * [PR 72](https://github.com/spyder-ide/spyder-unittest/pull/72) - Support unittest * [PR 69](https://github.com/spyder-ide/spyder-unittest/pull/69) - Process coverage stats using coveralls * [PR 68](https://github.com/spyder-ide/spyder-unittest/pull/68) - Add framework registry for associating testing frameworks with runners * [PR 67](https://github.com/spyder-ide/spyder-unittest/pull/67) - Install the tests alongside the module In this release 9 pull requests were closed. ## Version 0.1.2 (2017/03/04) This version fixes a bug in the packaging code. ### Pull Requests Merged * [PR 63](https://github.com/spyder-ide/spyder-unittest/pull/63) - Fix parsing of module version In this release 1 pull request was closed. ## Version 0.1.1 (2017/02/11) This version improves the packaging. The code itself was not changed. 
### Issues Closed * [Issue 58](https://github.com/spyder-ide/spyder-unittest/issues/58) - Normalized copyright information * [Issue 57](https://github.com/spyder-ide/spyder-unittest/issues/57) - Depend on nose and pytest at installation * [Issue 56](https://github.com/spyder-ide/spyder-unittest/issues/56) - Add the test suite to the release tarball In this release 3 issues were closed. ### Pull Requests Merged * [PR 59](https://github.com/spyder-ide/spyder-unittest/pull/59) - Improve distributed package In this release 1 pull request was closed. ## Version 0.1.0 (2017/02/05) Initial release, supporting nose and py.test frameworks. spyder-unittest-0.7.0/LICENSE.txt000066400000000000000000000021051466560470700165410ustar00rootroot00000000000000The MIT License (MIT) Copyright © 2013 Spyder Project Contributors Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
spyder-unittest-0.7.0/MANIFEST.in000066400000000000000000000002201466560470700164500ustar00rootroot00000000000000include CHANGELOG.md include LICENSE.txt include README.md recursive-include spyder_unittest *.py recursive-include spyder_unittest/locale *.mo spyder-unittest-0.7.0/README.md000066400000000000000000000131321466560470700161770ustar00rootroot00000000000000# Spyder-Unittest ## Project information [![license](https://img.shields.io/pypi/l/spyder-unittest.svg)](./LICENSE) [![conda version](https://img.shields.io/conda/v/spyder-ide/spyder-unittest.svg)](https://www.anaconda.com/download/) [![download count](https://img.shields.io/conda/d/spyder-ide/spyder-unittest.svg)](https://www.anaconda.com/download/) [![pypi version](https://img.shields.io/pypi/v/spyder-unittest.svg)](https://pypi.org/project/spyder-unittest/) [![Join the chat at https://gitter.im/spyder-ide/public](https://badges.gitter.im/spyder-ide/spyder.svg)](https://gitter.im/spyder-ide/public) [![OpenCollective Backers](https://opencollective.com/spyder/backers/badge.svg?color=blue)](#backers) [![OpenCollective Sponsors](https://opencollective.com/spyder/sponsors/badge.svg?color=blue)](#sponsors) ## Build status [![Windows status](https://github.com/spyder-ide/spyder-unittest/workflows/Windows%20tests/badge.svg)](https://github.com/spyder-ide/spyder-notebook/actions?query=workflow%3A%22Windows+tests%22) [![Linux status](https://github.com/spyder-ide/spyder-unittest/workflows/Linux%20tests/badge.svg)](https://github.com/spyder-ide/spyder-notebook/actions?query=workflow%3A%22Linux+tests%22) [![MacOS status](https://github.com/spyder-ide/spyder-unittest/workflows/Macos%20tests/badge.svg)](https://github.com/spyder-ide/spyder-notebook/actions?query=workflow%3A%22Macos+tests%22) [![Crowdin](https://badges.crowdin.net/spyder-unittest/localized.svg)](https://crowdin.com/project/spyder-unittest) *Copyright © 2014 Spyder Project Contributors* ![Screenshot of spyder-unittest plugin showing test results](./doc/screenshot.png) ## Description Spyder-unittest is a plugin that integrates popular unit test frameworks with Spyder, allowing you to run test suites and view the results in the IDE. The plugin supports the `unittest` module in the Python standard library as well as the `pytest` and `nose2` testing frameworks. Support for `pytest` is most complete at the moment. ## Installation To install this plugin, you can use either ``pip`` or ``conda`` package managers, as follows: Using conda (the recommended way!): ``` conda install spyder-unittest -c conda-forge ``` Using pip: ``` pip install spyder-unittest ``` **Note**: At the moment it is not possible to use this plugin with the [Spyder installers](http://docs.spyder-ide.org/current/installation.html#standalone-installers) for Windows and macOS. We're working to make that a reality in the future. ## Usage The plugin adds an item `Run unit tests` to the `Run` menu in Spyder. Click on this to run the unit tests. After you specify the testing framework and the directory under which the tests are stored, the tests are run. The `Unit testing` window pane (displayed at the top of this file) will pop up with the results. If you are using `pytest`, you can double-click on a test to view it in the editor. If you want to run tests in a different directory or switch testing frameworks, click `Configure` in the Options menu (cogwheel icon), which is located in the upper right corner of the `Unit testing` pane. 
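As a minimal illustration (the file name and test names below are made up for this example and are not part of the plugin), a `pytest`-style test file that the plugin can discover and run looks like this:

```python
# test_example.py: an illustrative test module. Save it in the directory
# that you configured for the unittest plugin.
import pytest


def test_addition():
    # A passing test; it is reported as passed in the Unit testing pane.
    assert 1 + 1 == 2


@pytest.mark.skip(reason='demonstrates a skipped test')
def test_skipped():
    # A skipped test; the pane reports it as skipped instead of running it.
    assert 2 + 2 == 5
```

After choosing `pytest` as the testing framework and running `Run unit tests`, each test in such a file is listed in the `Unit testing` pane together with its outcome; the repository ships a slightly larger example of this kind in `doc/example/test_foo.py`.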
## Feedback Bug reports, feature requests and other ideas are more than welcome on the [issue tracker](https://github.com/spyder-ide/spyder-unittest/issues). Use the [Spyder Google Group](https://groups.google.com/group/spyderlib) or our [Gitter Chatroom](https://gitter.im/spyder-ide/public) for general discussion. ## Development Development of the plugin is done at https://github.com/spyder-ide/spyder-unittest . You can install the development version of the plugin by cloning the git repository and running `pip install .`, possibly with the `--editable` flag. The plugin has the following dependencies: * [spyder](https://github.com/spyder-ide/spyder) (obviously), at least version 6.0 * [lxml](http://lxml.de/) * the testing framework that you will be using: [pytest](https://pytest.org) and/or [nose2](https://docs.nose2.io) In order to run the tests distributed with this plugin, you need [nose2](https://docs.nose2.io), [pytest](https://pytest.org) and [pytest-qt](https://github.com/pytest-dev/pytest-qt). The plugin requires Python 3.8 or later. You are very welcome to submit code contributions in the form of pull requests to the [issue tracker](https://github.com/spyder-ide/spyder-unittest/issues). GitHub is configured to run pull requests automatically against the test suite and against several automatic style checkers using [ciocheck](https://github.com/ContinuumIO/ciocheck). The style checkers can be rather finicky so you may want to install ciocheck locally and run them before submitting the code. ## Contributing Everyone is welcome to contribute! The document [Contributing to Spyder]( https://github.com/spyder-ide/spyder/blob/master/CONTRIBUTING.md) also applies to the unittest plugin. We are grateful to the entire Spyder community for their support, without which this plugin and the whole of Spyder would be a lot less awesome. ## More information [Main Website](https://www.spyder-ide.org/) [Download Spyder (with Anaconda)](https://www.anaconda.com/download/) [Spyder Github](https://github.com/spyder-ide/spyder) [Troubleshooting Guide and FAQ]( https://github.com/spyder-ide/spyder/wiki/Troubleshooting-Guide-and-FAQ) [Development Wiki](https://github.com/spyder-ide/spyder/wiki/Dev:-Index) [Gitter Chatroom](https://gitter.im/spyder-ide/public) [Google Group](https://groups.google.com/group/spyderlib) [@Spyder_IDE on Twitter](https://twitter.com/spyder_ide) [@SpyderIDE on Facebook](https://www.facebook.com/SpyderIDE/) [Support Spyder on OpenCollective](https://opencollective.com/spyder/) spyder-unittest-0.7.0/RELEASE.md000066400000000000000000000032121466560470700163200ustar000000000000001. Generate changelog: Run `loghub spyder-ide/spyder-unittest --milestone vx.y.z` 1. Edit changelog and commit 1. Bump version number in `spyder_unittest/__init__.py` 1. Remove non-versioned files: Run `git clean -xfdi` 1. Create source distribution: Run `python setup.py sdist` in root 1. Check that source distribution does not contain any unnecessary files (e.g., cache, `.pyc`) 1. Create wheel: Run `python setup.py bdist_wheel` 1. Test wheel: Uninstall current plugin and run `pip install dist/spyder_unittest-xxx.whl` 1. Check that `dist/` contains only the source distribution and wheel that you want to upload 1. Upload to PyPI: Run `twine upload dist/*` 1. Test: Uninstall current plugin and run `pip install spyder-unittest` 1. Commit `spyder_unittest/__init__.py` 1. Create a version tag on above commit: Run `git tag -a vx.y.z` 1.
Change version number in `spyder_unittest/__init__.py` to `....dev0` and commit and push 1. If building conda package: 1. Wait for bot to submit PR **or** edit `meta.yaml` in fork of `spyder-unittest-feedstock`, changing version number and hash computed with `sha256sum dist/spyder_unittest-x.y.z.tar.gz`, test with `conda build conda.recipe`, and submit PR 1. When automatic tests on PR finish successfully, merge PR 1. Wait for CI to build Conda package 1. Copy: `anaconda copy conda-forge/spyder-unittest/x.y.z --to-owner spyder-ide` 1. Test Conda package: Uninstall current plugin and run `conda install -c spyder-ide spyder-unittest` 1. Push commits and version tag to `spyder-ide` repo: Run `git push remote_name vx.y.z` 1. Use GitHub to edit tag and publish release 1. Announce release on Google Groups
spyder-unittest-0.7.0/conftest.py000066400000000000000000000010431466560470700171150ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # Copyright © Spyder Project Contributors # Licensed under the terms of the MIT License # """Configuration file for Pytest.""" # Standard library imports import os # To activate/deactivate certain things in Spyder when running tests. # NOTE: Please leave this before any other import here!! os.environ['SPYDER_PYTEST'] = 'True' # Third-party imports import pytest @pytest.fixture(autouse=True) def reset_conf_before_test(): from spyder.config.manager import CONF CONF.reset_to_defaults(notification=False)
spyder-unittest-0.7.0/crowdin.yml000066400000000000000000000005201466560470700171060ustar00rootroot00000000000000commit_message: '[ci skip] New %language% translation from Crowdin' append_commit_message: false files: - source: /spyder_unittest/locale/spyder_unittest.pot translation: /spyder_unittest/locale/%two_letters_code%/LC_MESSAGES/%file_name%.po languages_mapping: two_letters_code: pt-BR: pt_BR zh-CN: zh_CN
spyder-unittest-0.7.0/doc/000077500000000000000000000000001466560470700154655ustar00rootroot00000000000000spyder-unittest-0.7.0/doc/example/000077500000000000000000000000001466560470700171205ustar00rootroot00000000000000spyder-unittest-0.7.0/doc/example/test_foo.py000066400000000000000000000007651466560470700213220ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # Copyright © Spyder Project Contributors # Licensed under the terms of the MIT License # (see LICENSE.txt for details) """Example tests used to generate screenshots.""" import pytest def test_one_plus_one_is_two(): assert 1 + 1 == 2 def test_two_plus_two_is_four(): assert 2 + 2 == 4 def test_one_plus_two_is_five(): assert 1 + 2 == 5 def test_two_times_two_is_four(): assert 2 * 2 == 4 @pytest.mark.skip def test_will_be_skipped(): assert 0 == 1
spyder-unittest-0.7.0/doc/screenshot.png [binary PNG image: screenshot of the spyder-unittest plugin showing test results in the Unit testing pane; image data omitted]
spyder-unittest-0.7.0/requirements/tests.txt000066400000000000000000000000401466560470700213400ustar00rootroot00000000000000flaky nose2 pytest>=5 pytest-qt spyder-unittest-0.7.0/setup.cfg000066400000000000000000000000731466560470700165410ustar00rootroot00000000000000[tool:pytest] # Do not collect any classes python_classes= spyder-unittest-0.7.0/setup.py000066400000000000000000000051711466560470700164360ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # Copyright © 2013 Spyder Project Contributors # Licensed under the terms of the MIT License # (see LICENSE.txt for details) """ Setup script for spyder_unittest """ from setuptools import setup, find_packages import os import os.path as osp def get_version(): """Get version from source file""" import codecs with codecs.open("spyder_unittest/__init__.py", encoding="utf-8") as f: lines = f.read().splitlines() for l in lines: if "__version__" in l: version = l.split("=")[1].strip() version = version.replace("'", '').replace('"', '') return version def get_package_data(name, extlist): """Return data files for package *name* with extensions in *extlist*""" flist = [] # Workaround to replace os.path.relpath (not available until Python 2.6): offset = len(name) + len(os.pathsep) for dirpath, _dirnames, filenames in os.walk(name): for fname in filenames: if not fname.startswith('.') and osp.splitext(fname)[1] in extlist: flist.append(osp.join(dirpath, fname)[offset:]) return flist # Requirements REQUIREMENTS = ['lxml', 'spyder>=6,<7', 'pyzmq'] EXTLIST = ['.jpg', '.png', '.json', '.mo', '.ini'] LIBNAME = 'spyder_unittest' LONG_DESCRIPTION = """ This is a plugin for the Spyder IDE that integrates popular unit test frameworks. It allows you to run tests and view the results.
The plugin supports the `unittest` framework in the Python standard library and the `pytest` and `nose2` testing frameworks. """ setup( name=LIBNAME, version=get_version(), packages=find_packages(), package_data={LIBNAME: get_package_data(LIBNAME, EXTLIST)}, keywords=["Qt PyQt4 PyQt5 spyder plugins testing"], python_requires='>=3.8', install_requires=REQUIREMENTS, url='https://github.com/spyder-ide/spyder-unittest', license='MIT', author="Spyder Project Contributors", description='Plugin to run tests from within the Spyder IDE', long_description=LONG_DESCRIPTION, classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: X11 Applications :: Qt', 'Environment :: Win32 (MS Windows)', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 3', 'Topic :: Software Development :: Testing', 'Topic :: Text Editors :: Integrated Development Environments (IDE)'], entry_points={ 'spyder.plugins': [ 'unittest = spyder_unittest.unittestplugin:UnitTestPlugin']}) spyder-unittest-0.7.0/spyder_unittest/000077500000000000000000000000001466560470700201655ustar00rootroot00000000000000spyder-unittest-0.7.0/spyder_unittest/__init__.py000066400000000000000000000004461466560470700223020ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # Copyright © 2013 Spyder Project Contributors # Licensed under the terms of the MIT License # (see LICENSE.txt for details) """Spyder unittest plugin.""" # Local imports from .unittestplugin import UnitTestPlugin as PLUGIN_CLASS __version__ = '0.7.0' PLUGIN_CLASS spyder-unittest-0.7.0/spyder_unittest/backend/000077500000000000000000000000001466560470700215545ustar00rootroot00000000000000spyder-unittest-0.7.0/spyder_unittest/backend/__init__.py000066400000000000000000000003341466560470700236650ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # Copyright © 2013 Spyder Project Contributors # Licensed under the terms of the MIT License # (see LICENSE.txt for details) """Parts of the unittest plugin that are not related to the GUI.""" spyder-unittest-0.7.0/spyder_unittest/backend/abbreviator.py000066400000000000000000000066551466560470700244420ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # Copyright © 2017 Spyder Project Contributors # Licensed under the terms of the MIT License # (see LICENSE.txt for details) """Class for abbreviating test names.""" from __future__ import annotations # Standard imports from dataclasses import dataclass @dataclass class Abbreviation: """ Abbreviation for one component of a test name. Abbreviations are defined recursively, so `.head` is the abbreviation for the first component and `.tail` specifies the abbreviations for the second and later components. """ head: str tail: Abbreviator class Abbreviator: """ Abbreviates names so that abbreviation identifies name uniquely. First, if the name contains brackets, the part in brackets starting at the first bracket is removed from the name. Then, all names are split in components separated by full stops (like module names in Python). Every component is abbreviated by the smallest prefix not shared by other names in the same directory, except for the last component which is not changed. Finally, the part in brackets, which was removed at the beginning, is appended to the abbreviated name. 
Attributes ---------- dic : dict of (str, [str, Abbreviator]) keys are the first-level components, values are a list, with the abbreviation as its first element and an Abbreviator for abbreviating the higher-level components as its second element. """ def __init__(self, names: list[str]=[]) -> None: """ Constructor. Arguments --------- names : list of str list of words which needs to be abbreviated. """ self.dic: dict[str, Abbreviation] = {} for name in names: self.add(name) def add(self, name: str) -> None: """ Add name to list of names to be abbreviated. Arguments --------- name : str """ name = name.split('[', 1)[0] if '.' not in name: return len_abbrev = 1 start, rest = name.split('.', 1) for other in self.dic: if start[:len_abbrev] == other[:len_abbrev]: if start == other: break while (start[:len_abbrev] == other[:len_abbrev] and len_abbrev < len(start) and len_abbrev < len(other)): len_abbrev += 1 if len_abbrev == len(start): self.dic[other].head = other[:len_abbrev + 1] elif len_abbrev == len(other): self.dic[other].head = other len_abbrev += 1 else: if len(self.dic[other].head) < len_abbrev: self.dic[other].head = other[:len_abbrev] else: self.dic[start] = Abbreviation(start[:len_abbrev], Abbreviator()) self.dic[start].tail.add(rest) def abbreviate(self, name: str) -> str: """Return abbreviation of name.""" if '[' in name: name, parameters = name.split('[', 1) parameters = '[' + parameters else: parameters = '' if '.' in name: start, rest = name.split('.', 1) res = (self.dic[start].head + '.' + self.dic[start].tail.abbreviate(rest)) else: res = name return res + parameters spyder-unittest-0.7.0/spyder_unittest/backend/frameworkregistry.py000066400000000000000000000045671466560470700257300ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # Copyright © 2017 Spyder Project Contributors # Licensed under the terms of the MIT License # (see LICENSE.txt for details) """Keep track of testing frameworks and create test runners when requested.""" from __future__ import annotations # Standard imports from typing import Optional, TYPE_CHECKING # Local imports if TYPE_CHECKING: from spyder_unittest.backend.runnerbase import RunnerBase from spyder_unittest.widgets.unittestgui import UnitTestWidget class FrameworkRegistry(): """ Registry of testing frameworks and their associated runners. The test runner for a framework is responsible for running the tests and parsing the results. It should implement the interface of RunnerBase. Frameworks should first be registered using `.register()`. This registry can then create the assoicated test runner when `.create_runner()` is called. Attributes ---------- frameworks : dict of (str, type) Dictionary mapping names of testing frameworks to the types of the associated runners. """ def __init__(self) -> None: """Initialize self.""" self.frameworks: dict[str, type[RunnerBase]] = {} def register(self, runner_class: type[RunnerBase]) -> None: """Register runner class for a testing framework. Parameters ---------- runner_class Class used for creating tests runners for the framework. """ self.frameworks[runner_class.name] = runner_class def create_runner(self, framework: str, widget: UnitTestWidget, tempfilename: Optional[str]) -> RunnerBase: """Create test runner associated to some testing framework. This creates an instance of the runner class whose `name` attribute equals `framework`. Parameters ---------- framework Name of testing framework. widget Unit test widget which constructs the test runner. 
tempfilename Name of file in which to store test results. If None, use default. Returns ------- RunnerBase Newly created test runner Exceptions ---------- KeyError Provided testing framework has not been registered. """ cls = self.frameworks[framework] return cls(widget, tempfilename) spyder-unittest-0.7.0/spyder_unittest/backend/nose2runner.py000066400000000000000000000072711466560470700244150ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # Copyright © 2013 Spyder Project Contributors # Licensed under the terms of the MIT License # (see LICENSE.txt for details) """Support for the nose2 framework.""" from __future__ import annotations # Standard library imports from typing import Optional, TYPE_CHECKING # Third party imports from lxml import etree from spyder.config.base import get_translation # Local imports from spyder_unittest.backend.runnerbase import Category, RunnerBase, TestResult if TYPE_CHECKING: from spyder_unittest.widgets.configdialog import Config try: _ = get_translation('spyder_unittest') except KeyError: import gettext _ = gettext.gettext class Nose2Runner(RunnerBase): """Class for running tests within the nose2 framework.""" module = 'nose2' name = 'nose2' def create_argument_list(self, config: Config, cov_path: Optional[str], single_test: Optional[str]) -> list[str]: """Create argument list for testing process.""" arguments = [ '-m', self.module, '--plugin=nose2.plugins.junitxml', '--junit-xml', '--junit-xml-path={}'.format(self.resultfilename) ] if single_test: arguments.append(single_test) arguments += config.args return arguments def finished(self, exitcode: int) -> None: """Called when the unit test process has finished.""" output = self.read_all_process_output() testresults = self.load_data() self.sig_finished.emit(testresults, output, True) def load_data(self) -> list[TestResult]: """ Read and parse unit test results. This function reads the unit test results from the file with name `self.resultfilename` and parses them. The file should contain the test results in JUnitXML format. Returns ------- list of TestResult Unit test results.
""" try: data = etree.parse(self.resultfilename).getroot() except OSError: return [] testresults = [] for testcase in data: category = Category.OK status = 'ok' name = '{}.{}'.format(testcase.get('classname'), testcase.get('name')) message = '' time = float(testcase.get('time')) extras = [] for child in testcase: if child.tag in ('error', 'failure', 'skipped'): if child.tag == 'skipped': category = Category.SKIP else: category = Category.FAIL status = child.tag type_ = child.get('type') message = child.get('message', default='') if type_ and message: message = '{0}: {1}'.format(type_, message) elif type_: message = type_ if child.text: extras.append(child.text) elif child.tag in ('system-out', 'system-err') and child.text: if child.tag == 'system-out': heading = _('Captured stdout') else: heading = _('Captured stderr') contents = child.text.rstrip('\n') extras.append('----- {} -----\n{}'.format(heading, contents)) extra_text = '\n\n'.join(extras) testresults.append( TestResult(category, status, name, message, time, extra_text)) return testresults spyder-unittest-0.7.0/spyder_unittest/backend/pytestrunner.py000066400000000000000000000217231466560470700247150ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # Copyright © 2013 Spyder Project Contributors # Licensed under the terms of the MIT License # (see LICENSE.txt for details) """Support for pytest framework.""" from __future__ import annotations # Standard library imports import os import os.path as osp import re from typing import Any, Optional, TYPE_CHECKING # Local imports from spyder.config.base import get_translation from spyder_unittest.backend.runnerbase import (Category, RunnerBase, TestResult, COV_TEST_NAME) from spyder_unittest.backend.zmqreader import ZmqStreamReader if TYPE_CHECKING: from spyder_unittest.widgets.configdialog import Config try: _ = get_translation('spyder_unittest') except KeyError: import gettext _ = gettext.gettext class PyTestRunner(RunnerBase): """Class for running tests within pytest framework.""" module = 'pytest' name = 'pytest' def create_argument_list(self, config: Config, cov_path: Optional[str], single_test: Optional[str]) -> list[str]: """Create argument list for testing process.""" dirname = os.path.dirname(__file__) pyfile = os.path.join(dirname, 'workers', 'pytestworker.py') arguments = [pyfile, str(self.reader.port)] if config.coverage: arguments += [f'--cov={cov_path}', '--cov-report=term-missing'] if single_test: arguments.append(self.convert_testname_to_nodeid(single_test)) arguments += config.args return arguments def start(self, config: Config, cov_path: Optional[str], executable: str, pythonpath: list[str], single_test: Optional[str]) -> None: """Start process which will run the unit test suite.""" self.config = config self.reader = ZmqStreamReader() self.reader.sig_received.connect(self.process_output) super().start(config, cov_path, executable, pythonpath, single_test) def process_output(self, output: list[dict[str, Any]]) -> None: """ Process output of test process. Parameters ---------- output list of decoded Python object sent by test process. 
""" collected_list = [] collecterror_list = [] starttest_list = [] result_list = [] for result_item in output: if result_item['event'] == 'config': self.rootdir = result_item['rootdir'] elif result_item['event'] == 'collected': name = self.convert_nodeid_to_testname(result_item['nodeid']) collected_list.append(name) elif result_item['event'] == 'collecterror': tupl = self.logreport_collecterror_to_tuple(result_item) collecterror_list.append(tupl) elif result_item['event'] == 'starttest': name = self.logreport_starttest_to_str(result_item) starttest_list.append(name) elif result_item['event'] == 'logreport': testresult = self.logreport_to_testresult(result_item) result_list.append(testresult) if collected_list: self.sig_collected.emit(collected_list) if collecterror_list: self.sig_collecterror.emit(collecterror_list) if starttest_list: self.sig_starttest.emit(starttest_list) if result_list: self.sig_testresult.emit(result_list) def process_coverage(self, output: str) -> None: """Search the output text for coverage details. Called by the function 'finished' at the very end. """ cov_results = re.search( r'-*? coverage:.*?-*\nTOTAL\s.*?\s(\d*?)\%.*\n=*', output, flags=re.S) if cov_results: total_coverage = cov_results.group(1) cov_report = TestResult( Category.COVERAGE, f'{total_coverage}%', COV_TEST_NAME) # create a fake test, then emit the coverage as the result # This gives overall test coverage, used in TestDataModel.summary self.sig_collected.emit([COV_TEST_NAME]) self.sig_testresult.emit([cov_report]) # also build a result for each file's coverage header = "".join(cov_results.group(0).split("\n")[1:3]) # coverage report columns: # Name Stmts Miss Cover Missing for row in re.findall( r'^((.*?\.py) .*?(\d+%).*?(\d[\d\,\-\ ]*)?)$', cov_results.group(0), flags=re.M): lineno: Optional[int] = None if row[3]: match = re.search(r'^(\d*)', row[3]) if match: lineno = int(match.group(1)) - 1 file_cov = TestResult( Category.COVERAGE, row[2], row[1], message=_('Missing: {}').format(row[3] if row[3] else _("(none)")), extra_text=_('{}\n{}').format(header, row[0]), filename=row[1], lineno=lineno) self.sig_collected.emit([row[1]]) self.sig_testresult.emit([file_cov]) def finished(self, exitcode: int) -> None: """ Called when the unit test process has finished. This function emits `sig_finished`. Parameters ---------- exitcode Exit code of the test process. """ self.reader.close() output = self.read_all_process_output() if self.config.coverage: self.process_coverage(output) normal_exit = exitcode in [0, 1, 2, 5] # Meaning of exit codes: 0 = all tests passed, 1 = test failed, # 2 = interrupted, 5 = no tests collected self.sig_finished.emit([], output, normal_exit) def normalize_module_name(self, name: str) -> str: """ Convert module name reported by pytest to Python conventions. This function strips the .py suffix and replaces '/' by '.', so that 'ham/spam.py' becomes 'ham.spam'. The result is relative to the directory from which tests are run and not the pytest root dir. 
""" wdir = osp.realpath(self.config.wdir) if wdir != self.rootdir: abspath = osp.join(self.rootdir, name) try: name = osp.relpath(abspath, start=wdir) except ValueError: # Happens on Windows if paths are on different drives pass if name.endswith('.py'): name = name[:-3] return name.replace(osp.sep, '.') def convert_nodeid_to_testname(self, nodeid: str) -> str: """Convert a nodeid to a test name.""" module, name = nodeid.split('::', 1) module = self.normalize_module_name(module) return '{}.{}'.format(module, name) def convert_testname_to_nodeid(self, testname: str) -> str: """ Convert a test name to a nodeid relative to wdir. A true nodeid is relative to the pytest root dir. The return value of this function is like a nodeid but relative to the wdir (i.e., the directory from which test are run). This is the format that pytest expects when running single tests. """ *path_parts, last_part = testname.split('.') path_parts[-1] += '.py' nodeid = osp.join(*path_parts) + '::' + last_part return nodeid def logreport_collecterror_to_tuple( self, report: dict[str, Any]) -> tuple[str, str]: """Convert a 'collecterror' logreport to a (str, str) tuple.""" module = self.normalize_module_name(report['nodeid']) return (module, report['longrepr']) def logreport_starttest_to_str(self, report: dict[str, Any]) -> str: """Convert a 'starttest' logreport to a str.""" return self.convert_nodeid_to_testname(report['nodeid']) def logreport_to_testresult(self, report: dict[str, Any]) -> TestResult: """Convert a logreport sent by test process to a TestResult.""" status = report['outcome'] if report['outcome'] in ('failed', 'xpassed') or report['witherror']: cat = Category.FAIL elif report['outcome'] in ('passed', 'xfailed'): cat = Category.OK else: cat = Category.SKIP testname = self.convert_nodeid_to_testname(report['nodeid']) message = report.get('message', '') extra_text = report.get('longrepr', '') if 'sections' in report: if extra_text: extra_text += '\n' for (heading, text) in report['sections']: extra_text += '----- {} -----\n{}'.format(heading, text) filename = osp.join(self.rootdir, report['filename']) result = TestResult(cat, status, testname, message=message, time=report['duration'], extra_text=extra_text, filename=filename, lineno=report['lineno']) return result spyder-unittest-0.7.0/spyder_unittest/backend/runnerbase.py000066400000000000000000000200641466560470700242740ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # Copyright © 2013 Spyder Project Contributors # Licensed under the terms of the MIT License # (see LICENSE.txt for details) """Classes for running tests within various frameworks.""" from __future__ import annotations # Standard library imports from enum import IntEnum import logging import os import tempfile from typing import ClassVar, Optional, TYPE_CHECKING # Third party imports from qtpy.QtCore import ( QObject, QProcess, QProcessEnvironment, QTextCodec, Signal) # Local imports if TYPE_CHECKING: from spyder_unittest.widgets.configdialog import Config from spyder_unittest.widgets.unittestgui import UnitTestWidget # Logging logger = logging.getLogger(__name__) # if generating coverage report, use this name for the TestResult # it's here in case we can get coverage results from unittest too COV_TEST_NAME = 'Total Test Coverage' class Category(IntEnum): """Enum type representing category of test result.""" FAIL = 1 OK = 2 SKIP = 3 PENDING = 4 COVERAGE = 5 class TestResult: """Class representing the result of running a single test.""" __test__ = False # this is not a pytest test class 
def __init__(self, category: Category, status: str, name: str, message: str = '', time: Optional[float] = None, extra_text: str = '', filename: Optional[str] = None, lineno: Optional[int] = None): """ Construct a test result. """ self.category = category self.status = status self.name = name self.message = message self.time = time extra_text = extra_text.rstrip() if extra_text: self.extra_text = extra_text.split("\n") else: self.extra_text = [] self.filename = filename self.lineno = lineno def __eq__(self, other: object) -> bool: """Test for equality.""" if not isinstance(other, TestResult): return NotImplemented return self.__dict__ == other.__dict__ class RunnerBase(QObject): """ Base class for running tests with a framework that uses JUnit XML. This is an abstract class, meant to be subclassed before being used. Concrete subclasses should define create_argument_list() and finished(). All communication back to the caller is done via signals. Attributes ---------- module : str Name of Python module for test framework. This needs to be defined before the user can run tests. name : str Name of test framework, as presented to user. process : QProcess or None Process running the unit test suite. resultfilename : str Name of file in which test results are stored. Signals ------- sig_collected(list of str) Emitted when tests are collected. sig_collecterror(list of (str, str) tuples) Emitted when errors are encountered during collection. First element of tuple is test name, second element is error message. sig_starttest(list of str) Emitted just before tests are run. sig_testresult(list of TestResult) Emitted when tests are finished. sig_finished(list of TestResult, str, bool) Emitted when test process finishes. First argument contains the test results, second argument contains the output of the test process, third argument is True on normal exit, False on abnormal exit. sig_stop() Emitted when test process is being stopped. """ module: ClassVar[str] name: ClassVar[str] sig_collected = Signal(object) sig_collecterror = Signal(object) sig_starttest = Signal(object) sig_testresult = Signal(object) sig_finished = Signal(object, str, bool) sig_stop = Signal() def __init__(self, widget: UnitTestWidget, resultfilename: Optional[str] = None): """ Construct test runner. Parameters ---------- widget : UnitTestWidget Unit test widget which constructs the test runner. resultfilename : str or None Name of file in which to store test results. If None, use default. """ QObject.__init__(self, widget) self.process: Optional[QProcess] = None if resultfilename is None: self.resultfilename = os.path.join(tempfile.gettempdir(), 'unittest.results') else: self.resultfilename = resultfilename def create_argument_list(self, config: Config, cov_path: Optional[str], single_test: Optional[str]) -> list[str]: """ Create argument list for testing process (dummy). This function should be defined before calling self.start(). """ raise NotImplementedError def _prepare_process(self, config: Config, pythonpath: list[str]) -> QProcess: """ Prepare and return process for running the unit test suite. This sets the working directory and environment. 
""" process = QProcess(self) process.setProcessChannelMode(QProcess.MergedChannels) process.setWorkingDirectory(config.wdir) process.finished.connect(self.finished) if pythonpath: env = QProcessEnvironment.systemEnvironment() old_python_path = env.value('PYTHONPATH', '') python_path_str = os.pathsep.join(pythonpath) if old_python_path: python_path_str += os.pathsep + old_python_path env.insert('PYTHONPATH', python_path_str) process.setProcessEnvironment(env) return process def start(self, config: Config, cov_path: Optional[str], executable: str, pythonpath: list[str], single_test: Optional[str]) -> None: """ Start process which will run the unit test suite. The process is run in the working directory specified in 'config', with the directories in `pythonpath` added to the Python path for the test process. The test results are written to the file `self.resultfilename`. The standard output and error are also recorded. Once the process is finished, `self.finished()` will be called. Parameters ---------- config Unit test configuration. cov_path Path to filter source for coverage report executable Path to Python executable pythonpath List of directories to be added to the Python path single_test If None, run all tests; otherwise, it is the name of the only test to be run. Raises ------ RuntimeError If process failed to start. """ self.process = self._prepare_process(config, pythonpath) p_args = self.create_argument_list(config, cov_path, single_test) try: os.remove(self.resultfilename) except OSError: pass logger.debug(f'Starting Python process with arguments {p_args}') self.process.start(executable, p_args) running = self.process.waitForStarted() if not running: raise RuntimeError def finished(self, exitcode: int) -> None: """ Called when the unit test process has finished. This function should be implemented in derived classes. It should read the results (if necessary) and emit `sig_finished`. 
""" raise NotImplementedError def read_all_process_output(self) -> str: """Read and return all output from `self.process` as unicode.""" assert self.process is not None qbytearray = self.process.readAllStandardOutput() locale_codec = QTextCodec.codecForLocale() return locale_codec.toUnicode(qbytearray.data()) def stop_if_running(self) -> None: """Stop testing process if it is running.""" if self.process and self.process.state() == QProcess.Running: self.process.kill() self.sig_stop.emit() spyder-unittest-0.7.0/spyder_unittest/backend/tests/000077500000000000000000000000001466560470700227165ustar00rootroot00000000000000spyder-unittest-0.7.0/spyder_unittest/backend/tests/__init__.py000066400000000000000000000003021466560470700250220ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # Copyright © 2017 Spyder Project Contributors # Licensed under the terms of the MIT License # (see LICENSE.txt for details) """Tests for spyder_unittest.backend .""" spyder-unittest-0.7.0/spyder_unittest/backend/tests/test_abbreviator.py000066400000000000000000000046771466560470700266450ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # Copyright © 2017 Spyder Project Contributors # Licensed under the terms of the MIT License # (see LICENSE.txt for details) """Tests for abbreviator.py""" # Local imports from spyder_unittest.backend.abbreviator import Abbreviator def test_abbreviator_with_one_word(): abb = Abbreviator() abb.add('ham') assert abb.abbreviate('ham') == 'ham' def test_abbreviator_with_one_word_with_two_components(): abb = Abbreviator() abb.add('ham.spam') assert abb.abbreviate('ham.spam') == 'h.spam' def test_abbreviator_with_one_word_with_three_components(): abb = Abbreviator() abb.add('ham.spam.eggs') assert abb.abbreviate('ham.spam.eggs') == 'h.s.eggs' def test_abbreviator_without_common_prefix(): abb = Abbreviator(['ham.foo', 'spam.foo']) assert abb.abbreviate('ham.foo') == 'h.foo' assert abb.abbreviate('spam.foo') == 's.foo' def test_abbreviator_with_prefix(): abb = Abbreviator(['test_ham.x', 'test_spam.x']) assert abb.abbreviate('test_ham.x') == 'test_h.x' assert abb.abbreviate('test_spam.x') == 'test_s.x' def test_abbreviator_with_first_word_prefix_of_second(): abb = Abbreviator(['ham.x', 'hameggs.x']) assert abb.abbreviate('ham.x') == 'ham.x' assert abb.abbreviate('hameggs.x') == 'hame.x' def test_abbreviator_with_second_word_prefix_of_first(): abb = Abbreviator(['hameggs.x', 'ham.x']) assert abb.abbreviate('hameggs.x') == 'hame.x' assert abb.abbreviate('ham.x') == 'ham.x' def test_abbreviator_with_three_words(): abb = Abbreviator(['hamegg.x', 'hameggs.x', 'hall.x']) assert abb.abbreviate('hamegg.x') == 'hamegg.x' assert abb.abbreviate('hameggs.x') == 'hameggs.x' assert abb.abbreviate('hall.x') == 'hal.x' def test_abbreviator_with_multilevel(): abb = Abbreviator(['ham.eggs.foo', 'ham.spam.bar', 'eggs.ham.foo', 'eggs.hamspam.bar']) assert abb.abbreviate('ham.eggs.foo') == 'h.e.foo' assert abb.abbreviate('ham.spam.bar') == 'h.s.bar' assert abb.abbreviate('eggs.ham.foo') == 'e.ham.foo' assert abb.abbreviate('eggs.hamspam.bar') == 'e.hams.bar' def test_abbreviator_with_one_word_and_parameters_with_dot(): abb = Abbreviator() abb.add('ham[.]') assert abb.abbreviate('ham[x.]') == 'ham[x.]' def test_abbreviator_with_one_word_with_two_components_and_parameters_with_dot(): abb = Abbreviator() abb.add('ham.spam[.]') assert abb.abbreviate('ham.spam[x.]') == 'h.spam[x.]' 
spyder-unittest-0.7.0/spyder_unittest/backend/tests/test_frameworkregistry.py000066400000000000000000000014751466560470700301240ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # Copyright © 2017 Spyder Project Contributors # Licensed under the terms of the MIT License # (see LICENSE.txt for details) """Tests for frameworkregistry.py""" # Third party imports import pytest # Local imports from spyder_unittest.backend.frameworkregistry import FrameworkRegistry class MockRunner: name = 'foo' def __init__(self, *args): self.init_args = args def test_frameworkregistry_when_empty(): reg = FrameworkRegistry() with pytest.raises(KeyError): reg.create_runner('foo', None, 'temp.txt') def test_frameworkregistry_after_registering(): reg = FrameworkRegistry() reg.register(MockRunner) runner = reg.create_runner('foo', None, 'temp.txt') assert isinstance(runner, MockRunner) assert runner.init_args == (None, 'temp.txt') spyder-unittest-0.7.0/spyder_unittest/backend/tests/test_nose2runner.py000066400000000000000000000026101466560470700266060ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # Copyright © 2013 Spyder Project Contributors # Licensed under the terms of the MIT License # (see LICENSE.txt for details) """Tests for nose2runner.py""" # Local imports from spyder_unittest.backend.nose2runner import Nose2Runner from spyder_unittest.backend.runnerbase import Category def test_nose2runner_load_data(tmpdir): result_file = tmpdir.join('results') result_txt = """ text """ result_file.write(result_txt) runner = Nose2Runner(None, result_file.strpath) results = runner.load_data() assert len(results) == 2 assert results[0].category == Category.OK assert results[0].status == 'ok' assert results[0].name == 'test_foo.test1' assert results[0].message == '' assert results[0].time == 0.04 assert results[0].extra_text == [] assert results[1].category == Category.FAIL assert results[1].status == 'failure' assert results[1].name == 'test_foo.test2' assert results[1].message == 'test failure' assert results[1].time == 0.01 assert results[1].extra_text == ['text'] spyder-unittest-0.7.0/spyder_unittest/backend/tests/test_pytestrunner.py000066400000000000000000000320261466560470700271140ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # Copyright © 2017 Spyder Project Contributors # Licensed under the terms of the MIT License # (see LICENSE.txt for details) """Tests for pytestrunner.py""" # Standard library imports import os.path as osp import sys from unittest.mock import Mock, patch # Third party imports import pytest # Local imports from spyder_unittest.backend.pytestrunner import PyTestRunner from spyder_unittest.backend.runnerbase import (Category, TestResult, COV_TEST_NAME) from spyder_unittest.widgets.configdialog import Config @pytest.fixture def runner(): res = PyTestRunner(None) res.rootdir = 'ham' res.config = Config(wdir='ham') return res def test_pytestrunner_create_argument_list(monkeypatch, runner): config = Config(args=['--extra-arg']) cov_path = None MockZMQStreamReader = Mock() monkeypatch.setattr( 'spyder_unittest.backend.pytestrunner.ZmqStreamReader', MockZMQStreamReader) mock_reader = MockZMQStreamReader() mock_reader.port = 42 runner = PyTestRunner(None, 'results') runner.reader = mock_reader monkeypatch.setattr('spyder_unittest.backend.pytestrunner.os.path.dirname', lambda _: 'dir') arg_list = runner.create_argument_list(config, cov_path, None) pyfile, port, *coverage, last = arg_list assert pyfile == osp.join('dir', 'workers', 'pytestworker.py') assert port == '42' assert last == 
'--extra-arg' def test_pytestrunner_start(monkeypatch): MockZMQStreamReader = Mock() monkeypatch.setattr( 'spyder_unittest.backend.pytestrunner.ZmqStreamReader', MockZMQStreamReader) mock_reader = MockZMQStreamReader() mock_base_start = Mock() monkeypatch.setattr('spyder_unittest.backend.unittestrunner.RunnerBase.start', mock_base_start) runner = PyTestRunner(None, 'results') config = Config() cov_path = None runner.start(config, cov_path, sys.executable, ['pythondir'], None) assert runner.config is config assert runner.reader is mock_reader runner.reader.sig_received.connect.assert_called_once_with( runner.process_output) mock_base_start.assert_called_once_with( config, cov_path, sys.executable, ['pythondir'], None) def test_pytestrunner_process_output_with_collected(qtbot, runner): output = [{'event': 'collected', 'nodeid': 'spam.py::ham'}, {'event': 'collected', 'nodeid': 'eggs.py::bacon'}] with qtbot.waitSignal(runner.sig_collected) as blocker: runner.process_output(output) expected = ['spam.ham', 'eggs.bacon'] assert blocker.args == [expected] def test_pytestrunner_process_output_with_collecterror(qtbot, runner): output = [{ 'event': 'collecterror', 'nodeid': 'ham/spam.py', 'longrepr': 'msg' }] with qtbot.waitSignal(runner.sig_collecterror) as blocker: runner.process_output(output) expected = [('ham.spam', 'msg')] assert blocker.args == [expected] def test_pytestrunner_process_output_with_starttest(qtbot, runner): output = [{'event': 'starttest', 'nodeid': 'ham/spam.py::ham'}, {'event': 'starttest', 'nodeid': 'ham/eggs.py::bacon'}] with qtbot.waitSignal(runner.sig_starttest) as blocker: runner.process_output(output) expected = ['ham.spam.ham', 'ham.eggs.bacon'] assert blocker.args == [expected] @pytest.mark.parametrize('exitcode, normal_exit', [(0, True), (1, True), (2, True), (3, False), (4, False), (5, True)]) def test_pytestrunner_finished(qtbot, exitcode, normal_exit): output = '== 1 passed in 0.10s ==' mock_reader = Mock() mock_reader.close = lambda: None runner = PyTestRunner(None) runner.reader = mock_reader runner.read_all_process_output = lambda: output runner.config = Config('pytest', None, False) with qtbot.waitSignal(runner.sig_finished) as blocker: runner.finished(exitcode) results = [] assert blocker.args == [results, output, normal_exit] @pytest.mark.parametrize('wdir, expected', [ ('ham', 'spam.eggs'), (osp.join('ham', 'spam'), 'eggs'), (osp.join('link-to-ham', 'spam'), 'eggs')]) def test_normalize_module_name(runner, wdir, expected): def new_realpath(name): """Simulate link from `link-to-ham` to `ham`""" if name.startswith('link-to-ham'): return name[len('link-to-'):] else: return name with patch('spyder_unittest.backend.pytestrunner.osp.realpath', side_effect=new_realpath): runner.config = Config(wdir=wdir) result = runner.normalize_module_name(osp.join('spam', 'eggs.py')) assert result == expected def test_convert_nodeid_to_testname(runner): nodeid = osp.join('spam', 'eggs.py') + '::test_foo' testname = 'spam.eggs.test_foo' result = runner.convert_nodeid_to_testname(nodeid) assert result == testname def test_convert_testname_to_nodeid(runner): nodeid = osp.join('spam', 'eggs.py') + '::test_foo' testname = 'spam.eggs.test_foo' result = runner.convert_testname_to_nodeid(testname) assert result == nodeid def standard_logreport_output(): return { 'event': 'logreport', 'outcome': 'passed', 'witherror': False, 'nodeid': 'foo.py::bar', 'filename': 'foo.py', 'lineno': 24, 'duration': 42 } def test_pytestrunner_process_output_with_logreport_passed(qtbot, runner): output = 
[standard_logreport_output()] with qtbot.waitSignal(runner.sig_testresult) as blocker: runner.process_output(output) expected = [TestResult(Category.OK, 'passed', 'foo.bar', time=42, filename=osp.join('ham', 'foo.py'), lineno=24)] assert blocker.args == [expected] def test_pytestrunner_process_coverage(qtbot): output = """ ============================= test session starts ============================== platform linux -- Python 3.9.12, pytest-7.1.2, pluggy-1.0.0 PyQt5 5.12.3 -- Qt runtime 5.12.9 -- Qt compiled 5.12.9 rootdir: /TRAC/TRAC-data/spyder-unittest, configfile: setup.cfg plugins: flaky-3.7.0, cov-3.0.0, qt-4.0.2, mock-3.7.0 collected 152 items spyder_unittest/backend/tests/test_abbreviator.py ........... [ 7%] spyder_unittest/backend/tests/test_frameworkregistry.py .. [ 8%] spyder_unittest/backend/tests/test_noserunner.py ..... [ 11%] spyder_unittest/backend/tests/test_pytestrunner.py ..................... [ 25%] .... [ 28%] spyder_unittest/backend/tests/test_pytestworker.py ..................... [ 42%] .... [ 44%] spyder_unittest/backend/tests/test_runnerbase.py ..... [ 48%] spyder_unittest/backend/tests/test_unittestrunner.py .......... [ 54%] spyder_unittest/backend/tests/test_zmqstream.py . [ 55%] spyder_unittest/tests/test_unittestplugin.py s.sss [ 58%] spyder_unittest/widgets/tests/test_configdialog.py ........... [ 65%] spyder_unittest/widgets/tests/test_datatree.py ......................... [ 82%] .. [ 83%] spyder_unittest/widgets/tests/test_unittestgui.py ...................... [ 98%] ... [100%] =============================== warnings summary =============================== ---------- coverage: platform linux, python 3.9.12-final-0 ----------- Name Stmts Miss Cover Missing ------------------------------------------------------------------------- setup.py 26 26 0% 7-53 spyder_unittest/backend/noserunner.py 62 7 89% 17-19, 71-72, 94, 103 spyder_unittest/backend/pytestrunner.py 101 6 94% 100-106 spyder_unittest/backend/pytestworker.py 78 4 95% 36, 40, 44, 152 spyder_unittest/backend/runnerbase.py 87 2 98% 20-21 spyder_unittest/backend/unittestrunner.py 78 5 94% 69, 75, 123, 138, 146 spyder_unittest/unittestplugin.py 119 65 45% 60, 71, 119-123, 136-141, 148-150, 161, 170-173, 183-186, 207-208, 219-226, 240-272, 280-289, 299-301, 313-314 spyder_unittest/widgets/configdialog.py 95 10 89% 28-30, 134-135, 144, 173-176 spyder_unittest/widgets/datatree.py 244 14 94% 26-28, 100, 105, 107, 276-277, 280, 293, 312, 417, 422-424 spyder_unittest/widgets/unittestgui.py 218 35 84% 41-43, 49, 223, 241, 245, 249-256, 271-278, 302-305, 330, 351-352, 468-482 ------------------------------------------------------------------------- TOTAL 1201 174 86% 6 files skipped due to complete coverage. 
================= 148 passed, 4 skipped, 242 warnings in 4.25s ================= """ cov_text = """ ---------- coverage: platform linux, python 3.9.12-final-0 ----------- Name Stmts Miss Cover Missing ------------------------------------------------------------------------- setup.py 26 26 0% 7-53 spyder_unittest/backend/noserunner.py 62 7 89% 17-19, 71-72, 94, 103 spyder_unittest/backend/pytestrunner.py 101 6 94% 100-106 spyder_unittest/backend/pytestworker.py 78 4 95% 36, 40, 44, 152 spyder_unittest/backend/runnerbase.py 87 2 98% 20-21 spyder_unittest/backend/unittestrunner.py 78 5 94% 69, 75, 123, 138, 146 spyder_unittest/unittestplugin.py 119 65 45% 60, 71, 119-123, 136-141, 148-150, 161, 170-173, 183-186, 207-208, 219-226, 240-272, 280-289, 299-301, 313-314 spyder_unittest/widgets/configdialog.py 95 10 89% 28-30, 134-135, 144, 173-176 spyder_unittest/widgets/datatree.py 244 14 94% 26-28, 100, 105, 107, 276-277, 280, 293, 312, 417, 422-424 spyder_unittest/widgets/unittestgui.py 218 35 84% 41-43, 49, 223, 241, 245, 249-256, 271-278, 302-305, 330, 351-352, 468-482 ------------------------------------------------------------------------- TOTAL 1201 174 86% 6 files skipped due to complete coverage.""" runner = PyTestRunner(None) runner.rootdir = 'ham' with qtbot.waitSignal(runner.sig_testresult) as blocker: runner.process_coverage(output) expected = TestResult( Category.COVERAGE, "86%", COV_TEST_NAME, extra_text=cov_text) @pytest.mark.parametrize('outcome,witherror,category', [ ('passed', True, Category.FAIL), ('passed', False, Category.OK), ('failed', True, Category.FAIL), ('failed', False, Category.FAIL), # ('skipped', True, this is not possible) ('skipped', False, Category.SKIP), ('xfailed', True, Category.FAIL), ('xfailed', False, Category.OK), ('xpassed', True, Category.FAIL), ('xpassed', False, Category.FAIL), ('---', True, Category.FAIL) # ('---', False, this is not possible) ]) def test_logreport_to_testresult_with_outcome_and_possible_error( runner, outcome, witherror, category): report = standard_logreport_output() report['outcome'] = outcome report['witherror'] = witherror expected = TestResult(category, outcome, 'foo.bar', time=42, filename=osp.join('ham', 'foo.py'), lineno=24) assert runner.logreport_to_testresult(report) == expected def test_logreport_to_testresult_with_message(runner): report = standard_logreport_output() report['message'] = 'msg' expected = TestResult(Category.OK, 'passed', 'foo.bar', message='msg', time=42, filename=osp.join('ham', 'foo.py'), lineno=24) assert runner.logreport_to_testresult(report) == expected def test_logreport_to_testresult_with_extratext(runner): report = standard_logreport_output() report['longrepr'] = 'long msg' expected = TestResult(Category.OK, 'passed', 'foo.bar', time=42, extra_text='long msg', filename=osp.join('ham', 'foo.py'), lineno=24) assert runner.logreport_to_testresult(report) == expected @pytest.mark.parametrize('longrepr,prefix', [ ('', ''), ('msg', '\n') ]) def test_logreport_to_testresult_with_output(runner, longrepr, prefix): report = standard_logreport_output() report['longrepr'] = longrepr report['sections'] = [['Captured stdout call', 'ham\n'], ['Captured stderr call', 'spam\n']] txt = (longrepr + prefix + '----- Captured stdout call -----\nham\n' '----- Captured stderr call -----\nspam\n') expected = TestResult(Category.OK, 'passed', 'foo.bar', time=42, extra_text=txt, filename=osp.join('ham', 'foo.py'), lineno=24) assert runner.logreport_to_testresult(report) == expected 
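# Illustrative sketch, not part of the upstream test suite:
# PyTestRunner.process_coverage() in pytestrunner.py pulls the overall coverage
# percentage out of the textual report with a regular expression. The snippet
# below applies that same pattern to a small made-up coverage summary to show
# that the captured group is the percentage on the TOTAL line.
def test_total_coverage_regex_sketch():
    import re
    # Fabricated, minimal pytest-cov terminal summary used only for this demo.
    fake_output = (
        '---------- coverage: platform linux ----------\n'
        'Name     Stmts   Miss  Cover\n'
        '-----------------------------\n'
        'foo.py       10      2    80%\n'
        '-----------------------------\n'
        'TOTAL        10      2    80%\n'
        '\n'
        '========= 1 passed in 0.01s =========\n'
    )
    # Same pattern as used in PyTestRunner.process_coverage().
    match = re.search(r'-*? coverage:.*?-*\nTOTAL\s.*?\s(\d*?)\%.*\n=*',
                      fake_output, flags=re.S)
    assert match is not None
    assert match.group(1) == '80'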
spyder-unittest-0.7.0/spyder_unittest/backend/tests/test_runnerbase.py000066400000000000000000000062641466560470700265030ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # Copyright © 2013 Spyder Project Contributors # Licensed under the terms of the MIT License # (see LICENSE.txt for details) """Tests for baserunner.py""" # Standard library imports import os from unittest.mock import Mock # Third party imports import pytest # Local imports from spyder_unittest.backend.runnerbase import RunnerBase from spyder_unittest.widgets.configdialog import Config def test_runnerbase_with_nonexisting_module(): class FooRunner(RunnerBase): module = 'nonexisiting' foo_runner = FooRunner(None) config = Config(foo_runner.module, 'wdir', True) with pytest.raises(NotImplementedError): foo_runner.create_argument_list(config, 'cov_path', None) with pytest.raises(NotImplementedError): foo_runner.finished(0) @pytest.mark.parametrize('pythonpath,env_pythonpath', [ ([], None), (['pythonpath'], None), (['pythonpath'], 'old') ]) def test_runnerbase_prepare_process(monkeypatch, pythonpath, env_pythonpath): MockQProcess = Mock() monkeypatch.setattr('spyder_unittest.backend.runnerbase.QProcess', MockQProcess) mock_process = MockQProcess() MockEnvironment = Mock() monkeypatch.setattr( 'spyder_unittest.backend.runnerbase.QProcessEnvironment.systemEnvironment', MockEnvironment) mock_environment = MockEnvironment() mock_environment.configure_mock(**{'value.return_value': env_pythonpath}) config = Config('myRunner', 'wdir') runner = RunnerBase(None, 'results') runner._prepare_process(config, pythonpath) mock_process.setWorkingDirectory.assert_called_once_with('wdir') mock_process.finished.connect.assert_called_once_with(runner.finished) if pythonpath: if env_pythonpath: mock_environment.insert.assert_any_call('PYTHONPATH', 'pythonpath{}{}'.format( os.pathsep, env_pythonpath)) else: mock_environment.insert.assert_any_call('PYTHONPATH', 'pythonpath') mock_process.setProcessEnvironment.assert_called_once() else: mock_environment.insert.assert_not_called() mock_process.setProcessEnvironment.assert_not_called() def test_runnerbase_start(monkeypatch): MockQProcess = Mock() monkeypatch.setattr('spyder_unittest.backend.runnerbase.QProcess', MockQProcess) mock_process = MockQProcess() mock_remove = Mock(side_effect=OSError()) monkeypatch.setattr('spyder_unittest.backend.runnerbase.os.remove', mock_remove) runner = RunnerBase(None, 'results') runner._prepare_process = lambda c, p: mock_process runner.create_argument_list = lambda c, cp, st: ['arg1', 'arg2'] config = Config('pytest', 'wdir', False) cov_path = None mock_process.waitForStarted = lambda: False with pytest.raises(RuntimeError): runner.start(config, cov_path, 'python_exec', ['pythondir'], None) mock_process.start.assert_called_once_with('python_exec', ['arg1', 'arg2']) mock_remove.assert_called_once_with('results') spyder-unittest-0.7.0/spyder_unittest/backend/tests/test_unittestrunner.py000066400000000000000000000145411466560470700274450ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # Copyright © 2017 Spyder Project Contributors # Licensed under the terms of the MIT License # (see LICENSE.txt for details) """Tests for unittestrunner.py""" # Standard library imports import os.path as osp import sys from unittest.mock import Mock # Local imports from spyder_unittest.backend.unittestrunner import UnittestRunner from spyder_unittest.backend.runnerbase import Category, TestResult from spyder_unittest.widgets.configdialog import Config def 
test_unittestrunner_create_argument_list(monkeypatch): """ Test that UnittestRunner.createArgumentList() returns the expected list. """ config = Config(args=['--extra-arg']) cov_path = None MockZMQStreamReader = Mock() monkeypatch.setattr( 'spyder_unittest.backend.unittestrunner.ZmqStreamReader', MockZMQStreamReader) mock_reader = MockZMQStreamReader() mock_reader.port = 42 runner = UnittestRunner(None, 'resultfile') runner.reader = mock_reader monkeypatch.setattr( 'spyder_unittest.backend.unittestrunner.osp.dirname', lambda _: 'dir') result = runner.create_argument_list(config, cov_path, None) pyfile = osp.join('dir', 'workers', 'unittestworker.py') assert result == [pyfile, '42', '--extra-arg'] def test_unittestrunner_start(monkeypatch): """ Test that UnittestRunner.start() sets the .config and .reader members correctly, that it connects to the reader's sig_received, and that it called the base class method. """ MockZMQStreamReader = Mock() monkeypatch.setattr( 'spyder_unittest.backend.unittestrunner.ZmqStreamReader', MockZMQStreamReader) mock_reader = MockZMQStreamReader() mock_base_start = Mock() monkeypatch.setattr('spyder_unittest.backend.unittestrunner.RunnerBase.start', mock_base_start) runner = UnittestRunner(None, 'results') config = Config() cov_path = None runner.start(config, cov_path, sys.executable, ['pythondir'], None) assert runner.config is config assert runner.reader is mock_reader runner.reader.sig_received.connect.assert_called_once_with( runner.process_output) mock_base_start.assert_called_once_with( config, cov_path, sys.executable, ['pythondir'], None) def test_unittestrunner_process_output_with_collected(qtbot): """Test UnittestRunner.processOutput() with two `collected` events.""" runner = UnittestRunner(None) output = [{'event': 'collected', 'id': 'spam.ham'}, {'event': 'collected', 'id': 'eggs.bacon'}] with qtbot.waitSignal(runner.sig_collected) as blocker: runner.process_output(output) expected = ['spam.ham', 'eggs.bacon'] assert blocker.args == [expected] def test_unittestrunner_process_output_with_starttest(qtbot): """Test UnittestRunner.processOutput() with two `startTest` events.""" runner = UnittestRunner(None) output = [{'event': 'startTest', 'id': 'spam.ham'}, {'event': 'startTest', 'id': 'eggs.bacon'}] with qtbot.waitSignal(runner.sig_starttest) as blocker: runner.process_output(output) expected = ['spam.ham', 'eggs.bacon'] assert blocker.args == [expected] def test_unittestrunner_process_output_with_addsuccess(qtbot): """Test UnittestRunner.processOutput() with an `addSuccess` event.""" runner = UnittestRunner(None) output = [{'event': 'addSuccess', 'id': 'spam.ham'}] with qtbot.waitSignal(runner.sig_testresult) as blocker: runner.process_output(output) expected = [TestResult(Category.OK, 'success', 'spam.ham')] assert blocker.args == [expected] def test_unittestrunner_process_output_with_addfailure(qtbot): """Test UnittestRunner.processOutput() with an `addFailure` event.""" runner = UnittestRunner(None) output = [{'event': 'addFailure', 'id': 'spam.ham', 'reason': 'exception', 'err': 'traceback'}] with qtbot.waitSignal(runner.sig_testresult) as blocker: runner.process_output(output) expected = [TestResult(Category.FAIL, 'failure', 'spam.ham', message='exception', extra_text='traceback')] assert blocker.args == [expected] def test_unittestrunner_process_output_with_adderror(qtbot): """Test UnittestRunner.processOutput() with an `addError` event.""" runner = UnittestRunner(None) output = [{'event': 'addError', 'id': 'spam.ham', 'reason': 'exception', 
'err': 'traceback'}] with qtbot.waitSignal(runner.sig_testresult) as blocker: runner.process_output(output) expected = [TestResult(Category.FAIL, 'error', 'spam.ham', message='exception', extra_text='traceback')] assert blocker.args == [expected] def test_unittestrunner_process_output_with_addskip(qtbot): """Test UnittestRunner.processOutput() with an `addSkip` event.""" runner = UnittestRunner(None) output = [{'event': 'addSkip', 'id': 'spam.ham', 'reason': 'skip reason'}] with qtbot.waitSignal(runner.sig_testresult) as blocker: runner.process_output(output) expected = [TestResult(Category.SKIP, 'skip', 'spam.ham', message='skip reason')] assert blocker.args == [expected] def test_unittestrunner_process_output_with_addexpectedfailure(qtbot): """Test UnittestRunner.processOutput() with an `addExpectedFailure` event.""" runner = UnittestRunner(None) output = [{'event': 'addExpectedFailure', 'id': 'spam.ham', 'reason': 'exception', 'err': 'traceback'}] with qtbot.waitSignal(runner.sig_testresult) as blocker: runner.process_output(output) expected = [TestResult(Category.OK, 'expectedFailure', 'spam.ham', message='exception', extra_text='traceback')] assert blocker.args == [expected] def test_unittestrunner_process_output_with_addunexpectedsuccess(qtbot): """Test UnittestRunner.processOutput() with an `addUnexpectedSuccess` event.""" runner = UnittestRunner(None) output = [{'event': 'addUnexpectedSuccess', 'id': 'spam.ham'}] with qtbot.waitSignal(runner.sig_testresult) as blocker: runner.process_output(output) expected = [TestResult(Category.FAIL, 'unexpectedSuccess', 'spam.ham')] assert blocker.args == [expected] spyder-unittest-0.7.0/spyder_unittest/backend/tests/test_zmqstream.py000066400000000000000000000011201466560470700263440ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # Copyright © 2018 Spyder Project Contributors # Licensed under the terms of the MIT License # (see LICENSE.txt for details) """Tests for zmqstream.py""" # Local imports from spyder_unittest.backend.zmqreader import ZmqStreamReader from spyder_unittest.backend.workers.zmqwriter import ZmqStreamWriter def test_zmqstream(qtbot): manager = ZmqStreamReader() worker = ZmqStreamWriter(manager.port) with qtbot.waitSignal(manager.sig_received) as blocker: worker.write(42) assert blocker.args == [[42]] worker.close() manager.close() spyder-unittest-0.7.0/spyder_unittest/backend/unittestrunner.py000066400000000000000000000070161466560470700252430ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # Copyright © 2017 Spyder Project Contributors # Licensed under the terms of the MIT License # (see LICENSE.txt for details) """Support for unittest framework.""" from __future__ import annotations # Standard library imports import os.path as osp from typing import Any, Optional # Local imports from spyder_unittest.widgets.configdialog import Config from spyder_unittest.backend.runnerbase import Category, RunnerBase, TestResult from spyder_unittest.backend.zmqreader import ZmqStreamReader class UnittestRunner(RunnerBase): """Class for running tests with unittest module in standard library.""" module = 'unittest' name = 'unittest' def create_argument_list(self, config: Config, cov_path: Optional[str], single_test: Optional[str]) -> list[str]: """Create argument list for testing process.""" dirname = osp.dirname(__file__) pyfile = osp.join(dirname, 'workers', 'unittestworker.py') arguments = [pyfile, str(self.reader.port)] if single_test: arguments.append(single_test) arguments += config.args return arguments def start(self, 
config: Config, cov_path: Optional[str], executable: str, pythonpath: list[str], single_test: Optional[str]) -> None: """Start process which will run the unit test suite.""" self.config = config self.reader = ZmqStreamReader() self.reader.sig_received.connect(self.process_output) super().start(config, cov_path, executable, pythonpath, single_test) def finished(self, exitcode: int) -> None: """ Called when the unit test process has finished. This function reads the process output and emits `sig_finished`. """ self.reader.close() output = self.read_all_process_output() self.sig_finished.emit([], output, True) def process_output(self, output: list[dict[str, Any]]) -> None: """ Process output of test process. Parameters ---------- output : list list of decoded Python object sent by test process. """ collected_list = [] starttest_list = [] result_list = [] for result_item in output: if result_item['event'] == 'collected': collected_list.append(result_item['id']) elif result_item['event'] == 'startTest': starttest_list.append(result_item['id']) elif result_item['event'].startswith('add'): testresult = add_event_to_testresult(result_item) result_list.append(testresult) if collected_list: self.sig_collected.emit(collected_list) if starttest_list: self.sig_starttest.emit(starttest_list) if result_list: self.sig_testresult.emit(result_list) def add_event_to_testresult(event: dict[str, Any]) -> TestResult: """Convert an addXXX event sent by test process to a TestResult.""" status = event['event'][3].lower() + event['event'][4:] if status in ('error', 'failure', 'unexpectedSuccess'): cat = Category.FAIL elif status in ('success', 'expectedFailure'): cat = Category.OK else: cat = Category.SKIP testname = event['id'] message = event.get('reason', '') extra_text = event.get('err', '') result = TestResult(cat, status, testname, message=message, extra_text=extra_text) return result spyder-unittest-0.7.0/spyder_unittest/backend/workers/000077500000000000000000000000001466560470700232505ustar00rootroot00000000000000spyder-unittest-0.7.0/spyder_unittest/backend/workers/__init__.py000066400000000000000000000010341466560470700253570ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # Copyright © Spyder Project Contributors # Licensed under the terms of the MIT License # (see LICENSE.txt for details) """ Code to be run in the target environment. This directory contains scripts and supporting modules that are supposed to be executed in the target environment (using the Python interpreter that the user specifies in the Preferences) instead of the environment that Spyder runs in. Dependencies should be kept to a minimum, because they need to be installed in each target environment. """ spyder-unittest-0.7.0/spyder_unittest/backend/workers/print_versions.py000066400000000000000000000044751466560470700267200ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # Copyright © Spyder Project Contributors # Licensed under the terms of the MIT License # (see LICENSE.txt for details) """ Script for checking which test frameworks are installed. This script prints a dictionary with the required info to stdout. 
""" def get_pytest_info(): """Return information about pytest.""" try: import pytest except ImportError: return {'available': False} plugins = {} class GetPluginVersionsPlugin(): def pytest_cmdline_main(self, config): nonlocal plugins plugininfo = config.pluginmanager.list_plugin_distinfo() plugins = {dist.project_name: dist.version for plugin, dist in plugininfo} return 0 # stop pytest, don't collect or run tests # --capture=sys needed on Windows to avoid # ValueError: saved filedescriptor not valid anymore pytest.main(['--capture=sys'], plugins=[GetPluginVersionsPlugin()]) return {'available': True, 'version': pytest.__version__, 'plugins': plugins} def get_nose2_info(): """ Return information about nose2. This only returns the version of nose2. The function does not gather any information about plugins. """ try: import nose2 except ImportError: return {'available': False} return {'available': True, 'version': nose2.__version__, 'plugins': {}} def get_unittest_info(): """ Return versions of framework and its plugins. As 'unittest' is a built-in framework, we use the python version. """ from platform import python_version return {'available': True, 'version': python_version(), 'plugins': {}} def get_all_info(): """ Return information about all testing frameworks. Information is returned as a dictionary like the following: {'pytest': {'available': True, 'version': '7.1.1', 'plugins': {'flaky': '3.7.0', 'pytest-mock': '3.6.1'}}, 'nose2': {'available': False}, 'unittest': {'available': True, 'version': '3.10.5', 'plugins': {}}} """ return {'pytest': get_pytest_info(), 'nose2': get_nose2_info(), 'unittest': get_unittest_info()} if __name__ == '__main__': print(get_all_info()) spyder-unittest-0.7.0/spyder_unittest/backend/workers/pytestworker.py000066400000000000000000000114671466560470700264150ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # Copyright © 2017 Spyder Project Contributors # Licensed under the terms of the MIT License # (see LICENSE.txt for details) """ Script for running pytest tests. This script is meant to be run in a separate process by a PyTestRunner. It runs tests via the pytest framework and prints the results so that the PyTestRunner can read them. """ # Standard library imports import sys # Third party imports import pytest # Local imports # Note that the script can be run in an environment that does not contain # spyder_unittest so `from spyder_unittest.xxx import xxx` does not work. 
from zmqwriter import FileStub, ZmqStreamWriter class SpyderPlugin(): """Pytest plugin which reports in format suitable for Spyder.""" def __init__(self, writer): """Constructor.""" self.writer = writer def initialize_logreport(self): """Reset accumulator variables.""" self.status = '---' self.duration = 0 self.longrepr = [] self.sections = [] self.had_error = False self.was_skipped = False self.was_xfail = False def pytest_report_header(self, config, startdir): """Called by pytest before any reporting.""" self.writer.write({ 'event': 'config', 'rootdir': str(config.rootdir) }) def pytest_collectreport(self, report): """Called by pytest after collecting tests from a file.""" if report.outcome == 'failed': self.writer.write({ 'event': 'collecterror', 'nodeid': report.nodeid, 'longrepr': str(report.longrepr) }) def pytest_itemcollected(self, item): """Called by pytest when a test item is collected.""" self.writer.write({ 'event': 'collected', 'nodeid': item.nodeid }) def pytest_runtest_logstart(self, nodeid, location): """Called by pytest before running a test.""" self.writer.write({ 'event': 'starttest', 'nodeid': nodeid }) self.initialize_logreport() def pytest_runtest_logreport(self, report): """Called by pytest when a phase of a test is completed.""" if report.when == 'call': self.status = report.outcome self.duration = report.duration else: if report.outcome == 'failed': self.had_error = True elif report.outcome == 'skipped': self.was_skipped = True if hasattr(report, 'wasxfail'): self.was_xfail = True self.longrepr.append(report.wasxfail if report.wasxfail else 'WAS EXPECTED TO FAIL') self.sections = report.sections # already accumulated over phases if report.longrepr: first_msg_idx = len(self.longrepr) if hasattr(report.longrepr, 'reprcrash'): self.longrepr.append(report.longrepr.reprcrash.message) if isinstance(report.longrepr, tuple): self.longrepr.append(report.longrepr[2]) elif isinstance(report.longrepr, str): self.longrepr.append(report.longrepr) else: self.longrepr.append(str(report.longrepr)) if report.outcome == 'failed' and report.when in ( 'setup', 'teardown'): self.longrepr[first_msg_idx] = '{} {}: {}'.format( 'ERROR at', report.when, self.longrepr[first_msg_idx]) def pytest_runtest_logfinish(self, nodeid, location): """Called by pytest when the entire test is completed.""" if self.was_xfail: if self.status == 'passed': self.status = 'xpassed' else: # 'skipped' self.status = 'xfailed' elif self.was_skipped: self.status = 'skipped' data = {'event': 'logreport', 'outcome': self.status, 'witherror': self.had_error, 'sections': self.sections, 'duration': self.duration, 'nodeid': nodeid, 'filename': location[0], 'lineno': location[1]} if self.longrepr: msg_lines = self.longrepr[0].rstrip().splitlines() data['message'] = msg_lines[0] start_item = 1 if len(msg_lines) == 1 else 0 data['longrepr'] = '\n'.join(self.longrepr[start_item:]) self.writer.write(data) def main(args): """Run pytest with the Spyder plugin.""" if args[1] == 'file': writer = FileStub('pytestworker.log') else: writer = ZmqStreamWriter(int(args[1])) result = pytest.main(args[2:], plugins=[SpyderPlugin(writer)]) writer.close() return result if __name__ == '__main__': result = main(sys.argv) sys.exit(result) spyder-unittest-0.7.0/spyder_unittest/backend/workers/tests/000077500000000000000000000000001466560470700244125ustar00rootroot00000000000000spyder-unittest-0.7.0/spyder_unittest/backend/workers/tests/test_print_versions.py000066400000000000000000000035541466560470700311160ustar00rootroot00000000000000# -*- 
coding: utf-8 -*-
#
# Copyright © Spyder Project Contributors
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
"""Tests for print_versions.py"""

from spyder_unittest.backend.workers.print_versions import (
    get_nose2_info, get_pytest_info, get_unittest_info)


def test_get_pytest_info_without_plugins(monkeypatch):
    import pytest
    monkeypatch.setattr(pytest, '__version__', '1.2.3')
    from _pytest.config import PytestPluginManager
    monkeypatch.setattr(
        PytestPluginManager, 'list_plugin_distinfo', lambda _: ())
    expected = {'available': True, 'version': '1.2.3', 'plugins': {}}
    assert get_pytest_info() == expected


def test_get_pytest_info_with_plugins(monkeypatch):
    import pytest
    import pkg_resources
    monkeypatch.setattr(pytest, '__version__', '1.2.3')
    dist1 = pkg_resources.Distribution(project_name='myPlugin1',
                                       version='4.5.6')
    dist2 = pkg_resources.Distribution(project_name='myPlugin2',
                                       version='7.8.9')
    from _pytest.config import PytestPluginManager
    monkeypatch.setattr(
        PytestPluginManager, 'list_plugin_distinfo',
        lambda _: (('1', dist1), ('2', dist2)))
    expected = {'available': True, 'version': '1.2.3',
                'plugins': {'myPlugin1': '4.5.6', 'myPlugin2': '7.8.9'}}
    assert get_pytest_info() == expected


def test_get_nose2_info(monkeypatch):
    import nose2
    monkeypatch.setattr(nose2, '__version__', '1.2.3')
    expected = {'available': True, 'version': '1.2.3', 'plugins': {}}
    assert get_nose2_info() == expected


def test_get_unittest_info(monkeypatch):
    import platform
    monkeypatch.setattr(platform, 'python_version', lambda: '1.2.3')
    expected = {'available': True, 'version': '1.2.3', 'plugins': {}}
    assert get_unittest_info() == expected
spyder-unittest-0.7.0/spyder_unittest/backend/workers/tests/test_pytestworker.py000066400000000000000000000271161466560470700306140ustar00rootroot00000000000000# -*- coding: utf-8 -*-
#
# Copyright © 2017 Spyder Project Contributors
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
"""Tests for pytestworker.py"""

# Standard library imports
import os
import os.path as osp
import sys
from unittest.mock import create_autospec, MagicMock, Mock

# Third party imports
import pytest

# Local imports
# Modules in spyder_unittest.backend.workers assume that their directory
# is in `sys.path`, so add that directory to the path.
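# As an illustrative alternative (not what this module does below), the
# temporary path tweak could be wrapped in a context manager so that the
# original sys.path is always restored; the helper name is hypothetical.
import contextlib


@contextlib.contextmanager
def _workers_dir_on_path():
    """Temporarily put the workers directory on sys.path (sketch only)."""
    saved = list(sys.path)
    sys.path.insert(0, osp.join(osp.dirname(__file__), osp.pardir))
    try:
        yield
    finally:
        sys.path[:] = saved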
old_path = sys.path sys.path.insert(0, osp.join(osp.dirname(__file__), osp.pardir)) from spyder_unittest.backend.workers.pytestworker import SpyderPlugin, main from spyder_unittest.backend.workers.zmqwriter import ZmqStreamWriter sys.path = old_path class EmptyClass: pass @pytest.fixture def plugin(): mock_writer = create_autospec(ZmqStreamWriter) return SpyderPlugin(mock_writer) def test_spyderplugin_test_report_header(plugin): import pathlib config = EmptyClass() config.rootdir = pathlib.PurePosixPath('/myRootDir') plugin.pytest_report_header(config, None) plugin.writer.write.assert_called_once_with({ 'event': 'config', 'rootdir': '/myRootDir' }) @pytest.fixture def plugin_ini(): mock_writer = create_autospec(ZmqStreamWriter) plugin = SpyderPlugin(mock_writer) plugin.status = '---' plugin.duration = 0 plugin.longrepr = [] plugin.sections = [] plugin.had_error = False plugin.was_skipped = False plugin.was_xfail = False return plugin def test_spyderplugin_test_collectreport_with_success(plugin): report = EmptyClass() report.outcome = 'success' report.nodeid = 'foo.py::bar' plugin.pytest_collectreport(report) plugin.writer.write.assert_not_called() def test_spyderplugin_test_collectreport_with_failure(plugin): report = EmptyClass() report.outcome = 'failed' report.nodeid = 'foo.py::bar' report.longrepr = MagicMock() report.longrepr.__str__.return_value = 'message' plugin.pytest_collectreport(report) plugin.writer.write.assert_called_once_with({ 'event': 'collecterror', 'nodeid': 'foo.py::bar', 'longrepr': 'message' }) def test_spyderplugin_test_itemcollected(plugin): testitem = EmptyClass() testitem.nodeid = 'foo.py::bar' plugin.pytest_itemcollected(testitem) plugin.writer.write.assert_called_once_with({ 'event': 'collected', 'nodeid': 'foo.py::bar' }) def standard_logreport(): report = EmptyClass() report.when = 'call' report.outcome = 'passed' report.nodeid = 'foo.py::bar' report.duration = 42 report.sections = [] report.longrepr = None report.location = ('foo.py', 24, 'bar') return report def test_pytest_runtest_logreport_passed(plugin_ini): report = standard_logreport() report.sections = ['output'] plugin_ini.pytest_runtest_logreport(report) assert plugin_ini.status == 'passed' assert plugin_ini.duration == 42 assert plugin_ini.sections == ['output'] assert plugin_ini.had_error is False assert plugin_ini.was_skipped is False assert plugin_ini.was_xfail is False def test_pytest_runtest_logreport_failed(plugin_ini): report = standard_logreport() report.when = 'teardown' report.outcome = 'failed' plugin_ini.pytest_runtest_logreport(report) assert plugin_ini.status == '---' assert plugin_ini.duration == 0 assert plugin_ini.had_error is True assert plugin_ini.was_skipped is False assert plugin_ini.was_xfail is False def test_pytest_runtest_logreport_skipped(plugin_ini): report = standard_logreport() report.when = 'setup' report.outcome = 'skipped' plugin_ini.pytest_runtest_logreport(report) assert plugin_ini.status == '---' assert plugin_ini.duration == 0 assert plugin_ini.had_error is False assert plugin_ini.was_skipped is True assert plugin_ini.was_xfail is False @pytest.mark.parametrize('xfail_msg,longrepr', [ ('msg', 'msg'), ('', 'WAS EXPECTED TO FAIL') ]) def test_pytest_runtest_logreport_xfail(plugin_ini, xfail_msg, longrepr): report = standard_logreport() report.wasxfail = xfail_msg plugin_ini.pytest_runtest_logreport(report) assert plugin_ini.status == 'passed' assert plugin_ini.duration == 42 assert plugin_ini.had_error is False assert plugin_ini.was_skipped is False assert 
plugin_ini.was_xfail is True assert plugin_ini.longrepr == [longrepr] def test_pytest_runtest_logreport_with_reprcrash_longrepr(plugin_ini): class MockLongrepr: def __init__(self): self.reprcrash = EmptyClass() self.reprcrash.message = 'msg' def __str__(self): return 'reprtraceback' report = standard_logreport() report.longrepr = MockLongrepr() plugin_ini.pytest_runtest_logreport(report) assert plugin_ini.longrepr == ['msg', 'reprtraceback'] def test_pytest_runtest_logreport_with_tuple_longrepr(plugin_ini): report = standard_logreport() report.longrepr = ('path', 'lineno', 'msg') plugin_ini.pytest_runtest_logreport(report) assert plugin_ini.longrepr == ['msg'] def test_pytest_runtest_logreport_with_str_longrepr(plugin_ini): report = standard_logreport() report.longrepr = 'msg' plugin_ini.pytest_runtest_logreport(report) assert plugin_ini.longrepr == ['msg'] def test_pytest_runtest_logreport_with_excinfo_longrepr(plugin_ini): class MockLongrepr: def __str__(self): return 'msg' report = standard_logreport() report.longrepr = MockLongrepr() plugin_ini.pytest_runtest_logreport(report) assert plugin_ini.longrepr == ['msg'] @pytest.mark.parametrize('when,longrepr,expected',[ ('setup', [], ['ERROR at setup: msg']), ('call', [], ['msg']), ('teardown', ['prev msg'], ['prev msg', 'ERROR at teardown: msg']) ]) def test_pytest_runtest_logreport_error_in_setup_or_teardown_message( plugin_ini, when, longrepr, expected): report = standard_logreport() report.when = when report.outcome = 'failed' report.longrepr = 'msg' plugin_ini.longrepr = longrepr plugin_ini.pytest_runtest_logreport(report) assert plugin_ini.longrepr == expected def test_pytest_runtest_logreport_error_in_setup_or_teardown_multiple_messages( plugin_ini): class MockLongrepr: def __init__(self): self.reprcrash = EmptyClass() self.reprcrash.message = 'msg' def __str__(self): return 'reprtraceback' report = standard_logreport() report.when = 'setup' report.outcome = 'failed' report.longrepr = MockLongrepr() plugin_ini.pytest_runtest_logreport(report) assert plugin_ini.longrepr == ['ERROR at setup: msg', 'reprtraceback'] def test_pytest_runtest_logfinish_skipped(plugin_ini): nodeid = 'foo.py::bar' location = ('foo.py', 24) plugin_ini.was_skipped = True plugin_ini.duration = 42 plugin_ini.pytest_runtest_logfinish(nodeid, location) plugin_ini.writer.write.assert_called_once_with({ 'event': 'logreport', 'outcome': 'skipped', 'witherror': False, 'nodeid': 'foo.py::bar', 'duration': 42, 'sections': [], 'filename': 'foo.py', 'lineno': 24 }) def test_pytest_runtest_logfinish_xfailed(plugin_ini): nodeid = 'foo.py::bar' location = ('foo.py', 24) plugin_ini.was_xfail = True plugin_ini.status = 'skipped' plugin_ini.duration = 42 plugin_ini.pytest_runtest_logfinish(nodeid, location) plugin_ini.writer.write.assert_called_once_with({ 'event': 'logreport', 'outcome': 'xfailed', 'witherror': False, 'nodeid': 'foo.py::bar', 'duration': 42, 'sections': [], 'filename': 'foo.py', 'lineno': 24 }) def test_pytest_runtest_logfinish_xpassed(plugin_ini): nodeid = 'foo.py::bar' location = ('foo.py', 24) plugin_ini.was_xfail = True plugin_ini.status = 'passed' plugin_ini.duration = 42 plugin_ini.pytest_runtest_logfinish(nodeid, location) plugin_ini.writer.write.assert_called_once_with({ 'event': 'logreport', 'outcome': 'xpassed', 'witherror': False, 'nodeid': 'foo.py::bar', 'duration': 42, 'sections': [], 'filename': 'foo.py', 'lineno': 24 }) @pytest.mark.parametrize('self_longrepr,message,longrepr', [ (['msg1 line1'], 'msg1 line1', ''), (['msg1 line1\nmsg1 line2'], 
'msg1 line1', 'msg1 line1\nmsg1 line2'), (['msg1 line1', 'msg2'], 'msg1 line1', 'msg2'), (['msg1 line1\nmsg1 line2', 'msg2'], 'msg1 line1', 'msg1 line1\nmsg1 line2\nmsg2'), ]) def test_pytest_runtest_logfinish_handles_longrepr(plugin_ini, self_longrepr, message, longrepr): nodeid = 'foo.py::bar' location = ('foo.py', 24) plugin_ini.status = 'passed' plugin_ini.duration = 42 plugin_ini.longrepr = self_longrepr plugin_ini.pytest_runtest_logfinish(nodeid, location) plugin_ini.writer.write.assert_called_once_with({ 'event': 'logreport', 'outcome': 'passed', 'witherror': False, 'nodeid': 'foo.py::bar', 'duration': 42, 'sections': [], 'filename': 'foo.py', 'lineno': 24, 'message': message, 'longrepr': longrepr }) @pytest.fixture(scope='module') def testfile_path(tmp_path_factory): tmp_path = tmp_path_factory.mktemp('pytestworker') res = tmp_path / 'test_pytestworker_foo.py' res.write_text('def test_ok(): assert 1+1 == 2\n' 'def test_fail(): assert 1+1 == 3\n') return res @pytest.mark.parametrize('alltests', [True, False]) def test_pytestworker_integration(monkeypatch, testfile_path, alltests): mock_writer = create_autospec(ZmqStreamWriter) MockZmqStreamWriter = Mock(return_value=mock_writer) monkeypatch.setattr( 'spyder_unittest.backend.workers.pytestworker.ZmqStreamWriter', MockZmqStreamWriter) os.chdir(testfile_path.parent) testfilename = testfile_path.name pytest_args = ['mockscriptname', '42'] if not alltests: pytest_args.append(f'{testfilename}::test_ok') main(pytest_args) args = mock_writer.write.call_args_list messages = [arg[0][0] for arg in args] assert len(messages) == 7 if alltests else 4 assert messages[0]['event'] == 'config' assert 'rootdir' in messages[0] assert messages[1]['event'] == 'collected' assert messages[1]['nodeid'] == f'{testfilename}::test_ok' if alltests: n = 3 assert messages[2]['event'] == 'collected' assert messages[2]['nodeid'] == f'{testfilename}::test_fail' else: n = 2 assert messages[n]['event'] == 'starttest' assert messages[n]['nodeid'] == f'{testfilename}::test_ok' assert messages[n+1]['event'] == 'logreport' assert messages[n+1]['outcome'] == 'passed' assert messages[n+1]['nodeid'] == f'{testfilename}::test_ok' assert messages[n+1]['sections'] == [] assert messages[n+1]['filename'] == testfilename assert messages[n+1]['lineno'] == 0 assert 'duration' in messages[n+1] if alltests: assert messages[n+2]['event'] == 'starttest' assert messages[n+2]['nodeid'] == f'{testfilename}::test_fail' assert messages[n+3]['event'] == 'logreport' assert messages[n+3]['outcome'] == 'failed' assert messages[n+3]['nodeid'] == f'{testfilename}::test_fail' assert messages[n+3]['sections'] == [] assert messages[n+3]['filename'] == testfilename assert messages[n+3]['lineno'] == 1 assert 'duration' in messages[n+3] spyder-unittest-0.7.0/spyder_unittest/backend/workers/tests/test_unittestworker.py000066400000000000000000000154261466560470700311440ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # Copyright © 2017 Spyder Project Contributors # Licensed under the terms of the MIT License # (see LICENSE.txt for details) """Tests for pytestworker.py""" # Standard library imports import os import os.path as osp import sys import unittest from unittest.mock import call, create_autospec, Mock # Third-party imports import pytest # Local imports # Modules in spyder_unittest.backend.workers assume that their directory # is in `sys.path`, so add that directory to the path. 
old_path = sys.path sys.path.insert(0, osp.join(osp.dirname(__file__), osp.pardir)) from spyder_unittest.backend.workers.unittestworker import ( main, report_collected, SpyderTestResult) from spyder_unittest.backend.workers.zmqwriter import ZmqStreamWriter sys.path = old_path class MyTest(unittest.TestCase): """Simple test class.""" def first(): pass def second(): pass @pytest.fixture def testresult(): mock_writer = create_autospec(ZmqStreamWriter) my_testresult = SpyderTestResult( stream=Mock(), descriptions=True, verbosity=2) my_testresult.writer = mock_writer my_testresult._exc_info_to_string = lambda err, test: 'some exception info' return my_testresult def test_spydertestresult_starttest(testresult): """Test that SpyderTestResult.startTest() writes the correct info.""" test = MyTest(methodName='first') testresult.startTest(test) expected = {'event': 'startTest', 'id': test.id()} testresult.writer.write.assert_called_once_with(expected) def test_spydertestresult_addsuccess(testresult): """Test that SpyderTestResult.addSuccess() writes the correct info.""" test = MyTest(methodName='first') testresult.addSuccess(test) expected = {'event': 'addSuccess', 'id': test.id()} testresult.writer.write.assert_called_once_with(expected) def test_spydertestresult_addfailure(testresult): """Test that SpyderTestResult.addFailure() writes the correct info.""" test = MyTest(methodName='first') err = ('notused', AssertionError('xxx'), 'notused') testresult.addFailure(test, err) expected = {'event': 'addFailure', 'id': test.id(), 'reason': 'AssertionError: xxx', 'err': 'some exception info'} testresult.writer.write.assert_called_once_with(expected) def test_spydertestresult_adderror(testresult): """Test that SpyderTestResult.addError() writes the correct info.""" test = MyTest(methodName='first') err = ('notused', AssertionError('xxx'), 'notused') testresult.addError(test, err) expected = {'event': 'addError', 'id': test.id(), 'reason': 'AssertionError: xxx', 'err': 'some exception info'} testresult.writer.write.assert_called_once_with(expected) def test_spydertestresult_addskip(testresult): """Test that SpyderTestResult.addSkip() writes the correct info.""" test = MyTest(methodName='first') reason = 'my reason' testresult.addSkip(test, reason) expected = {'event': 'addSkip', 'id': test.id(), 'reason': reason} testresult.writer.write.assert_called_once_with(expected) def test_spydertestresult_addexpectedfailure(testresult): """Test that SpyderTestResult.addExpectedFailure() writes the correct info.""" test = MyTest(methodName='first') err = ('notused', AssertionError('xxx'), 'notused') testresult.addExpectedFailure(test, err) expected = {'event': 'addExpectedFailure', 'id': test.id(), 'reason': 'AssertionError: xxx', 'err': 'some exception info'} testresult.writer.write.assert_called_once_with(expected) def test_spydertestresult_addunexpectedsuccess(testresult): """Test that SpyderTestResult.addUnexpectedSuccess() writes the correct info.""" test = MyTest(methodName='first') testresult.addUnexpectedSuccess(test) expected = {'event': 'addUnexpectedSuccess', 'id': test.id()} testresult.writer.write.assert_called_once_with(expected) def test_unittestworker_report_collected(): """ Test that report_collected() with a test suite containing two tests writes two `collected` events to the ZMQ stream. 
""" mock_writer = create_autospec(ZmqStreamWriter) test1 = MyTest(methodName='first') test2 = MyTest(methodName='second') test_suite_inner = unittest.TestSuite([test1, test2]) test_suite = unittest.TestSuite([test_suite_inner]) report_collected(mock_writer, test_suite) expected = [call({'event': 'collected', 'id': test1.id()}), call({'event': 'collected', 'id': test2.id()})] assert mock_writer.write.mock_calls == expected @pytest.fixture(scope='module') def testfile_path(tmp_path_factory): tmp_path = tmp_path_factory.mktemp('unittestworker') res = tmp_path / 'test_unittestworker_foo.py' res.write_text('import unittest\n' 'class MyTest(unittest.TestCase):\n' ' def test_ok(self): self.assertEqual(1+1, 2)\n' ' def test_fail(self): self.assertEqual(1+1, 3)\n') return res @pytest.mark.parametrize('alltests', [True, False]) def test_unittestworker_main(monkeypatch, testfile_path, alltests): """ Test that the main function with some tests writes the expected output to the ZMQ stream. """ mock_writer = create_autospec(ZmqStreamWriter) MockZmqStreamWriter = Mock(return_value=mock_writer) monkeypatch.setattr( 'spyder_unittest.backend.workers.unittestworker.ZmqStreamWriter', MockZmqStreamWriter) os.chdir(testfile_path.parent) testfilename = testfile_path.stem # `stem` removes the .py suffix main_args = ['mockscriptname', '42'] if not alltests: main_args.append(f'{testfilename}.MyTest.test_fail') main(main_args) args = mock_writer.write.call_args_list messages = [arg[0][0] for arg in args] assert len(messages) == (6 if alltests else 3) assert messages[0]['event'] == 'collected' assert messages[0]['id'] == f'{testfilename}.MyTest.test_fail' if alltests: n = 2 assert messages[1]['event'] == 'collected' assert messages[1]['id'] == f'{testfilename}.MyTest.test_ok' else: n = 1 assert messages[n]['event'] == 'startTest' assert messages[n]['id'] == f'{testfilename}.MyTest.test_fail' assert messages[n+1]['event'] == 'addFailure' assert messages[n+1]['id'] == f'{testfilename}.MyTest.test_fail' assert 'AssertionError' in messages[n+1]['reason'] assert 'assertEqual(1+1, 3)' in messages[n+1]['err'] if alltests: assert messages[n+2]['event'] == 'startTest' assert messages[n+2]['id'] == f'{testfilename}.MyTest.test_ok' assert messages[n+3]['event'] == 'addSuccess' assert messages[n+3]['id'] == f'{testfilename}.MyTest.test_ok' spyder-unittest-0.7.0/spyder_unittest/backend/workers/unittestworker.py000066400000000000000000000105141466560470700267340ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # Copyright © Spyder Project Contributors # Licensed under the terms of the MIT License # (see LICENSE.txt for details) """ Script for running unittest tests. This script is meant to be run in a separate process by a UnittestRunner. It runs tests via the unittest framework and transmits the results over a ZMQ socket so that the UnittestRunner can read them. Usage: python unittestworker.py port [testname] Here, `port` is the port number of the ZMQ socket. Use `file` to store the results in the file `unittestworker.json`. The optional argument `testname` is the test to run; if omitted, run all tests. """ from __future__ import annotations # Standard library imports import os import sys from typing import ClassVar from unittest import ( defaultTestLoader, TestCase, TestSuite, TextTestResult, TextTestRunner) # Local imports # Note that the script can be run in an environment that does not contain # spyder_unittest so `from spyder_unittest.xxx import xxx` does not work. 
from zmqwriter import FileStub, ZmqStreamWriter class SpyderTestResult(TextTestResult): """ Store test results and write them to a ZmqStreamWriter. The member `.writer` should be set to a ZmqStreamWriter before running any tests. """ writer: ClassVar[ZmqStreamWriter] def startTest(self, test: TestCase) -> None: self.writer.write({ 'event': 'startTest', 'id': test.id() }) super().startTest(test) def addSuccess(self, test: TestCase) -> None: self.writer.write({ 'event': 'addSuccess', 'id': test.id() }) super().addSuccess(test) def addError(self, test: TestCase, err) -> None: (__, value, __) = err first_line = str(value).splitlines()[0] self.writer.write({ 'event': 'addError', 'id': test.id(), 'reason': f'{type(value).__name__}: {first_line}', 'err': self._exc_info_to_string(err, test) }) super().addError(test, err) def addFailure(self, test: TestCase, err) -> None: (__, value, __) = err first_line = str(value).splitlines()[0] self.writer.write({ 'event': 'addFailure', 'id': test.id(), 'reason': f'{type(value).__name__}: {first_line}', 'err': self._exc_info_to_string(err, test) }) super().addFailure(test, err) def addSkip(self, test: TestCase, reason: str) -> None: self.writer.write({ 'event': 'addSkip', 'id': test.id(), 'reason': reason }) super().addSkip(test, reason) def addExpectedFailure(self, test: TestCase, err) -> None: (__, value, __) = err first_line = str(value).splitlines()[0] self.writer.write({ 'event': 'addExpectedFailure', 'id': test.id(), 'reason': f'{type(value).__name__}: {first_line}', 'err': self._exc_info_to_string(err, test) }) super().addExpectedFailure(test, err) def addUnexpectedSuccess(self, test: TestCase) -> None: self.writer.write({ 'event': 'addUnexpectedSuccess', 'id': test.id() }) super().addUnexpectedSuccess(test) def report_collected(writer: ZmqStreamWriter, test_suite: TestSuite) -> None: for test in test_suite: if isinstance(test, TestSuite): report_collected(writer, test) else: writer.write({ 'event': 'collected', 'id': test.id() }) def main(args: list[str]) -> None: """Run unittest tests.""" # Parse first command line argument and create writer if args[1] != 'file': writer = ZmqStreamWriter(args[1]) else: writer = FileStub('unittestworker.log') SpyderTestResult.writer = writer # Gather tests if args[2:]: # Add cwd to path so that modules can be found sys.path = [os.getcwd()] + sys.path test_suite = defaultTestLoader.loadTestsFromNames(args[2:]) else: test_suite = defaultTestLoader.discover('.') report_collected(writer, test_suite) # Run tests test_runner = TextTestRunner(verbosity=2, resultclass=SpyderTestResult) test_runner.run(test_suite) writer.close() if __name__ == '__main__': main(sys.argv) spyder-unittest-0.7.0/spyder_unittest/backend/workers/zmqwriter.py000066400000000000000000000036741466560470700257000ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # Copyright © Spyder Project Contributors # Licensed under the terms of the MIT License # (see LICENSE.txt for details) """ Writer for sending stream of python objects over a ZMQ socket. The intended usage is that you construct a ZmqStreamReader in one process and a ZmqStreamWriter (with the same port number as the reader) in a worker process. The worker process can then use the stream to send its result to the reader. """ # Standard library imports import sys # Third party imports import zmq class ZmqStreamWriter: """Writer for sending stream of Python object over a ZMQ stream.""" def __init__(self, port: str) -> None: """ Constructor. 
        Arguments
        ---------
        port : str
            TCP port number to be used for the stream. This should equal
            the `port` attribute of the corresponding `ZmqStreamReader`.
        """
        context = zmq.Context()
        self.socket = context.socket(zmq.PAIR)
        self.socket.connect('tcp://localhost:{}'.format(port))

    def write(self, obj: object) -> None:
        """Write arbitrary Python object to stream."""
        self.socket.send_pyobj(obj)

    def close(self) -> None:
        """Close stream."""
        self.socket.close()


class FileStub(ZmqStreamWriter):
    """Stub for ZmqStreamWriter which instead writes to a file."""

    def __init__(self, filename: str) -> None:
        """Constructor; connect to specified filename."""
        self.file = open(filename, 'w')

    def write(self, obj: object) -> None:
        """Write Python object to file."""
        self.file.write(str(obj) + '\n')

    def close(self) -> None:
        """Close file."""
        self.file.close()


if __name__ == '__main__':
    # Usage: python zmqwriter.py port
    # Construct a ZMQ stream on the given port number and send the number 42
    # over the stream (for testing)
    worker = ZmqStreamWriter(sys.argv[1])
    worker.write(42)
spyder-unittest-0.7.0/spyder_unittest/backend/zmqreader.py000066400000000000000000000053521466560470700241250ustar00rootroot00000000000000# -*- coding: utf-8 -*-
#
# Copyright © Spyder Project Contributors
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
"""
Reader for receiving a stream of Python objects over a ZMQ socket.

The intended usage is that you construct a ZmqStreamReader in one process
and a ZmqStreamWriter (with the same port number as the reader) in a worker
process. The worker process can then use the stream to send its result to
the reader.
"""

# Third party imports
from qtpy.QtCore import QObject, QProcess, QSocketNotifier, Signal
from qtpy.QtWidgets import QApplication
import zmq


class ZmqStreamReader(QObject):
    """
    Reader for receiving stream of Python objects via a ZMQ stream.

    Attributes
    ----------
    port : int
        TCP port number used for the stream.

    Signals
    -------
    sig_received(list)
        Emitted when objects are received; argument is list of received
        objects.
    """

    sig_received = Signal(object)

    def __init__(self) -> None:
        """Constructor; also constructs ZMQ stream."""
        super().__init__()
        self.context = zmq.Context()
        self.socket = self.context.socket(zmq.PAIR)
        self.port = self.socket.bind_to_random_port('tcp://*')
        fid = self.socket.getsockopt(zmq.FD)
        self.notifier = QSocketNotifier(fid, QSocketNotifier.Read, self)
        self.notifier.activated.connect(self.received_message)

    def received_message(self) -> None:
        """Called when a message is received."""
        self.notifier.setEnabled(False)
        messages = []
        try:
            while 1:
                message = self.socket.recv_pyobj(flags=zmq.NOBLOCK)
                messages.append(message)
        except zmq.ZMQError:
            pass
        finally:
            self.notifier.setEnabled(True)
        if messages:
            self.sig_received.emit(messages)

    def close(self) -> None:
        """Read any remaining messages and close stream."""
        self.received_message()  # Flush remaining messages
        self.notifier.setEnabled(False)
        self.socket.close()
        self.context.destroy()


if __name__ == '__main__':
    # Usage: python zmqreader.py
    # Start zmqwriter.py in another process and construct a ZMQ stream between
    # this process and the zmqwriter process. Read and print what zmqwriter
    # sends over the ZMQ stream.
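    # With both processes running, this demo should simply print `[42]`:
    # the reader emits sig_received with the list of objects read from the
    # socket, and zmqwriter.py (see above) sends the single object 42.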
import os.path import sys app = QApplication(sys.argv) manager = ZmqStreamReader() manager.sig_received.connect(print) process = QProcess() dirname = os.path.dirname(sys.argv[0]) writer_name = os.path.join(dirname, 'workers', 'zmqwriter.py') process.start('python', [writer_name, str(manager.port)]) process.finished.connect(app.quit) sys.exit(app.exec_()) spyder-unittest-0.7.0/spyder_unittest/locale/000077500000000000000000000000001466560470700214245ustar00rootroot00000000000000spyder-unittest-0.7.0/spyder_unittest/locale/de/000077500000000000000000000000001466560470700220145ustar00rootroot00000000000000spyder-unittest-0.7.0/spyder_unittest/locale/de/LC_MESSAGES/000077500000000000000000000000001466560470700236015ustar00rootroot00000000000000spyder-unittest-0.7.0/spyder_unittest/locale/de/LC_MESSAGES/spyder_unittest.mo000066400000000000000000000065371466560470700274160ustar00rootroot000000000000008      " :H X"e /! 0H&[    ( 2?3S*      - 9 H X l }      5 - 4 ? AO  (   % . 1I {         "$ G Q \ 8s 2    * 5 < A G M (none), {} coverage, {} other, {} passed, {} pendingAbbreviate test namesCaptured stderrCaptured stdoutCollapseCollapse allCommand-line arguments:Configure ...Configure testsDependenciesDirectory from which to run tests:ErrorExpandExpand allExtra command-line arguments when running testsGo to definitionInclude coverage report in outputMessageMissing: {}NameNo results to show.Process failed to startRun only this testRun test suites and view their resultsRun testsRun unit testsRunning tests ...Select directorySettingsShow outputStatusStopStop current test processTest framework:Test process exited abnormallyTime (ms)Unit testingUnit testing outputVersions of frameworks and their installed plugins:Works only for pytest, requires pytest-covcollected {}collection errorfailurenot availablenot runpendingrunningtesttests{} {}{} failedProject-Id-Version: spyder-unittest POT-Creation-Date: 2023-06-23 16:41+0100 PO-Revision-Date: 2023-06-25 20:37 Last-Translator: Language-Team: German MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Generated-By: pygettext.py 1.5 Plural-Forms: nplurals=2; plural=(n != 1); X-Crowdin-Project: spyder-unittest X-Crowdin-Project-ID: 381839 X-Crowdin-Language: de X-Crowdin-File: /master/spyder_unittest/locale/spyder_unittest.pot X-Crowdin-File-ID: 49 Language: de_DE (keine), {} Abdeckung, {} andere, {} bestanden, {} ausstehendTestnamen abkürzenErfasstes stderrErfasstes stdoutEinklappenAlle einklappenBefehlszeilenargumente:Konfigurieren ...Tests konfigurierenAbhängigkeitenVerzeichnis, aus dem Tests ausgeführt werden sollen:FehlerAusklappenAlle ausklappenZusätzliche Befehlszeilenargumente bei der Ausführung von TestsZur Definition gehenAbdeckungsbericht in Ausgabe einbeziehenNachrichtFehlt: {}NameKeine anzuzeigenden Ergebnisse.Prozess konnte nicht gestartet werdenNur diesen Test ausführenTestsuiten ausführen und ihre Ergebnisse ansehenTests ausführenUnit-Tests ausführenTests laufen ...Verzeichnis auswählenEinstellungenAusgabe anzeigenStatusStoppAktuellen Testprozess stoppenTest-Framework:Testprozess wurde abnormal beendetZeit (ms)Unit-TestsAusgabe der Unit-TestsVersionen von Frameworks und deren installierte Plugins:Funktioniert nur für pytest, erfordert pytest-cov{} gesammeltSammlungsfehlerFehlschlagnicht verfügbarnicht ausgeführtausstehendläuftTestTests{} {}{} 
fehlgeschlagenspyder-unittest-0.7.0/spyder_unittest/locale/de/LC_MESSAGES/spyder_unittest.po000066400000000000000000000142761466560470700274200ustar00rootroot00000000000000msgid "" msgstr "" "Project-Id-Version: spyder-unittest\n" "POT-Creation-Date: 2023-06-23 16:41+0100\n" "PO-Revision-Date: 2023-06-25 20:37\n" "Last-Translator: \n" "Language-Team: German\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "Generated-By: pygettext.py 1.5\n" "Plural-Forms: nplurals=2; plural=(n != 1);\n" "X-Crowdin-Project: spyder-unittest\n" "X-Crowdin-Project-ID: 381839\n" "X-Crowdin-Language: de\n" "X-Crowdin-File: /master/spyder_unittest/locale/spyder_unittest.pot\n" "X-Crowdin-File-ID: 49\n" "Language: de_DE\n" #: spyder_unittest/backend/nose2runner.py:99 msgid "Captured stdout" msgstr "Erfasstes stdout" #: spyder_unittest/backend/nose2runner.py:101 msgid "Captured stderr" msgstr "Erfasstes stderr" #: spyder_unittest/backend/pytestrunner.py:129 msgid "(none)" msgstr "(keine)" #: spyder_unittest/backend/pytestrunner.py:129 msgid "Missing: {}" msgstr "Fehlt: {}" #: spyder_unittest/backend/pytestrunner.py:130 msgid "{}\n" "{}" msgstr "{}\n" "{}" #: spyder_unittest/unittestplugin.py:73 spyder_unittest/widgets/unittestgui.py:138 msgid "Unit testing" msgstr "Unit-Tests" #: spyder_unittest/unittestplugin.py:84 msgid "Run test suites and view their results" msgstr "Testsuiten ausführen und ihre Ergebnisse ansehen" #: spyder_unittest/unittestplugin.py:105 spyder_unittest/unittestplugin.py:106 spyder_unittest/widgets/unittestgui.py:424 msgid "Run unit tests" msgstr "Unit-Tests ausführen" #: spyder_unittest/widgets/configdialog.py:78 msgid "Configure tests" msgstr "Tests konfigurieren" #: spyder_unittest/widgets/configdialog.py:86 msgid "Test framework:" msgstr "Test-Framework:" #: spyder_unittest/widgets/configdialog.py:95 spyder_unittest/widgets/unittestgui.py:294 msgid "not available" msgstr "nicht verfügbar" #: spyder_unittest/widgets/configdialog.py:102 msgid "Command-line arguments:" msgstr "Befehlszeilenargumente:" #: spyder_unittest/widgets/configdialog.py:106 msgid "Extra command-line arguments when running tests" msgstr "Zusätzliche Befehlszeilenargumente bei der Ausführung von Tests" #: spyder_unittest/widgets/configdialog.py:118 msgid "Include coverage report in output" msgstr "Abdeckungsbericht in Ausgabe einbeziehen" #: spyder_unittest/widgets/configdialog.py:119 msgid "Works only for pytest, requires pytest-cov" msgstr "Funktioniert nur für pytest, erfordert pytest-cov" #: spyder_unittest/widgets/configdialog.py:131 msgid "Directory from which to run tests:" msgstr "Verzeichnis, aus dem Tests ausgeführt werden sollen:" #: spyder_unittest/widgets/configdialog.py:137 spyder_unittest/widgets/configdialog.py:196 msgid "Select directory" msgstr "Verzeichnis auswählen" #: spyder_unittest/widgets/confpage.py:26 msgid "Settings" msgstr "Einstellungen" #: spyder_unittest/widgets/confpage.py:28 msgid "Abbreviate test names" msgstr "Testnamen abkürzen" #: spyder_unittest/widgets/datatree.py:45 msgid "Message" msgstr "Nachricht" #: spyder_unittest/widgets/datatree.py:45 msgid "Name" msgstr "Name" #: spyder_unittest/widgets/datatree.py:45 msgid "Status" msgstr "Status" #: spyder_unittest/widgets/datatree.py:45 msgid "Time (ms)" msgstr "Zeit (ms)" #: spyder_unittest/widgets/datatree.py:153 msgid "Collapse" msgstr "Einklappen" #: spyder_unittest/widgets/datatree.py:156 msgid "Expand" msgstr "Ausklappen" #: spyder_unittest/widgets/datatree.py:162 msgid "Go to 
definition" msgstr "Zur Definition gehen" #: spyder_unittest/widgets/datatree.py:169 msgid "Run only this test" msgstr "Nur diesen Test ausführen" #: spyder_unittest/widgets/datatree.py:421 msgid "test" msgstr "Test" #: spyder_unittest/widgets/datatree.py:421 msgid "tests" msgstr "Tests" #: spyder_unittest/widgets/datatree.py:425 msgid "No results to show." msgstr "Keine anzuzeigenden Ergebnisse." #: spyder_unittest/widgets/datatree.py:430 msgid "collected {}" msgstr "{} gesammelt" #: spyder_unittest/widgets/datatree.py:431 msgid "{} failed" msgstr "{} fehlgeschlagen" #: spyder_unittest/widgets/datatree.py:432 msgid ", {} passed" msgstr ", {} bestanden" #: spyder_unittest/widgets/datatree.py:434 msgid ", {} other" msgstr ", {} andere" #: spyder_unittest/widgets/datatree.py:436 msgid ", {} pending" msgstr ", {} ausstehend" #: spyder_unittest/widgets/datatree.py:441 msgid ", {} coverage" msgstr ", {} Abdeckung" #: spyder_unittest/widgets/unittestgui.py:151 msgid "Configure ..." msgstr "Konfigurieren ..." #: spyder_unittest/widgets/unittestgui.py:158 msgid "Show output" msgstr "Ausgabe anzeigen" #: spyder_unittest/widgets/unittestgui.py:165 msgid "Collapse all" msgstr "Alle einklappen" #: spyder_unittest/widgets/unittestgui.py:172 msgid "Expand all" msgstr "Alle ausklappen" #: spyder_unittest/widgets/unittestgui.py:179 spyder_unittest/widgets/unittestgui.py:300 msgid "Dependencies" msgstr "Abhängigkeiten" #: spyder_unittest/widgets/unittestgui.py:248 msgid "Unit testing output" msgstr "Ausgabe der Unit-Tests" #: spyder_unittest/widgets/unittestgui.py:291 msgid "Versions of frameworks and their installed plugins:" msgstr "Versionen von Frameworks und deren installierte Plugins:" #: spyder_unittest/widgets/unittestgui.py:393 msgid "Error" msgstr "Fehler" #: spyder_unittest/widgets/unittestgui.py:393 msgid "Process failed to start" msgstr "Prozess konnte nicht gestartet werden" #: spyder_unittest/widgets/unittestgui.py:396 msgid "Running tests ..." msgstr "Tests laufen ..." 
#: spyder_unittest/widgets/unittestgui.py:417 msgid "Stop" msgstr "Stopp" #: spyder_unittest/widgets/unittestgui.py:418 msgid "Stop current test process" msgstr "Aktuellen Testprozess stoppen" #: spyder_unittest/widgets/unittestgui.py:423 msgid "Run tests" msgstr "Tests ausführen" #: spyder_unittest/widgets/unittestgui.py:451 msgid "Test process exited abnormally" msgstr "Testprozess wurde abnormal beendet" #: spyder_unittest/widgets/unittestgui.py:460 msgid "not run" msgstr "nicht ausgeführt" #: spyder_unittest/widgets/unittestgui.py:467 spyder_unittest/widgets/unittestgui.py:473 msgid "pending" msgstr "ausstehend" #: spyder_unittest/widgets/unittestgui.py:474 msgid "running" msgstr "läuft" #: spyder_unittest/widgets/unittestgui.py:480 msgid "failure" msgstr "Fehlschlag" #: spyder_unittest/widgets/unittestgui.py:481 msgid "collection error" msgstr "Sammlungsfehler" spyder-unittest-0.7.0/spyder_unittest/locale/es/000077500000000000000000000000001466560470700220335ustar00rootroot00000000000000spyder-unittest-0.7.0/spyder_unittest/locale/es/LC_MESSAGES/000077500000000000000000000000001466560470700236205ustar00rootroot00000000000000spyder-unittest-0.7.0/spyder_unittest/locale/es/LC_MESSAGES/spyder_unittest.mo000066400000000000000000000010561466560470700274240ustar00rootroot00000000000000$,-Project-Id-Version: spyder-unittest POT-Creation-Date: 2023-06-23 16:41+0100 PO-Revision-Date: 2023-06-23 20:17 Last-Translator: Language-Team: Spanish MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Generated-By: pygettext.py 1.5 Plural-Forms: nplurals=2; plural=(n != 1); X-Crowdin-Project: spyder-unittest X-Crowdin-Project-ID: 381839 X-Crowdin-Language: es-ES X-Crowdin-File: /master/spyder_unittest/locale/spyder_unittest.pot X-Crowdin-File-ID: 49 Language: es_ES spyder-unittest-0.7.0/spyder_unittest/locale/es/LC_MESSAGES/spyder_unittest.po000066400000000000000000000122541466560470700274310ustar00rootroot00000000000000msgid "" msgstr "" "Project-Id-Version: spyder-unittest\n" "POT-Creation-Date: 2023-06-23 16:41+0100\n" "PO-Revision-Date: 2023-06-23 20:17\n" "Last-Translator: \n" "Language-Team: Spanish\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "Generated-By: pygettext.py 1.5\n" "Plural-Forms: nplurals=2; plural=(n != 1);\n" "X-Crowdin-Project: spyder-unittest\n" "X-Crowdin-Project-ID: 381839\n" "X-Crowdin-Language: es-ES\n" "X-Crowdin-File: /master/spyder_unittest/locale/spyder_unittest.pot\n" "X-Crowdin-File-ID: 49\n" "Language: es_ES\n" #: spyder_unittest/backend/nose2runner.py:99 msgid "Captured stdout" msgstr "" #: spyder_unittest/backend/nose2runner.py:101 msgid "Captured stderr" msgstr "" #: spyder_unittest/backend/pytestrunner.py:129 msgid "(none)" msgstr "" #: spyder_unittest/backend/pytestrunner.py:129 msgid "Missing: {}" msgstr "" #: spyder_unittest/backend/pytestrunner.py:130 msgid "{}\n" "{}" msgstr "" #: spyder_unittest/unittestplugin.py:73 spyder_unittest/widgets/unittestgui.py:138 msgid "Unit testing" msgstr "" #: spyder_unittest/unittestplugin.py:84 msgid "Run test suites and view their results" msgstr "" #: spyder_unittest/unittestplugin.py:105 spyder_unittest/unittestplugin.py:106 spyder_unittest/widgets/unittestgui.py:424 msgid "Run unit tests" msgstr "" #: spyder_unittest/widgets/configdialog.py:78 msgid "Configure tests" msgstr "" #: spyder_unittest/widgets/configdialog.py:86 msgid "Test framework:" msgstr "" #: spyder_unittest/widgets/configdialog.py:95 
spyder_unittest/widgets/unittestgui.py:294 msgid "not available" msgstr "" #: spyder_unittest/widgets/configdialog.py:102 msgid "Command-line arguments:" msgstr "" #: spyder_unittest/widgets/configdialog.py:106 msgid "Extra command-line arguments when running tests" msgstr "" #: spyder_unittest/widgets/configdialog.py:118 msgid "Include coverage report in output" msgstr "" #: spyder_unittest/widgets/configdialog.py:119 msgid "Works only for pytest, requires pytest-cov" msgstr "" #: spyder_unittest/widgets/configdialog.py:131 msgid "Directory from which to run tests:" msgstr "" #: spyder_unittest/widgets/configdialog.py:137 spyder_unittest/widgets/configdialog.py:196 msgid "Select directory" msgstr "" #: spyder_unittest/widgets/confpage.py:26 msgid "Settings" msgstr "" #: spyder_unittest/widgets/confpage.py:28 msgid "Abbreviate test names" msgstr "" #: spyder_unittest/widgets/datatree.py:45 msgid "Message" msgstr "" #: spyder_unittest/widgets/datatree.py:45 msgid "Name" msgstr "" #: spyder_unittest/widgets/datatree.py:45 msgid "Status" msgstr "" #: spyder_unittest/widgets/datatree.py:45 msgid "Time (ms)" msgstr "" #: spyder_unittest/widgets/datatree.py:153 msgid "Collapse" msgstr "" #: spyder_unittest/widgets/datatree.py:156 msgid "Expand" msgstr "" #: spyder_unittest/widgets/datatree.py:162 msgid "Go to definition" msgstr "" #: spyder_unittest/widgets/datatree.py:169 msgid "Run only this test" msgstr "" #: spyder_unittest/widgets/datatree.py:421 msgid "test" msgstr "" #: spyder_unittest/widgets/datatree.py:421 msgid "tests" msgstr "" #: spyder_unittest/widgets/datatree.py:425 msgid "No results to show." msgstr "" #: spyder_unittest/widgets/datatree.py:430 msgid "collected {}" msgstr "" #: spyder_unittest/widgets/datatree.py:431 msgid "{} failed" msgstr "" #: spyder_unittest/widgets/datatree.py:432 msgid ", {} passed" msgstr "" #: spyder_unittest/widgets/datatree.py:434 msgid ", {} other" msgstr "" #: spyder_unittest/widgets/datatree.py:436 msgid ", {} pending" msgstr "" #: spyder_unittest/widgets/datatree.py:441 msgid ", {} coverage" msgstr "" #: spyder_unittest/widgets/unittestgui.py:151 msgid "Configure ..." msgstr "" #: spyder_unittest/widgets/unittestgui.py:158 msgid "Show output" msgstr "" #: spyder_unittest/widgets/unittestgui.py:165 msgid "Collapse all" msgstr "" #: spyder_unittest/widgets/unittestgui.py:172 msgid "Expand all" msgstr "" #: spyder_unittest/widgets/unittestgui.py:179 spyder_unittest/widgets/unittestgui.py:300 msgid "Dependencies" msgstr "" #: spyder_unittest/widgets/unittestgui.py:248 msgid "Unit testing output" msgstr "" #: spyder_unittest/widgets/unittestgui.py:291 msgid "Versions of frameworks and their installed plugins:" msgstr "" #: spyder_unittest/widgets/unittestgui.py:393 msgid "Error" msgstr "" #: spyder_unittest/widgets/unittestgui.py:393 msgid "Process failed to start" msgstr "" #: spyder_unittest/widgets/unittestgui.py:396 msgid "Running tests ..." 
msgstr "" #: spyder_unittest/widgets/unittestgui.py:417 msgid "Stop" msgstr "" #: spyder_unittest/widgets/unittestgui.py:418 msgid "Stop current test process" msgstr "" #: spyder_unittest/widgets/unittestgui.py:423 msgid "Run tests" msgstr "" #: spyder_unittest/widgets/unittestgui.py:451 msgid "Test process exited abnormally" msgstr "" #: spyder_unittest/widgets/unittestgui.py:460 msgid "not run" msgstr "" #: spyder_unittest/widgets/unittestgui.py:467 spyder_unittest/widgets/unittestgui.py:473 msgid "pending" msgstr "" #: spyder_unittest/widgets/unittestgui.py:474 msgid "running" msgstr "" #: spyder_unittest/widgets/unittestgui.py:480 msgid "failure" msgstr "" #: spyder_unittest/widgets/unittestgui.py:481 msgid "collection error" msgstr "" spyder-unittest-0.7.0/spyder_unittest/locale/fr/000077500000000000000000000000001466560470700220335ustar00rootroot00000000000000spyder-unittest-0.7.0/spyder_unittest/locale/fr/LC_MESSAGES/000077500000000000000000000000001466560470700236205ustar00rootroot00000000000000spyder-unittest-0.7.0/spyder_unittest/locale/fr/LC_MESSAGES/spyder_unittest.mo000066400000000000000000000045221466560470700274250ustar00rootroot00000000000000)d     "%HN U`qy~   ( 2? S`q y     #.8g nx!2Kck&t     ) 4 = B H , {} other, {} passed, {} pendingCaptured stderrCaptured stdoutCollapseCollapse allConfigure ...Configure testsDirectory from which to run tests:ErrorExpandExpand allGo to definitionMessageNameNo results to show.Process failed to startRun testsRun unit testsRunning tests ...Select directoryShow outputStatusStopStop current test processTest framework:Time (ms)Unit testingUnit testing outputcollected {}collection errorfailurenot availablenot runpendingrunningtesttests{} failedProject-Id-Version: spyder-unittest POT-Creation-Date: 2023-06-23 16:41+0100 PO-Revision-Date: 2023-06-24 20:15 Last-Translator: Language-Team: French MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Generated-By: pygettext.py 1.5 Plural-Forms: nplurals=2; plural=(n > 1); X-Crowdin-Project: spyder-unittest X-Crowdin-Project-ID: 381839 X-Crowdin-Language: fr X-Crowdin-File: /master/spyder_unittest/locale/spyder_unittest.pot X-Crowdin-File-ID: 49 Language: fr_FR , {} autres, {} réussi, {} en coursStderr récupéréStdout récupéréRéduireTout réduireConfigurer ...Configurer les testsRépertoire depuis lequel exécuter les tests:ErreurDéployerTout déployerAller à la définitionMessageNomAucun résultat à afficher.Le processus n'a pas pu démarrerLancer les testsLancement des tests unitairesExécution des tests ...Sélectionner un dossierAfficher les résultatsÉtat ArrêterArrêter le processus de test en coursSystème de test:Durée (ms)Tests unitairesRésultats de tests unitaires{} collectéserreur de collecteéchecnon disponiblenon lancéen attenteen courstesttests{} échecspyder-unittest-0.7.0/spyder_unittest/locale/fr/LC_MESSAGES/spyder_unittest.po000066400000000000000000000134411466560470700274300ustar00rootroot00000000000000msgid "" msgstr "" "Project-Id-Version: spyder-unittest\n" "POT-Creation-Date: 2023-06-23 16:41+0100\n" "PO-Revision-Date: 2023-06-24 20:15\n" "Last-Translator: \n" "Language-Team: French\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "Generated-By: pygettext.py 1.5\n" "Plural-Forms: nplurals=2; plural=(n > 1);\n" "X-Crowdin-Project: spyder-unittest\n" "X-Crowdin-Project-ID: 381839\n" "X-Crowdin-Language: fr\n" "X-Crowdin-File: /master/spyder_unittest/locale/spyder_unittest.pot\n" "X-Crowdin-File-ID: 49\n" 
"Language: fr_FR\n" #: spyder_unittest/backend/nose2runner.py:99 msgid "Captured stdout" msgstr "Stdout récupéré" #: spyder_unittest/backend/nose2runner.py:101 msgid "Captured stderr" msgstr "Stderr récupéré" #: spyder_unittest/backend/pytestrunner.py:129 msgid "(none)" msgstr "" #: spyder_unittest/backend/pytestrunner.py:129 msgid "Missing: {}" msgstr "" #: spyder_unittest/backend/pytestrunner.py:130 msgid "{}\n" "{}" msgstr "" #: spyder_unittest/unittestplugin.py:73 spyder_unittest/widgets/unittestgui.py:138 msgid "Unit testing" msgstr "Tests unitaires" #: spyder_unittest/unittestplugin.py:84 msgid "Run test suites and view their results" msgstr "" #: spyder_unittest/unittestplugin.py:105 spyder_unittest/unittestplugin.py:106 spyder_unittest/widgets/unittestgui.py:424 msgid "Run unit tests" msgstr "Lancement des tests unitaires" #: spyder_unittest/widgets/configdialog.py:78 msgid "Configure tests" msgstr "Configurer les tests" #: spyder_unittest/widgets/configdialog.py:86 msgid "Test framework:" msgstr "Système de test:" #: spyder_unittest/widgets/configdialog.py:95 spyder_unittest/widgets/unittestgui.py:294 msgid "not available" msgstr "non disponible" #: spyder_unittest/widgets/configdialog.py:102 msgid "Command-line arguments:" msgstr "" #: spyder_unittest/widgets/configdialog.py:106 msgid "Extra command-line arguments when running tests" msgstr "" #: spyder_unittest/widgets/configdialog.py:118 msgid "Include coverage report in output" msgstr "" #: spyder_unittest/widgets/configdialog.py:119 msgid "Works only for pytest, requires pytest-cov" msgstr "" #: spyder_unittest/widgets/configdialog.py:131 msgid "Directory from which to run tests:" msgstr "Répertoire depuis lequel exécuter les tests:" #: spyder_unittest/widgets/configdialog.py:137 spyder_unittest/widgets/configdialog.py:196 msgid "Select directory" msgstr "Sélectionner un dossier" #: spyder_unittest/widgets/confpage.py:26 msgid "Settings" msgstr "" #: spyder_unittest/widgets/confpage.py:28 msgid "Abbreviate test names" msgstr "" #: spyder_unittest/widgets/datatree.py:45 msgid "Message" msgstr "Message" #: spyder_unittest/widgets/datatree.py:45 msgid "Name" msgstr "Nom" #: spyder_unittest/widgets/datatree.py:45 msgid "Status" msgstr "État " #: spyder_unittest/widgets/datatree.py:45 msgid "Time (ms)" msgstr "Durée (ms)" #: spyder_unittest/widgets/datatree.py:153 msgid "Collapse" msgstr "Réduire" #: spyder_unittest/widgets/datatree.py:156 msgid "Expand" msgstr "Déployer" #: spyder_unittest/widgets/datatree.py:162 msgid "Go to definition" msgstr "Aller à la définition" #: spyder_unittest/widgets/datatree.py:169 msgid "Run only this test" msgstr "" #: spyder_unittest/widgets/datatree.py:421 msgid "test" msgstr "test" #: spyder_unittest/widgets/datatree.py:421 msgid "tests" msgstr "tests" #: spyder_unittest/widgets/datatree.py:425 msgid "No results to show." msgstr "Aucun résultat à afficher." #: spyder_unittest/widgets/datatree.py:430 msgid "collected {}" msgstr "{} collectés" #: spyder_unittest/widgets/datatree.py:431 msgid "{} failed" msgstr "{} échec" #: spyder_unittest/widgets/datatree.py:432 msgid ", {} passed" msgstr ", {} réussi" #: spyder_unittest/widgets/datatree.py:434 msgid ", {} other" msgstr ", {} autres" #: spyder_unittest/widgets/datatree.py:436 msgid ", {} pending" msgstr ", {} en cours" #: spyder_unittest/widgets/datatree.py:441 msgid ", {} coverage" msgstr "" #: spyder_unittest/widgets/unittestgui.py:151 msgid "Configure ..." msgstr "Configurer ..." 
#: spyder_unittest/widgets/unittestgui.py:158 msgid "Show output" msgstr "Afficher les résultats" #: spyder_unittest/widgets/unittestgui.py:165 msgid "Collapse all" msgstr "Tout réduire" #: spyder_unittest/widgets/unittestgui.py:172 msgid "Expand all" msgstr "Tout déployer" #: spyder_unittest/widgets/unittestgui.py:179 spyder_unittest/widgets/unittestgui.py:300 msgid "Dependencies" msgstr "" #: spyder_unittest/widgets/unittestgui.py:248 msgid "Unit testing output" msgstr "Résultats de tests unitaires" #: spyder_unittest/widgets/unittestgui.py:291 msgid "Versions of frameworks and their installed plugins:" msgstr "" #: spyder_unittest/widgets/unittestgui.py:393 msgid "Error" msgstr "Erreur" #: spyder_unittest/widgets/unittestgui.py:393 msgid "Process failed to start" msgstr "Le processus n'a pas pu démarrer" #: spyder_unittest/widgets/unittestgui.py:396 msgid "Running tests ..." msgstr "Exécution des tests ..." #: spyder_unittest/widgets/unittestgui.py:417 msgid "Stop" msgstr "Arrêter" #: spyder_unittest/widgets/unittestgui.py:418 msgid "Stop current test process" msgstr "Arrêter le processus de test en cours" #: spyder_unittest/widgets/unittestgui.py:423 msgid "Run tests" msgstr "Lancer les tests" #: spyder_unittest/widgets/unittestgui.py:451 msgid "Test process exited abnormally" msgstr "" #: spyder_unittest/widgets/unittestgui.py:460 msgid "not run" msgstr "non lancé" #: spyder_unittest/widgets/unittestgui.py:467 spyder_unittest/widgets/unittestgui.py:473 msgid "pending" msgstr "en attente" #: spyder_unittest/widgets/unittestgui.py:474 msgid "running" msgstr "en cours" #: spyder_unittest/widgets/unittestgui.py:480 msgid "failure" msgstr "échec" #: spyder_unittest/widgets/unittestgui.py:481 msgid "collection error" msgstr "erreur de collecte" spyder-unittest-0.7.0/spyder_unittest/locale/hr/000077500000000000000000000000001466560470700220355ustar00rootroot00000000000000spyder-unittest-0.7.0/spyder_unittest/locale/hr/LC_MESSAGES/000077500000000000000000000000001466560470700236225ustar00rootroot00000000000000spyder-unittest-0.7.0/spyder_unittest/locale/hr/LC_MESSAGES/spyder_unittest.mo000066400000000000000000000011661466560470700274300ustar00rootroot00000000000000$,H-Project-Id-Version: spyder-unittest POT-Creation-Date: 2023-06-23 16:41+0100 PO-Revision-Date: 2023-06-23 20:18 Last-Translator: Language-Team: Croatian MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Generated-By: pygettext.py 1.5 Plural-Forms: nplurals=3; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2); X-Crowdin-Project: spyder-unittest X-Crowdin-Project-ID: 381839 X-Crowdin-Language: hr X-Crowdin-File: /master/spyder_unittest/locale/spyder_unittest.pot X-Crowdin-File-ID: 49 Language: hr_HR spyder-unittest-0.7.0/spyder_unittest/locale/hr/LC_MESSAGES/spyder_unittest.po000066400000000000000000000123641466560470700274350ustar00rootroot00000000000000msgid "" msgstr "" "Project-Id-Version: spyder-unittest\n" "POT-Creation-Date: 2023-06-23 16:41+0100\n" "PO-Revision-Date: 2023-06-23 20:18\n" "Last-Translator: \n" "Language-Team: Croatian\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "Generated-By: pygettext.py 1.5\n" "Plural-Forms: nplurals=3; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 
1 : 2);\n" "X-Crowdin-Project: spyder-unittest\n" "X-Crowdin-Project-ID: 381839\n" "X-Crowdin-Language: hr\n" "X-Crowdin-File: /master/spyder_unittest/locale/spyder_unittest.pot\n" "X-Crowdin-File-ID: 49\n" "Language: hr_HR\n" #: spyder_unittest/backend/nose2runner.py:99 msgid "Captured stdout" msgstr "" #: spyder_unittest/backend/nose2runner.py:101 msgid "Captured stderr" msgstr "" #: spyder_unittest/backend/pytestrunner.py:129 msgid "(none)" msgstr "" #: spyder_unittest/backend/pytestrunner.py:129 msgid "Missing: {}" msgstr "" #: spyder_unittest/backend/pytestrunner.py:130 msgid "{}\n" "{}" msgstr "" #: spyder_unittest/unittestplugin.py:73 spyder_unittest/widgets/unittestgui.py:138 msgid "Unit testing" msgstr "" #: spyder_unittest/unittestplugin.py:84 msgid "Run test suites and view their results" msgstr "" #: spyder_unittest/unittestplugin.py:105 spyder_unittest/unittestplugin.py:106 spyder_unittest/widgets/unittestgui.py:424 msgid "Run unit tests" msgstr "" #: spyder_unittest/widgets/configdialog.py:78 msgid "Configure tests" msgstr "" #: spyder_unittest/widgets/configdialog.py:86 msgid "Test framework:" msgstr "" #: spyder_unittest/widgets/configdialog.py:95 spyder_unittest/widgets/unittestgui.py:294 msgid "not available" msgstr "" #: spyder_unittest/widgets/configdialog.py:102 msgid "Command-line arguments:" msgstr "" #: spyder_unittest/widgets/configdialog.py:106 msgid "Extra command-line arguments when running tests" msgstr "" #: spyder_unittest/widgets/configdialog.py:118 msgid "Include coverage report in output" msgstr "" #: spyder_unittest/widgets/configdialog.py:119 msgid "Works only for pytest, requires pytest-cov" msgstr "" #: spyder_unittest/widgets/configdialog.py:131 msgid "Directory from which to run tests:" msgstr "" #: spyder_unittest/widgets/configdialog.py:137 spyder_unittest/widgets/configdialog.py:196 msgid "Select directory" msgstr "" #: spyder_unittest/widgets/confpage.py:26 msgid "Settings" msgstr "" #: spyder_unittest/widgets/confpage.py:28 msgid "Abbreviate test names" msgstr "" #: spyder_unittest/widgets/datatree.py:45 msgid "Message" msgstr "" #: spyder_unittest/widgets/datatree.py:45 msgid "Name" msgstr "" #: spyder_unittest/widgets/datatree.py:45 msgid "Status" msgstr "" #: spyder_unittest/widgets/datatree.py:45 msgid "Time (ms)" msgstr "" #: spyder_unittest/widgets/datatree.py:153 msgid "Collapse" msgstr "" #: spyder_unittest/widgets/datatree.py:156 msgid "Expand" msgstr "" #: spyder_unittest/widgets/datatree.py:162 msgid "Go to definition" msgstr "" #: spyder_unittest/widgets/datatree.py:169 msgid "Run only this test" msgstr "" #: spyder_unittest/widgets/datatree.py:421 msgid "test" msgstr "" #: spyder_unittest/widgets/datatree.py:421 msgid "tests" msgstr "" #: spyder_unittest/widgets/datatree.py:425 msgid "No results to show." msgstr "" #: spyder_unittest/widgets/datatree.py:430 msgid "collected {}" msgstr "" #: spyder_unittest/widgets/datatree.py:431 msgid "{} failed" msgstr "" #: spyder_unittest/widgets/datatree.py:432 msgid ", {} passed" msgstr "" #: spyder_unittest/widgets/datatree.py:434 msgid ", {} other" msgstr "" #: spyder_unittest/widgets/datatree.py:436 msgid ", {} pending" msgstr "" #: spyder_unittest/widgets/datatree.py:441 msgid ", {} coverage" msgstr "" #: spyder_unittest/widgets/unittestgui.py:151 msgid "Configure ..." 
msgstr "" #: spyder_unittest/widgets/unittestgui.py:158 msgid "Show output" msgstr "" #: spyder_unittest/widgets/unittestgui.py:165 msgid "Collapse all" msgstr "" #: spyder_unittest/widgets/unittestgui.py:172 msgid "Expand all" msgstr "" #: spyder_unittest/widgets/unittestgui.py:179 spyder_unittest/widgets/unittestgui.py:300 msgid "Dependencies" msgstr "" #: spyder_unittest/widgets/unittestgui.py:248 msgid "Unit testing output" msgstr "" #: spyder_unittest/widgets/unittestgui.py:291 msgid "Versions of frameworks and their installed plugins:" msgstr "" #: spyder_unittest/widgets/unittestgui.py:393 msgid "Error" msgstr "" #: spyder_unittest/widgets/unittestgui.py:393 msgid "Process failed to start" msgstr "" #: spyder_unittest/widgets/unittestgui.py:396 msgid "Running tests ..." msgstr "" #: spyder_unittest/widgets/unittestgui.py:417 msgid "Stop" msgstr "" #: spyder_unittest/widgets/unittestgui.py:418 msgid "Stop current test process" msgstr "" #: spyder_unittest/widgets/unittestgui.py:423 msgid "Run tests" msgstr "" #: spyder_unittest/widgets/unittestgui.py:451 msgid "Test process exited abnormally" msgstr "" #: spyder_unittest/widgets/unittestgui.py:460 msgid "not run" msgstr "" #: spyder_unittest/widgets/unittestgui.py:467 spyder_unittest/widgets/unittestgui.py:473 msgid "pending" msgstr "" #: spyder_unittest/widgets/unittestgui.py:474 msgid "running" msgstr "" #: spyder_unittest/widgets/unittestgui.py:480 msgid "failure" msgstr "" #: spyder_unittest/widgets/unittestgui.py:481 msgid "collection error" msgstr "" spyder-unittest-0.7.0/spyder_unittest/locale/hu/000077500000000000000000000000001466560470700220405ustar00rootroot00000000000000spyder-unittest-0.7.0/spyder_unittest/locale/hu/LC_MESSAGES/000077500000000000000000000000001466560470700236255ustar00rootroot00000000000000spyder-unittest-0.7.0/spyder_unittest/locale/hu/LC_MESSAGES/spyder_unittest.mo000066400000000000000000000010551466560470700274300ustar00rootroot00000000000000$,-Project-Id-Version: spyder-unittest POT-Creation-Date: 2023-06-23 16:41+0100 PO-Revision-Date: 2023-06-23 20:17 Last-Translator: Language-Team: Hungarian MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Generated-By: pygettext.py 1.5 Plural-Forms: nplurals=2; plural=(n != 1); X-Crowdin-Project: spyder-unittest X-Crowdin-Project-ID: 381839 X-Crowdin-Language: hu X-Crowdin-File: /master/spyder_unittest/locale/spyder_unittest.pot X-Crowdin-File-ID: 49 Language: hu_HU spyder-unittest-0.7.0/spyder_unittest/locale/hu/LC_MESSAGES/spyder_unittest.po000066400000000000000000000122531466560470700274350ustar00rootroot00000000000000msgid "" msgstr "" "Project-Id-Version: spyder-unittest\n" "POT-Creation-Date: 2023-06-23 16:41+0100\n" "PO-Revision-Date: 2023-06-23 20:17\n" "Last-Translator: \n" "Language-Team: Hungarian\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "Generated-By: pygettext.py 1.5\n" "Plural-Forms: nplurals=2; plural=(n != 1);\n" "X-Crowdin-Project: spyder-unittest\n" "X-Crowdin-Project-ID: 381839\n" "X-Crowdin-Language: hu\n" "X-Crowdin-File: /master/spyder_unittest/locale/spyder_unittest.pot\n" "X-Crowdin-File-ID: 49\n" "Language: hu_HU\n" #: spyder_unittest/backend/nose2runner.py:99 msgid "Captured stdout" msgstr "" #: spyder_unittest/backend/nose2runner.py:101 msgid "Captured stderr" msgstr "" #: spyder_unittest/backend/pytestrunner.py:129 msgid "(none)" msgstr "" #: spyder_unittest/backend/pytestrunner.py:129 msgid "Missing: {}" msgstr "" #: 
spyder_unittest/backend/pytestrunner.py:130 msgid "{}\n" "{}" msgstr "" #: spyder_unittest/unittestplugin.py:73 spyder_unittest/widgets/unittestgui.py:138 msgid "Unit testing" msgstr "" #: spyder_unittest/unittestplugin.py:84 msgid "Run test suites and view their results" msgstr "" #: spyder_unittest/unittestplugin.py:105 spyder_unittest/unittestplugin.py:106 spyder_unittest/widgets/unittestgui.py:424 msgid "Run unit tests" msgstr "" #: spyder_unittest/widgets/configdialog.py:78 msgid "Configure tests" msgstr "" #: spyder_unittest/widgets/configdialog.py:86 msgid "Test framework:" msgstr "" #: spyder_unittest/widgets/configdialog.py:95 spyder_unittest/widgets/unittestgui.py:294 msgid "not available" msgstr "" #: spyder_unittest/widgets/configdialog.py:102 msgid "Command-line arguments:" msgstr "" #: spyder_unittest/widgets/configdialog.py:106 msgid "Extra command-line arguments when running tests" msgstr "" #: spyder_unittest/widgets/configdialog.py:118 msgid "Include coverage report in output" msgstr "" #: spyder_unittest/widgets/configdialog.py:119 msgid "Works only for pytest, requires pytest-cov" msgstr "" #: spyder_unittest/widgets/configdialog.py:131 msgid "Directory from which to run tests:" msgstr "" #: spyder_unittest/widgets/configdialog.py:137 spyder_unittest/widgets/configdialog.py:196 msgid "Select directory" msgstr "" #: spyder_unittest/widgets/confpage.py:26 msgid "Settings" msgstr "" #: spyder_unittest/widgets/confpage.py:28 msgid "Abbreviate test names" msgstr "" #: spyder_unittest/widgets/datatree.py:45 msgid "Message" msgstr "" #: spyder_unittest/widgets/datatree.py:45 msgid "Name" msgstr "" #: spyder_unittest/widgets/datatree.py:45 msgid "Status" msgstr "" #: spyder_unittest/widgets/datatree.py:45 msgid "Time (ms)" msgstr "" #: spyder_unittest/widgets/datatree.py:153 msgid "Collapse" msgstr "" #: spyder_unittest/widgets/datatree.py:156 msgid "Expand" msgstr "" #: spyder_unittest/widgets/datatree.py:162 msgid "Go to definition" msgstr "" #: spyder_unittest/widgets/datatree.py:169 msgid "Run only this test" msgstr "" #: spyder_unittest/widgets/datatree.py:421 msgid "test" msgstr "" #: spyder_unittest/widgets/datatree.py:421 msgid "tests" msgstr "" #: spyder_unittest/widgets/datatree.py:425 msgid "No results to show." msgstr "" #: spyder_unittest/widgets/datatree.py:430 msgid "collected {}" msgstr "" #: spyder_unittest/widgets/datatree.py:431 msgid "{} failed" msgstr "" #: spyder_unittest/widgets/datatree.py:432 msgid ", {} passed" msgstr "" #: spyder_unittest/widgets/datatree.py:434 msgid ", {} other" msgstr "" #: spyder_unittest/widgets/datatree.py:436 msgid ", {} pending" msgstr "" #: spyder_unittest/widgets/datatree.py:441 msgid ", {} coverage" msgstr "" #: spyder_unittest/widgets/unittestgui.py:151 msgid "Configure ..." 
msgstr "" #: spyder_unittest/widgets/unittestgui.py:158 msgid "Show output" msgstr "" #: spyder_unittest/widgets/unittestgui.py:165 msgid "Collapse all" msgstr "" #: spyder_unittest/widgets/unittestgui.py:172 msgid "Expand all" msgstr "" #: spyder_unittest/widgets/unittestgui.py:179 spyder_unittest/widgets/unittestgui.py:300 msgid "Dependencies" msgstr "" #: spyder_unittest/widgets/unittestgui.py:248 msgid "Unit testing output" msgstr "" #: spyder_unittest/widgets/unittestgui.py:291 msgid "Versions of frameworks and their installed plugins:" msgstr "" #: spyder_unittest/widgets/unittestgui.py:393 msgid "Error" msgstr "" #: spyder_unittest/widgets/unittestgui.py:393 msgid "Process failed to start" msgstr "" #: spyder_unittest/widgets/unittestgui.py:396 msgid "Running tests ..." msgstr "" #: spyder_unittest/widgets/unittestgui.py:417 msgid "Stop" msgstr "" #: spyder_unittest/widgets/unittestgui.py:418 msgid "Stop current test process" msgstr "" #: spyder_unittest/widgets/unittestgui.py:423 msgid "Run tests" msgstr "" #: spyder_unittest/widgets/unittestgui.py:451 msgid "Test process exited abnormally" msgstr "" #: spyder_unittest/widgets/unittestgui.py:460 msgid "not run" msgstr "" #: spyder_unittest/widgets/unittestgui.py:467 spyder_unittest/widgets/unittestgui.py:473 msgid "pending" msgstr "" #: spyder_unittest/widgets/unittestgui.py:474 msgid "running" msgstr "" #: spyder_unittest/widgets/unittestgui.py:480 msgid "failure" msgstr "" #: spyder_unittest/widgets/unittestgui.py:481 msgid "collection error" msgstr "" spyder-unittest-0.7.0/spyder_unittest/locale/ja/000077500000000000000000000000001466560470700220165ustar00rootroot00000000000000spyder-unittest-0.7.0/spyder_unittest/locale/ja/LC_MESSAGES/000077500000000000000000000000001466560470700236035ustar00rootroot00000000000000spyder-unittest-0.7.0/spyder_unittest/locale/ja/LC_MESSAGES/spyder_unittest.mo000066400000000000000000000010451466560470700274050ustar00rootroot00000000000000$,-Project-Id-Version: spyder-unittest POT-Creation-Date: 2023-06-23 16:41+0100 PO-Revision-Date: 2023-06-23 20:17 Last-Translator: Language-Team: Japanese MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Generated-By: pygettext.py 1.5 Plural-Forms: nplurals=1; plural=0; X-Crowdin-Project: spyder-unittest X-Crowdin-Project-ID: 381839 X-Crowdin-Language: ja X-Crowdin-File: /master/spyder_unittest/locale/spyder_unittest.pot X-Crowdin-File-ID: 49 Language: ja_JP spyder-unittest-0.7.0/spyder_unittest/locale/ja/LC_MESSAGES/spyder_unittest.po000066400000000000000000000122431466560470700274120ustar00rootroot00000000000000msgid "" msgstr "" "Project-Id-Version: spyder-unittest\n" "POT-Creation-Date: 2023-06-23 16:41+0100\n" "PO-Revision-Date: 2023-06-23 20:17\n" "Last-Translator: \n" "Language-Team: Japanese\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "Generated-By: pygettext.py 1.5\n" "Plural-Forms: nplurals=1; plural=0;\n" "X-Crowdin-Project: spyder-unittest\n" "X-Crowdin-Project-ID: 381839\n" "X-Crowdin-Language: ja\n" "X-Crowdin-File: /master/spyder_unittest/locale/spyder_unittest.pot\n" "X-Crowdin-File-ID: 49\n" "Language: ja_JP\n" #: spyder_unittest/backend/nose2runner.py:99 msgid "Captured stdout" msgstr "" #: spyder_unittest/backend/nose2runner.py:101 msgid "Captured stderr" msgstr "" #: spyder_unittest/backend/pytestrunner.py:129 msgid "(none)" msgstr "" #: spyder_unittest/backend/pytestrunner.py:129 msgid "Missing: {}" msgstr "" #: 
spyder_unittest/backend/pytestrunner.py:130 msgid "{}\n" "{}" msgstr "" #: spyder_unittest/unittestplugin.py:73 spyder_unittest/widgets/unittestgui.py:138 msgid "Unit testing" msgstr "" #: spyder_unittest/unittestplugin.py:84 msgid "Run test suites and view their results" msgstr "" #: spyder_unittest/unittestplugin.py:105 spyder_unittest/unittestplugin.py:106 spyder_unittest/widgets/unittestgui.py:424 msgid "Run unit tests" msgstr "" #: spyder_unittest/widgets/configdialog.py:78 msgid "Configure tests" msgstr "" #: spyder_unittest/widgets/configdialog.py:86 msgid "Test framework:" msgstr "" #: spyder_unittest/widgets/configdialog.py:95 spyder_unittest/widgets/unittestgui.py:294 msgid "not available" msgstr "" #: spyder_unittest/widgets/configdialog.py:102 msgid "Command-line arguments:" msgstr "" #: spyder_unittest/widgets/configdialog.py:106 msgid "Extra command-line arguments when running tests" msgstr "" #: spyder_unittest/widgets/configdialog.py:118 msgid "Include coverage report in output" msgstr "" #: spyder_unittest/widgets/configdialog.py:119 msgid "Works only for pytest, requires pytest-cov" msgstr "" #: spyder_unittest/widgets/configdialog.py:131 msgid "Directory from which to run tests:" msgstr "" #: spyder_unittest/widgets/configdialog.py:137 spyder_unittest/widgets/configdialog.py:196 msgid "Select directory" msgstr "" #: spyder_unittest/widgets/confpage.py:26 msgid "Settings" msgstr "" #: spyder_unittest/widgets/confpage.py:28 msgid "Abbreviate test names" msgstr "" #: spyder_unittest/widgets/datatree.py:45 msgid "Message" msgstr "" #: spyder_unittest/widgets/datatree.py:45 msgid "Name" msgstr "" #: spyder_unittest/widgets/datatree.py:45 msgid "Status" msgstr "" #: spyder_unittest/widgets/datatree.py:45 msgid "Time (ms)" msgstr "" #: spyder_unittest/widgets/datatree.py:153 msgid "Collapse" msgstr "" #: spyder_unittest/widgets/datatree.py:156 msgid "Expand" msgstr "" #: spyder_unittest/widgets/datatree.py:162 msgid "Go to definition" msgstr "" #: spyder_unittest/widgets/datatree.py:169 msgid "Run only this test" msgstr "" #: spyder_unittest/widgets/datatree.py:421 msgid "test" msgstr "" #: spyder_unittest/widgets/datatree.py:421 msgid "tests" msgstr "" #: spyder_unittest/widgets/datatree.py:425 msgid "No results to show." msgstr "" #: spyder_unittest/widgets/datatree.py:430 msgid "collected {}" msgstr "" #: spyder_unittest/widgets/datatree.py:431 msgid "{} failed" msgstr "" #: spyder_unittest/widgets/datatree.py:432 msgid ", {} passed" msgstr "" #: spyder_unittest/widgets/datatree.py:434 msgid ", {} other" msgstr "" #: spyder_unittest/widgets/datatree.py:436 msgid ", {} pending" msgstr "" #: spyder_unittest/widgets/datatree.py:441 msgid ", {} coverage" msgstr "" #: spyder_unittest/widgets/unittestgui.py:151 msgid "Configure ..." 
msgstr "" #: spyder_unittest/widgets/unittestgui.py:158 msgid "Show output" msgstr "" #: spyder_unittest/widgets/unittestgui.py:165 msgid "Collapse all" msgstr "" #: spyder_unittest/widgets/unittestgui.py:172 msgid "Expand all" msgstr "" #: spyder_unittest/widgets/unittestgui.py:179 spyder_unittest/widgets/unittestgui.py:300 msgid "Dependencies" msgstr "" #: spyder_unittest/widgets/unittestgui.py:248 msgid "Unit testing output" msgstr "" #: spyder_unittest/widgets/unittestgui.py:291 msgid "Versions of frameworks and their installed plugins:" msgstr "" #: spyder_unittest/widgets/unittestgui.py:393 msgid "Error" msgstr "" #: spyder_unittest/widgets/unittestgui.py:393 msgid "Process failed to start" msgstr "" #: spyder_unittest/widgets/unittestgui.py:396 msgid "Running tests ..." msgstr "" #: spyder_unittest/widgets/unittestgui.py:417 msgid "Stop" msgstr "" #: spyder_unittest/widgets/unittestgui.py:418 msgid "Stop current test process" msgstr "" #: spyder_unittest/widgets/unittestgui.py:423 msgid "Run tests" msgstr "" #: spyder_unittest/widgets/unittestgui.py:451 msgid "Test process exited abnormally" msgstr "" #: spyder_unittest/widgets/unittestgui.py:460 msgid "not run" msgstr "" #: spyder_unittest/widgets/unittestgui.py:467 spyder_unittest/widgets/unittestgui.py:473 msgid "pending" msgstr "" #: spyder_unittest/widgets/unittestgui.py:474 msgid "running" msgstr "" #: spyder_unittest/widgets/unittestgui.py:480 msgid "failure" msgstr "" #: spyder_unittest/widgets/unittestgui.py:481 msgid "collection error" msgstr "" spyder-unittest-0.7.0/spyder_unittest/locale/pl/000077500000000000000000000000001466560470700220375ustar00rootroot00000000000000spyder-unittest-0.7.0/spyder_unittest/locale/pl/LC_MESSAGES/000077500000000000000000000000001466560470700236245ustar00rootroot00000000000000spyder-unittest-0.7.0/spyder_unittest/locale/pl/LC_MESSAGES/spyder_unittest.mo000066400000000000000000000012741466560470700274320ustar00rootroot00000000000000$,-Project-Id-Version: spyder-unittest POT-Creation-Date: 2023-06-23 16:41+0100 PO-Revision-Date: 2023-06-23 20:17 Last-Translator: Language-Team: Polish MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Generated-By: pygettext.py 1.5 Plural-Forms: nplurals=4; plural=(n==1 ? 0 : (n%10>=2 && n%10<=4) && (n%100<12 || n%100>14) ? 1 : n!=1 && (n%10>=0 && n%10<=1) || (n%10>=5 && n%10<=9) || (n%100>=12 && n%100<=14) ? 2 : 3); X-Crowdin-Project: spyder-unittest X-Crowdin-Project-ID: 381839 X-Crowdin-Language: pl X-Crowdin-File: /master/spyder_unittest/locale/spyder_unittest.pot X-Crowdin-File-ID: 49 Language: pl_PL spyder-unittest-0.7.0/spyder_unittest/locale/pl/LC_MESSAGES/spyder_unittest.po000066400000000000000000000124721466560470700274370ustar00rootroot00000000000000msgid "" msgstr "" "Project-Id-Version: spyder-unittest\n" "POT-Creation-Date: 2023-06-23 16:41+0100\n" "PO-Revision-Date: 2023-06-23 20:17\n" "Last-Translator: \n" "Language-Team: Polish\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "Generated-By: pygettext.py 1.5\n" "Plural-Forms: nplurals=4; plural=(n==1 ? 0 : (n%10>=2 && n%10<=4) && (n%100<12 || n%100>14) ? 1 : n!=1 && (n%10>=0 && n%10<=1) || (n%10>=5 && n%10<=9) || (n%100>=12 && n%100<=14) ? 
2 : 3);\n" "X-Crowdin-Project: spyder-unittest\n" "X-Crowdin-Project-ID: 381839\n" "X-Crowdin-Language: pl\n" "X-Crowdin-File: /master/spyder_unittest/locale/spyder_unittest.pot\n" "X-Crowdin-File-ID: 49\n" "Language: pl_PL\n" #: spyder_unittest/backend/nose2runner.py:99 msgid "Captured stdout" msgstr "" #: spyder_unittest/backend/nose2runner.py:101 msgid "Captured stderr" msgstr "" #: spyder_unittest/backend/pytestrunner.py:129 msgid "(none)" msgstr "" #: spyder_unittest/backend/pytestrunner.py:129 msgid "Missing: {}" msgstr "" #: spyder_unittest/backend/pytestrunner.py:130 msgid "{}\n" "{}" msgstr "" #: spyder_unittest/unittestplugin.py:73 spyder_unittest/widgets/unittestgui.py:138 msgid "Unit testing" msgstr "" #: spyder_unittest/unittestplugin.py:84 msgid "Run test suites and view their results" msgstr "" #: spyder_unittest/unittestplugin.py:105 spyder_unittest/unittestplugin.py:106 spyder_unittest/widgets/unittestgui.py:424 msgid "Run unit tests" msgstr "" #: spyder_unittest/widgets/configdialog.py:78 msgid "Configure tests" msgstr "" #: spyder_unittest/widgets/configdialog.py:86 msgid "Test framework:" msgstr "" #: spyder_unittest/widgets/configdialog.py:95 spyder_unittest/widgets/unittestgui.py:294 msgid "not available" msgstr "" #: spyder_unittest/widgets/configdialog.py:102 msgid "Command-line arguments:" msgstr "" #: spyder_unittest/widgets/configdialog.py:106 msgid "Extra command-line arguments when running tests" msgstr "" #: spyder_unittest/widgets/configdialog.py:118 msgid "Include coverage report in output" msgstr "" #: spyder_unittest/widgets/configdialog.py:119 msgid "Works only for pytest, requires pytest-cov" msgstr "" #: spyder_unittest/widgets/configdialog.py:131 msgid "Directory from which to run tests:" msgstr "" #: spyder_unittest/widgets/configdialog.py:137 spyder_unittest/widgets/configdialog.py:196 msgid "Select directory" msgstr "" #: spyder_unittest/widgets/confpage.py:26 msgid "Settings" msgstr "" #: spyder_unittest/widgets/confpage.py:28 msgid "Abbreviate test names" msgstr "" #: spyder_unittest/widgets/datatree.py:45 msgid "Message" msgstr "" #: spyder_unittest/widgets/datatree.py:45 msgid "Name" msgstr "" #: spyder_unittest/widgets/datatree.py:45 msgid "Status" msgstr "" #: spyder_unittest/widgets/datatree.py:45 msgid "Time (ms)" msgstr "" #: spyder_unittest/widgets/datatree.py:153 msgid "Collapse" msgstr "" #: spyder_unittest/widgets/datatree.py:156 msgid "Expand" msgstr "" #: spyder_unittest/widgets/datatree.py:162 msgid "Go to definition" msgstr "" #: spyder_unittest/widgets/datatree.py:169 msgid "Run only this test" msgstr "" #: spyder_unittest/widgets/datatree.py:421 msgid "test" msgstr "" #: spyder_unittest/widgets/datatree.py:421 msgid "tests" msgstr "" #: spyder_unittest/widgets/datatree.py:425 msgid "No results to show." msgstr "" #: spyder_unittest/widgets/datatree.py:430 msgid "collected {}" msgstr "" #: spyder_unittest/widgets/datatree.py:431 msgid "{} failed" msgstr "" #: spyder_unittest/widgets/datatree.py:432 msgid ", {} passed" msgstr "" #: spyder_unittest/widgets/datatree.py:434 msgid ", {} other" msgstr "" #: spyder_unittest/widgets/datatree.py:436 msgid ", {} pending" msgstr "" #: spyder_unittest/widgets/datatree.py:441 msgid ", {} coverage" msgstr "" #: spyder_unittest/widgets/unittestgui.py:151 msgid "Configure ..." 
msgstr "" #: spyder_unittest/widgets/unittestgui.py:158 msgid "Show output" msgstr "" #: spyder_unittest/widgets/unittestgui.py:165 msgid "Collapse all" msgstr "" #: spyder_unittest/widgets/unittestgui.py:172 msgid "Expand all" msgstr "" #: spyder_unittest/widgets/unittestgui.py:179 spyder_unittest/widgets/unittestgui.py:300 msgid "Dependencies" msgstr "" #: spyder_unittest/widgets/unittestgui.py:248 msgid "Unit testing output" msgstr "" #: spyder_unittest/widgets/unittestgui.py:291 msgid "Versions of frameworks and their installed plugins:" msgstr "" #: spyder_unittest/widgets/unittestgui.py:393 msgid "Error" msgstr "" #: spyder_unittest/widgets/unittestgui.py:393 msgid "Process failed to start" msgstr "" #: spyder_unittest/widgets/unittestgui.py:396 msgid "Running tests ..." msgstr "" #: spyder_unittest/widgets/unittestgui.py:417 msgid "Stop" msgstr "" #: spyder_unittest/widgets/unittestgui.py:418 msgid "Stop current test process" msgstr "" #: spyder_unittest/widgets/unittestgui.py:423 msgid "Run tests" msgstr "" #: spyder_unittest/widgets/unittestgui.py:451 msgid "Test process exited abnormally" msgstr "" #: spyder_unittest/widgets/unittestgui.py:460 msgid "not run" msgstr "" #: spyder_unittest/widgets/unittestgui.py:467 spyder_unittest/widgets/unittestgui.py:473 msgid "pending" msgstr "" #: spyder_unittest/widgets/unittestgui.py:474 msgid "running" msgstr "" #: spyder_unittest/widgets/unittestgui.py:480 msgid "failure" msgstr "" #: spyder_unittest/widgets/unittestgui.py:481 msgid "collection error" msgstr "" spyder-unittest-0.7.0/spyder_unittest/locale/pt_BR/000077500000000000000000000000001466560470700224325ustar00rootroot00000000000000spyder-unittest-0.7.0/spyder_unittest/locale/pt_BR/LC_MESSAGES/000077500000000000000000000000001466560470700242175ustar00rootroot00000000000000spyder-unittest-0.7.0/spyder_unittest/locale/pt_BR/LC_MESSAGES/spyder_unittest.mo000066400000000000000000000044451466560470700300300ustar00rootroot00000000000000)d     "%HN U`qy~   ( 2? 
S`q y      !0&Bin w 6ELRp         , {} other, {} passed, {} pendingCaptured stderrCaptured stdoutCollapseCollapse allConfigure ...Configure testsDirectory from which to run tests:ErrorExpandExpand allGo to definitionMessageNameNo results to show.Process failed to startRun testsRun unit testsRunning tests ...Select directoryShow outputStatusStopStop current test processTest framework:Time (ms)Unit testingUnit testing outputcollected {}collection errorfailurenot availablenot runpendingrunningtesttests{} failedProject-Id-Version: spyder-unittest POT-Creation-Date: 2023-06-23 16:41+0100 PO-Revision-Date: 2023-06-24 20:15 Last-Translator: Language-Team: Portuguese, Brazilian MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Generated-By: pygettext.py 1.5 Plural-Forms: nplurals=2; plural=(n != 1); X-Crowdin-Project: spyder-unittest X-Crowdin-Project-ID: 381839 X-Crowdin-Language: pt-BR X-Crowdin-File: /master/spyder_unittest/locale/spyder_unittest.pot X-Crowdin-File-ID: 49 Language: pt_BR , {} outro, {} passou, {} pendentestderr capturadostdout capturadoRecolherRecolher tudoConfigurar ...Configurar testesDiretório para execução dos testes:ErroExpandirExpandir tudoIr para definiçãoMensagemNomeSem resultados para exibir.O processo falhou ao iniciarExecutar testesExecutar testes unitáriosExecutando teste...Selecione o diretórioMostrar saídaStatusPararParar processo de teste atualTestar framework:Tempo (ms)Teste unitárioSaída do teste unitáriocoletado {}erro na compilaçãofalhaindisponívelnão executarpendenteexecutandotestetestes{} falhouspyder-unittest-0.7.0/spyder_unittest/locale/pt_BR/LC_MESSAGES/spyder_unittest.po000066400000000000000000000133641466560470700300330ustar00rootroot00000000000000msgid "" msgstr "" "Project-Id-Version: spyder-unittest\n" "POT-Creation-Date: 2023-06-23 16:41+0100\n" "PO-Revision-Date: 2023-06-24 20:15\n" "Last-Translator: \n" "Language-Team: Portuguese, Brazilian\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "Generated-By: pygettext.py 1.5\n" "Plural-Forms: nplurals=2; plural=(n != 1);\n" "X-Crowdin-Project: spyder-unittest\n" "X-Crowdin-Project-ID: 381839\n" "X-Crowdin-Language: pt-BR\n" "X-Crowdin-File: /master/spyder_unittest/locale/spyder_unittest.pot\n" "X-Crowdin-File-ID: 49\n" "Language: pt_BR\n" #: spyder_unittest/backend/nose2runner.py:99 msgid "Captured stdout" msgstr "stdout capturado" #: spyder_unittest/backend/nose2runner.py:101 msgid "Captured stderr" msgstr "stderr capturado" #: spyder_unittest/backend/pytestrunner.py:129 msgid "(none)" msgstr "" #: spyder_unittest/backend/pytestrunner.py:129 msgid "Missing: {}" msgstr "" #: spyder_unittest/backend/pytestrunner.py:130 msgid "{}\n" "{}" msgstr "" #: spyder_unittest/unittestplugin.py:73 spyder_unittest/widgets/unittestgui.py:138 msgid "Unit testing" msgstr "Teste unitário" #: spyder_unittest/unittestplugin.py:84 msgid "Run test suites and view their results" msgstr "" #: spyder_unittest/unittestplugin.py:105 spyder_unittest/unittestplugin.py:106 spyder_unittest/widgets/unittestgui.py:424 msgid "Run unit tests" msgstr "Executar testes unitários" #: spyder_unittest/widgets/configdialog.py:78 msgid "Configure tests" msgstr "Configurar testes" #: spyder_unittest/widgets/configdialog.py:86 msgid "Test framework:" msgstr "Testar framework:" #: spyder_unittest/widgets/configdialog.py:95 spyder_unittest/widgets/unittestgui.py:294 msgid "not available" msgstr "indisponível" #: 
spyder_unittest/widgets/configdialog.py:102 msgid "Command-line arguments:" msgstr "" #: spyder_unittest/widgets/configdialog.py:106 msgid "Extra command-line arguments when running tests" msgstr "" #: spyder_unittest/widgets/configdialog.py:118 msgid "Include coverage report in output" msgstr "" #: spyder_unittest/widgets/configdialog.py:119 msgid "Works only for pytest, requires pytest-cov" msgstr "" #: spyder_unittest/widgets/configdialog.py:131 msgid "Directory from which to run tests:" msgstr "Diretório para execução dos testes:" #: spyder_unittest/widgets/configdialog.py:137 spyder_unittest/widgets/configdialog.py:196 msgid "Select directory" msgstr "Selecione o diretório" #: spyder_unittest/widgets/confpage.py:26 msgid "Settings" msgstr "" #: spyder_unittest/widgets/confpage.py:28 msgid "Abbreviate test names" msgstr "" #: spyder_unittest/widgets/datatree.py:45 msgid "Message" msgstr "Mensagem" #: spyder_unittest/widgets/datatree.py:45 msgid "Name" msgstr "Nome" #: spyder_unittest/widgets/datatree.py:45 msgid "Status" msgstr "Status" #: spyder_unittest/widgets/datatree.py:45 msgid "Time (ms)" msgstr "Tempo (ms)" #: spyder_unittest/widgets/datatree.py:153 msgid "Collapse" msgstr "Recolher" #: spyder_unittest/widgets/datatree.py:156 msgid "Expand" msgstr "Expandir" #: spyder_unittest/widgets/datatree.py:162 msgid "Go to definition" msgstr "Ir para definição" #: spyder_unittest/widgets/datatree.py:169 msgid "Run only this test" msgstr "" #: spyder_unittest/widgets/datatree.py:421 msgid "test" msgstr "teste" #: spyder_unittest/widgets/datatree.py:421 msgid "tests" msgstr "testes" #: spyder_unittest/widgets/datatree.py:425 msgid "No results to show." msgstr "Sem resultados para exibir." #: spyder_unittest/widgets/datatree.py:430 msgid "collected {}" msgstr "coletado {}" #: spyder_unittest/widgets/datatree.py:431 msgid "{} failed" msgstr "{} falhou" #: spyder_unittest/widgets/datatree.py:432 msgid ", {} passed" msgstr ", {} passou" #: spyder_unittest/widgets/datatree.py:434 msgid ", {} other" msgstr ", {} outro" #: spyder_unittest/widgets/datatree.py:436 msgid ", {} pending" msgstr ", {} pendente" #: spyder_unittest/widgets/datatree.py:441 msgid ", {} coverage" msgstr "" #: spyder_unittest/widgets/unittestgui.py:151 msgid "Configure ..." msgstr "Configurar ..." #: spyder_unittest/widgets/unittestgui.py:158 msgid "Show output" msgstr "Mostrar saída" #: spyder_unittest/widgets/unittestgui.py:165 msgid "Collapse all" msgstr "Recolher tudo" #: spyder_unittest/widgets/unittestgui.py:172 msgid "Expand all" msgstr "Expandir tudo" #: spyder_unittest/widgets/unittestgui.py:179 spyder_unittest/widgets/unittestgui.py:300 msgid "Dependencies" msgstr "" #: spyder_unittest/widgets/unittestgui.py:248 msgid "Unit testing output" msgstr "Saída do teste unitário" #: spyder_unittest/widgets/unittestgui.py:291 msgid "Versions of frameworks and their installed plugins:" msgstr "" #: spyder_unittest/widgets/unittestgui.py:393 msgid "Error" msgstr "Erro" #: spyder_unittest/widgets/unittestgui.py:393 msgid "Process failed to start" msgstr "O processo falhou ao iniciar" #: spyder_unittest/widgets/unittestgui.py:396 msgid "Running tests ..." msgstr "Executando teste..." 
#: spyder_unittest/widgets/unittestgui.py:417 msgid "Stop" msgstr "Parar" #: spyder_unittest/widgets/unittestgui.py:418 msgid "Stop current test process" msgstr "Parar processo de teste atual" #: spyder_unittest/widgets/unittestgui.py:423 msgid "Run tests" msgstr "Executar testes" #: spyder_unittest/widgets/unittestgui.py:451 msgid "Test process exited abnormally" msgstr "" #: spyder_unittest/widgets/unittestgui.py:460 msgid "not run" msgstr "não executar" #: spyder_unittest/widgets/unittestgui.py:467 spyder_unittest/widgets/unittestgui.py:473 msgid "pending" msgstr "pendente" #: spyder_unittest/widgets/unittestgui.py:474 msgid "running" msgstr "executando" #: spyder_unittest/widgets/unittestgui.py:480 msgid "failure" msgstr "falha" #: spyder_unittest/widgets/unittestgui.py:481 msgid "collection error" msgstr "erro na compilação" spyder-unittest-0.7.0/spyder_unittest/locale/ru/000077500000000000000000000000001466560470700220525ustar00rootroot00000000000000spyder-unittest-0.7.0/spyder_unittest/locale/ru/LC_MESSAGES/000077500000000000000000000000001466560470700236375ustar00rootroot00000000000000spyder-unittest-0.7.0/spyder_unittest/locale/ru/LC_MESSAGES/spyder_unittest.mo000066400000000000000000000013201466560470700274350ustar00rootroot00000000000000$,-Project-Id-Version: spyder-unittest POT-Creation-Date: 2023-06-23 16:41+0100 PO-Revision-Date: 2023-06-23 20:17 Last-Translator: Language-Team: Russian MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Generated-By: pygettext.py 1.5 Plural-Forms: nplurals=4; plural=((n%10==1 && n%100!=11) ? 0 : ((n%10 >= 2 && n%10 <=4 && (n%100 < 12 || n%100 > 14)) ? 1 : ((n%10 == 0 || (n%10 >= 5 && n%10 <=9)) || (n%100 >= 11 && n%100 <= 14)) ? 2 : 3)); X-Crowdin-Project: spyder-unittest X-Crowdin-Project-ID: 381839 X-Crowdin-Language: ru X-Crowdin-File: /master/spyder_unittest/locale/spyder_unittest.pot X-Crowdin-File-ID: 49 Language: ru_RU spyder-unittest-0.7.0/spyder_unittest/locale/ru/LC_MESSAGES/spyder_unittest.po000066400000000000000000000125161466560470700274510ustar00rootroot00000000000000msgid "" msgstr "" "Project-Id-Version: spyder-unittest\n" "POT-Creation-Date: 2023-06-23 16:41+0100\n" "PO-Revision-Date: 2023-06-23 20:17\n" "Last-Translator: \n" "Language-Team: Russian\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "Generated-By: pygettext.py 1.5\n" "Plural-Forms: nplurals=4; plural=((n%10==1 && n%100!=11) ? 0 : ((n%10 >= 2 && n%10 <=4 && (n%100 < 12 || n%100 > 14)) ? 1 : ((n%10 == 0 || (n%10 >= 5 && n%10 <=9)) || (n%100 >= 11 && n%100 <= 14)) ? 
2 : 3));\n" "X-Crowdin-Project: spyder-unittest\n" "X-Crowdin-Project-ID: 381839\n" "X-Crowdin-Language: ru\n" "X-Crowdin-File: /master/spyder_unittest/locale/spyder_unittest.pot\n" "X-Crowdin-File-ID: 49\n" "Language: ru_RU\n" #: spyder_unittest/backend/nose2runner.py:99 msgid "Captured stdout" msgstr "" #: spyder_unittest/backend/nose2runner.py:101 msgid "Captured stderr" msgstr "" #: spyder_unittest/backend/pytestrunner.py:129 msgid "(none)" msgstr "" #: spyder_unittest/backend/pytestrunner.py:129 msgid "Missing: {}" msgstr "" #: spyder_unittest/backend/pytestrunner.py:130 msgid "{}\n" "{}" msgstr "" #: spyder_unittest/unittestplugin.py:73 spyder_unittest/widgets/unittestgui.py:138 msgid "Unit testing" msgstr "" #: spyder_unittest/unittestplugin.py:84 msgid "Run test suites and view their results" msgstr "" #: spyder_unittest/unittestplugin.py:105 spyder_unittest/unittestplugin.py:106 spyder_unittest/widgets/unittestgui.py:424 msgid "Run unit tests" msgstr "" #: spyder_unittest/widgets/configdialog.py:78 msgid "Configure tests" msgstr "" #: spyder_unittest/widgets/configdialog.py:86 msgid "Test framework:" msgstr "" #: spyder_unittest/widgets/configdialog.py:95 spyder_unittest/widgets/unittestgui.py:294 msgid "not available" msgstr "" #: spyder_unittest/widgets/configdialog.py:102 msgid "Command-line arguments:" msgstr "" #: spyder_unittest/widgets/configdialog.py:106 msgid "Extra command-line arguments when running tests" msgstr "" #: spyder_unittest/widgets/configdialog.py:118 msgid "Include coverage report in output" msgstr "" #: spyder_unittest/widgets/configdialog.py:119 msgid "Works only for pytest, requires pytest-cov" msgstr "" #: spyder_unittest/widgets/configdialog.py:131 msgid "Directory from which to run tests:" msgstr "" #: spyder_unittest/widgets/configdialog.py:137 spyder_unittest/widgets/configdialog.py:196 msgid "Select directory" msgstr "" #: spyder_unittest/widgets/confpage.py:26 msgid "Settings" msgstr "" #: spyder_unittest/widgets/confpage.py:28 msgid "Abbreviate test names" msgstr "" #: spyder_unittest/widgets/datatree.py:45 msgid "Message" msgstr "" #: spyder_unittest/widgets/datatree.py:45 msgid "Name" msgstr "" #: spyder_unittest/widgets/datatree.py:45 msgid "Status" msgstr "" #: spyder_unittest/widgets/datatree.py:45 msgid "Time (ms)" msgstr "" #: spyder_unittest/widgets/datatree.py:153 msgid "Collapse" msgstr "" #: spyder_unittest/widgets/datatree.py:156 msgid "Expand" msgstr "" #: spyder_unittest/widgets/datatree.py:162 msgid "Go to definition" msgstr "" #: spyder_unittest/widgets/datatree.py:169 msgid "Run only this test" msgstr "" #: spyder_unittest/widgets/datatree.py:421 msgid "test" msgstr "" #: spyder_unittest/widgets/datatree.py:421 msgid "tests" msgstr "" #: spyder_unittest/widgets/datatree.py:425 msgid "No results to show." msgstr "" #: spyder_unittest/widgets/datatree.py:430 msgid "collected {}" msgstr "" #: spyder_unittest/widgets/datatree.py:431 msgid "{} failed" msgstr "" #: spyder_unittest/widgets/datatree.py:432 msgid ", {} passed" msgstr "" #: spyder_unittest/widgets/datatree.py:434 msgid ", {} other" msgstr "" #: spyder_unittest/widgets/datatree.py:436 msgid ", {} pending" msgstr "" #: spyder_unittest/widgets/datatree.py:441 msgid ", {} coverage" msgstr "" #: spyder_unittest/widgets/unittestgui.py:151 msgid "Configure ..." 
msgstr "" #: spyder_unittest/widgets/unittestgui.py:158 msgid "Show output" msgstr "" #: spyder_unittest/widgets/unittestgui.py:165 msgid "Collapse all" msgstr "" #: spyder_unittest/widgets/unittestgui.py:172 msgid "Expand all" msgstr "" #: spyder_unittest/widgets/unittestgui.py:179 spyder_unittest/widgets/unittestgui.py:300 msgid "Dependencies" msgstr "" #: spyder_unittest/widgets/unittestgui.py:248 msgid "Unit testing output" msgstr "" #: spyder_unittest/widgets/unittestgui.py:291 msgid "Versions of frameworks and their installed plugins:" msgstr "" #: spyder_unittest/widgets/unittestgui.py:393 msgid "Error" msgstr "" #: spyder_unittest/widgets/unittestgui.py:393 msgid "Process failed to start" msgstr "" #: spyder_unittest/widgets/unittestgui.py:396 msgid "Running tests ..." msgstr "" #: spyder_unittest/widgets/unittestgui.py:417 msgid "Stop" msgstr "" #: spyder_unittest/widgets/unittestgui.py:418 msgid "Stop current test process" msgstr "" #: spyder_unittest/widgets/unittestgui.py:423 msgid "Run tests" msgstr "" #: spyder_unittest/widgets/unittestgui.py:451 msgid "Test process exited abnormally" msgstr "" #: spyder_unittest/widgets/unittestgui.py:460 msgid "not run" msgstr "" #: spyder_unittest/widgets/unittestgui.py:467 spyder_unittest/widgets/unittestgui.py:473 msgid "pending" msgstr "" #: spyder_unittest/widgets/unittestgui.py:474 msgid "running" msgstr "" #: spyder_unittest/widgets/unittestgui.py:480 msgid "failure" msgstr "" #: spyder_unittest/widgets/unittestgui.py:481 msgid "collection error" msgstr "" spyder-unittest-0.7.0/spyder_unittest/locale/spyder_unittest.pot000066400000000000000000000120631466560470700254170ustar00rootroot00000000000000# SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR ORGANIZATION # FIRST AUTHOR , YEAR. # msgid "" msgstr "" "Project-Id-Version: PACKAGE VERSION\n" "POT-Creation-Date: 2023-06-23 16:41+0100\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "Last-Translator: FULL NAME \n" "Language-Team: LANGUAGE \n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "Generated-By: pygettext.py 1.5\n" #: spyder_unittest/backend/nose2runner.py:99 msgid "Captured stdout" msgstr "" #: spyder_unittest/backend/nose2runner.py:101 msgid "Captured stderr" msgstr "" #: spyder_unittest/backend/pytestrunner.py:129 msgid "(none)" msgstr "" #: spyder_unittest/backend/pytestrunner.py:129 msgid "Missing: {}" msgstr "" #: spyder_unittest/backend/pytestrunner.py:130 msgid "" "{}\n" "{}" msgstr "" #: spyder_unittest/unittestplugin.py:73 spyder_unittest/widgets/unittestgui.py:138 msgid "Unit testing" msgstr "" #: spyder_unittest/unittestplugin.py:84 msgid "Run test suites and view their results" msgstr "" #: spyder_unittest/unittestplugin.py:105 spyder_unittest/unittestplugin.py:106 spyder_unittest/widgets/unittestgui.py:424 msgid "Run unit tests" msgstr "" #: spyder_unittest/widgets/configdialog.py:78 msgid "Configure tests" msgstr "" #: spyder_unittest/widgets/configdialog.py:86 msgid "Test framework:" msgstr "" #: spyder_unittest/widgets/configdialog.py:95 spyder_unittest/widgets/unittestgui.py:294 msgid "not available" msgstr "" #: spyder_unittest/widgets/configdialog.py:102 msgid "Command-line arguments:" msgstr "" #: spyder_unittest/widgets/configdialog.py:106 msgid "Extra command-line arguments when running tests" msgstr "" #: spyder_unittest/widgets/configdialog.py:118 msgid "Include coverage report in output" msgstr "" #: spyder_unittest/widgets/configdialog.py:119 msgid "Works only for pytest, requires pytest-cov" msgstr "" 
#: spyder_unittest/widgets/configdialog.py:131 msgid "Directory from which to run tests:" msgstr "" #: spyder_unittest/widgets/configdialog.py:137 spyder_unittest/widgets/configdialog.py:196 msgid "Select directory" msgstr "" #: spyder_unittest/widgets/confpage.py:26 msgid "Settings" msgstr "" #: spyder_unittest/widgets/confpage.py:28 msgid "Abbreviate test names" msgstr "" #: spyder_unittest/widgets/datatree.py:45 msgid "Message" msgstr "" #: spyder_unittest/widgets/datatree.py:45 msgid "Name" msgstr "" #: spyder_unittest/widgets/datatree.py:45 msgid "Status" msgstr "" #: spyder_unittest/widgets/datatree.py:45 msgid "Time (ms)" msgstr "" #: spyder_unittest/widgets/datatree.py:153 msgid "Collapse" msgstr "" #: spyder_unittest/widgets/datatree.py:156 msgid "Expand" msgstr "" #: spyder_unittest/widgets/datatree.py:162 msgid "Go to definition" msgstr "" #: spyder_unittest/widgets/datatree.py:169 msgid "Run only this test" msgstr "" #: spyder_unittest/widgets/datatree.py:421 msgid "test" msgstr "" #: spyder_unittest/widgets/datatree.py:421 msgid "tests" msgstr "" #: spyder_unittest/widgets/datatree.py:425 msgid "No results to show." msgstr "" #: spyder_unittest/widgets/datatree.py:430 msgid "collected {}" msgstr "" #: spyder_unittest/widgets/datatree.py:431 msgid "{} failed" msgstr "" #: spyder_unittest/widgets/datatree.py:432 msgid ", {} passed" msgstr "" #: spyder_unittest/widgets/datatree.py:434 msgid ", {} other" msgstr "" #: spyder_unittest/widgets/datatree.py:436 msgid ", {} pending" msgstr "" #: spyder_unittest/widgets/datatree.py:441 msgid ", {} coverage" msgstr "" #: spyder_unittest/widgets/unittestgui.py:151 msgid "Configure ..." msgstr "" #: spyder_unittest/widgets/unittestgui.py:158 msgid "Show output" msgstr "" #: spyder_unittest/widgets/unittestgui.py:165 msgid "Collapse all" msgstr "" #: spyder_unittest/widgets/unittestgui.py:172 msgid "Expand all" msgstr "" #: spyder_unittest/widgets/unittestgui.py:179 spyder_unittest/widgets/unittestgui.py:300 msgid "Dependencies" msgstr "" #: spyder_unittest/widgets/unittestgui.py:248 msgid "Unit testing output" msgstr "" #: spyder_unittest/widgets/unittestgui.py:291 msgid "Versions of frameworks and their installed plugins:" msgstr "" #: spyder_unittest/widgets/unittestgui.py:393 msgid "Error" msgstr "" #: spyder_unittest/widgets/unittestgui.py:393 msgid "Process failed to start" msgstr "" #: spyder_unittest/widgets/unittestgui.py:396 msgid "Running tests ..." 
msgstr "" #: spyder_unittest/widgets/unittestgui.py:417 msgid "Stop" msgstr "" #: spyder_unittest/widgets/unittestgui.py:418 msgid "Stop current test process" msgstr "" #: spyder_unittest/widgets/unittestgui.py:423 msgid "Run tests" msgstr "" #: spyder_unittest/widgets/unittestgui.py:451 msgid "Test process exited abnormally" msgstr "" #: spyder_unittest/widgets/unittestgui.py:460 msgid "not run" msgstr "" #: spyder_unittest/widgets/unittestgui.py:467 spyder_unittest/widgets/unittestgui.py:473 msgid "pending" msgstr "" #: spyder_unittest/widgets/unittestgui.py:474 msgid "running" msgstr "" #: spyder_unittest/widgets/unittestgui.py:480 msgid "failure" msgstr "" #: spyder_unittest/widgets/unittestgui.py:481 msgid "collection error" msgstr "" spyder-unittest-0.7.0/spyder_unittest/locale/zh_CN/000077500000000000000000000000001466560470700224255ustar00rootroot00000000000000spyder-unittest-0.7.0/spyder_unittest/locale/zh_CN/LC_MESSAGES/000077500000000000000000000000001466560470700242125ustar00rootroot00000000000000spyder-unittest-0.7.0/spyder_unittest/locale/zh_CN/LC_MESSAGES/spyder_unittest.mo000066400000000000000000000010621466560470700300130ustar00rootroot00000000000000$,-Project-Id-Version: spyder-unittest POT-Creation-Date: 2023-06-23 16:41+0100 PO-Revision-Date: 2023-06-23 20:17 Last-Translator: Language-Team: Chinese Simplified MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Generated-By: pygettext.py 1.5 Plural-Forms: nplurals=1; plural=0; X-Crowdin-Project: spyder-unittest X-Crowdin-Project-ID: 381839 X-Crowdin-Language: zh-CN X-Crowdin-File: /master/spyder_unittest/locale/spyder_unittest.pot X-Crowdin-File-ID: 49 Language: zh_CN spyder-unittest-0.7.0/spyder_unittest/locale/zh_CN/LC_MESSAGES/spyder_unittest.po000066400000000000000000000122601466560470700300200ustar00rootroot00000000000000msgid "" msgstr "" "Project-Id-Version: spyder-unittest\n" "POT-Creation-Date: 2023-06-23 16:41+0100\n" "PO-Revision-Date: 2023-06-23 20:17\n" "Last-Translator: \n" "Language-Team: Chinese Simplified\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "Generated-By: pygettext.py 1.5\n" "Plural-Forms: nplurals=1; plural=0;\n" "X-Crowdin-Project: spyder-unittest\n" "X-Crowdin-Project-ID: 381839\n" "X-Crowdin-Language: zh-CN\n" "X-Crowdin-File: /master/spyder_unittest/locale/spyder_unittest.pot\n" "X-Crowdin-File-ID: 49\n" "Language: zh_CN\n" #: spyder_unittest/backend/nose2runner.py:99 msgid "Captured stdout" msgstr "" #: spyder_unittest/backend/nose2runner.py:101 msgid "Captured stderr" msgstr "" #: spyder_unittest/backend/pytestrunner.py:129 msgid "(none)" msgstr "" #: spyder_unittest/backend/pytestrunner.py:129 msgid "Missing: {}" msgstr "" #: spyder_unittest/backend/pytestrunner.py:130 msgid "{}\n" "{}" msgstr "" #: spyder_unittest/unittestplugin.py:73 spyder_unittest/widgets/unittestgui.py:138 msgid "Unit testing" msgstr "" #: spyder_unittest/unittestplugin.py:84 msgid "Run test suites and view their results" msgstr "" #: spyder_unittest/unittestplugin.py:105 spyder_unittest/unittestplugin.py:106 spyder_unittest/widgets/unittestgui.py:424 msgid "Run unit tests" msgstr "" #: spyder_unittest/widgets/configdialog.py:78 msgid "Configure tests" msgstr "" #: spyder_unittest/widgets/configdialog.py:86 msgid "Test framework:" msgstr "" #: spyder_unittest/widgets/configdialog.py:95 spyder_unittest/widgets/unittestgui.py:294 msgid "not available" msgstr "" #: spyder_unittest/widgets/configdialog.py:102 msgid 
"Command-line arguments:" msgstr "" #: spyder_unittest/widgets/configdialog.py:106 msgid "Extra command-line arguments when running tests" msgstr "" #: spyder_unittest/widgets/configdialog.py:118 msgid "Include coverage report in output" msgstr "" #: spyder_unittest/widgets/configdialog.py:119 msgid "Works only for pytest, requires pytest-cov" msgstr "" #: spyder_unittest/widgets/configdialog.py:131 msgid "Directory from which to run tests:" msgstr "" #: spyder_unittest/widgets/configdialog.py:137 spyder_unittest/widgets/configdialog.py:196 msgid "Select directory" msgstr "" #: spyder_unittest/widgets/confpage.py:26 msgid "Settings" msgstr "" #: spyder_unittest/widgets/confpage.py:28 msgid "Abbreviate test names" msgstr "" #: spyder_unittest/widgets/datatree.py:45 msgid "Message" msgstr "" #: spyder_unittest/widgets/datatree.py:45 msgid "Name" msgstr "" #: spyder_unittest/widgets/datatree.py:45 msgid "Status" msgstr "" #: spyder_unittest/widgets/datatree.py:45 msgid "Time (ms)" msgstr "" #: spyder_unittest/widgets/datatree.py:153 msgid "Collapse" msgstr "" #: spyder_unittest/widgets/datatree.py:156 msgid "Expand" msgstr "" #: spyder_unittest/widgets/datatree.py:162 msgid "Go to definition" msgstr "" #: spyder_unittest/widgets/datatree.py:169 msgid "Run only this test" msgstr "" #: spyder_unittest/widgets/datatree.py:421 msgid "test" msgstr "" #: spyder_unittest/widgets/datatree.py:421 msgid "tests" msgstr "" #: spyder_unittest/widgets/datatree.py:425 msgid "No results to show." msgstr "" #: spyder_unittest/widgets/datatree.py:430 msgid "collected {}" msgstr "" #: spyder_unittest/widgets/datatree.py:431 msgid "{} failed" msgstr "" #: spyder_unittest/widgets/datatree.py:432 msgid ", {} passed" msgstr "" #: spyder_unittest/widgets/datatree.py:434 msgid ", {} other" msgstr "" #: spyder_unittest/widgets/datatree.py:436 msgid ", {} pending" msgstr "" #: spyder_unittest/widgets/datatree.py:441 msgid ", {} coverage" msgstr "" #: spyder_unittest/widgets/unittestgui.py:151 msgid "Configure ..." msgstr "" #: spyder_unittest/widgets/unittestgui.py:158 msgid "Show output" msgstr "" #: spyder_unittest/widgets/unittestgui.py:165 msgid "Collapse all" msgstr "" #: spyder_unittest/widgets/unittestgui.py:172 msgid "Expand all" msgstr "" #: spyder_unittest/widgets/unittestgui.py:179 spyder_unittest/widgets/unittestgui.py:300 msgid "Dependencies" msgstr "" #: spyder_unittest/widgets/unittestgui.py:248 msgid "Unit testing output" msgstr "" #: spyder_unittest/widgets/unittestgui.py:291 msgid "Versions of frameworks and their installed plugins:" msgstr "" #: spyder_unittest/widgets/unittestgui.py:393 msgid "Error" msgstr "" #: spyder_unittest/widgets/unittestgui.py:393 msgid "Process failed to start" msgstr "" #: spyder_unittest/widgets/unittestgui.py:396 msgid "Running tests ..." 
msgstr "" #: spyder_unittest/widgets/unittestgui.py:417 msgid "Stop" msgstr "" #: spyder_unittest/widgets/unittestgui.py:418 msgid "Stop current test process" msgstr "" #: spyder_unittest/widgets/unittestgui.py:423 msgid "Run tests" msgstr "" #: spyder_unittest/widgets/unittestgui.py:451 msgid "Test process exited abnormally" msgstr "" #: spyder_unittest/widgets/unittestgui.py:460 msgid "not run" msgstr "" #: spyder_unittest/widgets/unittestgui.py:467 spyder_unittest/widgets/unittestgui.py:473 msgid "pending" msgstr "" #: spyder_unittest/widgets/unittestgui.py:474 msgid "running" msgstr "" #: spyder_unittest/widgets/unittestgui.py:480 msgid "failure" msgstr "" #: spyder_unittest/widgets/unittestgui.py:481 msgid "collection error" msgstr "" spyder-unittest-0.7.0/spyder_unittest/tests/000077500000000000000000000000001466560470700213275ustar00rootroot00000000000000spyder-unittest-0.7.0/spyder_unittest/tests/conftest.py000066400000000000000000000027661466560470700235410ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # Copyright © Spyder Project Contributors # Licensed under the terms of the MIT License # (see LICENSE.txt for details) """ Configuration file for Pytest. This contains the necessary definitions to make the main_window fixture available for integration tests. """ # Standard library imports import os # Third-party imports from qtpy.QtWidgets import QApplication import pytest # QtWebEngineWidgets must be imported # before a QCoreApplication instance is created from qtpy import QtWebEngineWidgets # noqa # Spyder imports from spyder import version_info as spyder_version_info from spyder.api.plugin_registration.registry import PLUGIN_REGISTRY from spyder.app import start from spyder.config.manager import CONF @pytest.fixture def main_window(monkeypatch): """Main Window fixture""" # Don't show tours message CONF.set('tours', 'show_tour_message', False) # Turn introspection on, even though it's slower and more memory # intensive, because otherwise tests are aborted at end with # "QThread: Destroyed while thread is still running". os.environ['SPY_TEST_USE_INTROSPECTION'] = 'True' QApplication.processEvents() # Start the window window = start.main() QApplication.processEvents() yield window # Close main window window.closing(close_immediately=True) window.close() CONF.reset_to_defaults(notification=False) CONF.reset_manager() PLUGIN_REGISTRY.reset() os.environ.pop('SPY_TEST_USE_INTROSPECTION') spyder-unittest-0.7.0/spyder_unittest/tests/test_unittestplugin.py000066400000000000000000000127741466560470700260510ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # Copyright © Spyder Project Contributors # Licensed under the terms of the MIT License # (see LICENSE.txt for details) """ Tests for the integration of the plugin with Spyder. """ # Standard library imports from collections import OrderedDict import os # Third party imports from qtpy.QtCore import Qt # Spyder imports from spyder import version_info as spyder_version_info from spyder.api.plugins import Plugins from spyder.plugins.mainmenu.api import ApplicationMenus # Local imports from spyder_unittest.unittestplugin import UnitTestPlugin from spyder_unittest.widgets.configdialog import Config def test_menu_item(main_window): """ Test that plugin adds item 'Run unit tests' to Run menu. 
""" main_menu = main_window.get_plugin(Plugins.MainMenu) run_menu = main_menu.get_application_menu(ApplicationMenus.Run) actions = run_menu.get_actions() # Filter out seperators (indicated by action is None) and convert to text menu_items = [action.text() for action in actions if action] assert 'Run unit tests' in menu_items def test_pythonpath_change(main_window): """ Test that pythonpath changes in Spyder propagate to UnitTestWidget. """ ppm = main_window.get_plugin(Plugins.PythonpathManager) unittest_plugin = main_window.get_plugin(UnitTestPlugin.NAME) new_path = '/some/path' new_path_dict = OrderedDict([(new_path, True)]) ppm.get_container()._update_python_path(new_path_dict) assert unittest_plugin.get_widget().pythonpath == [new_path] def test_default_working_dir(main_window, tmpdir): """ Test that plugin's default working dir is current working directory. After creating a project, the plugin's default working dir should be the same as the project directory. When the project is closed again, the plugin's default working dir should revert back to the current working directory. """ projects = main_window.get_plugin(Plugins.Projects) unittest_plugin = main_window.get_plugin(UnitTestPlugin.NAME) project_dir = str(tmpdir) assert unittest_plugin.get_widget().default_wdir == os.getcwd() projects.create_project(project_dir) assert unittest_plugin.get_widget().default_wdir == project_dir projects.close_project() assert unittest_plugin.get_widget().default_wdir == os.getcwd() def test_plugin_config(main_window, tmpdir, qtbot): """ Test that plugin uses the project's config file if a project is open. """ projects = main_window.get_plugin(Plugins.Projects) unittest_plugin = main_window.get_plugin(UnitTestPlugin.NAME) unittest_widget = unittest_plugin.get_widget() project_dir = str(tmpdir) config_file_path = tmpdir.join('.spyproject', 'config', 'unittest.ini') # Test config file does not exist and config is empty assert not config_file_path.check() assert unittest_widget.config is None # Create new project projects.create_project(project_dir) # Test config file does exist but config is empty assert config_file_path.check() assert 'framework = ' in config_file_path.read().splitlines() assert unittest_widget.config is None # Set config and test that this is recorded in config file config = Config(framework='unittest', wdir=str(tmpdir)) with qtbot.waitSignal(unittest_widget.sig_newconfig): unittest_widget.config = config assert 'framework = unittest' in config_file_path.read().splitlines() # Close project and test that config is empty projects.close_project() assert unittest_widget.config is None # Re-open project and test that config is correctly read projects.open_project(project_dir) assert unittest_widget.config == config # Close project before ending test, which removes the project dir projects.close_project() def test_go_to_test_definition(main_window, tmpdir, qtbot): """ Test that double clicking on a test result opens the file with the test definition in the editor with the cursor on the test definition. 
""" unittest_plugin = main_window.get_plugin(UnitTestPlugin.NAME) unittest_widget = unittest_plugin.get_widget() model = unittest_widget.testdatamodel view = unittest_widget.testdataview # Write test file testdir_str = str(tmpdir) testfile_str = tmpdir.join('test_foo.py').strpath os.chdir(testdir_str) with open(testfile_str, 'w') as f: f.write("def test_ok(): assert 1+1 == 2\n" "def test_fail(): assert 1+1 == 3\n") # Run tests config = Config(wdir=testdir_str, framework='pytest', coverage=False) with qtbot.waitSignal( unittest_widget.sig_finished, timeout=10000, raising=True): unittest_widget.run_tests(config) # Check that row 1 corresponds to `test_fail` index = model.index(1, 1) point = view.visualRect(index).center() assert view.indexAt(point).data(Qt.DisplayRole).endswith('test_fail') # Double click on `test_fail` unittest_plugin.switch_to_plugin() with qtbot.waitSignal(view.sig_edit_goto): qtbot.mouseClick(view.viewport(), Qt.LeftButton, pos=point, delay=100) qtbot.mouseDClick(view.viewport(), Qt.LeftButton, pos=point) # Check that test file is opened in editor editor = main_window.get_plugin(Plugins.Editor) filename = editor.get_current_filename() assert filename == testfile_str # Check that cursor is on line defining `test_fail` cursor = editor.get_current_editor().textCursor() line = cursor.block().text() assert line.startswith('def test_fail') spyder-unittest-0.7.0/spyder_unittest/unittestplugin.py000066400000000000000000000317571466560470700236520ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # Copyright © 2013 Spyder Project Contributors # Licensed under the terms of the MIT License # (see LICENSE.txt for details) """Unit testing Plugin.""" # Standard library imports from os import getcwd import os.path as osp # Third party imports import qtawesome from qtpy.QtCore import Qt # Spyder imports from spyder.api.plugins import Plugins, SpyderDockablePlugin from spyder.api.plugin_registration.decorators import ( on_plugin_available, on_plugin_teardown) from spyder.config.base import get_translation from spyder.plugins.mainmenu.api import ApplicationMenus from spyder.utils.palette import SpyderPalette # Local imports from spyder_unittest.widgets.configdialog import Config from spyder_unittest.widgets.confpage import UnitTestConfigPage from spyder_unittest.widgets.unittestgui import UnitTestWidget _ = get_translation('spyder_unittest') class UnitTestPluginActions: Run = 'Run tests' class UnitTestPlugin(SpyderDockablePlugin): """Spyder plugin for unit testing.""" NAME = 'unittest' REQUIRES = [] OPTIONAL = [Plugins.Editor, Plugins.MainMenu, Plugins.Preferences, Plugins.Projects, Plugins.PythonpathManager, Plugins.WorkingDirectory] TABIFY = [Plugins.VariableExplorer] WIDGET_CLASS = UnitTestWidget CONF_SECTION = NAME CONF_DEFAULTS = [(CONF_SECTION, {'framework': '', 'wdir': '', 'coverage': False, 'args': [], 'abbrev_test_names': False}), ('shortcuts', {'unittest/Run tests': 'Alt+Shift+F11'})] CONF_NAMEMAP = {CONF_SECTION: [(CONF_SECTION, ['framework', 'wdir', 'coverage', 'args'])]} CONF_FILE = True CONF_VERSION = '0.2.0' CONF_WIDGET_CLASS = UnitTestConfigPage # --- Mandatory SpyderDockablePlugin methods ------------------------------ @staticmethod def get_name(): """ Return the plugin localized name. Returns ------- str Localized name of the plugin. """ return _('Unit testing') @staticmethod def get_description(): """ Return the plugin localized description. Returns ------- str Localized description of the plugin. 
""" return _('Run test suites and view their results') @classmethod def get_icon(cls): """ Return the plugin associated icon. Returns ------- QIcon QIcon instance """ return qtawesome.icon('mdi.test-tube', color=SpyderPalette.ICON_1) def on_initialize(self): """ Setup the plugin. """ self.get_widget().sig_newconfig.connect(self.save_config) self.create_action( UnitTestPluginActions.Run, text=_('Run unit tests'), tip=_('Run unit tests'), icon=self.get_icon(), triggered=self.maybe_configure_and_start, context=Qt.ApplicationShortcut, register_shortcut=True) # ----- Set up interactions with other plugins ---------------------------- @on_plugin_available(plugin=Plugins.Editor) def on_editor_available(self): """ Set up interactions when Editor plugin available. Add 'Run unit tests' to context menu in editor for Python files. Save all files in editor before running tests. Go to test definition in editor on double click in unit test plugin. """ editor = self.get_plugin(Plugins.Editor) run_action = self.get_action(UnitTestPluginActions.Run) editor.get_widget().pythonfile_dependent_actions += [run_action] # FIXME: Previous line does not do anything self.get_widget().pre_test_hook = editor.get_widget().save_all self.get_widget().sig_edit_goto.connect(self.goto_in_editor) @on_plugin_teardown(plugin=Plugins.Editor) def on_editor_teardown(self): """ Disconnect from Editor plugin. """ self.get_widget().pre_test_hook = None self.get_widget().sig_edit_goto.disconnect(self.goto_in_editor) @on_plugin_available(plugin=Plugins.MainMenu) def on_main_menu_available(self): """ Add 'Run unit tests' menu item when MainMenu plugin available. """ mainmenu = self.get_plugin(Plugins.MainMenu) run_action = self.get_action(UnitTestPluginActions.Run) mainmenu.add_item_to_application_menu( run_action, menu_id=ApplicationMenus.Run) @on_plugin_teardown(plugin=Plugins.MainMenu) def on_main_menu_teardown(self): """ Remove 'Run unit tests; menu item from the application menu. """ mainmenu = self.get_plugin(Plugins.MainMenu) mainmenu.remove_item_from_application_menu( UnitTestPluginActions.Run, menu_id=ApplicationMenus.Run) @on_plugin_available(plugin=Plugins.Preferences) def on_preferences_available(self): """ Use config when Preferences plugin available. Specifically, register the unittest plugin preferences, and find out whether Spyder uses a dark interface and communicate this to the unittest widget. """ preferences = self.get_plugin(Plugins.Preferences) preferences.register_plugin_preferences(self) @on_plugin_teardown(plugin=Plugins.Preferences) def on_preferences_teardown(self): """ De-register unittest plugin preferences. """ preferences = self.get_plugin(Plugins.Preferences) preferences.deregister_plugin_preferences(self) @on_plugin_available(plugin=Plugins.Projects) def on_projects_available(self): """ Connect when Projects plugin available. Connect to signals emitted when the current project changes. """ projects = self.get_plugin(Plugins.Projects) projects.sig_project_created.connect(self.handle_project_change) projects.sig_project_loaded.connect(self.handle_project_change) projects.sig_project_closed.connect(self.handle_project_change) @on_plugin_teardown(plugin=Plugins.Projects) def on_projects_teardown(self): """ Disconnect from Projects plugin. 
""" projects = self.get_plugin(Plugins.Projects) projects.sig_project_created.disconnect(self.handle_project_change) projects.sig_project_loaded.disconnect(self.handle_project_change) projects.sig_project_closed.disconnect(self.handle_project_change) @on_plugin_available(plugin=Plugins.PythonpathManager) def on_pythonpath_manager_available(self): """ Connect to signal announcing that Python path changed. """ ppm = self.get_plugin(Plugins.PythonpathManager) ppm.sig_pythonpath_changed.connect(self.update_pythonpath) @on_plugin_teardown(plugin=Plugins.PythonpathManager) def on_pythonpath_manager_teardown(self): """ Disconnect from PythonpathManager plugin. """ ppm = self.get_plugin(Plugins.PythonpathManager) ppm.sig_pythonpath_changed.disconnect(self.update_pythonpath) @on_plugin_available(plugin=Plugins.WorkingDirectory) def on_working_directory_available(self): """ Connect when WorkingDirectory plugin available. Find out what the current working directory is and connect to the signal emitted when the current working directory changes. """ working_directory = self.get_plugin(Plugins.WorkingDirectory) working_directory.sig_current_directory_changed.connect( self.update_default_wdir) self.update_default_wdir() @on_plugin_teardown(plugin=Plugins.WorkingDirectory) def on_working_directory_teardown(self): """ Disconnect from WorkingDirectory plugin. """ working_directory = self.get_plugin(Plugins.WorkingDirectory) working_directory.sig_current_directory_changed.disconnect( self.update_default_wdir) # --- UnitTestPlugin methods ---------------------------------------------- def update_pythonpath(self): """ Update Python path used to run unit tests. This function is called whenever the Python path set in Spyder changes. It synchronizes the Python path in the unittest widget with the Python path in Spyder. """ ppm = self.get_plugin(Plugins.PythonpathManager) self.get_widget().pythonpath = ppm.get_spyder_pythonpath() def handle_project_change(self): """ Handle the event where the current project changes. This updates the default working directory for running tests and loads the test configuration from the project preferences. """ self.update_default_wdir() self.load_config() def update_default_wdir(self): """ Update default working dir for running unit tests. The default working dir for running unit tests is set to the project directory if a project is open, or the current working directory if no project is opened. This function is called whenever this directory may change. """ projects = self.get_plugin(Plugins.Projects) if projects: wdir = projects.get_active_project_path() if not wdir: # if no project opened wdir = getcwd() else: wdir = getcwd() self.get_widget().default_wdir = wdir def load_config(self): """ Load test configuration from project preferences. If the test configuration stored in the project preferences is valid, then use it. If it is not valid (e.g., because the user never configured testing for this project) or no project is opened, then invalidate the current test configuration. If necessary, patch the project preferences to include this plugin's config options. 
""" widget = self.get_widget() projects_plugin = self.get_plugin(Plugins.Projects) if projects_plugin: project = projects_plugin.get_active_project() else: project = None if not project: widget.set_config_without_emit(None) return if self.CONF_SECTION not in project.config._name_map: project.config._name_map = project.config._name_map.copy() project.config._name_map.update(self.CONF_NAMEMAP) if self.CONF_SECTION not in project.config._configs_map: config_class = project.config.get_config_class() path = osp.join(project.root_path, '.spyproject', 'config') conf = config_class( name=self.CONF_SECTION, defaults=self.CONF_DEFAULTS, path=path, load=True, version=self.CONF_VERSION) project.config._configs_map[self.CONF_SECTION] = conf new_config = Config( framework=project.get_option('framework', self.CONF_SECTION), wdir=project.get_option('wdir', self.CONF_SECTION), coverage=project.get_option('coverage', self.CONF_SECTION), args=project.get_option('args', self.CONF_SECTION)) if not widget.config_is_valid(new_config): new_config = None widget.set_config_without_emit(new_config) def save_config(self, test_config): """ Save test configuration in project preferences. If no project is opened, then do not save. """ projects_plugin = self.get_plugin(Plugins.Projects) if not projects_plugin: return project = projects_plugin.get_active_project() if not project: return project.set_option('framework', test_config.framework, self.CONF_SECTION) project.set_option('wdir', test_config.wdir, self.CONF_SECTION) project.set_option('coverage', test_config.coverage, self.CONF_SECTION) project.set_option('args', test_config.args, self.CONF_SECTION) def goto_in_editor(self, filename, lineno): """ Go to specified line in editor. This function is called when the unittest widget emits `sig_edit_goto`. Note that the line number in the signal is zero based (the first line is line 0), but the editor expects a one-based line number. """ editor_plugin = self.get_plugin(Plugins.Editor) if editor_plugin: editor_plugin.load(filename, lineno + 1, '') # ----- Public API -------------------------------------------------------- def maybe_configure_and_start(self): """ Ask for configuration if necessary and then run tests. Raise unittest widget. If the current test configuration is not valid (or not set), then ask the user to configure. Then run the tests. """ self.switch_to_plugin() self.get_widget().maybe_configure_and_start() spyder-unittest-0.7.0/spyder_unittest/widgets/000077500000000000000000000000001466560470700216335ustar00rootroot00000000000000spyder-unittest-0.7.0/spyder_unittest/widgets/__init__.py000066400000000000000000000002731466560470700237460ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # Copyright © 2013 Spyder Project Contributors # Licensed under the terms of the MIT License # (see LICENSE.txt for details) """Widgets for unittest plugin.""" spyder-unittest-0.7.0/spyder_unittest/widgets/configdialog.py000066400000000000000000000205211466560470700246320ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # Copyright © 2013 Spyder Project Contributors # Licensed under the terms of the MIT License # (see LICENSE.txt for details) """ Functionality for asking the user to specify the test configuration. The main entry point is `ask_for_config()`. 
""" from __future__ import annotations # Standard library imports from os import getcwd import os.path as osp import shlex from typing import Optional, NamedTuple # Third party imports from qtpy.compat import getexistingdirectory from qtpy.QtCore import Slot from qtpy.QtWidgets import ( QApplication, QComboBox, QDialog, QDialogButtonBox, QGridLayout, QHBoxLayout, QLabel, QLineEdit, QPushButton, QVBoxLayout, QCheckBox) from spyder.config.base import get_translation from spyder.utils import icon_manager as ima try: _ = get_translation('spyder_unittest') except KeyError: import gettext _ = gettext.gettext class Config(NamedTuple): framework: Optional[str] = None wdir: str = '' coverage: bool = False args: list[str] = [] class ConfigDialog(QDialog): """ Dialog window for specifying test configuration. The window contains a combobox with all the frameworks, a line edit box for specifying the working directory, a button to use a file browser for selecting the directory, and OK and Cancel buttons. Initially, no framework is selected and the OK button is disabled. Selecting a framework enables the OK button. """ # Width of strut in the layout of the dialog window; this determines # the width of the dialog STRUT_WIDTH = 400 # Extra vertical space added between elements in the dialog EXTRA_SPACE = 10 def __init__(self, frameworks, config, versions, parent=None): """ Construct a dialog window. Parameters ---------- frameworks : dict of (str, type) Names of all supported frameworks with their associated class (assumed to be a subclass of RunnerBase) config : Config Initial configuration versions : dict Versions of testing frameworks and their plugins parent : QWidget """ super().__init__(parent) self.versions = versions self.setWindowTitle(_('Configure tests')) layout = QVBoxLayout(self) layout.addStrut(self.STRUT_WIDTH) grid_layout = QGridLayout() # Combo box for selecting the test framework framework_label = QLabel(_('Test framework:')) grid_layout.addWidget(framework_label, 0, 0) self.framework_combobox = QComboBox(self) for ix, (name, runner) in enumerate(sorted(frameworks.items())): installed = versions[name]['available'] if installed: label = name else: label = '{} ({})'.format(name, _('not available')) self.framework_combobox.addItem(label) self.framework_combobox.model().item(ix).setEnabled(installed) grid_layout.addWidget(self.framework_combobox, 0, 1) # Line edit field for adding extra command-line arguments args_label = QLabel(_('Command-line arguments:')) grid_layout.addWidget(args_label, 1, 0) self.args_lineedit = QLineEdit(self) args_toolTip = _('Extra command-line arguments when running tests') self.args_lineedit.setToolTip(args_toolTip) grid_layout.addWidget(self.args_lineedit, 1, 1) layout.addLayout(grid_layout) spacing = grid_layout.verticalSpacing() + self.EXTRA_SPACE grid_layout.setVerticalSpacing(spacing) layout.addSpacing(self.EXTRA_SPACE) # Checkbox for enabling coverage report coverage_label = _('Include coverage report in output') coverage_toolTip = _('Works only for pytest, requires pytest-cov') coverage_layout = QHBoxLayout() self.coverage_checkbox = QCheckBox(coverage_label, self) self.coverage_checkbox.setToolTip(coverage_toolTip) self.coverage_checkbox.setEnabled(False) coverage_layout.addWidget(self.coverage_checkbox) layout.addLayout(coverage_layout) layout.addSpacing(self.EXTRA_SPACE) # Line edit field for selecting directory wdir_label = QLabel(_('Directory from which to run tests:')) layout.addWidget(wdir_label) wdir_layout = QHBoxLayout() self.wdir_lineedit = 
QLineEdit(self) wdir_layout.addWidget(self.wdir_lineedit) self.wdir_button = QPushButton(ima.icon('DirOpenIcon'), '', self) self.wdir_button.setToolTip(_("Select directory")) self.wdir_button.clicked.connect(lambda: self.select_directory()) wdir_layout.addWidget(self.wdir_button) layout.addLayout(wdir_layout) layout.addSpacing(2 * self.EXTRA_SPACE) # OK and Cancel buttons at the bottom self.buttons = QDialogButtonBox(QDialogButtonBox.Ok | QDialogButtonBox.Cancel) layout.addWidget(self.buttons) self.buttons.accepted.connect(self.accept) self.buttons.rejected.connect(self.reject) self.ok_button = self.buttons.button(QDialogButtonBox.Ok) self.ok_button.setEnabled(False) self.framework_combobox.currentIndexChanged.connect( self.framework_changed) # Set initial values to agree with the given config self.framework_combobox.setCurrentIndex(-1) if config.framework: index = self.framework_combobox.findText(config.framework) if index != -1: self.framework_combobox.setCurrentIndex(index) self.coverage_checkbox.setChecked(config.coverage) self.enable_coverage_checkbox_if_available() self.args_lineedit.setText(shlex.join(config.args)) self.wdir_lineedit.setText(config.wdir) @Slot(int) def framework_changed(self, index): """Called when selected framework changes.""" if index != -1: self.ok_button.setEnabled(True) self.enable_coverage_checkbox_if_available() def enable_coverage_checkbox_if_available(self): """ Enable coverage checkbox only if coverage is available. Coverage is only implemented for pytest and requires pytest_cov. Enable the coverage checkbox if these conditions are satisfied, otherwise, disable and un-check the checkbox. """ if (str(self.framework_combobox.currentText()) != 'pytest' or 'pytest-cov' not in self.versions['pytest']['plugins']): self.coverage_checkbox.setEnabled(False) self.coverage_checkbox.setChecked(False) else: self.coverage_checkbox.setEnabled(True) def select_directory(self): """Display dialog for user to select working directory.""" basedir = self.wdir_lineedit.text() if not osp.isdir(basedir): basedir = getcwd() title = _("Select directory") directory = getexistingdirectory(self, title, basedir) if directory: self.wdir_lineedit.setText(directory) def get_config(self): """ Return the test configuration specified by the user. Returns ------- Config Test configuration """ framework = self.framework_combobox.currentText() if framework == '': framework = None args = self.args_lineedit.text() args = shlex.split(args) return Config(framework=framework, wdir=self.wdir_lineedit.text(), coverage=self.coverage_checkbox.isChecked(), args=args) def ask_for_config(frameworks, config, versions, parent=None): """ Ask user to specify a test configuration. This is a convenience function which displays a modal dialog window of type `ConfigDialog`. 
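Returns
-------
Config or None
    The test configuration entered by the user if the dialog is accepted,
    or `None` if the user cancels the dialog.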
""" dialog = ConfigDialog(frameworks, config, versions, parent) result = dialog.exec_() if result == QDialog.Accepted: return dialog.get_config() if __name__ == '__main__': app = QApplication([]) frameworks = { 'nose2': object, 'unittest': object, 'pytest': object} versions = { 'nose2': {'available': False}, 'unittest': {'available': True}, 'pytest': {'available': True, 'plugins': {'pytest-cov', '3.1.4'}} } config = Config(wdir=getcwd()) print(ask_for_config(frameworks, config, versions)) spyder-unittest-0.7.0/spyder_unittest/widgets/confpage.py000066400000000000000000000022441466560470700237710ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # ----------------------------------------------------------------------------- # Copyright (c) 2023- Spyder Project Contributors # # Released under the terms of the MIT License # (see LICENSE.txt in the project root directory for details) # ----------------------------------------------------------------------------- """ Spyder-unittest Preferences Page. """ # Third party imports from qtpy.QtWidgets import QGroupBox, QVBoxLayout from spyder.api.preferences import PluginConfigPage from spyder.api.translations import get_translation # Localization _ = get_translation('spyder_unittest') class UnitTestConfigPage(PluginConfigPage): def setup_page(self) -> None: settings_group = QGroupBox(_('Settings')) widget = self.create_checkbox( _('Abbreviate test names'), 'abbrev_test_names', default=False) self.abbrev_box = widget.checkbox settings_layout = QVBoxLayout() settings_layout.addWidget(self.abbrev_box) settings_group.setLayout(settings_layout) vlayout = QVBoxLayout() vlayout.addWidget(settings_group) vlayout.addStretch(1) self.setLayout(vlayout) spyder-unittest-0.7.0/spyder_unittest/widgets/datatree.py000066400000000000000000000410571466560470700240050ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # Copyright © 2017 Spyder Project Contributors # Licensed under the terms of the MIT License # (see LICENSE.txt for details) """Model and view classes for storing and displaying test results.""" # Standard library imports from collections import Counter from operator import attrgetter # Third party imports from qtpy import PYQT4 from qtpy.QtCore import QAbstractItemModel, QModelIndex, Qt, Signal from qtpy.QtGui import QBrush, QColor, QFont from qtpy.QtWidgets import QMenu, QTreeView from spyder.api.config.mixins import SpyderConfigurationAccessor from spyder.config.base import get_translation from spyder.utils.palette import SpyderPalette from spyder.utils.qthelpers import create_action # Local imports from spyder_unittest.backend.abbreviator import Abbreviator from spyder_unittest.backend.runnerbase import Category try: _ = get_translation('spyder_unittest') except KeyError: import gettext _ = gettext.gettext COLORS = { Category.OK: SpyderPalette.COLOR_SUCCESS_1, Category.FAIL: SpyderPalette.COLOR_ERROR_1, Category.SKIP: SpyderPalette.COLOR_WARN_1, Category.PENDING: SpyderPalette.COLOR_BACKGROUND_1, Category.COVERAGE: SpyderPalette.COLOR_ACCENT_1 } STATUS_COLUMN = 0 NAME_COLUMN = 1 MESSAGE_COLUMN = 2 TIME_COLUMN = 3 HEADERS = [_('Status'), _('Name'), _('Message'), _('Time (ms)')] TOPLEVEL_ID = 2 ** 32 - 1 class TestDataView(QTreeView): """ Tree widget displaying test results. Signals ------- sig_edit_goto(str, int): Emitted if editor should go to some position. Arguments are file name and line number (zero-based). sig_single_test_run_requested(str): Emitted to request a single test to be run. Argument is the name of the test. 
""" sig_edit_goto = Signal(str, int) sig_single_test_run_requested = Signal(str) __test__ = False # this is not a pytest test class def __init__(self, parent=None): """Constructor.""" QTreeView.__init__(self, parent) self.header().setDefaultAlignment(Qt.AlignCenter) self.setItemsExpandable(True) self.setSortingEnabled(True) self.header().setSortIndicatorShown(False) self.header().sortIndicatorChanged.connect(self.sortByColumn) self.header().sortIndicatorChanged.connect( lambda col, order: self.header().setSortIndicatorShown(True)) self.setExpandsOnDoubleClick(False) self.doubleClicked.connect(self.go_to_test_definition) def reset(self): """ Reset internal state of the view and read all data afresh from model. This function is called whenever the model data changes drastically. """ QTreeView.reset(self) self.resizeColumns() self.spanFirstColumn(0, self.model().rowCount() - 1) def rowsInserted(self, parent, firstRow, lastRow): """Called when rows are inserted.""" QTreeView.rowsInserted(self, parent, firstRow, lastRow) self.resizeColumns() self.spanFirstColumn(firstRow, lastRow) def dataChanged(self, topLeft, bottomRight, roles=[]): """Called when data in model has changed.""" if PYQT4: QTreeView.dataChanged(self, topLeft, bottomRight) else: QTreeView.dataChanged(self, topLeft, bottomRight, roles) self.resizeColumns() while topLeft.parent().isValid(): topLeft = topLeft.parent() while bottomRight.parent().isValid(): bottomRight = bottomRight.parent() self.spanFirstColumn(topLeft.row(), bottomRight.row()) def contextMenuEvent(self, event): """Called when user requests a context menu.""" index = self.indexAt(event.pos()) index = self.make_index_canonical(index) if not index: return # do nothing if no item under mouse position contextMenu = self.build_context_menu(index) contextMenu.exec_(event.globalPos()) def go_to_test_definition(self, index): """Ask editor to go to definition of test corresponding to index.""" index = self.make_index_canonical(index) filename, lineno = self.model().data(index, Qt.UserRole) if filename is not None: if lineno is None: lineno = 0 self.sig_edit_goto.emit(filename, lineno) def run_single_test(self, index): """Ask plugin to run only the test corresponding to index.""" index = self.make_index_canonical(index) testresult = self.model().testresults[index.row()] testname = testresult.name self.sig_single_test_run_requested.emit(testname) def make_index_canonical(self, index): """ Convert given index to canonical index for the same test. For every test, the canonical index points to the item on the top level in the first column corresponding to the given position. If the given index is invalid, then return None. 
""" if not index.isValid(): return None while index.parent().isValid(): # find top-level node index = index.parent() index = index.sibling(index.row(), 0) # go to first column return index def build_context_menu(self, index): """Build context menu for test item that given index points to.""" contextMenu = QMenu(self) if self.isExpanded(index): menuItem = create_action(self, _('Collapse'), triggered=lambda: self.collapse(index)) else: menuItem = create_action(self, _('Expand'), triggered=lambda: self.expand(index)) menuItem.setEnabled(self.model().hasChildren(index)) contextMenu.addAction(menuItem) menuItem = create_action( self, _('Go to definition'), triggered=lambda: self.go_to_test_definition(index)) test_location = self.model().data(index, Qt.UserRole) menuItem.setEnabled(test_location[0] is not None) contextMenu.addAction(menuItem) menuItem = create_action( self, _('Run only this test'), triggered=lambda: self.run_single_test(index)) result_category = self.model().testresults[index.row()].category menuItem.setEnabled(result_category != Category.COVERAGE) contextMenu.addAction(menuItem) return contextMenu def resizeColumns(self): """Resize column to fit their contents.""" for col in range(self.model().columnCount()): self.resizeColumnToContents(col) def spanFirstColumn(self, firstRow, lastRow): """ Make first column span whole row in second-level children. Note: Second-level children display the test output. Arguments --------- firstRow : int Index of first row to act on. lastRow : int Index of last row to act on. Note that this row is included in the range, following Qt conventions and contrary to Python conventions. """ model = self.model() for row in range(firstRow, lastRow + 1): index = model.index(row, 0) for i in range(model.rowCount(index)): self.setFirstColumnSpanned(i, index, True) class TestDataModel(QAbstractItemModel, SpyderConfigurationAccessor): """ Model class storing test results for display. Test results are stored as a list of TestResults in the property `self.testresults`. Every test is exposed as a child of the root node, with extra information as second-level nodes. As in every model, an iteem of data is identified by its index, which is a tuple (row, column, id). The id is TOPLEVEL_ID for top-level items. For level-2 items, the id is the index of the test in `self.testresults`. Signals ------- sig_summary(str) Emitted with new summary if test results change. """ CONF_SECTION = 'unittest' sig_summary = Signal(str) __test__ = False # this is not a pytest test class def __init__(self, parent=None): """Constructor.""" QAbstractItemModel.__init__(self, parent) self.abbreviator = Abbreviator() self.testresults = [] try: self.monospace_font = parent.window().editor.get_plugin_font() except AttributeError: # If run standalone for testing self.monospace_font = QFont("Courier New") self.monospace_font.setPointSize(10) @property def testresults(self): """List of test results.""" return self._testresults @testresults.setter def testresults(self, new_value): """Setter for test results.""" self.beginResetModel() self.abbreviator = Abbreviator(res.name for res in new_value) self._testresults = new_value self.endResetModel() self.emit_summary() def add_testresults(self, new_tests): """ Add new test results to the model. 
Arguments --------- new_tests : list of TestResult """ firstRow = len(self.testresults) lastRow = firstRow + len(new_tests) - 1 for test in new_tests: self.abbreviator.add(test.name) self.beginInsertRows(QModelIndex(), firstRow, lastRow) self.testresults.extend(new_tests) self.endInsertRows() self.emit_summary() def update_testresults(self, new_results): """ Update some test results by new results. The tests in `new_results` should already be included in `self.testresults` (otherwise a `KeyError` is raised). This function replaces the existing results by `new_results`. Arguments --------- new_results: list of TestResult """ idx_min = idx_max = None for new_result in new_results: for (idx, old_result) in enumerate(self.testresults): if old_result.name == new_result.name: self.testresults[idx] = new_result if idx_min is None: idx_min = idx_max = idx else: idx_min = min(idx_min, idx) idx_max = max(idx_max, idx) break else: raise KeyError('test not found') if idx_min is not None: self.dataChanged.emit(self.index(idx_min, 0), self.index(idx_max, len(HEADERS) - 1)) self.emit_summary() def index(self, row, column, parent=QModelIndex()): """ Construct index to given item of data. If `parent` not valid, then the item of data is on the top level. """ if not self.hasIndex(row, column, parent): # check bounds etc. return QModelIndex() if not parent.isValid(): return self.createIndex(row, column, TOPLEVEL_ID) else: testresult_index = parent.row() return self.createIndex(row, column, testresult_index) def data(self, index, role): """ Return data in `role` for item of data that `index` points to. If `role` is `DisplayRole`, then return string to display. If `role` is `TooltipRole`, then return string for tool tip. If `role` is `FontRole`, then return monospace font for level-2 items. If `role` is `BackgroundRole`, then return background color. If `role` is `TextAlignmentRole`, then return right-aligned for time. If `role` is `UserRole`, then return location of test as (file, line). 
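A few illustrative calls (assuming `model` holds at least one test result; the exact return values depend on the stored results and on the `abbrev_test_names` setting)::

    model.data(model.index(0, NAME_COLUMN), Qt.DisplayRole)  # test name, possibly abbreviated
    model.data(model.index(0, TIME_COLUMN), Qt.DisplayRole)  # time in ms as a string, e.g. '1.23'
    model.data(model.index(0, 0), Qt.UserRole)               # (filename, lineno) of the test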
""" if not index.isValid(): return None row = index.row() column = index.column() id = index.internalId() if role == Qt.DisplayRole: if id != TOPLEVEL_ID: return self.testresults[id].extra_text[index.row()] elif column == STATUS_COLUMN: return self.testresults[row].status elif column == NAME_COLUMN: name = self.testresults[row].name # don't abbreviate for the code coverage filename if self.testresults[row].category == Category.COVERAGE: return name if self.get_conf('abbrev_test_names', False): return self.abbreviator.abbreviate(name) else: return name elif column == MESSAGE_COLUMN: return self.testresults[row].message elif column == TIME_COLUMN: time = self.testresults[row].time return '' if time is None else '{:.2f}'.format(time * 1e3) elif role == Qt.ToolTipRole: if id == TOPLEVEL_ID and column == NAME_COLUMN: return self.testresults[row].name elif role == Qt.FontRole: if id != TOPLEVEL_ID: return self.monospace_font elif role == Qt.BackgroundRole: if id == TOPLEVEL_ID: testresult = self.testresults[row] color = COLORS[testresult.category] return QBrush(QColor(color)) elif role == Qt.TextAlignmentRole: if id == TOPLEVEL_ID and column == TIME_COLUMN: return Qt.AlignRight elif role == Qt.UserRole: if id == TOPLEVEL_ID: testresult = self.testresults[row] return (testresult.filename, testresult.lineno) else: return None def headerData(self, section, orientation, role=Qt.DisplayRole): """Return data for specified header.""" if orientation == Qt.Horizontal and role == Qt.DisplayRole: return HEADERS[section] else: return None def parent(self, index): """Return index to parent of item that `index` points to.""" if not index.isValid(): return QModelIndex() id = index.internalId() if id == TOPLEVEL_ID: return QModelIndex() else: return self.index(id, 0) def rowCount(self, parent=QModelIndex()): """Return number of rows underneath `parent`.""" if not parent.isValid(): return len(self.testresults) if parent.internalId() == TOPLEVEL_ID and parent.column() == 0: return len(self.testresults[parent.row()].extra_text) return 0 def columnCount(self, parent=QModelIndex()): """Return number of rcolumns underneath `parent`.""" if not parent.isValid(): return len(HEADERS) else: return 1 def sort(self, column, order): """Sort model by `column` in `order`.""" def key_time(result): return result.time or -1 self.beginResetModel() reverse = order == Qt.DescendingOrder if column == STATUS_COLUMN: self.testresults.sort(key=attrgetter('category', 'status'), reverse=reverse) elif column == NAME_COLUMN: self.testresults.sort(key=attrgetter('name'), reverse=reverse) elif column == MESSAGE_COLUMN: self.testresults.sort(key=attrgetter('message'), reverse=reverse) elif column == TIME_COLUMN: self.testresults.sort(key=key_time, reverse=reverse) self.endResetModel() def summary(self): """Return summary for current results.""" def n_test_or_tests(n): test_or_tests = _('test') if n == 1 else _('tests') return '{} {}'.format(n, test_or_tests) if not len(self.testresults): return _('No results to show.') counts = Counter(res.category for res in self.testresults) if all(counts[cat] == 0 for cat in (Category.FAIL, Category.OK, Category.SKIP)): txt = n_test_or_tests(counts[Category.PENDING]) return _('collected {}').format(txt) msg = _('{} failed').format(n_test_or_tests(counts[Category.FAIL])) msg += _(', {} passed').format(counts[Category.OK]) if counts[Category.SKIP]: msg += _(', {} other').format(counts[Category.SKIP]) if counts[Category.PENDING]: msg += _(', {} pending').format(counts[Category.PENDING]) if 
counts[Category.COVERAGE]: # find the coverage result and get its status coverage = [res for res in self.testresults if res.category == Category.COVERAGE][0].status msg += _(', {} coverage').format(coverage) return msg def emit_summary(self): """Emit sig_summary with summary for current results.""" self.sig_summary.emit(self.summary()) spyder-unittest-0.7.0/spyder_unittest/widgets/tests/000077500000000000000000000000001466560470700227755ustar00rootroot00000000000000spyder-unittest-0.7.0/spyder_unittest/widgets/tests/__init__.py000066400000000000000000000003021466560470700251010ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # Copyright © 2017 Spyder Project Contributors # Licensed under the terms of the MIT License # (see LICENSE.txt for details) """Tests for spyder_unittest.widgets .""" spyder-unittest-0.7.0/spyder_unittest/widgets/tests/test_configdialog.py000066400000000000000000000116771466560470700270470ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # Copyright © 2013 Spyder Project Contributors # Licensed under the terms of the MIT License # (see LICENSE.txt for details) """Tests for configdialog.py.""" # Standard library imports import os # Third party imports from qtpy.QtWidgets import QDialogButtonBox # Local imports from spyder_unittest.widgets.configdialog import Config, ConfigDialog class SpamRunner: name = 'spam' @classmethod def is_installed(cls): return False class HamRunner: name = 'ham' @classmethod def is_installed(cls): return True class FakePytestRunner: name = 'pytest' @classmethod def is_installed(cls): return True frameworks = {r.name: r for r in [HamRunner, FakePytestRunner, SpamRunner]} versions = { 'spam': {'available': False}, 'ham': {'available': True}, 'pytest': {'available': True, 'plugins': {'pytest-cov', '3.1.4'}} } def default_config(): return Config(framework=None, wdir=os.getcwd(), coverage=False, args=[]) def test_configdialog_uses_frameworks(qtbot): configdialog = ConfigDialog( {'ham': HamRunner}, default_config(), versions) assert configdialog.framework_combobox.count() == 1 assert configdialog.framework_combobox.itemText(0) == 'ham' def test_configdialog_indicates_unvailable_frameworks(qtbot): configdialog = ConfigDialog( {'spam': SpamRunner}, default_config(), versions) assert configdialog.framework_combobox.count() == 1 assert configdialog.framework_combobox.itemText( 0) == 'spam (not available)' def test_configdialog_disables_unavailable_frameworks(qtbot): configdialog = ConfigDialog(frameworks, default_config(), versions) model = configdialog.framework_combobox.model() assert model.item(0).isEnabled() # ham assert model.item(1).isEnabled() # pytest assert not model.item(2).isEnabled() # spam def test_configdialog_sets_initial_config(qtbot): config = Config(framework='pytest', wdir='/some/dir', coverage=True, args=['some', 'arg']) configdialog = ConfigDialog(frameworks, config, versions) assert configdialog.get_config() == config def test_configdialog_click_pytest(qtbot): configdialog = ConfigDialog(frameworks, default_config(), versions) qtbot.addWidget(configdialog) configdialog.framework_combobox.setCurrentIndex(1) assert configdialog.get_config().framework == 'pytest' def test_configdialog_ok_initially_disabled(qtbot): configdialog = ConfigDialog(frameworks, default_config(), versions) qtbot.addWidget(configdialog) assert not configdialog.buttons.button(QDialogButtonBox.Ok).isEnabled() def test_configdialog_ok_setting_framework_initially_enables_ok(qtbot): config = Config(framework='ham', wdir=os.getcwd()) configdialog = 
ConfigDialog(frameworks, config, versions) qtbot.addWidget(configdialog) assert configdialog.buttons.button(QDialogButtonBox.Ok).isEnabled() def test_configdialog_clicking_pytest_enables_ok(qtbot): configdialog = ConfigDialog(frameworks, default_config(), versions) qtbot.addWidget(configdialog) configdialog.framework_combobox.setCurrentIndex(1) assert configdialog.buttons.button(QDialogButtonBox.Ok).isEnabled() def test_configdialog_coverage_checkbox(qtbot, monkeypatch): configdialog = ConfigDialog(frameworks, default_config(), versions) qtbot.addWidget(configdialog) configdialog.framework_combobox.setCurrentIndex(1) configdialog.coverage_checkbox.click() assert configdialog.get_config().coverage is True def test_configdialog_coverage_checkbox_pytestcov_noinstall(qtbot, monkeypatch): local_versions = versions.copy() local_versions['pytest']['plugins'] = {} configdialog = ConfigDialog(frameworks, default_config(), local_versions) qtbot.addWidget(configdialog) configdialog.framework_combobox.setCurrentIndex(1) assert configdialog.coverage_checkbox.isEnabled() is False def test_configdialog_args_lineedit(qtbot): configdialog = ConfigDialog(frameworks, default_config(), versions) qtbot.addWidget(configdialog) configdialog.args_lineedit.setText('-x "ham and" spam') assert configdialog.get_config().args == ['-x', 'ham and', 'spam'] def test_configdialog_wdir_lineedit(qtbot): configdialog = ConfigDialog(frameworks, default_config(), versions) qtbot.addWidget(configdialog) wdir = os.path.normpath(os.path.join(os.getcwd(), os.path.pardir)) configdialog.wdir_lineedit.setText(wdir) assert configdialog.get_config().wdir == wdir def test_configdialog_wdir_button(qtbot, monkeypatch): configdialog = ConfigDialog(frameworks, default_config(), versions) qtbot.addWidget(configdialog) wdir = os.path.normpath(os.path.join(os.getcwd(), os.path.pardir)) monkeypatch.setattr( 'spyder_unittest.widgets.configdialog.getexistingdirectory', lambda parent, caption, basedir: wdir) configdialog.wdir_button.click() assert configdialog.get_config().wdir == wdir spyder-unittest-0.7.0/spyder_unittest/widgets/tests/test_confpage.py000066400000000000000000000142261466560470700261750ustar00rootroot00000000000000# -*- coding: utf-8 -*- # ----------------------------------------------------------------------------- # Copyright (c) 2023- Spyder Project Contributors # # Licensed under the terms of the MIT License # (see LICENSE.txt for details) # ----------------------------------------------------------------------------- # Standard library imports import sys import types from unittest.mock import Mock, MagicMock # Third party imports from qtpy.QtWidgets import QWidget, QMainWindow import pytest # Spyder imports from spyder.api.plugins import Plugins from spyder.api.plugin_registration.registry import PLUGIN_REGISTRY from spyder.app.cli_options import get_options from spyder.config.manager import CONF # Local imports from spyder_unittest.unittestplugin import UnitTestPlugin # ----------------------------------------------------------------------------- # # Classes and fixtures copied from spyder/plugins/preferences/tests/conftest.py class MainWindowMock(QMainWindow): register_shortcut = Mock() def __init__(self, parent): # This import assumes that an QApplication is already running, # so we can not put it at the top of the file from spyder.plugins.preferences.plugin import Preferences super().__init__(parent) self.default_style = None self.widgetlist = [] self.thirdparty_plugins = [] self.shortcut_data = [] 
self.prefs_dialog_instance = None self._APPLICATION_TOOLBARS = MagicMock() self.console = Mock() # To provide command line options for plugins that need them sys_argv = [sys.argv[0]] # Avoid options passed to pytest self._cli_options = get_options(sys_argv)[0] PLUGIN_REGISTRY.reset() PLUGIN_REGISTRY.sig_plugin_ready.connect(self.register_plugin) PLUGIN_REGISTRY.register_plugin(self, Preferences) # Load shortcuts for tests for context, name, __ in CONF.iter_shortcuts(): self.shortcut_data.append((None, context, name, None, None)) for attr in ['mem_status', 'cpu_status']: mock_attr = Mock() setattr(mock_attr, 'toolTip', lambda: '') setattr(mock_attr, 'setToolTip', lambda x: '') setattr(mock_attr, 'prefs_dialog_instance', lambda: '') setattr(self, attr, mock_attr) def register_plugin(self, plugin_name, external=False): plugin = PLUGIN_REGISTRY.get_plugin(plugin_name) plugin._register(omit_conf=True) def get_plugin(self, plugin_name, error=True): if plugin_name in PLUGIN_REGISTRY: return PLUGIN_REGISTRY.get_plugin(plugin_name) class ConfigDialogTester(QWidget): def __init__(self, parent, main_class, general_config_plugins, plugins): # This import assumes that an QApplication is already running, # so we can not put it at the top of the file from spyder.plugins.preferences.plugin import Preferences super().__init__(parent) self._main = main_class(self) if main_class else None if self._main is None: self._main = MainWindowMock(self) def register_plugin(self, plugin_name, external=False): plugin = PLUGIN_REGISTRY.get_plugin(plugin_name) plugin._register() def get_plugin(self, plugin_name, error=True): if plugin_name in PLUGIN_REGISTRY: return PLUGIN_REGISTRY.get_plugin(plugin_name) return None # Commented out because it gives the error: # A plugin with section "unittest" already exists! 
# setattr(self._main, 'register_plugin', # types.MethodType(register_plugin, self._main)) setattr(self._main, 'get_plugin', types.MethodType(get_plugin, self._main)) PLUGIN_REGISTRY.reset() PLUGIN_REGISTRY.sig_plugin_ready.connect(self._main.register_plugin) print(f'ConfigDialogTester registering {Preferences=}') PLUGIN_REGISTRY.register_plugin(self._main, Preferences) if plugins: for Plugin in plugins: if hasattr(Plugin, 'CONF_WIDGET_CLASS'): for required in (Plugin.REQUIRES or []): if required not in PLUGIN_REGISTRY: PLUGIN_REGISTRY.plugin_registry[required] = MagicMock() PLUGIN_REGISTRY.register_plugin(self._main, Plugin) else: plugin = Plugin(self._main) preferences = self._main.get_plugin(Plugins.Preferences) preferences.register_plugin_preferences(plugin) @pytest.fixture def config_dialog(qtbot, request): # mocker.patch.object(ima, 'icon', lambda x, *_: QIcon()) # Above line commented out from source because it gave an error main_class, general_config_plugins, plugins = request.param main_ref = ConfigDialogTester( None, main_class, general_config_plugins, plugins) qtbot.addWidget(main_ref) preferences = main_ref._main.get_plugin(Plugins.Preferences) preferences.open_dialog() container = preferences.get_container() dlg = container.dialog yield dlg dlg.close() # ----------------------------------------------------------------------------- # # Test for the spyder-unittest plugin @pytest.mark.parametrize( 'config_dialog', [[MainWindowMock, [], [UnitTestPlugin]]], indirect=True) def test_unittestconfigpage(config_dialog): """Test that changing "Abbreviate test names" works as expected.""" # Get reference to Preferences dialog and widget page to interact with dlg = config_dialog widget = config_dialog.get_page() # Assert default value of option is False assert widget.get_option('abbrev_test_names') is False # Toggle checkbox and assert that option value is now True widget.abbrev_box.click() dlg.apply_btn.click() assert widget.get_option('abbrev_test_names') is True # Reset options to default and check that option value is False again # Note: it is necessary to specify the section in reset_to_defaults() CONF.reset_to_defaults(section='unittest', notification=False) assert widget.get_option('abbrev_test_names') is False spyder-unittest-0.7.0/spyder_unittest/widgets/tests/test_datatree.py000066400000000000000000000256541466560470700262110ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # Copyright © 2017 Spyder Project Contributors # Licensed under the terms of the MIT License # (see LICENSE.txt for details) """Tests for datatree.py.""" # Third party imports from qtpy.QtCore import QModelIndex, QPoint, Qt from qtpy.QtGui import QBrush, QColor, QContextMenuEvent from unittest.mock import Mock import pytest # Local imports from spyder_unittest.backend.runnerbase import Category, TestResult from spyder_unittest.widgets.datatree import ( COLORS, TestDataModel, TestDataView) @pytest.fixture def view_and_model(qtbot): view = TestDataView() model = TestDataModel() # setModel() before populating testresults because setModel() does a sort view.setModel(model) res = [TestResult(Category.OK, 'status', 'foo.bar'), TestResult(Category.FAIL, 'error', 'foo.bar', 'kadoom', 0, 'crash!\nboom!', filename='ham.py', lineno=42)] model.testresults = res return view, model def test_contextMenuEvent_calls_exec(view_and_model, monkeypatch): # test that a menu is displayed when clicking on an item mock_exec = Mock() monkeypatch.setattr('spyder_unittest.widgets.datatree.QMenu.exec_', mock_exec) view, model =
view_and_model pos = view.visualRect(model.index(0, 0)).center() event = QContextMenuEvent(QContextMenuEvent.Mouse, pos) view.contextMenuEvent(event) assert mock_exec.called # test that no menu is displayed when clicking below the bottom item mock_exec.reset_mock() pos = view.visualRect(model.index(1, 0)).bottomRight() pos += QPoint(0, 1) event = QContextMenuEvent(QContextMenuEvent.Mouse, pos) view.contextMenuEvent(event) assert not mock_exec.called def test_go_to_test_definition_with_invalid_target(view_and_model, qtbot): view, model = view_and_model with qtbot.assertNotEmitted(view.sig_edit_goto): view.go_to_test_definition(model.index(0, 0)) def test_go_to_test_definition_with_valid_target(view_and_model, qtbot): view, model = view_and_model with qtbot.waitSignal(view.sig_edit_goto) as blocker: view.go_to_test_definition(model.index(1, 0)) assert blocker.args == ['ham.py', 42] def test_go_to_test_definition_with_lineno_none(view_and_model, qtbot): view, model = view_and_model res = model.testresults res[1].lineno = None model.testresults = res with qtbot.waitSignal(view.sig_edit_goto) as blocker: view.go_to_test_definition(model.index(1, 0)) assert blocker.args == ['ham.py', 0] def test_run_single_test(view_and_model, qtbot): view, model = view_and_model with qtbot.waitSignal(view.sig_single_test_run_requested) as blocker: view.run_single_test(model.index(1, 0)) assert blocker.args == ['foo.bar'] def test_make_index_canonical_with_index_in_column2(view_and_model): view, model = view_and_model index = model.index(1, 2) res = view.make_index_canonical(index) assert res == model.index(1, 0) def test_make_index_canonical_with_level2_index(view_and_model): view, model = view_and_model index = model.index(1, 0, model.index(1, 0)) res = view.make_index_canonical(index) assert res == model.index(1, 0) def test_make_index_canonical_with_invalid_index(view_and_model): view, model = view_and_model index = QModelIndex() res = view.make_index_canonical(index) assert res is None def test_build_context_menu(view_and_model): view, model = view_and_model menu = view.build_context_menu(model.index(0, 0)) assert len(menu.actions()) == 3 assert menu.actions()[0].text() == 'Expand' assert menu.actions()[1].text() == 'Go to definition' assert menu.actions()[2].text() == 'Run only this test' def test_build_context_menu_with_disabled_entries(view_and_model): view, model = view_and_model menu = view.build_context_menu(model.index(0, 0)) assert menu.actions()[0].isEnabled() == False assert menu.actions()[1].isEnabled() == False assert menu.actions()[2].isEnabled() == True def test_build_context_menu_with_enabled_entries(view_and_model): view, model = view_and_model menu = view.build_context_menu(model.index(1, 0)) assert menu.actions()[0].isEnabled() == True assert menu.actions()[1].isEnabled() == True assert menu.actions()[2].isEnabled() == True def test_build_context_menu_with_coverage_entry(view_and_model): view, model = view_and_model testresult = TestResult(Category.COVERAGE, 'coverage', 'foo') model.testresults.append(testresult) menu = view.build_context_menu(model.index(2, 0)) assert menu.actions()[0].isEnabled() == False assert menu.actions()[1].isEnabled() == False assert menu.actions()[2].isEnabled() == False def test_build_context_menu_with_expanded_entry(view_and_model): view, model = view_and_model view.expand(model.index(1, 0)) menu = view.build_context_menu(model.index(1, 0)) assert menu.actions()[0].text() == 'Collapse' assert menu.actions()[0].isEnabled() == True def 
test_testdatamodel_using_qtmodeltester(qtmodeltester): model = TestDataModel() res = [TestResult(Category.OK, 'status', 'foo.bar'), TestResult(Category.FAIL, 'error', 'foo.bar', 'kadoom', 0, 'crash!\nboom!')] model.testresults = res qtmodeltester.check(model) @pytest.mark.parametrize('config, result', [(False, 'foo.bar'), (True, 'f.bar')]) def test_testdatamodel_shows_abbreviated_name_in_table(qtbot, config, result): model = TestDataModel() old_config = model.get_conf('abbrev_test_names') model.set_conf('abbrev_test_names', config) res = TestResult(Category.OK, 'status', 'foo.bar', '', 0, '') model.testresults = [res] index = model.index(0, 1) assert model.data(index, Qt.DisplayRole) == result model.set_conf('abbrev_test_names', old_config) def test_testdatamodel_shows_full_name_in_tooltip(qtbot): model = TestDataModel() res = TestResult(Category.OK, 'status', 'foo.bar', '', 0, '') model.testresults = [res] index = model.index(0, 1) assert model.data(index, Qt.ToolTipRole) == 'foo.bar' def test_testdatamodel_shows_time(qtmodeltester): model = TestDataModel() res = TestResult(Category.OK, 'status', 'foo.bar', time=0.0012345) model.testresults = [res] index = model.index(0, 3) assert model.data(index, Qt.DisplayRole) == '1.23' assert model.data(index, Qt.TextAlignmentRole) == Qt.AlignRight def test_testdatamodel_shows_time_when_zero(qtmodeltester): model = TestDataModel() res = TestResult(Category.OK, 'status', 'foo.bar', time=0) model.testresults = [res] assert model.data(model.index(0, 3), Qt.DisplayRole) == '0.00' def test_testdatamodel_shows_time_when_blank(qtmodeltester): model = TestDataModel() res = TestResult(Category.OK, 'status', 'foo.bar') model.testresults = [res] assert model.data(model.index(0, 3), Qt.DisplayRole) == '' def test_testdatamodel_data_background(): model = TestDataModel() res = [TestResult(Category.OK, 'status', 'foo.bar'), TestResult(Category.FAIL, 'error', 'foo.bar', 'kadoom')] model.testresults = res index = model.index(0, 0) expected = QBrush(QColor(COLORS[Category.OK])) assert model.data(index, Qt.BackgroundRole) == expected index = model.index(1, 2) expected = QBrush(QColor(COLORS[Category.FAIL])) assert model.data(index, Qt.BackgroundRole) == expected def test_testdatamodel_data_userrole(): model = TestDataModel() res = [TestResult(Category.OK, 'status', 'foo.bar', filename='somefile', lineno=42)] model.testresults = res index = model.index(0, 0) assert model.data(index, Qt.UserRole) == ('somefile', 42) def test_testdatamodel_add_tests(qtbot): def check_args1(parent, begin, end): return not parent.isValid() and begin == 0 and end == 0 def check_args2(parent, begin, end): return not parent.isValid() and begin == 1 and end == 1 model = TestDataModel() assert model.testresults == [] result1 = TestResult(Category.OK, 'status', 'foo.bar') with qtbot.waitSignals([model.rowsInserted, model.sig_summary], check_params_cbs=[check_args1, None], raising=True): model.add_testresults([result1]) assert model.testresults == [result1] result2 = TestResult(Category.FAIL, 'error', 'foo.bar', 'kadoom') with qtbot.waitSignals([model.rowsInserted, model.sig_summary], check_params_cbs=[check_args2, None], raising=True): model.add_testresults([result2]) assert model.testresults == [result1, result2] def test_testdatamodel_replace_tests(qtbot): def check_args(topLeft, bottomRight, *args): return (topLeft.row() == 0 and topLeft.column() == 0 and not topLeft.parent().isValid() and bottomRight.row() == 0 and bottomRight.column() == 3 and not bottomRight.parent().isValid()) model = 
TestDataModel() result1 = TestResult(Category.OK, 'status', 'foo.bar') model.testresults = [result1] result2 = TestResult(Category.FAIL, 'error', 'foo.bar', 'kadoom') with qtbot.waitSignals([model.dataChanged, model.sig_summary], check_params_cbs=[check_args, None], raising=True): model.update_testresults([result2]) assert model.testresults == [result2] STANDARD_TESTRESULTS = [ TestResult(Category.OK, 'status', 'foo.bar', time=2), TestResult(Category.FAIL, 'failure', 'fu.baz', 'kaboom',time=1), TestResult(Category.FAIL, 'error', 'fu.bar', 'boom')] def test_testdatamodel_sort_by_status_ascending(qtbot): model = TestDataModel() model.testresults = STANDARD_TESTRESULTS[:] with qtbot.waitSignal(model.modelReset): model.sort(0, Qt.AscendingOrder) expected = [STANDARD_TESTRESULTS[k] for k in [2, 1, 0]] assert model.testresults == expected def test_testdatamodel_sort_by_status_descending(): model = TestDataModel() model.testresults = STANDARD_TESTRESULTS[:] model.sort(0, Qt.DescendingOrder) expected = [STANDARD_TESTRESULTS[k] for k in [0, 1, 2]] assert model.testresults == expected def test_testdatamodel_sort_by_name(): model = TestDataModel() model.testresults = STANDARD_TESTRESULTS[:] model.sort(1, Qt.AscendingOrder) expected = [STANDARD_TESTRESULTS[k] for k in [0, 2, 1]] assert model.testresults == expected def test_testdatamodel_sort_by_message(): model = TestDataModel() model.testresults = STANDARD_TESTRESULTS[:] model.sort(2, Qt.AscendingOrder) expected = [STANDARD_TESTRESULTS[k] for k in [0, 2, 1]] assert model.testresults == expected def test_testdatamodel_sort_by_time(): model = TestDataModel() model.testresults = STANDARD_TESTRESULTS[:] model.sort(3, Qt.AscendingOrder) expected = [STANDARD_TESTRESULTS[k] for k in [2, 1, 0]] assert model.testresults == expected spyder-unittest-0.7.0/spyder_unittest/widgets/tests/test_unittestgui.py000066400000000000000000000346271466560470700270060ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # Copyright © 2013 Spyder Project Contributors # Licensed under the terms of the MIT License # (see LICENSE.txt for details) """Tests for unittestgui.py.""" # Standard library imports import os import sys from unittest.mock import Mock, patch # Third party imports from qtpy.QtCore import Qt, QProcess import pytest # Local imports from spyder_unittest.backend.runnerbase import (Category, TestResult, COV_TEST_NAME) from spyder_unittest.widgets.configdialog import Config from spyder_unittest.widgets.unittestgui import UnitTestWidget @pytest.fixture def widget(qtbot): unittest_widget = UnitTestWidget('testwidget', None, None) unittest_widget.get_conf( 'executable', section='main_interpreter', default=sys.executable) unittest_widget.setup() qtbot.addWidget(unittest_widget) return unittest_widget def use_mock_model(widget): """Replace data model in unit test widget with mock model.""" widget.testdatamodel = Mock() widget.testdatamodel.summary = lambda: 'message' widget.testdatamodel.testresults = [] def test_unittestwidget_forwards_sig_edit_goto(qtbot, widget): with qtbot.waitSignal(widget.sig_edit_goto) as blocker: widget.testdataview.sig_edit_goto.emit('ham', 42) assert blocker.args == ['ham', 42] def test_unittestwidget_set_config_emits_newconfig(qtbot, widget): config = Config(wdir=os.getcwd(), framework='unittest', coverage=False) with qtbot.waitSignal(widget.sig_newconfig) as blocker: widget.config = config assert blocker.args == [config] assert widget.config == config def test_unittestwidget_set_config_does_not_emit_when_invalid(qtbot, widget): config = 
Config(wdir=os.getcwd(), framework=None, coverage=False) with qtbot.assertNotEmitted(widget.sig_newconfig): widget.config = config assert widget.config == config def test_unittestwidget_config_with_unknown_framework_invalid(widget): """Check that if the framework in the config is not known, config_is_valid() returns False""" config = Config( wdir=os.getcwd(), framework='unknown framework', coverage=False) assert widget.config_is_valid(config) == False def test_unittestwidget_process_finished_updates_results(widget): results = [TestResult(Category.OK, 'ok', 'hammodule.spam')] widget.process_finished(results, 'output', True) assert widget.testdatamodel.testresults == results def test_unittestwidget_replace_pending_with_not_run(widget): use_mock_model(widget) results = [TestResult(Category.PENDING, 'pending', 'hammodule.eggs'), TestResult(Category.OK, 'ok', 'hammodule.spam')] widget.testdatamodel.testresults = results widget.replace_pending_with_not_run() expected = [TestResult(Category.SKIP, 'not run', 'hammodule.eggs')] widget.testdatamodel.update_testresults.assert_called_once_with(expected) def test_unittestwidget_tests_collected(widget): use_mock_model(widget) details = ['hammodule.spam', 'hammodule.eggs'] widget.tests_collected(details) results = [TestResult(Category.PENDING, 'pending', 'hammodule.spam'), TestResult(Category.PENDING, 'pending', 'hammodule.eggs')] widget.testdatamodel.add_testresults.assert_called_once_with(results) def test_unittestwidget_tests_started(widget): use_mock_model(widget) details = ['hammodule.spam'] results = [TestResult(Category.PENDING, 'pending', 'hammodule.spam', 'running')] widget.tests_started(details) widget.testdatamodel.update_testresults.assert_called_once_with(results) def test_unittestwidget_tests_collect_error(widget): use_mock_model(widget) names_plus_msg = [('hammodule.spam', 'msg')] results = [TestResult(Category.FAIL, 'failure', 'hammodule.spam', 'collection error', extra_text='msg')] widget.tests_collect_error(names_plus_msg) widget.testdatamodel.add_testresults.assert_called_once_with(results) def test_unittestwidget_tests_yield_results(widget): use_mock_model(widget) results = [TestResult(Category.OK, 'ok', 'hammodule.spam')] widget.tests_yield_result(results) widget.testdatamodel.update_testresults.assert_called_once_with(results) def test_unittestwidget_set_message(widget): widget.status_label = Mock() widget.set_status_label('xxx') widget.status_label.setText.assert_called_once_with('xxx') def test_run_tests_starts_testrunner(widget): mockRunner = Mock() widget.framework_registry.create_runner = Mock(return_value=mockRunner) config = Config(wdir=None, framework='ham', coverage=False) widget.run_tests(config) assert widget.framework_registry.create_runner.call_count == 1 assert widget.framework_registry.create_runner.call_args[0][0] == 'ham' assert mockRunner.start.call_count == 1 def test_run_tests_with_pre_test_hook_returning_true(widget): mockRunner = Mock() widget.framework_registry.create_runner = Mock(return_value=mockRunner) widget.pre_test_hook = Mock(return_value=True) widget.run_tests(Config()) assert widget.pre_test_hook.call_count == 1 assert mockRunner.start.call_count == 1 def test_run_tests_with_pre_test_hook_returning_false(widget): mockRunner = Mock() widget.framework_registry.create_runner = Mock(return_value=mockRunner) widget.pre_test_hook = Mock(return_value=False) widget.run_tests(Config()) assert widget.pre_test_hook.call_count == 1 assert mockRunner.start.call_count == 0 @pytest.mark.parametrize('results,label', 
[([TestResult(Category.OK, 'ok', '')], '0 tests failed, 1 passed'), ([], 'No results to show.'), ([TestResult(Category.OK, 'ok', ''), TestResult(Category.COVERAGE, '90%', COV_TEST_NAME)], '0 tests failed, 1 passed, 90% coverage')]) def test_unittestwidget_process_finished_updates_status_label(widget, results, label): widget.process_finished(results, 'output', True) assert widget.status_label.text() == '{}'.format(label) def test_unittestwidget_process_finished_abnormally_status_label(widget): widget.process_finished([], 'output', False) expected_text = '{}'.format('Test process exited abnormally') assert widget.status_label.text() == expected_text def test_unittestwidget_handles_sig_single_test_run_requested(widget): with patch.object(widget, 'run_tests') as mock_run_tests: widget.testdataview.sig_single_test_run_requested.emit('testname') mock_run_tests.assert_called_once_with(single_test='testname') @pytest.mark.parametrize('framework', ['pytest', 'nose2']) @pytest.mark.parametrize('alltests', [True, False]) def test_run_tests_and_display_results(qtbot, widget, tmpdir, monkeypatch, framework, alltests): """Basic integration test.""" os.chdir(tmpdir.strpath) testfilename = tmpdir.join('test_foo.py').strpath with open(testfilename, 'w') as f: f.write("def test_fail(): assert 1+1 == 3\n" "def test_ok(): assert 1+1 == 2\n") MockQMessageBox = Mock() monkeypatch.setattr('spyder_unittest.widgets.unittestgui.QMessageBox', MockQMessageBox) config = Config(wdir=tmpdir.strpath, framework=framework, coverage=False) with qtbot.waitSignal(widget.sig_finished, timeout=10000, raising=True): if alltests: widget.run_tests(config) else: widget.run_tests(config, single_test='test_foo.test_fail') MockQMessageBox.assert_not_called() model = widget.testdatamodel assert model.rowCount() == (2 if alltests else 1) assert model.index(0, 0).data( Qt.DisplayRole) == 'failure' if framework == 'nose2' else 'failed' assert model.index(0, 1).data(Qt.DisplayRole) == 'test_foo.test_fail' assert model.index(0, 1).data(Qt.ToolTipRole) == 'test_foo.test_fail' if alltests: assert model.index(1, 0).data( Qt.DisplayRole) == 'ok' if framework == 'nose2' else 'passed' assert model.index(1, 1).data(Qt.DisplayRole) == 'test_foo.test_ok' assert model.index(1, 1).data(Qt.ToolTipRole) == 'test_foo.test_ok' assert model.index(1, 2).data(Qt.DisplayRole) == '' @pytest.mark.parametrize('alltests', [True, False]) def test_run_tests_using_unittest_and_display_results( qtbot, widget, tmpdir, monkeypatch, alltests): """Basic check.""" os.chdir(tmpdir.strpath) testfilename = tmpdir.join('test_foo.py').strpath with open(testfilename, 'w') as f: f.write("import unittest\n" "class MyTest(unittest.TestCase):\n" " def test_ok(self): self.assertEqual(1+1, 2)\n" " def test_fail(self): self.assertEqual(1+1, 3)\n") MockQMessageBox = Mock() monkeypatch.setattr('spyder_unittest.widgets.unittestgui.QMessageBox', MockQMessageBox) config = Config(wdir=tmpdir.strpath, framework='unittest', coverage=False) with qtbot.waitSignal(widget.sig_finished, timeout=10000, raising=True): if alltests: widget.run_tests(config) else: widget.run_tests(config, single_test='test_foo.MyTest.test_fail') MockQMessageBox.assert_not_called() model = widget.testdatamodel assert model.rowCount() == (2 if alltests else 1) assert model.index(0, 0).data(Qt.DisplayRole) == 'failure' assert model.index(0, 1).data(Qt.DisplayRole) == 'test_foo.MyTest.test_fail' assert model.index(0, 1).data(Qt.ToolTipRole) == 'test_foo.MyTest.test_fail' if alltests: assert model.index(1, 
0).data(Qt.DisplayRole) == 'success' assert model.index(1, 1).data(Qt.DisplayRole) == 'test_foo.MyTest.test_ok' assert model.index(1, 1).data(Qt.ToolTipRole) == 'test_foo.MyTest.test_ok' assert model.index(1, 2).data(Qt.DisplayRole) == '' def test_run_tests_with_print_using_unittest_and_display_results( qtbot, widget, tmpdir, monkeypatch): """ Run a failing test which prints to stderr using unittest and check that it is displayed as a failing test. Regression test for spyder-ide/spyder-unittest#160. """ os.chdir(tmpdir.strpath) testfilename = tmpdir.join('test_foo.py').strpath with open(testfilename, 'w') as f: f.write("import sys\n" "import unittest\n" "class MyTest(unittest.TestCase):\n" " def test_fail(self):\n" " print('text', file=sys.stderr)\n" " self.assertEqual(1+1, 3)\n" " def test_ok(self): self.assertEqual(1+1, 2)\n") MockQMessageBox = Mock() monkeypatch.setattr('spyder_unittest.widgets.unittestgui.QMessageBox', MockQMessageBox) config = Config(wdir=tmpdir.strpath, framework='unittest', coverage=False) with qtbot.waitSignal(widget.sig_finished, timeout=10000, raising=True): widget.run_tests(config) MockQMessageBox.assert_not_called() model = widget.testdatamodel assert model.rowCount() == 2 assert model.index(0, 0).data(Qt.DisplayRole) == 'failure' assert model.index(1, 0).data(Qt.DisplayRole) == 'success' @pytest.mark.parametrize('framework', ['unittest', 'pytest', 'nose2']) def test_run_with_no_tests_discovered_and_display_results( qtbot, widget, tmpdir, monkeypatch, framework): """Basic integration test.""" os.chdir(tmpdir.strpath) MockQMessageBox = Mock() monkeypatch.setattr('spyder_unittest.widgets.unittestgui.QMessageBox', MockQMessageBox) config = Config(wdir=tmpdir.strpath, framework=framework, coverage=False) with qtbot.waitSignal(widget.sig_finished, timeout=10000, raising=True): widget.run_tests(config) MockQMessageBox.assert_not_called() model = widget.testdatamodel assert model.rowCount() == 0 assert widget.status_label.text() == 'No results to show.'
def test_stop_running_tests_before_testresult_is_received(qtbot, widget, tmpdir): os.chdir(tmpdir.strpath) testfilename = tmpdir.join('test_foo.py').strpath with open(testfilename, 'w') as f: f.write("import unittest\n" "import time\n" "class MyTest(unittest.TestCase):\n" " def test_ok(self): \n" " time.sleep(3)\n" " self.assertTrue(True)\n") config = Config(wdir=tmpdir.strpath, framework='unittest', coverage=False) widget.run_tests(config) qtbot.waitUntil(lambda: widget.testrunner.process.state() == QProcess.Running) widget.testrunner.stop_if_running() assert widget.testdatamodel.rowCount() == 0 assert widget.status_label.text() == '' def test_show_versions(monkeypatch, widget): mockQMessageBox = Mock() monkeypatch.setattr('spyder_unittest.widgets.unittestgui.QMessageBox', mockQMessageBox) versions = """{ 'nose': {'available': False}, 'pytest': {'available': True, 'version': '1.2.3', 'plugins': {'plugin1': '4.5.6', 'plugin2': '7.8.9'}}, 'unittest': {'available': True, 'version': '1.2.3', 'plugins': {}} }""" mock_process = Mock(stdout=versions) monkeypatch.setattr('spyder_unittest.widgets.unittestgui.subprocess.run', lambda *args, **kwargs: mock_process) widget.show_versions() expected = ('Versions of frameworks and their installed plugins:\n\n' 'nose: not available\n\npytest 1.2.3\n plugin1 4.5.6\n ' 'plugin2 7.8.9\n\nunittest 1.2.3') mockQMessageBox.information.assert_called_with(widget, 'Dependencies', expected) @pytest.mark.parametrize('use_cached, equal, expected', [(True, True, 'cached'), (True, False, 'new'), (False, True, 'new'), (False, False, 'new')]) def test_get_versions(monkeypatch, widget, use_cached, equal, expected): widget.dependencies = 'cached' widget.environment_for_dependencies = 'old_env' interpreter = 'old_env' if equal else 'new_env' widget.get_conf = Mock(return_value=interpreter) mock_process = Mock(stdout='"new"') monkeypatch.setattr('spyder_unittest.widgets.unittestgui.subprocess.run', lambda *args, **kwargs: mock_process) result = widget.get_versions(use_cached) assert result == expected spyder-unittest-0.7.0/spyder_unittest/widgets/unittestgui.py000066400000000000000000000446631466560470700246060ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # Copyright © 2013 Spyder Project Contributors # Licensed under the terms of the MIT License # (see LICENSE.txt for details) """Unit Testing widget.""" # Standard library imports import ast import copy import os.path as osp import subprocess import sys # Third party imports from qtpy.QtCore import Signal from qtpy.QtWidgets import QLabel, QMessageBox, QVBoxLayout from spyder.api.widgets.main_widget import PluginMainWidget from spyder.config.base import get_conf_path, get_translation from spyder.utils import icon_manager as ima from spyder.plugins.variableexplorer.widgets.texteditor import TextEditor # Local imports from spyder_unittest.backend.frameworkregistry import FrameworkRegistry from spyder_unittest.backend.nose2runner import Nose2Runner from spyder_unittest.backend.pytestrunner import PyTestRunner from spyder_unittest.backend.runnerbase import Category, TestResult from spyder_unittest.backend.unittestrunner import UnittestRunner from spyder_unittest.widgets.configdialog import Config, ask_for_config from spyder_unittest.widgets.datatree import TestDataModel, TestDataView # This is needed for testing this module as a stand alone script try: _ = get_translation('spyder_unittest') except KeyError: import gettext _ = gettext.gettext # Supported testing frameworks FRAMEWORKS = {Nose2Runner, PyTestRunner, 
              UnittestRunner}


class UnitTestWidgetActions:
    RunTests = 'run_tests'
    Config = 'config'
    ShowLog = 'show_log'
    CollapseAll = 'collapse_all'
    ExpandAll = 'expand_all'
    ShowDependencies = 'show_dependencies'


class UnitTestWidgetButtons:
    Start = 'start'


class UnitTestWidgetToolbar:
    LeftStretcher = 'left_stretcher'
    StatusLabel = 'status_label'
    RightStretcher = 'right_stretcher'


class UnitTestWidget(PluginMainWidget):
    """
    Unit testing widget.

    Attributes
    ----------
    config : Config or None
        Configuration for running tests, or `None` if not set.
    default_wdir : str
        Default choice of working directory.
    dependencies : dict or None
        Cached dependencies, as returned by `self.get_versions()`.
    environment_for_dependencies : str or None
        Python interpreter for which `self.dependencies` is valid.
    framework_registry : FrameworkRegistry
        Registry of supported testing frameworks.
    pre_test_hook : function returning bool or None
        If set, contains function to run before running tests; abort the
        test run if hook returns False.
    pythonpath : list of str
        Directories to be added to the Python path when running tests.
    testrunner : TestRunner or None
        Object associated with the current test process, or `None` if no
        test process is running at the moment.

    Signals
    -------
    sig_finished: Emitted when plugin finishes processing tests.
    sig_newconfig(Config): Emitted when test config is changed.
        Argument is new config, which is always valid.
    sig_edit_goto(str, int): Emitted if editor should go to some position.
        Arguments are file name and line number (zero-based).
    """

    CONF_SECTION = 'unittest'
    VERSION = '0.0.1'

    sig_finished = Signal()
    sig_newconfig = Signal(Config)
    sig_edit_goto = Signal(str, int)

    def __init__(self, name, plugin, parent):
        """Unit testing widget."""
        super().__init__(name, plugin, parent)

        self.config = None
        self.default_wdir = None
        self.dependencies = None
        self.environment_for_dependencies = None
        self.output = None
        self.pre_test_hook = None
        self.pythonpath = None
        self.testrunner = None

        self.testdataview = TestDataView(self)
        self.testdatamodel = TestDataModel(self)
        self.testdataview.setModel(self.testdatamodel)
        self.testdataview.sig_edit_goto.connect(self.sig_edit_goto)
        self.testdataview.sig_single_test_run_requested.connect(
            self.run_single_test)
        self.testdatamodel.sig_summary.connect(self.set_status_label)

        self.framework_registry = FrameworkRegistry()
        for runner in FRAMEWORKS:
            self.framework_registry.register(runner)

        layout = QVBoxLayout()
        layout.addWidget(self.testdataview)
        self.setLayout(layout)

    # --- Mandatory PluginMainWidget methods ----------------------------------

    def get_title(self):
        """
        Return the title that will be displayed on dockwidget or window title.
        """
        return _('Unit testing')

    def setup(self):
        """
        Create widget actions, add to menu and other setup requirements.
""" # Options menu menu = self.get_options_menu() config_action = self.create_action( UnitTestWidgetActions.Config, text=_('Configure ...'), icon=self.create_icon('configure'), triggered=self.configure) self.add_item_to_menu(config_action, menu) self.show_log_action = self.create_action( UnitTestWidgetActions.ShowLog, text=_('Show output'), icon=self.create_icon('log'), triggered=self.show_log) self.add_item_to_menu(self.show_log_action, menu) collapse_all_action = self.create_action( UnitTestWidgetActions.CollapseAll, text=_('Collapse all'), icon=self.create_icon('collapse'), triggered=self.testdataview.collapseAll) self.add_item_to_menu(collapse_all_action, menu) expand_all_action = self.create_action( UnitTestWidgetActions.ExpandAll, text=_('Expand all'), icon=self.create_icon('expand'), triggered=self.testdataview.expandAll) self.add_item_to_menu(expand_all_action, menu) show_dependencies_action = self.create_action( UnitTestWidgetActions.ShowDependencies, text=_('Dependencies'), icon=self.create_icon('advanced'), triggered=self.show_versions) self.add_item_to_menu(show_dependencies_action, menu) # Other widgets in the main toolbar toolbar = self.get_main_toolbar() self.start_button = self.create_toolbutton(UnitTestWidgetButtons.Start) self.set_running_state(False) self.add_item_to_toolbar(self.start_button, toolbar=toolbar) self.add_item_to_toolbar( self.create_stretcher(id_=UnitTestWidgetToolbar.LeftStretcher), toolbar=toolbar) self.status_label = QLabel('') self.status_label.ID = UnitTestWidgetToolbar.StatusLabel self.add_item_to_toolbar(self.status_label, toolbar=toolbar) self.add_item_to_toolbar( self.create_stretcher(id_=UnitTestWidgetToolbar.RightStretcher), toolbar=toolbar) def update_actions(self): """ Update the state of exposed actions. Exposed actions are actions created by the self.create_action method. """ pass # --- Optional PluginMainWidget methods ----------------------------------- def get_focus_widget(self): """ Return the test data view as the widget to give focus to. Returns ------- QWidget QWidget to give focus to. """ return self.testdataview # --- UnitTestWidget methods ---------------------------------------------- @property def config(self): """Return current test configuration.""" return self._config @config.setter def config(self, new_config): """Set test configuration and emit sig_newconfig if valid.""" self._config = new_config if self.config_is_valid(): self.sig_newconfig.emit(new_config) def set_config_without_emit(self, new_config): """Set test configuration but do not emit any signal.""" self._config = new_config def show_log(self): """Show output of testing process.""" if self.output: te = TextEditor( self.output, title=_("Unit testing output"), readonly=True, parent=self) te.show() te.exec_() def get_versions(self, use_cached): """ Return versions of frameworks and their plugins. If `use_cached` is `True` and `self.environment_for_dependencies` equals the Python interpreter set by the user in the Preferences, then return the cached information in `self.dependencies`. Otherwise, run the `print_versions.py` script in the target environment to retrieve the dependencyy information. Store that information in `self.dependencies` and return it. Parameters ---------- use_cached : bool Whether to use the cached information, if possible. 

        Returns
        -------
        dict
            Dependency information as returned by `print_versions.py`
        """
        executable = self.get_conf('executable', section='main_interpreter')
        if use_cached and self.environment_for_dependencies == executable:
            return self.dependencies

        script = osp.join(osp.dirname(__file__), osp.pardir,
                          'backend', 'workers', 'print_versions.py')
        process = subprocess.run([executable, script],
                                 capture_output=True, text=True)
        self.dependencies = ast.literal_eval(process.stdout)
        self.environment_for_dependencies = executable
        return self.dependencies

    def show_versions(self):
        """Show versions of frameworks and their plugins."""
        all_info = self.get_versions(use_cached=False)
        versions = [_('Versions of frameworks and their installed plugins:')]
        for name, info in all_info.items():
            if not info['available']:
                versions.append('{}: {}'.format(name, _('not available')))
            else:
                version = f'{name} {info["version"]}'
                plugins = [f' {name} {version}'
                           for name, version in info['plugins'].items()]
                versions.append('\n'.join([version] + plugins))
        QMessageBox.information(self, _('Dependencies'),
                                '\n\n'.join(versions))

    def configure(self):
        """Configure tests."""
        if self.config:
            oldconfig = self.config
        else:
            oldconfig = Config(wdir=self.default_wdir)
        frameworks = self.framework_registry.frameworks
        versions = self.get_versions(use_cached=True)
        config = ask_for_config(frameworks, oldconfig, versions, parent=self)
        if config:
            self.config = config

    def config_is_valid(self, config=None):
        """
        Return whether configuration for running tests is valid.

        Parameters
        ----------
        config : Config or None
            configuration for unit tests. If None, use `self.config`.
        """
        if config is None:
            config = self.config
        return (config and config.framework
                and config.framework in self.framework_registry.frameworks
                and osp.isdir(config.wdir))

    def maybe_configure_and_start(self):
        """
        Ask for configuration if necessary and then run tests.

        If the current test configuration is not valid (or not set), then
        ask the user to configure. Then run the tests.
        """
        if not self.config_is_valid():
            self.configure()
        if self.config_is_valid():
            self.run_tests()

    def run_tests(self, config=None, single_test=None):
        """
        Run unit tests.

        First, run `self.pre_test_hook` if it is set, and abort if its return
        value is `False`. Then, run the unit tests.

        If `single_test` is not None, then only run that test.

        The process's output is consumed by `read_output()`.
        When the process finishes, the `sig_finished` signal is emitted.

        Parameters
        ----------
        config : Config or None
            configuration for unit tests. If None, use `self.config`.
            In either case, configuration should be valid.
        single_test : str or None
            If None, run all tests; otherwise, it is the name of the only
            test to be run.
""" if self.pre_test_hook: if self.pre_test_hook() is False: return if config is None: config = self.config pythonpath = self.pythonpath self.testdatamodel.testresults = [] self.testdetails = [] tempfilename = get_conf_path('unittest.results') self.testrunner = self.framework_registry.create_runner( config.framework, self, tempfilename) self.testrunner.sig_finished.connect(self.process_finished) self.testrunner.sig_collected.connect(self.tests_collected) self.testrunner.sig_collecterror.connect(self.tests_collect_error) self.testrunner.sig_starttest.connect(self.tests_started) self.testrunner.sig_testresult.connect(self.tests_yield_result) self.testrunner.sig_stop.connect(self.tests_stopped) cov_path = self.get_conf('current_project_path', default='None', section='project_explorer') # config returns 'None' as a string rather than None cov_path = config.wdir if cov_path == 'None' else cov_path executable = self.get_conf('executable', section='main_interpreter') try: self.testrunner.start( config, cov_path, executable, pythonpath, single_test) except RuntimeError: QMessageBox.critical(self, _("Error"), _("Process failed to start")) else: self.set_running_state(True) self.set_status_label(_('Running tests ...')) def set_running_state(self, state): """ Change start/stop button according to whether tests are running. If tests are running, then display a stop button, otherwise display a start button. Parameters ---------- state : bool Set to True if tests are running. """ button = self.start_button try: button.clicked.disconnect() except TypeError: # raised if not connected to any handler pass if state: button.setIcon(ima.icon('stop')) button.setText(_('Stop')) button.setToolTip(_('Stop current test process')) if self.testrunner: button.clicked.connect(self.testrunner.stop_if_running) else: button.setIcon(ima.icon('run')) button.setText(_("Run tests")) button.setToolTip(_('Run unit tests')) button.clicked.connect( lambda checked: self.maybe_configure_and_start()) def process_finished(self, testresults, output, normal_exit): """ Called when unit test process finished. This function collects and shows the test results and output. Parameters ---------- testresults : list of TestResult Test results reported when the test process finished. output : str Output from the test process. normal_exit : bool Whether test process exited normally. 
""" self.output = output self.set_running_state(False) self.testrunner = None self.show_log_action.setEnabled(bool(output)) self.testdatamodel.add_testresults(testresults) self.replace_pending_with_not_run() self.sig_finished.emit() if not normal_exit: self.set_status_label(_('Test process exited abnormally')) def replace_pending_with_not_run(self): """Change status of pending tests to 'not run''.""" new_results = [] for res in self.testdatamodel.testresults: if res.category == Category.PENDING: new_res = copy.copy(res) new_res.category = Category.SKIP new_res.status = _('not run') new_results.append(new_res) if new_results: self.testdatamodel.update_testresults(new_results) def tests_collected(self, testnames): """Called when tests are collected.""" testresults = [TestResult(Category.PENDING, _('pending'), name) for name in testnames] self.testdatamodel.add_testresults(testresults) def tests_started(self, testnames): """Called when tests are about to be run.""" testresults = [TestResult(Category.PENDING, _('pending'), name, message=_('running')) for name in testnames] self.testdatamodel.update_testresults(testresults) def tests_collect_error(self, testnames_plus_msg): """Called when errors are encountered during collection.""" testresults = [TestResult(Category.FAIL, _('failure'), name, message=_('collection error'), extra_text=msg) for name, msg in testnames_plus_msg] self.testdatamodel.add_testresults(testresults) def tests_yield_result(self, testresults): """Called when test results are received.""" self.testdatamodel.update_testresults(testresults) def tests_stopped(self): """Called when tests are stopped""" self.status_label.setText('') def set_status_label(self, msg): """ Set status label to the specified message. Arguments --------- msg: str """ self.status_label.setText('{}'.format(msg)) def run_single_test(self, test_name: str) -> None: """ Run a single test with the given name. """ self.run_tests(single_test=test_name) def test(): """ Run widget test. Show the unittest widgets, configured so that our own tests are run when the user clicks "Run tests". """ from spyder.utils.qthelpers import qapplication app = qapplication() widget = UnitTestWidget(None) # set wdir to .../spyder_unittest wdir = osp.abspath(osp.join(osp.dirname(__file__), osp.pardir)) widget.config = Config('pytest', wdir) # add wdir's parent to python path, so that `import spyder_unittest` works rootdir = osp.abspath(osp.join(wdir, osp.pardir)) widget.pythonpath = [rootdir] widget.resize(800, 600) widget.show() sys.exit(app.exec_()) if __name__ == '__main__': test()