pax_global_header 0000666 0000000 0000000 00000000064 14172331455 0014517 g ustar 00root root 0000000 0000000 52 comment=3a2be20177ec6e396d23eecb68d38d410c2ef7a5
unittest-xml-reporting-3.2.0/ 0000775 0000000 0000000 00000000000 14172331455 0016205 5 ustar 00root root 0000000 0000000 unittest-xml-reporting-3.2.0/.coveragerc 0000664 0000000 0000000 00000000066 14172331455 0020330 0 ustar 00root root 0000000 0000000 [report]
include =
setup.py
tests/*
xmlrunner/*
unittest-xml-reporting-3.2.0/.github/ 0000775 0000000 0000000 00000000000 14172331455 0017545 5 ustar 00root root 0000000 0000000 unittest-xml-reporting-3.2.0/.github/workflows/ 0000775 0000000 0000000 00000000000 14172331455 0021602 5 ustar 00root root 0000000 0000000 unittest-xml-reporting-3.2.0/.github/workflows/tests.yml 0000664 0000000 0000000 00000002656 14172331455 0023500 0 ustar 00root root 0000000 0000000 name: Tests
on:
push:
branches:
- master
pull_request:
jobs:
build:
runs-on: ubuntu-latest
strategy:
matrix:
include:
- python-version: 3.7
toxenv: py37
- python-version: 3.8
toxenv: py38
- python-version: 3.8
toxenv: py38-djangolts
- python-version: 3.8
toxenv: py38-djangocurr
- python-version: 3.8
toxenv: py38-quality
- python-version: 3.9
toxenv: py39
- python-version: "3.10"
toxenv: py310
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Setup Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Before Install
run: |
python --version
uname -a
lsb_release -a
- name: Install
env:
TOXENV: ${{ matrix.toxenv }}
run: |
pip install tox-gh-actions codecov coveralls
pip --version
tox --version
- name: Script
run: |
tox -v
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }}
- name: After Failure
if: ${{ failure() }}
run: |
more .tox/log/* | cat
more .tox/*/log/* | cat
unittest-xml-reporting-3.2.0/.gitignore 0000664 0000000 0000000 00000000205 14172331455 0020172 0 ustar 00root root 0000000 0000000 # Python bytecode
*.pyc
# Build directory
build/*
dist/*
# Egg info directory
*.egg-info
# tox + coverage
.tox
.coverage
htmlcov/
unittest-xml-reporting-3.2.0/.landscape.yml 0000664 0000000 0000000 00000000176 14172331455 0020744 0 ustar 00root root 0000000 0000000 doc-warnings: true
test-warnings: false
strictness: veryhigh
max-line-length: 80
autodetect: true
python-targets:
- 2
- 3
unittest-xml-reporting-3.2.0/LICENSE 0000664 0000000 0000000 00000002774 14172331455 0017224 0 ustar 00root root 0000000 0000000 Copyright (c) 2008-2013, Daniel Fernandes Martins
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
The views and conclusions contained in the software and documentation are those
of the authors and should not be interpreted as representing official policies,
either expressed or implied, of the FreeBSD Project. unittest-xml-reporting-3.2.0/MANIFEST.in 0000664 0000000 0000000 00000000041 14172331455 0017736 0 ustar 00root root 0000000 0000000 include README.md
include LICENSE unittest-xml-reporting-3.2.0/Makefile 0000664 0000000 0000000 00000001256 14172331455 0017651 0 ustar 00root root 0000000 0000000
build/tox/bin:
python3 -m venv build/tox
build/tox/bin/pip install tox
build/publish/bin:
python3 -m venv build/publish
build/publish/bin/pip install wheel twine
checkversion:
git log -1 --oneline | grep -q "Bump version" || (echo "DID NOT DO VERSION BUMP"; exit 1)
git show-ref --tags | grep -q $$(git log -1 --pretty=%H) || (echo "DID NOT TAG VERSION"; exit 1)
dist: checkversion build/publish/bin
build/publish/bin/python setup.py sdist
build/publish/bin/python setup.py bdist_wheel
publish: dist/ build/publish/bin
build/publish/bin/twine upload dist/*
test: build/tox/bin
build/tox/bin/tox
clean:
rm -rf build/ dist/
.PHONY: checkversion dist publish clean test
unittest-xml-reporting-3.2.0/README.md 0000664 0000000 0000000 00000023204 14172331455 0017465 0 ustar 00root root 0000000 0000000 [](https://pypi.python.org/pypi/unittest-xml-reporting/)
[](https://pypi.python.org/pypi/unittest-xml-reporting/)
[](https://pypi.python.org/pypi/unittest-xml-reporting/)
[](http://unittest-xml-reporting.readthedocs.io/en/latest/?badge=latest)
[](https://codecov.io/github/xmlrunner/unittest-xml-reporting?branch=master)
[](https://coveralls.io/github/xmlrunner/unittest-xml-reporting?branch=master)
[](https://requires.io/github/xmlrunner/unittest-xml-reporting/requirements/?branch=master)
# unittest-xml-reporting (aka xmlrunner)
A unittest test runner that can save test results to XML files in xUnit format.
The files can be consumed by a wide range of tools, such as build systems, IDEs
and continuous integration servers.
## Requirements
* Python 3.7+
* Please note Python 3.6 end-of-life was in Dec 2021, last version supporting 3.6 was 3.1.0
* Please note Python 3.5 end-of-life was in Sep 2020, last version supporting 3.5 was 3.1.0
* Please note Python 2.7 end-of-life was in Jan 2020, last version supporting 2.7 was 2.5.2
* Please note Python 3.4 end-of-life was in Mar 2019, last version supporting 3.4 was 2.5.2
* Please note Python 2.6 end-of-life was in Oct 2013, last version supporting 2.6 was 1.14.0
## Limited support for `unittest.TestCase.subTest`
https://docs.python.org/3/library/unittest.html#unittest.TestCase.subTest
`unittest` has the concept of sub-tests for a `unittest.TestCase`; this doesn't map well to an existing xUnit concept, so you won't find it in the schema. What that means, is that you lose some granularity
in the reports for sub-tests.
`unittest` also does not report successful sub-tests, so the accounting won't be exact.
## Jenkins plugins
- Jenkins JUnit plugin : https://plugins.jenkins.io/junit/
- Jenkins xUnit plugin : https://plugins.jenkins.io/xunit/
### Jenkins JUnit plugin
This plugin does not perform XSD validation (at time of writing) and should parse the XML file without issues.
### Jenkins xUnit plugin version 1.100
- [Jenkins (junit-10.xsd), xunit plugin (2014-2018)](https://github.com/jenkinsci/xunit-plugin/blob/14c6e39c38408b9ed6280361484a13c6f5becca7/src/main/resources/org/jenkinsci/plugins/xunit/types/model/xsd/junit-10.xsd), version `1.100`.
This plugin does perfom XSD validation and uses the more lax XSD. This should parse the XML file without issues.
### Jenkins xUnit plugin version 1.104+
- [Jenkins (junit-10.xsd), xunit plugin (2018-current)](https://github.com/jenkinsci/xunit-plugin/blob/ae25da5089d4f94ac6c4669bf736e4d416cc4665/src/main/resources/org/jenkinsci/plugins/xunit/types/model/xsd/junit-10.xsd), version `1.104`+.
This plugin does perfom XSD validation and uses the more strict XSD.
See https://github.com/xmlrunner/unittest-xml-reporting/issues/209
```
import io
import unittest
import xmlrunner
# run the tests storing results in memory
out = io.BytesIO()
unittest.main(
testRunner=xmlrunner.XMLTestRunner(output=out),
failfast=False, buffer=False, catchbreak=False, exit=False)
```
Transform the results removing extra attributes.
```
from xmlrunner.extra.xunit_plugin import transform
with open('TEST-report.xml', 'wb') as report:
report.write(transform(out.getvalue()))
```
## JUnit Schema ?
There are many tools claiming to write JUnit reports, so you will find many schemas with minor differences.
We used the XSD that was available in the Jenkins xUnit plugin version `1.100`; a copy is available under `tests/vendor/jenkins/xunit-plugin/.../junit-10.xsd` (see attached license).
You may also find these resources useful:
- https://stackoverflow.com/questions/4922867/what-is-the-junit-xml-format-specification-that-hudson-supports
- https://stackoverflow.com/questions/11241781/python-unittests-in-jenkins
- [JUnit-Schema (JUnit.xsd)](https://github.com/windyroad/JUnit-Schema/blob/master/JUnit.xsd)
- [Windyroad (JUnit.xsd)](http://windyroad.com.au/dl/Open%20Source/JUnit.xsd)
- [a gist (Jenkins xUnit test result schema)](https://gist.github.com/erikd/4192748)
## Installation
The easiest way to install unittest-xml-reporting is via
[Pip](http://www.pip-installer.org):
````bash
$ pip install unittest-xml-reporting
````
If you use Git and want to get the latest *development* version:
````bash
$ git clone https://github.com/xmlrunner/unittest-xml-reporting.git
$ cd unittest-xml-reporting
$ sudo python setup.py install
````
Or get the latest *development* version as a tarball:
````bash
$ wget https://github.com/xmlrunner/unittest-xml-reporting/archive/master.zip
$ unzip master.zip
$ cd unittest-xml-reporting
$ sudo python setup.py install
````
Or you can manually download the latest released version from
[PyPI](https://pypi.python.org/pypi/unittest-xml-reporting/).
## Command-line
````bash
python -m xmlrunner [options]
python -m xmlrunner discover [options]
# help
python -m xmlrunner -h
````
e.g.
````bash
python -m xmlrunner discover -t ~/mycode/tests -o /tmp/build/junit-reports
````
## Usage
The script below, adapted from the
[unittest](http://docs.python.org/library/unittest.html), shows how to use
`XMLTestRunner` in a very simple way. In fact, the only difference between
this script and the original one is the last line:
````python
import random
import unittest
import xmlrunner
class TestSequenceFunctions(unittest.TestCase):
def setUp(self):
self.seq = list(range(10))
@unittest.skip("demonstrating skipping")
def test_skipped(self):
self.fail("shouldn't happen")
def test_shuffle(self):
# make sure the shuffled sequence does not lose any elements
random.shuffle(self.seq)
self.seq.sort()
self.assertEqual(self.seq, list(range(10)))
# should raise an exception for an immutable sequence
self.assertRaises(TypeError, random.shuffle, (1,2,3))
def test_choice(self):
element = random.choice(self.seq)
self.assertTrue(element in self.seq)
def test_sample(self):
with self.assertRaises(ValueError):
random.sample(self.seq, 20)
for element in random.sample(self.seq, 5):
self.assertTrue(element in self.seq)
if __name__ == '__main__':
unittest.main(
testRunner=xmlrunner.XMLTestRunner(output='test-reports'),
# these make sure that some options that are not applicable
# remain hidden from the help menu.
failfast=False, buffer=False, catchbreak=False)
````
### Reporting to a single file
````python
if __name__ == '__main__':
with open('/path/to/results.xml', 'wb') as output:
unittest.main(
testRunner=xmlrunner.XMLTestRunner(output=output),
failfast=False, buffer=False, catchbreak=False)
````
### Doctest support
The XMLTestRunner can also be used to report on docstrings style tests.
````python
import doctest
import xmlrunner
def twice(n):
"""
>>> twice(5)
10
"""
return 2 * n
class Multiplicator(object):
def threetimes(self, n):
"""
>>> Multiplicator().threetimes(5)
15
"""
return 3 * n
if __name__ == "__main__":
suite = doctest.DocTestSuite()
xmlrunner.XMLTestRunner().run(suite)
````
### Django support
In order to plug `XMLTestRunner` to a Django project, add the following
to your `settings.py`:
````python
TEST_RUNNER = 'xmlrunner.extra.djangotestrunner.XMLTestRunner'
````
Also, the following settings are provided so you can fine tune the reports:
|setting|default|values|description|
|-|-|-|-|
|`TEST_OUTPUT_VERBOSE`|`1`|`0\|1\|2`|Besides the XML reports generated by the test runner, a bunch of useful information is printed to the `sys.stderr` stream, just like the `TextTestRunner` does. Use this setting to choose between a verbose and a non-verbose output.|
|`TEST_OUTPUT_DESCRIPTIONS`|`False`|`True\|False`|If your test methods contains docstrings, you can display such docstrings instead of display the test name (ex: `module.TestCase.test_method`). In order to use this feature, you have to enable verbose output by setting `TEST_OUTPUT_VERBOSE = 2`. Only effects stdout and not XML output.|
|`TEST_OUTPUT_DIR`|`"."`|``|Tells the test runner where to put the XML reports. If the directory couldn't be found, the test runner will try to create it before generate the XML files.|
|`TEST_OUTPUT_FILE_NAME`|`None`|``|Tells the test runner to output a single XML report with this filename under `os.path.join(TEST_OUTPUT_DIR, TEST_OUTPUT_FILE_NAME)`. Please note that for long running tests, this will keep the results in memory for a longer time than multiple reports, and may use up more resources.|
## Contributing
We are always looking for good contributions, so please just fork the
repository and send pull requests (with tests!).
If you would like write access to the repository, or become a maintainer,
feel free to get in touch.
### Testing changes with `tox`
Please use `tox` to test your changes before sending a pull request.
You can find more information about `tox` at .
```bash
$ pip install tox
# basic sanity test, friendly output
$ tox -e pytest
# all combinations
$ tox
```
unittest-xml-reporting-3.2.0/docs/ 0000775 0000000 0000000 00000000000 14172331455 0017135 5 ustar 00root root 0000000 0000000 unittest-xml-reporting-3.2.0/docs/Makefile 0000664 0000000 0000000 00000016766 14172331455 0020615 0 ustar 00root root 0000000 0000000 # Makefile for Sphinx documentation
#
# You can set these variables from the command line.
SPHINXOPTS =
SPHINXBUILD = sphinx-build
PAPER =
BUILDDIR = _build
# Internal variables.
PAPEROPT_a4 = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
# the i18n builder cannot share the environment and doctrees with the others
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
.PHONY: help
help:
@echo "Please use \`make ' where is one of"
@echo " html to make standalone HTML files"
@echo " dirhtml to make HTML files named index.html in directories"
@echo " singlehtml to make a single large HTML file"
@echo " pickle to make pickle files"
@echo " json to make JSON files"
@echo " htmlhelp to make HTML files and a HTML help project"
@echo " qthelp to make HTML files and a qthelp project"
@echo " applehelp to make an Apple Help Book"
@echo " devhelp to make HTML files and a Devhelp project"
@echo " epub to make an epub"
@echo " epub3 to make an epub3"
@echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
@echo " latexpdf to make LaTeX files and run them through pdflatex"
@echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
@echo " text to make text files"
@echo " man to make manual pages"
@echo " texinfo to make Texinfo files"
@echo " info to make Texinfo files and run them through makeinfo"
@echo " gettext to make PO message catalogs"
@echo " changes to make an overview of all changed/added/deprecated items"
@echo " xml to make Docutils-native XML files"
@echo " pseudoxml to make pseudoxml-XML files for display purposes"
@echo " linkcheck to check all external links for integrity"
@echo " doctest to run all doctests embedded in the documentation (if enabled)"
@echo " coverage to run coverage check of the documentation (if enabled)"
@echo " dummy to check syntax errors of document sources"
.PHONY: clean
clean:
rm -rf $(BUILDDIR)/*
.PHONY: html
html:
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
.PHONY: dirhtml
dirhtml:
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
.PHONY: singlehtml
singlehtml:
$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
@echo
@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
.PHONY: pickle
pickle:
$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
@echo
@echo "Build finished; now you can process the pickle files."
.PHONY: json
json:
$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
@echo
@echo "Build finished; now you can process the JSON files."
.PHONY: htmlhelp
htmlhelp:
$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
@echo
@echo "Build finished; now you can run HTML Help Workshop with the" \
".hhp project file in $(BUILDDIR)/htmlhelp."
.PHONY: qthelp
qthelp:
$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
@echo
@echo "Build finished; now you can run "qcollectiongenerator" with the" \
".qhcp project file in $(BUILDDIR)/qthelp, like this:"
@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/unittest-xml-reporting.qhcp"
@echo "To view the help file:"
@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/unittest-xml-reporting.qhc"
.PHONY: applehelp
applehelp:
$(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
@echo
@echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
@echo "N.B. You won't be able to view it unless you put it in" \
"~/Library/Documentation/Help or install it in your application" \
"bundle."
.PHONY: devhelp
devhelp:
$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
@echo
@echo "Build finished."
@echo "To view the help file:"
@echo "# mkdir -p $$HOME/.local/share/devhelp/unittest-xml-reporting"
@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/unittest-xml-reporting"
@echo "# devhelp"
.PHONY: epub
epub:
$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
@echo
@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
.PHONY: epub3
epub3:
$(SPHINXBUILD) -b epub3 $(ALLSPHINXOPTS) $(BUILDDIR)/epub3
@echo
@echo "Build finished. The epub3 file is in $(BUILDDIR)/epub3."
.PHONY: latex
latex:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo
@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
@echo "Run \`make' in that directory to run these through (pdf)latex" \
"(use \`make latexpdf' here to do that automatically)."
.PHONY: latexpdf
latexpdf:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through pdflatex..."
$(MAKE) -C $(BUILDDIR)/latex all-pdf
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
.PHONY: latexpdfja
latexpdfja:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through platex and dvipdfmx..."
$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
.PHONY: text
text:
$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
@echo
@echo "Build finished. The text files are in $(BUILDDIR)/text."
.PHONY: man
man:
$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
@echo
@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
.PHONY: texinfo
texinfo:
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo
@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
@echo "Run \`make' in that directory to run these through makeinfo" \
"(use \`make info' here to do that automatically)."
.PHONY: info
info:
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo "Running Texinfo files through makeinfo..."
make -C $(BUILDDIR)/texinfo info
@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
.PHONY: gettext
gettext:
$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
@echo
@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
.PHONY: changes
changes:
$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
@echo
@echo "The overview file is in $(BUILDDIR)/changes."
.PHONY: linkcheck
linkcheck:
$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
@echo
@echo "Link check complete; look for any errors in the above output " \
"or in $(BUILDDIR)/linkcheck/output.txt."
.PHONY: doctest
doctest:
$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
@echo "Testing of doctests in the sources finished, look at the " \
"results in $(BUILDDIR)/doctest/output.txt."
.PHONY: coverage
coverage:
$(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
@echo "Testing of coverage in the sources finished, look at the " \
"results in $(BUILDDIR)/coverage/python.txt."
.PHONY: xml
xml:
$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
@echo
@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
.PHONY: pseudoxml
pseudoxml:
$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
@echo
@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
.PHONY: dummy
dummy:
$(SPHINXBUILD) -b dummy $(ALLSPHINXOPTS) $(BUILDDIR)/dummy
@echo
@echo "Build finished. Dummy builder generates no files."
unittest-xml-reporting-3.2.0/docs/conf.py 0000664 0000000 0000000 00000023216 14172331455 0020440 0 ustar 00root root 0000000 0000000 # -*- coding: utf-8 -*-
#
# unittest-xml-reporting documentation build configuration file, created by
# sphinx-quickstart on Mon May 30 11:39:40 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = ['.rst', '.md']
# The encoding of source files.
#
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'unittest-xml-reporting'
copyright = u'2016, Daniel Fernandes Martins, Damien Nozay'
author = u'Daniel Fernandes Martins, Damien Nozay'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'2.1.0'
# The full version, including alpha/beta/rc tags.
release = u'2.1.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#
# today = ''
#
# Else, today_fmt is used as the format for a strftime call.
#
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents.
# " v documentation" by default.
#
# html_title = u'unittest-xml-reporting v2.1.0'
# A shorter title for the navigation bar. Default is the same as html_title.
#
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#
# html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#
# html_extra_path = []
# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#
# html_last_updated_fmt = None
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#
# html_additional_pages = {}
# If false, no module index is generated.
#
# html_domain_indices = True
# If false, no index is generated.
#
# html_use_index = True
# If true, the index is split into individual pages for each letter.
#
# html_split_index = False
# If true, links to the reST sources are added to the pages.
#
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
#
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' user can custom change `jieba` dictionary path.
#
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'unittest-xml-reportingdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'unittest-xml-reporting.tex', u'unittest-xml-reporting Documentation',
u'Daniel Fernandes Martins, Damien Nozay', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#
# latex_use_parts = False
# If true, show page references after internal links.
#
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
#
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
#
# latex_appendices = []
# If false, no module index is generated.
#
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'unittest-xml-reporting', u'unittest-xml-reporting Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'unittest-xml-reporting', u'unittest-xml-reporting Documentation',
author, 'unittest-xml-reporting', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#
# texinfo_appendices = []
# If false, no module index is generated.
#
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#
# texinfo_no_detailmenu = False
unittest-xml-reporting-3.2.0/docs/index.rst 0000664 0000000 0000000 00000002132 14172331455 0020774 0 ustar 00root root 0000000 0000000 unittest-xml-reporting
======================
``unittest-xml-reporting`` is a ``unittest`` test runner that can save
test results to XML files (jUnit) and be consumed by a wide range of
tools such as continuous integration systems.
Getting started
===============
Similar to the ``unittest`` module, you can run::
python -m xmlrunner test_module
python -m xmlrunner module.TestClass
python -m xmlrunner module.Class.test_method
as well as::
python -m xmlrunner discover [options]
You can also add a top level file to allow running the tests with
the command ``python tests.py``, and configure the test runner
to output the XML reports in the ``test-reports`` directory. ::
# tests.py
if __name__ == '__main__':
unittest.main(
testRunner=xmlrunner.XMLTestRunner(output='test-reports'),
# these make sure that some options that are not applicable
# remain hidden from the help menu.
failfast=False, buffer=False, catchbreak=False)
Indices and tables
==================
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
unittest-xml-reporting-3.2.0/setup.cfg 0000664 0000000 0000000 00000000055 14172331455 0020026 0 ustar 00root root 0000000 0000000 [bdist_wheel]
universal = 1
python-tag = py3
unittest-xml-reporting-3.2.0/setup.py 0000775 0000000 0000000 00000004316 14172331455 0017726 0 ustar 00root root 0000000 0000000 #!/usr/bin/env python
from setuptools import setup, find_packages  # noqa: F401
import codecs
import os.path
import sys

# Fail fast on unsupported interpreters; this also makes sdist builds on
# old Pythons stop with a clear message instead of a confusing traceback.
if sys.version_info < (3, 7):
    raise RuntimeError('This version requires Python 3.7+')  # pragma: no cover


def _convert_path(pathname):
    """Convert a '/'-separated path to use the platform's separator.

    Stdlib replacement for ``distutils.util.convert_path``: distutils is
    deprecated by PEP 632 and removed from the standard library in
    Python 3.12, so setup.py must not import it.
    """
    return os.path.join(*pathname.split('/'))


# Load version information by exec'ing xmlrunner/version.py, so the
# package itself (and its dependencies) need not be importable at build time.
main_ns = {}
ver_path = _convert_path('xmlrunner/version.py')
with codecs.open(ver_path, 'rb', 'utf8') as ver_file:
    exec(ver_file.read(), main_ns)

# Load README.md as the PyPI long description.
readme_path = _convert_path('README.md')
with codecs.open(readme_path, 'rb', 'utf8') as readme_file:
    long_description = readme_file.read()

setup(
    name='unittest-xml-reporting',
    version=main_ns['__version__'],
    author='Daniel Fernandes Martins, Damien Nozay',
    description='unittest-based test runner with Ant/JUnit like XML reporting.',
    long_description=long_description,
    long_description_content_type='text/markdown',
    data_files=[('', ['LICENSE'])],
    install_requires=['lxml'],
    license='BSD',
    platforms=['Any'],
    python_requires='>=3.7',
    keywords=[
        'pyunit', 'unittest', 'junit xml', 'xunit', 'report', 'testrunner', 'xmlrunner'
    ],
    url='http://github.com/xmlrunner/unittest-xml-reporting/tree/master/',
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Natural Language :: English',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3 :: Only',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: 3.9',
        'Programming Language :: Python :: 3.10',
        'Programming Language :: Python :: Implementation :: CPython',
        'Programming Language :: Python :: Implementation :: PyPy',
        'Topic :: Software Development :: Libraries :: Python Modules',
        'Topic :: Software Development :: Testing'
    ],
    packages=['xmlrunner', 'xmlrunner.extra'],
    zip_safe=False,
    include_package_data=True,
    test_suite='tests'
)
unittest-xml-reporting-3.2.0/tests/ 0000775 0000000 0000000 00000000000 14172331455 0017347 5 ustar 00root root 0000000 0000000 unittest-xml-reporting-3.2.0/tests/__init__.py 0000664 0000000 0000000 00000000000 14172331455 0021446 0 ustar 00root root 0000000 0000000 unittest-xml-reporting-3.2.0/tests/builder_test.py 0000664 0000000 0000000 00000020712 14172331455 0022410 0 ustar 00root root 0000000 0000000 # -*- coding: utf-8
from xmlrunner.unittest import unittest
import xml.etree.ElementTree as ET
from xml.dom.minidom import Document
from xmlrunner import builder
class TestXMLContextTest(unittest.TestCase):
    """TestXMLContext test cases."""

    # Counter names recognised by <testsuite>-level contexts.
    ALL_COUNTERS = ('tests', 'failures', 'errors', 'skipped')

    def setUp(self):
        self.document = Document()
        self.context = builder.TestXMLContext(self.document)

    def _begin(self, tag, name='name'):
        # Convenience: open a context on the fixture object.
        self.context.begin(tag, name)

    def test_current_element_tag_name(self):
        self._begin('tag', 'context-name')
        self.assertEqual(self.context.element_tag(), 'tag')

    def test_current_context_name(self):
        self._begin('tag', 'context-name')
        self.assertEqual(
            self.context.element.getAttribute('name'), 'context-name')

    def test_current_context_invalid_unicode_name(self):
        # Control characters are not valid XML 1.0; they must come back
        # as U+FFFD replacement characters.
        self._begin('tag', u'context-name\x01\x0B')
        self.assertEqual(
            self.context.element.getAttribute('name'),
            u'context-name\uFFFD\uFFFD')

    def test_increment_valid_testsuites_counters(self):
        self._begin('testsuites')
        for counter in self.ALL_COUNTERS:
            self.context.increment_counter(counter)
        element = self.context.end()
        # 'skipped' is not tracked at the <testsuites> level.
        with self.assertRaises(KeyError):
            element.attributes['skipped']
        for counter in ('tests', 'failures', 'errors'):
            self.assertEqual(element.attributes[counter].value, '1')

    def test_increment_valid_testsuite_counters(self):
        self._begin('testsuite')
        for counter in self.ALL_COUNTERS:
            self.context.increment_counter(counter)
        element = self.context.end()
        for counter in self.ALL_COUNTERS:
            self.assertEqual(element.attributes[counter].value, '1')

    def test_increment_counters_for_unknown_context(self):
        self._begin('unknown')
        for counter in self.ALL_COUNTERS + ('invalid',):
            self.context.increment_counter(counter)
        element = self.context.end()
        # An unrecognised context tracks no counters at all.
        for counter in self.ALL_COUNTERS + ('invalid',):
            with self.assertRaises(KeyError):
                element.attributes[counter]

    def test_empty_counters_on_end_context(self):
        self._begin('testsuite')
        element = self.context.end()
        for counter in self.ALL_COUNTERS:
            self.assertEqual(element.attributes[counter].value, '0')

    def test_add_time_attribute_on_end_context(self):
        self._begin('testsuite')
        # Attribute access raises KeyError if 'time' was not added.
        self.context.end().attributes['time'].value

    def test_add_timestamp_attribute_on_end_context(self):
        self._begin('testsuite')
        self.context.end().attributes['timestamp'].value
class TestXMLBuilderTest(unittest.TestCase):
    """TestXMLBuilder test cases.
    """

    def setUp(self):
        # Fresh builder per test with an open root <testsuites> context,
        # plus sample payloads: valid Cyrillic text, an XML-1.0-invalid
        # control character, and the replacement char it should map to.
        self.builder = builder.TestXMLBuilder()
        self.doc = self.builder._xml_doc
        self.builder.begin_context('testsuites', 'name')
        self.valid_chars = u'выбор'
        self.invalid_chars = '\x01'
        self.invalid_chars_replace = u'\ufffd'

    def test_root_has_no_parent(self):
        self.assertIsNone(self.builder.current_context().parent)

    def test_current_context_tag(self):
        self.assertEqual(self.builder.context_tag(), 'testsuites')

    def test_begin_nested_context(self):
        root = self.builder.current_context()
        self.builder.begin_context('testsuite', 'name')
        self.assertEqual(self.builder.context_tag(), 'testsuite')
        self.assertIs(self.builder.current_context().parent, root)

    def test_end_inexistent_context(self):
        # A brand-new builder has no open context to end.
        self.builder = builder.TestXMLBuilder()
        self.assertFalse(self.builder.end_context())
        self.assertEqual(len(self.doc.childNodes), 0)

    def test_end_root_context(self):
        root = self.builder.current_context()
        self.assertTrue(self.builder.end_context())
        self.assertIsNone(self.builder.current_context())
        # No contexts left
        self.assertFalse(self.builder.end_context())
        doc_children = self.doc.childNodes
        self.assertEqual(len(doc_children), 1)
        self.assertEqual(len(doc_children[0].childNodes), 0)
        self.assertEqual(doc_children[0].tagName, root.element_tag())

    def test_end_nested_context(self):
        self.builder.begin_context('testsuite', 'name')
        self.builder.current_context()
        self.assertTrue(self.builder.end_context())
        # Only updates the document when all contexts end
        self.assertEqual(len(self.doc.childNodes), 0)

    def test_end_all_context_stack(self):
        root = self.builder.current_context()
        self.builder.begin_context('testsuite', 'name')
        nested = self.builder.current_context()
        self.assertTrue(self.builder.end_context())
        self.assertTrue(self.builder.end_context())
        # No contexts left
        self.assertFalse(self.builder.end_context())
        root_child = self.doc.childNodes
        self.assertEqual(len(root_child), 1)
        self.assertEqual(root_child[0].tagName, root.element_tag())
        nested_child = root_child[0].childNodes
        self.assertEqual(len(nested_child), 1)
        self.assertEqual(nested_child[0].tagName, nested.element_tag())

    def test_append_valid_unicode_cdata_section(self):
        self.builder.append_cdata_section('tag', self.valid_chars)
        self.builder.end_context()
        root_child = self.doc.childNodes[0]
        cdata_container = root_child.childNodes[0]
        self.assertEqual(cdata_container.tagName, 'tag')
        cdata = cdata_container.childNodes[0]
        self.assertEqual(cdata.data, self.valid_chars)

    def test_append_invalid_unicode_cdata_section(self):
        # Invalid XML 1.0 characters are replaced with U+FFFD.
        self.builder.append_cdata_section('tag', self.invalid_chars)
        self.builder.end_context()
        root_child = self.doc.childNodes[0]
        cdata_container = root_child.childNodes[0]
        cdata = cdata_container.childNodes[0]
        self.assertEqual(cdata.data, self.invalid_chars_replace)

    def test_append_cdata_closing_tags_into_cdata_section(self):
        # A literal ']]>' must be split across two CDATA nodes so the
        # serialised section stays well-formed.
        self.builder.append_cdata_section('tag', ']]>')
        self.builder.end_context()
        root_child = self.doc.childNodes[0]
        cdata_container = root_child.childNodes[0]
        self.assertEqual(len(cdata_container.childNodes), 2)
        self.assertEqual(cdata_container.childNodes[0].data, ']]')
        self.assertEqual(cdata_container.childNodes[1].data, '>')

    def test_append_tag_with_valid_unicode_values(self):
        self.builder.append('tag', self.valid_chars, attr=self.valid_chars)
        self.builder.end_context()
        root_child = self.doc.childNodes[0]
        tag = root_child.childNodes[0]
        self.assertEqual(tag.tagName, 'tag')
        self.assertEqual(tag.getAttribute('attr'), self.valid_chars)
        self.assertEqual(tag.childNodes[0].data, self.valid_chars)

    def test_append_tag_with_invalid_unicode_values(self):
        # Both attribute values and text content get sanitised.
        self.builder.append('tag', self.invalid_chars, attr=self.invalid_chars)
        self.builder.end_context()
        root_child = self.doc.childNodes[0]
        tag = root_child.childNodes[0]
        self.assertEqual(tag.tagName, 'tag')
        self.assertEqual(tag.getAttribute('attr'), self.invalid_chars_replace)
        self.assertEqual(tag.childNodes[0].data, self.invalid_chars_replace)

    def test_increment_root_context_counter(self):
        self.builder.increment_counter('tests')
        self.builder.end_context()
        root_child = self.doc.childNodes[0]
        self.assertEqual(root_child.tagName, 'testsuites')
        self.assertEqual(root_child.getAttribute('tests'), '1')

    def test_increment_nested_context_counter(self):
        # Counters bubble up: the root total includes nested increments.
        self.builder.increment_counter('tests')
        self.builder.begin_context('testsuite', 'name')
        self.builder.increment_counter('tests')
        self.builder.end_context()
        self.builder.end_context()
        root_child = self.doc.childNodes[0]
        nested_child = root_child.childNodes[0]
        self.assertEqual(root_child.tagName, 'testsuites')
        self.assertEqual(nested_child.getAttribute('tests'), '1')
        self.assertEqual(root_child.getAttribute('tests'), '2')

    def test_finish_nested_context(self):
        # finish() closes any still-open contexts and returns the XML.
        self.builder.begin_context('testsuite', 'name')
        tree = ET.fromstring(self.builder.finish())
        self.assertEqual(tree.tag, 'testsuites')
        self.assertEqual(len(tree.findall("./testsuite")), 1)
unittest-xml-reporting-3.2.0/tests/django_example/ 0000775 0000000 0000000 00000000000 14172331455 0022324 5 ustar 00root root 0000000 0000000 unittest-xml-reporting-3.2.0/tests/django_example/app/ 0000775 0000000 0000000 00000000000 14172331455 0023104 5 ustar 00root root 0000000 0000000 unittest-xml-reporting-3.2.0/tests/django_example/app/__init__.py 0000664 0000000 0000000 00000000000 14172331455 0025203 0 ustar 00root root 0000000 0000000 unittest-xml-reporting-3.2.0/tests/django_example/app/admin.py 0000664 0000000 0000000 00000000107 14172331455 0024544 0 ustar 00root root 0000000 0000000 from django.contrib import admin # NOQA
# Register your models here.
unittest-xml-reporting-3.2.0/tests/django_example/app/migrations/ 0000775 0000000 0000000 00000000000 14172331455 0025260 5 ustar 00root root 0000000 0000000 unittest-xml-reporting-3.2.0/tests/django_example/app/migrations/__init__.py 0000664 0000000 0000000 00000000000 14172331455 0027357 0 ustar 00root root 0000000 0000000 unittest-xml-reporting-3.2.0/tests/django_example/app/models.py 0000664 0000000 0000000 00000000101 14172331455 0024731 0 ustar 00root root 0000000 0000000 from django.db import models # NOQA
# Create your models here.
unittest-xml-reporting-3.2.0/tests/django_example/app/tests.py 0000664 0000000 0000000 00000000532 14172331455 0024620 0 ustar 00root root 0000000 0000000 from django.test import TestCase
# Create your tests here.
class DummyTestCase(TestCase):
    # Fixture tests for the Django integration; the docstrings double as
    # test descriptions that end up inside the generated XML report, so
    # they deliberately contain XML-hostile text.

    def test_pass(self):
        """Test Pass"""
        pass

    def test_negative_comment1(self):
        """Use a close comment XML tag -->"""
        pass

    def test_negative_comment2(self):
        # NOTE(review): this docstring looks truncated in this copy of
        # the file (an XML-tag-like payload was probably stripped);
        # confirm against upstream before relying on it.
        """Check XML tag """
        pass
unittest-xml-reporting-3.2.0/tests/django_example/app/views.py 0000664 0000000 0000000 00000000107 14172331455 0024611 0 ustar 00root root 0000000 0000000 from django.shortcuts import render # NOQA
# Create your views here.
unittest-xml-reporting-3.2.0/tests/django_example/app2/ 0000775 0000000 0000000 00000000000 14172331455 0023166 5 ustar 00root root 0000000 0000000 unittest-xml-reporting-3.2.0/tests/django_example/app2/__init__.py 0000664 0000000 0000000 00000000000 14172331455 0025265 0 ustar 00root root 0000000 0000000 unittest-xml-reporting-3.2.0/tests/django_example/app2/admin.py 0000664 0000000 0000000 00000000107 14172331455 0024626 0 ustar 00root root 0000000 0000000 from django.contrib import admin # NOQA
# Register your models here.
unittest-xml-reporting-3.2.0/tests/django_example/app2/migrations/ 0000775 0000000 0000000 00000000000 14172331455 0025342 5 ustar 00root root 0000000 0000000 unittest-xml-reporting-3.2.0/tests/django_example/app2/migrations/__init__.py 0000664 0000000 0000000 00000000000 14172331455 0027441 0 ustar 00root root 0000000 0000000 unittest-xml-reporting-3.2.0/tests/django_example/app2/models.py 0000664 0000000 0000000 00000000101 14172331455 0025013 0 ustar 00root root 0000000 0000000 from django.db import models # NOQA
# Create your models here.
unittest-xml-reporting-3.2.0/tests/django_example/app2/tests.py 0000664 0000000 0000000 00000000202 14172331455 0024674 0 ustar 00root root 0000000 0000000 from django.test import TestCase
# Create your tests here.
class DummyTestCase(TestCase):
    # Minimal fixture: a single always-passing test used to verify
    # multi-app discovery and per-app report generation.

    def test_pass(self):
        pass
unittest-xml-reporting-3.2.0/tests/django_example/app2/views.py 0000664 0000000 0000000 00000000107 14172331455 0024673 0 ustar 00root root 0000000 0000000 from django.shortcuts import render # NOQA
# Create your views here.
unittest-xml-reporting-3.2.0/tests/django_example/example/ 0000775 0000000 0000000 00000000000 14172331455 0023757 5 ustar 00root root 0000000 0000000 unittest-xml-reporting-3.2.0/tests/django_example/example/__init__.py 0000664 0000000 0000000 00000000000 14172331455 0026056 0 ustar 00root root 0000000 0000000 unittest-xml-reporting-3.2.0/tests/django_example/example/settings.py 0000664 0000000 0000000 00000001360 14172331455 0026171 0 ustar 00root root 0000000 0000000
import os

# Base directory of the example project (the django_example folder).
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Test-only settings -- this project is never deployed.
SECRET_KEY = 'not-a-secret'
DEBUG = True
ALLOWED_HOSTS = []

# Two dummy apps so the multi-report tests can expect one XML per app.
INSTALLED_APPS = ['app', 'app2']
MIDDLEWARE_CLASSES = []
ROOT_URLCONF = 'example.urls'
TEMPLATES = []

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        # Overridden by DjangoTest.setUp to point at a temp directory.
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}

LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
STATIC_URL = '/static/'

# The settings we care about for xmlrunner.
# They are commented out because we will use settings.configure() in tests.
# TEST_RUNNER = 'xmlrunner.extra.djangotestrunner.XMLTestRunner'
# TEST_OUTPUT_FILE_NAME = 'results.xml'
# TEST_OUTPUT_VERBOSE = 2
unittest-xml-reporting-3.2.0/tests/django_example/example/urls.py 0000664 0000000 0000000 00000000175 14172331455 0025321 0 ustar 00root root 0000000 0000000
# ``django.conf.urls.url`` was deprecated in Django 3.1 and removed in
# Django 4.0; ``re_path`` (available since Django 2.0) is the drop-in
# regex-based replacement, keeping the pattern below unchanged.
from django.urls import re_path
from django.contrib import admin

urlpatterns = [
    re_path(r'^admin/', admin.site.urls),
]
unittest-xml-reporting-3.2.0/tests/django_example/example/wsgi.py 0000664 0000000 0000000 00000000250 14172331455 0025277 0 ustar 00root root 0000000 0000000
import os

from django.core.wsgi import get_wsgi_application

# Default to the example project's settings when none are configured.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "example.settings")

# WSGI entry point picked up by application servers.
application = get_wsgi_application()
unittest-xml-reporting-3.2.0/tests/django_example/manage.py 0000775 0000000 0000000 00000000371 14172331455 0024132 0 ustar 00root root 0000000 0000000 #!/usr/bin/env python
import os
import sys

if __name__ == "__main__":
    # Standard Django management entry point for the example project.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "example.settings")
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
unittest-xml-reporting-3.2.0/tests/django_test.py 0000664 0000000 0000000 00000013163 14172331455 0022226 0 ustar 00root root 0000000 0000000 from xmlrunner.unittest import unittest
import sys
import os
from os import path
import glob
from unittest import mock
import tempfile
import shutil
try:
import django
except ImportError:
django = None
else:
from django.test.utils import get_runner
from django.conf import settings, UserSettingsHolder
from django.apps import apps
settings.configure(DEBUG=True)
TESTS_DIR = path.dirname(__file__)
@unittest.skipIf(django is None, 'django not found')
class DjangoTest(unittest.TestCase):
    """Integration tests for the Django test-runner extension.

    Each test runs against the example project in tests/django_example,
    overriding settings per-test and pointing the sqlite database at a
    throw-away temporary directory.
    """

    def setUp(self):
        # Run from inside the example project so app discovery works.
        self._old_cwd = os.getcwd()
        self.project_dir = path.abspath(path.join(TESTS_DIR, 'django_example'))
        self.tmpdir = tempfile.mkdtemp()
        os.chdir(self.project_dir)
        sys.path.append(self.project_dir)
        # allow changing settings
        self.old_settings = settings._wrapped
        os.environ['DJANGO_SETTINGS_MODULE'] = 'example.settings'
        settings.INSTALLED_APPS  # load settings on first access
        # Redirect the database into the temp dir so tests never touch
        # the example project's own files.
        settings.DATABASES['default'] = {}
        settings.DATABASES['default']['NAME'] = path.join(
            self.tmpdir, 'db.sqlite3')
        # this goes around the "settings already loaded" issue.
        self.override = UserSettingsHolder(settings._wrapped)
        settings._wrapped = self.override

    def tearDown(self):
        # Restore cwd and settings; drop the temp database directory.
        os.chdir(self._old_cwd)
        shutil.rmtree(self.tmpdir)
        settings._wrapped = self.old_settings

    def _override_settings(self, **kwargs):
        # see django.test.utils.override_settings
        for key, new_value in kwargs.items():
            setattr(self.override, key, new_value)

    def _check_runner(self, runner):
        # Explicit labels must be honoured in the given order...
        suite = runner.build_suite(test_labels=['app2', 'app'])
        test_ids = [test.id() for test in suite]
        self.assertEqual(test_ids, [
            'app2.tests.DummyTestCase.test_pass',
            'app.tests.DummyTestCase.test_negative_comment1',
            'app.tests.DummyTestCase.test_negative_comment2',
            'app.tests.DummyTestCase.test_pass',
        ])
        # ...and with no labels everything is discovered (order is
        # unspecified, hence the set comparison).
        suite = runner.build_suite(test_labels=[])
        test_ids = [test.id() for test in suite]
        self.assertEqual(set(test_ids), set([
            'app.tests.DummyTestCase.test_pass',
            'app.tests.DummyTestCase.test_negative_comment1',
            'app.tests.DummyTestCase.test_negative_comment2',
            'app2.tests.DummyTestCase.test_pass',
        ]))

    def test_django_runner(self):
        # Baseline: Django's stock test runner builds the same suites.
        runner_class = get_runner(settings)
        runner = runner_class()
        self._check_runner(runner)

    def test_django_xmlrunner(self):
        self._override_settings(
            TEST_RUNNER='xmlrunner.extra.djangotestrunner.XMLTestRunner')
        runner_class = get_runner(settings)
        runner = runner_class()
        self._check_runner(runner)

    def test_django_verbose(self):
        self._override_settings(
            TEST_OUTPUT_VERBOSE=True,
            TEST_RUNNER='xmlrunner.extra.djangotestrunner.XMLTestRunner')
        runner_class = get_runner(settings)
        runner = runner_class()
        self._check_runner(runner)

    def test_django_single_report(self):
        # TEST_OUTPUT_FILE_NAME collapses all apps into one report file.
        self._override_settings(
            TEST_OUTPUT_DIR=self.tmpdir,
            TEST_OUTPUT_FILE_NAME='results.xml',
            TEST_OUTPUT_VERBOSE=0,
            TEST_RUNNER='xmlrunner.extra.djangotestrunner.XMLTestRunner')
        apps.populate(settings.INSTALLED_APPS)
        runner_class = get_runner(settings)
        runner = runner_class()
        suite = runner.build_suite()
        runner.run_suite(suite)
        expected_file = path.join(self.tmpdir, 'results.xml')
        self.assertTrue(path.exists(expected_file),
                        'did not generate xml report where expected.')

    def test_django_single_report_create_folder(self):
        # A missing TEST_OUTPUT_DIR must be created by the runner.
        intermediate_directory = 'report'
        directory = path.join(self.tmpdir, intermediate_directory)
        self._override_settings(
            TEST_OUTPUT_DIR=directory,
            TEST_OUTPUT_FILE_NAME='results.xml',
            TEST_OUTPUT_VERBOSE=0,
            TEST_RUNNER='xmlrunner.extra.djangotestrunner.XMLTestRunner')
        apps.populate(settings.INSTALLED_APPS)
        runner_class = get_runner(settings)
        runner = runner_class()
        suite = runner.build_suite()
        runner.run_suite(suite)
        expected_file = path.join(directory, 'results.xml')
        self.assertTrue(path.exists(expected_file),
                        'did not generate xml report where expected.')

    def test_django_multiple_reports(self):
        # Without TEST_OUTPUT_FILE_NAME: one TEST*.xml file per app.
        self._override_settings(
            TEST_OUTPUT_DIR=self.tmpdir,
            TEST_OUTPUT_VERBOSE=0,
            TEST_RUNNER='xmlrunner.extra.djangotestrunner.XMLTestRunner')
        apps.populate(settings.INSTALLED_APPS)
        runner_class = get_runner(settings)
        runner = runner_class()
        suite = runner.build_suite(test_labels=None)
        runner.run_suite(suite)
        test_files = glob.glob(path.join(self.tmpdir, 'TEST*.xml'))
        self.assertTrue(test_files,
                        'did not generate xml reports where expected.')
        self.assertEqual(2, len(test_files))

    def test_django_runner_extension(self):
        # Subclasses may swap in their own unittest runner class via the
        # ``test_runner`` attribute; it must actually be used.
        from xmlrunner.extra.djangotestrunner import XMLTestRunner

        class MyDjangoRunner(XMLTestRunner):
            test_runner = mock.Mock()

        self._override_settings(
            TEST_OUTPUT_DIR=self.tmpdir,
            TEST_OUTPUT_VERBOSE=0)
        apps.populate(settings.INSTALLED_APPS)

        runner = MyDjangoRunner()
        suite = runner.build_suite(test_labels=None)
        runner.run_suite(suite)
        self.assertTrue(MyDjangoRunner.test_runner.called)
unittest-xml-reporting-3.2.0/tests/doctest_example.py 0000664 0000000 0000000 00000000346 14172331455 0023104 0 ustar 00root root 0000000 0000000
def twice(n):
    """
    >>> twice(5)
    10
    """
    doubled = n * 2
    return doubled
class Multiplicator(object):

    def threetimes(self, n):
        """
        >>> Multiplicator().threetimes(5)
        15
        """
        tripled = 3 * n
        return tripled
unittest-xml-reporting-3.2.0/tests/testsuite.py 0000775 0000000 0000000 00000110556 14172331455 0021765 0 ustar 00root root 0000000 0000000 #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Executable module to test unittest-xml-reporting.
"""
from __future__ import print_function
import contextlib
import io
import sys
from xmlrunner.unittest import unittest
import xmlrunner
from xmlrunner.result import _DuplicateWriter
from xmlrunner.result import _XMLTestResult
from xmlrunner.result import resolve_filename
import doctest
import tests.doctest_example
from io import StringIO, BytesIO
from tempfile import mkdtemp
from tempfile import mkstemp
from shutil import rmtree
from glob import glob
from xml.dom import minidom
from lxml import etree
import os
import os.path
from unittest import mock
def _load_schema(version):
    """Load the vendored Jenkins xunit-plugin JUnit XSD for *version*.

    *version* is a directory name (a git SHA of the xunit-plugin) under
    tests/vendor/jenkins/xunit-plugin.  ``open()`` raises OSError if the
    file is missing, so no extra error path is needed.  The original
    ended with an unreachable ``raise RuntimeError`` after the return;
    that dead code has been removed.
    """
    path = os.path.join(
        os.path.dirname(__file__),
        'vendor/jenkins/xunit-plugin', version, 'junit-10.xsd')
    with open(path, 'r') as schema_file:
        schema_doc = etree.parse(schema_file)
        return etree.XMLSchema(schema_doc)
def validate_junit_report(version, text):
    """Assert *text* (report bytes) is valid per the vendored JUnit
    schema identified by *version*; raises on validation failure."""
    schema = _load_schema(version)
    report = etree.parse(BytesIO(text))
    schema.assertValid(report)
class DoctestTest(unittest.TestCase):

    def test_doctest_example(self):
        """Doctests collected via DocTestSuite must be reported with
        their module / class dotted path as the XML classname."""
        suite = doctest.DocTestSuite(tests.doctest_example)
        report_buf = BytesIO()
        console = StringIO()
        runner = xmlrunner.XMLTestRunner(
            stream=console, output=report_buf, verbosity=0)
        runner.run(suite)
        report_buf.seek(0)
        xml_bytes = report_buf.read()
        expected_fragments = (
            'classname="tests.doctest_example.Multiplicator"',
            'name="threetimes"',
            'classname="tests.doctest_example"',
            'name="twice"',
        )
        for fragment in expected_fragments:
            self.assertIn(fragment.encode('utf8'), xml_bytes)
@contextlib.contextmanager
def capture_stdout_stderr():
    """Temporarily replace sys.stdout/sys.stderr with StringIO buffers.

    Yields the ``(stdout, stderr)`` buffer pair; the original streams
    are restored on exit even if the body raises.
    """
    saved_streams = (sys.stdout, sys.stderr)
    sys.stdout, sys.stderr = StringIO(), StringIO()
    try:
        yield (sys.stdout, sys.stderr)
    finally:
        sys.stdout, sys.stderr = saved_streams
def _strip_xml(xml, changes):
    """Remove non-whitelisted attributes from an XML document.

    *changes* maps an XPath expression to the tuple of attribute names
    to KEEP on every node it matches; all other attributes on those
    nodes are deleted.  Returns the re-serialised document bytes.
    """
    doc = etree.fromstring(xml)
    for xpath, kept_attributes in changes.items():
        for node in doc.xpath(xpath):
            # Materialise the name list first -- we mutate node.attrib.
            for attr_name in list(node.attrib.keys()):
                if attr_name not in kept_attributes:
                    del node.attrib[attr_name]
    return etree.tostring(doc)
def some_decorator(f):
    """Opaque decorator (deliberately without functools.wraps).

    Regression fixture for issue #195: the returned wrapper hides the
    wrapped function's metadata, which the runner must tolerate.
    """
    code = """\
def wrapper(*args, **kwargs):
    return func(*args, **kwargs)
"""
    scope = dict(func=f)
    exec(code, scope)
    return scope['wrapper']
class XMLTestRunnerTestCase(unittest.TestCase):
"""
XMLTestRunner test case.
"""
    class DummyTest(unittest.TestCase):
        # Fixture case: each method produces one kind of outcome or
        # output that the runner tests below assert on.

        @unittest.skip("demonstrating skipping")
        def test_skip(self):
            pass  # pragma: no cover

        @unittest.skip(u"demonstrating non-ascii skipping: éçà")
        def test_non_ascii_skip(self):
            pass  # pragma: no cover

        def test_pass(self):
            pass

        def test_fail(self):
            self.assertTrue(False)

        @unittest.expectedFailure
        def test_expected_failure(self):
            self.assertTrue(False)

        @unittest.expectedFailure
        def test_unexpected_success(self):
            pass

        def test_error(self):
            1 / 0

        def test_cdata_section(self):
            # NOTE(review): the printed literal appears emptied in this
            # copy of the file (a CDATA-like payload was likely stripped
            # by whatever produced this dump); confirm against upstream.
            print('')

        def test_invalid_xml_chars_in_doc(self):
            """
            Testing comments, -- is not allowed, or invalid xml 1.0 chars such as \x0c
            """
            pass

        def test_non_ascii_error(self):
            self.assertEqual(u"éçà", 42)

        def test_unsafe_unicode(self):
            # Control chars that are invalid in XML 1.0 output.
            print(u"A\x00B\x08C\x0BD\x0C")

        def test_output_stdout_and_stderr(self):
            print('test on stdout')
            print('test on stderr', file=sys.stderr)

        def test_runner_buffer_output_pass(self):
            print('should not be printed')

        def test_runner_buffer_output_fail(self):
            print('should be printed')
            self.fail('expected to fail')

        def test_output(self):
            print('test message')

        def test_non_ascii_runner_buffer_output_fail(self):
            print(u'Where is the café ?')
            self.fail(u'The café could not be found')
    class DummySubTest(unittest.TestCase):
        # Fixture case exercising unittest.TestCase.subTest reporting.

        def test_subTest_pass(self):
            for i in range(2):
                with self.subTest(i=i):
                    pass

        def test_subTest_fail(self):
            for i in range(2):
                with self.subTest(i=i):
                    self.fail('this is a subtest.')

        def test_subTest_error(self):
            for i in range(2):
                with self.subTest(i=i):
                    raise Exception('this is a subtest')

        def test_subTest_mixed(self):
            # First iteration passes, second fails.
            for i in range(2):
                with self.subTest(i=i):
                    self.assertLess(i, 1, msg='this is a subtest.')

        def test_subTest_with_dots(self):
            # Dotted subtest params must not break report naming.
            for i in range(2):
                with self.subTest(module='hello.world.subTest{}'.format(i)):
                    self.fail('this is a subtest.')
    class DecoratedUnitTest(unittest.TestCase):
        # Fixture for issue #195: a test method wrapped by an opaque
        # decorator (some_decorator does not use functools.wraps).

        @some_decorator
        def test_pass(self):
            pass
    class DummyErrorInCallTest(unittest.TestCase):
        # Fixture simulating a TestCase whose __call__ itself errors,
        # so the test method body never runs.

        def __call__(self, result):
            try:
                raise Exception('Massive fail')
            except Exception:
                result.addError(self, sys.exc_info())
                return

        def test_pass(self):
            # it is expected not to be called.
            pass  # pragma: no cover
    class DummyRefCountTest(unittest.TestCase):
        # Fixture for reference-count checks: a failing test holding a
        # local object; the runner must not keep `inst` alive via the
        # stored traceback.

        class dummy(object):
            pass

        def test_fail(self):
            inst = self.dummy()  # deliberately unused local
            self.assertTrue(False)
    def setUp(self):
        # Shared runner inputs; individual tests tweak runner_kwargs
        # (e.g. buffer=True) before calling _test_xmlrunner.
        self.stream = StringIO()
        self.outdir = mkdtemp()
        self.verbosity = 0
        self.runner_kwargs = {}
        self.addCleanup(rmtree, self.outdir)
    def _test_xmlrunner(self, suite, runner=None, outdir=None):
        """Run *suite* through XMLTestRunner and assert that exactly one
        report is produced in *outdir* (a directory path or a BytesIO
        sink; defaults to self.outdir).  Returns the runner used.
        """
        if outdir is None:
            outdir = self.outdir
        stream = self.stream
        verbosity = self.verbosity
        runner_kwargs = self.runner_kwargs
        if runner is None:
            runner = xmlrunner.XMLTestRunner(
                stream=stream, output=outdir, verbosity=verbosity,
                **runner_kwargs)
        # Report destination must start empty...
        if isinstance(outdir, BytesIO):
            self.assertFalse(outdir.getvalue())
        else:
            self.assertEqual(0, len(glob(os.path.join(outdir, '*xml'))))
        runner.run(suite)
        # ...and contain exactly one report afterwards.
        if isinstance(outdir, BytesIO):
            self.assertTrue(outdir.getvalue())
        else:
            self.assertEqual(1, len(glob(os.path.join(outdir, '*xml'))))
        return runner
    def test_basic_unittest_constructs(self):
        # One test per outcome kind: pass, skip, fail, expected failure,
        # unexpected success, error -- all must fit in one report.
        suite = unittest.TestSuite()
        suite.addTest(self.DummyTest('test_pass'))
        suite.addTest(self.DummyTest('test_skip'))
        suite.addTest(self.DummyTest('test_fail'))
        suite.addTest(self.DummyTest('test_expected_failure'))
        suite.addTest(self.DummyTest('test_unexpected_success'))
        suite.addTest(self.DummyTest('test_error'))
        self._test_xmlrunner(suite)
    def test_classnames(self):
        # The classname attribute must be the dotted path of the test
        # case class (nested-class qualname variant is tolerated).
        suite = unittest.TestSuite()
        suite.addTest(self.DummyTest('test_pass'))
        suite.addTest(self.DummySubTest('test_subTest_pass'))
        outdir = BytesIO()
        stream = StringIO()
        runner = xmlrunner.XMLTestRunner(
            stream=stream, output=outdir, verbosity=0)
        runner.run(suite)
        outdir.seek(0)
        output = outdir.read()
        # Strip volatile attributes so the regexes below stay stable.
        output = _strip_xml(output, {
            '//testsuite': (),
            '//testcase': ('classname', 'name'),
            '//failure': ('message',),
        })
        self.assertRegex(
            output,
            r'classname="tests\.testsuite\.(XMLTestRunnerTestCase\.)?'
            r'DummyTest" name="test_pass"'.encode('utf8'),
        )
        self.assertRegex(
            output,
            r'classname="tests\.testsuite\.(XMLTestRunnerTestCase\.)?'
            r'DummySubTest" name="test_subTest_pass"'.encode('utf8'),
        )
    def test_expected_failure(self):
        # An expected failure must not produce a <failure> element.
        suite = unittest.TestSuite()
        suite.addTest(self.DummyTest('test_expected_failure'))
        outdir = BytesIO()
        self._test_xmlrunner(suite, outdir=outdir)
        # NOTE(review): the statement below is corrupted in this copy of
        # the file (unbalanced quotes; `output` is never assigned) --
        # markup-like text was probably stripped by whatever produced
        # this dump.  Restore from upstream; preserved as-is here.
        self.assertNotIn(b'".encode('utf8'),
                         output)
    def test_xmlrunner_non_ascii_failures(self):
        # Non-ascii failure text, unbuffered runner output.
        self._xmlrunner_non_ascii_failures()
    def test_xmlrunner_non_ascii_failures_buffered_output(self):
        # Same as above, but with the --buffer option enabled.
        self._xmlrunner_non_ascii_failures(buffer=True)
    def _xmlrunner_non_ascii_failures(self, buffer=False):
        # Shared body: a failing test whose stdout and failure message
        # contain non-ascii text must survive into the report as utf-8.
        suite = unittest.TestSuite()
        suite.addTest(self.DummyTest(
            'test_non_ascii_runner_buffer_output_fail'))
        outdir = BytesIO()
        runner = xmlrunner.XMLTestRunner(
            stream=self.stream, output=outdir, verbosity=self.verbosity,
            buffer=buffer, **self.runner_kwargs)
        # allow output non-ascii letters to stdout
        orig_stdout = sys.stdout
        sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8')
        try:
            runner.run(suite)
        finally:
            # Not to be closed when TextIOWrapper is disposed.
            sys.stdout.detach()
            sys.stdout = orig_stdout
        outdir.seek(0)
        output = outdir.read()
        self.assertIn(
            u'Where is the café ?'.encode('utf8'),
            output)
        self.assertIn(
            u'The café could not be found'.encode('utf8'),
            output)
    @unittest.expectedFailure
    def test_xmlrunner_buffer_output_pass(self):
        # Known limitation (issue #59): stdout of passing tests is
        # always swallowed, hence the expectedFailure marker.
        suite = unittest.TestSuite()
        suite.addTest(self.DummyTest('test_runner_buffer_output_pass'))
        self._test_xmlrunner(suite)
        testsuite_output = self.stream.getvalue()
        # Since we are always buffering stdout/stderr
        # it is currently troublesome to print anything at all
        # and be consistent with --buffer option (issue #59)
        self.assertIn('should not be printed', testsuite_output)
        # this will be fixed when using the composite approach
        # that was under development in the rewrite branch.
    def test_xmlrunner_buffer_output_fail(self):
        # With --buffer, output from FAILING tests is kept and reported.
        suite = unittest.TestSuite()
        suite.addTest(self.DummyTest('test_runner_buffer_output_fail'))
        # --buffer option
        self.runner_kwargs['buffer'] = True
        self._test_xmlrunner(suite)
        testsuite_output = self.stream.getvalue()
        self.assertIn('should be printed', testsuite_output)
    def test_xmlrunner_output_without_buffer(self):
        # Unbuffered: test prints pass straight through to real stdout.
        suite = unittest.TestSuite()
        suite.addTest(self.DummyTest('test_output'))
        with capture_stdout_stderr() as r:
            self._test_xmlrunner(suite)
        output_from_test = r[0].getvalue()
        self.assertIn('test message', output_from_test)
    def test_xmlrunner_output_with_buffer(self):
        # Buffered: test prints are captured, not echoed to stdout.
        suite = unittest.TestSuite()
        suite.addTest(self.DummyTest('test_output'))
        # --buffer option
        self.runner_kwargs['buffer'] = True
        with capture_stdout_stderr() as r:
            self._test_xmlrunner(suite)
        output_from_test = r[0].getvalue()
        self.assertNotIn('test message', output_from_test)
    def test_xmlrunner_stdout_stderr_recovered_without_buffer(self):
        # The runner must restore the original streams after a run.
        orig_stdout = sys.stdout
        orig_stderr = sys.stderr
        suite = unittest.TestSuite()
        suite.addTest(self.DummyTest('test_pass'))
        self._test_xmlrunner(suite)
        self.assertIs(orig_stdout, sys.stdout)
        self.assertIs(orig_stderr, sys.stderr)
    def test_xmlrunner_stdout_stderr_recovered_with_buffer(self):
        # Same stream-restoration guarantee with --buffer enabled.
        orig_stdout = sys.stdout
        orig_stderr = sys.stderr
        suite = unittest.TestSuite()
        suite.addTest(self.DummyTest('test_pass'))
        # --buffer option
        self.runner_kwargs['buffer'] = True
        self._test_xmlrunner(suite)
        self.assertIs(orig_stdout, sys.stdout)
        self.assertIs(orig_stderr, sys.stderr)
        # NOTE(review): the two statements below are dead code -- the
        # suite they build is never run.  Possibly a remnant of content
        # lost in this copy of the file; confirm against upstream.
        suite = unittest.TestSuite()
        suite.addTest(self.DummyTest('test_pass'))
    @unittest.skipIf(not hasattr(unittest.TestCase, 'subTest'),
                     'unittest.TestCase.subTest not present.')
    def test_unittest_subTest_fail(self):
        # test for issue #77
        # NOTE(review): several byte-string literals and a regex below
        # are empty / malformed in this copy of the file (markup-like
        # content appears stripped; the final .index() call even passes
        # `output` as a start position).  Preserved as-is -- restore the
        # expected XML fragments from upstream before relying on this.
        outdir = BytesIO()
        runner = xmlrunner.XMLTestRunner(
            stream=self.stream, output=outdir, verbosity=self.verbosity,
            **self.runner_kwargs)
        suite = unittest.TestSuite()
        suite.addTest(self.DummySubTest('test_subTest_fail'))
        runner.run(suite)
        outdir.seek(0)
        output = outdir.read()
        output = _strip_xml(output, {
            '//testsuite': (),
            '//testcase': ('classname', 'name'),
            '//failure': ('message',),
        })
        self.assertRegex(
            output,
            br''.encode('utf8'))
        i_system_out = output.index(''.encode('utf8'))
        i_system_err = output.index(''.encode('utf8'))
        i_testcase = output.index(''.encode('utf8'), output)
        validate_junit_report('14c6e39c38408b9ed6280361484a13c6f5becca7', output)
    @unittest.skipIf(hasattr(sys, 'pypy_version_info'),
                     'skip - PyPy + lxml seems to be hanging')
    def test_xunit_plugin_transform(self):
        # The raw report validates only against the legacy schema; after
        # xunit_plugin.transform it must validate against both schema
        # revisions while keeping the test case names.
        suite = unittest.TestSuite()
        suite.addTest(self.DummyTest('test_fail'))
        suite.addTest(self.DummyTest('test_pass'))
        suite.properties = None
        outdir = BytesIO()
        runner = xmlrunner.XMLTestRunner(
            stream=self.stream, output=outdir, verbosity=self.verbosity,
            **self.runner_kwargs)
        runner.run(suite)
        outdir.seek(0)
        output = outdir.read()
        validate_junit_report('14c6e39c38408b9ed6280361484a13c6f5becca7', output)
        with self.assertRaises(etree.DocumentInvalid):
            validate_junit_report('ae25da5089d4f94ac6c4669bf736e4d416cc4665', output)
        from xmlrunner.extra.xunit_plugin import transform
        transformed = transform(output)
        validate_junit_report('14c6e39c38408b9ed6280361484a13c6f5becca7', transformed)
        validate_junit_report('ae25da5089d4f94ac6c4669bf736e4d416cc4665', transformed)
        self.assertIn('test_pass'.encode('utf8'), transformed)
        self.assertIn('test_fail'.encode('utf8'), transformed)
def test_xmlrunner_elapsed_times(self):
self.runner_kwargs['elapsed_times'] = False
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_pass'))
self._test_xmlrunner(suite)
def test_xmlrunner_resultclass(self):
class Result(_XMLTestResult):
pass
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_pass'))
self.runner_kwargs['resultclass'] = Result
self._test_xmlrunner(suite)
def test_xmlrunner_stream(self):
stream = self.stream
output = BytesIO()
runner = xmlrunner.XMLTestRunner(
stream=stream, output=output, verbosity=self.verbosity,
**self.runner_kwargs)
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_pass'))
runner.run(suite)
def test_xmlrunner_stream_empty_testsuite(self):
stream = self.stream
output = BytesIO()
runner = xmlrunner.XMLTestRunner(
stream=stream, output=output, verbosity=self.verbosity,
**self.runner_kwargs)
suite = unittest.TestSuite()
runner.run(suite)
def test_xmlrunner_output_subdir(self):
stream = self.stream
output = os.path.join(self.outdir, 'subdir')
runner = xmlrunner.XMLTestRunner(
stream=stream, output=output, verbosity=self.verbosity,
**self.runner_kwargs)
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_pass'))
runner.run(suite)
def test_xmlrunner_patched_stdout(self):
old_stdout, old_stderr = sys.stdout, sys.stderr
try:
sys.stdout, sys.stderr = StringIO(), StringIO()
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_pass'))
suite.properties = dict(key='value')
self._test_xmlrunner(suite)
finally:
sys.stdout, sys.stderr = old_stdout, old_stderr
def test_opaque_decorator(self):
suite = unittest.TestSuite()
suite.addTest(self.DecoratedUnitTest('test_pass'))
self._test_xmlrunner(suite)
testsuite_output = self.stream.getvalue()
self.assertNotIn('IOError:', testsuite_output)
def test_xmlrunner_error_in_call(self):
suite = unittest.TestSuite()
suite.addTest(self.DummyErrorInCallTest('test_pass'))
self._test_xmlrunner(suite)
testsuite_output = self.stream.getvalue()
self.assertIn('Exception: Massive fail', testsuite_output)
    @unittest.skipIf(not hasattr(sys, 'getrefcount'),
                     'skip - PyPy does not have sys.getrefcount.')
    @unittest.skipIf((3, 0) <= sys.version_info < (3, 4),
                     'skip - test not garbage collected. '
                     'https://bugs.python.org/issue11798.')
    def test_xmlrunner_hold_traceback(self):
        """The result must not keep references to tests via held tracebacks."""
        suite = unittest.TestSuite()
        suite.addTest(self.DummyRefCountTest('test_fail'))
        countBeforeTest = sys.getrefcount(self.DummyRefCountTest.dummy)
        runner = self._test_xmlrunner(suite)
        countAfterTest = sys.getrefcount(self.DummyRefCountTest.dummy)
        # Equal refcounts mean the failure traceback was released after the run.
        self.assertEqual(countBeforeTest, countAfterTest)
    class StderrXMLTestRunner(xmlrunner.XMLTestRunner):
        """
        XMLTestRunner that writes to whatever sys.stderr is at creation time.

        XMLTestRunner's default stream is resolved when the default is
        evaluated, so a later replacement of sys.stderr (e.g. by
        capture_stdout_stderr()) would not be picked up.  This subclass
        resolves sys.stderr lazily in __init__, so output goes to the
        replaced stream.
        """
        def __init__(self, **kwargs):
            super(XMLTestRunnerTestCase.StderrXMLTestRunner, self).__init__(
                stream=sys.stderr,
                **kwargs
            )
def test_test_program_succeed_with_buffer(self):
with capture_stdout_stderr() as r:
unittest.TestProgram(
module=self.__class__.__module__,
testRunner=self.StderrXMLTestRunner,
argv=[
sys.argv[0],
'-b',
'XMLTestRunnerTestCase.DummyTest.test_runner_buffer_output_pass',
],
exit=False,
)
self.assertNotIn('should not be printed', r[0].getvalue())
self.assertNotIn('should not be printed', r[1].getvalue())
def test_test_program_succeed_wo_buffer(self):
with capture_stdout_stderr() as r:
unittest.TestProgram(
module=self.__class__.__module__,
testRunner=self.StderrXMLTestRunner,
argv=[
sys.argv[0],
'XMLTestRunnerTestCase.DummyTest.test_runner_buffer_output_pass',
],
exit=False,
)
self.assertIn('should not be printed', r[0].getvalue())
self.assertNotIn('should not be printed', r[1].getvalue())
def test_test_program_fail_with_buffer(self):
with capture_stdout_stderr() as r:
unittest.TestProgram(
module=self.__class__.__module__,
testRunner=self.StderrXMLTestRunner,
argv=[
sys.argv[0],
'-b',
'XMLTestRunnerTestCase.DummyTest.test_runner_buffer_output_fail',
],
exit=False,
)
self.assertNotIn('should be printed', r[0].getvalue())
self.assertIn('should be printed', r[1].getvalue())
def test_test_program_fail_wo_buffer(self):
with capture_stdout_stderr() as r:
unittest.TestProgram(
module=self.__class__.__module__,
testRunner=self.StderrXMLTestRunner,
argv=[
sys.argv[0],
'XMLTestRunnerTestCase.DummyTest.test_runner_buffer_output_fail',
],
exit=False,
)
self.assertIn('should be printed', r[0].getvalue())
self.assertNotIn('should be printed', r[1].getvalue())
def test_partialmethod(self):
from functools import partialmethod
def test_partialmethod(test):
pass
class TestWithPartialmethod(unittest.TestCase):
pass
setattr(
TestWithPartialmethod,
'test_partialmethod',
partialmethod(test_partialmethod),
)
suite = unittest.TestSuite()
suite.addTest(TestWithPartialmethod('test_partialmethod'))
self._test_xmlrunner(suite)
class DuplicateWriterTestCase(unittest.TestCase):
    """Exercise _DuplicateWriter against a real file and a StringIO."""

    def setUp(self):
        fd, self.file = mkstemp()
        self.fh = os.fdopen(fd, 'w')
        self.buffer = StringIO()
        self.writer = _DuplicateWriter(self.fh, self.buffer)

    def tearDown(self):
        self.buffer.close()
        self.fh.close()
        os.unlink(self.file)

    def getFirstContent(self):
        with open(self.file, 'r') as f:
            return f.read()

    def getSecondContent(self):
        return self.buffer.getvalue()

    def test_flush(self):
        self.writer.write('foobarbaz')
        self.writer.flush()
        self.assertEqual(self.getFirstContent(), self.getSecondContent())

    def test_writable(self):
        self.assertTrue(self.writer.writable())

    def test_writelines(self):
        lines = ['foo\n', 'bar\n', 'baz\n']
        self.writer.writelines(lines)
        self.writer.flush()
        self.assertEqual(self.getFirstContent(), self.getSecondContent())

    def test_write(self):
        # try long buffer (1M)
        payload = 'x' * (1024 * 1024)
        wrote = self.writer.write(payload)
        self.writer.flush()
        self.assertEqual(self.getFirstContent(), self.getSecondContent())
        self.assertEqual(wrote, len(self.getSecondContent()))
class XMLProgramTestCase(unittest.TestCase):
    """Check XMLTestProgram's command-line argument handling via mocks."""

    @mock.patch('sys.argv', ['xmlrunner', '-o', 'flaf'])
    @mock.patch('xmlrunner.runner.XMLTestRunner')
    @mock.patch('sys.exit')
    def test_xmlrunner_output(self, exiter, testrunner):
        xmlrunner.runner.XMLTestProgram()
        expected = dict(
            buffer=mock.ANY,
            failfast=mock.ANY,
            verbosity=mock.ANY,
            warnings=mock.ANY,
            output='flaf',
        )
        if sys.version_info[:2] > (3, 4):
            expected['tb_locals'] = mock.ANY
        testrunner.assert_called_once_with(**expected)
        exiter.assert_called_once_with(False)

    @mock.patch('sys.argv', ['xmlrunner', '--output-file', 'test.xml'])
    @mock.patch('xmlrunner.runner.open')
    @mock.patch('xmlrunner.runner.XMLTestRunner')
    @mock.patch('sys.exit')
    def test_xmlrunner_output_file(self, exiter, testrunner, opener):
        xmlrunner.runner.XMLTestProgram()
        opener.assert_called_once_with('test.xml', 'wb')
        open_file = opener()
        open_file.close.assert_called_with()
        expected = dict(
            buffer=mock.ANY,
            failfast=mock.ANY,
            verbosity=mock.ANY,
            warnings=mock.ANY,
            output=open_file,
        )
        if sys.version_info[:2] > (3, 4):
            expected['tb_locals'] = mock.ANY
        testrunner.assert_called_once_with(**expected)
        exiter.assert_called_once_with(False)

    @mock.patch('sys.argv', ['xmlrunner', '--outsuffix', ''])
    @mock.patch('xmlrunner.runner.open')
    @mock.patch('xmlrunner.runner.XMLTestRunner')
    @mock.patch('sys.exit')
    def test_xmlrunner_outsuffix(self, exiter, testrunner, opener):
        xmlrunner.runner.XMLTestProgram()
        expected = dict(
            buffer=mock.ANY,
            failfast=mock.ANY,
            verbosity=mock.ANY,
            warnings=mock.ANY,
            outsuffix='',
        )
        if sys.version_info[:2] > (3, 4):
            expected['tb_locals'] = mock.ANY
        testrunner.assert_called_once_with(**expected)
        exiter.assert_called_once_with(False)
class ResolveFilenameTestCase(unittest.TestCase):
    """Behaviour of resolve_filename for inside/outside/error cases."""

    @mock.patch('os.path.relpath')
    def test_resolve_filename_relative(self, relpath):
        # Paths inside the current directory become relative.
        relpath.return_value = 'somefile.py'
        self.assertEqual(resolve_filename('/path/to/somefile.py'), 'somefile.py')

    @mock.patch('os.path.relpath')
    def test_resolve_filename_outside(self, relpath):
        # Paths escaping the current directory are kept as given.
        relpath.return_value = '../../../tmp/somefile.py'
        self.assertEqual(resolve_filename('/tmp/somefile.py'), '/tmp/somefile.py')

    @mock.patch('os.path.relpath')
    def test_resolve_filename_error(self, relpath):
        # relpath may raise ValueError (e.g. different drives on Windows).
        relpath.side_effect = ValueError("ValueError: path is on mount 'C:', start on mount 'D:'")
        self.assertEqual(resolve_filename('C:\\path\\to\\somefile.py'), 'C:\\path\\to\\somefile.py')
unittest-xml-reporting-3.2.0/tests/vendor/ 0000775 0000000 0000000 00000000000 14172331455 0020644 5 ustar 00root root 0000000 0000000 unittest-xml-reporting-3.2.0/tests/vendor/jenkins/ 0000775 0000000 0000000 00000000000 14172331455 0022305 5 ustar 00root root 0000000 0000000 unittest-xml-reporting-3.2.0/tests/vendor/jenkins/xunit-plugin/ 0000775 0000000 0000000 00000000000 14172331455 0024750 5 ustar 00root root 0000000 0000000 14c6e39c38408b9ed6280361484a13c6f5becca7/ 0000775 0000000 0000000 00000000000 14172331455 0032255 5 ustar 00root root 0000000 0000000 unittest-xml-reporting-3.2.0/tests/vendor/jenkins/xunit-plugin junit-10.xsd 0000664 0000000 0000000 00000014252 14172331455 0034350 0 ustar 00root root 0000000 0000000 unittest-xml-reporting-3.2.0/tests/vendor/jenkins/xunit-plugin/14c6e39c38408b9ed6280361484a13c6f5becca7
ae25da5089d4f94ac6c4669bf736e4d416cc4665/ 0000775 0000000 0000000 00000000000 14172331455 0032354 5 ustar 00root root 0000000 0000000 unittest-xml-reporting-3.2.0/tests/vendor/jenkins/xunit-plugin junit-10.xsd 0000664 0000000 0000000 00000014633 14172331455 0034452 0 ustar 00root root 0000000 0000000 unittest-xml-reporting-3.2.0/tests/vendor/jenkins/xunit-plugin/ae25da5089d4f94ac6c4669bf736e4d416cc4665
unittest-xml-reporting-3.2.0/tox.ini 0000664 0000000 0000000 00000002342 14172331455 0017521 0 ustar 00root root 0000000 0000000 [pytest]
python_files = *_test.py test*.py
testpaths = tests
norecursedirs = tests/django_example
[tox]
envlist = begin,py{py3,37,38,39,310},pytest,py38-django{lts,curr},end,quality
[gh-actions]
python =
3.7: py37,pytest
3.8: begin,py38,py38-django{lts,curr},end,quality
3.9: py39
3.10: py310
[testenv]
deps =
coverage
codecov>=1.4.0
coveralls
djangolts,pytest: django~=3.2.0
djangocurr: django~=4.0.0
pytest: pytest
lxml>=3.6.0
commands =
coverage run --append setup.py test
coverage report --omit='.tox/*'
python -m xmlrunner discover -p test_xmlrunner_output
codecov -e TOXENV
-coveralls
passenv = CI TRAVIS_BUILD_ID TRAVIS TRAVIS_BRANCH TRAVIS_JOB_NUMBER TRAVIS_PULL_REQUEST TRAVIS_JOB_ID TRAVIS_REPO_SLUG TRAVIS_COMMIT CODECOV_TOKEN COVERALLS_REPO_TOKEN GITHUB_ACTION GITHUB_HEAD_REF GITHUB_REF GITHUB_REPOSITORY GITHUB_RUN_ID GITHUB_SHA
[testenv:pytest]
commands = pytest
[testenv:begin]
commands = coverage erase
[testenv:end]
commands =
coverage report
coverage html
[testenv:quality]
ignore_outcome = True
deps =
mccabe
pylint
flake8
pyroma
pep257
commands =
pylint xmlrunner tests
flake8 --max-complexity 10
pyroma .
pep257
unittest-xml-reporting-3.2.0/xmlrunner/ 0000775 0000000 0000000 00000000000 14172331455 0020237 5 ustar 00root root 0000000 0000000 unittest-xml-reporting-3.2.0/xmlrunner/__init__.py 0000664 0000000 0000000 00000000441 14172331455 0022347 0 ustar 00root root 0000000 0000000 # -*- coding: utf-8 -*-
"""
This module provides the XMLTestRunner class, which is heavily based on the
default TextTestRunner.
"""
# Allow version to be detected at runtime.
from .version import __version__
from .runner import XMLTestRunner
__all__ = ('__version__', 'XMLTestRunner')
unittest-xml-reporting-3.2.0/xmlrunner/__main__.py 0000664 0000000 0000000 00000000760 14172331455 0022334 0 ustar 00root root 0000000 0000000 """Main entry point"""
import sys
from .runner import XMLTestProgram

if sys.argv[0].endswith("__main__.py"):
    import os.path
    # We change sys.argv[0] to make help message more useful
    # use executable without path, unquoted
    # (it's just a hint anyway)
    # (if you have spaces in your executable you get what you deserve!)
    executable = os.path.basename(sys.executable)
    sys.argv[0] = executable + " -m xmlrunner"
    del os

# NOTE: unittest hides frames from modules that set __unittest = True when
# formatting tracebacks -- presumably the intent here; confirm.
__unittest = True

XMLTestProgram(module=None)
unittest-xml-reporting-3.2.0/xmlrunner/builder.py 0000664 0000000 0000000 00000020117 14172331455 0022240 0 ustar 00root root 0000000 0000000 import re
import sys
import datetime
import time
from xml.dom.minidom import Document
__all__ = ('TestXMLBuilder', 'TestXMLContext')
# see issue #74, the encoding name needs to be one of
# http://www.iana.org/assignments/character-sets/character-sets.xhtml
UTF8 = 'UTF-8'
# Workaround for Python bug #5166
# http://bugs.python.org/issue5166
_char_tail = ''
if sys.maxunicode > 0x10000:
_char_tail = (u'%s-%s') % (
chr(0x10000),
chr(min(sys.maxunicode, 0x10FFFF))
)
_nontext_sub = re.compile(
r'[^\x09\x0A\x0D\x20-\uD7FF\uE000-\uFFFD%s]' % _char_tail,
re.U
).sub
def replace_nontext(text, replacement=u'\uFFFD'):
return _nontext_sub(replacement, text)
class TestXMLContext(object):
    """Composition helper for one level of the JUnit XML hierarchy.

    A report nests 'testsuites' > 'testsuite' > 'testcase' elements; each
    context wraps one such element, keeps its outcome counters and measures
    how long the element took between begin() and end().
    """

    # Allowed keys for self.counters
    _allowed_counters = ('tests', 'errors', 'failures', 'skipped',)

    def __init__(self, xml_doc, parent_context=None):
        """Creates a new instance of a root or nested context (depending whether
        `parent_context` is provided or not).

        xml_doc -- the xml.dom.minidom Document used to create elements.
        """
        self.xml_doc = xml_doc
        self.parent = parent_context
        self._start_time_m = 0   # monotonic start, set by begin()
        self._stop_time_m = 0    # monotonic stop, set by end()
        self._stop_time = 0      # wall-clock stop, used for the timestamp
        self.counters = {}

    def element_tag(self):
        """Returns the name of the tag represented by this context.
        """
        return self.element.tagName

    def begin(self, tag, name):
        """Begins the creation of this context in the XML document by creating
        an empty tag with the given name attribute.
        """
        self.element = self.xml_doc.createElement(tag)
        self.element.setAttribute('name', replace_nontext(name))
        # BUG FIX: the start time must be stored in _start_time_m -- the
        # attribute elapsed_time() reads.  Storing it in an unrelated
        # _start_time attribute left elapsed times measured from 0.
        self._start_time_m = time.monotonic()

    def end(self):
        """Closes this context (started with a call to `begin`) and creates an
        attribute for each counter and another for the elapsed time.
        """
        # time.monotonic is reliable for measuring differences, not affected by NTP
        self._stop_time_m = time.monotonic()
        # time.time is used for reference point
        self._stop_time = time.time()
        self.element.setAttribute('time', self.elapsed_time())
        self.element.setAttribute('timestamp', self.timestamp())
        self._set_result_counters()
        return self.element

    def _set_result_counters(self):
        """Sets an attribute in this context's tag for each counter considering
        what's valid for each tag name.
        """
        tag = self.element_tag()

        for counter_name in TestXMLContext._allowed_counters:
            # 'skipped' is only valid on <testsuite>; the other counters are
            # valid on both <testsuites> and <testsuite>.
            if counter_name == 'skipped':
                valid_counter_for_element = (
                    tag == 'testsuite'
                )
            else:
                valid_counter_for_element = (
                    tag in ('testsuites', 'testsuite')
                )

            if valid_counter_for_element:
                value = str(
                    self.counters.get(counter_name, 0)
                )
                self.element.setAttribute(counter_name, value)

    def increment_counter(self, counter_name):
        """Increments a counter named by `counter_name`, which can be any one
        defined in `_allowed_counters`; unknown names are ignored.
        """
        if counter_name in TestXMLContext._allowed_counters:
            self.counters[counter_name] = \
                self.counters.get(counter_name, 0) + 1

    def elapsed_time(self):
        """Returns the time the context took to run between the calls to
        `begin()` and `end()`, in seconds, formatted with millisecond
        precision.
        """
        return format(self._stop_time_m - self._start_time_m, '.3f')

    def timestamp(self):
        """Returns the time the context ended as ISO-8601-formatted timestamp.
        """
        return datetime.datetime.fromtimestamp(self._stop_time).replace(microsecond=0).isoformat()
class TestXMLBuilder(object):
    """This class encapsulates most rules needed to create a XML test report
    behind a simple interface.
    """

    def __init__(self):
        """Creates a new instance.
        """
        self._xml_doc = Document()
        self._current_context = None

    def current_context(self):
        """Returns the current context.
        """
        return self._current_context

    def begin_context(self, tag, name):
        """Begins a new context in the XML test report, which usually is defined
        by one on the tags 'testsuites', 'testsuite', or 'testcase'.
        """
        context = TestXMLContext(self._xml_doc, self._current_context)
        context.begin(tag, name)
        self._current_context = context

    def context_tag(self):
        """Returns the tag represented by the current context.
        """
        return self._current_context.element_tag()

    def _create_cdata_section(self, content):
        """Returns a new CDATA section containing the string defined in
        `content`, with invalid XML characters replaced.
        """
        filtered_content = replace_nontext(content)
        return self._xml_doc.createCDATASection(filtered_content)

    def append_cdata_section(self, tag, content):
        """Appends an element named `tag` whose text is `content` wrapped in
        CDATA sections, into the element of the current context.  Returns
        the created element.
        """
        element = self._xml_doc.createElement(tag)

        # A CDATA section must not contain the ']]>' terminator, so split the
        # content at each occurrence and emit consecutive CDATA sections.
        pos = content.find(']]>')
        while pos >= 0:
            tmp = content[0:pos+2]
            element.appendChild(self._create_cdata_section(tmp))
            content = content[pos+2:]
            pos = content.find(']]>')

        element.appendChild(self._create_cdata_section(content))
        self._append_child(element)

        return element

    def append(self, tag, content, **kwargs):
        """Appends an element named `tag` with the keyword arguments as
        attributes and `content` (if non-empty) as a CDATA section, into the
        element of the current context.  Returns the created element.
        """
        element = self._xml_doc.createElement(tag)

        for key, value in kwargs.items():
            filtered_value = replace_nontext(str(value))
            element.setAttribute(key, filtered_value)

        if content:
            element.appendChild(self._create_cdata_section(content))

        self._append_child(element)
        return element

    def _append_child(self, element):
        """Appends a tag object represented by `element` into the tag
        represented by the current context (or the document root when there
        is no current context).
        """
        if self._current_context:
            self._current_context.element.appendChild(element)
        else:
            self._xml_doc.appendChild(element)

    def increment_counter(self, counter_name):
        """Increments a counter in the current context and their parents.
        """
        context = self._current_context

        while context:
            context.increment_counter(counter_name)
            context = context.parent

    def end_context(self):
        """Ends the current context and sets the current context as being the
        previous one (if it exists). Also, when a context ends, its tag is
        appended in the proper place inside the document.
        """
        if not self._current_context:
            return False

        element = self._current_context.end()
        self._current_context = self._current_context.parent
        self._append_child(element)

        return True

    def finish(self):
        """Ends all open contexts and returns a pretty printed version of the
        generated XML document.
        """
        while self.end_context():
            pass
        return self._xml_doc.toprettyxml(indent='\t', encoding=UTF8)
unittest-xml-reporting-3.2.0/xmlrunner/extra/ 0000775 0000000 0000000 00000000000 14172331455 0021362 5 ustar 00root root 0000000 0000000 unittest-xml-reporting-3.2.0/xmlrunner/extra/__init__.py 0000664 0000000 0000000 00000000000 14172331455 0023461 0 ustar 00root root 0000000 0000000 unittest-xml-reporting-3.2.0/xmlrunner/extra/djangotestrunner.py 0000664 0000000 0000000 00000004170 14172331455 0025332 0 ustar 00root root 0000000 0000000 # -*- coding: utf-8 -*-
"""
Custom Django test runner that runs the tests using the
XMLTestRunner class.
This script shows how to use the XMLTestRunner in a Django project. To learn
how to configure a custom TestRunner in a Django project, please read the
Django docs website.
"""
import os
import xmlrunner
import os.path
from django.conf import settings
from django.test.runner import DiscoverRunner
class XMLTestRunner(DiscoverRunner):
    """Django test runner that delegates test execution to
    xmlrunner.XMLTestRunner, configured via TEST_OUTPUT_* settings.
    """

    test_runner = xmlrunner.XMLTestRunner

    def get_resultclass(self):
        # Django provides `DebugSQLTextTestResult` if `debug_sql` argument is True
        # To use `xmlrunner.result._XMLTestResult` we suppress default behavior
        return None

    def get_test_runner_kwargs(self):
        """Build the kwargs for the XML runner from Django settings."""
        # We use separate verbosity setting for our runner
        verbosity = getattr(settings, 'TEST_OUTPUT_VERBOSE', 1)
        if isinstance(verbosity, bool):
            # Legacy boolean setting: True -> 2 (verbose), False -> 1.
            verbosity = (1, 2)[verbosity]
        verbosity = verbosity  # not self.verbosity

        output_dir = getattr(settings, 'TEST_OUTPUT_DIR', '.')
        single_file = getattr(settings, 'TEST_OUTPUT_FILE_NAME', None)

        # For single file case we are able to create file here
        # But for multiple files case files will be created inside runner/results
        if single_file is None:  # output will be a path (folder)
            output = output_dir
        else:  # output will be a stream
            if not os.path.exists(output_dir):
                os.makedirs(output_dir)
            file_path = os.path.join(output_dir, single_file)
            output = open(file_path, 'wb')

        return dict(
            verbosity=verbosity,
            descriptions=getattr(settings, 'TEST_OUTPUT_DESCRIPTIONS', False),
            failfast=self.failfast,
            resultclass=self.get_resultclass(),
            output=output,
        )

    def run_suite(self, suite, **kwargs):
        """Run *suite* with the XML runner, closing any opened report file."""
        runner_kwargs = self.get_test_runner_kwargs()
        runner = self.test_runner(**runner_kwargs)
        results = runner.run(suite)
        if hasattr(runner_kwargs['output'], 'close'):
            runner_kwargs['output'].close()
        return results
unittest-xml-reporting-3.2.0/xmlrunner/extra/xunit_plugin.py 0000664 0000000 0000000 00000001434 14172331455 0024463 0 ustar 00root root 0000000 0000000 import io
import lxml.etree as etree
# NOTE(review): the XSLT stylesheet body appears to be missing here (empty
# byte string) -- likely lost in an export.  An empty document is not valid
# XML, so this module-level statement would fail at import time; restore the
# stylesheet from upstream.
TRANSFORM = etree.XSLT(etree.XML(b'''\
'''))
def transform(xml_data):
    """Apply the module-level XSLT TRANSFORM to *xml_data*; return bytes."""
    document = etree.XML(xml_data)
    transformed = TRANSFORM(document)
    sink = io.BytesIO()
    transformed.write(sink)
    return sink.getvalue()
unittest-xml-reporting-3.2.0/xmlrunner/result.py 0000664 0000000 0000000 00000055544 14172331455 0022144 0 ustar 00root root 0000000 0000000
import inspect
import io
import os
import sys
import datetime
import traceback
import re
from os import path
from io import StringIO
# use direct import to bypass freezegun
from time import time
from .unittest import TestResult, TextTestResult, failfast
# Matches invalid XML1.0 unicode characters, like control characters:
# http://www.w3.org/TR/2006/REC-xml-20060816/#charsets
# http://stackoverflow.com/questions/1707890/fast-way-to-filter-illegal-xml-unicode-chars-in-python
_illegal_unichrs = [
    (0x00, 0x08), (0x0B, 0x0C), (0x0E, 0x1F),
    (0x7F, 0x84), (0x86, 0x9F),
    (0xFDD0, 0xFDDF), (0xFFFE, 0xFFFF),
]
if sys.maxunicode >= 0x10000:  # not narrow build
    _illegal_unichrs.extend([
        (0x1FFFE, 0x1FFFF), (0x2FFFE, 0x2FFFF),
        (0x3FFFE, 0x3FFFF), (0x4FFFE, 0x4FFFF),
        (0x5FFFE, 0x5FFFF), (0x6FFFE, 0x6FFFF),
        (0x7FFFE, 0x7FFFF), (0x8FFFE, 0x8FFFF),
        (0x9FFFE, 0x9FFFF), (0xAFFFE, 0xAFFFF),
        (0xBFFFE, 0xBFFFF), (0xCFFFE, 0xCFFFF),
        (0xDFFFE, 0xDFFFF), (0xEFFFE, 0xEFFFF),
        (0xFFFFE, 0xFFFFF), (0x10FFFE, 0x10FFFF),
    ])

_illegal_ranges = [
    "%s-%s" % (chr(low), chr(high))
    for (low, high) in _illegal_unichrs
]

# Matches every character that is not valid XML 1.0 text.
INVALID_XML_1_0_UNICODE_RE = re.compile(u'[%s]' % u''.join(_illegal_ranges))

STDOUT_LINE = '\nStdout:\n%s'
STDERR_LINE = '\nStderr:\n%s'


def safe_unicode(data, encoding='utf8'):
    """Return a unicode string containing only valid XML characters.

    encoding - if data is a byte string it is first decoded to unicode
    using this encoding (undecodable bytes are replaced); any other
    object is converted with ``str()``.
    """
    if isinstance(data, bytes):
        # BUG FIX: honour the documented contract -- decode byte strings
        # instead of stringifying them into "b'...'" literals.
        data = data.decode(encoding, 'replace')
    else:
        data = str(data)
    return INVALID_XML_1_0_UNICODE_RE.sub('', data)
def testcase_name(test_method):
testcase = type(test_method)
# Ignore module name if it is '__main__'
module = testcase.__module__ + '.'
if module == '__main__.':
module = ''
result = module + testcase.__name__
return result
def resolve_filename(filename):
    """Return *filename* relative to the current directory when it lies
    inside it; otherwise return it unchanged.
    """
    # Try to make filename relative to current directory.
    try:
        rel_filename = os.path.relpath(filename)
    except ValueError:
        # e.g. on Windows: the path and the cwd are on different drives.
        return filename
    # BUG FIX: a path escaping the cwd starts with '..' plus the platform
    # separator -- '../' on POSIX but '..\\' on Windows; the old literal
    # '../' check never matched on Windows.  If not inside folder, keep as-is.
    return filename if rel_filename.startswith('..' + os.sep) else rel_filename
class _DuplicateWriter(io.TextIOBase):
"""
Duplicate output from the first handle to the second handle
The second handle is expected to be a StringIO and not to block.
"""
def __init__(self, first, second):
super(_DuplicateWriter, self).__init__()
self._first = first
self._second = second
def flush(self):
self._first.flush()
self._second.flush()
def writable(self):
return True
def getvalue(self):
return self._second.getvalue()
def writelines(self, lines):
self._first.writelines(lines)
self._second.writelines(lines)
def write(self, b):
if isinstance(self._first, io.TextIOBase):
wrote = self._first.write(b)
if wrote is not None:
# expected to always succeed to write
self._second.write(b[:wrote])
return wrote
else:
# file-like object that doesn't return wrote bytes.
self._first.write(b)
self._second.write(b)
return len(b)
class _TestInfo(object):
    """
    This class keeps useful information about the execution of a
    test method.
    """

    # Possible test outcomes
    (SUCCESS, FAILURE, ERROR, SKIP) = range(4)

    # Report element name for each outcome (SUCCESS produces no child element).
    OUTCOME_ELEMENTS = {
        SUCCESS: None,
        FAILURE: 'failure',
        ERROR: 'error',
        SKIP: 'skipped',
    }

    def __init__(self, test_result, test_method, outcome=SUCCESS, err=None, subTest=None, filename=None, lineno=None, doc=None):
        """Record outcome, captured output and description of one test.

        err -- for FAILURE/ERROR an exc_info-style triple; for SKIP the
               skip reason (see addSkip in _XMLTestResult).
        subTest -- when given, id and description come from the subtest.
        """
        self.test_result = test_result
        self.outcome = outcome
        self.elapsed_time = 0
        # Placeholder; overwritten by test_finished() once the test has run.
        self.timestamp = datetime.datetime.min.replace(microsecond=0).isoformat()
        if err is not None:
            if self.outcome != _TestInfo.SKIP:
                self.test_exception_name = safe_unicode(err[0].__name__)
                self.test_exception_message = safe_unicode(err[1])
            else:
                self.test_exception_message = safe_unicode(err)

        # Output captured by the result object up to this point.
        self.stdout = test_result._stdout_data
        self.stderr = test_result._stderr_data

        self.test_description = self.test_result.getDescription(test_method)
        self.test_exception_info = (
            '' if outcome in (self.SUCCESS, self.SKIP)
            else self.test_result._exc_info_to_string(
                err, test_method)
        )

        self.test_name = testcase_name(test_method)
        self.test_id = test_method.id()
        if subTest:
            self.test_id = subTest.id()
            self.test_description = self.test_result.getDescription(subTest)
        self.filename = filename
        self.lineno = lineno
        self.doc = doc

    def id(self):
        return self.test_id

    def test_finished(self):
        """Save info that can only be calculated once a test has run.
        """
        self.elapsed_time = \
            self.test_result.stop_time - self.test_result.start_time
        timestamp = datetime.datetime.fromtimestamp(self.test_result.stop_time)
        self.timestamp = timestamp.replace(microsecond=0).isoformat()

    def get_error_info(self):
        """
        Return a text representation of an exception thrown by a test
        method.
        """
        return self.test_exception_info
class _XMLTestResult(TextTestResult):
"""
A test result class that can express test results in a XML report.
Used by XMLTestRunner.
"""
    def __init__(self, stream=sys.stderr, descriptions=1, verbosity=1,
                 elapsed_times=True, properties=None, infoclass=None):
        """Set up result storage, output-capture buffers and options.

        elapsed_times -- when False, per-test times are reported as zero.
        properties -- optional dict of junit testsuite properties.
        infoclass -- class used to record each test (defaults to _TestInfo).
        """
        TextTestResult.__init__(self, stream, descriptions, verbosity)
        self._stdout_data = None
        self._stderr_data = None
        self._stdout_capture = StringIO()
        self.__stdout_saved = None
        self._stderr_capture = StringIO()
        self.__stderr_saved = None
        self.successes = []
        self.callback = None
        self.elapsed_times = elapsed_times
        self.properties = properties  # junit testsuite properties
        # Source location of the currently running test, set by startTest().
        self.filename = None
        self.lineno = None
        self.doc = None
        if infoclass is None:
            self.infoclass = _TestInfo
        else:
            self.infoclass = infoclass
    def _prepare_callback(self, test_info, target_list, verbose_str,
                          short_str):
        """
        Appends a `infoclass` to the given target list and sets a callback
        method to be called by stopTest method.
        """
        # Attach the source location captured in startTest to the record.
        test_info.filename = self.filename
        test_info.lineno = self.lineno
        test_info.doc = self.doc
        target_list.append(test_info)

        def callback():
            """Prints the test method outcome to the stream, as well as
            the elapsed time.
            """
            test_info.test_finished()

            # Ignore the elapsed times for a more reliable unit testing
            if not self.elapsed_times:
                self.start_time = self.stop_time = 0

            if self.showAll:
                self.stream.writeln(
                    '%s (%.3fs)' % (verbose_str, test_info.elapsed_time)
                )
            elif self.dots:
                self.stream.write(short_str)
                self.stream.flush()

        self.callback = callback
    def startTest(self, test):
        """
        Called before each test method executes; records the start time and
        the test's source location (file, line, docstring) for the report.
        """
        self.start_time = time()
        TestResult.startTest(self, test)

        try:
            if getattr(test, '_dt_test', None) is not None:
                # doctest.DocTestCase
                self.filename = test._dt_test.filename
                self.lineno = test._dt_test.lineno
            else:
                # regular unittest.TestCase?
                test_method = getattr(test, test._testMethodName)
                test_class = type(test)
                # Note: inspect can get confused with decorators, so use class.
                self.filename = inspect.getsourcefile(test_class)
                # Handle partial and partialmethod objects.
                test_method = getattr(test_method, 'func', test_method)
                _, self.lineno = inspect.getsourcelines(test_method)

                self.doc = test_method.__doc__
        except (AttributeError, IOError, TypeError):
            # issue #188, #189, #195
            # some frameworks can make test method opaque.
            pass

        if self.showAll:
            self.stream.write(' ' + self.getDescription(test))
            self.stream.write(" ... ")
            self.stream.flush()
    def _setupStdout(self):
        """
        Capture stdout / stderr by replacing sys.stdout / sys.stderr
        """
        super(_XMLTestResult, self)._setupStdout()
        # Tee output: it is captured for the XML report while still being
        # written to the original stream via _DuplicateWriter.
        self.__stdout_saved = sys.stdout
        sys.stdout = _DuplicateWriter(sys.stdout, self._stdout_capture)
        self.__stderr_saved = sys.stderr
        sys.stderr = _DuplicateWriter(sys.stderr, self._stderr_capture)
    def _restoreStdout(self):
        """
        Stop capturing stdout / stderr and recover sys.stdout / sys.stderr
        """
        if self.__stdout_saved:
            sys.stdout = self.__stdout_saved
            self.__stdout_saved = None
        if self.__stderr_saved:
            sys.stderr = self.__stderr_saved
            self.__stderr_saved = None
        # Reset the capture buffers for the next test.
        self._stdout_capture.seek(0)
        self._stdout_capture.truncate()
        self._stderr_capture.seek(0)
        self._stderr_capture.truncate()
        super(_XMLTestResult, self)._restoreStdout()
    def _save_output_data(self):
        # Snapshot the captured stdout/stderr so the _TestInfo record being
        # created can pick them up before the buffers are reset.
        self._stdout_data = self._stdout_capture.getvalue()
        self._stderr_data = self._stderr_capture.getvalue()
def stopTest(self, test):
"""
Called after execute each test method.
"""
self._save_output_data()
# self._stdout_data = sys.stdout.getvalue()
# self._stderr_data = sys.stderr.getvalue()
TextTestResult.stopTest(self, test)
self.stop_time = time()
if self.callback and callable(self.callback):
self.callback()
self.callback = None
def addSuccess(self, test):
"""
Called when a test executes successfully.
"""
self._save_output_data()
self._prepare_callback(
self.infoclass(self, test), self.successes, 'ok', '.'
)
@failfast
def addFailure(self, test, err):
"""
Called when a test method fails.
"""
self._save_output_data()
testinfo = self.infoclass(
self, test, self.infoclass.FAILURE, err)
self.failures.append((
testinfo,
self._exc_info_to_string(err, test)
))
self._prepare_callback(testinfo, [], 'FAIL', 'F')
@failfast
def addError(self, test, err):
"""
Called when a test method raises an error.
"""
self._save_output_data()
testinfo = self.infoclass(
self, test, self.infoclass.ERROR, err)
self.errors.append((
testinfo,
self._exc_info_to_string(err, test)
))
self._prepare_callback(testinfo, [], 'ERROR', 'E')
def addSubTest(self, testcase, test, err):
"""
Called when a subTest method raises an error.
"""
if err is not None:
errorText = None
errorValue = None
errorList = None
if issubclass(err[0], test.failureException):
errorText = 'FAIL'
errorValue = self.infoclass.FAILURE
errorList = self.failures
else:
errorText = 'ERROR'
errorValue = self.infoclass.ERROR
errorList = self.errors
self._save_output_data()
testinfo = self.infoclass(
self, testcase, errorValue, err, subTest=test)
errorList.append((
testinfo,
self._exc_info_to_string(err, testcase)
))
self._prepare_callback(testinfo, [], errorText, errorText[0])
def addSkip(self, test, reason):
"""
Called when a test method was skipped.
"""
self._save_output_data()
testinfo = self.infoclass(
self, test, self.infoclass.SKIP, reason)
testinfo.test_exception_name = 'skip'
testinfo.test_exception_message = reason
self.skipped.append((testinfo, reason))
self._prepare_callback(testinfo, [], 'skip', 's')
def addExpectedFailure(self, test, err):
    """
    Record an expected failure; reported with the SKIP outcome in the XML
    but tagged 'XFAIL' so it is distinguishable from an ordinary skip.
    """
    self._save_output_data()
    info = self.infoclass(self, test, self.infoclass.SKIP, err)
    info.test_exception_name = 'XFAIL'
    info.test_exception_message = 'expected failure: {}'.format(
        info.test_exception_message)
    self.expectedFailures.append((info, self._exc_info_to_string(err, test)))
    self._prepare_callback(info, [], 'expected failure', 'x')
@failfast
def addUnexpectedSuccess(self, test):
    """
    Record a test that was marked as an expected failure but passed;
    treated as an error in the report.
    """
    self._save_output_data()
    # Outcome is assigned after construction: the constructor path for a
    # non-success outcome expects exception info, which does not exist here.
    info = self.infoclass(self, test)
    info.outcome = self.infoclass.ERROR
    # Provide the exception-style fields the error reporting relies on.
    info.test_exception_name = 'UnexpectedSuccess'
    info.test_exception_message = ('Unexpected success: This test was marked as expected failure but passed, '
                                   'please review it')
    self.unexpectedSuccesses.append((info, 'unexpected success'))
    self._prepare_callback(info, [], 'unexpected success', 'u')
def printErrorList(self, flavour, errors):
    """
    Write one separator-framed section to the stream per FAIL/ERROR entry.
    """
    for info, dummy in errors:
        self.stream.writeln(self.separator1)
        header = '%s [%.3fs]: %s' % (
            flavour, info.elapsed_time, info.test_description)
        self.stream.writeln(header)
        self.stream.writeln(self.separator2)
        self.stream.writeln('%s' % info.get_error_info())
        self.stream.flush()
def _get_info_by_testcase(self):
"""
Organizes test results by TestCase module. This information is
used during the report generation, where a XML report will be created
for each TestCase.
"""
tests_by_testcase = {}
for tests in (self.successes, self.failures, self.errors,
self.skipped, self.expectedFailures, self.unexpectedSuccesses):
for test_info in tests:
if isinstance(test_info, tuple):
# This is a skipped, error or a failure test case
test_info = test_info[0]
testcase_name = test_info.test_name
if testcase_name not in tests_by_testcase:
tests_by_testcase[testcase_name] = []
tests_by_testcase[testcase_name].append(test_info)
return tests_by_testcase
def _report_testsuite_properties(xml_testsuite, xml_document, properties):
if properties:
xml_properties = xml_document.createElement('properties')
xml_testsuite.appendChild(xml_properties)
for key, value in properties.items():
prop = xml_document.createElement('property')
prop.setAttribute('name', str(key))
prop.setAttribute('value', str(value))
xml_properties.appendChild(prop)
_report_testsuite_properties = staticmethod(_report_testsuite_properties)
def _report_testsuite(suite_name, tests, xml_document, parentElement,
properties):
"""
Appends the testsuite section to the XML document.
"""
testsuite = xml_document.createElement('testsuite')
parentElement.appendChild(testsuite)
module_name = suite_name.rpartition('.')[0]
file_name = module_name.replace('.', '/') + '.py'
testsuite.setAttribute('name', suite_name)
testsuite.setAttribute('tests', str(len(tests)))
testsuite.setAttribute('file', file_name)
testsuite.setAttribute(
'time', '%.3f' % sum(map(lambda e: e.elapsed_time, tests))
)
if tests:
testsuite.setAttribute(
'timestamp', max(map(lambda e: e.timestamp, tests))
)
failures = filter(lambda e: e.outcome == e.FAILURE, tests)
testsuite.setAttribute('failures', str(len(list(failures))))
errors = filter(lambda e: e.outcome == e.ERROR, tests)
testsuite.setAttribute('errors', str(len(list(errors))))
skips = filter(lambda e: e.outcome == _TestInfo.SKIP, tests)
testsuite.setAttribute('skipped', str(len(list(skips))))
_XMLTestResult._report_testsuite_properties(
testsuite, xml_document, properties)
for test in tests:
_XMLTestResult._report_testcase(test, testsuite, xml_document)
return testsuite
_report_testsuite = staticmethod(_report_testsuite)
def _test_method_name(test_id):
"""
Returns the test method name.
"""
# Trick subtest referencing objects
subtest_parts = test_id.split(' ')
test_method_name = subtest_parts[0].split('.')[-1]
subtest_method_name = [test_method_name] + subtest_parts[1:]
return ' '.join(subtest_method_name)
_test_method_name = staticmethod(_test_method_name)
def _createCDATAsections(xmldoc, node, text):
    """
    Append ``text`` to ``node`` as CDATA, splitting at every ']]>' so the
    forbidden terminator never appears inside a single CDATA section.
    """
    remaining = safe_unicode(text)
    marker = remaining.find(']]>')
    while marker >= 0:
        # Keep ']]' in this section; the '>' starts the next one.
        node.appendChild(xmldoc.createCDATASection(remaining[:marker + 2]))
        remaining = remaining[marker + 2:]
        marker = remaining.find(']]>')
    node.appendChild(xmldoc.createCDATASection(remaining))
_createCDATAsections = staticmethod(_createCDATAsections)
def _report_testcase(test_result, xml_testsuite, xml_document):
    """
    Appends a testcase section to the XML document.

    :param test_result: test-info object describing one executed test.
    :param xml_testsuite: the <testsuite> element to append to.
    :param xml_document: minidom Document used as the element factory.
    """
    testcase = xml_document.createElement('testcase')
    xml_testsuite.appendChild(testcase)

    class_name = re.sub(r'^__main__.', '', test_result.id())

    # Trick subtest referencing objects
    class_name = class_name.split(' ')[0].rpartition('.')[0]

    testcase.setAttribute('classname', class_name)
    testcase.setAttribute(
        'name', _XMLTestResult._test_method_name(test_result.test_id)
    )
    testcase.setAttribute('time', '%.3f' % test_result.elapsed_time)
    testcase.setAttribute('timestamp', test_result.timestamp)

    if test_result.filename is not None:
        # Try to make filename relative to current directory.
        filename = resolve_filename(test_result.filename)
        testcase.setAttribute('file', filename)

    if test_result.lineno is not None:
        testcase.setAttribute('line', str(test_result.lineno))

    if test_result.doc is not None:
        comment = str(test_result.doc)
        # The use of '--' is forbidden in XML comments and minidom does not
        # escape comment text, so break up any hyphen run. (The previous
        # replace('--', '--') was a no-op and left the output ill-formed.)
        comment = comment.replace('--', '&#45;&#45;')
        testcase.appendChild(xml_document.createComment(safe_unicode(comment)))

    result_elem_name = test_result.OUTCOME_ELEMENTS[test_result.outcome]

    if result_elem_name is not None:
        result_elem = xml_document.createElement(result_elem_name)
        testcase.appendChild(result_elem)

        result_elem.setAttribute(
            'type',
            test_result.test_exception_name
        )
        result_elem.setAttribute(
            'message',
            test_result.test_exception_message
        )
        if test_result.get_error_info():
            error_info = safe_unicode(test_result.get_error_info())
            _XMLTestResult._createCDATAsections(
                xml_document, result_elem, error_info)

    if test_result.stdout:
        systemout = xml_document.createElement('system-out')
        testcase.appendChild(systemout)
        _XMLTestResult._createCDATAsections(
            xml_document, systemout, test_result.stdout)

    if test_result.stderr:
        systemout = xml_document.createElement('system-err')
        testcase.appendChild(systemout)
        _XMLTestResult._createCDATAsections(
            xml_document, systemout, test_result.stderr)
_report_testcase = staticmethod(_report_testcase)
def generate_reports(self, test_runner):
    """
    Generates the XML reports to a given XMLTestRunner object.

    When ``test_runner.output`` is a string it is treated as a directory
    and one ``TEST-<suite>.xml`` file is written per test case class;
    otherwise it is assumed to be a writable binary stream and a single
    document rooted at <testsuites> is written to it after all suites
    have been appended.
    """
    from xml.dom.minidom import Document
    all_results = self._get_info_by_testcase()

    outputHandledAsString = \
        isinstance(test_runner.output, str)

    if (outputHandledAsString and not os.path.exists(test_runner.output)):
        os.makedirs(test_runner.output)

    if not outputHandledAsString:
        # Stream mode: every suite shares one document / <testsuites> root.
        doc = Document()
        testsuite = doc.createElement('testsuites')
        doc.appendChild(testsuite)
        parentElement = testsuite

    for suite, tests in all_results.items():
        if outputHandledAsString:
            # Directory mode: a fresh document per suite (one file each).
            doc = Document()
            parentElement = doc

        suite_name = suite
        if test_runner.outsuffix:
            # not checking with 'is not None', empty means no suffix.
            suite_name = '%s-%s' % (suite, test_runner.outsuffix)

        # Build the XML file
        testsuite = _XMLTestResult._report_testsuite(
            suite_name, tests, doc, parentElement, self.properties
        )

        if outputHandledAsString:
            xml_content = doc.toprettyxml(
                indent='\t',
                encoding=test_runner.encoding
            )
            filename = path.join(
                test_runner.output,
                'TEST-%s.xml' % suite_name)
            with open(filename, 'wb') as report_file:
                report_file.write(xml_content)
            if self.showAll:
                self.stream.writeln('Generated XML report: {}'.format(filename))

    if not outputHandledAsString:
        # Assume that test_runner.output is a stream
        xml_content = doc.toprettyxml(
            indent='\t',
            encoding=test_runner.encoding
        )
        test_runner.output.write(xml_content)
def _exc_info_to_string(self, err, test):
    """Converts a sys.exc_info()-style tuple of values into a string."""
    # Thin passthrough to the unittest base implementation; kept as an
    # explicit override so it remains a stable hook point for subclasses.
    return super(_XMLTestResult, self)._exc_info_to_string(err, test)
unittest-xml-reporting-3.2.0/xmlrunner/runner.py 0000664 0000000 0000000 00000014764 14172331455 0022136 0 ustar 00root root 0000000 0000000
import argparse
import sys
import time
from .unittest import TextTestRunner, TestProgram
from .result import _XMLTestResult
# see issue #74, the encoding name needs to be one of
# http://www.iana.org/assignments/character-sets/character-sets.xhtml
# Default charset written into the generated XML declaration.
UTF8 = 'UTF-8'
class XMLTestRunner(TextTestRunner):
    """
    A test runner class that outputs the results in JUnit like XML files.
    """

    def __init__(self, output='.', outsuffix=None,
                 elapsed_times=True, encoding=UTF8,
                 resultclass=None,
                 **kwargs):
        """
        :param output: directory for the XML files, or a writable stream.
        :param outsuffix: report filename suffix; None selects a timestamp,
            '' (empty) means no suffix.
        :param elapsed_times: whether per-test elapsed times are recorded.
        :param encoding: charset name written into the XML declaration.
        :param resultclass: result class to instantiate; defaults to
            _XMLTestResult.
        """
        super().__init__(**kwargs)
        self.output = output
        self.encoding = encoding
        # None means default timestamped suffix; '' (empty) means no suffix.
        self.outsuffix = (time.strftime("%Y%m%d%H%M%S")
                          if outsuffix is None else outsuffix)
        self.elapsed_times = elapsed_times
        self.resultclass = (_XMLTestResult if resultclass is None
                            else resultclass)

    def _make_result(self):
        """
        Creates a TestResult object which will be used to store
        information about the executed tests.
        """
        # override in subclasses if necessary.
        return self.resultclass(
            self.stream, self.descriptions, self.verbosity, self.elapsed_times
        )

    def run(self, test):
        """
        Runs the given test case or test suite and generates XML reports.
        """
        result = self._make_result()
        result.failfast = self.failfast
        result.buffer = self.buffer
        if hasattr(test, 'properties'):
            # junit testsuite properties
            result.properties = test.properties

        # Print a nice header
        self.stream.writeln()
        self.stream.writeln('Running tests...')
        self.stream.writeln(result.separator2)

        # Execute tests
        started = time.monotonic()
        test(result)
        time_taken = time.monotonic() - started

        # Print results
        result.printErrors()
        self.stream.writeln(result.separator2)
        run_count = result.testsRun
        plural = "s" if run_count != 1 else ""
        self.stream.writeln("Ran %d test%s in %.3fs" % (
            run_count, plural, time_taken)
        )
        self.stream.writeln()

        # Verdict line plus per-category counters.
        infos = []
        if result.wasSuccessful():
            self.stream.write("OK")
        else:
            self.stream.write("FAILED")
            if result.failures:
                infos.append("failures={0}".format(len(result.failures)))
            if result.errors:
                infos.append("errors={0}".format(len(result.errors)))

        if result.skipped:
            infos.append("skipped={0}".format(len(result.skipped)))
        if result.expectedFailures:
            infos.append("expected failures={0}".format(
                len(result.expectedFailures)))
        if result.unexpectedSuccesses:
            infos.append("unexpected successes={0}".format(
                len(result.unexpectedSuccesses)))

        if infos:
            self.stream.writeln(" ({0})".format(", ".join(infos)))
        else:
            self.stream.write("\n")

        # Generate reports
        self.stream.writeln()
        self.stream.writeln('Generating XML reports...')
        result.generate_reports(self)
        return result
class XMLTestProgram(TestProgram):
    """
    ``unittest.TestProgram`` variant that defaults to :class:`XMLTestRunner`
    and understands the extra ``-o/--output``, ``--output-file`` and
    ``--outsuffix`` command-line options.
    """

    def __init__(self, *args, **kwargs):
        kwargs.setdefault('testRunner', XMLTestRunner)
        self.warnings = None  # python2 fix
        # Strip xmlrunner-specific options before the base class parses argv.
        self._parseKnownArgs(kwargs)
        super(XMLTestProgram, self).__init__(*args, **kwargs)

    def _parseKnownArgs(self, kwargs):
        """
        Pull ``-o/--output``, ``--output-file`` and ``--outsuffix`` out of
        ``kwargs['argv']`` (or ``sys.argv``), storing the values on ``self``
        and leaving only the remaining arguments for the base class.
        """
        argv = kwargs.get('argv')
        if argv is None:
            argv = sys.argv

        # python2 argparse fix
        parser = argparse.ArgumentParser(prog='xmlrunner')
        group = parser.add_mutually_exclusive_group()
        group.add_argument(
            '-o', '--output', metavar='DIR',
            help='Directory for storing XML reports (\'.\' default)')
        group.add_argument(
            '--output-file', metavar='FILENAME',
            help='Filename for storing XML report')
        parser.add_argument(
            '--outsuffix', metavar='STRING',
            help='Output suffix (timestamp is default)')
        namespace, argv = parser.parse_known_args(argv)
        self.output = namespace.output
        self.output_file = namespace.output_file
        self.outsuffix = namespace.outsuffix
        kwargs['argv'] = argv

    def _initArgParsers(self):
        # this code path is only called in python3 (optparse vs argparse)
        super(XMLTestProgram, self)._initArgParsers()
        for parser in (self._main_parser, self._discovery_parser):
            group = parser.add_mutually_exclusive_group()
            group.add_argument(
                '-o', '--output', metavar='DIR', nargs=1,
                help='Directory for storing XML reports (\'.\' default)')
            group.add_argument(
                '--output-file', metavar='FILENAME', nargs=1,
                help='Filename for storing XML report')
            # --outsuffix complements -o/--output-file, so it must not be in
            # the mutually exclusive group; this also matches
            # _parseKnownArgs, which registers it as a plain argument.
            parser.add_argument(
                '--outsuffix', metavar='STRING', nargs=1,
                help='Output suffix (timestamp is default)')

    def runTests(self):
        """
        Instantiate the configured runner with the collected options and
        delegate to the base implementation; the report file, when one was
        opened, is always closed afterwards.
        """
        kwargs = dict(
            verbosity=self.verbosity,
            failfast=self.failfast,
            buffer=self.buffer,
            warnings=self.warnings,
        )
        if sys.version_info[:2] > (3, 4):
            kwargs.update(tb_locals=self.tb_locals)

        output_file = None
        try:
            if self.output_file is not None:
                output_file = open(self.output_file, 'wb')
                kwargs.update(output=output_file)
            elif self.output is not None:
                kwargs.update(output=self.output)
            if self.outsuffix is not None:
                kwargs.update(outsuffix=self.outsuffix)
            self.testRunner = self.testRunner(**kwargs)
            super(XMLTestProgram, self).runTests()
        finally:
            if output_file is not None:
                output_file.close()
unittest-xml-reporting-3.2.0/xmlrunner/unittest.py 0000664 0000000 0000000 00000000556 14172331455 0022476 0 ustar 00root root 0000000 0000000
# Compatibility shim: re-export the unittest names used across xmlrunner so
# the rest of the package imports them from a single place.
from __future__ import absolute_import

import sys

# pylint: disable-msg=W0611
import unittest
from unittest import TextTestRunner
from unittest import TestResult, TextTestResult
from unittest.result import failfast
from unittest.main import TestProgram

# Public re-export surface of this shim module.
__all__ = (
    'unittest', 'TextTestRunner', 'TestResult', 'TextTestResult',
    'TestProgram', 'failfast')
unittest-xml-reporting-3.2.0/xmlrunner/version.py 0000664 0000000 0000000 00000000027 14172331455 0022275 0 ustar 00root root 0000000 0000000
# Package version string for unittest-xml-reporting.
__version__ = '3.2.0'