===== flaky-3.7.0/.coveragerc =====

[run]
# Whether to measure branch coverage in addition to statement coverage.
branch = True

# List of packages or directories, the source to measure during execution.
source = flaky
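A minimal way to exercise this configuration (an editor's sketch, not part of the repo; it assumes coverage.py and pytest are installed — the project's own ``tox`` setup, described in README.rst, wraps the same steps):

    coverage run -m pytest
    coverage report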
===== flaky-3.7.0/.gitignore =====

*.py[cod]

# C extensions
*.so

# Packages
*.egg
*.egg-info
dist
build
eggs
parts
bin
var
sdist
develop-eggs
.installed.cfg
lib
lib64

# Installer logs
pip-log.txt

# Unit test / coverage reports
.coverage
.tox
nosetests.xml

# Translations
*.mo

# IntelliJ
.idea/codeStyleSettings.xml
.idea/misc.xml
.idea/tasks.xml
.idea/workspace.xml

===== flaky-3.7.0/.idea/ (IDE project files) =====

The archive also contains PyCharm/IntelliJ project files whose XML bodies did
not survive extraction; only the file names (and the ".name" contents,
"flaky") are recoverable: .name, compiler.xml, copyright/profiles_settings.xml,
encodings.xml, inspectionProfiles/Project_Default.xml,
inspectionProfiles/profiles_settings.xml, modules.xml, other.xml,
scopes/scope_settings.xml, testrunner.xml, uiDesigner.xml, vcs.xml.

===== flaky-3.7.0/.pylintrc =====

[MASTER]

# Specify a configuration file.
#rcfile=

# Python code to execute, usually for sys.path manipulation such as
# pygtk.require().
#init-hook=

# Add files or directories to the blacklist. They should be base names, not
# paths.
#ignore=CVS

# Pickle collected data for later comparisons.
#persistent=yes

# List of plugins (as comma separated values of python module names) to load,
# usually to register additional checkers.
#load-plugins=

[MESSAGES CONTROL]

# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifiers separated by comma (,) or put this option
# multiple times. See also the "--disable" option for examples.
#enable=

# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifiers separated by comma (,) or put this
# option multiple times (only on the command line, not in the configuration
# file where it should appear only once). You can also use "--disable=all" to
# disable everything first and then reenable specific checks. For example, if
# you want to run only the similarities checker, you can use "--disable=all
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use "--disable=all --enable=classes
# --disable=W".
# C0111 => Missing docstring
# W0108 => Unnecessary lambda
# W0142 => Used * or ** magic
# R0921 => Abstract class not referenced
# R0205 => Useless object inheritance; the codebase still supports Python 2.7
disable=I, C0111, W0108, W0142, R0921, R0205

[REPORTS]

# Set the output format. Available formats are text, parseable, colorized, msvs
# (visual studio) and html. You can also give a reporter class, e.g.
# mypackage.mymodule.MyReporterClass.
output-format=text

# Put messages in a separate file for each module / package specified on the
# command line instead of printing them on stdout. Reports (if any) will be
# written in a file named "pylint_global.[txt|html]".
files-output=no

# Tells whether to display a full report or only the messages.
reports=no

# Python expression which should return a note less than 10 (10 is the highest
# note). You have access to the variables errors, warning, statement which
# respectively contain the number of errors / warnings messages and the total
# number of statements analyzed. This is used by the global evaluation report
# (RP0004).
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)

# Template used to display messages. This is a python new-style format string
# used to format the message information. See doc for all details.
msg-template={module}:{line}:{column}: [{msg_id}({symbol}), {obj}] {msg}

[BASIC]

# List of builtin function names that should not be used, separated by a comma.
bad-functions=map,filter,apply,input

# Regular expression which should only match correct module names.
module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$

# Regular expression which should only match correct module level names.
const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$

# Regular expression which should only match correct class names.
class-rgx=[A-Z_][a-zA-Z0-9]+$

# Regular expression which should only match correct function names.
function-rgx=[a-z_][a-z0-9_]{2,60}$

# Regular expression which should only match correct method names.
method-rgx=[a-z_][a-z0-9_]{2,60}$

# Regular expression which should only match correct instance attribute names.
attr-rgx=[a-z_][a-z0-9_]{2,30}$

# Regular expression which should only match correct argument names.
argument-rgx=[a-z_][a-z0-9_]{2,30}$

# Regular expression which should only match correct variable names.
variable-rgx=[a-z_][a-z0-9_]{2,30}$

# Regular expression which should only match correct attribute names in class
# bodies.
class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$

# Regular expression which should only match correct list comprehension /
# generator expression variable names.
inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$

# Good variable names which should always be accepted, separated by a comma.
good-names=i,j,k,ex,Run,_

# Bad variable names which should always be refused, separated by a comma.
bad-names=foo,bar,baz,toto,tutu,tata

# Regular expression which should only match function or class names that do
# not require a docstring.
no-docstring-rgx=__.*__

# Minimum line length for functions/classes that require docstrings; shorter
# ones are exempt.
docstring-min-length=-1
[FORMAT]

# Maximum number of characters on a single line.
max-line-length=120

# Regexp for a line that is allowed to be longer than the limit. (The URL
# pattern was garbled in this archive; the stock pylint default is restored.)
ignore-long-lines=^\s*(# )?<?https?://\S+>?$

# Maximum number of lines in a module.
max-module-lines=1000

# String used as indentation unit. This is usually "    " (4 spaces) or "\t"
# (1 tab).
indent-string='    '

[MISCELLANEOUS]

# List of note tags to take in consideration, separated by a comma.
notes=TODO

[SIMILARITIES]

# Minimum lines number of a similarity.
min-similarity-lines=4

# Ignore comments when computing similarities.
ignore-comments=yes

# Ignore docstrings when computing similarities.
ignore-docstrings=yes

# Ignore imports when computing similarities.
ignore-imports=no

[TYPECHECK]

# Tells whether missing members accessed in mixin class should be ignored. A
# mixin class is detected if its name ends with "mixin" (case insensitive).
ignore-mixin-members=yes

# List of class names for which member attributes should not be checked
# (useful for classes with attributes dynamically set).
#ignored-classes=SQLObject
ignored-classes=pytest, _pytest

# List of members which are set dynamically and missed by pylint's inference
# system, and so shouldn't trigger E0201 when accessed. Python regular
# expressions are accepted.
generated-members=REQUEST,acl_users,aq_parent

[VARIABLES]

# Tells whether we should check for unused imports in __init__ files.
init-import=no

# A regular expression matching the beginning of the name of dummy variables
# (i.e. not used).
dummy-variables-rgx=_$|dummy

# List of additional names supposed to be defined in builtins. Remember that
# you should avoid defining new builtins when possible.
additional-builtins=

[CLASSES]

# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods=__init__,__new__,setUp

# List of valid names for the first argument in a class method.
valid-classmethod-first-arg=cls

# List of valid names for the first argument in a metaclass class method.
valid-metaclass-classmethod-first-arg=mcs

[DESIGN]

# Maximum number of arguments for function / method.
max-args=15

# Argument names that match this expression will be ignored. Defaults to names
# with a leading underscore.
ignored-argument-names=_.*

# Maximum number of locals for function / method body.
max-locals=20

# Maximum number of return / yield for function / method body.
max-returns=6

# Maximum number of branches for function / method body.
max-branches=12

# Maximum number of statements in function / method body.
max-statements=50

# Maximum number of parents for a class (see R0901).
max-parents=7

# Maximum number of attributes for a class (see R0902).
max-attributes=15

# Minimum number of public methods for a class (see R0903).
min-public-methods=0

# Maximum number of public methods for a class (see R0904).
max-public-methods=100

[IMPORTS]

# Deprecated modules which should not be used, separated by a comma.
deprecated-modules=

# Create a graph of every (i.e. internal and external) dependency in the
# given file (report RP0402 must not be disabled).
import-graph=

# Create a graph of external dependencies in the given file (report RP0402
# must not be disabled).
ext-import-graph=

# Create a graph of internal dependencies in the given file (report RP0402
# must not be disabled).
int-import-graph=

[EXCEPTIONS]

# Exceptions that will emit a warning when being caught. Defaults to
# "Exception".
overgeneral-exceptions=Exception
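How this file is consumed (illustrative only; pylint also picks up a ``.pylintrc`` in the working directory automatically, so the explicit flag is shown just for clarity):

    pylint --rcfile=.pylintrc flaky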
===== flaky-3.7.0/.travis.yml =====

language: python
cache: pip
matrix:
  include:
    - python: "2.7"
    - python: "3.4"
    - python: "3.5"
    - python: pypy
    - env: TOX_ENV=pep8
    - env: TOX_ENV=pylint
    - env: TOX_ENV=coverage

# commands to install dependencies
install:
  - pip install -U tox-travis

# commands to run
script:
  - tox

after_success:
  - if [ "x$TOX_ENV" = "xcoverage" ]; then coveralls; fi

===== flaky-3.7.0/AUTHORS.rst =====

Flaky is an open source project supported by `Box <https://www.box.com>`_ that
was born out of our testing framework. This is a list of contributors.

- `@Jeff-Meadows <https://github.com/Jeff-Meadows>`_
- `@benpatterson <https://github.com/benpatterson>`_
- `@amygdalama <https://github.com/amygdalama>`_
- `@hugovk <https://github.com/hugovk>`_
===== flaky-3.7.0/CONTRIBUTING.rst =====

Contributing
============

All contributions are welcome to this project.

Contributor License Agreement
-----------------------------

Before a contribution can be merged into this project, please fill out the
Contributor License Agreement (CLA) located at::

    https://opensource.box.com/cla

To learn more about CLAs and why they are important to open source projects,
please see the `Wikipedia entry
<https://en.wikipedia.org/wiki/Contributor_License_Agreement>`_.

How to contribute
-----------------

- **File an issue** - if you found a bug, want to request an enhancement, or
  want to implement something (bug fix or feature).
- **Send a pull request** - if you want to contribute code. Please be sure to
  file an issue first.

Pull request best practices
---------------------------

We want to accept your pull requests. Please follow these steps:

Step 1: File an issue
~~~~~~~~~~~~~~~~~~~~~

Before writing any code, please file an issue stating the problem you want to
solve or the feature you want to implement. This allows us to give you
feedback before you spend any time writing code. There may be a known
limitation that can't be addressed, or a bug that has already been fixed in a
different way. The issue allows us to communicate and figure out whether it's
worth your time to write a bunch of code for the project.

Step 2: Fork this repository in GitHub
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

This will create your own copy of our repository.

Step 3: Add the upstream source
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

The upstream source is the project under the Box organization on GitHub. To
add an upstream source for this project, type:

.. code-block:: console

    git remote add upstream git@github.com:box/flaky.git

This will come in useful later.

Step 4: Create a feature branch
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Create a branch with a descriptive name, such as ``add-search``.

Step 5: Push your feature branch to your fork
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

As you develop code, continue to push code to your remote feature branch.
Please make sure to include the issue number you're addressing in your commit
message, such as:

.. code-block:: console

    git commit -am "Adding search (fixes #123)"

This helps us out by allowing us to track which issue your commit relates to.

Keep a separate feature branch for each issue you want to address.

Step 6: Rebase
~~~~~~~~~~~~~~

Before sending a pull request, rebase against upstream, such as:

.. code-block:: console

    git fetch upstream
    git rebase upstream/master

This will add your changes on top of what's already in upstream, minimizing
merge issues.

Step 7: Run the tests
~~~~~~~~~~~~~~~~~~~~~

Make sure that all tests are passing before submitting a pull request.

Step 8: Send the pull request
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Send the pull request from your feature branch to us. Be sure to include a
description that lets us know what work you did.

Keep in mind that we like to see one issue addressed per pull request, as this
helps keep our git history clean and we can more easily track down issues.

Finally, please add a note in HISTORY.rst under the Upcoming section detailing
what's new in your change. These will become the release notes for the next
release. In addition, feel free to add yourself to AUTHORS.rst if you aren't
already listed.
===== flaky-3.7.0/HISTORY.rst =====

.. :changelog:

Release History
---------------

Upcoming
++++++++

3.7.0 (2020-07-07)
++++++++++++++++++

- Flaky now retries tests which fail during setup.

3.6.1 (2019-08-06)
++++++++++++++++++

**Bugfixes**

- Reraise ``KeyboardInterrupt`` when running tests under pytest.

3.6.0 (2019-06-25)
++++++++++++++++++

- Do not print an empty report if no tests marked 'flaky' were run at all
  (#116). NOTE: This change could be breaking if you relied on the flaky
  report being printed.

3.5.3 (2019-01-16)
++++++++++++++++++

- Add a rerun_filter parameter to _make_test_flaky.

3.5.2 (2019-01-10)
++++++++++++++++++

**Bugfixes**

- Fall back to the old pytest marker API for older pytest versions
  (``get_marker`` vs ``iter_markers``).

3.5.1 (2019-01-09)
++++++++++++++++++

- Officially support and test on Python 3.6 and 3.7.
- Adds a pytest marker that can be used instead of ``@flaky``.
- Replaced references to 'slaveoutput', where possible, with 'workeroutput',
  following the convention chosen by pytest.
- Prints formatted tracebacks in the flaky report when using nose.

**Bugfixes**

- Ensure that tests are only reported as successful to the nose runner once.

3.5.0 (2019-01-07)
++++++++++++++++++

- Updated references to pytest instead of py.test.

**Bugfixes**

- Flaky is now compatible with pytest >= 4.1.

3.4.0 (2017-06-15)
++++++++++++++++++

**Bugfixes**

- Flaky for pytest will no longer silently swallow errors that occur during
  test setup.

3.3.0 (2016-07-28)
++++++++++++++++++

- Flaky for Nose will now rerun tests using the ``afterTest`` plugin hook,
  rather than the ``stopTest`` hook. The ``afterTest`` hook is called slightly
  later in the test run process; this change allows flaky to be used with
  ``TestCase`` subclasses that override the test run process and do teardown
  after ``stopTest`` is called. In particular, this means that flaky is now
  compatible with Django's ``LiveServerTestCase``.

3.2.0 (2016-07-21)
++++++++++++++++++

- Flaky will completely suppress the flaky report if
  ``--no-success-flaky-report`` is specified and no tests needed to be rerun.

**Bugfixes**

- Flaky will no longer cause ``py.test --pep8`` to fail.
3.1.0 (2016-11-22)
++++++++++++++++++

- Flaky's automated tests now include a run with the ``pytest-xdist`` plugin
  enabled.
- Flaky for pytest has slightly changed how it patches the runner. This
  simplifies the plugin code a bit but, more importantly, avoids reporting
  test retries until flaky is done with them. This *should* improve
  compatibility with other plugins.

3.0.2 (2015-12-21)
++++++++++++++++++

**Bugfixes**

- Flaky for pytest no longer passes None for the first 2 arguments to the
  optional ``rerun_filter``.

3.0.1 (2015-12-16)
++++++++++++++++++

**Bugfixes**

- Flaky for pytest no longer causes errors with the pytester plugin.

3.0.0 (2015-12-14)
++++++++++++++++++

- Flaky for pytest now reruns test setup and teardown. **This is a possibly
  breaking change.**

**Bugfixes**

- Bug with nose and multiprocess fixed.

2.4.0 (2015-10-27)
++++++++++++++++++

**Bugfixes**

- The flaky report is now available under nose with the multiprocessing
  plugin.

2.3.0 (2015-10-15)
++++++++++++++++++

- Added support and testing for Python 3.5.
- Fixed tests on Python 2.6 with the latest version of py.test.

**Bugfixes**

- Flaky will no longer swallow exceptions raised during pytest fixture setup.
  This change is correct, but is a change in behavior.

2.2.0 (2015-08-28)
++++++++++++++++++

- The ``@flaky`` decorator now accepts a ``rerun_filter`` parameter. This
  allows for failing certain types of failures/errors immediately instead of
  rerunning.
- Flaky now accepts a command line option, ``--no-success-flaky-report``. When
  that option is present, flaky won't add information about test successes to
  the flaky report.

2.1.2 (2015-07-30)
++++++++++++++++++

**Bugfixes**

- Flaky will no longer raise a UnicodeEncodeError for flaky tests which raise
  exceptions with non-ascii characters.
- Flaky will no longer cause nose to report non-flaky test failures and errors
  twice.
- Flaky now works with tests that are parametrized with py.test.

2.1.1 (2015-05-22)
++++++++++++++++++

**Bugfixes**

- Flaky will no longer raise a KeyError for failed flaky tests.

2.1.0 (2015-05-05)
++++++++++++++++++

**Bugfixes**

- Flaky for nose now reruns failed tests *after* calling the ``tearDown()``
  method. This change is correct, but is a change in behavior.

2.0.4 (2015-04-20)
++++++++++++++++++

**Bugfixes**

- Flaky now copies flaky attributes to collected tests, rather than modifying
  them on the test declaration. This means that tests collected from classes
  that inherit tests marked flaky (from a base class) will now work correctly.
- Running py.test with doctests will no longer cause the doctests to fail.
  Doctests cannot, however, be marked flaky.
- Tests marked flaky will now be correctly rerun from pytest when using the
  pytest-xdist option. However, they will not be run if the ``--boxed`` option
  is used, due to a technical limitation.

**Documentation updates**

- Updated documentation to correctly specify how to suppress the flaky report
  under py.test.

2.0.3 (2015-03-20)
++++++++++++++++++

**Bugfixes**

- Tests marked flaky that are part of a class inheriting from
  ``unittest.TestCase`` will now be rerun when they fail under py.test.

2.0.0 (2015-03-01)
++++++++++++++++++

**Bugfixes**

- Tests marked flaky that fail after exhausting reruns will now be reported to
  the nose test runner. This is a *breaking* change, because the exit code of
  the nose test runner will indicate failure in this case.
- Tests marked flaky will now be marked as failures after they have failed
  ``max_runs - min_passes + 1`` times. This is a *breaking* change as well,
  because a bug in previous versions was allowing tests with
  ``min_passes > 0`` to run more than ``max_runs`` times.

===== flaky-3.7.0/LICENSE =====

                              Apache License
                        Version 2.0, January 2004
                     http://www.apache.org/licenses/

TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

1. Definitions.

   "License" shall mean the terms and conditions for use, reproduction, and
   distribution as defined by Sections 1 through 9 of this document.

   "Licensor" shall mean the copyright owner or entity authorized by the
   copyright owner that is granting the License.

   "Legal Entity" shall mean the union of the acting entity and all other
   entities that control, are controlled by, or are under common control with
   that entity. For the purposes of this definition, "control" means (i) the
   power, direct or indirect, to cause the direction or management of such
   entity, whether by contract or otherwise, or (ii) ownership of fifty
   percent (50%) or more of the outstanding shares, or (iii) beneficial
   ownership of such entity.

   "You" (or "Your") shall mean an individual or Legal Entity exercising
   permissions granted by this License.

   "Source" form shall mean the preferred form for making modifications,
   including but not limited to software source code, documentation source,
   and configuration files.

   "Object" form shall mean any form resulting from mechanical transformation
   or translation of a Source form, including but not limited to compiled
   object code, generated documentation, and conversions to other media types.

   "Work" shall mean the work of authorship, whether in Source or Object form,
   made available under the License, as indicated by a copyright notice that
   is included in or attached to the work (an example is provided in the
   Appendix below).

   "Derivative Works" shall mean any work, whether in Source or Object form,
   that is based on (or derived from) the Work and for which the editorial
   revisions, annotations, elaborations, or other modifications represent, as
   a whole, an original work of authorship.
   For the purposes of this License, Derivative Works shall not include works
   that remain separable from, or merely link (or bind by name) to the
   interfaces of, the Work and Derivative Works thereof.

   "Contribution" shall mean any work of authorship, including the original
   version of the Work and any modifications or additions to that Work or
   Derivative Works thereof, that is intentionally submitted to Licensor for
   inclusion in the Work by the copyright owner or by an individual or Legal
   Entity authorized to submit on behalf of the copyright owner. For the
   purposes of this definition, "submitted" means any form of electronic,
   verbal, or written communication sent to the Licensor or its
   representatives, including but not limited to communication on electronic
   mailing lists, source code control systems, and issue tracking systems that
   are managed by, or on behalf of, the Licensor for the purpose of discussing
   and improving the Work, but excluding communication that is conspicuously
   marked or otherwise designated in writing by the copyright owner as "Not a
   Contribution."

   "Contributor" shall mean Licensor and any individual or Legal Entity on
   behalf of whom a Contribution has been received by Licensor and
   subsequently incorporated within the Work.

2. Grant of Copyright License. Subject to the terms and conditions of this
   License, each Contributor hereby grants to You a perpetual, worldwide,
   non-exclusive, no-charge, royalty-free, irrevocable copyright license to
   reproduce, prepare Derivative Works of, publicly display, publicly perform,
   sublicense, and distribute the Work and such Derivative Works in Source or
   Object form.

3. Grant of Patent License. Subject to the terms and conditions of this
   License, each Contributor hereby grants to You a perpetual, worldwide,
   non-exclusive, no-charge, royalty-free, irrevocable (except as stated in
   this section) patent license to make, have made, use, offer to sell, sell,
   import, and otherwise transfer the Work, where such license applies only to
   those patent claims licensable by such Contributor that are necessarily
   infringed by their Contribution(s) alone or by combination of their
   Contribution(s) with the Work to which such Contribution(s) was submitted.
   If You institute patent litigation against any entity (including a
   cross-claim or counterclaim in a lawsuit) alleging that the Work or a
   Contribution incorporated within the Work constitutes direct or
   contributory patent infringement, then any patent licenses granted to You
   under this License for that Work shall terminate as of the date such
   litigation is filed.

4. Redistribution.
   You may reproduce and distribute copies of the Work or Derivative Works
   thereof in any medium, with or without modifications, and in Source or
   Object form, provided that You meet the following conditions:

   (a) You must give any other recipients of the Work or Derivative Works a
       copy of this License; and

   (b) You must cause any modified files to carry prominent notices stating
       that You changed the files; and

   (c) You must retain, in the Source form of any Derivative Works that You
       distribute, all copyright, patent, trademark, and attribution notices
       from the Source form of the Work, excluding those notices that do not
       pertain to any part of the Derivative Works; and

   (d) If the Work includes a "NOTICE" text file as part of its distribution,
       then any Derivative Works that You distribute must include a readable
       copy of the attribution notices contained within such NOTICE file,
       excluding those notices that do not pertain to any part of the
       Derivative Works, in at least one of the following places: within a
       NOTICE text file distributed as part of the Derivative Works; within
       the Source form or documentation, if provided along with the Derivative
       Works; or, within a display generated by the Derivative Works, if and
       wherever such third-party notices normally appear. The contents of the
       NOTICE file are for informational purposes only and do not modify the
       License. You may add Your own attribution notices within Derivative
       Works that You distribute, alongside or as an addendum to the NOTICE
       text from the Work, provided that such additional attribution notices
       cannot be construed as modifying the License.

   You may add Your own copyright statement to Your modifications and may
   provide additional or different license terms and conditions for use,
   reproduction, or distribution of Your modifications, or for any such
   Derivative Works as a whole, provided Your use, reproduction, and
   distribution of the Work otherwise complies with the conditions stated in
   this License.

5. Submission of Contributions. Unless You explicitly state otherwise, any
   Contribution intentionally submitted for inclusion in the Work by You to
   the Licensor shall be under the terms and conditions of this License,
   without any additional terms or conditions. Notwithstanding the above,
   nothing herein shall supersede or modify the terms of any separate license
   agreement you may have executed with Licensor regarding such Contributions.

6. Trademarks. This License does not grant permission to use the trade names,
   trademarks, service marks, or product names of the Licensor, except as
   required for reasonable and customary use in describing the origin of the
   Work and reproducing the content of the NOTICE file.

7. Disclaimer of Warranty. Unless required by applicable law or agreed to in
   writing, Licensor provides the Work (and each Contributor provides its
   Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
   KIND, either express or implied, including, without limitation, any
   warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or
   FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for
   determining the appropriateness of using or redistributing the Work and
   assume any risks associated with Your exercise of permissions under this
   License.

8. Limitation of Liability.
   In no event and under no legal theory, whether in tort (including
   negligence), contract, or otherwise, unless required by applicable law
   (such as deliberate and grossly negligent acts) or agreed to in writing,
   shall any Contributor be liable to You for damages, including any direct,
   indirect, special, incidental, or consequential damages of any character
   arising as a result of this License or out of the use or inability to use
   the Work (including but not limited to damages for loss of goodwill, work
   stoppage, computer failure or malfunction, or any and all other commercial
   damages or losses), even if such Contributor has been advised of the
   possibility of such damages.

9. Accepting Warranty or Additional Liability. While redistributing the Work
   or Derivative Works thereof, You may choose to offer, and charge a fee for,
   acceptance of support, warranty, indemnity, or other liability obligations
   and/or rights consistent with this License. However, in accepting such
   obligations, You may act only on Your own behalf and on Your sole
   responsibility, not on behalf of any other Contributor, and only if You
   agree to indemnify, defend, and hold each Contributor harmless for any
   liability incurred by, or claims asserted against, such Contributor by
   reason of your accepting any such warranty or additional liability.

END OF TERMS AND CONDITIONS

===== flaky-3.7.0/MANIFEST.in =====

include README.rst LICENSE
recursive-include test test*.py __init__.py conftest.py

===== flaky-3.7.0/README.rst =====

flaky
=====

.. image:: http://opensource.box.com/badges/stable.svg
    :target: http://opensource.box.com/badges

.. image:: https://travis-ci.org/box/flaky.svg?branch=master
    :target: https://travis-ci.org/box/flaky

.. image:: https://img.shields.io/pypi/v/flaky.svg
    :target: https://pypi.python.org/pypi/flaky

About
-----

Flaky is a plugin for nose or pytest that automatically reruns flaky tests.

Ideally, tests reliably pass or fail, but sometimes test fixtures must rely
on components that aren't 100% reliable. With flaky, instead of removing
those tests or marking them ``@skip``, they can be automatically retried.

For more information about flaky, see `this presentation`_ (the link target
was not preserved in this archive).

Marking tests flaky
~~~~~~~~~~~~~~~~~~~

To mark a test as flaky, simply import flaky and decorate the test with
``@flaky``:

.. code-block:: python

    from flaky import flaky

.. code-block:: python

    @flaky
    def test_something_that_usually_passes(self):
        value_to_double = 21
        result = get_result_from_flaky_doubler(value_to_double)
        self.assertEqual(result, value_to_double * 2, 'Result doubled incorrectly.')

By default, flaky will retry a failing test once, but that behavior can be
overridden by passing values to the flaky decorator. It accepts two
parameters: ``max_runs`` and ``min_passes``; flaky will run tests up to
``max_runs`` times, until it has succeeded ``min_passes`` times. Once a test
passes ``min_passes`` times, it's considered a success; once it has been run
``max_runs`` times without passing ``min_passes`` times, it's considered a
failure.

.. code-block:: python

    @flaky(max_runs=3, min_passes=2)
    def test_something_that_usually_passes(self):
        """This test must pass twice, and it can be run up to three times."""
        value_to_double = 21
        result = get_result_from_flaky_doubler(value_to_double)
        self.assertEqual(result, value_to_double * 2, 'Result doubled incorrectly.')

Marking a class flaky
+++++++++++++++++++++

In addition to marking a single test flaky, entire test cases can be marked
flaky:

.. code-block:: python

    @flaky
    class TestMultipliers(TestCase):
        def test_flaky_doubler(self):
            value_to_double = 21
            result = get_result_from_flaky_doubler(value_to_double)
            self.assertEqual(result, value_to_double * 2, 'Result doubled incorrectly.')

        @flaky(max_runs=3)
        def test_flaky_tripler(self):
            value_to_triple = 14
            result = get_result_from_flaky_tripler(value_to_triple)
            self.assertEqual(result, value_to_triple * 3, 'Result tripled incorrectly.')

The ``@flaky`` class decorator will mark ``test_flaky_doubler`` as flaky, but
it won't override the ``max_runs=3`` for ``test_flaky_tripler`` (from the
decorator on that test method).

Pytest marker
+++++++++++++

When using ``pytest``, ``@pytest.mark.flaky`` can be used in place of
``@flaky``.
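For example (an illustrative sketch, not from the original README; the marker takes the same ``max_runs``/``min_passes`` keywords as the decorator, and ``flaky_doubler`` is a hypothetical helper):

.. code-block:: python

    import pytest

    @pytest.mark.flaky(max_runs=3, min_passes=1)
    def test_doubler():
        # hypothetical helper standing in for an unreliable component
        assert flaky_doubler(21) == 42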
Don't rerun certain types of failures
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Depending on your tests, some failures are obviously not due to flakiness.
Instead of rerunning after those failures, you can specify a filter function
that can tell flaky to fail the test right away.

.. code-block:: python

    def is_not_crash(err, *args):
        return not issubclass(err[0], ProductCrashedError)

    @flaky
    def test_something():
        raise ProductCrashedError

    @flaky(rerun_filter=is_not_crash)
    def test_something_else():
        raise ProductCrashedError

Flaky will run ``test_something`` twice, but will only run
``test_something_else`` once.

It can also be used to incur a delay between test retries:

.. code-block:: python

    import time

    def delay_rerun(*args):
        time.sleep(1)
        return True

    @flaky(rerun_filter=delay_rerun)
    def test_something_else():
        ...

Activating the plugin
~~~~~~~~~~~~~~~~~~~~~

Like any nose plugin, flaky can be activated via the command line:

.. code-block:: console

    nosetests --with-flaky

With pytest, flaky will automatically run. It can, however, be disabled via
the command line:

.. code-block:: console

    pytest -p no:flaky

Command line arguments
~~~~~~~~~~~~~~~~~~~~~~

No Flaky Report
+++++++++++++++

Pass ``--no-flaky-report`` to suppress the report at the end of the run
detailing flaky test results.

Shorter Flaky Report
++++++++++++++++++++

Pass ``--no-success-flaky-report`` to suppress information about successful
flaky tests.

Force Flaky
+++++++++++

Pass ``--force-flaky`` to treat all tests as flaky.

Pass ``--max-runs=MAX_RUNS`` and/or ``--min-passes=MIN_PASSES`` to control the
behavior of flaky if ``--force-flaky`` is specified. Flaky decorators on
individual tests will override these defaults. Illustrative invocations
combining these flags are shown below.

*Additional usage examples are in the code - see
test/test_nose/test_nose_example.py and test/test_pytest/test_pytest_example.py*
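A sketch of invocations combining the flags above (these exact command lines are not from the repo itself):

.. code-block:: console

    pytest --force-flaky --max-runs=5 --min-passes=2
    nosetests --with-flaky --force-flaky --no-success-flaky-report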
Installation
------------

To install, simply:

.. code-block:: console

    pip install flaky

Compatibility
-------------

Flaky is tested with the following test runners and options:

- Nosetests. Doctests cannot be marked flaky.
- Py.test. Works with ``pytest-xdist`` but not with the ``--boxed`` option.
  Doctests cannot be marked flaky.

Contributing
------------

See `CONTRIBUTING.rst
<https://github.com/box/flaky/blob/master/CONTRIBUTING.rst>`_.

Setup
~~~~~

Create a virtual environment and install packages:

.. code-block:: console

    mkvirtualenv flaky
    pip install -r requirements-dev.txt

Testing
~~~~~~~

Run all tests using:

.. code-block:: console

    tox

The tox tests include code style checks via pycodestyle and pylint.

Copyright and License
---------------------

::

    Copyright 2015 Box, Inc. All rights reserved.

    Licensed under the Apache License, Version 2.0 (the "License");
    you may not use this file except in compliance with the License.
    You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

    Unless required by applicable law or agreed to in writing, software
    distributed under the License is distributed on an "AS IS" BASIS,
    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    See the License for the specific language governing permissions and
    limitations under the License.

===== flaky-3.7.0/flaky.iml =====

(IntelliJ module file; its XML body did not survive extraction.)

===== flaky-3.7.0/flaky/__init__.py =====

# coding: utf-8

from __future__ import unicode_literals

from .flaky_decorator import flaky

===== flaky-3.7.0/flaky/_flaky_plugin.py =====

# coding: utf-8

from __future__ import unicode_literals

from io import StringIO
from traceback import format_exception

from flaky import defaults
from flaky.names import FlakyNames
from flaky.utils import ensure_unicode_string


class _FlakyPlugin(object):
    _retry_failure_message = ' failed ({0} runs remaining out of {1}).'
    _failure_message = ' failed; it passed {0} out of the required {1} times.'
    _not_rerun_message = ' failed and was not selected for rerun.'

    def __init__(self):
        super(_FlakyPlugin, self).__init__()
        self._stream = StringIO()
        self._flaky_success_report = True
        self._had_flaky_tests = False

    @property
    def stream(self):
        """
        Returns the stream used for building the flaky report.
        Anything written to this stream before the end of the test run
        will be written to the flaky report.

        :return: The stream used for building the flaky report.
        :rtype: :class:`StringIO`
        """
        return self._stream

    def _log_test_failure(self, test_callable_name, err, message):
        """
        Add messaging about a test failure to the stream, which will be
        printed by the plugin's report method.
        """
        formatted_exception_info = ''.join(format_exception(*err)).replace('\n', '\n\t').rstrip()
        self._stream.writelines([
            ensure_unicode_string(test_callable_name),
            ensure_unicode_string(message),
            ensure_unicode_string(formatted_exception_info),
            '\n',
        ])

    def _report_final_failure(self, err, flaky, name):
        """
        Report that the test has failed too many times to pass at
        least min_passes times.

        By default, this means that the test has failed twice.

        :param err: Information about the test failure (from sys.exc_info())
        :type err: `tuple` of `class`, :class:`Exception`, `traceback`
        :param flaky: Dictionary of flaky attributes
        :type flaky: `dict` of `unicode` to varies
        :param name: The test name
        :type name: `unicode`
        """
        min_passes = flaky[FlakyNames.MIN_PASSES]
        current_passes = flaky[FlakyNames.CURRENT_PASSES]
        message = self._failure_message.format(
            current_passes,
            min_passes,
        )
        self._log_test_failure(name, err, message)
    def _log_intermediate_failure(self, err, flaky, name):
        """
        Report that the test has failed, but still has reruns left.
        Then rerun the test.

        :param err: Information about the test failure (from sys.exc_info())
        :type err: `tuple` of `class`, :class:`Exception`, `traceback`
        :param flaky: Dictionary of flaky attributes
        :type flaky: `dict` of `unicode` to varies
        :param name: The test name
        :type name: `unicode`
        """
        max_runs = flaky[FlakyNames.MAX_RUNS]
        runs_left = max_runs - flaky[FlakyNames.CURRENT_RUNS]
        message = self._retry_failure_message.format(
            runs_left,
            max_runs,
        )
        self._log_test_failure(name, err, message)

    def _should_handle_test_error_or_failure(self, test):
        """
        Whether or not flaky should handle a test error or failure.
        Only handle tests marked @flaky.
        Count remaining retries and compare with the number of required
        successes that have not yet been achieved.

        This method may be called multiple times for the same test run,
        so it has no side effects.

        :param test: The test that has raised an error
        :type test: :class:`nose.case.Test` or :class:`Function`
        :return: True, if the test needs to be rerun; False, otherwise.
        :rtype: `bool`
        """
        if not self._has_flaky_attributes(test):
            return False
        flaky_attributes = self._get_flaky_attributes(test)
        flaky_attributes[FlakyNames.CURRENT_RUNS] += 1
        has_failed = self._has_flaky_test_failed(flaky_attributes)
        return not has_failed

    def _will_handle_test_error_or_failure(self, test, name, err):
        """
        Whether or not flaky will handle a test error or failure.
        Returns True if the plugin should handle the test result,
        and the `rerun_filter` returns True.

        :param test: The test that has raised an error
        :type test: :class:`nose.case.Test` or :class:`Function`
        :param name: The name of the test that has raised an error
        :type name: `unicode`
        :param err: Information about the test failure (from sys.exc_info())
        :type err: `tuple` of `type`, :class:`Exception`, `traceback`
        :return: True, if the test will be rerun by flaky; False, otherwise.
        :rtype: `bool`
        """
        return self._should_handle_test_error_or_failure(test) and self._should_rerun_test(test, name, err)

    def _handle_test_error_or_failure(self, test, err):
        """
        Handle a flaky test error or failure.

        Returning True from this method keeps the test runner from reporting
        the test as a failure; this way we can retry and only report as a
        failure if we are out of retries.

        This method may only be called once per test run; it changes persisted
        flaky attributes.

        :param test: The test that has raised an error
        :type test: :class:`nose.case.Test` or :class:`Function`
        :param err: Information about the test failure (from sys.exc_info())
        :type err: `tuple` of `type`, :class:`Exception`, `traceback`
        :return: True, if the test will be rerun; False, if the test runner
            should handle it.
        :rtype: `bool`
        """
        try:
            name = self._get_test_callable_name(test)
        except AttributeError:
            return False

        if self._has_flaky_attributes(test):
            self._had_flaky_tests = True
            self._add_flaky_test_failure(test, err)
            should_handle = self._should_handle_test_error_or_failure(test)
            self._increment_flaky_attribute(test, FlakyNames.CURRENT_RUNS)
            if should_handle:
                flaky_attributes = self._get_flaky_attributes(test)
                if self._should_rerun_test(test, name, err):
                    self._log_intermediate_failure(err, flaky_attributes, name)
                    self._mark_test_for_rerun(test)
                    return True
                self._log_test_failure(name, err, self._not_rerun_message)
                return False
            flaky_attributes = self._get_flaky_attributes(test)
            self._report_final_failure(err, flaky_attributes, name)
        return False
    def _should_rerun_test(self, test, name, err):
        """
        Whether or not a test should be rerun.
        This is a pass-through to the test's rerun filter.

        A flaky test will only be rerun if it hasn't failed too many
        times to succeed at least min_passes times, and if
        this method returns True.

        :param test: The test that has raised an error
        :type test: :class:`nose.case.Test` or :class:`Function`
        :param name: The test name
        :type name: `unicode`
        :param err: Information about the test failure (from sys.exc_info())
        :type err: `tuple` of `class`, :class:`Exception`, `traceback`
        :return: Whether flaky should rerun this test.
        :rtype: `bool`
        """
        rerun_filter = self._get_flaky_attribute(test, FlakyNames.RERUN_FILTER)
        return rerun_filter(err, name, test, self)

    def _mark_test_for_rerun(self, test):
        """
        Mark a flaky test for rerun.

        :param test: The test that has raised an error or succeeded
        :type test: :class:`nose.case.Test` or :class:`Function`
        """
        raise NotImplementedError  # pragma: no cover

    def _should_handle_test_success(self, test):
        if not self._has_flaky_attributes(test):
            return False
        flaky = self._get_flaky_attributes(test)
        flaky[FlakyNames.CURRENT_PASSES] += 1
        flaky[FlakyNames.CURRENT_RUNS] += 1
        return not self._has_flaky_test_succeeded(flaky)

    def _handle_test_success(self, test):
        """
        Handle a flaky test success.
        Count remaining retries and compare with the number of required
        successes that have not yet been achieved; retry if necessary.

        Returning True from this method keeps the test runner from reporting
        the test as a success; this way we can retry and only report as a
        success if the test has passed the required number of times.

        :param test: The test that has succeeded
        :type test: :class:`nose.case.Test` or :class:`Function`
        :return: True, if the test will be rerun; False, if the test runner
            should handle it.
        :rtype: `bool`
        """
        try:
            name = self._get_test_callable_name(test)
        except AttributeError:
            return False
        need_reruns = self._should_handle_test_success(test)

        if self._has_flaky_attributes(test):
            self._had_flaky_tests = True
            flaky = self._get_flaky_attributes(test)
            min_passes = flaky[FlakyNames.MIN_PASSES]
            passes = flaky[FlakyNames.CURRENT_PASSES] + 1
            self._set_flaky_attribute(test, FlakyNames.CURRENT_PASSES, passes)
            self._increment_flaky_attribute(test, FlakyNames.CURRENT_RUNS)

            if self._flaky_success_report:
                self._stream.writelines([
                    ensure_unicode_string(name),
                    ' passed {} out of the required {} times. '.format(
                        passes,
                        min_passes,
                    ),
                ])
                if need_reruns:
                    self._stream.write(
                        'Running test again until it passes {} times.\n'.format(
                            min_passes,
                        )
                    )
                else:
                    self._stream.write('Success!\n')

        if need_reruns:
            self._mark_test_for_rerun(test)
        return need_reruns
    @staticmethod
    def add_report_option(add_option):
        """
        Add an option to the test runner to suppress the flaky report.

        :param add_option: A function that can add an option to the test
            runner. Its argspec should equal that of argparse.add_option.
        :type add_option: `callable`
        """
        add_option(
            '--no-flaky-report',
            action='store_false',
            dest='flaky_report',
            default=True,
            help="Suppress the report at the end of the "
                 "run detailing flaky test results.",
        )
        add_option(
            '--no-success-flaky-report',
            action='store_false',
            dest='flaky_success_report',
            default=True,
            help="Suppress reporting flaky test successes "
                 "in the report at the end of the "
                 "run detailing flaky test results.",
        )

    @staticmethod
    def add_force_flaky_options(add_option):
        """
        Add options to the test runner that force all tests to be flaky.

        :param add_option: A function that can add an option to the test
            runner. Its argspec should equal that of argparse.add_option.
        :type add_option: `callable`
        """
        add_option(
            '--force-flaky',
            action="store_true",
            dest="force_flaky",
            default=False,
            help="If this option is specified, we will treat all tests as "
                 "flaky."
        )
        add_option(
            '--max-runs',
            action="store",
            dest="max_runs",
            type=int,
            default=2,
            help="If --force-flaky is specified, we will run each test at "
                 "most this many times (unless the test has its own flaky "
                 "decorator)."
        )
        add_option(
            '--min-passes',
            action="store",
            dest="min_passes",
            type=int,
            default=1,
            help="If --force-flaky is specified, we will run each test at "
                 "least this many times (unless the test has its own flaky "
                 "decorator)."
        )

    def _add_flaky_report(self, stream):
        """
        Baseclass override. Write details about flaky tests to the test report.

        :param stream: The test stream to which the report can be written.
        :type stream: `file`
        """
        value = self._stream.getvalue()

        # Do not print report if there were no tests marked 'flaky' at all.
        if not self._had_flaky_tests and not value:
            return

        # If everything succeeded and --no-success-flaky-report is specified
        # don't print anything.
        if not self._flaky_success_report and not value:
            return

        stream.write('===Flaky Test Report===\n\n')

        # Python 2 will write to the stderr stream as a byte string, whereas
        # Python 3 will write to the stream as text. Only encode into a byte
        # string if the write tries to encode it first and raises a
        # UnicodeEncodeError.
        try:
            stream.write(value)
        except UnicodeEncodeError:
            stream.write(value.encode('utf-8', 'replace'))

        stream.write('\n===End Flaky Test Report===\n')
    @classmethod
    def _copy_flaky_attributes(cls, test, test_class):
        """
        Copy flaky attributes from the test callable or class to the test.

        :param test: The test that is being prepared to run
        :type test: :class:`nose.case.Test`
        """
        test_callable = cls._get_test_callable(test)
        if test_callable is None:
            return
        for attr, value in cls._get_flaky_attributes(test_class).items():
            already_set = hasattr(test, attr)
            if already_set:
                continue
            attr_on_callable = getattr(test_callable, attr, None)
            if attr_on_callable is not None:
                cls._set_flaky_attribute(test, attr, attr_on_callable)
            elif value is not None:
                cls._set_flaky_attribute(test, attr, value)

    @staticmethod
    def _get_flaky_attribute(test_item, flaky_attribute):
        """
        Gets an attribute describing the flaky test.

        :param test_item: The test method from which to get the attribute
        :type test_item: `callable` or :class:`nose.case.Test` or
            :class:`Function`
        :param flaky_attribute: The name of the attribute to get
        :type flaky_attribute: `unicode`
        :return: The test callable's attribute, or None if the test callable
            doesn't have that attribute.
        :rtype: varies
        """
        return getattr(test_item, flaky_attribute, None)

    @staticmethod
    def _set_flaky_attribute(test_item, flaky_attribute, value):
        """
        Sets an attribute on a flaky test. Uses magic __dict__ since setattr
        doesn't work for bound methods.

        :param test_item: The test callable on which to set the attribute
        :type test_item: `callable` or :class:`nose.case.Test` or
            :class:`Function`
        :param flaky_attribute: The name of the attribute to set
        :type flaky_attribute: `unicode`
        :param value: The value to set the test callable's attribute to.
        :type value: varies
        """
        test_item.__dict__[flaky_attribute] = value

    @classmethod
    def _increment_flaky_attribute(cls, test_item, flaky_attribute):
        """
        Increments the value of an attribute on a flaky test.

        :param test_item: The test callable on which to set the attribute
        :type test_item: `callable` or :class:`nose.case.Test` or
            :class:`Function`
        :param flaky_attribute: The name of the attribute to set
        :type flaky_attribute: `unicode`
        """
        cls._set_flaky_attribute(test_item, flaky_attribute, cls._get_flaky_attribute(test_item, flaky_attribute) + 1)

    @classmethod
    def _has_flaky_attributes(cls, test):
        """
        Returns True if the test callable in question is marked as flaky.

        :param test: The test that is being prepared to run
        :type test: :class:`nose.case.Test` or :class:`Function`
        :rtype: `bool`
        """
        current_runs = cls._get_flaky_attribute(test, FlakyNames.CURRENT_RUNS)
        return current_runs is not None

    @classmethod
    def _get_flaky_attributes(cls, test_item):
        """
        Get all the flaky related attributes from the test.

        :param test_item: The test callable from which to get the flaky
            related attributes.
        :type test_item: `callable` or :class:`nose.case.Test` or
            :class:`Function`
        :rtype: `dict` of `unicode` to varies
        """
        return {
            attr: cls._get_flaky_attribute(
                test_item,
                attr,
            ) for attr in FlakyNames()
        }

    @classmethod
    def _add_flaky_test_failure(cls, test, err):
        """
        Store test error information on the test callable.

        :param test: The flaky test on which to update the flaky attributes.
        :type test: :class:`nose.case.Test` or :class:`Function`
        :param err: Information about the test failure (from sys.exc_info())
        :type err: `tuple` of `class`, :class:`Exception`, `traceback`
        """
        errs = getattr(test, FlakyNames.CURRENT_ERRORS, None) or []
        cls._set_flaky_attribute(test, FlakyNames.CURRENT_ERRORS, errs)
        errs.append(err)

    @classmethod
    def _has_flaky_test_failed(cls, flaky):
        """
        Whether or not the flaky test has failed

        :param flaky: Dictionary of flaky attributes
        :type flaky: `dict` of `unicode` to varies
        :return: True if the flaky test should be marked as failure; False if
            it should be rerun.
        :rtype: `bool`
        """
        max_runs, current_runs, min_passes, current_passes = (
            flaky[FlakyNames.MAX_RUNS],
            flaky[FlakyNames.CURRENT_RUNS],
            flaky[FlakyNames.MIN_PASSES],
            flaky[FlakyNames.CURRENT_PASSES],
        )
        runs_left = max_runs - current_runs
        passes_needed = min_passes - current_passes
        no_retry = passes_needed > runs_left
        return no_retry and not cls._has_flaky_test_succeeded(flaky)

    @staticmethod
    def _has_flaky_test_succeeded(flaky):
        """
        Whether or not the flaky test has succeeded

        :param flaky: Dictionary of flaky attributes
        :type flaky: `dict` of `unicode` to varies
        :return: True if the flaky test should be marked as success; False if
            it should be rerun.
        :rtype: `bool`
        """
        return flaky[FlakyNames.CURRENT_PASSES] >= flaky[FlakyNames.MIN_PASSES]

    @classmethod
    def _get_test_callable(cls, test):
        """
        Get the test callable, from the test.

        :param test: The test that has raised an error or succeeded
        :type test: :class:`nose.case.Test` or :class:`pytest.Item`
        :return: The test declaration, callable and name that is being run
        :rtype: `callable`
        """
        raise NotImplementedError  # pragma: no cover

    @staticmethod
    def _get_test_callable_name(test):
        """
        Get the name of the test callable from the test.

        :param test: The test that has raised an error or succeeded
        :type test: :class:`nose.case.Test` or :class:`pytest.Item`
        :return: The name of the test callable that is being run by the test
        :rtype: `unicode`
        """
        raise NotImplementedError  # pragma: no cover

    @classmethod
    def _make_test_flaky(cls, test, max_runs=None, min_passes=None, rerun_filter=None):
        """
        Make a given test flaky.

        :param test: The test in question.
        :type test: :class:`nose.case.Test` or :class:`Function`
        :param max_runs: The value of the FlakyNames.MAX_RUNS attribute to use.
        :type max_runs: `int`
        :param min_passes: The value of the FlakyNames.MIN_PASSES attribute
            to use.
        :type min_passes: `int`
        :param rerun_filter: Filter function to decide whether a test should
            be rerun if it fails. Function signature is as follows:
                (err, name, test, plugin) -> should_rerun
            - err (`tuple` of `class`, :class:`Exception`, `traceback`):
                Information about the test failure (from sys.exc_info())
            - name (`unicode`): The test name
            - test (:class:`nose.case.Test` or :class:`Function`): The test
                that has raised an error
            - plugin (:class:`FlakyNosePlugin` or :class:`FlakyPytestPlugin`):
                The flaky plugin. Has a :prop:`stream` that can be written to
                in order to add to the Flaky Report.
        :type rerun_filter: `callable`
        """
        attrib_dict = defaults.default_flaky_attributes(max_runs, min_passes, rerun_filter)
        for attr, value in attrib_dict.items():
            cls._set_flaky_attribute(test, attr, value)
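An editor's sketch (not part of the package) of the stopping rule that ``_has_flaky_test_failed`` above implements, restated with plain integers instead of the FlakyNames attribute dict:

    # A flaky test is declared failed once the passes still needed exceed the
    # runs still available, and it hasn't already passed min_passes times.
    def has_failed(max_runs, current_runs, min_passes, current_passes):
        runs_left = max_runs - current_runs
        passes_needed = min_passes - current_passes
        return passes_needed > runs_left and current_passes < min_passes

    assert has_failed(2, 2, 1, 0) is True   # out of runs, never passed
    assert has_failed(3, 2, 2, 1) is False  # one run left, one pass needed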
===== flaky-3.7.0/flaky/defaults.py =====

# coding: utf-8

from flaky.names import FlakyNames


def _true(*args):
    """
    Default rerun filter function that always returns True.
    """
    # pylint:disable=unused-argument
    return True


class FilterWrapper(object):
    """
    Filter function wrapper.

    Expected to be called as though it's a filter function.
    Since @flaky adds attributes to a decorated class, Python wants
    to turn a bare function into an unbound method, which is not what we want.
    """
    def __init__(self, rerun_filter):
        self._filter = rerun_filter

    def __call__(self, *args, **kwargs):
        return self._filter(*args, **kwargs)


def default_flaky_attributes(max_runs=None, min_passes=None, rerun_filter=None):
    """
    Returns the default flaky attributes to set on a flaky test.

    :param max_runs: The value of the FlakyNames.MAX_RUNS attribute to use.
    :type max_runs: `int`
    :param min_passes: The value of the FlakyNames.MIN_PASSES attribute to use.
    :type min_passes: `int`
    :param rerun_filter: Filter function to decide whether a test should be
        rerun if it fails.
    :type rerun_filter: `callable`
    :return: Default flaky attributes to set on a flaky test.
    :rtype: `dict`
    """
    if max_runs is None:
        max_runs = 2
    if min_passes is None:
        min_passes = 1
    if min_passes <= 0:
        raise ValueError('min_passes must be positive')
    if max_runs < min_passes:
        raise ValueError('min_passes cannot be greater than max_runs!')
    return {
        FlakyNames.MAX_RUNS: max_runs,
        FlakyNames.MIN_PASSES: min_passes,
        FlakyNames.CURRENT_RUNS: 0,
        FlakyNames.CURRENT_PASSES: 0,
        FlakyNames.RERUN_FILTER: FilterWrapper(rerun_filter or _true),
    }
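An editor's sketch (not part of the package) of what a bare ``@flaky`` decoration ends up storing, per ``default_flaky_attributes`` above:

    from flaky.defaults import default_flaky_attributes
    from flaky.names import FlakyNames

    attrs = default_flaky_attributes()
    assert attrs[FlakyNames.MAX_RUNS] == 2        # retry a failing test once
    assert attrs[FlakyNames.MIN_PASSES] == 1
    assert attrs[FlakyNames.CURRENT_RUNS] == 0
    # The default filter always says "rerun", whatever the failure looks like.
    assert attrs[FlakyNames.RERUN_FILTER](None, None, None, None)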
===== flaky-3.7.0/flaky/flaky_decorator.py =====

# coding: utf-8

from __future__ import unicode_literals

from flaky.defaults import default_flaky_attributes


def flaky(max_runs=None, min_passes=None, rerun_filter=None):
    """
    Decorator used to mark a test as "flaky". When used in conjunction with
    the flaky nosetests plugin, will cause the decorated test to be retried
    until min_passes successes are achieved out of up to max_runs test runs.

    :param max_runs: The maximum number of times the decorated test will be
        run.
    :type max_runs: `int`
    :param min_passes: The minimum number of times the test must pass to be a
        success.
    :type min_passes: `int`
    :param rerun_filter: Filter function to decide whether a test should be
        rerun if it fails. Function signature is as follows:
            (err, name, test, plugin) -> should_rerun
        - err (`tuple` of `class`, :class:`Exception`, `traceback`):
            Information about the test failure (from sys.exc_info())
        - name (`unicode`): The test name
        - test (:class:`nose.case.Test` or :class:`Function`): The test that
            has raised an error
        - plugin (:class:`FlakyNosePlugin` or :class:`FlakyPytestPlugin`):
            The flaky plugin. Has a :prop:`stream` that can be written to in
            order to add to the Flaky Report.
    :type rerun_filter: `callable`
    :return: A wrapper function that includes attributes describing the flaky
        test.
    :rtype: `callable`
    """
    # In case @flaky is applied to a function or class without arguments
    # (and without parentheses), max_runs will refer to the wrapped object.
    # In this case, the default value can be used.
    wrapped = None
    if hasattr(max_runs, '__call__'):
        wrapped, max_runs = max_runs, None

    attrib = default_flaky_attributes(max_runs, min_passes, rerun_filter)

    def wrapper(wrapped_object):
        for name, value in attrib.items():
            setattr(wrapped_object, name, value)
        return wrapped_object

    return wrapper(wrapped) if wrapped is not None else wrapper
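A short editor's sketch (not part of the package) of the two decorator forms the bare-callable check above handles:

    from flaky import flaky

    @flaky  # bare form: max_runs itself receives the function, defaults apply
    def test_usually_passes():
        assert True

    @flaky(max_runs=5, min_passes=2)  # parameterized form
    def test_needs_two_passes():
        assert True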
:rtype: :class:`StringIO` or :class:`MultiprocessingStringIO` """ if multiprocess: from flaky.multiprocess_string_io import MultiprocessingStringIO return MultiprocessingStringIO() return self._stream def configure(self, options, conf): """Base class override.""" super(FlakyPlugin, self).configure(options, conf) if not self.enabled: return is_multiprocess = int(getattr(options, 'multiprocess_workers', 0)) > 0 self._stream = self._get_stream(is_multiprocess) self._flaky_result = TextTestResult(self._stream, [], 0) self._flaky_report = options.flaky_report self._flaky_success_report = options.flaky_success_report self._force_flaky = options.force_flaky self._max_runs = options.max_runs self._min_passes = options.min_passes def startTest(self, test): """ Base class override. Called before a test is run. Add the test to the test status tracker, so it can potentially be rerun during afterTest. :param test: The test that is going to be run. :type test: :class:`nose.case.Test` """ # pylint:disable=invalid-name self._test_status[test] = None def afterTest(self, test): """ Base class override. Called after a test is run. If the test was marked for rerun, rerun the test. :param test: The test that has been run. :type test: :class:`nose.case.Test` """ # pylint:disable=invalid-name if self._test_status[test]: self._tests_that_reran.add(id(test)) test.run(self._flaky_result) self._test_status.pop(test, None) def _mark_test_for_rerun(self, test): """ Base class override. Rerun a flaky test. In this case, don't actually rerun the test, but mark it for rerun during afterTest. :param test: The test that is going to be rerun. :type test: :class:`nose.case.Test` """ self._test_status[test] = True def handleError(self, test, err): """ Baseclass override. Called when a test raises an exception. If the test isn't going to be rerun again, then report the error to the nose test result. :param test: The test that has raised an error :type test: :class:`nose.case.Test` :param err: Information about the test failure (from sys.exc_info()) :type err: `tuple` of `class`, :class:`Exception`, `traceback` :return: True, if the test will be rerun; False, if nose should handle it. :rtype: `bool` """ # pylint:disable=invalid-name want_error = self._handle_test_error_or_failure(test, err) if not want_error and id(test) in self._tests_that_reran: self._nose_result.addError(test, err) return want_error or None def handleFailure(self, test, err): """ Baseclass override. Called when a test fails. If the test isn't going to be rerun again, then report the failure to the nose test result. :param test: The test that has raised an error :type test: :class:`nose.case.Test` :param err: Information about the test failure (from sys.exc_info()) :type err: `tuple` of `class`, :class:`Exception`, `traceback` :return: True, if the test will be rerun; False, if nose should handle it. :rtype: `bool` """ # pylint:disable=invalid-name want_failure = self._handle_test_error_or_failure(test, err) if not want_failure and id(test) in self._tests_that_reran: self._nose_result.addFailure(test, err) return want_failure or None def addSuccess(self, test): """ Baseclass override. Called when a test succeeds. Count remaining retries and compare with number of required successes that have not yet been achieved; retry if necessary. Returning True from this method keeps the test runner from reporting the test as a success; this way we can retry and only report as a success if we have achieved the required number of successes. 
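        For example, with the default of max_runs=2 and min_passes=1, a
        first success is final; with min_passes=2, one success still
        leaves min_passes unmet, so the test is rerun instead of being
        reported.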
:param test: The test that has succeeded :type test: :class:`nose.case.Test` :return: True, if the test will be rerun; False, if nose should handle it. :rtype: `bool` """ # pylint:disable=invalid-name will_handle = self._handle_test_success(test) test_id = id(test) # If this isn't a rerun, the builtin reporter is going to report it as a success if will_handle and test_id not in self._tests_that_reran: self._tests_that_have_been_reported.add(test_id) # If this test hasn't already been reported as successful, then do it now if not will_handle and test_id in self._tests_that_reran and test_id not in self._tests_that_have_been_reported: self._nose_result.addSuccess(test) return will_handle or None def report(self, stream): """ Baseclass override. Write details about flaky tests to the test report. :param stream: The test stream to which the report can be written. :type stream: `file` """ if self._flaky_report: self._add_flaky_report(stream) def prepareTestResult(self, result): """ Baseclass override. Called right before the first test is run. Stores the test result so that errors and failures can be reported to the nose test result. :param result: The nose test result that needs to be informed of test failures. :type result: :class:`nose.result.TextTestResult` """ # pylint:disable=invalid-name self._nose_result = result def prepareTestCase(self, test): """ Baseclass override. Called right before a test case is run. If the test class is marked flaky and the test callable is not, copy the flaky attributes from the test class to the test callable. :param test: The test that is being prepared to run :type test: :class:`nose.case.Test` """ # pylint:disable=invalid-name if not isinstance(test.test, Failure): test_class = test.test self._copy_flaky_attributes(test, test_class) if self._force_flaky and not self._has_flaky_attributes(test): self._make_test_flaky( test, self._max_runs, self._min_passes) @staticmethod def _get_test_callable_name(test): """ Base class override. """ _, _, class_and_callable_name = test.address() first_dot_index = class_and_callable_name.find('.') test_callable_name = class_and_callable_name[first_dot_index + 1:] return test_callable_name @classmethod def _get_test_callable(cls, test): """ Base class override. :param test: The test that has raised an error or succeeded :type test: :class:`nose.case.Test` """ callable_name = cls._get_test_callable_name(test) test_callable = getattr( test.test, callable_name, getattr(test.test, 'test', test.test), ) return test_callable flaky-3.7.0/flaky/flaky_pytest_plugin.py000066400000000000000000000345701370143502300204120ustar00rootroot00000000000000# coding: utf-8 from __future__ import unicode_literals from _pytest.runner import call_runtest_hook # pylint:disable=import-error from flaky._flaky_plugin import _FlakyPlugin from flaky.utils import ensure_unicode_string def _get_worker_output(item): worker_output = None if hasattr(item, 'workeroutput'): worker_output = item.workeroutput elif hasattr(item, 'slaveoutput'): worker_output = item.slaveoutput return worker_output class FlakyXdist(object): def __init__(self, plugin): super(FlakyXdist, self).__init__() self._plugin = plugin def pytest_testnodedown(self, node, error): """ Pytest hook for responding to a test node shutting down. Copy worker flaky report output so it's available on the master flaky report. 
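        The report is read from the node's workeroutput (or, on older
        versions of xdist, slaveoutput).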
""" # pylint: disable=unused-argument, no-self-use worker_output = _get_worker_output(node) if worker_output is not None and 'flaky_report' in worker_output: self._plugin.stream.write(worker_output['flaky_report']) class FlakyPlugin(_FlakyPlugin): """ Plugin for pytest that allows retrying flaky tests. """ runner = None flaky_report = True force_flaky = False max_runs = None min_passes = None config = None _call_infos = {} _PYTEST_WHEN_SETUP = 'setup' _PYTEST_WHEN_CALL = 'call' _PYTEST_WHENS = (_PYTEST_WHEN_SETUP, _PYTEST_WHEN_CALL) _PYTEST_OUTCOME_PASSED = 'passed' _PYTEST_OUTCOME_FAILED = 'failed' _PYTEST_EMPTY_STATUS = ('', '', '') def pytest_runtest_protocol(self, item, nextitem): """ Pytest hook to override how tests are run. Runs a test collected by pytest. - First, monkey patches the builtin runner module to call back to FlakyPlugin.call_runtest_hook rather than its own. - Then defers to the builtin runner module to run the test, and repeats the process if the test needs to be rerun. - Reports test results to the flaky report. :param item: pytest wrapper for the test function to be run :type item: :class:`Function` :param nextitem: pytest wrapper for the next test function to be run :type nextitem: :class:`Function` :return: True if no further hook implementations should be invoked. :rtype: `bool` """ test_instance = self._get_test_instance(item) self._copy_flaky_attributes(item, test_instance) if self.force_flaky and not self._has_flaky_attributes(item): self._make_test_flaky( item, self.max_runs, self.min_passes, ) original_call_and_report = self.runner.call_and_report self._call_infos[item] = {} should_rerun = True try: self.runner.call_and_report = self.call_and_report while should_rerun: self.runner.pytest_runtest_protocol(item, nextitem) call_info = None excinfo = None for when in self._PYTEST_WHENS: call_info = self._call_infos.get(item, {}).get(when, None) excinfo = getattr(call_info, 'excinfo', None) if excinfo is not None: break if call_info is None: return False passed = excinfo is None if passed: should_rerun = self.add_success(item) else: skipped = excinfo.typename == 'Skipped' should_rerun = not skipped and self.add_failure(item, excinfo) if not should_rerun: item.excinfo = excinfo finally: self.runner.call_and_report = original_call_and_report del self._call_infos[item] return True def call_and_report(self, item, when, log=True, **kwds): """ Monkey patched from the runner plugin. Responsible for running the test and reporting the outcome. Had to be patched to avoid reporting about test retries. :param item: pytest wrapper for the test function to be run :type item: :class:`Function` :param when: The stage of the test being run. Usually one of 'setup', 'call', 'teardown'. :type when: `str` :param log: Whether or not to report the test outcome. Ignored for test retries; flaky doesn't report test retries, only the final outcome. 
:type log: `bool` """ call = call_runtest_hook(item, when, **kwds) self._call_infos[item][when] = call hook = item.ihook report = hook.pytest_runtest_makereport(item=item, call=call) # Start flaky modifications # only retry on call, not setup or teardown if report.when in self._PYTEST_WHENS: if report.outcome == self._PYTEST_OUTCOME_PASSED: if self._should_handle_test_success(item): log = False elif report.outcome == self._PYTEST_OUTCOME_FAILED: err, name = self._get_test_name_and_err(item, when) if self._will_handle_test_error_or_failure(item, name, err): log = False # End flaky modifications if log: hook.pytest_runtest_logreport(report=report) if self.runner.check_interactive_exception(call, report): hook.pytest_exception_interact(node=item, call=call, report=report) return report def _get_test_name_and_err(self, item, when): """ Get the test name and error tuple from a test item. :param item: pytest wrapper for the test function to be run :type item: :class:`Function` :return: The test name and error tuple. :rtype: ((`type`, :class:`Exception`, :class:`Traceback`) or (None, None, None), `unicode`) """ name = self._get_test_callable_name(item) call_info = self._call_infos.get(item, {}).get(when, None) if call_info is not None and call_info.excinfo: err = (call_info.excinfo.type, call_info.excinfo.value, call_info.excinfo.tb) else: err = (None, None, None) return err, name def pytest_terminal_summary(self, terminalreporter): """ Pytest hook to write details about flaky tests to the test report. Write details about flaky tests to the test report. :param terminalreporter: Terminal reporter object. Supports stream writing operations. :type terminalreporter: :class: `TerminalReporter` """ if self.flaky_report: self._add_flaky_report(terminalreporter) def pytest_addoption(self, parser): """ Pytest hook to add an option to the argument parser. :param parser: Parser for command line arguments and ini-file values. :type parser: :class:`Parser` """ self.add_report_option(parser.addoption) group = parser.getgroup( "Force flaky", "Force all tests to be flaky.") self.add_force_flaky_options(group.addoption) def pytest_configure(self, config): """ Pytest hook to get information about how the test run has been configured. :param config: The pytest configuration object for this test run. :type config: :class:`Configuration` """ self.flaky_report = config.option.flaky_report self.flaky_success_report = config.option.flaky_success_report self.force_flaky = config.option.force_flaky self.max_runs = config.option.max_runs self.min_passes = config.option.min_passes self.runner = config.pluginmanager.getplugin("runner") if config.pluginmanager.hasplugin('xdist'): config.pluginmanager.register(FlakyXdist(self), name='flaky.xdist') self.config = config worker_output = _get_worker_output(config) if worker_output is not None: worker_output['flaky_report'] = '' config.addinivalue_line('markers', 'flaky: marks tests to be automatically retried upon failure') def pytest_runtest_setup(self, item): """ Pytest hook to modify the test before it's run. :param item: The test item. """ if not self._has_flaky_attributes(item): if hasattr(item, 'iter_markers'): for marker in item.iter_markers(name='flaky'): self._make_test_flaky(item, *marker.args, **marker.kwargs) break elif hasattr(item, 'get_marker'): marker = item.get_marker('flaky') if marker: self._make_test_flaky(item, *marker.args, **marker.kwargs) def pytest_sessionfinish(self): """ Pytest hook to take a final action after the session is complete. 
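        Under pytest-xdist, this hook also runs once in each worker
        process before it shuts down.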
Copy flaky report contents so that the master process can read it. """ worker_output = _get_worker_output(self.config) if worker_output is not None: worker_output['flaky_report'] += self.stream.getvalue() @property def stream(self): return self._stream @property def flaky_success_report(self): """ Property for setting whether or not the plugin will print results about flaky tests that were successful. :return: Whether or not flaky will report on test successes. :rtype: `bool` """ return self._flaky_success_report @flaky_success_report.setter def flaky_success_report(self, value): """ Property for setting whether or not the plugin will print results about flaky tests that were successful. :param value: Whether or not flaky will report on test successes. :type value: `bool` """ self._flaky_success_report = value @staticmethod def _get_test_instance(item): """ Get the object containing the test. This might be `test.instance` or `test.parent.obj`. """ test_instance = getattr(item, 'instance', None) if test_instance is None: if hasattr(item, 'parent') and hasattr(item.parent, 'obj'): test_instance = item.parent.obj return test_instance def add_success(self, item): """ Called when a test succeeds. Count remaining retries and compare with number of required successes that have not yet been achieved; retry if necessary. :param item: pytest wrapper for the test function that has succeeded :type item: :class:`Function` """ return self._handle_test_success(item) def add_failure(self, item, err): """ Called when a test fails. Count remaining retries and compare with number of required successes that have not yet been achieved; retry if necessary. :param item: pytest wrapper for the test function that has succeeded :type item: :class:`Function` :param err: Information about the test failure :type err: :class: `ExceptionInfo` """ if err is not None: error = (err.type, err.value, err.traceback) else: error = (None, None, None) return self._handle_test_error_or_failure(item, error) @staticmethod def _get_test_callable_name(test): """ Base class override. """ return test.name @classmethod def _get_test_callable(cls, test): """ Base class override. :param test: The test that has raised an error or succeeded :type test: :class:`Function` :return: The test declaration, callable and name that is being run :rtype: `tuple` of `object`, `callable`, `unicode` """ callable_name = cls._get_test_callable_name(test) if callable_name.endswith(']') and '[' in callable_name: unparametrized_name = callable_name[:callable_name.index('[')] else: unparametrized_name = callable_name test_instance = cls._get_test_instance(test) if hasattr(test_instance, callable_name): # Test is a method of a class def_and_callable = getattr(test_instance, callable_name) return def_and_callable if hasattr(test_instance, unparametrized_name): # Test is a parametrized method of a class def_and_callable = getattr(test_instance, unparametrized_name) return def_and_callable if hasattr(test, 'module'): if hasattr(test.module, callable_name): # Test is a function in a module def_and_callable = getattr(test.module, callable_name) return def_and_callable if hasattr(test.module, unparametrized_name): # Test is a parametrized function in a module def_and_callable = getattr(test.module, unparametrized_name) return def_and_callable elif hasattr(test, 'runtest'): # Test is a doctest or other non-Function Item return test.runtest return None def _mark_test_for_rerun(self, test): """Base class override. 
Rerun a flaky test.""" def _log_test_failure(self, test_callable_name, err, message): """ Add messaging about a test failure to the stream, which will be printed by the plugin's report method. """ self._stream.writelines([ ensure_unicode_string(test_callable_name), message, '\n\t', ensure_unicode_string(err[0]), '\n\t', ensure_unicode_string(err[1]), '\n\t', ensure_unicode_string(err[2]), '\n', ]) PLUGIN = FlakyPlugin() # pytest only processes hooks defined on the module # find all hooks defined on the plugin class and copy them to the module globals for _pytest_hook in dir(PLUGIN): if _pytest_hook.startswith('pytest_'): globals()[_pytest_hook] = getattr(PLUGIN, _pytest_hook) flaky-3.7.0/flaky/multiprocess_string_io.py000066400000000000000000000017311370143502300211150ustar00rootroot00000000000000# coding: utf-8 from __future__ import unicode_literals import multiprocessing class MultiprocessingStringIO(object): """ Provide a StringIO-like interface to the multiprocessing ListProxy. The multiprocessing ListProxy needs to be instantiated before the flaky plugin is configured, so the list is created as a class variable. """ _manager = multiprocessing.Manager() proxy = _manager.list() # pylint:disable=no-member def getvalue(self): """ Shadow the StringIO.getvalue method. """ return ''.join(i for i in self.proxy) def writelines(self, content_list): """ Shadow the StringIO.writelines method. Ingests a list and translates that to a string """ for item in content_list: self.write(item) def write(self, content): """ Shadow the StringIO.write method. """ content.strip('\n') self.proxy.append(content) flaky-3.7.0/flaky/names.py000066400000000000000000000013221370143502300154060ustar00rootroot00000000000000# coding: utf-8 from __future__ import unicode_literals class FlakyNames(object): """ Names of flaky attributes that will be added to flaky tests """ CURRENT_ERRORS = '_flaky_current_errors' CURRENT_RUNS = '_flaky_current_runs' CURRENT_PASSES = '_flaky_current_passes' MAX_RUNS = '_flaky_max_runs' MIN_PASSES = '_flaky_min_passes' RERUN_FILTER = '_flaky_rerun_filter' def items(self): return ( self.CURRENT_ERRORS, self.CURRENT_PASSES, self.CURRENT_RUNS, self.MAX_RUNS, self.MIN_PASSES, self.RERUN_FILTER, ) def __iter__(self): for attr in self.items(): yield attr flaky-3.7.0/flaky/utils.py000066400000000000000000000011361370143502300154460ustar00rootroot00000000000000# coding: utf-8 from __future__ import unicode_literals # pylint:disable=invalid-name try: unicode_type = unicode except NameError: unicode_type = str def ensure_unicode_string(obj): """ Return a unicode string representation of the given obj. :param obj: The obj we want to represent in unicode :type obj: varies :rtype: `unicode` """ try: return unicode_type(obj) except UnicodeDecodeError: if hasattr(obj, 'decode'): return obj.decode('utf-8', 'replace') return str(obj).decode('utf-8', 'replace') flaky-3.7.0/requirements-dev.txt000066400000000000000000000001571370143502300166700ustar00rootroot00000000000000-rrequirements.txt coveralls genty mock nose ordereddict pycodestyle pylint pytest pytest-cov pytest-xdist tox flaky-3.7.0/requirements.txt000066400000000000000000000000001370143502300160770ustar00rootroot00000000000000flaky-3.7.0/setup.cfg000066400000000000000000000003741370143502300144520ustar00rootroot00000000000000[bdist_wheel] # This flag says that the code is written to work on both Python 2 and Python # 3. If at all possible, it is good practice to do this. 
If you cannot, you # will need to generate wheels for each Python version that you support. universal=1flaky-3.7.0/setup.py000066400000000000000000000052121370143502300143370ustar00rootroot00000000000000# -*- coding: utf-8 -*- from __future__ import unicode_literals from os.path import dirname, join import sys from setuptools.command.test import test as TestCommand from setuptools import setup, find_packages CLASSIFIERS = [ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Topic :: Software Development :: Testing', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: Implementation :: CPython', 'Programming Language :: Python :: Implementation :: PyPy', 'Operating System :: OS Independent', 'Operating System :: POSIX', 'Operating System :: Microsoft :: Windows', 'Operating System :: MacOS :: MacOS X', ] class Tox(TestCommand): user_options = [(b'tox-args=', b'a', 'Arguments to pass to tox')] def initialize_options(self): TestCommand.initialize_options(self) self.tox_args = None def finalize_options(self): TestCommand.finalize_options(self) self.test_args = [] self.test_suite = True def run_tests(self): import shlex import tox args = self.tox_args if args: args = shlex.split(self.tox_args) errno = tox.cmdline(args=args) sys.exit(errno) def main(): base_dir = dirname(__file__) setup( name='flaky', version='3.7.0', description='Plugin for nose or pytest that automatically reruns flaky tests.', long_description=open(join(base_dir, 'README.rst')).read(), author='Box', author_email='oss@box.com', url='https://github.com/box/flaky', license='Apache Software License, Version 2.0, http://www.apache.org/licenses/LICENSE-2.0', packages=find_packages(exclude=['test*']), test_suite='test', tests_require=['tox'], cmdclass={'test': Tox}, zip_safe=False, entry_points={ 'nose.plugins.0.10': [ 'flaky = flaky.flaky_nose_plugin:FlakyPlugin' ], 'pytest11': [ 'flaky = flaky.flaky_pytest_plugin' ] }, keywords='nose pytest plugin flaky tests rerun retry', python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*', classifiers=CLASSIFIERS, ) if __name__ == '__main__': main() flaky-3.7.0/test/000077500000000000000000000000001370143502300136045ustar00rootroot00000000000000flaky-3.7.0/test/__init__.py000066400000000000000000000000711370143502300157130ustar00rootroot00000000000000# coding: utf-8 from __future__ import unicode_literals flaky-3.7.0/test/test.iml000066400000000000000000000006461370143502300152740ustar00rootroot00000000000000 flaky-3.7.0/test/test_flaky_decorator.py000066400000000000000000000026331370143502300203710ustar00rootroot00000000000000# coding: utf-8 from __future__ import unicode_literals from unittest import TestCase from flaky.flaky_decorator import flaky from flaky.names import FlakyNames class TestFlakyDecorator(TestCase): def test_flaky_raises_for_non_positive_min_passes(self): def test_something(): pass self.assertRaises( ValueError, lambda: flaky(min_passes=0)(test_something), ) def test_flaky_raises_for_max_runs_less_than_min_passes(self): def test_something(): pass self.assertRaises( ValueError, lambda: flaky(max_runs=2, min_passes=3)(test_something), ) def test_flaky_adds_flaky_attributes_to_test_method(self): 
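        # Arbitrary, distinct values make it easy to verify that the
        # decorator copies them onto the test function.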
min_passes = 4 max_runs = 7 @flaky(max_runs, min_passes) def test_something(): pass flaky_attribute = { attr: getattr( test_something, attr, None ) for attr in FlakyNames() } self.assertIsNotNone(flaky_attribute) self.assertDictContainsSubset( { FlakyNames.MIN_PASSES: min_passes, FlakyNames.MAX_RUNS: max_runs, FlakyNames.CURRENT_PASSES: 0, FlakyNames.CURRENT_RUNS: 0, FlakyNames.CURRENT_ERRORS: None }, flaky_attribute ) flaky-3.7.0/test/test_flaky_plugin.py000066400000000000000000000044601370143502300177050ustar00rootroot00000000000000# coding: utf-8 from __future__ import unicode_literals from io import StringIO from unittest import TestCase from flaky._flaky_plugin import _FlakyPlugin from flaky.names import FlakyNames from genty import genty, genty_dataset @genty class TestFlakyPlugin(TestCase): def setUp(self): super(TestFlakyPlugin, self).setUp() self._flaky_plugin = _FlakyPlugin() def test_flaky_plugin_handles_non_ascii_byte_string_in_exception(self): mock_method_name = 'my_method' mock_exception = 'ńőń ȁŝćȉȉ ŝƭȕƒƒ'.encode('utf-16') mock_message = 'information about retries' # pylint:disable=protected-access self._flaky_plugin._log_test_failure( mock_method_name, (ValueError.__name__, mock_exception, ''), mock_message, ) @genty_dataset( default_not_started=(2, 1, 0, 0, False), default_one_failure=(2, 1, 1, 0, False), default_one_success=(2, 1, 1, 1, False), default_two_failures=(2, 1, 2, 0, True), default_one_failure_one_success=(2, 1, 2, 1, False), three_two_not_started=(3, 2, 0, 0, False), three_two_one_failure=(3, 2, 1, 0, False), three_two_one_success=(3, 2, 1, 1, False), three_two_two_failures=(3, 2, 2, 0, True), three_two_one_failure_one_success=(3, 2, 2, 1, False), three_two_two_successes=(3, 2, 2, 2, False), ) def test_flaky_plugin_identifies_failure( self, max_runs, min_passes, current_runs, current_passes, expect_fail, ): flaky = { FlakyNames.CURRENT_PASSES: current_passes, FlakyNames.CURRENT_RUNS: current_runs, FlakyNames.MAX_RUNS: max_runs, FlakyNames.MIN_PASSES: min_passes, } # pylint:disable=protected-access self.assertEqual( self._flaky_plugin._has_flaky_test_failed(flaky), expect_fail, ) @genty_dataset('ascii stuff', 'ńőń ȁŝćȉȉ ŝƭȕƒƒ') def test_write_unicode_to_stream(self, message): stream = StringIO() stream.write('ascii stuff') # pylint:disable=protected-access self._flaky_plugin._stream.write(message) self._flaky_plugin._add_flaky_report(stream) flaky-3.7.0/test/test_multiprocess_string_io.py000066400000000000000000000031541370143502300220260ustar00rootroot00000000000000# coding: utf-8 from __future__ import unicode_literals from io import StringIO from unittest import TestCase from genty import genty, genty_dataset @genty class TestMultiprocessStringIO(TestCase): _unicode_string = 'Plain Hello' _unicode_string_non_ascii = 'ńőń ȁŝćȉȉ ŝƭȕƒƒ' def setUp(self): super(TestMultiprocessStringIO, self).setUp() from flaky.multiprocess_string_io import MultiprocessingStringIO self._string_io = StringIO() self._mp_string_io = MultiprocessingStringIO() del self._mp_string_io.proxy[:] self._string_ios = (self._string_io, self._mp_string_io) @genty_dataset( no_writes=([], ''), one_write=([_unicode_string], _unicode_string), two_writes=( [_unicode_string, _unicode_string_non_ascii], '{}{}'.format(_unicode_string, _unicode_string_non_ascii), ) ) def test_write_then_read(self, writes, expected_value): for string_io in self._string_ios: for item in writes: string_io.write(item) self.assertEqual(string_io.getvalue(), expected_value) @genty_dataset( no_writes=([], ''), 
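        # Each dataset is (writes, expected getvalue() result).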
one_write=([_unicode_string], _unicode_string), two_writes=( [_unicode_string, _unicode_string_non_ascii], '{}{}'.format(_unicode_string, _unicode_string_non_ascii), ) ) def test_writelines_then_read(self, lines, expected_value): for string_io in self._string_ios: string_io.writelines(lines) self.assertEqual(string_io.getvalue(), expected_value) flaky-3.7.0/test/test_nose/000077500000000000000000000000001370143502300156075ustar00rootroot00000000000000flaky-3.7.0/test/test_nose/__init__.py000066400000000000000000000001121370143502300177120ustar00rootroot00000000000000# coding: utf-8 from __future__ import unicode_literals, absolute_import flaky-3.7.0/test/test_nose/test_flaky_nose_plugin.py000066400000000000000000000376661370143502300227520ustar00rootroot00000000000000# coding: utf-8 from __future__ import unicode_literals from unittest import TestCase from genty import genty, genty_dataset import mock from mock import MagicMock, Mock, patch from flaky import defaults, flaky_nose_plugin from flaky.flaky_decorator import flaky from flaky.names import FlakyNames @genty class TestFlakyNosePlugin(TestCase): def setUp(self): super(TestFlakyNosePlugin, self).setUp() self._mock_test_result = MagicMock() self._mock_stream = None self._flaky_plugin = flaky_nose_plugin.FlakyPlugin() self._mock_nose_result = Mock(flaky_nose_plugin.TextTestResult) self._flaky_plugin.prepareTestResult(self._mock_nose_result) self._mock_test = MagicMock(name='flaky_plugin_test') self._mock_test_case = MagicMock( name='flaky_plugin_test_case', spec=TestCase ) self._mock_test_case.address = MagicMock() self._mock_test_case.test = self._mock_test self._mock_test_module_name = 'test_module' self._mock_test_class_name = 'TestClass' self._mock_test_method_name = 'test_method' self._mock_test_names = '{}:{}.{}'.format( self._mock_test_module_name, self._mock_test_class_name, self._mock_test_method_name ) self._mock_exception = Exception('Error in {}'.format( self._mock_test_method_name) ) self._mock_stack_trace = '' self._mock_exception_type = Exception self._mock_error = ( self._mock_exception_type, self._mock_exception, None, ) self._mock_test_method = MagicMock( name=self._mock_test_method_name, spec=['__call__'] + list(FlakyNames().items()), ) setattr( self._mock_test, self._mock_test_method_name, self._mock_test_method, ) def _assert_flaky_plugin_configured(self): options = Mock() options.multiprocess_workers = 0 conf = Mock() self._flaky_plugin.enabled = True with patch.object(flaky_nose_plugin, 'TextTestResult') as flaky_result: flaky_result.return_value = self._mock_test_result from io import StringIO self._mock_stream = MagicMock(spec=StringIO) with patch.object(self._flaky_plugin, '_get_stream') as get_stream: get_stream.return_value = self._mock_stream self._flaky_plugin.configure(options, conf) def test_flaky_plugin_report(self): flaky_report = 'Flaky tests passed; others failed. ' \ 'No more tests; that ship has sailed.' 
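        # Whatever is written to the plugin stream should be wrapped in
        # the flaky report banner when the report is written.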
self._test_flaky_plugin_report(flaky_report) def test_flaky_plugin_handles_success_for_test_method(self): self._test_flaky_plugin_handles_success() def test_flaky_plugin_handles_success_for_test_instance(self): self._test_flaky_plugin_handles_success(is_test_method=False) def test_flaky_plugin_handles_success_for_needs_rerun(self): self._test_flaky_plugin_handles_success(min_passes=2) def test_flaky_plugin_ignores_success_for_non_flaky_test(self): self._expect_test_not_flaky() self._flaky_plugin.addSuccess(self._mock_test_case) self._assert_test_ignored() def test_flaky_plugin_ignores_error_for_non_flaky_test(self): self._expect_test_not_flaky() self._flaky_plugin.handleError(self._mock_test_case, None) self._assert_test_ignored() def test_flaky_plugin_ignores_failure_for_non_flaky_test(self): self._expect_test_not_flaky() self._flaky_plugin.handleFailure(self._mock_test_case, None) self._assert_test_ignored() def test_flaky_plugin_ignores_error_for_nose_failure(self): self._mock_test_case.address.return_value = ( None, self._mock_test_module_name, None, ) self._flaky_plugin.handleError(self._mock_test_case, None) self._assert_test_ignored() def test_flaky_plugin_handles_error_for_test_method(self): self._test_flaky_plugin_handles_failure_or_error() def test_flaky_plugin_handles_error_for_test_instance(self): self._test_flaky_plugin_handles_failure_or_error(is_test_method=False) def test_flaky_plugin_handles_failure_for_test_method(self): self._test_flaky_plugin_handles_failure_or_error(is_failure=True) def test_flaky_plugin_handles_failure_for_test_instance(self): self._test_flaky_plugin_handles_failure_or_error( is_failure=True, is_test_method=False ) def test_flaky_plugin_handles_failure_for_no_more_retries(self): self._test_flaky_plugin_handles_failure_or_error( is_failure=True, max_runs=1 ) def test_flaky_plugin_handles_additional_errors(self): self._test_flaky_plugin_handles_failure_or_error( current_errors=[self._mock_error] ) def test_flaky_plugin_handles_bare_test(self): self._mock_test_names = self._mock_test_method_name self._mock_test.test = Mock() self._expect_call_test_address() attrib = defaults.default_flaky_attributes(2, 1) for name, value in attrib.items(): setattr( self._mock_test.test, name, value, ) delattr(self._mock_test, self._mock_test_method_name) self._flaky_plugin.prepareTestCase(self._mock_test_case) self.assertTrue(self._flaky_plugin.handleError( self._mock_test_case, self._mock_error, )) self.assertFalse(self._flaky_plugin.handleError( self._mock_test_case, self._mock_error, )) def _expect_call_test_address(self): self._mock_test_case.address.return_value = ( None, None, self._mock_test_names ) def _expect_test_flaky(self, is_test_method, max_runs, min_passes): self._expect_call_test_address() if is_test_method: mock_test_method = getattr( self._mock_test, self._mock_test_method_name ) for flaky_attr in FlakyNames(): setattr(self._mock_test, flaky_attr, None) setattr(mock_test_method, flaky_attr, None) flaky(max_runs, min_passes)(mock_test_method) else: flaky(max_runs, min_passes)(self._mock_test) mock_test_method = getattr( self._mock_test, self._mock_test_method_name ) for flaky_attr in FlakyNames(): setattr(mock_test_method, flaky_attr, None) def _expect_test_not_flaky(self): self._expect_call_test_address() for test_object in ( self._mock_test, getattr(self._mock_test, self._mock_test_method_name) ): for flaky_attr in FlakyNames(): setattr(test_object, flaky_attr, None) def _assert_test_ignored(self): self._mock_test_case.address.assert_called_with() 
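        # Beyond the address lookup, the test case, the test and the
        # nose result should all be untouched.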
self.assertEqual( self._mock_test_case.mock_calls, [mock.call.address()], ) self.assertEqual(self._mock_test.mock_calls, []) self.assertEqual(self._mock_nose_result.mock_calls, []) def _get_flaky_attributes(self): actual_flaky_attributes = { attr: getattr( self._mock_test_case, attr, None, ) for attr in FlakyNames() } for key, value in actual_flaky_attributes.items(): if isinstance(value, list): actual_flaky_attributes[key] = tuple(value) return actual_flaky_attributes def _set_flaky_attribute(self, attr, value): setattr(self._mock_test, attr, value) def _assert_flaky_attributes_contains( self, expected_flaky_attributes, ): actual_flaky_attributes = self._get_flaky_attributes() self.assertDictContainsSubset( expected_flaky_attributes, actual_flaky_attributes, 'Unexpected flaky attributes. Expected {} got {}'.format( expected_flaky_attributes, actual_flaky_attributes ) ) def _test_flaky_plugin_handles_failure_or_error( self, current_errors=None, current_passes=0, current_runs=0, is_failure=False, is_test_method=True, max_runs=2, min_passes=1, ): self._assert_flaky_plugin_configured() self._expect_test_flaky(is_test_method, max_runs, min_passes) if current_errors is None: current_errors = [self._mock_error] else: current_errors.append(self._mock_error) self._set_flaky_attribute( FlakyNames.CURRENT_ERRORS, current_errors, ) self._set_flaky_attribute( FlakyNames.CURRENT_PASSES, current_passes, ) self._set_flaky_attribute( FlakyNames.CURRENT_RUNS, current_runs, ) retries_remaining = current_runs + 1 < max_runs too_few_passes = current_passes < min_passes expected_plugin_handles_failure = too_few_passes and retries_remaining did_plugin_retry_test = max_runs > 1 self._flaky_plugin.prepareTestCase(self._mock_test_case) if is_failure: actual_plugin_handles_failure = self._flaky_plugin.handleFailure( self._mock_test_case, self._mock_error, ) else: actual_plugin_handles_failure = self._flaky_plugin.handleError( self._mock_test_case, self._mock_error, ) self.assertEqual( expected_plugin_handles_failure or None, actual_plugin_handles_failure, 'Expected plugin{} to handle the test run, but it did{}.'.format( ' to' if expected_plugin_handles_failure else '', '' if actual_plugin_handles_failure else ' not' ), ) self._assert_flaky_attributes_contains( { FlakyNames.CURRENT_RUNS: current_runs + 1, FlakyNames.CURRENT_ERRORS: tuple(current_errors), }, ) expected_test_case_calls = [mock.call.address(), mock.call.address()] expected_result_calls = [] if expected_plugin_handles_failure: expected_test_case_calls.append(('__hash__',)) expected_stream_calls = [mock.call.writelines([ self._mock_test_method_name, ' failed ({} runs remaining out of {}).'.format( max_runs - current_runs - 1, max_runs ), 'Exception: Error in test_method', '\n', ])] else: if did_plugin_retry_test: if is_failure: expected_result_calls.append( mock.call.addFailure( self._mock_test_case, self._mock_error, ), ) else: expected_result_calls.append(mock.call.addError( self._mock_test_case, self._mock_error, )) expected_stream_calls = [mock.call.writelines([ self._mock_test_method_name, ' failed; it passed {} out of the required {} times.'.format( current_passes, min_passes ), 'Exception: Error in test_method', '\n' ])] self.assertEqual( self._mock_nose_result.mock_calls, expected_result_calls, ) self.assertEqual( self._mock_test_case.mock_calls, expected_test_case_calls, 'Unexpected TestCase calls: {} vs {}'.format( self._mock_test_case.mock_calls, expected_test_case_calls ) ) self.assertEqual(self._mock_stream.mock_calls, expected_stream_calls) 
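    # The helper below mirrors the plugin's retry rule: a successful run
    # is only handled (rerun) while the pass count is still below
    # min_passes and runs remain out of max_runs.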
def _test_flaky_plugin_handles_success( self, current_passes=0, current_runs=0, is_test_method=True, max_runs=2, min_passes=1 ): self._assert_flaky_plugin_configured() self._expect_test_flaky(is_test_method, max_runs, min_passes) self._set_flaky_attribute( FlakyNames.CURRENT_PASSES, current_passes, ) self._set_flaky_attribute( FlakyNames.CURRENT_RUNS, current_runs, ) retries_remaining = current_runs + 1 < max_runs too_few_passes = current_passes + 1 < min_passes expected_plugin_handles_success = too_few_passes and retries_remaining self._flaky_plugin.prepareTestCase(self._mock_test_case) actual_plugin_handles_success = self._flaky_plugin.addSuccess( self._mock_test_case, ) self.assertEqual( expected_plugin_handles_success or None, actual_plugin_handles_success, 'Expected plugin{} to handle the test run, but it did{}.'.format( ' not' if expected_plugin_handles_success else '', '' if actual_plugin_handles_success else ' not' ), ) self._assert_flaky_attributes_contains( { FlakyNames.CURRENT_RUNS: current_runs + 1, FlakyNames.CURRENT_PASSES: current_passes + 1, }, ) expected_test_case_calls = [mock.call.address(), mock.call.address()] expected_stream_calls = [mock.call.writelines([ self._mock_test_method_name, " passed {} out of the required {} times. ".format( current_passes + 1, min_passes, ), ])] if expected_plugin_handles_success: _rerun_text = 'Running test again until it passes {0} times.\n' expected_test_case_calls.append(('__hash__',)) expected_stream_calls.append( mock.call.write(_rerun_text.format(min_passes)), ) else: expected_stream_calls.append(mock.call.write('Success!\n')) self.assertEqual( self._mock_test_case.mock_calls, expected_test_case_calls, 'Unexpected TestCase calls = {} vs {}'.format( self._mock_test_case.mock_calls, expected_test_case_calls, ), ) self.assertEqual(self._mock_stream.mock_calls, expected_stream_calls) def _test_flaky_plugin_report(self, expected_stream_value): self._assert_flaky_plugin_configured() mock_stream = Mock() self._mock_stream.getvalue.return_value = expected_stream_value self._flaky_plugin.report(mock_stream) self.assertEqual( mock_stream.mock_calls, [ mock.call.write('===Flaky Test Report===\n\n'), mock.call.write(expected_stream_value), mock.call.write('\n===End Flaky Test Report===\n'), ], ) @genty_dataset( multiprocess_plugin_absent=(None, 'StringIO'), processes_argument_absent=(0, 'StringIO'), processes_equals_one=(1, 'MultiprocessingStringIO'), processes_equals_two=(2, 'MultiprocessingStringIO'), ) def test_flaky_plugin_get_stream(self, mp_workers, expected_class_name): options = Mock() conf = Mock() self._flaky_plugin.enabled = True options.multiprocess_workers = mp_workers if mp_workers is None: del options.multiprocess_workers self._flaky_plugin.configure(options, conf) # pylint:disable=protected-access self.assertEqual( self._flaky_plugin._stream.__class__.__name__, expected_class_name, ) flaky-3.7.0/test/test_nose/test_nose_example.py000066400000000000000000000054771370143502300217140ustar00rootroot00000000000000# coding: utf-8 from __future__ import unicode_literals from unittest import TestCase, skip from genty import genty, genty_dataset from nose.tools import raises from flaky import flaky # This is an end-to-end example of the flaky package in action. Consider it # a live tutorial, showing the various features in action. 
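# A hedged, illustrative sketch that is not part of the original suite: the
# rerun_filter argument limits which failures trigger a retry. The helper
# name below is ours; its signature follows the documented contract
# (err, name, test, plugin) -> should_rerun.
def _rerun_on_assertion_error(err, name, test, plugin):
    # pylint:disable=unused-argument
    # err is the sys.exc_info() tuple; only retry plain assertion failures.
    return issubclass(err[0], AssertionError)


@flaky(max_runs=3, rerun_filter=_rerun_on_assertion_error)
def test_flaky_function_with_rerun_filter(param=[]):
    # pylint:disable=dangerous-default-value
    # The first run raises AssertionError, so the filter allows a rerun;
    # the second run sees the appended element and passes.
    param.append(None)
    assert len(param) == 2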
class ExampleTests(TestCase): _threshold = -1 def test_non_flaky_thing(self): """Flaky will not interact with this test""" @raises(AssertionError) def test_non_flaky_failing_thing(self): """Flaky will also not interact with this test""" self.assertEqual(0, 1) @flaky(3, 2) def test_flaky_thing_that_fails_then_succeeds(self): """ Flaky will run this test 3 times. It will fail once and then succeed twice. """ self._threshold += 1 if self._threshold < 1: raise Exception("Threshold is not high enough: {} vs {}.".format( self._threshold, 1), ) @flaky(3, 2) def test_flaky_thing_that_succeeds_then_fails_then_succeeds(self): """ Flaky will run this test 3 times. It will succeed once, fail once, and then succeed one more time. """ self._threshold += 1 if self._threshold == 1: self.assertEqual(0, 1) @flaky(2, 2) def test_flaky_thing_that_always_passes(self): """Flaky will run this test twice. Both will succeed.""" @skip("This really fails! Remove this decorator to see the test failure.") @flaky() def test_flaky_thing_that_always_fails(self): """Flaky will run this test twice. Both will fail.""" self.assertEqual(0, 1) @flaky class ExampleFlakyTests(TestCase): _threshold = -1 def test_flaky_thing_that_fails_then_succeeds(self): """ Flaky will run this test twice. It will fail once and then succeed. """ self._threshold += 1 if self._threshold < 1: raise Exception("Threshold is not high enough: {} vs {}.".format( self._threshold, 1), ) def test_function(): """ Nose will import this function and wrap it in a :class:`FunctionTestCase`. It's included in the example to make sure flaky handles it correctly. """ @flaky def test_flaky_function(param=[]): # pylint:disable=dangerous-default-value param_length = len(param) param.append(None) assert param_length == 1 @genty class ExampleFlakyTestsWithUnicodeTestNames(ExampleFlakyTests): @genty_dataset('ascii name', 'ńőń ȁŝćȉȉ ŝƭȕƒƒ') def test_non_flaky_thing(self, message): self._threshold += 1 if self._threshold < 1: raise Exception( "Threshold is not high enough: {} vs {} for '{}'.".format( self._threshold, 1, message), ) flaky-3.7.0/test/test_nose/test_nose_options_example.py000066400000000000000000000031371370143502300234560ustar00rootroot00000000000000# coding: utf-8 from __future__ import unicode_literals from unittest import TestCase from flaky import flaky # This is a series of tests that do not use the flaky decorator; the flaky # behavior is intended to be enabled with the --force-flaky option on the # command line. class ExampleTests(TestCase): _threshold = -2 def test_something_flaky(self): """ Flaky will run this test twice. It will fail once and then succeed once. This ensures that we mark tests as flaky even if they don't have a decorator when we use the command-line options. """ self._threshold += 1 if self._threshold < 0: raise Exception("Threshold is not high enough.") @flaky(3, 1) def test_flaky_thing_that_fails_then_succeeds(self): """ Flaky will run this test 3 times. It will fail twice and then succeed once. This ensures that the flaky decorator overrides any command-line options we specify. """ self._threshold += 1 if self._threshold < 1: raise Exception("Threshold is not high enough.") @flaky(3, 1) class ExampleFlakyTests(TestCase): _threshold = -1 def test_flaky_thing_that_fails_then_succeeds(self): """ Flaky will run this test 3 times. It will fail twice and then succeed once. This ensures that the flaky decorator on a test suite overrides any command-line options we specify. 
""" self._threshold += 1 if self._threshold < 1: raise Exception("Threshold is not high enough.") flaky-3.7.0/test/test_pytest/000077500000000000000000000000001370143502300161735ustar00rootroot00000000000000flaky-3.7.0/test/test_pytest/__init__.py000066400000000000000000000001121370143502300202760ustar00rootroot00000000000000# coding: utf-8 from __future__ import unicode_literals, absolute_import flaky-3.7.0/test/test_pytest/pytest_generate_example/000077500000000000000000000000001370143502300231105ustar00rootroot00000000000000flaky-3.7.0/test/test_pytest/pytest_generate_example/__init__.py000066400000000000000000000000711370143502300252170ustar00rootroot00000000000000# coding: utf-8 from __future__ import unicode_literals flaky-3.7.0/test/test_pytest/pytest_generate_example/conftest.py000066400000000000000000000004201370143502300253030ustar00rootroot00000000000000# coding: utf-8 from __future__ import unicode_literals def pytest_generate_tests(metafunc): """ Parameterize a fixture named 'dummy_list' with an empty list """ if 'dummy_list' in metafunc.fixturenames: metafunc.parametrize("dummy_list", [[]]) flaky-3.7.0/test/test_pytest/pytest_generate_example/test_pytest_generate_example.py000066400000000000000000000006761370143502300314470ustar00rootroot00000000000000# coding: utf-8 from __future__ import unicode_literals from flaky import flaky @flaky def test_something_flaky(dummy_list): dummy_list.append(0) assert len(dummy_list) > 1 class TestExample(object): _threshold = -1 @flaky def test_flaky_thing_that_fails_then_succeeds(self, dummy_list): # pylint:disable=unused-argument,no-self-use TestExample._threshold += 1 assert TestExample._threshold >= 1 flaky-3.7.0/test/test_pytest/test_flaky_pytest_plugin.py000066400000000000000000000366771370143502300237230ustar00rootroot00000000000000# coding: utf-8 from __future__ import unicode_literals from io import StringIO from mock import Mock, patch # pylint:disable=import-error import pytest from _pytest.runner import CallInfo # pylint:enable=import-error from flaky import flaky from flaky import _flaky_plugin from flaky.flaky_pytest_plugin import ( call_runtest_hook, FlakyPlugin, FlakyXdist, PLUGIN, ) from flaky.names import FlakyNames from flaky.utils import unicode_type @pytest.fixture def mock_io(monkeypatch): mock_string_io = StringIO() def string_io(): return mock_string_io monkeypatch.setattr(_flaky_plugin, 'StringIO', string_io) return mock_string_io @pytest.fixture def string_io(): return StringIO() @pytest.fixture def flaky_plugin(mock_io): # pylint:disable=unused-argument return FlakyPlugin() @pytest.fixture def mock_plugin_rerun(monkeypatch, flaky_plugin): calls = [] def rerun_test(test): calls.append(test) monkeypatch.setattr(flaky_plugin, '_mark_test_for_rerun', rerun_test) def get_calls(): return calls return get_calls @pytest.fixture(params=['instance', 'module', 'parent']) def flaky_test(request, mock_config): def test_function(): pass test_owner = Mock() setattr(test_owner, 'test_method', test_function) setattr(test_owner, 'obj', test_owner) kwargs = {request.param: test_owner} test = MockTestItem(**kwargs) setattr(test, 'owner', test_owner) setattr(test, 'config', mock_config) return test @pytest.fixture def call_info(flaky_test): return MockFlakyCallInfo(flaky_test, 'call') @pytest.fixture def mock_error(): return MockError() class MockError(object): def __init__(self): super(MockError, self).__init__() self.type = Mock() self.value = Mock() self.value.message = 'failed' self.traceback = Mock() class MockTestItem(object): 
name = 'test_method' instance = None module = None parent = None def __init__(self, instance=None, module=None, parent=None): if instance is not None: self.instance = instance if module is not None: self.module = module if parent is not None: self.parent = parent def runtest(self): pass class MockConfig(object): def getvalue(self, key): # pylint:disable=unused-argument,no-self-use return False def getoption(self, key, default): # pylint:disable=unused-argument,no-self-use return default @pytest.fixture def mock_config(): return MockConfig() class MockFlakyCallInfo(CallInfo): def __init__(self, item, when): # pylint:disable=super-init-not-called # super init not called because it has unwanted side effects self.when = when self._item = item def test_flaky_plugin_report(flaky_plugin, mock_io, string_io): flaky_report = 'Flaky tests passed; others failed. ' \ 'No more tests; that ship has sailed.' expected_string_io = StringIO() expected_string_io.write('===Flaky Test Report===\n\n') expected_string_io.write(flaky_report) expected_string_io.write('\n===End Flaky Test Report===\n') mock_io.write(flaky_report) flaky_plugin.pytest_terminal_summary(string_io) assert string_io.getvalue() == expected_string_io.getvalue() @pytest.fixture(params=( {}, {'flaky_report': ''}, {'flaky_report': 'ŝȁḿҏľȅ ƭȅхƭ'}, )) def mock_xdist_node_workeroutput(request): return request.param @pytest.fixture(params=(None, object())) def mock_xdist_error(request): return request.param @pytest.mark.parametrize('assign_workeroutput', (True, False)) def test_flaky_xdist_nodedown( mock_xdist_node_workeroutput, assign_workeroutput, mock_xdist_error ): flaky_xdist = FlakyXdist(PLUGIN) node = Mock() if assign_workeroutput: node.workeroutput = mock_xdist_node_workeroutput else: delattr(node, 'workeroutput') delattr(node, 'slaveoutput') mock_stream = Mock(StringIO) with patch.object(PLUGIN, '_stream', mock_stream): flaky_xdist.pytest_testnodedown(node, mock_xdist_error) if assign_workeroutput and 'flaky_report' in mock_xdist_node_workeroutput: mock_stream.write.assert_called_once_with( mock_xdist_node_workeroutput['flaky_report'], ) else: assert not mock_stream.write.called _REPORT_TEXT1 = 'Flaky report text' _REPORT_TEXT2 = 'Ḿőŕȅ ƒľȁƙŷ ŕȅҏőŕƭ ƭȅхƭ' @pytest.mark.parametrize('initial_report,stream_report,expected_report', ( ('', '', ''), ('', _REPORT_TEXT1, _REPORT_TEXT1), (_REPORT_TEXT1, '', _REPORT_TEXT1), (_REPORT_TEXT1, _REPORT_TEXT2, _REPORT_TEXT1 + _REPORT_TEXT2), (_REPORT_TEXT2, _REPORT_TEXT1, _REPORT_TEXT2 + _REPORT_TEXT1), )) def test_flaky_session_finish_copies_flaky_report( initial_report, stream_report, expected_report, ): PLUGIN.stream.seek(0) PLUGIN.stream.truncate() PLUGIN.stream.write(stream_report) PLUGIN.config = Mock() PLUGIN.config.workeroutput = {'flaky_report': initial_report} PLUGIN.pytest_sessionfinish() assert PLUGIN.config.workeroutput['flaky_report'] == expected_report def test_flaky_plugin_can_suppress_success_report( flaky_test, flaky_plugin, call_info, string_io, mock_io, ): flaky()(flaky_test) # pylint:disable=protected-access flaky_plugin._flaky_success_report = False # pylint:enable=protected-access call_info.when = 'call' actual_plugin_handles_success = flaky_plugin.add_success(flaky_test) assert actual_plugin_handles_success is False assert string_io.getvalue() == mock_io.getvalue() def test_flaky_plugin_raises_errors_in_fixture_setup( flaky_test, flaky_plugin, string_io, mock_io, ): """ Test for Issue #57 - fixtures which raise an error should show up as test errors. 
This test ensures that exceptions occurring when running a test fixture are copied into the call info's excinfo field. """ def error_raising_setup_function(item): assert item is flaky_test item.ran_setup = True return 5 / 0 flaky()(flaky_test) flaky_test.ihook = Mock() flaky_test.ihook.pytest_runtest_setup = error_raising_setup_function flaky_plugin._call_infos[flaky_test] = {} # pylint:disable=protected-access call_info = call_runtest_hook(flaky_test, 'setup') assert flaky_test.ran_setup assert string_io.getvalue() == mock_io.getvalue() assert call_info.excinfo.type is ZeroDivisionError class TestFlakyPytestPlugin(object): _test_method_name = 'test_method' def test_flaky_plugin_handles_success( self, flaky_test, flaky_plugin, call_info, string_io, mock_io, ): self._test_flaky_plugin_handles_success( flaky_test, flaky_plugin, call_info, string_io, mock_io, ) def test_flaky_plugin_handles_success_for_needs_rerun( self, flaky_test, flaky_plugin, call_info, string_io, mock_io, mock_plugin_rerun, ): self._test_flaky_plugin_handles_success( flaky_test, flaky_plugin, call_info, string_io, mock_io, min_passes=2, ) assert mock_plugin_rerun()[0] == flaky_test def test_flaky_plugin_ignores_success_for_non_flaky_test( self, flaky_plugin, flaky_test, call_info, string_io, mock_io, ): flaky_plugin.add_success(flaky_test) self._assert_test_ignored(mock_io, string_io, call_info) def test_flaky_plugin_ignores_failure_for_non_flaky_test( self, flaky_plugin, flaky_test, call_info, string_io, mock_io, ): flaky_plugin.add_failure(flaky_test, None) self._assert_test_ignored(mock_io, string_io, call_info) def test_flaky_plugin_handles_failure( self, flaky_test, flaky_plugin, call_info, string_io, mock_io, mock_error, mock_plugin_rerun, ): self._test_flaky_plugin_handles_failure( flaky_test, flaky_plugin, call_info, string_io, mock_io, mock_error, ) assert mock_plugin_rerun()[0] == flaky_test def test_flaky_plugin_handles_failure_for_no_more_retries( self, flaky_test, flaky_plugin, call_info, string_io, mock_io, mock_error, ): self._test_flaky_plugin_handles_failure( flaky_test, flaky_plugin, call_info, string_io, mock_io, mock_error, max_runs=1, ) def test_flaky_plugin_handles_additional_failures( self, flaky_test, flaky_plugin, call_info, string_io, mock_io, mock_error, mock_plugin_rerun, ): self._test_flaky_plugin_handles_failure( flaky_test, flaky_plugin, call_info, string_io, mock_io, mock_error, current_errors=[None], ) assert mock_plugin_rerun()[0] == flaky_test def _assert_flaky_attributes_contains( self, expected_flaky_attributes, test, ): actual_flaky_attributes = self._get_flaky_attributes(test) assert all( item in actual_flaky_attributes.items() for item in expected_flaky_attributes.items() ) def test_flaky_plugin_exits_after_false_rerun_filter( self, flaky_test, flaky_plugin, call_info, string_io, mock_io, mock_error, mock_plugin_rerun, ): err_tuple = (mock_error.type, mock_error.value, mock_error.traceback) def rerun_filter(err, name, test, plugin): assert err == err_tuple assert name == flaky_test.name assert test is flaky_test assert plugin is flaky_plugin return False flaky(rerun_filter=rerun_filter)(flaky_test) call_info.when = 'call' actual_plugin_handles_failure = flaky_plugin.add_failure( flaky_test, mock_error, ) assert actual_plugin_handles_failure is False assert not mock_plugin_rerun() string_io.writelines([ self._test_method_name, ' failed and was not selected for rerun.', '\n\t', unicode_type(mock_error.type), '\n\t', unicode_type(mock_error.value), '\n\t', 
unicode_type(mock_error.traceback), '\n', ]) assert string_io.getvalue() == mock_io.getvalue() @staticmethod def _assert_test_ignored(mock_io, string_io, call_info): assert call_info assert mock_io.getvalue() == string_io.getvalue() def _test_flaky_plugin_handles_success( self, test, plugin, info, stream, mock_stream, current_passes=0, current_runs=0, max_runs=2, min_passes=1, ): flaky(max_runs, min_passes)(test) setattr( test, FlakyNames.CURRENT_PASSES, current_passes, ) setattr( test, FlakyNames.CURRENT_RUNS, current_runs, ) too_few_passes = current_passes + 1 < min_passes retries_remaining = current_runs + 1 < max_runs expected_plugin_handles_success = too_few_passes and retries_remaining info.when = 'call' actual_plugin_handles_success = plugin.add_success(test) assert expected_plugin_handles_success == actual_plugin_handles_success self._assert_flaky_attributes_contains( { FlakyNames.CURRENT_PASSES: current_passes + 1, FlakyNames.CURRENT_RUNS: current_runs + 1, }, test, ) stream.writelines([ self._test_method_name, " passed {} out of the required {} times. ".format( current_passes + 1, min_passes, ), ]) if expected_plugin_handles_success: stream.write( 'Running test again until it passes {} times.\n'.format( min_passes, ), ) else: stream.write('Success!\n') assert stream.getvalue() == mock_stream.getvalue() def _test_flaky_plugin_handles_failure( self, test, plugin, info, stream, mock_stream, mock_error, current_errors=None, current_passes=0, current_runs=0, max_runs=2, min_passes=1, rerun_filter=None, ): flaky(max_runs, min_passes, rerun_filter)(test) if current_errors is None: current_errors = [None] else: current_errors.append(None) setattr( test, FlakyNames.CURRENT_ERRORS, current_errors, ) setattr( test, FlakyNames.CURRENT_PASSES, current_passes, ) setattr( test, FlakyNames.CURRENT_RUNS, current_runs, ) too_few_passes = current_passes < min_passes retries_remaining = current_runs + 1 < max_runs expected_plugin_handles_failure = too_few_passes and retries_remaining info.when = 'call' actual_plugin_handles_failure = plugin.add_failure( test, mock_error, ) assert expected_plugin_handles_failure == actual_plugin_handles_failure self._assert_flaky_attributes_contains( { FlakyNames.CURRENT_RUNS: current_runs + 1, FlakyNames.CURRENT_ERRORS: current_errors }, test, ) if expected_plugin_handles_failure: stream.writelines([ self._test_method_name, ' failed ({} runs remaining out of {}).'.format( max_runs - current_runs - 1, max_runs ), '\n\t', unicode_type(mock_error.type), '\n\t', unicode_type(mock_error.value), '\n\t', unicode_type(mock_error.traceback), '\n', ]) else: message = ' failed; it passed {0} out of the required {1} times.' stream.writelines([ self._test_method_name, message.format( current_passes, min_passes ), '\n\t', unicode_type(mock_error.type), '\n\t', unicode_type(mock_error.value), '\n\t', unicode_type(mock_error.traceback), '\n', ]) assert stream.getvalue() == mock_stream.getvalue() @staticmethod def _get_flaky_attributes(test): actual_flaky_attributes = { attr: getattr( test, attr, None, ) for attr in FlakyNames() } return actual_flaky_attributes flaky-3.7.0/test/test_pytest/test_pytest_example.py000066400000000000000000000100041370143502300226420ustar00rootroot00000000000000# coding: utf-8 from __future__ import unicode_literals from unittest import TestCase, skip # pylint:disable=import-error import pytest # pylint:enable=import-error from flaky import flaky # This is an end-to-end example of the flaky package in action. 

flaky-3.7.0/test/test_pytest/test_pytest_example.py

# coding: utf-8

from __future__ import unicode_literals

from unittest import TestCase, skip

# pylint:disable=import-error
import pytest
# pylint:enable=import-error

from flaky import flaky


# This is an end-to-end example of the flaky package in action. Consider it
# a live tutorial, showing the various features in action.


@flaky
def test_something_flaky(dummy_list=[]):
    # pylint:disable=dangerous-default-value
    dummy_list.append(0)
    assert len(dummy_list) > 1


@pytest.fixture(scope='function')
def failing_setup_fixture():
    assert False


@flaky
@pytest.mark.xfail(strict=True)
@pytest.mark.usefixtures("failing_setup_fixture")
def test_something_good_with_failing_setup_fixture():
    assert True


class TestExample(object):
    _threshold = -1

    def test_non_flaky_thing(self):
        """Flaky will not interact with this test"""

    @pytest.mark.xfail
    def test_non_flaky_failing_thing(self):
        """Flaky will also not interact with this test"""
        assert self == 1

    @flaky(3, 2)
    def test_flaky_thing_that_fails_then_succeeds(self):
        """
        Flaky will run this test 3 times.
        It will fail once and then succeed twice.
        """
        # pylint:disable=no-self-use
        TestExample._threshold += 1
        assert TestExample._threshold >= 1

    @flaky(3, 2)
    def test_flaky_thing_that_succeeds_then_fails_then_succeeds(self):
        """
        Flaky will run this test 3 times.
        It will succeed once, fail once, and then succeed one more time.
        """
        # pylint:disable=no-self-use
        TestExample._threshold += 1
        assert TestExample._threshold != 1

    @flaky(2, 2)
    def test_flaky_thing_that_always_passes(self):
        """Flaky will run this test twice. Both will succeed."""

    @pytest.mark.skipif(
        'True',
        reason="This really fails! Remove skipif to see the test failure."
    )
    @flaky()
    def test_flaky_thing_that_always_fails(self):
        """Flaky will run this test twice. Both will fail."""
        assert self is None


@flaky
class TestExampleFlakyTests(object):
    _threshold = -1

    @staticmethod
    def test_flaky_thing_that_fails_then_succeeds():
        """
        Flaky will run this test twice.
        It will fail once and then succeed.
        """
        TestExampleFlakyTests._threshold += 1
        assert TestExampleFlakyTests._threshold >= 1


@flaky
class TestExampleFlakyTestCase(TestCase):
    _threshold = -1

    @staticmethod
    def test_flaky_thing_that_fails_then_succeeds():
        """
        Flaky will run this test twice.
        It will fail once and then succeed.
        """
        TestExampleFlakyTestCase._threshold += 1
        assert TestExampleFlakyTestCase._threshold >= 1


class TestFlakySubclass(TestExampleFlakyTestCase):
    pass


@pytest.mark.flaky
class TestMarkedClass(object):
    _threshold = -1

    @staticmethod
    def test_flaky_thing_that_fails_then_succeeds():
        """
        Flaky will run this test twice.
        It will fail once and then succeed.
        """
        TestMarkedClass._threshold += 1
        assert TestMarkedClass._threshold >= 1


def _test_flaky_doctest():
    """
    Flaky ignores doctests. This test wouldn't be rerun if it failed.
    >>> _test_flaky_doctest()
    True
    """
    return True


@pytest.fixture
def my_fixture():
    return 42


@flaky
def test_requiring_my_fixture(my_fixture, dummy_list=[]):
    # pylint:disable=dangerous-default-value,unused-argument
    dummy_list.append(0)
    assert len(dummy_list) > 1


def _rerun_filter(err, name, test, plugin):
    # pylint:disable=unused-argument
    return issubclass(err[0], AssertionError)


class TestExampleRerunFilter(object):
    _threshold = -1

    @flaky(rerun_filter=_rerun_filter)
    def test_something_flaky(self):
        # pylint:disable=no-self-use
        TestExampleRerunFilter._threshold += 1
        assert TestExampleRerunFilter._threshold >= 1


@skip('This test always fails')
@flaky
def test_something_that_always_fails_but_should_be_skipped():
    assert 0

flaky-3.7.0/test/test_pytest/test_pytest_options_example.py

# coding: utf-8

from __future__ import unicode_literals

# This is a series of tests that do not use the flaky decorator; the flaky
# behavior is intended to be enabled with the --force-flaky option on the
# command line.

from flaky import flaky


def test_something_flaky(dummy_list=[]):
    # pylint:disable=dangerous-default-value
    dummy_list.append(0)
    assert len(dummy_list) > 1


class TestExample(object):
    _threshold = -2

    @staticmethod
    @flaky(3, 1)
    def test_flaky_thing_that_fails_then_succeeds():
        """
        Flaky will run this test 3 times.
        It will fail twice and then succeed once.
        This ensures that the flaky decorator overrides any command-line
        options we specify.
        """
        TestExample._threshold += 1
        assert TestExample._threshold >= 1


@flaky(3, 1)
class TestExampleFlakyTests(object):
    _threshold = -2

    @staticmethod
    def test_flaky_thing_that_fails_then_succeeds():
        """
        Flaky will run this test 3 times.
        It will fail twice and then succeed once.
        This ensures that the flaky decorator on a test suite overrides any
        command-line options we specify.
        """
        TestExampleFlakyTests._threshold += 1
        assert TestExampleFlakyTests._threshold >= 1

flaky-3.7.0/test/test_pytest/test_pytester_plugin.py

# coding: utf-8

from __future__ import unicode_literals, absolute_import

pytest_plugins = str('pytester')  # pylint:disable=invalid-name

TESTSUITE = """
def test_a_thing():
    pass
"""


def test_output_without_capture(testdir):
    """
    Test for Issue #82.
    Flaky was breaking tests using the pytester plugin.
    """
    script = testdir.makepyfile(TESTSUITE)
    result = testdir.runpytest(script, '--verbose', '--capture', 'fd')
    assert result.ret == 0
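
# A short, illustrative sketch of how the options-example module above gets
# its flaky behavior: no decorators, just command-line flags. This mirrors
# the pytest invocation in tox.ini below; the path and flags come from there,
# while driving it through pytest.main() is just one convenient way to run it.
import pytest

if __name__ == '__main__':
    pytest.main([
        '--force-flaky',
        '--max-runs', '2',
        'test/test_pytest/test_pytest_options_example.py',
    ])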
""" script = testdir.makepyfile(TESTSUITE) result = testdir.runpytest(script, '--verbose', '--capture', 'fd') assert result.ret == 0 flaky-3.7.0/test/test_utils.py000066400000000000000000000032021370143502300163520ustar00rootroot00000000000000# coding: utf-8 from __future__ import unicode_literals import sys from unittest import TestCase from flaky.utils import ensure_unicode_string, unicode_type from genty import genty, genty_dataset @genty class TestEnsureUnicodeString(TestCase): _unicode_string = 'Plain Hello' _byte_string = b'Plain Hello' _unicode_string_non_ascii = 'ńőń ȁŝćȉȉ ŝƭȕƒƒ' _byte_string_non_ascii = _unicode_string_non_ascii.encode('utf-8') _hello = 'Hèllö' _mangled_hello = 'H\ufffdll\ufffd' _byte_string_windows_encoded = _hello.encode('windows-1252') def test_ensure_unicode_string_handles_nonascii_exception_message(self): message = '\u2013' encoded_message = message.encode('utf-8') ex = Exception(encoded_message) string = ensure_unicode_string(ex) if sys.version_info.major >= 3: message = unicode_type(encoded_message) self.assertEqual(string, message) @genty_dataset( (_unicode_string, _unicode_string), (_byte_string, _unicode_string), (_unicode_string_non_ascii, _unicode_string_non_ascii), (_byte_string_non_ascii, _unicode_string_non_ascii), (_byte_string_windows_encoded, _mangled_hello), ) def test_ensure_unicode_string_handles_various_strings( self, string, expected_unicode_string, ): unicode_string = ensure_unicode_string(string) if sys.version_info.major >= 3: expected_unicode_string = unicode_type(string) self.assertIsInstance(unicode_string, unicode_type) self.assertIn(expected_unicode_string, unicode_string) flaky-3.7.0/tox.ini000066400000000000000000000030421370143502300141370ustar00rootroot00000000000000[tox] envlist = py27, py34, py35, py36, py37, pypy, pycodestyle, pylint, readme, coverage [testenv] deps = -rrequirements-dev.txt usedevelop = True commands = nosetests --with-flaky --exclude="test_nose_options_example" test/test_nose/ pytest -k 'example and not options' --doctest-modules test/test_pytest/ pytest -k 'example and not options' -n 1 test/test_pytest/ pytest -p no:flaky test/test_pytest/test_flaky_pytest_plugin.py nosetests --with-flaky --force-flaky --max-runs 2 test/test_nose/test_nose_options_example.py pytest --force-flaky --max-runs 2 test/test_pytest/test_pytest_options_example.py [testenv:pycodestyle] commands = pycodestyle --ignore=E501 flaky pycodestyle --ignore=E501 test [testenv:pylint] commands = pylint --rcfile=.pylintrc flaky pylint --rcfile=.pylintrc test -d C0330,W0621,C0411 [testenv:coverage] commands = python setup.py develop nosetests --with-flaky --with-coverage --cover-package=flaky --exclude="test_nose_options_example" --no-flaky-report --cover-erase test/test_nose/ pytest -k 'example and not options' --doctest-modules --no-flaky-report --cov flaky --cov-report term-missing test/test_pytest/ pytest -p no:flaky --cov flaky --cov-report term-missing test/test_pytest/test_flaky_pytest_plugin.py [testenv:readme] deps = docutils pygments commands = rst2html.py --strict README.rst rst2html.py --strict HISTORY.rst rst2html.py --strict CONTRIBUTING.rst [pycodestyle] show-pep8 = True show-source = True