pydocstyle-6.3.0/.github/workflows/pre-release.yml
---
name: Test PyPI publish

on:
  release:
    types: [prereleased]

jobs:
  build:
    runs-on: ubuntu-latest
    environment: pypi-dev
    steps:
      - name: Checkout code
        uses: actions/checkout@v3
      - name: Install poetry
        run: pipx install poetry
      - name: Setup Python
        uses: actions/setup-python@v3
        with:
          python-version: "3.7"
          cache: "poetry"
      - name: Install dependencies
        run: |
          poetry env use "3.7"
          poetry install
          poetry config repositories.testpypi https://test.pypi.org/legacy/
      - name: Bump version number
        run: poetry version ${{ github.event.release.tag_name }}
      - name: Build package
        run: poetry build
      - name: Publish package
        run: poetry publish -r testpypi -u __token__ -p ${{ secrets.TEST_PYPI_PASSWORD }}
pydocstyle-6.3.0/.github/workflows/release.yml
---
name: PyPI publish

on:
  release:
    types: [released]

jobs:
  build:
    runs-on: ubuntu-latest
    environment: pypi-prod
    steps:
      - name: Checkout code
        uses: actions/checkout@v3
      - name: Install poetry
        run: pipx install poetry
      - name: Setup Python
        uses: actions/setup-python@v3
        with:
          python-version: "3.7"
          cache: "poetry"
      - name: Install dependencies
        run: |
          poetry env use "3.7"
          poetry install
      - name: Bump version number
        run: poetry version ${{ github.event.release.tag_name }}
      - name: Build package
        run: poetry build
      - name: Publish package
        run: poetry publish -u __token__ -p ${{ secrets.PYPI_PASSWORD }}
pydocstyle-6.3.0/.github/workflows/test.yml
name: Run tests
on:
  push:
    branches:
      - master
  pull_request:
    branches:
      - master

jobs:
  test-latest:
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        os: [macos-latest, ubuntu-latest, windows-latest]
        python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"]
    steps:
      - uses: actions/checkout@v3
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install tox
        run: python -m pip install --upgrade pip tox
      - name: Run Tests
        run: make tests
pydocstyle-6.3.0/.gitignore
*.py[co]
__pycache__
# Vim
*.swp
*.swo
# Packages
*.egg
*.egg-info
dist
build
eggs
parts
bin
var
sdist
develop-eggs
.installed.cfg
.cache
.pytest_cache
MANIFEST
# Installer logs
pip-log.txt
# Unit test / coverage reports
.coverage
.tox
nosetests.xml
.mypy_cache
# Translations
*.mo
# Sphinx
docs/_*
# Eclipse files
.project
.pydevproject
.settings
# PyCharm files
.idea
# virtualenv
venv/
.venvs/
.venv/
# generated rst
docs/snippets/error_code_table.rst
# VS Code
.vscode/
pydocstyle-6.3.0/.gitpod.yml
tasks:
  - init: pip install -r requirements.txt && pip install -e .

github:
  prebuilds:
    master: true
    branches: true
    pullRequests: true
    pullRequestsFromForks: true
    addCheck: true

vscode:
  extensions:
    - ms-python.python
pydocstyle-6.3.0/.pre-commit-hooks.yaml
- id: pydocstyle
  name: pydocstyle
  description: pydocstyle is a static analysis tool for checking compliance with Python docstring conventions.
  entry: pydocstyle
  language: python
  types: [python]
pydocstyle-6.3.0/LICENSE-MIT
Copyright (c) 2012 GreenSteam,
Copyright (c) 2014-2020 Amir Rachum,
Copyright (c) 2020 Sambhav Kothari,
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
pydocstyle-6.3.0/MANIFEST.in
include README.rst LICENSE-MIT
pydocstyle-6.3.0/Makefile
all: format tests
format:
	isort src/pydocstyle
	black src/pydocstyle

tests:
	tox -e py,install
pydocstyle-6.3.0/PULL_REQUEST_TEMPLATE.md
Thanks for submitting a PR!
Please make sure to check for the following items:

- [ ] Add unit tests and integration tests where applicable.
  If you've added an error code or changed an error code's behavior,
  you should probably add or change a test case file under `tests/test_cases/` and add
  it to the list in `tests/test_definitions.py`.
  If you've added or changed a command line option,
  you should probably add or change a test in `tests/test_integration.py`.
- [ ] Add a line to the release notes (docs/release_notes.rst) under "Current Development Version".
  Make sure to include the PR number after you open the PR and get one.

Please don't get discouraged as it may take a while to get a review.
pydocstyle-6.3.0/README.rst
pydocstyle - docstring style checker
====================================
.. image:: https://github.com/PyCQA/pydocstyle/workflows/Run%20tests/badge.svg
    :target: https://github.com/PyCQA/pydocstyle/actions?query=workflow%3A%22Run+tests%22+branch%3Amaster

.. image:: https://readthedocs.org/projects/pydocstyle/badge/?version=latest
    :target: https://readthedocs.org/projects/pydocstyle/?badge=latest
    :alt: Documentation Status

.. image:: https://img.shields.io/pypi/pyversions/pydocstyle.svg
    :target: https://pypi.org/project/pydocstyle

.. image:: https://pepy.tech/badge/pydocstyle
    :target: https://pepy.tech/project/pydocstyle

.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
    :target: https://github.com/psf/black

.. image:: https://img.shields.io/badge/%20imports-isort-%231674b1?style=flat&labelColor=ef8336
    :target: https://pycqa.github.io/isort/

.. image:: https://img.shields.io/badge/Gitpod-ready--to--code-blue?logo=gitpod
    :target: https://gitpod.io/#https://github.com/PyCQA/pydocstyle
    :alt: Gitpod ready-to-code
**pydocstyle** is a static analysis tool for checking compliance with Python
docstring conventions.
**pydocstyle** supports most of
`PEP 257 <https://peps.python.org/pep-0257/>`_ out of the box, but it
should not be considered a reference implementation.
**pydocstyle** supports Python 3.6+.
Quick Start
-----------
Install
^^^^^^^
.. code::

    pip install pydocstyle

Run
^^^
.. code::

    $ pydocstyle test.py
    test.py:18 in private nested class `meta`:
            D101: Docstring missing
    test.py:27 in public function `get_user`:
            D300: Use """triple double quotes""" (found '''-quotes)
    test.py:75 in public function `init_database`:
            D201: No blank lines allowed before function docstring (found 1)
    ...
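
For reference, here is a small, hypothetical module that would trigger checks like
the ones shown above (the file and function names are purely illustrative):

.. code:: python

    def get_user(name):
        '''Return the user record for ``name``.'''  # D300: use """triple double quotes"""
        return {"name": name}


    def init_database():

        """Create the database schema."""  # D201: no blank lines allowed before docstring
        ...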
Develop
^^^^^^^
You can use Gitpod to run a pre-configured dev environment in the cloud right from your browser -
.. image:: https://gitpod.io/button/open-in-gitpod.svg
    :target: https://gitpod.io/#https://github.com/PyCQA/pydocstyle
    :alt: Open in Gitpod
Before submitting a PR, make sure that you run ``make all``.
Links
-----
* `Read the full documentation here <https://www.pydocstyle.org/>`_.
* `Fork pydocstyle on GitHub <https://github.com/PyCQA/pydocstyle>`_.
* `PyPI project page <https://pypi.org/project/pydocstyle/>`_.
pydocstyle-6.3.0/docs/Makefile
# Makefile for Sphinx documentation
#
# You can set these variables from the command line.
SPHINXOPTS =
SPHINXBUILD = sphinx-build
PAPER =
BUILDDIR = _build
# User-friendly check for sphinx-build
ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
endif
# Internal variables.
PAPEROPT_a4 = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
# the i18n builder cannot share the environment and doctrees with the others
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
help:
@echo "Please use \`make ' where is one of"
@echo " html to make standalone HTML files"
@echo " dirhtml to make HTML files named index.html in directories"
@echo " singlehtml to make a single large HTML file"
@echo " pickle to make pickle files"
@echo " json to make JSON files"
@echo " htmlhelp to make HTML files and a HTML help project"
@echo " qthelp to make HTML files and a qthelp project"
@echo " devhelp to make HTML files and a Devhelp project"
@echo " epub to make an epub"
@echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
@echo " latexpdf to make LaTeX files and run them through pdflatex"
@echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
@echo " text to make text files"
@echo " man to make manual pages"
@echo " texinfo to make Texinfo files"
@echo " info to make Texinfo files and run them through makeinfo"
@echo " gettext to make PO message catalogs"
@echo " changes to make an overview of all changed/added/deprecated items"
@echo " xml to make Docutils-native XML files"
@echo " pseudoxml to make pseudoxml-XML files for display purposes"
@echo " linkcheck to check all external links for integrity"
@echo " doctest to run all doctests embedded in the documentation (if enabled)"
clean:
rm -rf $(BUILDDIR)/*
html:
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
dirhtml:
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
singlehtml:
$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
@echo
@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
pickle:
$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
@echo
@echo "Build finished; now you can process the pickle files."
json:
$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
@echo
@echo "Build finished; now you can process the JSON files."
htmlhelp:
$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
@echo
@echo "Build finished; now you can run HTML Help Workshop with the" \
".hhp project file in $(BUILDDIR)/htmlhelp."
qthelp:
$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
@echo
@echo "Build finished; now you can run "qcollectiongenerator" with the" \
".qhcp project file in $(BUILDDIR)/qthelp, like this:"
@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/pep257.qhcp"
@echo "To view the help file:"
@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/pep257.qhc"
devhelp:
$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
@echo
@echo "Build finished."
@echo "To view the help file:"
@echo "# mkdir -p $$HOME/.local/share/devhelp/pep257"
@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/pep257"
@echo "# devhelp"
epub:
$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
@echo
@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
latex:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo
@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
@echo "Run \`make' in that directory to run these through (pdf)latex" \
"(use \`make latexpdf' here to do that automatically)."
latexpdf:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through pdflatex..."
$(MAKE) -C $(BUILDDIR)/latex all-pdf
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
latexpdfja:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through platex and dvipdfmx..."
$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
text:
$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
@echo
@echo "Build finished. The text files are in $(BUILDDIR)/text."
man:
$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
@echo
@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
texinfo:
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo
@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
@echo "Run \`make' in that directory to run these through makeinfo" \
"(use \`make info' here to do that automatically)."
info:
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo "Running Texinfo files through makeinfo..."
make -C $(BUILDDIR)/texinfo info
@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
gettext:
$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
@echo
@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
changes:
$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
@echo
@echo "The overview file is in $(BUILDDIR)/changes."
linkcheck:
$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
@echo
@echo "Link check complete; look for any errors in the above output " \
"or in $(BUILDDIR)/linkcheck/output.txt."
doctest:
$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
@echo "Testing of doctests in the sources finished, look at the " \
"results in $(BUILDDIR)/doctest/output.txt."
xml:
$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
@echo
@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
pseudoxml:
$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
@echo
@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
pydocstyle-6.3.0/docs/conf.py
# pydocstyle documentation build configuration file, created by
# sphinx-quickstart on Fri Jan 30 20:30:42 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'src'))
import pydocstyle
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.coverage',
    'sphinx.ext.viewcode',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'pydocstyle'
copyright = '2020, Amir Rachum, Sambhav Kothari'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = pydocstyle.__version__
# The full version, including alpha/beta/rc tags.
release = pydocstyle.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
try:
    import sphinx_rtd_theme
except ImportError:
    html_theme = 'default'
else:
    html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# " v documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'pydocstyledoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'pydocstyle.tex', 'pydocstyle Documentation',
'Amir Rachum', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'pydocstyle', 'pydocstyle Documentation',
['Amir Rachum'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'pydocstyle', 'pydocstyle Documentation',
'Amir Rachum', 'pydocstyle', 'Docstring style checker',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
def generate_error_code_table():
    from pydocstyle.violations import ErrorRegistry
    with open(os.path.join('snippets', 'error_code_table.rst'), 'wt') as outf:
        outf.write(ErrorRegistry.to_rst())


generate_error_code_table()
pydocstyle-6.3.0/docs/error_codes.rst
Error Codes
===========
Grouping
--------
.. include:: snippets/error_code_table.rst
Default conventions
-------------------
Not all error codes are checked for by default. There are three conventions
that may be used by pydocstyle: ``pep257``, ``numpy`` and ``google``.
The ``pep257`` convention (specified in `PEP 257
<https://peps.python.org/pep-0257/>`_), which is enabled by default in
pydocstyle, checks for all of the above errors except for D203, D212, D213,
D214, D215, D404, D405, D406, D407, D408, D409, D410, D411, D413, D415, D416
and D417.
The ``numpy`` convention added in v2.0.0 supports the `numpydoc docstring
<https://numpydoc.readthedocs.io/en/latest/format.html>`_ standard. This checks all of the
errors except for D107, D203, D212, D213, D402, D413, D415, D416, and D417.
The ``google`` convention added in v4.0.0 supports the `Google Python Style
Guide <https://google.github.io/styleguide/pyguide.html>`_. This checks for
all the errors except D203, D204, D213, D215, D400, D401, D404, D406, D407,
D408, D409 and D413.
These conventions may be specified with the ``--convention`` flag when
running pydocstyle from the command line or by specifying the
convention in a configuration file. See the :ref:`cli_usage` section
for more details.
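
For example, to check a project against the ``numpy`` convention from the
command line (the path below is only illustrative):

.. code::

    $ pydocstyle --convention=numpy src/mypackage/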
.. note::
It makes no sense to check the same docstring for both ``numpy`` and ``google``
conventions. Therefore, if we successfully detect that a docstring is in the
``numpy`` style, we don't check it for ``google``.
The reason ``numpy`` style takes precedence over ``google`` is that the
heuristics for detecting it are better, and we don't want to force users to
provide external hints to ``pydocstyle`` in order to let it know which style
the docstrings are written in.
Publicity
---------
.. include:: snippets/publicity.rst
pydocstyle-6.3.0/docs/index.rst
pydocstyle's documentation
==========================
**pydocstyle** is a static analysis tool for checking compliance with Python
docstring conventions.
**pydocstyle** supports most of
`PEP 257 <https://peps.python.org/pep-0257/>`_ out of the box, but it
should not be considered a reference implementation.
**pydocstyle** supports Python 3.7 through 3.11.
Although pydocstyle tries to be compatible with Python 3.6, it is not tested against it.
.. include:: quickstart.rst
Contents:
.. toctree::
:maxdepth: 2
usage
error_codes
release_notes
license
Credits
=======
pydocstyle is a rename and continuation of pep257, a project created by
Vladimir Keleshev.
Maintained by Amir Rachum and Sambhav Kothari.
pydocstyle-6.3.0/docs/license.rst
License
=======
.. include:: ../LICENSE-MIT
pydocstyle-6.3.0/docs/make.bat
@ECHO OFF
REM Command file for Sphinx documentation
if "%SPHINXBUILD%" == "" (
set SPHINXBUILD=sphinx-build
)
set BUILDDIR=_build
set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
set I18NSPHINXOPTS=%SPHINXOPTS% .
if NOT "%PAPER%" == "" (
set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
)
if "%1" == "" goto help
if "%1" == "help" (
:help
echo.Please use `make ^` where ^ is one of
echo. html to make standalone HTML files
echo. dirhtml to make HTML files named index.html in directories
echo. singlehtml to make a single large HTML file
echo. pickle to make pickle files
echo. json to make JSON files
echo. htmlhelp to make HTML files and a HTML help project
echo. qthelp to make HTML files and a qthelp project
echo. devhelp to make HTML files and a Devhelp project
echo. epub to make an epub
echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
echo. text to make text files
echo. man to make manual pages
echo. texinfo to make Texinfo files
echo. gettext to make PO message catalogs
echo. changes to make an overview over all changed/added/deprecated items
echo. xml to make Docutils-native XML files
echo. pseudoxml to make pseudoxml-XML files for display purposes
echo. linkcheck to check all external links for integrity
echo. doctest to run all doctests embedded in the documentation if enabled
goto end
)
if "%1" == "clean" (
for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
del /q /s %BUILDDIR%\*
goto end
)
%SPHINXBUILD% 2> nul
if errorlevel 9009 (
echo.
echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
echo.installed, then set the SPHINXBUILD environment variable to point
echo.to the full path of the 'sphinx-build' executable. Alternatively you
echo.may add the Sphinx directory to PATH.
echo.
echo.If you don't have Sphinx installed, grab it from
echo.http://sphinx-doc.org/
exit /b 1
)
if "%1" == "html" (
%SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The HTML pages are in %BUILDDIR%/html.
goto end
)
if "%1" == "dirhtml" (
%SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
goto end
)
if "%1" == "singlehtml" (
%SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
goto end
)
if "%1" == "pickle" (
%SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
if errorlevel 1 exit /b 1
echo.
echo.Build finished; now you can process the pickle files.
goto end
)
if "%1" == "json" (
%SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
if errorlevel 1 exit /b 1
echo.
echo.Build finished; now you can process the JSON files.
goto end
)
if "%1" == "htmlhelp" (
%SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
if errorlevel 1 exit /b 1
echo.
echo.Build finished; now you can run HTML Help Workshop with the ^
.hhp project file in %BUILDDIR%/htmlhelp.
goto end
)
if "%1" == "qthelp" (
%SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
if errorlevel 1 exit /b 1
echo.
echo.Build finished; now you can run "qcollectiongenerator" with the ^
.qhcp project file in %BUILDDIR%/qthelp, like this:
echo.^> qcollectiongenerator %BUILDDIR%\qthelp\pep257.qhcp
echo.To view the help file:
echo.^> assistant -collectionFile %BUILDDIR%\qthelp\pep257.ghc
goto end
)
if "%1" == "devhelp" (
%SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
if errorlevel 1 exit /b 1
echo.
echo.Build finished.
goto end
)
if "%1" == "epub" (
%SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The epub file is in %BUILDDIR%/epub.
goto end
)
if "%1" == "latex" (
%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
if errorlevel 1 exit /b 1
echo.
echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
goto end
)
if "%1" == "latexpdf" (
%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
cd %BUILDDIR%/latex
make all-pdf
cd %BUILDDIR%/..
echo.
echo.Build finished; the PDF files are in %BUILDDIR%/latex.
goto end
)
if "%1" == "latexpdfja" (
%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
cd %BUILDDIR%/latex
make all-pdf-ja
cd %BUILDDIR%/..
echo.
echo.Build finished; the PDF files are in %BUILDDIR%/latex.
goto end
)
if "%1" == "text" (
%SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The text files are in %BUILDDIR%/text.
goto end
)
if "%1" == "man" (
%SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The manual pages are in %BUILDDIR%/man.
goto end
)
if "%1" == "texinfo" (
%SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
goto end
)
if "%1" == "gettext" (
%SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
goto end
)
if "%1" == "changes" (
%SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
if errorlevel 1 exit /b 1
echo.
echo.The overview file is in %BUILDDIR%/changes.
goto end
)
if "%1" == "linkcheck" (
%SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
if errorlevel 1 exit /b 1
echo.
echo.Link check complete; look for any errors in the above output ^
or in %BUILDDIR%/linkcheck/output.txt.
goto end
)
if "%1" == "doctest" (
%SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
if errorlevel 1 exit /b 1
echo.
echo.Testing of doctests in the sources finished, look at the ^
results in %BUILDDIR%/doctest/output.txt.
goto end
)
if "%1" == "xml" (
%SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The XML files are in %BUILDDIR%/xml.
goto end
)
if "%1" == "pseudoxml" (
%SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml.
goto end
)
:end
pydocstyle-6.3.0/docs/quickstart.rst
Quick Start
===========
1. Install
   .. code::

      pip install pydocstyle

2. Run

   .. code::

      $ pydocstyle test.py
      test.py:18 in private nested class `meta`:
              D101: Docstring missing
      test.py:27 in public function `get_user`:
              D300: Use """triple double quotes""" (found '''-quotes)
      test.py:75 in public function `init_database`:
              D201: No blank lines allowed before function docstring (found 1)
      ...
3. Fix your code :)
pydocstyle-6.3.0/docs/release_notes.rst
Release Notes
=============
**pydocstyle** version numbers follow the
`Semantic Versioning `_ specification.
6.3.0 - January 17th, 2023
--------------------------
New Features
* Add `ignore-self-only-init` config (#560).
Bug Fixes
* Revert - Obey match rules in pre-commit usage (#610).
6.2.3 - January 8th, 2023
---------------------------
Bug Fixes
* Fix decorator parsing for async function. Resolves some false positives
with async functions and ``overload``. (#577)
* Obey match rules in pre-commit usage (#610).
6.2.2 - January 3rd, 2023
---------------------------
Bug Fixes
* Fix false positives of D417 in google convention docstrings (#619).
6.2.1 - January 3rd, 2023
---------------------------
Bug Fixes
* Use tomllib/tomli to correctly read .toml files (#599, #600).
6.2.0 - January 2nd, 2023
---------------------------
New Features
* Allow for hanging indent when documenting args in Google style. (#449)
* Add support for `property_decorators` config to ignore D401.
* Add support for Python 3.10 (#554).
* Replace D10X errors with D419 if docstring exists but is empty (#559).
Bug Fixes
* Fix ``--match`` option to only consider filename when matching full paths (#550).
6.1.1 - May 17th, 2021
---------------------------
Bug Fixes
* Split ``--source`` by lines instead of by characters (#536).
6.1.0 - May 17th, 2021
---------------------------
New Features
* Enable full toml configuration and pyproject.toml (#534).
6.0.0 - March 18th, 2021
---------------------------
Major Updates
* Support for Python 3.5 has been dropped (#510).
New Features
* Add flag to disable `# noqa` comment processing in API (#485).
* Methods, Functions and Nested functions that have a docstring now throw D418 (#511).
* Methods decorated with @overload no longer reported as D102 (#511).
* Functions and nested functions decorated with @overload no longer reported as D103 (#511).
Bug Fixes
* Treat "package" as an imperative verb for D401 (#356).
* Fix the parsing of decorated one line functions (#499).
5.1.2 - September 13th, 2020
----------------------------
New Features
* Methods, Functions and Nested functions that have a docstring now throw D418 (#511).
* Methods decorated with @overload no longer reported as D102.
* Functions and nested functions decorated with @overload no longer reported as D103.
5.1.1 - August 29th, 2020
---------------------------
Bug Fixes
* Fix ``IndexError`` crash on one-line backslashed docstrings (#506).
5.1.0 - August 22nd, 2020
---------------------------
New Features
* Skip function arguments prefixed with `_` in D417 check (#440).
Bug Fixes
* Update convention support documentation (#386, #393)
* Detect inner asynchronous functions for D202 (#467)
* Fix indentation error while parsing class methods (#441).
* Fix a bug in parsing Google-style argument description.
The bug caused some argument names to go unreported in D417 (#448).
* Fixed an issue where skipping errors on module level docstring via #noqa
failed when there where more prior comments (#446).
* Support backslash-continued descriptions in docstrings (#472).
* Correctly detect publicity of modules inside directories (#470, #494).
5.0.2 - January 8th, 2020
---------------------------
Bug Fixes
* Fix ``DeprecationWarning`` / ``SyntaxError`` "invalid escape sequence" with
Python 3.6+ (#445).
5.0.1 - December 9th, 2019
--------------------------
Bug Fixes
* Fixed an issue where AttributeError was raised when parsing the parameter
section of a class docstring (#434, #436).
5.0.0 - December 9th, 2019
--------------------------
Major Updates
* Support for Python 3.4 has been dropped (#402).
New Features
* Extend support for detecting missing arguments in Google style
docstrings to method calls (#384).
* Extend support for detecting missing argument description in Numpy style
docstrings (#407).
* Added support for Python 3.8 (#423).
* Allow skipping errors on module level docstring via #noqa (#427).
* Whitespace is ignored with set options split across multiple lines (#221).
Bug Fixes
* Remove D413 from the google convention (#430).
* Remove D413 from the pep257 convention (#404).
* Replace `semicolon` with `colon` in D416 messages. (#409)
* D301 (Use r""" if any backslashes in a docstring) does not trigger on
backslashes for line continuation or unicode literals ``\u...`` and
``\N...`` anymore. These are considered intended elements of the docstring
and thus should not be escaped by using a raw docstring (#365).
* Fix decorator parsing (#411).
* Google-style sections no longer cause false errors when used with
Numpy-style sections (#388, #424).
* D202: Allow a blank line after function docstring when followed by
declaration of an inner function or class (#395, #426).
* Fix D401 and D404 checks not working for docstrings containing only one word and ending with non-alpha character (#421)
4.0.1 - August 14th, 2019
-------------------------
Bug Fixes
* D401: Fixed a false positive where one stem had multiple imperative forms,
e.g., init and initialize / initiate (#382).
* Fix parser hanging when there's a comment directly after ``__all__``
(#391, #366).
* Fixed RST error in table which resulted in the online documentation missing
the violation code table (#396).
* Fixed IndentationError when parsing function arguments (#392).
4.0.0 - July 6th, 2019
----------------------
Major Updates
* Support for Python 2.x and PyPy has been dropped (#340).
* Added initial support for Google convention (#357).
New Features
* Added pre-commit hook (#346)
Bug Fixes
* Fix parsing tuple syntax ``__all__`` (#355, #352).
3.0.0 - October 14th, 2018
--------------------------
Major Updates
* Support for Python 3.3 has been dropped (#315, #316).
* Added support for Python 3.7 (#324).
New features
* Violations are now reported on the line where the docstring starts, not the
line of the ``def``/``class`` it corresponds to (#238, #83).
* Updated description of pep257 and numpy conventions (#300).
* ``__all__`` parsing is now done on a best-effort basis - if ``__all__`` can't
be statically determined, it will be ignored (#320, #313).
Bug Fixes
* Fixed a false-positive recognition of section names causing D405 to be
reported (#311, #317).
* Fixed a bug where functions that don't end with a newline will sometimes
raise an exception (#321, #336).
2.1.1 - October 9th, 2017
-------------------------
Bug Fixes
* Changed wheel configuration to be NOT universal, as #281 added
``configparser`` as a dependency for Python 2.7.
* Updated usage documentation.
2.1.0 - October 8th, 2017
-------------------------
New Features
* Public nested classes missing a docstring are now reported as D106 instead
of D101 (#198, #261).
* ``__init__`` methods missing a docstring are now reported as D107 instead of
D102 (#273, #277).
* Added support for Python 3.6 (#270).
* Specifying an invalid error code prefix (e.g., ``--select=D9``) will print
a warning message to ``stderr`` (#253, #279).
* Configuration files now support multiple-lined entries (#250, #281).
* Improved description of how error selection works in the help section
(#231, #283).
Bug Fixes
* Fixed an issue where the ``--source`` flag would result in improperly
spaced output (#256, #257, #260).
* Fixed an issue where if a first word in a docstring had Unicode characters
and the docstring was not a unicode string, an exception would be raised
(#258, #264).
* Configuration files that were specified by CLI and don't contain a valid
section name will now issue a warning to ``stderr`` (#276, #280).
* Removed D107 from the numpy convention (#288).
2.0.0 - April 18th, 2017
------------------------
Major Updates
* Support for ``numpy`` conventions verification has been added (#129, #226).
* Support for Python 2.6 has been dropped (#206, #217).
* Support for PyPy3 has been temporarily dropped, until it will be
equivalent to CPython 3.3+ and supported by ``pip`` (#223).
* Support for the ``pep257`` console script has been dropped. Only the
``pydocstyle`` console script should be used (#216, #218).
* Errors are now printed to ``stdout`` instead of ``stderr`` (#201, #210).
New Features
* Decorator-based skipping via ``--ignore-decorators`` has been added (#204).
* Support for using pycodestyle style wildcards has been added (#72, #209).
* Superfluous opening quotes are now reported as part of D300 (#166, #225).
* Fixed a false-positive recognition of `D410` and added `D412` (#230, #233).
* Added ``--config=`` flag to override the normal config file discovery
and choose a specific config file (#117, #247).
* Support for specifying error codes with partial prefix has been added, e.g.,
``--select=D101,D2`` (#72, #209).
* All configuration file can now have the ``.ini`` extension (#237).
* Added better imperative mood checks using third party stemmer (#235, #68).
Bug Fixes
* Made parser more robust to bad source files (#168, #214)
* Modules are now considered private if their name starts with a single
underscore. This is a bugfix where "public module" (D100) was reported
regardless of module name (#199, #222).
* Removed error when ``__all__`` is a list (#62, #227).
* Fixed a bug where the ``@`` sign was used as a matrix multiplication operator
in Python 3.5, but was considered a decorator by the parser (#246, #191).
1.1.1 - October 4th, 2016
-------------------------
Bug Fixes
* Fixed an issue where the ``flake8-docstrings`` failed when accessing some
public API from ``pydocstyle``.
1.1.0 - September 29th, 2016
----------------------------
Major Updates
* ``pydocstyle`` is no longer a single file. This might make it difficult for
some users to just add it to their project, but the project has reached
certain complexity where splitting it into modules was necessary (#200).
New Features
* Added the optional error codes D212 and D213, for checking whether
the summary of a multi-line docstring starts at the first line,
respectively at the second line (#174).
* Added D404 - First word of the docstring should not be "This". It is turned
off by default (#183).
* Added the ability to ignore specific function and method docstrings with
inline comments:
1. "# noqa" skips all checks.
2. "# noqa: D102,D203" can be used to skip specific checks.
Bug Fixes
* Fixed an issue where file paths were printed in lower case (#179, #181).
* The error code D300 is now also being reported if a docstring has
uppercase literals (``R`` or ``U``) as prefix (#176).
* Fixed a bug where an ``__all__`` error was reported when ``__all__`` was
imported from another module with a different name (#182, #187).
* Fixed a bug where ``raise X from Y`` syntax caused ``pydocstyle`` to crash
(#196, #200).
1.0.0 - January 30th, 2016
--------------------------
Major Updates
* The project was renamed to **pydocstyle** and the new release will be 1.0.0!
New Features
* Added support for Python 3.5 (#145).
* Classes nested inside classes are no longer considered private. Nested
classes are considered public if their names are not prepended with an
underscore and if their parent class is public, recursively (#13, #146).
* Added the D403 error code - "First word of the first line should be
properly capitalized". This new error is turned on by default (#164, #165,
#170).
* Added support for ``.pydocstylerc`` and as configuration file name
(#140, #173).
Bug Fixes
* Fixed an issue where a ``NameError`` was raised when parsing complex
definitions of ``__all__`` (#142, #143).
* Fixed a bug where D202 was falsely reported when a function with just a
docstring and no content was followed by a comment (#165).
* Fixed wrong ``__all__`` definition in main module (#150, #156).
* Fixed a bug where an ``AssertionError`` could occur when parsing
``__future__`` imports (#154).
Older Versions
==============
.. note::
Versions documented below are before renaming the project from **pep257**
to **pydocstyle**.
0.7.0 - October 9th, 2015
-------------------------
New Features
* Added the D104 error code - "Missing docstring in public package". This new
error is turned on by default. Missing docstring in ``__init__.py`` files which
previously resulted in D100 errors ("Missing docstring in public module")
will now result in D104 (#105, #127).
* Added the D105 error code - "Missing docstring in magic method'. This new
error is turned on by default. Missing docstrings in magic method which
previously resulted in D102 error ("Missing docstring in public method")
will now result in D105. Note that exceptions to this rule are variadic
magic methods - specifically ``__init__``, ``__call__`` and ``__new__``, which
will be considered non-magic and missing docstrings in them will result
in D102 (#60, #139).
* Support the option to exclude all error codes. Running pep257 with
``--select=`` (or ``select=`` in the configuration file) will exclude all errors
which could then be added one by one using ``add-select``. Useful for projects
new to pep257 (#132, #135).
* Added check D211: No blank lines allowed before class docstring. This change
is a result of a change to the official PEP257 convention. Therefore, D211
will now be checked by default instead of D203, which required a single
blank line before a class docstring (#137).
* Configuration files are now handled correctly. The closer a configuration file
is to a checked file the more it matters.
Configuration files no longer support ``explain``, ``source``, ``debug``,
``verbose`` or ``count`` (#133).
Bug Fixes
* On Python 2.x, D302 ("Use u""" for Unicode docstrings") is not reported
if `unicode_literals` is imported from `__future__` (#113, #134).
* Fixed a bug where there was no executable for `pep257` on Windows (#73,
#136).
0.6.0 - July 20th, 2015
-----------------------
New Features
* Added support for more flexible error selections using ``--ignore``,
``--select``, ``--convention``, ``--add-ignore`` and ``--add-select``
(#96, #123).
Bug Fixes
* Property setter and deleter methods are now treated as private and do not
require docstrings separate from the main property method (#69, #107).
* Fixed an issue where pep257 did not accept docstrings that are both
unicode and raw in Python 2.x (#116, #119).
* Fixed an issue where Python 3.x files with Unicode encodings were
not read correctly (#118).
0.5.0 - March 14th, 2015
------------------------
New Features
* Added check D210: No whitespaces allowed surrounding docstring text (#95).
* Added real documentation rendering using Sphinx (#100, #101).
Bug Fixes
* Removed log level configuration from module level (#98).
* D205 used to check that there was *a* blank line between the one line summary
and the description. It now checks that there is *exactly* one blank line
between them (#79).
* Fixed a bug where ``--match-dir`` was not properly respected (#108, #109).
0.4.1 - January 10th, 2015
--------------------------
Bug Fixes
* Getting ``ImportError`` when trying to run pep257 as the installed script
(#92, #93).
0.4.0 - January 4th, 2015
-------------------------
.. warning::
A fatal bug was discovered in this version (#92). Please use a newer
version.
New Features
* Added configuration file support (#58, #87).
* Added a ``--count`` flag that prints the number of violations found (#86,
#89).
* Added support for Python 3.4, PyPy and PyPy3 (#81).
Bug Fixes
* Fixed broken tests (#74).
* Fixed parsing various colon and parenthesis combinations in definitions
(#82).
* Allow for greater flexibility in parsing ``__all__`` (#67).
* Fixed handling of one-liner definitions (#77).
0.3.2 - March 11th, 2014
------------------------
First documented release!
pydocstyle-6.3.0/docs/snippets/cli.rst
.. _cli_usage:
Usage
^^^^^
.. code::
Usage: pydocstyle [options] [...]
Options:
--version show program's version number and exit
-h, --help show this help message and exit
-e, --explain show explanation of each error
-s, --source show source for each error
-d, --debug print debug information
-v, --verbose print status information
--count print total number of errors to stdout
--config= use given config file and disable config discovery
--match= check only files that exactly match regular
expression; default is --match='(?!test_).*\.py' which
matches files that don't start with 'test_' but end
with '.py'
--match-dir=
search only dirs that exactly match regular
expression; default is --match-dir='[^\.].*', which
matches all dirs that don't start with a dot
--ignore-decorators=
ignore any functions or methods that are decorated by
a function with a name fitting the
regular expression; default is --ignore-decorators=''
which does not ignore any decorated functions.
Note:
When using --match, --match-dir or --ignore-decorators consider
whether you should use a single quote (') or a double quote ("),
depending on your OS, Shell, etc.
Error Check Options:
Only one of --select, --ignore or --convention can be specified. If
none is specified, defaults to `--convention=pep257`. These three
options select the "basic list" of error codes to check. If you wish
to change that list (for example, if you selected a known convention
but wish to ignore a specific error from it or add a new one) you can
use `--add-[ignore/select]` in order to do so.
--select= choose the basic list of checked errors by specifying
which errors to check for (with a list of comma-
separated error codes or prefixes). for example:
--select=D101,D2
--ignore= choose the basic list of checked errors by specifying
which errors to ignore out of all of the available
error codes (with a list of comma-separated error
codes or prefixes). for example: --ignore=D101,D2
--convention=
choose the basic list of checked errors by specifying
an existing convention. Possible conventions: pep257,
numpy, google.
--add-select=
add extra error codes to check to the basic list of
errors previously set by --select, --ignore or
--convention.
--add-ignore=
ignore extra error codes by removing them from the
basic list previously set by --select, --ignore or
--convention.
.. note::
When using any of the ``--select``, ``--ignore``, ``--add-select``, or
``--add-ignore`` command line flags, it is possible to pass a prefix for an
error code. It will be expanded so that any code beginning with that prefix
will match. For example, running the command ``pydocstyle --ignore=D4``
will ignore all docstring content issues, as their error codes begin with
"D4" (i.e. D400, D401, D402, D403, and D404).
Return Code
^^^^^^^^^^^
+--------------+--------------------------------------------------------------+
| 0 | Success - no violations |
+--------------+--------------------------------------------------------------+
| 1 | Some code violations were found |
+--------------+--------------------------------------------------------------+
| 2 | Illegal usage - see error message |
+--------------+--------------------------------------------------------------+
pydocstyle-6.3.0/docs/snippets/config.rst
``pydocstyle`` supports *ini*-like and *toml* configuration files.
In order for ``pydocstyle`` to use a configuration file automatically, it must
be named one of the following options.
* ``setup.cfg``
* ``tox.ini``
* ``.pydocstyle``
* ``.pydocstyle.ini``
* ``.pydocstylerc``
* ``.pydocstylerc.ini``
* ``pyproject.toml``
When searching for a configuration file, ``pydocstyle`` looks for one of the
file specified above *in that exact order*. *ini*-like configuration files must
have a ``[pydocstyle]`` section while *toml* configuration files must have a
``[tool.pydocstyle]`` section. If a configuration file was not found,
``pydocstyle`` keeps looking for one up the directory tree until one is found
or uses the default configuration.
.. note::
*toml* configuration file support is only enabled if the ``toml`` python
package is installed. You can ensure that this is the case by installing
the ``pydocstyle[toml]`` optional dependency.
.. note::
For backwards compatibility purposes, **pydocstyle** supports configuration
files named ``.pep257``, as well as section header ``[pep257]``. However,
these are considered deprecated and support will be removed in the next
major version.
Available Options
#################
Not all configuration options are available in the configuration files.
Available options are:
* ``convention``
* ``select``
* ``ignore``
* ``add_select``
* ``add_ignore``
* ``match``
* ``match_dir``
* ``ignore_decorators``
* ``property_decorators``
* ``ignore_self_only_init``
See the :ref:`cli_usage` section for more information.
Inheritance
###########
By default, when finding a configuration file, ``pydocstyle`` tries to inherit
the parent directory's configuration and merge them to the local ones.
The merge process is as follows:
* If one of ``select``, ``ignore`` or ``convention`` was specified in the child
configuration - Ignores the parent configuration and set the new error codes
to check. Otherwise, simply copies the parent checked error codes.
* If ``add-ignore`` or ``add-select`` were specified, adds or removes the
specified error codes from the checked error codes list.
* If ``match`` or ``match-dir`` were specified - use them. Otherwise, use the
parent's.
In order to disable this (useful for configuration files located in your repo's
root), simply add ``inherit=false`` to your configuration file.
.. note::
If any of ``select``, ``ignore`` or ``convention`` were specified in
the CLI, the configuration files will take no part in choosing which error
codes will be checked. ``match`` and ``match-dir`` will still take effect.
Example
#######
.. code::
    [pydocstyle]
    inherit = false
    ignore = D100,D203,D405
    match = .*\.py
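
A roughly equivalent *toml* configuration in ``pyproject.toml`` might look like
this (a sketch only; the string values mirror the *ini* syntax above and may
need adjusting for your setup):

.. code::

    [tool.pydocstyle]
    inherit = false
    ignore = "D100,D203,D405"
    match = '.*\.py'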
pydocstyle-6.3.0/docs/snippets/in_file.rst
``pydocstyle`` supports inline commenting to skip specific checks on
specific functions or methods. The supported comments that can be added are:
1. ``"# noqa"`` skips all checks.
2. ``"# noqa: D102,D203"`` can be used to skip specific checks. Note that
this is compatible with skips from `flake8 `_,
e.g. ``# noqa: D102,E501,D203``.
For example, this will skip the check for a period at the end of a function
docstring::
    >>> def bad_function():  # noqa: D400
    ...     """Omit a period in the docstring as an exception"""
    ...     pass
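
Similarly, a bare ``# noqa`` comment skips every check for that definition
(the function below is purely illustrative)::

    >>> def version():  # noqa
    ...     '''returns the version'''
    ...     return "1.0"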
pydocstyle-6.3.0/docs/snippets/install.rst
Use `pip <https://pip.pypa.io/>`_ or easy_install:
.. code::

    pip install pydocstyle
Alternatively, you can use ``pydocstyle.py`` source file
directly - it is self-contained.
pydocstyle-6.3.0/docs/snippets/pre_commit.rst
**pydocstyle** can be included as a hook for `pre-commit`_. The easiest way to get
started is to add this configuration to your ``.pre-commit-config.yaml``:
.. parsed-literal::

    - repo: https://github.com/pycqa/pydocstyle
      rev: \ |version| \  # pick a git hash / tag to point to
      hooks:
        - id: pydocstyle
See the `pre-commit docs`_ for how to customize this configuration.
Checked-in python files will be passed as positional arguments so no need to use ``--match=*.py``.
You can also use command line arguments instead of configuration files
to achieve the same effect with fewer files.
.. code-block:: yaml
- id: pydocstyle
args:
- --ignore=D100,D203,D405
# or multiline
- |-
--select=
D101,
D2
.. _pre-commit:
https://pre-commit.com/
.. _pre-commit docs:
https://pre-commit.com/#pre-commit-configyaml---hooks
pydocstyle-6.3.0/docs/snippets/publicity.rst 0000664 0000000 0000000 00000005065 14361602300 0021253 0 ustar 00root root 0000000 0000000 The D1xx group of errors deals with missing docstrings in public constructs:
modules, classes, methods, etc. It is important to note how publicity is
determined and what its effects are.
How publicity is determined
^^^^^^^^^^^^^^^^^^^^^^^^^^^
Publicity for all constructs is determined as follows: a construct is
considered *public* if -
1. Its immediate parent is public *and*
2. Its name does *not* start with a single or double underscore.
a. Note, names that start and end with a double underscore are *public* (e.g. ``__init__.py``).
A construct's immediate parent is the construct that contains it. For example,
a method's parent is a class object. A class' parent is usually a module, but
might also be a function, method, etc. A module can either have no parent, or
it can have a parent that is a package.
In order for a construct to be considered public, its immediate parent must
also be public. Since this definition is recursive, it means that *all* of its
parents need to be public. The corollary is that if a construct is considered
private, then all of its descendants are also considered private. For example,
a class called ``_Foo`` is considered private. A method ``bar`` in ``_Foo`` is
also considered private since its parent is a private class, even though its
name does not begin with a single underscore.
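For instance, in the following minimal sketch (the names are made up for
illustration) only ``Spam`` and ``Spam.eggs`` are public; ``_Foo`` and everything
defined inside it is private, so no D1xx errors are reported for ``_Foo`` or
``_Foo.bar``::
    class Spam:              # public: its parent (the module) is public
        """A public class."""
        def eggs(self):      # public method: D102 would be reported if this docstring were missing
            """Do something."""
    class _Foo:              # private: the name starts with a single underscore
        def bar(self):       # also private, because its parent class is private;
            pass             # no D102 is reported even though it has no docstring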
Note, a module's parent is recursively checked upward until we reach a directory
in ``sys.path`` to avoid considering the complete filepath of a module.
For example, consider the module ``/_foo/bar/baz.py``.
If ``PYTHONPATH`` is set to ``/``, then ``baz.py`` is *private*.
If ``PYTHONPATH`` is set to ``/_foo/``, then ``baz.py`` is *public*.
Modules are parsed to check whether ``__all__`` is defined. If so, only those
top-level constructs are considered public. The parser looks for ``__all__``
defined as a literal list or tuple. As the parser doesn't execute the module,
any mutation of ``__all__`` will not be considered.
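For example, in the module sketched below (hypothetical names), only
``public_func`` is treated as public; ``helper`` is considered private because it
is not listed in ``__all__``, and the later ``append`` call has no effect on the
check::
    __all__ = ["public_func"]   # parsed as a literal list; only this name is public
    def public_func():          # would be reported as D103 if its docstring were missing
        """Do the public thing."""
    def helper():               # not in __all__, so considered private: no D103
        pass
    __all__.append("helper")    # mutation of __all__ is not considered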
How publicity affects error reports
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
The immediate effect of a construct being determined as private is that no
D1xx errors will be reported for it (or its children, as the previous section
explains). A private method, for instance, will not generate a D102 error, even
if it has no docstring.
However, it is important to note that while docstrings are optional for private
constructs, any docstring that is provided must still adhere to your style
guide. So if a private module ``_foo.py`` does not have a docstring, it will not generate a
D100 error, but if it *does* have a docstring, that docstring might generate
other errors.
pydocstyle-6.3.0/docs/usage.rst 0000664 0000000 0000000 00000000677 14361602300 0016512 0 ustar 00root root 0000000 0000000 Usage
=====
Installation
------------
.. include:: snippets/install.rst
Command Line Interface
----------------------
.. include:: snippets/cli.rst
Configuration Files
^^^^^^^^^^^^^^^^^^^
.. include:: snippets/config.rst
In-file configuration
^^^^^^^^^^^^^^^^^^^^^
.. include:: snippets/in_file.rst
Usage with the `pre-commit`_ git hooks framework
------------------------------------------------
.. include:: snippets/pre_commit.rst pydocstyle-6.3.0/poetry.lock 0000664 0000000 0000000 00000006306 14361602300 0016113 0 ustar 00root root 0000000 0000000 # This file is automatically @generated by Poetry and should not be changed by hand.
[[package]]
name = "importlib-metadata"
version = "4.8.3"
description = "Read metadata from Python packages"
category = "main"
optional = false
python-versions = ">=3.6"
files = [
{file = "importlib_metadata-4.8.3-py3-none-any.whl", hash = "sha256:65a9576a5b2d58ca44d133c42a241905cc45e34d2c06fd5ba2bafa221e5d7b5e"},
{file = "importlib_metadata-4.8.3.tar.gz", hash = "sha256:766abffff765960fcc18003801f7044eb6755ffae4521c8e8ce8e83b9c9b0668"},
]
[package.dependencies]
typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""}
zipp = ">=0.5"
[package.extras]
docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"]
perf = ["ipython"]
testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pep517", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy", "pytest-perf (>=0.9.2)"]
[[package]]
name = "snowballstemmer"
version = "2.2.0"
description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms."
category = "main"
optional = false
python-versions = "*"
files = [
{file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"},
{file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"},
]
[[package]]
name = "tomli"
version = "1.2.3"
description = "A lil' TOML parser"
category = "main"
optional = true
python-versions = ">=3.6"
files = [
{file = "tomli-1.2.3-py3-none-any.whl", hash = "sha256:e3069e4be3ead9668e21cb9b074cd948f7b3113fd9c8bba083f48247aab8b11c"},
{file = "tomli-1.2.3.tar.gz", hash = "sha256:05b6166bff487dc068d322585c7ea4ef78deed501cc124060e0f238e89a9231f"},
]
[[package]]
name = "typing-extensions"
version = "4.1.1"
description = "Backported and Experimental Type Hints for Python 3.6+"
category = "main"
optional = false
python-versions = ">=3.6"
files = [
{file = "typing_extensions-4.1.1-py3-none-any.whl", hash = "sha256:21c85e0fe4b9a155d0799430b0ad741cdce7e359660ccbd8b530613e8df88ce2"},
{file = "typing_extensions-4.1.1.tar.gz", hash = "sha256:1a9462dcc3347a79b1f1c0271fbe79e844580bb598bafa1ed208b94da3cdcd42"},
]
[[package]]
name = "zipp"
version = "3.6.0"
description = "Backport of pathlib-compatible object wrapper for zip files"
category = "main"
optional = false
python-versions = ">=3.6"
files = [
{file = "zipp-3.6.0-py3-none-any.whl", hash = "sha256:9fe5ea21568a0a70e50f273397638d39b03353731e6cbbb3fd8502a33fec40bc"},
{file = "zipp-3.6.0.tar.gz", hash = "sha256:71c644c5369f4a6e07636f0aa966270449561fcea2e3d6747b8d23efaa9d7832"},
]
[package.extras]
docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"]
testing = ["func-timeout", "jaraco.itertools", "pytest (>=4.6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy"]
[extras]
toml = ["tomli"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.6"
content-hash = "1abb1b7c1fa0c27846501ad1b5d7916eb5ec6e7961eab46ced6887d16428977a"
pydocstyle-6.3.0/pyproject.toml 0000664 0000000 0000000 00000002632 14361602300 0016631 0 ustar 00root root 0000000 0000000 [tool.poetry]
name = "pydocstyle"
version = "0.0.0-dev"
description = "Python docstring style checker"
authors = ["Amir Rachum", "Sambhav Kothari"]
[tool.poetry.dependencies]
python = ">=3.6"
snowballstemmer = ">=1.2.1"
tomli = {version = ">=1.2.3", optional = true, python = "<3.11"}
importlib-metadata = {version = ">=2.0.0,<5.0.0", python = "<3.8"}
[tool.poetry.extras]
toml = ["tomli"]
[tool.poetry.scripts]
pydocstyle = "pydocstyle.cli:main"
[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
[tool.black]
line-length = 79
target-version = ['py36']
skip-string-normalization = true
[tool.isort]
profile = "black"
src_paths = ["src/pydocstyle"]
line_length = 79
[tool.mypy]
ignore_missing_imports = true
strict_optional = true
disallow_incomplete_defs = true
[tool.pytest.ini_options]
norecursedirs = ["docs", ".tox"]
addopts = """
-vv
-rw
--cache-clear
"""
pydocstyle-6.3.0/requirements.txt 0000664 0000000 0000000 00000000154 14361602300 0017176 0 ustar 00root root 0000000 0000000 -r requirements/docs.txt
-r requirements/tests.txt
-r requirements/test_env.txt
-r requirements/runtime.txt
pydocstyle-6.3.0/requirements/ 0000775 0000000 0000000 00000000000 14361602300 0016435 5 ustar 00root root 0000000 0000000 pydocstyle-6.3.0/requirements/docs.txt 0000664 0000000 0000000 00000000116 14361602300 0020124 0 ustar 00root root 0000000 0000000 Jinja2
sphinx
sphinx_rtd_theme
# adding . so that pydocstyle gets installed
.
pydocstyle-6.3.0/requirements/runtime.txt 0000664 0000000 0000000 00000000166 14361602300 0020664 0 ustar 00root root 0000000 0000000 snowballstemmer>=1.2.1
tomli>=1.2.3; python_version < "3.11"
importlib-metadata<5.0.0,>=2.0.0; python_version < "3.8"
pydocstyle-6.3.0/requirements/test_env.txt 0000664 0000000 0000000 00000000003 14361602300 0021016 0 ustar 00root root 0000000 0000000 tox pydocstyle-6.3.0/requirements/tests.txt 0000664 0000000 0000000 00000000104 14361602300 0020333 0 ustar 00root root 0000000 0000000 pytest==6.2.5
mypy==0.930
black==22.3
isort==5.4.2
types-setuptools
pydocstyle-6.3.0/src/ 0000775 0000000 0000000 00000000000 14361602300 0014501 5 ustar 00root root 0000000 0000000 pydocstyle-6.3.0/src/__init__.py 0000664 0000000 0000000 00000000000 14361602300 0016600 0 ustar 00root root 0000000 0000000 pydocstyle-6.3.0/src/pydocstyle/ 0000775 0000000 0000000 00000000000 14361602300 0016700 5 ustar 00root root 0000000 0000000 pydocstyle-6.3.0/src/pydocstyle/__init__.py 0000664 0000000 0000000 00000000302 14361602300 0021004 0 ustar 00root root 0000000 0000000 from ._version import __version__
# Temporary hotfix for flake8-docstrings
from .checker import ConventionChecker, check
from .parser import AllError
from .violations import Error, conventions
pydocstyle-6.3.0/src/pydocstyle/__main__.py 0000664 0000000 0000000 00000000440 14361602300 0020770 0 ustar 00root root 0000000 0000000 #! /usr/bin/env python
"""Static analysis tool for checking docstring conventions and style.
The repository is located at:
http://github.com/PyCQA/pydocstyle
"""
__all__ = ()
def main() -> None:
from pydocstyle import cli
cli.main()
if __name__ == '__main__':
main()
pydocstyle-6.3.0/src/pydocstyle/_version.py 0000664 0000000 0000000 00000000620 14361602300 0021074 0 ustar 00root root 0000000 0000000 import sys
if sys.version_info[:2] >= (3, 8):
from importlib import metadata
else:
import importlib_metadata as metadata # pragma: no cover
# Used to automatically set version number from github actions
# as well as not break when being tested locally
try:
__version__ = metadata.version(__package__)
except metadata.PackageNotFoundError: # pragma: no cover
__version__ = "0.0.0"
pydocstyle-6.3.0/src/pydocstyle/checker.py 0000664 0000000 0000000 00000127524 14361602300 0020671 0 ustar 00root root 0000000 0000000 """Parsed source code checkers for docstring violations."""
import ast
import string
import tokenize as tk
from collections import namedtuple
from itertools import chain, takewhile
from re import compile as re
from textwrap import dedent
from . import violations
from .config import IllegalConfiguration
from .parser import (
AllError,
Class,
Definition,
Function,
Method,
Module,
NestedClass,
NestedFunction,
Package,
ParseError,
Parser,
StringIO,
)
from .utils import (
common_prefix_length,
is_blank,
log,
pairwise,
strip_non_alphanumeric,
)
from .wordlists import IMPERATIVE_BLACKLIST, IMPERATIVE_VERBS, stem
__all__ = ('check',)
def check_for(kind, terminal=False):
def decorator(f):
f._check_for = kind
f._terminal = terminal
return f
return decorator
class ConventionChecker:
"""Checker for PEP 257, NumPy and Google conventions.
D10x: Missing docstrings
D20x: Whitespace issues
D30x: Docstring formatting
D40x: Docstring content issues
"""
NUMPY_SECTION_NAMES = (
'Short Summary',
'Extended Summary',
'Parameters',
'Returns',
'Yields',
'Other Parameters',
'Raises',
'See Also',
'Notes',
'References',
'Examples',
'Attributes',
'Methods',
)
GOOGLE_SECTION_NAMES = (
'Args',
'Arguments',
'Attention',
'Attributes',
'Caution',
'Danger',
'Error',
'Example',
'Examples',
'Hint',
'Important',
'Keyword Args',
'Keyword Arguments',
'Methods',
'Note',
'Notes',
'Return',
'Returns',
'Raises',
'References',
'See Also',
'Tip',
'Todo',
'Warning',
'Warnings',
'Warns',
'Yield',
'Yields',
)
# Examples that will be matched -
# " random: Test" where random will be captured as the param
# " random : test" where random will be captured as the param
# " random_t (Test) : test " where random_t will be captured as the param
# Matches anything that fulfills all the following conditions:
GOOGLE_ARGS_REGEX = re(
# Begins with 0 or more whitespace characters
r"^\s*"
# Followed by 1 or more unicode chars, numbers or underscores
# The above is captured as the first group as this is the parameter name.
r"(\w+)"
# Followed by 0 or more whitespace characters
r"\s*"
# Matches patterns contained within round brackets.
# The `.*?` matches any sequence of characters in a non-greedy
# way (denoted by the `*?`)
r"(\(.*?\))?"
# Followed by 0 or more whitespace chars
r"\s*"
# Followed by a colon
r":"
# Might have a new line and leading whitespace
r"\n?\s*"
# Followed by 1 or more characters - which is the docstring for the parameter
".+"
)
def check_source(
self,
source,
filename,
ignore_decorators=None,
property_decorators=None,
ignore_inline_noqa=False,
ignore_self_only_init=False,
):
self.property_decorators = (
{} if property_decorators is None else property_decorators
)
self.ignore_self_only_init = ignore_self_only_init
module = parse(StringIO(source), filename)
for definition in module:
for this_check in self.checks:
terminate = False
if isinstance(definition, this_check._check_for):
skipping_all = definition.skipped_error_codes == 'all'
decorator_skip = ignore_decorators is not None and any(
len(ignore_decorators.findall(dec.name)) > 0
for dec in definition.decorators
)
if (
ignore_inline_noqa or not skipping_all
) and not decorator_skip:
error = this_check(
self, definition, definition.docstring
)
else:
error = None
errors = error if hasattr(error, '__iter__') else [error]
for error in errors:
if error is not None and (
ignore_inline_noqa
or error.code not in definition.skipped_error_codes
):
partition = this_check.__doc__.partition('.\n')
message, _, explanation = partition
error.set_context(
explanation=explanation, definition=definition
)
yield error
if this_check._terminal:
terminate = True
break
if terminate:
break
@property
def checks(self):
all = [
this_check
for this_check in vars(type(self)).values()
if hasattr(this_check, '_check_for')
]
return sorted(all, key=lambda this_check: not this_check._terminal)
@check_for(Definition, terminal=True)
def check_docstring_missing(self, definition, docstring):
"""D10{0,1,2,3}: Public definitions should have docstrings.
All modules should normally have docstrings. [...] all functions and
classes exported by a module should also have docstrings. Public
methods (including the __init__ constructor) should also have
docstrings.
Note: Public (exported) definitions are either those with names listed
in __all__ variable (if present), or those that do not start
with a single underscore.
"""
def method_violation():
if definition.is_magic:
return violations.D105()
if definition.is_init:
if (
self.ignore_self_only_init
and len(definition.param_names) == 1
):
return None
return violations.D107()
if not definition.is_overload:
return violations.D102()
return None
if not docstring and definition.is_public:
codes = {
Module: violations.D100,
Class: violations.D101,
NestedClass: violations.D106,
Method: method_violation,
NestedFunction: violations.D103,
Function: (
lambda: violations.D103()
if not definition.is_overload
else None
),
Package: violations.D104,
}
return codes[type(definition)]()
@check_for(Definition, terminal=True)
def check_docstring_empty(self, definition, docstring):
"""D419: Docstring is empty.
If the user provided a docstring but it was empty, it is like they never provided one.
NOTE: This used to report as D10X errors.
"""
if docstring and is_blank(ast.literal_eval(docstring)):
return violations.D419()
@check_for(Definition)
def check_one_liners(self, definition, docstring):
"""D200: One-liner docstrings should fit on one line with quotes.
The closing quotes are on the same line as the opening quotes.
This looks better for one-liners.
"""
if docstring:
lines = ast.literal_eval(docstring).split('\n')
if len(lines) > 1:
non_empty_lines = sum(1 for l in lines if not is_blank(l))
if non_empty_lines == 1:
return violations.D200(len(lines))
@check_for(Function)
def check_no_blank_before(self, function, docstring): # def
"""D20{1,2}: No blank lines allowed around function/method docstring.
There's no blank line either before or after the docstring unless directly
followed by an inner function or class.
"""
if docstring:
before, _, after = function.source.partition(docstring)
blanks_before = list(map(is_blank, before.split('\n')[:-1]))
blanks_after = list(map(is_blank, after.split('\n')[1:]))
blanks_before_count = sum(takewhile(bool, reversed(blanks_before)))
blanks_after_count = sum(takewhile(bool, blanks_after))
if blanks_before_count != 0:
yield violations.D201(blanks_before_count)
if not all(blanks_after) and blanks_after_count != 0:
# Report a D202 violation if the docstring is followed by a blank line
# and the blank line is not itself followed by an inner function or
# class.
if not (
blanks_after_count == 1
and re(r"\s+(?:(?:class|def|async def)\s|@)").match(after)
):
yield violations.D202(blanks_after_count)
@check_for(Class)
def check_blank_before_after_class(self, class_, docstring):
"""D20{3,4}: Class docstring should have 1 blank line around them.
Insert a blank line before and after all docstrings (one-line or
multi-line) that document a class -- generally speaking, the class's
methods are separated from each other by a single blank line, and the
docstring needs to be offset from the first method by a blank line;
for symmetry, put a blank line between the class header and the
docstring.
"""
# NOTE: this gives false-positive in this case
# class Foo:
#
# """Docstring."""
#
#
# # comment here
# def foo(): pass
if docstring:
before, _, after = class_.source.partition(docstring)
blanks_before = list(map(is_blank, before.split('\n')[:-1]))
blanks_after = list(map(is_blank, after.split('\n')[1:]))
blanks_before_count = sum(takewhile(bool, reversed(blanks_before)))
blanks_after_count = sum(takewhile(bool, blanks_after))
if blanks_before_count != 0:
yield violations.D211(blanks_before_count)
if blanks_before_count != 1:
yield violations.D203(blanks_before_count)
if not all(blanks_after) and blanks_after_count != 1:
yield violations.D204(blanks_after_count)
@check_for(Definition)
def check_blank_after_summary(self, definition, docstring):
"""D205: Put one blank line between summary line and description.
Multi-line docstrings consist of a summary line just like a one-line
docstring, followed by a blank line, followed by a more elaborate
description. The summary line may be used by automatic indexing tools;
it is important that it fits on one line and is separated from the
rest of the docstring by a blank line.
"""
if docstring:
lines = ast.literal_eval(docstring).strip().split('\n')
if len(lines) > 1:
post_summary_blanks = list(map(is_blank, lines[1:]))
blanks_count = sum(takewhile(bool, post_summary_blanks))
if blanks_count != 1:
return violations.D205(blanks_count)
@staticmethod
def _get_docstring_indent(definition, docstring):
"""Return the indentation of the docstring's opening quotes."""
before_docstring, _, _ = definition.source.partition(docstring)
_, _, indent = before_docstring.rpartition('\n')
return indent
@check_for(Definition)
def check_indent(self, definition, docstring):
"""D20{6,7,8}: The entire docstring should be indented same as code.
The entire docstring is indented the same as the quotes at its
first line.
"""
if docstring:
indent = self._get_docstring_indent(definition, docstring)
lines = docstring.split('\n')
if len(lines) > 1:
# First line and line continuations need no indent.
lines = [
line
for i, line in enumerate(lines)
if i and not lines[i - 1].endswith('\\')
]
indents = [leading_space(l) for l in lines if not is_blank(l)]
if set(' \t') == set(''.join(indents) + indent):
yield violations.D206()
if (len(indents) > 1 and min(indents[:-1]) > indent) or (
len(indents) > 0 and indents[-1] > indent
):
yield violations.D208()
if len(indents) > 0 and min(indents) < indent:
yield violations.D207()
@check_for(Definition)
def check_newline_after_last_paragraph(self, definition, docstring):
"""D209: Put multi-line docstring closing quotes on separate line.
Unless the entire docstring fits on a line, place the closing
quotes on a line by themselves.
"""
if docstring:
lines = [
l
for l in ast.literal_eval(docstring).split('\n')
if not is_blank(l)
]
if len(lines) > 1:
if docstring.split("\n")[-1].strip() not in ['"""', "'''"]:
return violations.D209()
@check_for(Definition)
def check_surrounding_whitespaces(self, definition, docstring):
"""D210: No whitespaces allowed surrounding docstring text."""
if docstring:
lines = ast.literal_eval(docstring).split('\n')
if (
lines[0].startswith(' ')
or len(lines) == 1
and lines[0].endswith(' ')
):
return violations.D210()
@check_for(Definition)
def check_multi_line_summary_start(self, definition, docstring):
"""D21{2,3}: Multi-line docstring summary style check.
A multi-line docstring summary should start either at the first,
or separately at the second line of a docstring.
"""
if docstring:
start_triple = [
'"""',
"'''",
'u"""',
"u'''",
'r"""',
"r'''",
'ur"""',
"ur'''",
]
lines = ast.literal_eval(docstring).split('\n')
if len(lines) > 1:
first = docstring.split("\n")[0].strip().lower()
if first in start_triple:
return violations.D212()
else:
return violations.D213()
@check_for(Definition)
def check_triple_double_quotes(self, definition, docstring):
r'''D300: Use """triple double quotes""".
For consistency, always use """triple double quotes""" around
docstrings. Use r"""raw triple double quotes""" if you use any
backslashes in your docstrings. For Unicode docstrings, use
u"""Unicode triple-quoted strings""".
Note: Exception to this is made if the docstring contains
""" quotes in its body.
'''
if docstring:
if '"""' in ast.literal_eval(docstring):
# Allow ''' quotes if docstring contains """, because
# otherwise """ quotes could not be expressed inside
# docstring. Not in PEP 257.
regex = re(r"[uU]?[rR]?'''[^'].*")
else:
regex = re(r'[uU]?[rR]?"""[^"].*')
if not regex.match(docstring):
illegal_matcher = re(r"""[uU]?[rR]?("+|'+).*""")
illegal_quotes = illegal_matcher.match(docstring).group(1)
return violations.D300(illegal_quotes)
@check_for(Definition)
def check_backslashes(self, definition, docstring):
r'''D301: Use r""" if any backslashes in a docstring.
Use r"""raw triple double quotes""" if you use any backslashes
(\) in your docstrings.
Exceptions are backslashes for line-continuation and unicode escape
sequences \N... and \u... These are considered intended unescaped
content in docstrings.
'''
# Just check that docstring is raw, check_triple_double_quotes
# ensures the correct quotes.
if (
docstring
and re(r'\\[^\nuN]').search(docstring)
and not docstring.startswith(('r', 'ur'))
):
return violations.D301()
@staticmethod
def _check_ends_with(docstring, chars, violation):
"""First line ends with one of `chars`.
First line of the docstring should end with one of the characters in `chars`.
`chars` supports either a `str` or an `Iterable[str]`. If the condition is
evaluated to be false, the corresponding `violation` is returned.
"""
if docstring:
summary_line = ast.literal_eval(docstring).strip().split('\n')[0]
if not summary_line.endswith(chars):
return violation(summary_line[-1])
@check_for(Definition)
def check_ends_with_period(self, definition, docstring):
"""D400: First line should end with a period.
The [first line of a] docstring is a phrase ending in a period.
"""
return self._check_ends_with(docstring, '.', violations.D400)
@check_for(Definition)
def check_ends_with_punctuation(self, definition, docstring):
"""D415: should end with proper punctuation.
The [first line of a] docstring is a phrase ending in a period,
question mark, or exclamation point
"""
return self._check_ends_with(
docstring, ('.', '!', '?'), violations.D415
)
@check_for(Function)
def check_imperative_mood(self, function, docstring): # def context
"""D401: First line should be in imperative mood: 'Do', not 'Does'.
[Docstring] prescribes the function or method's effect as a command:
("Do this", "Return that"), not as a description; e.g. don't write
"Returns the pathname ...".
"""
if (
docstring
and not function.is_test
and not function.is_property(self.property_decorators)
):
stripped = ast.literal_eval(docstring).strip()
if stripped:
first_word = strip_non_alphanumeric(stripped.split()[0])
check_word = first_word.lower()
if check_word in IMPERATIVE_BLACKLIST:
return violations.D401b(first_word)
correct_forms = IMPERATIVE_VERBS.get(stem(check_word))
if correct_forms and check_word not in correct_forms:
best = max(
correct_forms,
key=lambda f: common_prefix_length(check_word, f),
)
return violations.D401(best.capitalize(), first_word)
@check_for(Function)
def check_no_signature(self, function, docstring): # def context
"""D402: First line should not be function's or method's "signature".
The one-line docstring should NOT be a "signature" reiterating the
function/method parameters (which can be obtained by introspection).
"""
if docstring:
first_line = ast.literal_eval(docstring).strip().split('\n')[0]
if function.name + '(' in first_line.replace(' ', ''):
return violations.D402()
@check_for(Function)
def check_capitalized(self, function, docstring):
"""D403: First word of the first line should be properly capitalized.
The [first line of a] docstring is a phrase ending in a period.
"""
if docstring:
first_word = ast.literal_eval(docstring).split()[0]
if first_word == first_word.upper():
return
for char in first_word:
if char not in string.ascii_letters and char != "'":
return
if first_word != first_word.capitalize():
return violations.D403(first_word.capitalize(), first_word)
@check_for(Function)
def check_if_needed(self, function, docstring):
"""D418: Function decorated with @overload shouldn't contain a docstring.
Functions that are decorated with @overload are definitions,
and are for the benefit of the type checker only,
since they will be overwritten by the non-@overload-decorated definition.
"""
if docstring and function.is_overload:
return violations.D418()
@check_for(Definition)
def check_starts_with_this(self, function, docstring):
"""D404: First word of the docstring should not be `This`.
Docstrings should use short, simple language. They should not begin
with "This class is [..]" or "This module contains [..]".
"""
if not docstring:
return
stripped = ast.literal_eval(docstring).strip()
if not stripped:
return
first_word = strip_non_alphanumeric(stripped.split()[0])
if first_word.lower() == 'this':
return violations.D404()
@staticmethod
def _is_docstring_section(context):
"""Check if the suspected context is really a section header.
Let's have a look at the following example docstring:
'''Title.
Some part of the docstring that specifies what the function
returns. <----- Not a real section name. It has a suffix and the
previous line is not empty and does not end with
a punctuation sign.
This is another line in the docstring. It describes stuff,
but we forgot to add a blank line between it and the section name.
Parameters <-- A real section name. The previous line ends with
---------- a period, therefore it is in a new
grammatical context.
param : int
examples : list <------- Not a section - previous line doesn't end
A list of examples. with punctuation.
notes : list <---------- Not a section - there's text after the
A list of notes. colon.
Notes: <--- Suspected as a context because there's a suffix to the
----- section, but it's a colon so it's probably a mistake.
Bla.
'''
To make sure this is really a section we check these conditions:
* There's no suffix to the section name or it's just a colon AND
* The previous line is empty OR it ends with punctuation.
If one of the conditions is true, we will consider the line as
a section name.
"""
section_name_suffix = (
context.line.strip().lstrip(context.section_name.strip()).strip()
)
section_suffix_is_only_colon = section_name_suffix == ':'
punctuation = [',', ';', '.', '-', '\\', '/', ']', '}', ')']
prev_line_ends_with_punctuation = any(
context.previous_line.strip().endswith(x) for x in punctuation
)
this_line_looks_like_a_section_name = (
is_blank(section_name_suffix) or section_suffix_is_only_colon
)
prev_line_looks_like_end_of_paragraph = (
prev_line_ends_with_punctuation or is_blank(context.previous_line)
)
return (
this_line_looks_like_a_section_name
and prev_line_looks_like_end_of_paragraph
)
@classmethod
def _check_blanks_and_section_underline(
cls, section_name, context, indentation
):
"""D4{07,08,09,12,14}, D215: Section underline checks.
Check for correct formatting for docstring sections. Checks that:
* The line that follows the section name contains
dashes (D40{7,8}).
* The amount of dashes is equal to the length of the section
name (D409).
* The section's content does not begin in the line that follows
the section header (D412).
* The section has no content (D414).
* The indentation of the dashed line is equal to the docstring's
indentation (D215).
"""
blank_lines_after_header = 0
for line in context.following_lines:
if not is_blank(line):
break
blank_lines_after_header += 1
else:
# There are only blank lines after the header.
yield violations.D407(section_name)
yield violations.D414(section_name)
return
non_empty_line = context.following_lines[blank_lines_after_header]
dash_line_found = ''.join(set(non_empty_line.strip())) == '-'
if not dash_line_found:
yield violations.D407(section_name)
if blank_lines_after_header > 0:
yield violations.D412(section_name)
else:
if blank_lines_after_header > 0:
yield violations.D408(section_name)
if non_empty_line.strip() != "-" * len(section_name):
yield violations.D409(
len(section_name),
section_name,
len(non_empty_line.strip()),
)
if leading_space(non_empty_line) > indentation:
yield violations.D215(section_name)
line_after_dashes_index = blank_lines_after_header + 1
# If the line index after the dashes is in range (perhaps we have
# a header + underline followed by another section header).
if line_after_dashes_index < len(context.following_lines):
line_after_dashes = context.following_lines[
line_after_dashes_index
]
if is_blank(line_after_dashes):
rest_of_lines = context.following_lines[
line_after_dashes_index:
]
if not is_blank(''.join(rest_of_lines)):
yield violations.D412(section_name)
else:
yield violations.D414(section_name)
else:
yield violations.D414(section_name)
@classmethod
def _check_common_section(
cls, docstring, definition, context, valid_section_names
):
"""D4{05,10,11,13}, D214: Section name checks.
Check for valid section names. Checks that:
* The section name is properly capitalized (D405).
* The section is not over-indented (D214).
* There's a blank line after the section (D410, D413).
* There's a blank line before the section (D411).
Also yields all the errors from `_check_blanks_and_section_underline`.
"""
indentation = cls._get_docstring_indent(definition, docstring)
capitalized_section = context.section_name.title()
if (
context.section_name not in valid_section_names
and capitalized_section in valid_section_names
):
yield violations.D405(capitalized_section, context.section_name)
if leading_space(context.line) > indentation:
yield violations.D214(capitalized_section)
if not context.following_lines or not is_blank(
context.following_lines[-1]
):
if context.is_last_section:
yield violations.D413(capitalized_section)
else:
yield violations.D410(capitalized_section)
if not is_blank(context.previous_line):
yield violations.D411(capitalized_section)
yield from cls._check_blanks_and_section_underline(
capitalized_section, context, indentation
)
@classmethod
def _check_numpy_section(cls, docstring, definition, context):
"""D406: NumPy-style section name checks.
Check for valid section names. Checks that:
* The section name has no superfluous suffix to it (D406).
Additionally, also yield all violations from `_check_common_section`
which are style-agnostic section checks.
"""
indentation = cls._get_docstring_indent(definition, docstring)
capitalized_section = context.section_name.title()
yield from cls._check_common_section(
docstring, definition, context, cls.NUMPY_SECTION_NAMES
)
suffix = context.line.strip().lstrip(context.section_name)
if suffix:
yield violations.D406(capitalized_section, context.line.strip())
if capitalized_section == "Parameters":
yield from cls._check_parameters_section(
docstring, definition, context
)
@staticmethod
def _check_parameters_section(docstring, definition, context):
"""D417: `Parameters` section check for numpy style.
Check for a valid `Parameters` section. Checks that:
* The section documents all function arguments (D417)
except `self` or `cls` if it is a method.
"""
docstring_args = set()
section_level_indent = leading_space(context.line)
# Join line continuations, then resplit by line.
content = (
'\n'.join(context.following_lines).replace('\\\n', '').split('\n')
)
for current_line, next_line in zip(content, content[1:]):
# All parameter definitions in the Numpy parameters
# section must be at the same indent level as the section
# name.
# Also, we ensure that the following line is indented,
# and has some string, to ensure that the parameter actually
# has a description.
# This means, this is a parameter doc with some description
if (
(leading_space(current_line) == section_level_indent)
and (
len(leading_space(next_line))
> len(leading_space(current_line))
)
and next_line.strip()
):
# In case the parameter has type definitions, it
# will have a colon
if ":" in current_line:
parameters, parameter_type = current_line.split(":", 1)
# Else, we simply have the list of parameters defined
# on the current line.
else:
parameters = current_line.strip()
# Numpy allows grouping of multiple parameters of same
# type in the same line. They are comma separated.
parameter_list = parameters.split(",")
for parameter in parameter_list:
docstring_args.add(parameter.strip())
yield from ConventionChecker._check_missing_args(
docstring_args, definition
)
@staticmethod
def _check_args_section(docstring, definition, context):
"""D417: `Args` section checks.
Check for a valid `Args` or `Argument` section. Checks that:
* The section documents all function arguments (D417)
except `self` or `cls` if it is a method.
Documentation for each arg should start at the same indentation
level. For example, in this case x and y are distinguishable::
Args:
x: Lorem ipsum dolor sit amet
y: Ut enim ad minim veniam
In the case below, we only recognize x as a documented parameter
because the rest of the content is indented as if it belongs
to the description for x::
Args:
x: Lorem ipsum dolor sit amet
y: Ut enim ad minim veniam
"""
docstring_args = set()
# normalize leading whitespace
if context.following_lines:
# any lines with shorter indent than the first one should be disregarded
first_line = context.following_lines[0]
leading_whitespaces = first_line[: -len(first_line.lstrip())]
args_content = dedent(
"\n".join(
[
line
for line in context.following_lines
if line.startswith(leading_whitespaces) or line == ""
]
)
).strip()
args_sections = []
for line in args_content.splitlines(keepends=True):
if not line[:1].isspace():
# This line is the start of documentation for the next
# parameter because it doesn't start with any whitespace.
args_sections.append(line)
else:
# This is a continuation of documentation for the last
# parameter because it does start with whitespace.
args_sections[-1] += line
for section in args_sections:
match = ConventionChecker.GOOGLE_ARGS_REGEX.match(section)
if match:
docstring_args.add(match.group(1))
yield from ConventionChecker._check_missing_args(
docstring_args, definition
)
@staticmethod
def _check_missing_args(docstring_args, definition):
"""D417: Yield error for missing arguments in docstring.
Given a list of arguments found in the docstring and the
callable definition, it checks if all the arguments of the
callable are present in the docstring, else it yields a
D417 with a list of missing arguments.
"""
if isinstance(definition, Function):
function_args = get_function_args(definition.source)
# If the method isn't static, then we skip the first
# positional argument as it is `cls` or `self`
if definition.kind == 'method' and not definition.is_static:
function_args = function_args[1:]
# Filtering out any arguments prefixed with `_` marking them
# as private.
function_args = [
arg_name
for arg_name in function_args
if not is_def_arg_private(arg_name)
]
missing_args = set(function_args) - docstring_args
if missing_args:
yield violations.D417(
", ".join(sorted(missing_args)), definition.name
)
@classmethod
def _check_google_section(cls, docstring, definition, context):
"""D416: Google-style section name checks.
Check for valid section names. Checks that:
* The section does not contain any blank line between its name
and content (D412).
* The section is not empty (D414).
* The section name has colon as a suffix (D416).
Additionally, also yield all violations from `_check_common_section`
which are style-agnostic section checks.
"""
capitalized_section = context.section_name.title()
yield from cls._check_common_section(
docstring, definition, context, cls.GOOGLE_SECTION_NAMES
)
suffix = context.line.strip().lstrip(context.section_name)
if suffix != ":":
yield violations.D416(
capitalized_section + ":", context.line.strip()
)
if capitalized_section in ("Args", "Arguments"):
yield from cls._check_args_section(docstring, definition, context)
@staticmethod
def _get_section_contexts(lines, valid_section_names):
"""Generate `SectionContext` objects for valid sections.
Given a list of `valid_section_names`, generate an
`Iterable[SectionContext]` which provides:
* Section Name
* String value of the previous line
* The section line
* Following lines till the next section
* Line index of the beginning of the section in the docstring
* Boolean indicating whether the section is the last section.
for each valid section.
"""
lower_section_names = [s.lower() for s in valid_section_names]
def _suspected_as_section(_line):
result = get_leading_words(_line.lower())
return result in lower_section_names
# Finding our suspects.
suspected_section_indices = [
i for i, line in enumerate(lines) if _suspected_as_section(line)
]
SectionContext = namedtuple(
'SectionContext',
(
'section_name',
'previous_line',
'line',
'following_lines',
'original_index',
'is_last_section',
),
)
# First - create a list of possible contexts. Note that the
# `following_lines` member is until the end of the docstring.
contexts = (
SectionContext(
get_leading_words(lines[i].strip()),
lines[i - 1],
lines[i],
lines[i + 1 :],
i,
False,
)
for i in suspected_section_indices
)
# Now that we have manageable objects - rule out false positives.
contexts = (
c for c in contexts if ConventionChecker._is_docstring_section(c)
)
# Now we shall trim the `following lines` field to only reach the
# next section name.
for a, b in pairwise(contexts, None):
end = -1 if b is None else b.original_index
yield SectionContext(
a.section_name,
a.previous_line,
a.line,
lines[a.original_index + 1 : end],
a.original_index,
b is None,
)
def _check_numpy_sections(self, lines, definition, docstring):
"""NumPy-style docstring sections checks.
Check the general format of a sectioned docstring:
'''This is my one-liner.
Short Summary
-------------
This is my summary.
Returns
-------
None.
'''
Section names appear in `NUMPY_SECTION_NAMES`.
Yields all violation from `_check_numpy_section` for each valid
Numpy-style section.
"""
found_any_numpy_section = False
for ctx in self._get_section_contexts(lines, self.NUMPY_SECTION_NAMES):
found_any_numpy_section = True
yield from self._check_numpy_section(docstring, definition, ctx)
return found_any_numpy_section
def _check_google_sections(self, lines, definition, docstring):
"""Google-style docstring section checks.
Check the general format of a sectioned docstring:
'''This is my one-liner.
Note:
This is my summary.
Returns:
None.
'''
Section names appear in `GOOGLE_SECTION_NAMES`.
Yields all violation from `_check_google_section` for each valid
Google-style section.
"""
for ctx in self._get_section_contexts(
lines, self.GOOGLE_SECTION_NAMES
):
yield from self._check_google_section(docstring, definition, ctx)
@check_for(Definition)
def check_docstring_sections(self, definition, docstring):
"""Check for docstring sections."""
if not docstring:
return
lines = docstring.split("\n")
if len(lines) < 2:
return
found_numpy = yield from self._check_numpy_sections(
lines, definition, docstring
)
if not found_numpy:
yield from self._check_google_sections(
lines, definition, docstring
)
parse = Parser()
def check(
filenames,
select=None,
ignore=None,
ignore_decorators=None,
property_decorators=None,
ignore_inline_noqa=False,
ignore_self_only_init=False,
):
"""Generate docstring errors that exist in `filenames` iterable.
By default, the PEP-257 convention is checked. To specifically define the
set of error codes to check for, supply either `select` or `ignore` (but
not both). In either case, the parameter should be a collection of error
code strings, e.g., {'D100', 'D404'}.
When supplying `select`, only specified error codes will be reported.
When supplying `ignore`, all error codes which were not specified will be
reported.
Note that ignored error codes refer to the entire set of possible
error codes, which is larger than just the PEP-257 convention. For your
convenience, you may use `pydocstyle.violations.conventions.pep257` as
a base set to add or remove errors from.
`ignore_inline_noqa` controls if `# noqa` comments are respected or not.
`ignore_self_only_init` controls if D107 is reported on __init__ only containing `self`.
Examples
--------
>>> check(['pydocstyle.py'])
>>> check(['pydocstyle.py'], select=['D100'])
>>> check(['pydocstyle.py'], ignore=conventions.pep257 - {'D100'})
"""
if select is not None and ignore is not None:
raise IllegalConfiguration(
'Cannot pass both select and ignore. '
'They are mutually exclusive.'
)
elif select is not None:
checked_codes = select
elif ignore is not None:
checked_codes = list(
set(violations.ErrorRegistry.get_error_codes()) - set(ignore)
)
else:
checked_codes = violations.conventions.pep257
for filename in filenames:
log.info('Checking file %s.', filename)
try:
with tk.open(filename) as file:
source = file.read()
for error in ConventionChecker().check_source(
source,
filename,
ignore_decorators,
property_decorators,
ignore_inline_noqa,
ignore_self_only_init,
):
code = getattr(error, 'code', None)
if code in checked_codes:
yield error
except (OSError, AllError, ParseError) as error:
log.warning('Error in file %s: %s', filename, error)
yield error
except tk.TokenError:
yield SyntaxError('invalid syntax in file %s' % filename)
def is_ascii(string):
"""Return a boolean indicating if `string` only has ascii characters."""
return all(ord(char) < 128 for char in string)
def leading_space(string):
"""Return any leading space from `string`."""
return re(r'\s*').match(string).group()
def get_leading_words(line):
"""Return any leading set of words from `line`.
For example, if `line` is " Hello world!!!", returns "Hello world".
"""
result = re(r"[\w ]+").match(line.strip())
if result is not None:
return result.group()
def is_def_arg_private(arg_name):
"""Return a boolean indicating if the argument name is private."""
return arg_name.startswith("_")
def get_function_args(function_source):
"""Return the function arguments given the source-code string."""
# We are stripping the whitespace from the left of the
# function source.
# This is so that if the docstring has incorrectly
# indented lines, which are at a lower indent than the
# function source, we still dedent the source correctly
# and the AST parser doesn't throw an error.
try:
function_arg_node = ast.parse(function_source.lstrip()).body[0].args
except SyntaxError:
# If we still get a syntax error, we don't want the
# the checker to crash. Instead we just return a blank list.
return []
arg_nodes = function_arg_node.args
kwonly_arg_nodes = function_arg_node.kwonlyargs
return [arg_node.arg for arg_node in chain(arg_nodes, kwonly_arg_nodes)]
pydocstyle-6.3.0/src/pydocstyle/cli.py 0000664 0000000 0000000 00000005661 14361602300 0020031 0 ustar 00root root 0000000 0000000 """Command line interface for pydocstyle."""
import logging
import sys
from .checker import check
from .config import ConfigurationParser, IllegalConfiguration
from .utils import log
from .violations import Error
__all__ = ('main',)
class ReturnCode:
no_violations_found = 0
violations_found = 1
invalid_options = 2
def run_pydocstyle():
log.setLevel(logging.DEBUG)
conf = ConfigurationParser()
setup_stream_handlers(conf.get_default_run_configuration())
try:
conf.parse()
except IllegalConfiguration:
return ReturnCode.invalid_options
run_conf = conf.get_user_run_configuration()
# Reset the logger according to the command line arguments
setup_stream_handlers(run_conf)
log.debug("starting in debug mode.")
Error.explain = run_conf.explain
Error.source = run_conf.source
errors = []
try:
for (
filename,
checked_codes,
ignore_decorators,
property_decorators,
ignore_self_only_init,
) in conf.get_files_to_check():
errors.extend(
check(
(filename,),
select=checked_codes,
ignore_decorators=ignore_decorators,
property_decorators=property_decorators,
ignore_self_only_init=ignore_self_only_init,
)
)
except IllegalConfiguration as error:
# An illegal configuration file was found during file generation.
log.error(error.args[0])
return ReturnCode.invalid_options
count = 0
for error in errors: # type: ignore
if hasattr(error, 'code'):
sys.stdout.write('%s\n' % error)
count += 1
if count == 0:
exit_code = ReturnCode.no_violations_found
else:
exit_code = ReturnCode.violations_found
if run_conf.count:
print(count)
return exit_code
def main():
"""Run pydocstyle as a script."""
try:
sys.exit(run_pydocstyle())
except KeyboardInterrupt:
pass
def setup_stream_handlers(conf):
"""Set up logging stream handlers according to the options."""
class StdoutFilter(logging.Filter):
def filter(self, record):
return record.levelno in (logging.DEBUG, logging.INFO)
log.handlers = []
stdout_handler = logging.StreamHandler(sys.stdout)
stdout_handler.setLevel(logging.WARNING)
stdout_handler.addFilter(StdoutFilter())
if conf.debug:
stdout_handler.setLevel(logging.DEBUG)
elif conf.verbose:
stdout_handler.setLevel(logging.INFO)
else:
stdout_handler.setLevel(logging.WARNING)
log.addHandler(stdout_handler)
stderr_handler = logging.StreamHandler(sys.stderr)
msg_format = "%(levelname)s: %(message)s"
stderr_handler.setFormatter(logging.Formatter(fmt=msg_format))
stderr_handler.setLevel(logging.WARNING)
log.addHandler(stderr_handler)
pydocstyle-6.3.0/src/pydocstyle/config.py 0000664 0000000 0000000 00000101320 14361602300 0020514 0 ustar 00root root 0000000 0000000 """Configuration file parsing and utilities."""
import copy
import itertools
import operator
import os
import sys
from collections import namedtuple
from collections.abc import Set
from configparser import NoOptionError, NoSectionError, RawConfigParser
from functools import reduce
from re import compile as re
from ._version import __version__
from .utils import log
from .violations import ErrorRegistry, conventions
if sys.version_info >= (3, 11):
import tomllib
else:
try:
import tomli as tomllib
except ImportError: # pragma: no cover
tomllib = None # type: ignore
def check_initialized(method):
"""Check that the configuration object was initialized."""
def _decorator(self, *args, **kwargs):
if self._arguments is None or self._options is None:
raise RuntimeError('using an uninitialized configuration')
return method(self, *args, **kwargs)
return _decorator
class TomlParser:
"""ConfigParser that partially mimics RawConfigParser but for toml files.
See RawConfigParser for more info. Also, please note that not all
RawConfigParser functionality is implemented, but only the subset that is
currently used by pydocstyle.
"""
def __init__(self):
"""Create a toml parser."""
self._config = {}
def read(self, filenames, encoding=None):
"""Read and parse a filename or an iterable of filenames.
Files that cannot be opened are silently ignored; this is
designed so that you can specify an iterable of potential
configuration file locations (e.g. current directory, user's
home directory, systemwide directory), and all existing
configuration files in the iterable will be read. A single
filename may also be given.
Return list of successfully read files.
"""
if isinstance(filenames, (str, bytes, os.PathLike)):
filenames = [filenames]
read_ok = []
for filename in filenames:
try:
with open(filename, "rb") as fp:
if not tomllib:
log.warning(
"The %s configuration file was ignored, "
"because the `tomli` package is not installed.",
filename,
)
continue
self._config.update(tomllib.load(fp))
except OSError:
continue
if isinstance(filename, os.PathLike):
filename = os.fspath(filename)
read_ok.append(filename)
return read_ok
def _get_section(self, section, allow_none=False):
try:
current = reduce(
operator.getitem,
section.split('.'),
self._config['tool'],
)
except KeyError:
current = None
if isinstance(current, dict):
return current
elif allow_none:
return None
else:
raise NoSectionError(section)
def has_section(self, section):
"""Indicate whether the named section is present in the configuration."""
return self._get_section(section, allow_none=True) is not None
def options(self, section):
"""Return a list of option names for the given section name."""
current = self._get_section(section)
return list(current.keys())
def get(self, section, option, *, _conv=None):
"""Get an option value for a given section."""
d = self._get_section(section)
option = option.lower()
try:
value = d[option]
except KeyError:
raise NoOptionError(option, section)
if isinstance(value, dict):
raise TypeError(
f"Expected {section}.{option} to be an option, not a section."
)
# toml should convert types automatically
# don't manually convert, just check, that the type is correct
if _conv is not None and not isinstance(value, _conv):
raise TypeError(
f"The type of {section}.{option} should be {_conv}"
)
return value
def getboolean(self, section, option):
"""Get a boolean option value for a given section."""
return self.get(section, option, _conv=bool)
def getint(self, section, option):
"""Get an integer option value for a given section."""
return self.get(section, option, _conv=int)
class ConfigurationParser:
"""Responsible for parsing configuration from files and CLI.
There are 2 types of configurations: Run configurations and Check
configurations.
Run Configurations:
------------------
Responsible for deciding things that are related to the user interface and
configuration discovery, e.g. verbosity, debug options, etc.
All run configurations default to `False` or `None` and are decided only
by CLI.
Check Configurations:
--------------------
Configurations that are related to which files and errors will be checked.
These are configurable in 2 ways: using the CLI, and using configuration
files.
Configuration files are nested within the file system, meaning that the
closer a configuration file is to a checked file, the more relevant it will
be. For instance, imagine this directory structure:
A
+-- tox.ini: sets `select=D100`
+-- B
+-- foo.py
+-- tox.ini: sets `add-ignore=D100`
Then `foo.py` will not be checked for `D100`.
The configuration build algorithm is described in `self._get_config`.
Note: If any of `BASE_ERROR_SELECTION_OPTIONS` was selected in the CLI, all
configuration files will be ignored and each file will be checked for
the error codes supplied in the CLI.
"""
CONFIG_FILE_OPTIONS = (
'convention',
'select',
'ignore',
'add-select',
'add-ignore',
'match',
'match-dir',
'ignore-decorators',
'ignore-self-only-init',
)
BASE_ERROR_SELECTION_OPTIONS = ('ignore', 'select', 'convention')
DEFAULT_MATCH_RE = r'(?!test_).*\.py'
DEFAULT_MATCH_DIR_RE = r'[^\.].*'
DEFAULT_IGNORE_DECORATORS_RE = ''
DEFAULT_PROPERTY_DECORATORS = (
"property,cached_property,functools.cached_property"
)
DEFAULT_CONVENTION = conventions.pep257
DEFAULT_IGNORE_SELF_ONLY_INIT = False
PROJECT_CONFIG_FILES = (
'setup.cfg',
'tox.ini',
'.pydocstyle',
'.pydocstyle.ini',
'.pydocstylerc',
'.pydocstylerc.ini',
'pyproject.toml',
# The following is deprecated, but remains for backwards compatibility.
'.pep257',
)
POSSIBLE_SECTION_NAMES = ('pydocstyle', 'pep257')
def __init__(self):
"""Create a configuration parser."""
self._cache = {}
self._override_by_cli = None
self._options = self._arguments = self._run_conf = None
self._parser = self._create_option_parser()
# ---------------------------- Public Methods -----------------------------
def get_default_run_configuration(self):
"""Return a `RunConfiguration` object set with default values."""
options, _ = self._parse_args([])
return self._create_run_config(options)
def parse(self):
"""Parse the configuration.
If one of `BASE_ERROR_SELECTION_OPTIONS` was selected, overrides all
error codes to check and disregards any error code related
configurations from the configuration files.
"""
self._options, self._arguments = self._parse_args()
self._arguments = self._arguments or ['.']
if not self._validate_options(self._options):
raise IllegalConfiguration()
self._run_conf = self._create_run_config(self._options)
config = self._create_check_config(self._options, use_defaults=False)
self._override_by_cli = config
@check_initialized
def get_user_run_configuration(self):
"""Return the run configuration for the script."""
return self._run_conf
@check_initialized
def get_files_to_check(self):
"""Generate files and error codes to check on each one.
Walk dir trees under `self._arguments` and yield file names
that `match` under each directory that `match_dir`.
The method locates the configuration for each file name and yields a
tuple of (filename, [error_codes]).
With every discovery of a new configuration file `IllegalConfiguration`
might be raised.
"""
def _get_matches(conf):
"""Return the `match` and `match_dir` functions for `config`."""
match_func = re(conf.match + '$').match
match_dir_func = re(conf.match_dir + '$').match
return match_func, match_dir_func
def _get_ignore_decorators(conf):
"""Return the `ignore_decorators` as None or regex."""
return (
re(conf.ignore_decorators) if conf.ignore_decorators else None
)
def _get_property_decorators(conf):
"""Return the `property_decorators` as None or set."""
return (
set(conf.property_decorators.split(","))
if conf.property_decorators
else None
)
for name in self._arguments:
if os.path.isdir(name):
for root, dirs, filenames in os.walk(name):
config = self._get_config(os.path.abspath(root))
match, match_dir = _get_matches(config)
ignore_decorators = _get_ignore_decorators(config)
property_decorators = _get_property_decorators(config)
# Skip any dirs that do not match match_dir
dirs[:] = [d for d in dirs if match_dir(d)]
for filename in map(os.path.basename, filenames):
if match(filename):
full_path = os.path.join(root, filename)
yield (
full_path,
list(config.checked_codes),
ignore_decorators,
property_decorators,
config.ignore_self_only_init,
)
else:
config = self._get_config(os.path.abspath(name))
match, _ = _get_matches(config)
ignore_decorators = _get_ignore_decorators(config)
property_decorators = _get_property_decorators(config)
if match(os.path.basename(name)):
yield (
name,
list(config.checked_codes),
ignore_decorators,
property_decorators,
config.ignore_self_only_init,
)
# --------------------------- Private Methods -----------------------------
def _get_config_by_discovery(self, node):
"""Get a configuration for checking `node` by config discovery.
Config discovery happens when no explicit config file is specified. The
file system is searched for config files starting from the directory
containing the file being checked, and up until the root directory of
the project.
See `_get_config` for further details.
"""
path = self._get_node_dir(node)
if path in self._cache:
return self._cache[path]
config_file = self._get_config_file_in_folder(path)
if config_file is None:
parent_dir, tail = os.path.split(path)
if tail:
# No configuration file, simply take the parent's.
config = self._get_config(parent_dir)
else:
# There's no configuration file and no parent directory.
# Use the default configuration or the one given in the CLI.
config = self._create_check_config(self._options)
else:
# There's a config file! Read it and merge if necessary.
options, inherit = self._read_configuration_file(config_file)
parent_dir, tail = os.path.split(path)
if tail and inherit:
# There is a parent dir and we should try to merge.
parent_config = self._get_config(parent_dir)
config = self._merge_configuration(parent_config, options)
else:
# No need to merge or parent dir does not exist.
config = self._create_check_config(options)
return config
def _get_config(self, node):
"""Get and cache the run configuration for `node`.
If no configuration exists (not local and not for the parent node),
returns and caches a default configuration.
The algorithm:
-------------
* If the current directory's configuration exists in
`self._cache` - return it.
* If a configuration file does not exist in this directory:
* If the directory is not a root directory:
* Cache the parent directory's configuration as this directory's and
return it.
* Else:
* Cache a default configuration and return it.
* Else:
* Read the configuration file.
* If a parent directory exists AND the configuration file
allows inheritance:
* Read the parent configuration by calling this function with the
parent directory as `node`.
* Merge the parent configuration with the current one and
cache it.
* If the user has specified one of `BASE_ERROR_SELECTION_OPTIONS` in
the CLI - return the CLI configuration with the configuration match
clauses.
* Apply the `--add-select` and `--add-ignore` CLI configurations.
"""
if self._run_conf.config is None:
log.debug('No config file specified, discovering.')
config = self._get_config_by_discovery(node)
else:
log.debug('Using config file %r', self._run_conf.config)
if not os.path.exists(self._run_conf.config):
raise IllegalConfiguration(
'Configuration file {!r} specified '
'via --config was not found.'.format(self._run_conf.config)
)
if None in self._cache:
return self._cache[None]
options, _ = self._read_configuration_file(self._run_conf.config)
if options is None:
log.warning(
'Configuration file does not contain a '
'pydocstyle section. Using default configuration.'
)
config = self._create_check_config(self._options)
else:
config = self._create_check_config(options)
# Make the CLI always win
final_config = {}
for attr in CheckConfiguration._fields:
cli_val = getattr(self._override_by_cli, attr)
conf_val = getattr(config, attr)
final_config[attr] = cli_val if cli_val is not None else conf_val
config = CheckConfiguration(**final_config)
self._set_add_options(config.checked_codes, self._options)
# Handle caching
if self._run_conf.config is not None:
self._cache[None] = config
else:
self._cache[self._get_node_dir(node)] = config
return config
@staticmethod
def _get_node_dir(node):
"""Return the absolute path of the directory of a filesystem node."""
path = os.path.abspath(node)
return path if os.path.isdir(path) else os.path.dirname(path)
def _read_configuration_file(self, path):
"""Try to read and parse `path` as a configuration file.
If the configurations were illegal (checked with
`self._validate_options`), raises `IllegalConfiguration`.
Returns (options, should_inherit).
"""
if path.endswith('.toml'):
parser = TomlParser()
else:
parser = RawConfigParser(inline_comment_prefixes=('#', ';'))
options = None
should_inherit = True
if parser.read(path) and self._get_section_name(parser):
all_options = self._parser.option_list[:]
for group in self._parser.option_groups:
all_options.extend(group.option_list)
option_list = {o.dest: o.type or o.action for o in all_options}
# First, read the default values
new_options, _ = self._parse_args([])
# Second, parse the configuration
section_name = self._get_section_name(parser)
for opt in parser.options(section_name):
if opt == 'inherit':
should_inherit = parser.getboolean(section_name, opt)
continue
if opt.replace('_', '-') not in self.CONFIG_FILE_OPTIONS:
log.warning(f"Unknown option '{opt}' ignored")
continue
normalized_opt = opt.replace('-', '_')
opt_type = option_list[normalized_opt]
if opt_type in ('int', 'count'):
value = parser.getint(section_name, opt)
elif opt_type == 'string':
value = parser.get(section_name, opt)
else:
assert opt_type in ('store_true', 'store_false')
value = parser.getboolean(section_name, opt)
setattr(new_options, normalized_opt, value)
# Third, fix the set-options
options = self._fix_set_options(new_options)
if options is not None:
if not self._validate_options(options):
raise IllegalConfiguration(f'in file: {path}')
return options, should_inherit
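# Illustrative sketch of a configuration file this method accepts; the section
# name and keys below mirror options defined elsewhere in this class and should
# be treated as an example, not a specification:
#   [pydocstyle]
#   inherit = false
#   convention = numpy
#   add-ignore = D105,D107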
def _merge_configuration(self, parent_config, child_options):
"""Merge parent config into the child options.
The merging process requires an `options` object for the child in
order to distinguish between the mutually exclusive codes and the
add-select / add-ignore error codes.
"""
# Copy the parent error codes so we won't override them
error_codes = copy.deepcopy(parent_config.checked_codes)
if self._has_exclusive_option(child_options):
error_codes = self._get_exclusive_error_codes(child_options)
self._set_add_options(error_codes, child_options)
kwargs = dict(checked_codes=error_codes)
for key in (
'match',
'match_dir',
'ignore_decorators',
'property_decorators',
'ignore_self_only_init',
):
child_value = getattr(child_options, key)
kwargs[key] = (
child_value
if child_value is not None
else getattr(parent_config, key)
)
return CheckConfiguration(**kwargs)
def _parse_args(self, args=None, values=None):
"""Parse the options using `self._parser` and reformat the options."""
options, arguments = self._parser.parse_args(args, values)
return self._fix_set_options(options), arguments
@staticmethod
def _create_run_config(options):
"""Create a `RunConfiguration` object from `options`."""
values = {
opt: getattr(options, opt) for opt in RunConfiguration._fields
}
return RunConfiguration(**values)
@classmethod
def _create_check_config(cls, options, use_defaults=True):
"""Create a `CheckConfiguration` object from `options`.
If `use_defaults`, any of the match options that are `None` will
be replaced with their default value and the default convention will be
set for the checked codes.
"""
checked_codes = None
if cls._has_exclusive_option(options) or use_defaults:
checked_codes = cls._get_checked_errors(options)
kwargs = dict(checked_codes=checked_codes)
defaults = {
'match': "MATCH_RE",
'match_dir': "MATCH_DIR_RE",
'ignore_decorators': "IGNORE_DECORATORS_RE",
'property_decorators': "PROPERTY_DECORATORS",
'ignore_self_only_init': "IGNORE_SELF_ONLY_INIT",
}
for key, default in defaults.items():
kwargs[key] = (
getattr(cls, f"DEFAULT_{default}")
if getattr(options, key) is None and use_defaults
else getattr(options, key)
)
return CheckConfiguration(**kwargs)
@classmethod
def _get_section_name(cls, parser):
"""Parse options from relevant section."""
for section_name in cls.POSSIBLE_SECTION_NAMES:
if parser.has_section(section_name):
return section_name
return None
@classmethod
def _get_config_file_in_folder(cls, path):
"""Look for a configuration file in `path`.
If exists return its full path, otherwise None.
"""
if os.path.isfile(path):
path = os.path.dirname(path)
for fn in cls.PROJECT_CONFIG_FILES:
if fn.endswith('.toml'):
config = TomlParser()
else:
config = RawConfigParser(inline_comment_prefixes=('#', ';'))
full_path = os.path.join(path, fn)
if config.read(full_path) and cls._get_section_name(config):
return full_path
@classmethod
def _get_exclusive_error_codes(cls, options):
"""Extract the error codes from the selected exclusive option."""
codes = set(ErrorRegistry.get_error_codes())
checked_codes = None
if options.ignore is not None:
ignored = cls._expand_error_codes(options.ignore)
checked_codes = codes - ignored
elif options.select is not None:
checked_codes = cls._expand_error_codes(options.select)
elif options.convention is not None:
checked_codes = getattr(conventions, options.convention)
# Copy so as not to mutate the conventions or the options.
return copy.deepcopy(checked_codes)
@classmethod
def _set_add_options(cls, checked_codes, options):
"""Set `checked_codes` by the `add_ignore` or `add_select` options."""
checked_codes |= cls._expand_error_codes(options.add_select)
checked_codes -= cls._expand_error_codes(options.add_ignore)
@staticmethod
def _expand_error_codes(code_parts):
"""Return an expanded set of error codes to ignore."""
codes = set(ErrorRegistry.get_error_codes())
expanded_codes = set()
try:
for part in code_parts:
# Handle configurations split across lines; the part might begin
# with whitespace due to the newline character.
part = part.strip()
if not part:
continue
codes_to_add = {
code for code in codes if code.startswith(part)
}
if not codes_to_add:
log.warning(
'Error code passed is not a prefix of any '
'known errors: %s',
part,
)
expanded_codes.update(codes_to_add)
except TypeError as e:
raise IllegalConfiguration(e) from e
return expanded_codes
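# Sketch of the prefix expansion above (assumed inputs for illustration):
#   _expand_error_codes({'D2'})   -> every registered D2xx code
#   _expand_error_codes({'D403'}) -> {'D403'}
#   _expand_error_codes({'D9'})   -> set(), after logging an "unknown prefix" warning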
@classmethod
def _get_checked_errors(cls, options):
"""Extract the codes needed to be checked from `options`."""
checked_codes = cls._get_exclusive_error_codes(options)
if checked_codes is None:
checked_codes = cls.DEFAULT_CONVENTION
cls._set_add_options(checked_codes, options)
return checked_codes
@classmethod
def _validate_options(cls, options):
"""Validate the mutually exclusive options.
Return `True` iff at most one of `BASE_ERROR_SELECTION_OPTIONS`
was selected.
"""
for opt1, opt2 in itertools.permutations(
cls.BASE_ERROR_SELECTION_OPTIONS, 2
):
if getattr(options, opt1) and getattr(options, opt2):
log.error(
'Cannot pass both {} and {}. They are '
'mutually exclusive.'.format(opt1, opt2)
)
return False
if options.convention and options.convention not in conventions:
log.error(
"Illegal convention '{}'. Possible conventions: {}".format(
options.convention, ', '.join(conventions.keys())
)
)
return False
return True
@classmethod
def _has_exclusive_option(cls, options):
"""Return `True` iff one or more exclusive options were selected."""
return any(
[
getattr(options, opt) is not None
for opt in cls.BASE_ERROR_SELECTION_OPTIONS
]
)
@classmethod
def _fix_set_options(cls, options):
"""Alter the set options from None/strings to sets in place."""
optional_set_options = ('ignore', 'select')
mandatory_set_options = ('add_ignore', 'add_select')
def _get_set(value_str):
"""Split `value_str` by the delimiter `,` and return a set.
Removes empty values ('') and strips whitespace.
Also expands error code prefixes, to avoid doing this for every
file.
"""
if isinstance(value_str, str):
value_str = value_str.split(",")
return cls._expand_error_codes(
{x.strip() for x in value_str} - {""}
)
for opt in optional_set_options:
value = getattr(options, opt)
if value is not None:
setattr(options, opt, _get_set(value))
for opt in mandatory_set_options:
value = getattr(options, opt)
if value is None:
value = ''
if not isinstance(value, Set):
value = _get_set(value)
setattr(options, opt, value)
return options
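# Sketch of the normalization above (assumed inputs for illustration):
#   options.ignore = 'D100, D2'  -> {'D100'} plus every D2xx code
#   options.add_ignore = None    -> set()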
@classmethod
def _create_option_parser(cls):
"""Return an option parser to parse the command line arguments."""
from optparse import OptionGroup, OptionParser
parser = OptionParser(
version=__version__,
usage='Usage: pydocstyle [options] [...]',
)
option = parser.add_option
# Run configuration options
option(
'-e',
'--explain',
action='store_true',
default=False,
help='show explanation of each error',
)
option(
'-s',
'--source',
action='store_true',
default=False,
help='show source for each error',
)
option(
'-d',
'--debug',
action='store_true',
default=False,
help='print debug information',
)
option(
'-v',
'--verbose',
action='store_true',
default=False,
help='print status information',
)
option(
'--count',
action='store_true',
default=False,
help='print total number of errors to stdout',
)
option(
'--config',
metavar='',
default=None,
help='use given config file and disable config discovery',
)
parser.add_option_group(
OptionGroup(
parser,
'Note',
'When using --match, --match-dir or --ignore-decorators consider '
'whether you should use a single quote (\') or a double quote ("), '
'depending on your OS, Shell, etc.',
)
)
check_group = OptionGroup(
parser,
'Error Check Options',
'Only one of --select, --ignore or --convention can be '
'specified. If none is specified, defaults to '
'`--convention=pep257`. These three options select the "basic '
'list" of error codes to check. If you wish to change that list '
'(for example, if you selected a known convention but wish to '
'ignore a specific error from it or add a new one) you can '
'use `--add-[ignore/select]` in order to do so.',
)
add_check = check_group.add_option
# Error check options
add_check(
'--select',
metavar='',
default=None,
help='choose the basic list of checked errors by '
'specifying which errors to check for (with a list of '
'comma-separated error codes or prefixes). '
'for example: --select=D101,D2',
)
add_check(
'--ignore',
metavar='',
default=None,
help='choose the basic list of checked errors by '
'specifying which errors to ignore out of all of the '
'available error codes (with a list of '
'comma-separated error codes or prefixes). '
'for example: --ignore=D101,D2',
)
add_check(
'--convention',
metavar='',
default=None,
help='choose the basic list of checked errors by specifying '
'an existing convention. Possible conventions: {}.'.format(
', '.join(conventions)
),
)
add_check(
'--add-select',
metavar='',
default=None,
help='add extra error codes to check to the basic list of '
'errors previously set by --select, --ignore or '
'--convention.',
)
add_check(
'--add-ignore',
metavar='',
default=None,
help='ignore extra error codes by removing them from the '
'basic list previously set by --select, --ignore '
'or --convention.',
)
add_check(
'--ignore-self-only-init',
default=None,
action='store_true',
help='ignore __init__ methods which only have a self param.',
)
parser.add_option_group(check_group)
# Match clauses
option(
'--match',
metavar='',
default=None,
help=(
"check only files that exactly match regular "
"expression; default is --match='{}' which matches "
"files that don't start with 'test_' but end with "
"'.py'"
).format(cls.DEFAULT_MATCH_RE),
)
option(
'--match-dir',
metavar='',
default=None,
help=(
"search only dirs that exactly match regular "
"expression; default is --match-dir='{}', which "
"matches all dirs that don't start with "
"a dot"
).format(cls.DEFAULT_MATCH_DIR_RE),
)
# Decorators
option(
'--ignore-decorators',
metavar='',
default=None,
help=(
"ignore any functions or methods that are decorated "
"by a function with a name fitting the "
"regular expression; default is --ignore-decorators='{}'"
" which does not ignore any decorated functions.".format(
cls.DEFAULT_IGNORE_DECORATORS_RE
)
),
)
option(
'--property-decorators',
metavar='',
default=None,
help=(
"consider any method decorated with one of these "
"decorators as a property, and consequently allow "
"a docstring which is not in imperative mood; default "
"is --property-decorators='{}'".format(
cls.DEFAULT_PROPERTY_DECORATORS
)
),
)
return parser
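# Illustrative invocations accepted by the parser built above (a sketch, not an
# exhaustive reference):
#   pydocstyle --convention=numpy --add-ignore=D105 src/
#   pydocstyle --select=D1 --match='.*\.py' module.py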
# Check configuration - used by the ConfigurationParser class.
CheckConfiguration = namedtuple(
'CheckConfiguration',
(
'checked_codes',
'match',
'match_dir',
'ignore_decorators',
'property_decorators',
'ignore_self_only_init',
),
)
class IllegalConfiguration(Exception):
"""An exception for illegal configurations."""
pass
# General configurations for pydocstyle run.
RunConfiguration = namedtuple(
'RunConfiguration',
('explain', 'source', 'debug', 'verbose', 'count', 'config'),
)
pydocstyle-6.3.0/src/pydocstyle/data/ 0000775 0000000 0000000 00000000000 14361602300 0017611 5 ustar 00root root 0000000 0000000 pydocstyle-6.3.0/src/pydocstyle/data/imperatives.txt 0000664 0000000 0000000 00000003435 14361602300 0022707 0 ustar 00root root 0000000 0000000 # Imperative forms of verbs
#
# This file contains the imperative form of frequently encountered
# docstring verbs. Some of these may be more commonly encountered as
# nouns, but blacklisting them on that basis would cause false positives.
accept
access
add
adjust
aggregate
allow
append
apply
archive
assert
assign
attempt
authenticate
authorize
break
build
cache
calculate
call
cancel
capture
change
check
clean
clear
close
collect
combine
commit
compare
compute
configure
confirm
connect
construct
control
convert
copy
count
create
customize
declare
decode
decorate
define
delegate
delete
deprecate
derive
describe
detect
determine
display
download
drop
dump
emit
empty
enable
encapsulate
encode
end
ensure
enumerate
establish
evaluate
examine
execute
exit
expand
expect
export
extend
extract
feed
fetch
fill
filter
finalize
find
fire
fix
flag
force
format
forward
generate
get
give
go
group
handle
help
hold
identify
implement
import
indicate
init
initialise
initialize
initiate
input
insert
instantiate
intercept
invoke
iterate
join
keep
launch
list
listen
load
log
look
make
manage
manipulate
map
mark
match
merge
mock
modify
monitor
move
normalize
note
obtain
open
output
override
overwrite
package
pad
parse
partial
pass
perform
persist
pick
plot
poll
populate
post
prepare
print
process
produce
provide
publish
pull
put
query
raise
read
record
refer
refresh
register
reload
remove
rename
render
replace
reply
report
represent
request
require
reset
resolve
retrieve
return
roll
rollback
round
run
sample
save
scan
search
select
send
serialise
serialize
serve
set
show
simulate
source
specify
split
start
step
stop
store
strip
submit
subscribe
sum
swap
sync
synchronise
synchronize
take
tear
test
time
transform
translate
transmit
truncate
try
turn
tweak
update
upload
use
validate
verify
view
wait
walk
wrap
write
yield
pydocstyle-6.3.0/src/pydocstyle/data/imperatives_blacklist.txt 0000664 0000000 0000000 00000002643 14361602300 0024737 0 ustar 00root root 0000000 0000000 # Blacklisted imperative words
#
# These are words that, if they begin a docstring, are a good indicator that
# the docstring is not written in an imperative voice.
#
# The words included in this list fall into a number of categories:
#
# - Starting with a noun/pronoun indicates that the docstring is a noun phrase
# or a sentence but not in the imperative mood
# - Adjectives are always followed by a noun, so the same applies
# - Articles are also followed by a noun
# - Some adverbs don't really indicate an imperative sentence, for example
# "importantly" or "currently".
# - Some irregular verb forms don't stem to the same string as the
# imperative does (e.g. 'does')
a
an
the
action
always
api
base
basic
business
calculation
callback
collection
common
constructor
convenience
convenient
current
currently
custom
data
data
default
deprecated
description
dict
dictionary
does
dummy
example
factory
false
final
formula
function
generic
handler
handler
helper
here
hook
implementation
importantly
internal
it
main
method
module
new
number
optional
placeholder
reference
result
same
schema
setup
should
simple
some
special
sql
standard
static
string
subclasses
that
these
this
true
unique
unit
utility
what
wrapper
# These are nouns, but often used in the context of functions that act as
# objects; thus we do not blacklist these.
#
# context # as in context manager
# decorator
# class # as in class decorator
# property
# generator
pydocstyle-6.3.0/src/pydocstyle/parser.py 0000664 0000000 0000000 00000067203 14361602300 0020556 0 ustar 00root root 0000000 0000000 """Python code parser."""
import sys
import textwrap
import tokenize as tk
from io import StringIO
from itertools import chain, dropwhile
from pathlib import Path
from re import compile as re
from typing import Tuple
from .utils import log
__all__ = (
'Parser',
'Definition',
'Module',
'Package',
'Function',
'NestedFunction',
'Method',
'Class',
'NestedClass',
'AllError',
'StringIO',
'ParseError',
)
class ParseError(Exception):
"""An error parsing contents of a Python file."""
def __str__(self):
return "Cannot parse file."
class UnexpectedTokenError(ParseError):
def __init__(self, token, expected_kind):
self.token = token
self.expected_kind = expected_kind
def __str__(self):
return "Unexpected token {}, expected {}".format(
self.token, self.expected_kind
)
def humanize(string):
return re(r'(.)([A-Z]+)').sub(r'\1 \2', string).lower()
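# Example of the helper above: humanize('NestedClass') -> 'nested class';
# Definition.kind below keeps only the last word, e.g. 'class'.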
class Value:
"""A generic object with a list of preset fields."""
def __init__(self, *args):
if len(self._fields) != len(args):
raise ValueError(
'got {} arguments for {} fields for {}: {}'.format(
len(args),
len(self._fields),
self.__class__.__name__,
self._fields,
)
)
vars(self).update(zip(self._fields, args))
def __hash__(self):
return hash(repr(self))
def __eq__(self, other):
return other and vars(self) == vars(other)
def __repr__(self):
kwargs = ', '.join(
'{}={!r}'.format(field, getattr(self, field))
for field in self._fields
)
return f'{self.__class__.__name__}({kwargs})'
class Definition(Value):
"""A Python source code definition (could be class, function, etc)."""
_fields = (
'name',
'_source',
'start',
'end',
'decorators',
'docstring',
'children',
'callable_args',
'parent',
'skipped_error_codes',
) # type: Tuple[str, ...]
_human = property(lambda self: humanize(type(self).__name__))
kind = property(lambda self: self._human.split()[-1])
module = property(lambda self: self.parent.module)
dunder_all = property(lambda self: self.module.dunder_all)
_slice = property(lambda self: slice(self.start - 1, self.end))
is_class = False
def __iter__(self):
return chain([self], *self.children)
@property
def error_lineno(self):
"""Get the line number with which to report violations."""
if isinstance(self.docstring, Docstring):
return self.docstring.start
return self.start
@property
def _publicity(self):
return {True: 'public', False: 'private'}[self.is_public]
@property
def source(self):
"""Return the source code for the definition."""
full_src = self._source[self._slice]
def is_empty_or_comment(line):
return line.strip() == '' or line.strip().startswith('#')
filtered_src = dropwhile(is_empty_or_comment, reversed(full_src))
return ''.join(reversed(list(filtered_src)))
def __str__(self):
out = f'in {self._publicity} {self._human} `{self.name}`'
if self.skipped_error_codes:
out += f' (skipping {self.skipped_error_codes})'
return out
class Module(Definition):
"""A Python source code module."""
_fields = (
'name',
'_source',
'start',
'end',
'decorators',
'docstring',
'children',
'parent',
'_dunder_all',
'dunder_all_error',
'future_imports',
'skipped_error_codes',
)
_nest = staticmethod(lambda s: {'def': Function, 'class': Class}[s])
module = property(lambda self: self)
dunder_all = property(lambda self: self._dunder_all)
@property
def is_public(self):
"""Return True iff the module is considered public.
This helps determine if it requires a docstring.
"""
module_name = Path(self.name).stem
return not self._is_inside_private_package() and self._is_public_name(
module_name
)
def _is_inside_private_package(self):
"""Return True if the module is inside a private package."""
path = Path(self.name).parent # Ignore the actual module's name.
syspath = [Path(p) for p in sys.path] # Convert to pathlib.Path.
# Bail if we are at the root directory or in `PYTHONPATH`.
while path != path.parent and path not in syspath:
if self._is_private_name(path.name):
return True
path = path.parent
return False
def _is_public_name(self, module_name):
"""Determine whether a "module name" (i.e. module or package name) is public."""
return not module_name.startswith('_') or (
module_name.startswith('__') and module_name.endswith('__')
)
def _is_private_name(self, module_name):
"""Determine whether a "module name" (i.e. module or package name) is private."""
return not self._is_public_name(module_name)
def __str__(self):
return 'at module level'
class Package(Module):
"""A package is a __init__.py module."""
class Function(Definition):
"""A Python source code function."""
_nest = staticmethod(
lambda s: {'def': NestedFunction, 'class': NestedClass}[s]
)
@property
def is_public(self):
"""Return True iff this function should be considered public."""
if self.dunder_all is not None:
return self.name in self.dunder_all
else:
return not self.name.startswith('_')
@property
def is_overload(self):
"""Return True iff the method decorated with overload."""
return any(
decorator.name == "overload" for decorator in self.decorators
)
def is_property(self, property_decorator_names):
"""Return True if the method is decorated with any property decorator."""
return any(
decorator.name in property_decorator_names
for decorator in self.decorators
)
@property
def is_test(self):
"""Return True if this function is a test function/method.
We exclude tests from the imperative mood check, because to phrase
their docstring in the imperative mood, they would have to start with
a highly redundant "Test that ...".
"""
return self.name.startswith('test') or self.name == 'runTest'
@property
def param_names(self):
"""Return the parameter names."""
return self.callable_args
class NestedFunction(Function):
"""A Python source code nested function."""
is_public = False
class Method(Function):
"""A Python source code method."""
@property
def is_magic(self):
"""Return True iff this method is a magic method (e.g., `__str__`)."""
return (
self.name.startswith('__')
and self.name.endswith('__')
and self.name not in VARIADIC_MAGIC_METHODS
)
@property
def is_init(self):
"""Return True iff this method is `__init__`."""
return self.name == '__init__'
@property
def is_public(self):
"""Return True iff this method should be considered public."""
# Check if we are a setter/deleter method, and mark as private if so.
for decorator in self.decorators:
# Given 'foo', match 'foo.bar' but not 'foobar' or 'sfoo'
if re(fr"^{self.name}\.").match(decorator.name):
return False
name_is_public = (
not self.name.startswith('_')
or self.name in VARIADIC_MAGIC_METHODS
or self.is_magic
)
return self.parent.is_public and name_is_public
@property
def is_static(self):
"""Return True iff the method is static."""
for decorator in self.decorators:
if decorator.name == "staticmethod":
return True
return False
class Class(Definition):
"""A Python source code class."""
_nest = staticmethod(lambda s: {'def': Method, 'class': NestedClass}[s])
is_public = Function.is_public
is_class = True
class NestedClass(Class):
"""A Python source code nested class."""
@property
def is_public(self):
"""Return True iff this class should be considered public."""
return (
not self.name.startswith('_')
and self.parent.is_class
and self.parent.is_public
)
class Decorator(Value):
"""A decorator for function, method or class."""
_fields = 'name arguments'.split()
class Docstring(str):
"""Represent a docstring.
This is a string, but has additional start/end attributes representing
the start and end of the token.
"""
def __new__(cls, v, start, end):
return str.__new__(cls, v)
def __init__(self, v, start, end):
self.start = start
self.end = end
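# Example (sketch): Docstring subclasses str, so it compares and slices like a
# plain string while carrying the token's line span:
#   d = Docstring('"""Do it."""', 3, 3)
#   assert d == '"""Do it."""' and d.start == 3 and d.end == 3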
VARIADIC_MAGIC_METHODS = ('__init__', '__call__', '__new__')
class AllError(Exception):
"""Raised when there is a problem with __all__ when parsing."""
def __init__(self, message):
"""Initialize the error with a more specific message."""
Exception.__init__(
self,
message
+ textwrap.dedent(
"""
That means pydocstyle cannot decide which definitions are
public. Variable __all__ should be present at most once in
each file, in the form
`__all__ = ('a_public_function', 'APublicClass', ...)`.
More info on __all__: http://stackoverflow.com/q/44834/.
"""
),
)
class TokenStream:
# A logical newline is where a new expression or statement begins. When
# there is a physical newline that is not a logical one, for example:
# (x +
# y)
# the token will be tk.NL, not tk.NEWLINE.
LOGICAL_NEWLINES = {tk.NEWLINE, tk.INDENT, tk.DEDENT}
def __init__(self, filelike):
self._generator = tk.generate_tokens(filelike.readline)
self.current = Token(*next(self._generator, None))
self.line = self.current.start[0]
self.log = log
self.got_logical_newline = True
def move(self):
previous = self.current
current = self._next_from_generator()
self.current = None if current is None else Token(*current)
self.line = self.current.start[0] if self.current else self.line
is_logical_blank = previous.kind in (tk.NL, tk.COMMENT)
self.got_logical_newline = (
previous.kind in self.LOGICAL_NEWLINES
# Retain logical_newline status if last line was logically blank
or (self.got_logical_newline and is_logical_blank)
)
return previous
def _next_from_generator(self):
try:
return next(self._generator, None)
except (SyntaxError, tk.TokenError):
self.log.warning('error generating tokens', exc_info=True)
return None
def __iter__(self):
while True:
if self.current is not None:
yield self.current
else:
return
self.move()
class TokenKind(int):
def __repr__(self):
return "tk.{}".format(tk.tok_name[self])
class Token(Value):
_fields = 'kind value start end source'.split()
def __init__(self, *args):
super().__init__(*args)
self.kind = TokenKind(self.kind)
def __str__(self):
return f"{self.kind!r} ({self.value})"
class Parser:
"""A Python source code parser."""
def parse(self, filelike, filename):
"""Parse the given file-like object and return its Module object."""
self.log = log
self.source = filelike.readlines()
src = ''.join(self.source)
try:
compile(src, filename, 'exec')
except SyntaxError as error:
raise ParseError() from error
self.stream = TokenStream(StringIO(src))
self.filename = filename
self.dunder_all = None
self.dunder_all_error = None
self.future_imports = set()
self._accumulated_decorators = []
return self.parse_module()
# TODO: remove
def __call__(self, *args, **kwargs):
"""Call the parse method."""
return self.parse(*args, **kwargs)
current = property(lambda self: self.stream.current)
line = property(lambda self: self.stream.line)
def consume(self, kind):
"""Consume one token and verify it is of the expected kind."""
next_token = self.stream.move()
if next_token.kind != kind:
raise UnexpectedTokenError(token=next_token, expected_kind=kind)
def leapfrog(self, kind, value=None):
"""Skip tokens in the stream until a certain token kind is reached.
If `value` is specified, tokens whose values are different will also
be skipped.
"""
while self.current is not None:
if self.current.kind == kind and (
value is None or self.current.value == value
):
self.consume(kind)
return
self.stream.move()
def parse_docstring(self):
"""Parse a single docstring and return its value."""
self.log.debug("parsing docstring, token is %s", self.current)
while self.current.kind in (tk.COMMENT, tk.NEWLINE, tk.NL):
self.stream.move()
self.log.debug(
"parsing docstring, token is %r (%s)",
self.current.kind,
self.current.value,
)
if self.current.kind == tk.STRING:
docstring = Docstring(
self.current.value, self.current.start[0], self.current.end[0]
)
self.stream.move()
return docstring
return None
def parse_decorators(self):
"""Parse decorators into self._accumulated_decorators.
Called after first @ is found.
Continue to do so until encountering the 'def' or 'class' start token.
"""
name = []
arguments = []
at_arguments = False
while self.current is not None:
self.log.debug(
"parsing decorators, current token is %r (%s)",
self.current.kind,
self.current.value,
)
if self.current.kind == tk.NAME and self.current.value in [
'async',
'def',
'class',
]:
# Done with decorators - found function or class proper
break
elif self.current.kind == tk.OP and self.current.value == '@':
# New decorator found. Store the decorator accumulated so far:
self._accumulated_decorators.append(
Decorator(''.join(name), ''.join(arguments))
)
# Now reset to begin accumulating the new decorator:
name = []
arguments = []
at_arguments = False
elif self.current.kind == tk.OP and self.current.value == '(':
at_arguments = True
elif self.current.kind == tk.OP and self.current.value == ')':
# Ignore close parenthesis
pass
elif self.current.kind == tk.NEWLINE or self.current.kind == tk.NL:
# Ignore newlines
pass
else:
# Keep accumulating current decorator's name or argument.
if not at_arguments:
name.append(self.current.value)
else:
arguments.append(self.current.value)
self.stream.move()
# Add decorator accumulated so far
self._accumulated_decorators.append(
Decorator(''.join(name), ''.join(arguments))
)
def parse_definitions(self, class_, dunder_all=False):
"""Parse multiple definitions and yield them."""
while self.current is not None:
self.log.debug(
"parsing definition list, current token is %r (%s)",
self.current.kind,
self.current.value,
)
self.log.debug('got_newline: %s', self.stream.got_logical_newline)
if dunder_all and self.current.value == '__all__':
self.parse_dunder_all()
elif (
self.current.kind == tk.OP
and self.current.value == '@'
and self.stream.got_logical_newline
):
self.consume(tk.OP)
self.parse_decorators()
elif self.current.value in ['def', 'class']:
yield self.parse_definition(class_._nest(self.current.value))
elif self.current.kind == tk.INDENT:
self.consume(tk.INDENT)
yield from self.parse_definitions(class_)
elif self.current.kind == tk.DEDENT:
self.consume(tk.DEDENT)
return
elif self.current.value == 'from':
self.parse_from_import_statement()
else:
self.stream.move()
def parse_dunder_all(self):
"""Parse the __all__ definition in a module."""
assert self.current.value == '__all__'
self.consume(tk.NAME)
# More than one __all__ definition means we ignore all __all__.
if self.dunder_all is not None or self.dunder_all_error is not None:
self.dunder_all = None
self.dunder_all_error = 'Could not evaluate contents of __all__. '
return
if self.current.value != '=':
self.dunder_all_error = 'Could not evaluate contents of __all__. '
return
self.consume(tk.OP)
is_surrounded = False
if self.current.value in '([':
is_surrounded = True
self.consume(tk.OP)
dunder_all_content = "("
while True:
if is_surrounded and self.current.value in ")]":
break
if self.current.kind in (tk.NEWLINE, tk.ENDMARKER):
break
if self.current.kind in (tk.NL, tk.COMMENT):
pass
elif self.current.kind == tk.STRING or self.current.value == ',':
dunder_all_content += self.current.value
else:
self.dunder_all_error = (
'Could not evaluate contents of __all__.'
)
return
self.stream.move()
if is_surrounded:
self.consume(tk.OP)
if not is_surrounded and ',' not in dunder_all_content:
self.dunder_all_error = (
'Unexpected token kind in __all__: {!r}. '.format(
self.current.kind
)
)
return
dunder_all_content += ")"
try:
self.dunder_all = eval(dunder_all_content, {})
except BaseException as e:
self.dunder_all_error = (
'Could not evaluate contents of __all__.'
'\nThe value was {}. The exception was:\n{}'.format(
dunder_all_content, e
)
)
while (
self.current.kind not in self.stream.LOGICAL_NEWLINES
and self.current.kind != tk.ENDMARKER
):
if self.current.kind != tk.COMMENT:
self.dunder_all = None
self.dunder_all_error = (
'Could not evaluate contents of __all__. '
)
return
self.stream.move()
def parse_module(self):
"""Parse a module (and its children) and return a Module object."""
self.log.debug("parsing module.")
start = self.line
skipped_error_codes = self.parse_skip_comment()
docstring = self.parse_docstring()
children = list(self.parse_definitions(Module, dunder_all=True))
assert self.current is None, self.current
end = self.line
cls = Module
if self.filename.endswith('__init__.py'):
cls = Package
module = cls(
self.filename,
self.source,
start,
end,
[],
docstring,
children,
None,
self.dunder_all,
self.dunder_all_error,
None,
skipped_error_codes,
)
for child in module.children:
child.parent = module
module.future_imports = self.future_imports
self.log.debug("finished parsing module.")
return module
def parse_definition(self, class_):
"""Parse a definition and return its value in a `class_` object."""
start = self.line
self.consume(tk.NAME)
name = self.current.value
self.log.debug("parsing %s '%s'", class_.__name__, name)
self.stream.move()
callable_args = []
if self.current.kind == tk.OP and self.current.value == '(':
parenthesis_level = 0
in_default_arg = False
while True:
if self.current.kind == tk.OP:
if self.current.value == '(':
parenthesis_level += 1
elif self.current.value == ')':
parenthesis_level -= 1
if parenthesis_level == 0:
break
elif self.current.value == ',':
in_default_arg = False
elif (
parenthesis_level == 1
and self.current.kind == tk.NAME
and not in_default_arg
):
callable_args.append(self.current.value)
in_default_arg = True
self.stream.move()
if self.current.kind != tk.OP or self.current.value != ':':
self.leapfrog(tk.OP, value=":")
else:
self.consume(tk.OP)
if self.current.kind in (tk.NEWLINE, tk.COMMENT):
skipped_error_codes = self.parse_skip_comment()
self.leapfrog(tk.INDENT)
assert self.current.kind != tk.INDENT
docstring = self.parse_docstring()
decorators = self._accumulated_decorators
self.log.debug("current accumulated decorators: %s", decorators)
self._accumulated_decorators = []
self.log.debug("parsing nested definitions.")
children = list(self.parse_definitions(class_))
self.log.debug(
"finished parsing nested definitions for '%s'", name
)
end = self.line - 1
else: # one-liner definition
skipped_error_codes = ''
docstring = self.parse_docstring()
decorators = self._accumulated_decorators
self.log.debug("current accumulated decorators: %s", decorators)
self._accumulated_decorators = []
children = []
end = self.line
self.leapfrog(tk.NEWLINE)
definition = class_(
name,
self.source,
start,
end,
decorators,
docstring,
children,
callable_args,
None, # parent
skipped_error_codes,
)
for child in definition.children:
child.parent = definition
self.log.debug(
"finished parsing %s '%s'. Next token is %r",
class_.__name__,
name,
self.current,
)
return definition
def parse_skip_comment(self):
"""Parse a definition comment for noqa skips."""
skipped_error_codes = ''
while self.current.kind in (tk.COMMENT, tk.NEWLINE, tk.NL):
if self.current.kind == tk.COMMENT:
if 'noqa: ' in self.current.value:
skipped_error_codes = ''.join(
self.current.value.split('noqa: ')[1:]
)
elif self.current.value.startswith('# noqa'):
skipped_error_codes = 'all'
self.stream.move()
self.log.debug(
"parsing comments before docstring, token is %r (%s)",
self.current.kind,
self.current.value,
)
if skipped_error_codes:
break
return skipped_error_codes
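# Illustration of the comment handling above (a sketch):
#   '# noqa: D102,D103' -> skipped_error_codes == 'D102,D103'
#   '# noqa'            -> skipped_error_codes == 'all'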
def check_current(self, kind=None, value=None):
"""Verify the current token is of type `kind` and equals `value`."""
msg = textwrap.dedent(
"""
Unexpected token at line {self.line}:
In file: {self.filename}
Got kind {self.current.kind!r}
Got value {self.current.value}
""".format(
self=self
)
)
kind_valid = self.current.kind == kind if kind else True
value_valid = self.current.value == value if value else True
assert kind_valid and value_valid, msg
def parse_from_import_statement(self):
"""Parse a 'from x import y' statement.
The purpose is to find __future__ statements.
"""
self.log.debug('parsing from/import statement.')
is_future_import = self._parse_from_import_source()
self._parse_from_import_names(is_future_import)
def _parse_from_import_source(self):
"""Parse the 'from x import' part in a 'from x import y' statement.
Return true iff `x` is __future__.
"""
assert self.current.value == 'from', self.current.value
self.stream.move()
is_future_import = self.current.value == '__future__'
self.stream.move()
while (
self.current is not None
and self.current.kind in (tk.DOT, tk.NAME, tk.OP)
and self.current.value != 'import'
):
self.stream.move()
if self.current is None or self.current.value != 'import':
return False
self.check_current(value='import')
assert self.current.value == 'import', self.current.value
self.stream.move()
return is_future_import
def _parse_from_import_names(self, is_future_import):
"""Parse the 'y' part in a 'from x import y' statement."""
if self.current.value == '(':
self.consume(tk.OP)
expected_end_kinds = (tk.OP,)
else:
expected_end_kinds = (tk.NEWLINE, tk.ENDMARKER)
while self.current.kind not in expected_end_kinds and not (
self.current.kind == tk.OP and self.current.value == ';'
):
if self.current.kind != tk.NAME:
self.stream.move()
continue
self.log.debug(
"parsing import, token is %r (%s)",
self.current.kind,
self.current.value,
)
if is_future_import:
self.log.debug('found future import: %s', self.current.value)
self.future_imports.add(self.current.value)
self.consume(tk.NAME)
self.log.debug(
"parsing import, token is %r (%s)",
self.current.kind,
self.current.value,
)
if self.current.kind == tk.NAME and self.current.value == 'as':
self.consume(tk.NAME) # as
if self.current.kind == tk.NAME:
self.consume(tk.NAME) # new name, irrelevant
if self.current.value == ',':
self.consume(tk.OP)
self.log.debug(
"parsing import, token is %r (%s)",
self.current.kind,
self.current.value,
)
pydocstyle-6.3.0/src/pydocstyle/utils.py 0000664 0000000 0000000 00000002413 14361602300 0020412 0 ustar 00root root 0000000 0000000 """General shared utilities."""
import logging
import re
from itertools import tee, zip_longest
from typing import Any, Iterable, Tuple
# Do not update the version manually - it is managed by `bumpversion`.
__version__ = '6.3.0'
log = logging.getLogger(__name__)
#: Regular expression for stripping non-alphanumeric characters
NON_ALPHANUMERIC_STRIP_RE = re.compile(r'[\W_]+')
def is_blank(string: str) -> bool:
"""Return True iff the string contains only whitespaces."""
return not string.strip()
def pairwise(
iterable: Iterable,
default_value: Any,
) -> Iterable[Tuple[Any, Any]]:
"""Return pairs of items from `iterable`.
pairwise([1, 2, 3], default_value=None) -> (1, 2), (2, 3), (3, None)
"""
a, b = tee(iterable)
_ = next(b, default_value)
return zip_longest(a, b, fillvalue=default_value)
def common_prefix_length(a: str, b: str) -> int:
"""Return the length of the longest common prefix of a and b.
>>> common_prefix_length('abcd', 'abce')
3
"""
for common, (ca, cb) in enumerate(zip(a, b)):
if ca != cb:
return common
return min(len(a), len(b))
def strip_non_alphanumeric(string: str) -> str:
"""Strip string from any non-alphanumeric characters."""
return NON_ALPHANUMERIC_STRIP_RE.sub('', string)
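# Example of the helper above: strip_non_alphanumeric('Do_something!') -> 'Dosomething'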
pydocstyle-6.3.0/src/pydocstyle/violations.py 0000664 0000000 0000000 00000030173 14361602300 0021445 0 ustar 00root root 0000000 0000000 """Docstring violation definition."""
from collections import namedtuple
from functools import partial
from itertools import dropwhile
from typing import Any, Callable, Iterable, List, Optional
from .parser import Definition
from .utils import is_blank
__all__ = ('Error', 'ErrorRegistry', 'conventions')
ErrorParams = namedtuple('ErrorParams', ['code', 'short_desc', 'context'])
class Error:
"""Error in docstring style."""
# Options that define how errors are printed:
explain = False
source = False
def __init__(
self,
code: str,
short_desc: str,
context: str,
*parameters: Iterable[str],
) -> None:
"""Initialize the object.
`parameters` are specific to the created error.
"""
self.code = code
self.short_desc = short_desc
self.context = context
self.parameters = parameters
self.definition = None # type: Optional[Definition]
self.explanation = None # type: Optional[str]
def set_context(self, definition: Definition, explanation: str) -> None:
"""Set the source code context for this error."""
self.definition = definition
self.explanation = explanation
filename = property(lambda self: self.definition.module.name)
line = property(lambda self: self.definition.error_lineno)
@property
def message(self) -> str:
"""Return the message to print to the user."""
ret = f'{self.code}: {self.short_desc}'
if self.context is not None:
specific_error_msg = self.context.format(*self.parameters)
ret += f' ({specific_error_msg})'
return ret
@property
def lines(self) -> str:
"""Return the source code lines for this error."""
if self.definition is None:
return ''
source = ''
lines = self.definition.source.splitlines(keepends=True)
offset = self.definition.start # type: ignore
lines_stripped = list(
reversed(list(dropwhile(is_blank, reversed(lines))))
)
numbers_width = len(str(offset + len(lines_stripped)))
line_format = f'{{:{numbers_width}}}:{{}}'
for n, line in enumerate(lines_stripped):
if line:
line = ' ' + line
source += line_format.format(n + offset, line)
if n > 5:
source += ' ...\n'
break
return source
def __str__(self) -> str:
if self.explanation:
self.explanation = '\n'.join(
l for l in self.explanation.split('\n') if not is_blank(l)
)
template = '{filename}:{line} {definition}:\n {message}'
if self.source and self.explain:
template += '\n\n{explanation}\n\n{lines}\n'
elif self.source and not self.explain:
template += '\n\n{lines}\n'
elif self.explain and not self.source:
template += '\n\n{explanation}\n\n'
return template.format(
**{
name: getattr(self, name)
for name in [
'filename',
'line',
'definition',
'message',
'explanation',
'lines',
]
}
)
def __repr__(self) -> str:
return str(self)
def __lt__(self, other: 'Error') -> bool:
return (self.filename, self.line) < (other.filename, other.line)
class ErrorRegistry:
"""A registry of all error codes, divided to groups."""
groups = [] # type: ignore
class ErrorGroup:
"""A group of similarly themed errors."""
def __init__(self, prefix: str, name: str) -> None:
"""Initialize the object.
`Prefix` should be the common prefix for errors in this group,
e.g., "D1".
`name` is the name of the group (its subject).
"""
self.prefix = prefix
self.name = name
self.errors = [] # type: List[ErrorParams]
def create_error(
self,
error_code: str,
error_desc: str,
error_context: Optional[str] = None,
) -> Callable[[Iterable[str]], Error]:
"""Create an error, register it to this group and return it."""
# TODO: check prefix
error_params = ErrorParams(error_code, error_desc, error_context)
factory = partial(Error, *error_params)
self.errors.append(error_params)
return factory
@classmethod
def create_group(cls, prefix: str, name: str) -> ErrorGroup:
"""Create a new error group and return it."""
group = cls.ErrorGroup(prefix, name)
cls.groups.append(group)
return group
@classmethod
def get_error_codes(cls) -> Iterable[str]:
"""Yield all registered codes."""
for group in cls.groups:
for error in group.errors:
yield error.code
@classmethod
def to_rst(cls) -> str:
"""Output the registry as reStructuredText, for documentation."""
max_len = max(
len(error.short_desc)
for group in cls.groups
for error in group.errors
)
sep_line = '+' + 6 * '-' + '+' + '-' * (max_len + 2) + '+\n'
blank_line = '|' + (max_len + 9) * ' ' + '|\n'
table = ''
for group in cls.groups:
table += sep_line
table += blank_line
table += '|' + f'**{group.name}**'.center(max_len + 9) + '|\n'
table += blank_line
for error in group.errors:
table += sep_line
table += (
'|'
+ error.code.center(6)
+ '| '
+ error.short_desc.ljust(max_len + 1)
+ '|\n'
)
table += sep_line
return table
D1xx = ErrorRegistry.create_group('D1', 'Missing Docstrings')
D100 = D1xx.create_error(
'D100',
'Missing docstring in public module',
)
D101 = D1xx.create_error(
'D101',
'Missing docstring in public class',
)
D102 = D1xx.create_error(
'D102',
'Missing docstring in public method',
)
D103 = D1xx.create_error(
'D103',
'Missing docstring in public function',
)
D104 = D1xx.create_error(
'D104',
'Missing docstring in public package',
)
D105 = D1xx.create_error(
'D105',
'Missing docstring in magic method',
)
D106 = D1xx.create_error(
'D106',
'Missing docstring in public nested class',
)
D107 = D1xx.create_error(
'D107',
'Missing docstring in __init__',
)
D2xx = ErrorRegistry.create_group('D2', 'Whitespace Issues')
D200 = D2xx.create_error(
'D200',
'One-line docstring should fit on one line ' 'with quotes',
'found {0}',
)
D201 = D2xx.create_error(
'D201',
'No blank lines allowed before function docstring',
'found {0}',
)
D202 = D2xx.create_error(
'D202',
'No blank lines allowed after function docstring',
'found {0}',
)
D203 = D2xx.create_error(
'D203',
'1 blank line required before class docstring',
'found {0}',
)
D204 = D2xx.create_error(
'D204',
'1 blank line required after class docstring',
'found {0}',
)
D205 = D2xx.create_error(
'D205',
'1 blank line required between summary line and description',
'found {0}',
)
D206 = D2xx.create_error(
'D206',
'Docstring should be indented with spaces, not tabs',
)
D207 = D2xx.create_error(
'D207',
'Docstring is under-indented',
)
D208 = D2xx.create_error(
'D208',
'Docstring is over-indented',
)
D209 = D2xx.create_error(
'D209',
'Multi-line docstring closing quotes should be on a separate line',
)
D210 = D2xx.create_error(
'D210',
'No whitespaces allowed surrounding docstring text',
)
D211 = D2xx.create_error(
'D211',
'No blank lines allowed before class docstring',
'found {0}',
)
D212 = D2xx.create_error(
'D212',
'Multi-line docstring summary should start at the first line',
)
D213 = D2xx.create_error(
'D213',
'Multi-line docstring summary should start at the second line',
)
D214 = D2xx.create_error(
'D214',
'Section is over-indented',
'{0!r}',
)
D215 = D2xx.create_error(
'D215',
'Section underline is over-indented',
'in section {0!r}',
)
D3xx = ErrorRegistry.create_group('D3', 'Quotes Issues')
D300 = D3xx.create_error(
'D300',
'Use """triple double quotes"""',
'found {0}-quotes',
)
D301 = D3xx.create_error(
'D301',
'Use r""" if any backslashes in a docstring',
)
D302 = D3xx.create_error(
'D302',
'Deprecated: Use u""" for Unicode docstrings',
)
D4xx = ErrorRegistry.create_group('D4', 'Docstring Content Issues')
D400 = D4xx.create_error(
'D400',
'First line should end with a period',
'not {0!r}',
)
D401 = D4xx.create_error(
'D401',
'First line should be in imperative mood',
"perhaps '{0}', not '{1}'",
)
D401b = D4xx.create_error(
'D401',
'First line should be in imperative mood; try rephrasing',
"found '{0}'",
)
D402 = D4xx.create_error(
'D402',
'First line should not be the function\'s "signature"',
)
D403 = D4xx.create_error(
'D403',
'First word of the first line should be properly capitalized',
'{0!r}, not {1!r}',
)
D404 = D4xx.create_error(
'D404',
'First word of the docstring should not be `This`',
)
D405 = D4xx.create_error(
'D405',
'Section name should be properly capitalized',
'{0!r}, not {1!r}',
)
D406 = D4xx.create_error(
'D406',
'Section name should end with a newline',
'{0!r}, not {1!r}',
)
D407 = D4xx.create_error(
'D407',
'Missing dashed underline after section',
'{0!r}',
)
D408 = D4xx.create_error(
'D408',
'Section underline should be in the line following the section\'s name',
'{0!r}',
)
D409 = D4xx.create_error(
'D409',
'Section underline should match the length of its name',
'Expected {0!r} dashes in section {1!r}, got {2!r}',
)
D410 = D4xx.create_error(
'D410',
'Missing blank line after section',
'{0!r}',
)
D411 = D4xx.create_error(
'D411',
'Missing blank line before section',
'{0!r}',
)
D412 = D4xx.create_error(
'D412',
'No blank lines allowed between a section header and its content',
'{0!r}',
)
D413 = D4xx.create_error(
'D413',
'Missing blank line after last section',
'{0!r}',
)
D414 = D4xx.create_error(
'D414',
'Section has no content',
'{0!r}',
)
D415 = D4xx.create_error(
'D415',
(
'First line should end with a period, question '
'mark, or exclamation point'
),
'not {0!r}',
)
D416 = D4xx.create_error(
'D416',
'Section name should end with a colon',
'{0!r}, not {1!r}',
)
D417 = D4xx.create_error(
'D417',
'Missing argument descriptions in the docstring',
'argument(s) {0} are missing descriptions in {1!r} docstring',
)
D418 = D4xx.create_error(
'D418',
'Function/method decorated with @overload shouldn\'t contain a docstring',
)
D419 = D4xx.create_error(
'D419',
'Docstring is empty',
)
class AttrDict(dict):
def __getattr__(self, item: str) -> Any:
return self[item]
all_errors = set(ErrorRegistry.get_error_codes())
conventions = AttrDict(
{
'pep257': all_errors
- {
'D203',
'D212',
'D213',
'D214',
'D215',
'D404',
'D405',
'D406',
'D407',
'D408',
'D409',
'D410',
'D411',
'D413',
'D415',
'D416',
'D417',
'D418',
},
'numpy': all_errors
- {
'D107',
'D203',
'D212',
'D213',
'D402',
'D413',
'D415',
'D416',
'D417',
},
'google': all_errors
- {
'D203',
'D204',
'D213',
'D215',
'D400',
'D401',
'D404',
'D406',
'D407',
'D408',
'D409',
'D413',
},
}
)
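# Sketch of how the registry and conventions above behave:
#   'D100' in conventions.google -> True  (google still checks module docstrings)
#   'D203' in conventions.pep257 -> False (pep257 keeps D211 instead)
#   sorted(ErrorRegistry.get_error_codes())[:3] -> ['D100', 'D101', 'D102']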
pydocstyle-6.3.0/src/pydocstyle/wordlists.py 0000664 0000000 0000000 00000003014 14361602300 0021302 0 ustar 00root root 0000000 0000000 """Wordlists loaded from package data.
We can treat them as part of the code for the imperative mood check, and
therefore we load them at import time, rather than on-demand.
"""
import pkgutil
import re
from typing import Dict, Iterator, Set
import snowballstemmer
#: Regular expression for stripping comments from the wordlists
COMMENT_RE = re.compile(r'\s*#.*')
#: Stemmer function for stemming words in English
stem = snowballstemmer.stemmer('english').stemWord
def load_wordlist(name: str) -> Iterator[str]:
"""Iterate over lines of a wordlist data file.
`name` should be the name of a package data file within the data/
directory.
Whitespace and #-prefixed comments are stripped from each line.
"""
data = pkgutil.get_data('pydocstyle', 'data/' + name)
if data is not None:
text = data.decode('utf8')
for line in text.splitlines():
line = COMMENT_RE.sub('', line).strip()
if line:
yield line
def make_imperative_verbs_dict(wordlist: Iterator[str]) -> Dict[str, Set[str]]:
"""Create a dictionary mapping stemmed verbs to the imperative form."""
imperative_verbs = {} # type: Dict[str, Set[str]]
for word in wordlist:
imperative_verbs.setdefault(stem(word), set()).add(word)
return imperative_verbs
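# Sketch of the resulting lookup: the stemmer collapses inflected forms, e.g.
# stem('returns') == 'return', so IMPERATIVE_VERBS maps 'return' -> {'return'}
# and a docstring starting with 'Returns' can be traced back to its imperative.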
IMPERATIVE_VERBS = make_imperative_verbs_dict(load_wordlist('imperatives.txt'))
#: Words that are forbidden to appear as the first word in a docstring
IMPERATIVE_BLACKLIST = set(load_wordlist('imperatives_blacklist.txt'))
pydocstyle-6.3.0/src/tests/ 0000775 0000000 0000000 00000000000 14361602300 0015643 5 ustar 00root root 0000000 0000000 pydocstyle-6.3.0/src/tests/__init__.py 0000664 0000000 0000000 00000000000 14361602300 0017742 0 ustar 00root root 0000000 0000000 pydocstyle-6.3.0/src/tests/error_tests.py 0000664 0000000 0000000 00000003000 14361602300 0020561 0 ustar 00root root 0000000 0000000 """Tests for the violations.Error class."""
import pytest
import collections
import textwrap
from pydocstyle.violations import Error
MockDefinition = collections.namedtuple('MockDefinition', ['source', 'start'])
def test_message_without_context():
"""Test a simple error message without parameters."""
error = Error('CODE', 'an error', None)
assert error.message == 'CODE: an error'
def test_message_with_context():
"""Test an error message with parameters."""
error = Error('CODE', 'an error', 'got {}', 0)
assert error.message == 'CODE: an error (got 0)'
def test_message_with_insufficient_parameters():
"""Test an error message with invalid parameter invocation."""
error = Error('CODE', 'an error', 'got {}')
with pytest.raises(IndexError):
assert error.message
def test_lines():
"""Test proper printing of source lines, including blank line trimming."""
error = Error('CODE', 'an error', None)
definition = MockDefinition(source=['def foo():\n',
' """A docstring."""\n',
'\n',
' pass\n',
'\n',
'\n'],
start=424)
error.set_context(definition, None)
print(error.lines)
assert error.lines == textwrap.dedent('''\
424: def foo():
425: """A docstring."""
426:
427: pass
''')
pydocstyle-6.3.0/src/tests/parser_test.py 0000664 0000000 0000000 00000067041 14361602300 0020560 0 ustar 00root root 0000000 0000000 """Parser tests."""
import io
import sys
import pytest
import textwrap
from pathlib import Path
from pydocstyle.parser import Parser, ParseError
class CodeSnippet(io.StringIO):
"""A code snippet.
Automatically wraps the snippet as a file-like object and dedents it.
"""
def __init__(self, code_string):
"""Initialize the object."""
io.StringIO.__init__(self, textwrap.dedent(code_string))
def test_function():
"""Test parsing of a simple function."""
parser = Parser()
code = CodeSnippet("""\
def do_something(pos_param0, pos_param1, kw_param0="default"):
\"""Do something.\"""
return None
""")
module = parser.parse(code, 'file_path')
assert module.is_public
assert module.dunder_all is None
function, = module.children
assert function.name == 'do_something'
assert function.decorators == []
assert function.children == []
assert function.docstring == '"""Do something."""'
assert function.docstring.start == 2
assert function.docstring.end == 2
assert function.kind == 'function'
assert function.parent == module
assert function.start == 1
assert function.end == 3
assert function.error_lineno == 2
assert function.source == code.getvalue()
assert function.is_public
assert str(function) == 'in public function `do_something`'
def test_simple_fstring():
"""Test parsing of a function with a simple fstring as a docstring."""
parser = Parser()
code = CodeSnippet("""\
def do_something(pos_param0, pos_param1, kw_param0="default"):
f\"""Do something.\"""
return None
""")
module = parser.parse(code, 'file_path')
assert module.is_public
assert module.dunder_all is None
function, = module.children
assert function.name == 'do_something'
assert function.decorators == []
assert function.children == []
assert function.docstring == 'f"""Do something."""'
assert function.docstring.start == 2
assert function.docstring.end == 2
assert function.kind == 'function'
assert function.parent == module
assert function.start == 1
assert function.end == 3
assert function.error_lineno == 2
assert function.source == code.getvalue()
assert function.is_public
assert str(function) == 'in public function `do_something`'
def test_fstring_with_args():
"""Test parsing of a function with an fstring with args as a docstring."""
parser = Parser()
code = CodeSnippet("""\
foo = "bar"
bar = "baz"
def do_something(pos_param0, pos_param1, kw_param0="default"):
f\"""Do some {foo} and some {bar}.\"""
return None
""")
module = parser.parse(code, 'file_path')
assert module.is_public
assert module.dunder_all is None
function, = module.children
assert function.name == 'do_something'
assert function.decorators == []
assert function.children == []
assert function.docstring == 'f"""Do some {foo} and some {bar}."""'
assert function.docstring.start == 4
assert function.docstring.end == 4
assert function.kind == 'function'
assert function.parent == module
assert function.start == 3
assert function.end == 5
assert function.error_lineno == 4
assert function.source == textwrap.dedent("""\
def do_something(pos_param0, pos_param1, kw_param0="default"):
f\"""Do some {foo} and some {bar}.\"""
return None
""")
assert function.is_public
assert str(function) == 'in public function `do_something`'
def test_decorated_function():
"""Test parsing of a simple function with a decorator."""
parser = Parser()
code = CodeSnippet("""\
@single_decorator
def do_something():
\"""Do something.\"""
return None
""")
module = parser.parse(code, 'file_path')
function, = module.children
assert function.name == 'do_something'
assert len(function.decorators) == 1
assert function.decorators[0].name == 'single_decorator'
assert function.children == []
assert function.docstring == '"""Do something."""'
assert function.kind == 'function'
assert function.parent == module
assert function.start == 2
assert function.end == 4
assert function.source == textwrap.dedent("""\
def do_something():
\"""Do something.\"""
return None
""")
assert function.is_public
assert str(function) == 'in public function `do_something`'
def test_nested_function():
"""Test parsing of a nested function."""
parser = Parser()
code = CodeSnippet("""\
def outer_function():
\"""This is the outer function.\"""
def inner_function():
'''This is the inner function.'''
return None
return None
""")
module = parser.parse(code, 'file_path')
outer_function, = module.children
assert outer_function.name == 'outer_function'
assert outer_function.decorators == []
assert outer_function.docstring == '"""This is the outer function."""'
assert outer_function.kind == 'function'
assert outer_function.parent == module
assert outer_function.start == 1
assert outer_function.end == 6
assert outer_function.error_lineno == 2
assert outer_function.source == code.getvalue()
assert outer_function.is_public
assert str(outer_function) == 'in public function `outer_function`'
inner_function, = outer_function.children
assert inner_function.name == 'inner_function'
assert inner_function.decorators == []
assert inner_function.docstring == "'''This is the inner function.'''"
assert inner_function.kind == 'function'
assert inner_function.parent == outer_function
assert inner_function.start == 3
assert inner_function.end == 5
assert inner_function.error_lineno == 4
assert textwrap.dedent(inner_function.source) == textwrap.dedent("""\
def inner_function():
'''This is the inner function.'''
return None
""")
assert not inner_function.is_public
assert str(inner_function) == 'in private nested function `inner_function`'
def test_conditional_nested_function():
"""Test parsing of a nested function inside a condition."""
parser = Parser()
code = CodeSnippet("""\
def outer_function():
\"""This is the outer function.\"""
if True:
def inner_function():
'''This is the inner function.'''
return None
return None
""")
module = parser.parse(code, 'file_path')
outer_function, = module.children
assert outer_function.name == 'outer_function'
assert outer_function.decorators == []
assert outer_function.docstring == '"""This is the outer function."""'
assert outer_function.kind == 'function'
assert outer_function.parent == module
assert outer_function.start == 1
assert outer_function.end == 7
assert outer_function.source == code.getvalue()
assert outer_function.is_public
assert str(outer_function) == 'in public function `outer_function`'
inner_function, = outer_function.children
assert inner_function.name == 'inner_function'
assert inner_function.decorators == []
assert inner_function.docstring == "'''This is the inner function.'''"
assert inner_function.kind == 'function'
assert inner_function.parent == outer_function
assert inner_function.start == 4
assert inner_function.end == 6
assert textwrap.dedent(inner_function.source) == textwrap.dedent("""\
def inner_function():
'''This is the inner function.'''
return None
""")
assert not inner_function.is_public
assert str(inner_function) == 'in private nested function `inner_function`'
def test_doubly_nested_function():
"""Test parsing of a nested function inside a nested function."""
parser = Parser()
code = CodeSnippet("""\
def outer_function():
\"""This is the outer function.\"""
def middle_function():
def inner_function():
'''This is the inner function.'''
return None
return None
""")
module = parser.parse(code, 'file_path')
outer_function, = module.children
assert outer_function.name == 'outer_function'
assert outer_function.decorators == []
assert outer_function.docstring == '"""This is the outer function."""'
assert outer_function.kind == 'function'
assert outer_function.parent == module
assert outer_function.start == 1
assert outer_function.end == 7
assert outer_function.source == code.getvalue()
assert outer_function.is_public
assert str(outer_function) == 'in public function `outer_function`'
middle_function, = outer_function.children
assert middle_function.name == 'middle_function'
assert middle_function.decorators == []
assert middle_function.docstring is None
assert middle_function.kind == 'function'
assert middle_function.parent == outer_function
assert middle_function.start == 3
assert middle_function.end == 6
assert textwrap.dedent(middle_function.source) == textwrap.dedent("""\
def middle_function():
def inner_function():
'''This is the inner function.'''
return None
""")
assert not middle_function.is_public
assert (str(middle_function) ==
'in private nested function `middle_function`')
inner_function, = middle_function.children
assert inner_function.name == 'inner_function'
assert inner_function.decorators == []
assert inner_function.docstring == "'''This is the inner function.'''"
assert inner_function.kind == 'function'
assert inner_function.parent == middle_function
assert inner_function.start == 4
assert inner_function.end == 6
assert textwrap.dedent(inner_function.source) == textwrap.dedent("""\
def inner_function():
'''This is the inner function.'''
return None
""")
assert not inner_function.is_public
assert str(inner_function) == 'in private nested function `inner_function`'
def test_class():
"""Test parsing of a class."""
parser = Parser()
code = CodeSnippet("""\
class TestedClass(object):
" an ugly docstring "
""")
module = parser.parse(code, 'file_path')
klass, = module.children
assert klass.name == 'TestedClass'
assert klass.decorators == []
assert klass.children == []
assert klass.docstring == '" an ugly docstring "'
assert klass.kind == 'class'
assert klass.parent == module
assert klass.start == 1
assert klass.end == 3
assert klass.error_lineno == 3
assert klass.source == code.getvalue()
assert klass.is_public
assert str(klass) == 'in public class `TestedClass`'
def test_public_method():
"""Test parsing of a public method."""
parser = Parser()
code = CodeSnippet("""\
class TestedClass(object):
def do_it(param):
\"""Do the 'it'\"""
# do nothing
return None
""")
module = parser.parse(code, 'file_path')
klass, = module.children
assert klass.name == 'TestedClass'
assert klass.decorators == []
assert klass.docstring is None
assert klass.kind == 'class'
assert klass.parent == module
assert klass.start == 1
assert klass.end == 5
assert klass.error_lineno == 1
assert klass.source == code.getvalue()
assert klass.is_public
assert str(klass) == 'in public class `TestedClass`'
method, = klass.children
assert method.name == 'do_it'
assert method.decorators == []
assert method.docstring == '''"""Do the 'it'"""'''
assert method.kind == 'method'
assert method.parent == klass
assert method.start == 2
assert method.end == 5
assert method.error_lineno == 3
assert textwrap.dedent(method.source) == textwrap.dedent("""\
def do_it(param):
\"""Do the 'it'\"""
# do nothing
return None
""")
assert method.is_public
assert not method.is_magic
assert str(method) == 'in public method `do_it`'
def test_private_method():
"""Test parsing of a private method."""
parser = Parser()
code = CodeSnippet("""\
class TestedClass(object):
def _do_it(param):
\"""Do the 'it'\"""
# do nothing
return None
""")
module = parser.parse(code, 'file_path')
klass, = module.children
assert klass.name == 'TestedClass'
assert klass.decorators == []
assert klass.docstring is None
assert klass.kind == 'class'
assert klass.parent == module
assert klass.start == 1
assert klass.end == 5
assert klass.error_lineno == 1
assert klass.source == code.getvalue()
assert klass.is_public
assert str(klass) == 'in public class `TestedClass`'
method, = klass.children
assert method.name == '_do_it'
assert method.decorators == []
assert method.docstring == '''"""Do the 'it'"""'''
assert method.kind == 'method'
assert method.parent == klass
assert method.start == 2
assert method.end == 5
assert method.error_lineno == 3
assert textwrap.dedent(method.source) == textwrap.dedent("""\
def _do_it(param):
\"""Do the 'it'\"""
# do nothing
return None
""")
assert not method.is_public
assert not method.is_magic
assert str(method) == 'in private method `_do_it`'
def test_magic_method():
"""Test parsing of a magic method."""
parser = Parser()
code = CodeSnippet("""\
class TestedClass(object):
def __str__(self):
return "me"
""")
module = parser.parse(code, 'file_path')
klass, = module.children
assert klass.name == 'TestedClass'
assert klass.decorators == []
assert klass.docstring is None
assert klass.kind == 'class'
assert klass.parent == module
assert klass.start == 1
assert klass.end == 3
assert klass.error_lineno == 1
assert klass.source == code.getvalue()
assert klass.is_public
assert str(klass) == 'in public class `TestedClass`'
method, = klass.children
assert method.name == '__str__'
assert method.decorators == []
assert method.docstring is None
assert method.kind == 'method'
assert method.parent == klass
assert method.start == 2
assert method.end == 3
assert method.error_lineno == 2
assert textwrap.dedent(method.source) == textwrap.dedent("""\
def __str__(self):
return "me"
""")
assert method.is_public
assert method.is_magic
assert str(method) == 'in public method `__str__`'
def test_nested_class():
"""Test parsing of a class."""
parser = Parser()
code = CodeSnippet("""\
class OuterClass(object):
' an outer docstring'
class InnerClass(object):
"An inner docstring."
""")
module = parser.parse(code, 'file_path')
outer_class, = module.children
assert outer_class.name == 'OuterClass'
assert outer_class.decorators == []
assert outer_class.docstring == "' an outer docstring'"
assert outer_class.kind == 'class'
assert outer_class.parent == module
assert outer_class.start == 1
assert outer_class.end == 4
assert outer_class.error_lineno == 2
assert outer_class.source == code.getvalue()
assert outer_class.is_public
assert str(outer_class) == 'in public class `OuterClass`'
inner_class, = outer_class.children
assert inner_class.name == 'InnerClass'
assert inner_class.decorators == []
assert inner_class.children == []
assert inner_class.docstring == '"An inner docstring."'
assert inner_class.kind == 'class'
assert inner_class.parent == outer_class
assert inner_class.start == 3
assert inner_class.end == 4
assert inner_class.error_lineno == 4
assert textwrap.dedent(inner_class.source) == textwrap.dedent("""\
class InnerClass(object):
"An inner docstring."
""")
assert inner_class.is_public
assert str(inner_class) == 'in public nested class `InnerClass`'
def test_raise_from():
"""Make sure 'raise x from y' doesn't trip the parser."""
parser = Parser()
code = CodeSnippet("raise ValueError() from None")
parser.parse(code, 'file_path')
def test_simple_matrix_multiplication():
"""Make sure 'a @ b' doesn't trip the parser."""
parser = Parser()
code = CodeSnippet("""
def foo():
a @ b
""")
parser.parse(code, 'file_path')
@pytest.mark.parametrize("code", (
CodeSnippet("""
def foo():
a @ b
(a
@b)
@a
def b():
pass
"""),
CodeSnippet("""
def foo():
a @ b
(a
@b)
a\
@b
@a
def b():
pass
"""),
CodeSnippet("""
def foo():
a @ b
(a
# A random comment here
@b)
a\
@b
@a
def b():
pass
"""),
))
def test_matrix_multiplication_with_decorators(code):
"""Make sure 'a @ b' doesn't trip the parser."""
parser = Parser()
module = parser.parse(code, 'file_path')
outer_function, = module.children
assert outer_function.name == 'foo'
inner_function, = outer_function.children
assert len(inner_function.decorators) == 1
assert inner_function.decorators[0].name == 'a'
@pytest.mark.parametrize("public_path", (
Path(""),
Path("module.py"),
Path("package") / "module.py",
Path("package") / "__init__.py",
Path("") / "package" / "module.py",
Path("") / "__dunder__" / "package" / "module.py"
))
def test_module_publicity_with_public_path(public_path):
"""Test module publicity with public path.
Module names such as my_module.py are considered public.
Special "dunder" modules,
with leading and trailing double-underscores (e.g. __init__.py) are public.
The same rules for publicity apply to both packages and modules.
"""
parser = Parser()
code = CodeSnippet("")
module = parser.parse(code, str(public_path))
assert module.is_public
@pytest.mark.parametrize("private_path", (
# single underscore
Path("_private_module.py"),
Path("_private_package") / "module.py",
Path("_private_package") / "package" / "module.py",
Path("") / "_private_package" / "package" / "module.py",
# double underscore
Path("__private_module.py"),
Path("__private_package") / "module.py",
Path("__private_package") / "package" / "module.py",
Path("") / "__private_package" / "package" / "module.py"
))
def test_module_publicity_with_private_paths(private_path):
"""Test module publicity with private path.
Module names starting with single or double-underscore are private.
For example, _my_private_module.py and __my_private_module.py.
Any module within a private package is considered private.
The same rules for publicity apply to both packages and modules.
"""
parser = Parser()
code = CodeSnippet("")
module = parser.parse(code, str(private_path))
assert not module.is_public
@pytest.mark.parametrize("syspath,is_public", (
("/", False),
("_foo/", True),
))
def test_module_publicity_with_different_sys_path(syspath,
is_public,
monkeypatch):
"""Test module publicity for same path and different sys.path."""
parser = Parser()
code = CodeSnippet("")
monkeypatch.syspath_prepend(syspath)
path = Path("_foo") / "bar" / "baz.py"
module = parser.parse(code, str(path))
assert module.is_public == is_public
def test_complex_module():
"""Test that a complex module is parsed correctly."""
parser = Parser()
code = CodeSnippet('''\
"""Module."""
__all__ = ('a', 'b'
'c',)
def function():
"Function."
def nested_1():
"""Nested."""
if True:
def nested_2():
pass
class class_(object):
"""Class."""
def method_1(self):
"""Method."""
def method_2(self):
def nested_3(self):
"""Nested."""
''')
module = parser.parse(code, "filepath")
assert list(module)[0] == module
assert len(list(module)) == 8
@pytest.mark.parametrize("code", (
CodeSnippet("""\
__all__ = ['foo', 'bar']
"""),
CodeSnippet("""\
__all__ = ['foo', 'ba'
'r',]
"""),
CodeSnippet("""\
__all__ = ('foo',
'bar'
)
"""),
CodeSnippet("""\
__all__ = ['foo',
# Inconvenient comment
'bar'
]
"""),
CodeSnippet("""\
__all__ = 'foo', 'bar'
"""),
CodeSnippet("""\
__all__ = 'foo', 'bar',
"""),
CodeSnippet(
"""__all__ = 'foo', 'bar'"""
),
CodeSnippet("""\
__all__ = 'foo', \
'bar'
"""),
CodeSnippet("""\
foo = 1
__all__ = 'foo', 'bar'
"""),
CodeSnippet("""\
__all__ = 'foo', 'bar'
foo = 1
"""),
CodeSnippet("""\
__all__ = ['foo', 'bar'] # never freeze
"""),
))
def test_dunder_all(code):
"""Test that __all__ is parsed correctly."""
parser = Parser()
module = parser.parse(code, "filepath")
assert module.dunder_all == ('foo', 'bar')
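# Each snippet above assigns __all__ a literal list or tuple of string
# constants, which the parser can evaluate statically; dynamic forms are
# covered by the indeterminable cases further down.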
def test_single_value_dunder_all():
"""Test that single value __all__ is parsed correctly."""
parser = Parser()
code = CodeSnippet("""\
__all__ = 'foo',
""")
module = parser.parse(code, "filepath")
assert module.dunder_all == ('foo', )
code = CodeSnippet("""\
__all__ = 'foo'
""")
module = parser.parse(code, "filepath")
assert module.dunder_all is None
assert module.dunder_all_error
code = CodeSnippet("""\
__all__ = ('foo', )
""")
module = parser.parse(code, "filepath")
assert module.dunder_all == ('foo', )
indeterminable_dunder_all_test_cases = [
CodeSnippet("""\
__all__ = ['foo']
__all__ += ['bar']
"""),
CodeSnippet("""\
__all__ = ['foo'] + ['bar']
"""),
CodeSnippet("""\
__all__ = ['foo']
__all__.insert('bar')
"""),
CodeSnippet("""\
__all__ = foo()
"""),
CodeSnippet("""\
all = ['foo']
__all__ = all
"""),
CodeSnippet("""\
foo = 'foo'
__all__ = [foo]
"""),
CodeSnippet("""\
__all__ = (*foo, 'bar')
"""),
]
@pytest.mark.parametrize("code", indeterminable_dunder_all_test_cases)
def test_indeterminable_dunder_all(code):
"""Test that __all__ is ignored if it can't be statically evaluated."""
parser = Parser()
module = parser.parse(code, "filepath")
assert module.dunder_all is None
assert module.dunder_all_error
@pytest.mark.parametrize("code", (
CodeSnippet("""\
from __future__ import unicode_literals, nested_scopes
"""),
CodeSnippet("""\
from __future__ import unicode_literals, nested_scopes;
"""),
CodeSnippet("""\
from __future__ import unicode_literals
from __future__ import nested_scopes;
"""),
CodeSnippet("""\
from __future__ import unicode_literals
from __future__ import nested_scopes as ns
"""),
CodeSnippet("""\
from __future__ import (unicode_literals as nl,
nested_scopes)
"""),
CodeSnippet("""\
from __future__ import (unicode_literals as nl,)
from __future__ import (nested_scopes)
"""),
CodeSnippet("""\
from __future__ \\
import unicode_literals
from __future__ \\
import nested_scopes
"""),
))
def test_future_import(code):
"""Test that __future__ imports are properly parsed and collected."""
parser = Parser()
module = parser.parse(code, "filepath")
assert module.future_imports == {'unicode_literals', 'nested_scopes'}
def test_noqa_function():
"""Test that "# noqa" comments are correctly collected for definitions."""
code = CodeSnippet("""\
def foo(): # noqa: D100,D101
pass
""")
parser = Parser()
module = parser.parse(code, "filepath")
function, = module.children
assert function.skipped_error_codes == 'D100,D101'
@pytest.mark.parametrize("code", (
CodeSnippet("""\
while True:
try:
pass
"""),
CodeSnippet("[\n"),
# Should result in `SyntaxError: from __future__ imports must occur
# at the beginning of the file`
CodeSnippet("""\
from __future__ import unicode_literals; import string; from \
__future__ import nested_scopes
"""),
))
def test_invalid_syntax(code):
"""Test invalid code input to the parser."""
parser = Parser()
with pytest.raises(ParseError):
module = parser.parse(code, "filepath")
@pytest.mark.parametrize("code", (
CodeSnippet("""\
'''Test this'''
@property
def test():
pass
"""),
CodeSnippet("""\
'''Test this'''
@property
def test():
pass
"""),
CodeSnippet("""\
'''Test this'''
@property
def test():
pass
"""),
CodeSnippet("""\
'''Test this'''
@property
def test():
pass
"""),
CodeSnippet("""\
'''Test this'''
# A random comment in the middle to break things
@property
def test():
pass
"""),
CodeSnippet("""\
'''Test this'''
@property
def test(): pass
"""),
CodeSnippet("""\
'''Test this'''
@first_decorator
@property
def test(): pass
"""),
))
def test_parsing_function_decorators(code):
"""Test to ensure we are correctly parsing function decorators."""
parser = Parser()
module = parser.parse(code, "filename")
function, = module.children
decorator_names = {dec.name for dec in function.decorators}
assert "property" in decorator_names
@pytest.mark.parametrize("code", (
CodeSnippet("""\
class Test:
@property
def test(self):
pass
"""),
CodeSnippet("""\
class Test:
@property
def test(self):
pass
"""),
CodeSnippet("""\
class Test:
# Random comment to trip decorator parsing
@property
def test(self):
pass
"""),
CodeSnippet("""\
class Test:
# Random comment to trip decorator parsing
A = 1
@property
def test(self):
pass
"""),
CodeSnippet("""\
class Test:
# Random comment to trip decorator parsing
A = 1
'''Another random comment'''
@property
def test(self):
pass
"""),
))
def test_parsing_method_decorators(code):
"""Test to ensure we are correctly parsing method decorators."""
parser = Parser()
module = parser.parse(code, "filename")
function, = module.children[0].children
decorator_names = {dec.name for dec in function.decorators}
assert "property" in decorator_names
pydocstyle-6.3.0/src/tests/test_cases/ 0000775 0000000 0000000 00000000000 14361602300 0020000 5 ustar 00root root 0000000 0000000 pydocstyle-6.3.0/src/tests/test_cases/__init__.py 0000664 0000000 0000000 00000000000 14361602300 0022077 0 ustar 00root root 0000000 0000000 pydocstyle-6.3.0/src/tests/test_cases/all_import.py 0000664 0000000 0000000 00000000501 14361602300 0022510 0 ustar 00root root 0000000 0000000 """A valid module docstring."""
from .all_import_aux import __all__
from .expected import Expectation
expectation = Expectation()
expect = expectation.expect
@expect("D103: Missing docstring in public function")
def public_func():
pass
@expect("D103: Missing docstring in public function")
def this():
pass
pydocstyle-6.3.0/src/tests/test_cases/all_import_as.py 0000664 0000000 0000000 00000000502 14361602300 0023174 0 ustar 00root root 0000000 0000000 """A valid module docstring."""
from .all_import_aux import __all__ as not_dunder_all
from .expected import Expectation
expectation = Expectation()
expect = expectation.expect
__all__ = ('public_func', )
@expect("D103: Missing docstring in public function")
def public_func():
pass
def private_func():
pass
pydocstyle-6.3.0/src/tests/test_cases/all_import_aux.py 0000664 0000000 0000000 00000000060 14361602300 0023365 0 ustar 00root root 0000000 0000000 __all__ = ('this', 'is', 'a', 'helper', 'file')
pydocstyle-6.3.0/src/tests/test_cases/canonical_google_examples.py 0000664 0000000 0000000 00000010120 14361602300 0025525 0 ustar 00root root 0000000 0000000 """A one line summary of the module or program, terminated by a period.
Leave one blank line. The rest of this docstring should contain an
overall description of the module or program. Optionally, it may also
contain a brief description of exported classes and functions and/or usage
examples.
Typical usage example:
foo = ClassFoo()
bar = foo.FunctionBar()
"""
# above: "2.8.2 Modules" section example
# https://google.github.io/styleguide/pyguide.html#382-modules
# Examples from the official "Google Python Style Guide" documentation:
# * As HTML: https://google.github.io/styleguide/pyguide.html
# * Source Markdown:
# https://github.com/google/styleguide/blob/gh-pages/pyguide.md
import os
from .expected import Expectation
expectation = Expectation()
expect = expectation.expect
# module docstring expected violations:
expectation.expected.add((
os.path.normcase(__file__),
"D213: Multi-line docstring summary should start at the second line"))
# "3.8.3 Functions and Methods" section example
# https://google.github.io/styleguide/pyguide.html#383-functions-and-methods
@expect("D213: Multi-line docstring summary should start at the second line",
arg_count=3)
@expect("D401: First line should be in imperative mood "
"(perhaps 'Fetch', not 'Fetches')", arg_count=3)
@expect("D406: Section name should end with a newline "
"('Raises', not 'Raises:')", arg_count=3)
@expect("D406: Section name should end with a newline "
"('Returns', not 'Returns:')", arg_count=3)
@expect("D407: Missing dashed underline after section ('Raises')", arg_count=3)
@expect("D407: Missing dashed underline after section ('Returns')",
arg_count=3)
@expect("D413: Missing blank line after last section ('Raises')", arg_count=3)
def fetch_bigtable_rows(big_table, keys, other_silly_variable=None):
"""Fetches rows from a Bigtable.
Retrieves rows pertaining to the given keys from the Table instance
represented by big_table. Silly things may happen if
other_silly_variable is not None.
Args:
big_table: An open Bigtable Table instance.
keys: A sequence of strings representing the key of each table row
to fetch.
other_silly_variable: Another optional variable, that has a much
longer name than the other args, and which does nothing.
Returns:
A dict mapping keys to the corresponding table row data
fetched. Each row is represented as a tuple of strings. For
example:
{'Serak': ('Rigel VII', 'Preparer'),
'Zim': ('Irk', 'Invader'),
'Lrrr': ('Omicron Persei 8', 'Emperor')}
If a key from the keys argument is missing from the dictionary,
then that row was not found in the table.
Raises:
IOError: An error occurred accessing the bigtable.Table object.
"""
# "3.8.4 Classes" section example
# https://google.github.io/styleguide/pyguide.html#384-classes
@expect("D203: 1 blank line required before class docstring (found 0)")
@expect("D213: Multi-line docstring summary should start at the second line")
@expect("D406: Section name should end with a newline "
"('Attributes', not 'Attributes:')")
@expect("D407: Missing dashed underline after section ('Attributes')")
@expect("D413: Missing blank line after last section ('Attributes')")
class SampleClass:
"""Summary of class here.
Longer class information....
Longer class information....
Attributes:
likes_spam: A boolean indicating if we like SPAM or not.
eggs: An integer count of the eggs we have laid.
"""
@expect("D401: First line should be in imperative mood "
"(perhaps 'Init', not 'Inits')", arg_count=2)
def __init__(self, likes_spam=False):
"""Inits SampleClass with blah."""
if self: # added to avoid NameError when run via @expect decorator
self.likes_spam = likes_spam
self.eggs = 0
@expect("D401: First line should be in imperative mood "
"(perhaps 'Perform', not 'Performs')", arg_count=1)
def public_method(self):
"""Performs operation blah."""
pydocstyle-6.3.0/src/tests/test_cases/canonical_numpy_examples.py 0000664 0000000 0000000 00000012303 14361602300 0025426 0 ustar 00root root 0000000 0000000 """This is the docstring for the example.py module. Modules names should
have short, all-lowercase names. The module name may have underscores if
this improves readability.
Every module should have a docstring at the very top of the file. The
module's docstring may extend over multiple lines. If your docstring does
extend over multiple lines, the closing three quotation marks must be on
a line by itself, preferably preceded by a blank line.
"""
# Example source file from the official "numpydoc docstring guide"
# documentation (with the modification of commenting out all the original
# ``import`` lines, plus adding this note and ``Expectation`` code):
# * As HTML: https://numpydoc.readthedocs.io/en/latest/example.html
# * Source Python:
# https://github.com/numpy/numpydoc/blob/master/doc/example.py
# from __future__ import division, absolute_import, print_function
#
# import os # standard library imports first
#
# Do NOT import using *, e.g. from numpy import *
#
# Import the module using
#
# import numpy
#
# instead or import individual functions as needed, e.g
#
# from numpy import array, zeros
#
# If you prefer the use of abbreviated module names, we suggest the
# convention used by NumPy itself::
#
# import numpy as np
# import matplotlib as mpl
# import matplotlib.pyplot as plt
#
# These abbreviated names are not to be used in docstrings; users must
# be able to paste and execute docstrings after importing only the
# numpy module itself, unabbreviated.
import os
from .expected import Expectation
expectation = Expectation()
expect = expectation.expect
# module docstring expected violations:
expectation.expected.add((
os.path.normcase(__file__),
"D205: 1 blank line required between summary line and description "
"(found 0)"))
expectation.expected.add((
os.path.normcase(__file__),
"D213: Multi-line docstring summary should start at the second line"))
expectation.expected.add((
os.path.normcase(__file__),
"D400: First line should end with a period (not 'd')"))
expectation.expected.add((
os.path.normcase(__file__),
"D404: First word of the docstring should not be `This`"))
expectation.expected.add((
os.path.normcase(__file__),
"D415: First line should end with a period, question mark, or exclamation "
"point (not 'd')"))
@expect("D213: Multi-line docstring summary should start at the second line",
arg_count=3)
@expect("D401: First line should be in imperative mood; try rephrasing "
"(found 'A')", arg_count=3)
@expect("D413: Missing blank line after last section ('Examples')",
arg_count=3)
def foo(var1, var2, long_var_name='hi'):
r"""A one-line summary that does not use variable names.
Several sentences providing an extended description. Refer to
variables using back-ticks, e.g. `var`.
Parameters
----------
var1 : array_like
Array_like means all those objects -- lists, nested lists, etc. --
that can be converted to an array. We can also refer to
variables like `var1`.
var2 : int
The type above can either refer to an actual Python type
(e.g. ``int``), or describe the type of the variable in more
detail, e.g. ``(N,) ndarray`` or ``array_like``.
long_var_name : {'hi', 'ho'}, optional
Choices in brackets, default first when optional.
Returns
-------
type
Explanation of anonymous return value of type ``type``.
describe : type
Explanation of return value named `describe`.
out : type
Explanation of `out`.
type_without_description
Other Parameters
----------------
only_seldom_used_keywords : type
Explanation
common_parameters_listed_above : type
Explanation
Raises
------
BadException
Because you shouldn't have done that.
See Also
--------
numpy.array : Relationship (optional).
numpy.ndarray : Relationship (optional), which could be fairly long, in
which case the line wraps here.
numpy.dot, numpy.linalg.norm, numpy.eye
Notes
-----
Notes about the implementation algorithm (if needed).
This can have multiple paragraphs.
You may include some math:
.. math:: X(e^{j\omega } ) = x(n)e^{ - j\omega n}
And even use a Greek symbol like :math:`\omega` inline.
References
----------
Cite the relevant literature, e.g. [1]_. You may also cite these
references in the notes section above.
.. [1] O. McNoleg, "The integration of GIS, remote sensing,
expert systems and adaptive co-kriging for environmental habitat
modelling of the Highland Haggis using object-oriented, fuzzy-logic
and neural-network techniques," Computers & Geosciences, vol. 22,
pp. 585-588, 1996.
Examples
--------
These are written in doctest format, and should illustrate how to
use the function.
>>> a = [1, 2, 3]
>>> print([x + 3 for x in a])
[4, 5, 6]
>>> print("a\nb")
a
b
"""
# After closing class docstring, there should be one blank line to
# separate following codes (according to PEP257).
# But for function, method and module, there should be no blank lines
# after closing the docstring.
pass
pydocstyle-6.3.0/src/tests/test_cases/canonical_pep257_examples.py 0000664 0000000 0000000 00000002517 14361602300 0025306 0 ustar 00root root 0000000 0000000 """Examples from the pep257 specification."""
# Examples from the official "Python PEP 257 -- Docstring Conventions"
# documentation:
# * As HTML: https://www.python.org/dev/peps/pep-0257
# * Source reST: https://github.com/python/peps/blob/master/pep-0257.txt
from .expected import Expectation
expectation = Expectation()
expect = expectation.expect
# "One-line Docstrings" section example
# https://www.python.org/dev/peps/pep-0257/#id16
def kos_root():
"""Return the pathname of the KOS root directory."""
global _kos_root
if _kos_root:
return _kos_root
# "Multiline-line Docstrings" section example
# https://www.python.org/dev/peps/pep-0257/#id17
@expect("D213: Multi-line docstring summary should start at the second line")
@expect("D405: Section name should be properly capitalized "
"('Keyword Arguments', not 'Keyword arguments')")
@expect("D407: Missing dashed underline after section ('Keyword Arguments')")
@expect("D413: Missing blank line after last section ('Keyword Arguments')")
def complex(real=0.0, imag=0.0):
"""Form a complex number.
Keyword arguments:
real -- the real part (default 0.0)
imag -- the imaginary part (default 0.0)
"""
if imag == 0.0 and real == 0.0:
complex_zero = 0 # added to avoid NameError with @expect decorator
return complex_zero
pydocstyle-6.3.0/src/tests/test_cases/capitalization.py 0000664 0000000 0000000 00000002347 14361602300 0023373 0 ustar 00root root 0000000 0000000 """A valid module docstring."""
from .expected import Expectation
expectation = Expectation()
expect = expectation.expect
@expect("D403: First word of the first line should be properly capitalized "
"('Do', not 'do')")
def not_capitalized():
"""do something."""
# Make sure empty docstrings don't generate capitalization errors.
@expect("D419: Docstring is empty")
def empty_docstring():
""""""
def all_caps():
"""GET the request."""
def non_letter_characters():
"""Create/Edit the doodli-do."""
def more_non_letter_characters():
"""(Un)register the user."""
def even_more_non_letter():
"""'laser' the planet."""
def dash():
"""git-push it."""
def digit_in_word():
"""sha1 the string."""
@expect("D403: First word of the first line should be properly capitalized "
"(\"Don't\", not \"Don'T\")")
def partial_caps():
"""Don'T do that."""
@expect("D403: First word of the first line should be properly capitalized "
"('Return', not 'ReTurn')")
def more_partial_caps():
"""ReTurn the field."""
@expect("D403: First word of the first line should be properly capitalized "
"('Generate', not 'generate')")
def just_one_more_example():
"""generate a function."""
pydocstyle-6.3.0/src/tests/test_cases/comment_after_def_bug.py 0000664 0000000 0000000 00000000505 14361602300 0024650 0 ustar 00root root 0000000 0000000 """Check for a bug in parsing comments after definitions."""
from .expected import Expectation
expectation = Expectation()
expect = expectation.expect
def should_be_ok():
"""Just a function without violations."""
# This is a comment that triggers a bug that causes the previous function
# to generate a D202 error.
pydocstyle-6.3.0/src/tests/test_cases/expected.py 0000664 0000000 0000000 00000001151 14361602300 0022151 0 ustar 00root root 0000000 0000000 class Expectation:
"""Hold expectation for pep257 violations in tests."""
def __init__(self):
self.expected = set()
def expect(self, *args, arg_count=0, func_name=""):
"""Decorator that expects a certain PEP 257 violation."""
# The `arg_count` parameter helps the decorator
# with functions that have positional arguments.
if len(args) == 1:
def decorate(f):
self.expected.add((func_name or f.__name__, args[0]))
f(*[None]*arg_count)
return f
return decorate
self.expected.add(args)
pydocstyle-6.3.0/src/tests/test_cases/functions.py 0000664 0000000 0000000 00000002715 14361602300 0022367 0 ustar 00root root 0000000 0000000 """A valid module docstrings."""
from .expected import Expectation
expectation = Expectation()
expect = expectation.expect
@expect("D201: No blank lines allowed before function docstring (found 1)")
def func_with_space_before():
"""Test a function with space before docstring."""
pass
@expect("D202: No blank lines allowed after function docstring (found 1)")
def func_with_space_after():
"""Test a function with space after docstring."""
pass
def func_with_inner_func_after():
"""Test a function with inner function after docstring."""
def inner():
pass
pass
def func_with_inner_async_func_after():
"""Test a function with inner async function after docstring."""
async def inner():
pass
pass
def fake_decorator(decorated):
"""Fake decorator used to test decorated inner func."""
return decorated
def func_with_inner_decorated_func_after():
"""Test a function with inner decorated function after docstring."""
@fake_decorator
def inner():
pass
pass
def func_with_inner_decorated_async_func_after():
"""Test a function with inner decorated async function after docstring."""
@fake_decorator
async def inner():
pass
pass
def func_with_inner_class_after():
"""Test a function with inner class after docstring."""
class inner():
pass
pass
def func_with_weird_backslash():
"""Test a function with a weird backslash.\
"""
pydocstyle-6.3.0/src/tests/test_cases/multi_line_summary_start.py 0000664 0000000 0000000 00000004005 14361602300 0025504 0 ustar 00root root 0000000 0000000 """Module to check different multi-line docstring flavors."""
from .expected import Expectation
expectation = Expectation()
expect = expectation.expect
_D212 = 'D212: Multi-line docstring summary should start at the first line'
_D213 = 'D213: Multi-line docstring summary should start at the second line'
_D300 = 'D300: Use """triple double quotes""" (found \'\'\'-quotes)'
_D301 = 'D301: Use r""" if any backslashes in a docstring'
@expect(_D212)
def multi_line_starts_second_line():
"""
Summary.
Description.
"""
@expect(_D212)
@expect(_D300)
def multi_line_starts_second_line_single_quote():
'''
Summary.
Description.
'''
@expect(_D212)
def multi_line_starts_second_line_raw():
r"""
Summary.
Description with \backslash\.
"""
@expect(_D212)
@expect(_D301)
def multi_line_starts_second_line_upper_raw():
R"""
Summary.
Description with \backslash\.
"""
@expect(_D212)
@expect(_D300)
def multi_line_starts_second_line_raw_single_quote():
r'''
Summary.
Description with \backslash\.
'''
@expect(_D212)
@expect(_D300)
@expect(_D301)
def multi_line_starts_second_line_upper_raw_single_quote():
R'''
Summary.
Description with \backslash\.
'''
@expect(_D213)
def multi_line_starts_first_line():
"""Summary.
Description.
"""
@expect(_D213)
@expect(_D300)
def multi_line_starts_first_line_single_quote():
'''Summary.
Description.
'''
@expect(_D213)
def multi_line_starts_first_line_raw():
r"""Summary.
Description with \backslash\.
"""
@expect(_D213)
@expect(_D301)
def multi_line_starts_first_line_upper_raw():
R"""Summary.
Description with \backslash\.
"""
@expect(_D213)
@expect(_D300)
def multi_line_starts_first_line_raw_single_quote():
r'''Summary.
Description with \backslash\.
'''
@expect(_D213)
@expect(_D300)
@expect(_D301)
def multi_line_starts_first_line_upper_raw_single_quote():
R'''Summary.
Description with \backslash\.
'''
pydocstyle-6.3.0/src/tests/test_cases/nested_class.py 0000664 0000000 0000000 00000001406 14361602300 0023022 0 ustar 00root root 0000000 0000000 """A valid module docstring."""
from .expected import Expectation
expectation = Expectation()
expect = expectation.expect
expect('PublicClass', 'D101: Missing docstring in public class')
class PublicClass:
expect('PublicNestedClass',
'D106: Missing docstring in public nested class')
class PublicNestedClass:
expect('PublicNestedClassInPublicNestedClass',
'D106: Missing docstring in public nested class')
class PublicNestedClassInPublicNestedClass:
pass
class _PrivateNestedClassInPublicNestedClass:
pass
class _PrivateNestedClass:
class PublicNestedClassInPrivateNestedClass:
pass
class _PrivateNestedClassInPrivateNestedClass:
pass
pydocstyle-6.3.0/src/tests/test_cases/noqa.py 0000664 0000000 0000000 00000001017 14361602300 0021307 0 ustar 00root root 0000000 0000000 # noqa: D400,D415
"""Test case for "# noqa" comments"""
from .expected import Expectation
expectation = Expectation()
expect = expectation.expect
def docstring_bad_ignore_all(): # noqa
"""Runs something"""
pass
def docstring_bad_ignore_one(): # noqa: D400,D401,D415
"""Runs something"""
pass
@expect("D401: First line should be in imperative mood "
"(perhaps 'Run', not 'Runs')")
def docstring_ignore_some_violations_but_catch_D401(): # noqa: E501,D400,D415
"""Runs something"""
pass
pydocstyle-6.3.0/src/tests/test_cases/sections.py 0000664 0000000 0000000 00000030346 14361602300 0022207 0 ustar 00root root 0000000 0000000 """A valid module docstring."""
from .expected import Expectation
expectation = Expectation()
expect = expectation.expect
_D213 = 'D213: Multi-line docstring summary should start at the second line'
_D400 = "D400: First line should end with a period (not '!')"
@expect(_D213)
@expect("D405: Section name should be properly capitalized "
"('Returns', not 'returns')")
def not_capitalized(): # noqa: D416
"""Toggle the gizmo.
returns
-------
A value of some sort.
"""
@expect(_D213)
@expect("D406: Section name should end with a newline "
"('Returns', not 'Returns:')")
def superfluous_suffix(): # noqa: D416
"""Toggle the gizmo.
Returns:
-------
A value of some sort.
"""
@expect(_D213)
@expect("D407: Missing dashed underline after section ('Returns')")
def no_underline(): # noqa: D416
"""Toggle the gizmo.
Returns
A value of some sort.
"""
@expect(_D213)
@expect("D407: Missing dashed underline after section ('Returns')")
@expect("D414: Section has no content ('Returns')")
def no_underline_and_no_description(): # noqa: D416
"""Toggle the gizmo.
Returns
"""
@expect(_D213)
@expect("D410: Missing blank line after section ('Returns')")
@expect("D414: Section has no content ('Returns')")
@expect("D411: Missing blank line before section ('Yields')")
@expect("D414: Section has no content ('Yields')")
def consecutive_sections(): # noqa: D416
"""Toggle the gizmo.
Returns
-------
Yields
------
Raises
------
Questions.
"""
@expect(_D213)
@expect("D408: Section underline should be in the line following the "
"section's name ('Returns')")
def blank_line_before_underline(): # noqa: D416
"""Toggle the gizmo.
Returns
-------
A value of some sort.
"""
@expect(_D213)
@expect("D409: Section underline should match the length of its name "
"(Expected 7 dashes in section 'Returns', got 2)")
def bad_underline_length(): # noqa: D416
"""Toggle the gizmo.
Returns
--
A value of some sort.
"""
@expect(_D213)
@expect("D413: Missing blank line after last section ('Returns')")
def no_blank_line_after_last_section(): # noqa: D416
"""Toggle the gizmo.
Returns
-------
A value of some sort.
"""
@expect(_D213)
@expect("D411: Missing blank line before section ('Returns')")
def no_blank_line_before_section(): # noqa: D416
"""Toggle the gizmo.
The function's description.
Returns
-------
A value of some sort.
"""
@expect(_D213)
@expect("D214: Section is over-indented ('Returns')")
def section_overindented(): # noqa: D416
"""Toggle the gizmo.
Returns
-------
A value of some sort.
"""
@expect(_D213)
@expect("D215: Section underline is over-indented (in section 'Returns')")
def section_underline_overindented(): # noqa: D416
"""Toggle the gizmo.
Returns
-------
A value of some sort.
"""
@expect(_D213)
@expect("D215: Section underline is over-indented (in section 'Returns')")
@expect("D413: Missing blank line after last section ('Returns')")
@expect("D414: Section has no content ('Returns')")
def section_underline_overindented_and_contentless(): # noqa: D416
"""Toggle the gizmo.
Returns
-------
"""
@expect(_D213)
def ignore_non_actual_section(): # noqa: D416
"""Toggle the gizmo.
This is the function's description, which will also specify what it
returns
"""
@expect(_D213)
@expect("D401: First line should be in imperative mood "
"(perhaps 'Return', not 'Returns')")
@expect("D400: First line should end with a period (not 's')")
@expect("D415: First line should end with a period, question "
"mark, or exclamation point (not 's')")
@expect("D205: 1 blank line required between summary line and description "
"(found 0)")
def section_name_in_first_line(): # noqa: D416
"""Returns
-------
A value of some sort.
"""
@expect(_D213)
@expect("D405: Section name should be properly capitalized "
"('Short Summary', not 'Short summary')")
@expect("D412: No blank lines allowed between a section header and its "
"content ('Short Summary')")
@expect("D409: Section underline should match the length of its name "
"(Expected 7 dashes in section 'Returns', got 6)")
@expect("D410: Missing blank line after section ('Returns')")
@expect("D411: Missing blank line before section ('Raises')")
@expect("D406: Section name should end with a newline "
"('Raises', not 'Raises:')")
@expect("D407: Missing dashed underline after section ('Raises')")
def multiple_sections(): # noqa: D416
"""Toggle the gizmo.
Short summary
-------------
This is the function's description, which will also specify what it
returns.
Returns
------
Many many wonderful things.
Raises:
My attention.
"""
@expect(_D213)
def false_positive_section_prefix(): # noqa: D416
"""Toggle the gizmo.
Parameters
----------
attributes_are_fun: attributes for the function.
"""
@expect(_D213)
def section_names_as_parameter_names(): # noqa: D416
"""Toggle the gizmo.
Parameters
----------
notes : list
A list of wonderful notes.
examples: list
A list of horrible examples.
"""
@expect(_D213)
@expect("D414: Section has no content ('Returns')")
def valid_google_style_section(): # noqa: D406, D407
"""Toggle the gizmo.
Args:
note: A random string.
Returns:
Raises:
RandomError: A random error that occurs randomly.
"""
@expect(_D213)
@expect("D416: Section name should end with a colon "
"('Args:', not 'Args')")
def missing_colon_google_style_section(): # noqa: D406, D407
"""Toggle the gizmo.
Args
note: A random string.
"""
@expect("D417: Missing argument descriptions in the docstring "
"(argument(s) y are missing descriptions in "
"'bar' docstring)", func_name="bar")
def _test_nested_functions():
x = 1
def bar(y=2): # noqa: D207, D213, D406, D407
"""Nested function test for docstrings.
Will this work when referencing x?
Args:
x: Test something
that is broken.
"""
print(x)
@expect(_D213)
@expect("D417: Missing argument descriptions in the docstring "
"(argument(s) y are missing descriptions in "
"'test_missing_google_args' docstring)")
def test_missing_google_args(x=1, y=2, _private=3): # noqa: D406, D407
"""Toggle the gizmo.
Args:
x (int): The greatest integer.
"""
class TestGoogle: # noqa: D203
"""Test class."""
def test_method(self, test, another_test, _): # noqa: D213, D407
"""Test a valid args section.
Args:
test: A parameter.
another_test: Another parameter.
"""
def test_detailed_description(self, test, another_test, _): # noqa: D213, D407
"""Test a valid args section.
Args:
test: A parameter.
another_test: Another parameter.
Detailed description.
"""
@expect("D417: Missing argument descriptions in the docstring "
"(argument(s) test, y, z are missing descriptions in "
"'test_missing_args' docstring)", arg_count=5)
def test_missing_args(self, test, x, y, z=3, _private_arg=3): # noqa: D213, D407
"""Test a valid args section.
Args:
x: Another parameter.
"""
@classmethod
@expect("D417: Missing argument descriptions in the docstring "
"(argument(s) test, y, z are missing descriptions in "
"'test_missing_args_class_method' docstring)", arg_count=5)
def test_missing_args_class_method(cls, test, x, y, _, z=3): # noqa: D213, D407
"""Test a valid args section.
Args:
x: Another parameter. The parameter below is missing description.
y:
"""
@staticmethod
@expect("D417: Missing argument descriptions in the docstring "
"(argument(s) a, y, z are missing descriptions in "
"'test_missing_args_static_method' docstring)", arg_count=4)
def test_missing_args_static_method(a, x, y, _test, z=3): # noqa: D213, D407
"""Test a valid args section.
Args:
x: Another parameter.
"""
@staticmethod
@expect("D417: Missing argument descriptions in the docstring "
"(argument(s) a, b are missing descriptions in "
"'test_missing_docstring' docstring)", arg_count=2)
def test_missing_docstring(a, b): # noqa: D213, D407
"""Test a valid args section.
Args:
a:
"""
@staticmethod
def test_hanging_indent(skip, verbose): # noqa: D213, D407
"""Do stuff.
Args:
skip (:attr:`.Skip`):
Lorem ipsum dolor sit amet, consectetur adipiscing elit.
Etiam at tellus a tellus faucibus maximus. Curabitur tellus
mauris, semper id vehicula ac, feugiat ut tortor.
verbose (bool):
If True, print out as much information as possible.
If False, print out concise "one-liner" information.
"""
@expect(_D213)
@expect("D417: Missing argument descriptions in the docstring "
"(argument(s) y are missing descriptions in "
"'test_missing_numpy_args' docstring)")
def test_missing_numpy_args(_private_arg=0, x=1, y=2): # noqa: D406, D407
"""Toggle the gizmo.
Parameters
----------
x : int
The greatest integer in the history \
of the entire world.
"""
class TestNumpy: # noqa: D203
"""Test class."""
def test_method(self, test, another_test, z, _, x=1, y=2, _private_arg=1): # noqa: D213, D407
"""Test a valid args section.
Some long string with a \
line continuation.
Parameters
----------
test, another_test
Some parameters without type.
z : some parameter with a very long type description that requires a \
line continuation.
But no further description.
x, y : int
Some integer parameters.
"""
@expect("D417: Missing argument descriptions in the docstring "
"(argument(s) test, y, z are missing descriptions in "
"'test_missing_args' docstring)", arg_count=5)
def test_missing_args(self, test, x, y, z=3, t=1, _private=0): # noqa: D213, D407
"""Test a valid args section.
Parameters
----------
x, t : int
Some parameters.
"""
@classmethod
@expect("D417: Missing argument descriptions in the docstring "
"(argument(s) test, y, z are missing descriptions in "
"'test_missing_args_class_method' docstring)", arg_count=4)
def test_missing_args_class_method(cls, test, x, y, z=3): # noqa: D213, D407
"""Test a valid args section.
Parameters
----------
z
x
Another parameter. The parameters y, test below are
missing descriptions. The parameter z above is also missing
a description.
y
test
"""
@staticmethod
@expect("D417: Missing argument descriptions in the docstring "
"(argument(s) a, z are missing descriptions in "
"'test_missing_args_static_method' docstring)", arg_count=3)
def test_missing_args_static_method(a, x, y, z=3, t=1): # noqa: D213, D407
"""Test a valid args section.
Parameters
----------
x, y
Another parameter.
t : int
Yet another parameter.
"""
@staticmethod
def test_mixing_numpy_and_google(danger): # noqa: D213
"""Repro for #388.
Parameters
----------
danger
Zoneeeeee!
"""
class TestIncorrectIndent: # noqa: D203
"""Test class."""
@expect("D417: Missing argument descriptions in the docstring "
"(argument(s) y are missing descriptions in "
"'test_incorrect_indent' docstring)", arg_count=3)
def test_incorrect_indent(self, x=1, y=2): # noqa: D207, D213, D407
"""Reproducing issue #437.
Testing this incorrectly indented docstring.
Args:
x: Test argument.
"""
pydocstyle-6.3.0/src/tests/test_cases/superfluous_quotes.py 0000664 0000000 0000000 00000000701 14361602300 0024344 0 ustar 00root root 0000000 0000000 """A valid module docstring."""
from .expected import Expectation
expectation = Expectation()
expect = expectation.expect
def correct_func():
"""Three quotes in both sides."""
@expect('D300: Use """triple double quotes""" (found """"-quotes)')
def extra_opening():
""""Extra quote on the left."""
@expect('D300: Use """triple double quotes""" (found """""-quotes)')
def two_extra_opening():
"""""Two extra quotes on the left."""
pydocstyle-6.3.0/src/tests/test_cases/test.py 0000664 0000000 0000000 00000027575 14361602300 0021351 0 ustar 00root root 0000000 0000000 # No docstring, so we can test D100
from functools import wraps
import os
from .expected import Expectation
from typing import overload
expectation = Expectation()
expect = expectation.expect
expect('class_', 'D101: Missing docstring in public class')
class class_:
expect('meta', 'D419: Docstring is empty')
class meta:
""""""
@expect('D102: Missing docstring in public method')
def method(self=None):
pass
def _ok_since_private(self=None):
pass
@overload
def overloaded_method(self, a: int) -> str:
...
@overload
def overloaded_method(self, a: str) -> str:
"""Foo bar documentation."""
...
def overloaded_method(a):
"""Foo bar documentation."""
return str(a)
expect('overloaded_method',
"D418: Function/ Method decorated with @overload"
" shouldn't contain a docstring")
@property
def foo(self):
"""The foo of the thing, which isn't in imperitive mood."""
return "hello"
@expect('D102: Missing docstring in public method')
def __new__(self=None):
pass
@expect('D107: Missing docstring in __init__')
def __init__(self=None):
pass
@expect('D105: Missing docstring in magic method')
def __str__(self=None):
pass
@expect('D102: Missing docstring in public method')
def __call__(self=None, x=None, y=None, z=None):
pass
@expect('D419: Docstring is empty')
def function():
""" """
def ok_since_nested():
pass
@expect('D419: Docstring is empty')
def nested():
''
def function_with_nesting():
"""Foo bar documentation."""
@overload
def nested_overloaded_func(a: int) -> str:
...
@overload
def nested_overloaded_func(a: str) -> str:
"""Foo bar documentation."""
...
def nested_overloaded_func(a):
"""Foo bar documentation."""
return str(a)
expect('nested_overloaded_func',
"D418: Function/ Method decorated with @overload"
" shouldn't contain a docstring")
@overload
def overloaded_func(a: int) -> str:
...
@overload
def overloaded_func(a: str) -> str:
"""Foo bar documentation."""
...
def overloaded_func(a):
"""Foo bar documentation."""
return str(a)
expect('overloaded_func',
"D418: Function/ Method decorated with @overload"
" shouldn't contain a docstring")
@expect('D200: One-line docstring should fit on one line with quotes '
'(found 3)')
@expect('D212: Multi-line docstring summary should start at the first line')
def asdlkfasd():
"""
Wrong.
"""
@expect('D201: No blank lines allowed before function docstring (found 1)')
def leading_space():
"""Leading space."""
@expect('D202: No blank lines allowed after function docstring (found 1)')
def trailing_space():
"""Leading space."""
pass
@expect('D201: No blank lines allowed before function docstring (found 1)')
@expect('D202: No blank lines allowed after function docstring (found 1)')
def trailing_and_leading_space():
"""Trailing and leading space."""
pass
expect('LeadingSpaceMissing',
'D203: 1 blank line required before class docstring (found 0)')
class LeadingSpaceMissing:
"""Leading space missing."""
expect('WithLeadingSpace',
'D211: No blank lines allowed before class docstring (found 1)')
class WithLeadingSpace:
"""With leading space."""
expect('TrailingSpace',
'D204: 1 blank line required after class docstring (found 0)')
expect('TrailingSpace',
'D211: No blank lines allowed before class docstring (found 1)')
class TrailingSpace:
"""TrailingSpace."""
pass
expect('LeadingAndTrailingSpaceMissing',
'D203: 1 blank line required before class docstring (found 0)')
expect('LeadingAndTrailingSpaceMissing',
'D204: 1 blank line required after class docstring (found 0)')
class LeadingAndTrailingSpaceMissing:
"""Leading and trailing space missing."""
pass
@expect('D205: 1 blank line required between summary line and description '
'(found 0)')
@expect('D213: Multi-line docstring summary should start at the second line')
def multi_line_zero_separating_blanks():
"""Summary.
Description.
"""
@expect('D205: 1 blank line required between summary line and description '
'(found 2)')
@expect('D213: Multi-line docstring summary should start at the second line')
def multi_line_two_separating_blanks():
"""Summary.
Description.
"""
@expect('D213: Multi-line docstring summary should start at the second line')
def multi_line_one_separating_blanks():
"""Summary.
Description.
"""
@expect('D207: Docstring is under-indented')
@expect('D213: Multi-line docstring summary should start at the second line')
def asdfsdf():
"""Summary.
Description.
"""
@expect('D207: Docstring is under-indented')
@expect('D213: Multi-line docstring summary should start at the second line')
def asdsdfsdffsdf():
"""Summary.
Description.
"""
@expect('D208: Docstring is over-indented')
@expect('D213: Multi-line docstring summary should start at the second line')
def asdfsdsdf24():
"""Summary.
Description.
"""
@expect('D208: Docstring is over-indented')
@expect('D213: Multi-line docstring summary should start at the second line')
def asdfsdsdfsdf24():
"""Summary.
Description.
"""
@expect('D208: Docstring is over-indented')
@expect('D213: Multi-line docstring summary should start at the second line')
def asdfsdfsdsdsdfsdf24():
"""Summary.
Description.
"""
@expect('D209: Multi-line docstring closing quotes should be on a separate '
'line')
@expect('D213: Multi-line docstring summary should start at the second line')
def asdfljdf24():
"""Summary.
Description."""
@expect('D210: No whitespaces allowed surrounding docstring text')
def endswith():
"""Whitespace at the end. """
@expect('D210: No whitespaces allowed surrounding docstring text')
def around():
""" Whitespace at everywhere. """
@expect('D210: No whitespaces allowed surrounding docstring text')
@expect('D213: Multi-line docstring summary should start at the second line')
def multiline():
""" Whitespace at the beginning.
This is the end.
"""
@expect('D300: Use """triple double quotes""" (found \'\'\'-quotes)')
def triple_single_quotes_raw():
r'''Summary.'''
@expect('D300: Use """triple double quotes""" (found \'\'\'-quotes)')
def triple_single_quotes_raw_uppercase():
R'''Summary.'''
@expect('D300: Use """triple double quotes""" (found \'-quotes)')
def single_quotes_raw():
r'Summary.'
@expect('D300: Use """triple double quotes""" (found \'-quotes)')
def single_quotes_raw_uppercase():
R'Summary.'
@expect('D300: Use """triple double quotes""" (found \'-quotes)')
@expect('D301: Use r""" if any backslashes in a docstring')
def single_quotes_raw_uppercase_backslash():
R'Sum\mary.'
@expect('D301: Use r""" if any backslashes in a docstring')
def double_quotes_backslash():
"""Sum\\mary."""
@expect('D301: Use r""" if any backslashes in a docstring')
def double_quotes_backslash_uppercase():
R"""Sum\\mary."""
@expect('D213: Multi-line docstring summary should start at the second line')
def exceptions_of_D301():
"""Exclude some backslashes from D301.
In particular, line continuations \
and unicode literals \u0394 and \N{GREEK CAPITAL LETTER DELTA}.
They are considered to be intentionally unescaped.
"""
@expect("D400: First line should end with a period (not 'y')")
@expect("D415: First line should end with a period, question mark, "
"or exclamation point (not 'y')")
def lwnlkjl():
"""Summary"""
@expect("D401: First line should be in imperative mood "
"(perhaps 'Return', not 'Returns')")
def liouiwnlkjl():
"""Returns foo."""
@expect("D401: First line should be in imperative mood; try rephrasing "
"(found 'Constructor')")
def sdgfsdg23245():
"""Constructor for a foo."""
@expect("D401: First line should be in imperative mood; try rephrasing "
"(found 'Constructor')")
def sdgfsdg23245777():
"""Constructor."""
@expect('D402: First line should not be the function\'s "signature"')
def foobar():
"""Signature: foobar()."""
@expect('D213: Multi-line docstring summary should start at the second line')
def new_209():
"""First line.
More lines.
"""
pass
@expect('D213: Multi-line docstring summary should start at the second line')
def old_209():
"""One liner.
Multi-line comments. OK to have extra blank line
"""
@expect("D103: Missing docstring in public function")
def oneliner_d102(): return
@expect("D400: First line should end with a period (not 'r')")
@expect("D415: First line should end with a period, question mark,"
" or exclamation point (not 'r')")
def oneliner_withdoc(): """One liner"""
def ignored_decorator(func): # noqa: D400,D401,D415
"""Runs something"""
func()
pass
def decorator_for_test(func): # noqa: D400,D401,D415
"""Runs something"""
func()
pass
@ignored_decorator
def oneliner_ignored_decorator(): """One liner"""
@decorator_for_test
@expect("D400: First line should end with a period (not 'r')")
@expect("D415: First line should end with a period, question mark,"
" or exclamation point (not 'r')")
def oneliner_with_decorator_expecting_errors(): """One liner"""
@decorator_for_test
def valid_oneliner_with_decorator(): """One liner."""
@expect("D207: Docstring is under-indented")
@expect('D213: Multi-line docstring summary should start at the second line')
def docstring_start_in_same_line(): """First Line.
Second Line
"""
def function_with_lambda_arg(x=lambda y: y):
"""Wrap the given lambda."""
@expect('D213: Multi-line docstring summary should start at the second line')
def a_following_valid_function(x=None):
"""Check for a bug where the previous function caused an assertion.
The assertion was caused in the next function, so this one is necessary.
"""
def outer_function():
"""Do something."""
def inner_function():
"""Do inner something."""
return 0
@expect("D400: First line should end with a period (not 'g')")
@expect("D401: First line should be in imperative mood "
"(perhaps 'Run', not 'Runs')")
@expect("D415: First line should end with a period, question mark, "
"or exclamation point (not 'g')")
def docstring_bad():
"""Runs something"""
pass
def docstring_bad_ignore_all(): # noqa
"""Runs something"""
pass
def docstring_bad_ignore_one(): # noqa: D400,D401,D415
"""Runs something"""
pass
@expect("D401: First line should be in imperative mood "
"(perhaps 'Run', not 'Runs')")
def docstring_ignore_some_violations_but_catch_D401(): # noqa: E501,D400,D415
"""Runs something"""
pass
@expect(
"D401: First line should be in imperative mood "
"(perhaps 'Initiate', not 'Initiates')"
)
def docstring_initiates():
"""Initiates the process."""
@expect(
"D401: First line should be in imperative mood "
"(perhaps 'Initialize', not 'Initializes')"
)
def docstring_initializes():
"""Initializes the process."""
@wraps(docstring_bad_ignore_one)
def bad_decorated_function():
"""Bad (E501) but decorated"""
pass
def valid_google_string(): # noqa: D400
"""Test a valid something!"""
@expect("D415: First line should end with a period, question mark, "
"or exclamation point (not 'g')")
def bad_google_string(): # noqa: D400
"""Test a valid something"""
# This reproduces a bug where an AttributeError was raised when parsing class
# parameters as functions under the Google / NumPy conventions.
class Blah: # noqa: D203,D213
"""A Blah.
Parameters
----------
x : int
"""
def __init__(self, x):
pass
expect(os.path.normcase(__file__ if __file__[-1] != 'c' else __file__[:-1]),
'D100: Missing docstring in public module')
pydocstyle-6.3.0/src/tests/test_cases/unicode_literals.py 0000664 0000000 0000000 00000000271 14361602300 0023677 0 ustar 00root root 0000000 0000000 """A module."""
from .expected import Expectation
expectation = Expectation()
expect = expectation.expect
def with_unicode_docstring_without_u():
r"""Check unicode: \u2611."""
pydocstyle-6.3.0/src/tests/test_decorators.py 0000664 0000000 0000000 00000016557 14361602300 0021437 0 ustar 00root root 0000000 0000000 """Unit test for pydocstyle module decorator handling.
Use tox or pytest to run the test suite.
"""
import io
import textwrap
from pydocstyle import parser, checker
__all__ = ()
class TestParser:
"""Check parsing of Python source code."""
def test_parse_class_single_decorator(self):
"""Class decorator is recorded in class instance."""
code = textwrap.dedent("""\
@first_decorator
class Foo:
pass
""")
module = checker.parse(io.StringIO(code), 'dummy.py')
decorators = module.children[0].decorators
assert 1 == len(decorators)
assert 'first_decorator' == decorators[0].name
assert '' == decorators[0].arguments
def test_parse_class_decorators(self):
"""Class decorators are accumulated together with their arguments."""
code = textwrap.dedent("""\
@first_decorator
@second.decorator(argument)
@third.multi.line(
decorator,
key=value,
)
class Foo:
pass
""")
module = checker.parse(io.StringIO(code), 'dummy.py')
defined_class = module.children[0]
decorators = defined_class.decorators
assert 3 == len(decorators)
assert 'first_decorator' == decorators[0].name
assert '' == decorators[0].arguments
assert 'second.decorator' == decorators[1].name
assert 'argument' == decorators[1].arguments
assert 'third.multi.line' == decorators[2].name
assert 'decorator,key=value,' == decorators[2].arguments
def test_parse_class_nested_decorator(self):
"""Class decorator is recorded even for nested classes."""
code = textwrap.dedent("""\
@parent_decorator
class Foo:
pass
@first_decorator
class NestedClass:
pass
""")
module = checker.parse(io.StringIO(code), 'dummy.py')
nested_class = module.children[0].children[0]
decorators = nested_class.decorators
assert 1 == len(decorators)
assert 'first_decorator' == decorators[0].name
assert '' == decorators[0].arguments
def test_parse_method_single_decorator(self):
"""Method decorators are accumulated."""
code = textwrap.dedent("""\
class Foo:
@first_decorator
def method(self):
pass
""")
module = checker.parse(io.StringIO(code), 'dummy.py')
defined_class = module.children[0]
decorators = defined_class.children[0].decorators
assert 1 == len(decorators)
assert 'first_decorator' == decorators[0].name
assert '' == decorators[0].arguments
def test_parse_method_decorators(self):
"""Multiple method decorators are accumulated along with their args."""
code = textwrap.dedent("""\
class Foo:
@first_decorator
@second.decorator(argument)
@third.multi.line(
decorator,
key=value,
)
def method(self):
pass
""")
module = checker.parse(io.StringIO(code), 'dummy.py')
defined_class = module.children[0]
decorators = defined_class.children[0].decorators
assert 3 == len(decorators)
assert 'first_decorator' == decorators[0].name
assert '' == decorators[0].arguments
assert 'second.decorator' == decorators[1].name
assert 'argument' == decorators[1].arguments
assert 'third.multi.line' == decorators[2].name
assert 'decorator,key=value,' == decorators[2].arguments
def test_parse_function_decorator(self):
"""A function decorator is also accumulated."""
code = textwrap.dedent("""\
@first_decorator
def some_method(self):
pass
""")
module = checker.parse(io.StringIO(code), 'dummy.py')
decorators = module.children[0].decorators
assert 1 == len(decorators)
assert 'first_decorator' == decorators[0].name
assert '' == decorators[0].arguments
def test_parse_async_function_decorator(self):
"""Decorators for async functions are also accumulated."""
code = textwrap.dedent("""\
@first_decorator
async def some_method(self):
pass
""")
module = checker.parse(io.StringIO(code), 'dummy.py')
decorators = module.children[0].decorators
assert 1 == len(decorators)
assert 'first_decorator' == decorators[0].name
assert '' == decorators[0].arguments
def test_parse_method_nested_decorator(self):
"""Method decorators are accumulated for nested methods."""
code = textwrap.dedent("""\
class Foo:
@parent_decorator
def method(self):
@first_decorator
def nested_method(arg):
pass
""")
module = checker.parse(io.StringIO(code), 'dummy.py')
defined_class = module.children[0]
decorators = defined_class.children[0].children[0].decorators
assert 1 == len(decorators)
assert 'first_decorator' == decorators[0].name
assert '' == decorators[0].arguments
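# For orientation, a rough sketch of the parse-tree shape the assertions in
# TestParser rely on (a summary of what the tests above show, not an
# exhaustive description of the parser API):
#
#   module = checker.parse(io.StringIO(code), 'dummy.py')
#   module.children         -> top-level classes and functions
#   definition.children     -> nested definitions (methods, inner functions)
#   definition.decorators   -> list of parser.Decorator objects, each carrying
#                              a .name and the raw .arguments text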
class TestMethod:
"""Unit test for Method class."""
def makeMethod(self, name='someMethodName'):
"""Return a simple method instance."""
children = []
dunder_all = ['ClassName']
source = textwrap.dedent("""\
class ClassName:
def %s(self):
""" % (name))
module = parser.Module('module_name', source, 0, 1, [],
'Docstring for module', [], None,
dunder_all, None, None, '')
cls = parser.Class('ClassName', source, 0, 1, [],
'Docstring for class', children, [], module, '')
return parser.Method(name, source, 0, 1, [],
'Docstring for method', children, [], cls, '')
def test_is_public_normal(self):
"""Test that methods are normally public, even if decorated."""
method = self.makeMethod('methodName')
method.decorators = [parser.Decorator('some_decorator', [])]
assert method.is_public
def test_is_public_setter(self):
"""Test that setter methods are considered private."""
method = self.makeMethod('methodName')
method.decorators = [
parser.Decorator('some_decorator', []),
parser.Decorator('methodName.setter', []),
]
assert not method.is_public
def test_is_public_deleter(self):
"""Test that deleter methods are also considered private."""
method = self.makeMethod('methodName')
method.decorators = [
parser.Decorator('methodName.deleter', []),
parser.Decorator('another_decorator', []),
]
assert not method.is_public
def test_is_public_trick(self):
"""Test that common prefix does not necessarily indicate private."""
method = self.makeMethod("foo")
method.decorators = [
parser.Decorator('foobar', []),
parser.Decorator('foobar.baz', []),
]
assert method.is_public
pydocstyle-6.3.0/src/tests/test_definitions.py 0000664 0000000 0000000 00000003207 14361602300 0021571 0 ustar 00root root 0000000 0000000 """Old parser tests."""
import os
import re
import pytest
from pydocstyle.violations import Error, ErrorRegistry
from pydocstyle.checker import check
from pydocstyle.config import ConfigurationParser
DEFAULT_PROPERTY_DECORATORS = ConfigurationParser.DEFAULT_PROPERTY_DECORATORS
@pytest.mark.parametrize('test_case', [
'test',
'unicode_literals',
'nested_class',
'capitalization',
'comment_after_def_bug',
'multi_line_summary_start',
'all_import',
'all_import_as',
'superfluous_quotes',
'noqa',
'sections',
'functions',
'canonical_google_examples',
'canonical_numpy_examples',
'canonical_pep257_examples',
])
def test_complex_file(test_case):
"""Run domain-specific tests from test.py file."""
case_module = __import__(f'test_cases.{test_case}',
globals=globals(),
locals=locals(),
fromlist=['expectation'],
level=1)
test_case_dir = os.path.normcase(os.path.dirname(__file__))
test_case_file = os.path.join(test_case_dir,
'test_cases',
test_case + '.py')
results = list(
check(
[test_case_file],
select=set(ErrorRegistry.get_error_codes()),
ignore_decorators=re.compile('wraps|ignored_decorator'),
property_decorators=DEFAULT_PROPERTY_DECORATORS,
)
)
for error in results:
assert isinstance(error, Error)
results = {(e.definition.name, e.message) for e in results}
assert case_module.expectation.expected == results
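# A minimal sketch (illustrative, not an actual module in test_cases/) of the
# shape the parametrized case modules above follow: each builds an Expectation
# and registers, via @expect, the errors that checking that very file should
# produce.
#
#     """A module."""
#     from .expected import Expectation
#
#     expectation = Expectation()
#     expect = expectation.expect
#
#     @expect('D103: Missing docstring in public function')
#     def undocumented_function():
#         pass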
pydocstyle-6.3.0/src/tests/test_integration.py 0000664 0000000 0000000 00000131515 14361602300 0021605 0 ustar 00root root 0000000 0000000 """Use tox or pytest to run the test-suite."""
from collections import namedtuple
import os
import shlex
import shutil
import pytest
import pathlib
import tempfile
import textwrap
import subprocess
import sys
from unittest import mock
from pydocstyle import checker, violations
__all__ = ()
class SandboxEnv:
"""An isolated environment where pydocstyle can be run.
Since running pydocstyle as a script is affected by local config files,
it's important that tests run in an isolated environment. This class
should be used as a context manager and offers utility methods for adding
files to the environment and changing the environment's configuration.
"""
Result = namedtuple('Result', ('out', 'err', 'code'))
def __init__(
self,
script_name='pydocstyle',
section_name='pydocstyle',
config_name='tox.ini',
):
"""Initialize the object."""
self.tempdir = None
self.script_name = script_name
self.section_name = section_name
self.config_name = config_name
def write_config(self, prefix='', name=None, **kwargs):
"""Change an environment config file.
Writes the given options to `name` (default: the environment's
`config_name`, i.e. `tox.ini` or `pyproject.toml`) under `tempdir/prefix`.
If the given path prefix does not exist, it is created.
"""
base = os.path.join(self.tempdir, prefix) if prefix else self.tempdir
if not os.path.isdir(base):
self.makedirs(base)
name = self.config_name if name is None else name
if name.endswith('.toml'):
def convert_value(val):
return (
repr(val).lower()
if isinstance(val, bool)
else repr(val)
)
else:
def convert_value(val):
return val
with open(os.path.join(base, name), 'wt') as conf:
conf.write(f"[{self.section_name}]\n")
for k, v in kwargs.items():
conf.write("{} = {}\n".format(
k.replace('_', '-'), convert_value(v)
))
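# For orientation (a sketch derived from convert_value above; spacing is
# illustrative): write_config(ignore='D100', inherit=False) produces
#
#   tox.ini flavour:         [pydocstyle]
#                            ignore = D100
#                            inherit = False
#
#   pyproject.toml flavour:  [tool.pydocstyle]
#                            ignore = 'D100'
#                            inherit = false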
def open(self, path, *args, **kwargs):
"""Open a file in the environment.
The file path should be relative to the base of the environment.
"""
return open(os.path.join(self.tempdir, path), *args, **kwargs)
def get_path(self, name, prefix=''):
return os.path.join(self.tempdir, prefix, name)
def makedirs(self, path, *args, **kwargs):
"""Create a directory in a path relative to the environment base."""
os.makedirs(os.path.join(self.tempdir, path), *args, **kwargs)
def invoke(self, args="", target=None):
"""Run pydocstyle on the environment base folder with the given args.
If `target` is not None, will run pydocstyle on `target` instead of
the environment base folder.
"""
run_target = self.tempdir if target is None else \
os.path.join(self.tempdir, target)
cmd = shlex.split("{} {} {}"
.format(self.script_name, run_target, args),
posix=False)
p = subprocess.Popen(cmd,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = p.communicate()
return self.Result(out=out.decode('utf-8'),
err=err.decode('utf-8'),
code=p.returncode)
def __enter__(self):
self.tempdir = tempfile.mkdtemp()
# Make sure we won't be affected by other config files
self.write_config()
return self
def __exit__(self, *args, **kwargs):
shutil.rmtree(self.tempdir)
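# Illustrative helper (called by no test): shows the SandboxEnv workflow that
# the tests below drive through the `env` fixture. The file content and the
# config value are made up for demonstration, and invoking the sandbox assumes
# the `pydocstyle` console script is installed, as the `install_package`
# fixture below arranges for the real tests.
def _sandbox_usage_sketch():
    """Show the typical SandboxEnv workflow (illustrative only)."""
    with SandboxEnv() as sandbox:
        # A module whose function lacks a docstring: violates D100 and D103.
        with sandbox.open('example.py', 'wt') as fh:
            fh.write('def foo():\n    pass\n')
        # Hide D100 via the sandbox's tox.ini; D103 should still be reported.
        sandbox.write_config(ignore='D100')
        result = sandbox.invoke()
        return result.code, result.out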
@pytest.fixture(scope="module")
def install_package(request):
"""Install the package in development mode for the tests.
This is so we can run the integration tests on the installed console
script.
"""
cwd = os.path.join(os.path.dirname(__file__), '..', '..')
subprocess.check_call(
[sys.executable, "-m", "pip", "install", "-e", "."], cwd=cwd
)
yield
subprocess.check_call(
[sys.executable, "-m", "pip", "uninstall", "-y", "pydocstyle"], cwd=cwd
)
@pytest.fixture(scope="function", params=['ini', 'toml'])
def env(request):
"""Add a testing environment to a test method."""
sandbox_settings = {
'ini': {
'section_name': 'pydocstyle',
'config_name': 'tox.ini',
},
'toml': {
'section_name': 'tool.pydocstyle',
'config_name': 'pyproject.toml',
},
}[request.param]
with SandboxEnv(**sandbox_settings) as test_env:
yield test_env
pytestmark = pytest.mark.usefixtures("install_package")
def parse_errors(err):
"""Parse `err` to a dictionary of {filename: error_codes}.
This is for test purposes only; it assumes all file basenames are unique.
"""
result = {}
py_ext = '.py'
lines = err.split('\n')
while lines:
curr_line = lines.pop(0)
filename = curr_line[:curr_line.find(py_ext) + len(py_ext)]
if lines:
err_line = lines.pop(0).strip()
err_code = err_line.split(':')[0]
basename = os.path.basename(filename)
result.setdefault(basename, set()).add(err_code)
return result
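# Sketch of the mapping parse_errors produces. The input below is illustrative
# only, shaped like pydocstyle's "<file>:<line> ..." / "    <code>: ..." output
# pairs rather than captured from a real run:
#
#   parse_errors("pkg/a.py:1 at module level:\n        D100: Missing docstring\n")
#   -> {'a.py': {'D100'}}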
def test_pep257_conformance():
"""Test that we conform to PEP 257."""
base_dir = (pathlib.Path(__file__).parent / '..').resolve()
excluded = base_dir / 'tests' / 'test_cases'
src_files = (str(path) for path in base_dir.glob('**/*.py')
if excluded not in path.parents)
ignored = {'D104', 'D105'}
select = violations.conventions.pep257 - ignored
errors = list(checker.check(src_files, select=select))
assert errors == [], errors
def test_ignore_list():
"""Test that `ignore`d errors are not reported in the API."""
function_to_check = textwrap.dedent('''
def function_with_bad_docstring(foo):
""" does spacinwithout a period in the end
no blank line after one-liner is bad. Also this - """
return foo
''')
expected_error_codes = {'D100', 'D400', 'D401', 'D205', 'D209', 'D210',
'D403', 'D415', 'D213'}
mock_open = mock.mock_open(read_data=function_to_check)
from pydocstyle import checker
with mock.patch.object(
checker.tk, 'open', mock_open, create=True):
# Passing a blank ignore here explicitly otherwise
# checkers takes the pep257 ignores by default.
errors = tuple(checker.check(['filepath'], ignore={}))
error_codes = {error.code for error in errors}
assert error_codes == expected_error_codes
# We need to recreate the mock, otherwise the read file is empty
mock_open = mock.mock_open(read_data=function_to_check)
with mock.patch.object(
checker.tk, 'open', mock_open, create=True):
ignored = {'D100', 'D202', 'D213'}
errors = tuple(checker.check(['filepath'], ignore=ignored))
error_codes = {error.code for error in errors}
assert error_codes == expected_error_codes - ignored
def test_skip_errors():
"""Test that `ignore`d errors are not reported in the API."""
function_to_check = textwrap.dedent('''
def function_with_bad_docstring(foo): # noqa: D400, D401, D403, D415
""" does spacinwithout a period in the end
no blank line after one-liner is bad. Also this - """
return foo
''')
expected_error_codes = {'D100', 'D205', 'D209', 'D210', 'D213'}
mock_open = mock.mock_open(read_data=function_to_check)
from pydocstyle import checker
with mock.patch.object(
checker.tk, 'open', mock_open, create=True):
# Passing a blank ignore here explicitly otherwise
# checkers takes the pep257 ignores by default.
errors = tuple(checker.check(['filepath'], ignore={}))
error_codes = {error.code for error in errors}
assert error_codes == expected_error_codes
skipped_error_codes = {'D400', 'D401', 'D403', 'D415'}
# We need to recreate the mock, otherwise the read file is empty
mock_open = mock.mock_open(read_data=function_to_check)
with mock.patch.object(
checker.tk, 'open', mock_open, create=True):
errors = tuple(checker.check(['filepath'], ignore={},
ignore_inline_noqa=True))
error_codes = {error.code for error in errors}
assert error_codes == expected_error_codes | skipped_error_codes
def test_run_as_named_module():
"""Test that pydocstyle can be run as a "named module".
This means that the following should run pydocstyle:
python -m pydocstyle
"""
# Add --match='' so that no files are actually checked (to make sure that
# the return code is 0 and to reduce execution time).
cmd = [sys.executable, "-m", "pydocstyle", "--match=''"]
p = subprocess.Popen(cmd,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = p.communicate()
assert p.returncode == 0, out.decode('utf-8') + err.decode('utf-8')
def test_config_file(env):
"""Test that options are correctly loaded from a config file.
This test creates a temporary directory with two files in it: a
Python file that has two violations (D100 and D103) and a config
file (tox.ini). This test alternates settings in the config file and checks
that we give the correct output.
"""
with env.open('example.py', 'wt') as example:
example.write(textwrap.dedent("""\
def foo():
pass
"""))
env.write_config(ignore='D100')
out, err, code = env.invoke()
assert code == 1
assert 'D100' not in out
assert 'D103' in out
env.write_config(ignore='')
out, err, code = env.invoke()
assert code == 1
assert 'D100' in out
assert 'D103' in out
env.write_config(ignore='D100,D103')
out, err, code = env.invoke()
assert code == 0
assert 'D100' not in out
assert 'D103' not in out
env.write_config(ignore='D10')
_, err, code = env.invoke()
assert code == 0
assert 'D100' not in err
assert 'D103' not in err
def test_sectionless_config_file(env):
"""Test that config files without a valid section name issue a warning."""
with env.open('config.ini', 'wt') as conf:
conf.write('[pdcstl]')
config_path = conf.name
_, err, code = env.invoke(f'--config={config_path}')
assert code == 0
assert 'Configuration file does not contain a pydocstyle section' in err
with env.open('example.py', 'wt') as example:
example.write(textwrap.dedent("""\
def foo():
pass
"""))
with env.open('tox.ini', 'wt') as conf:
conf.write('[pdcstl]\n')
conf.write('ignore = D100')
out, err, code = env.invoke()
assert code == 1
assert 'D100' in out
assert 'file does not contain a pydocstyle section' not in err
@pytest.mark.parametrize(
# Don't parametrize over 'pyproject.toml'
# since this test applies only to '.ini' files
'env', ['ini'], indirect=True
)
def test_multiple_lined_config_file(env):
"""Test that .ini files with multi-lined entries are parsed correctly."""
with env.open('example.py', 'wt') as example:
example.write(textwrap.dedent("""\
class Foo(object):
"Doc string"
def foo():
pass
"""))
select_string = ('D100,\n'
' #D103,\n'
' D204, D300 # Just remember - don\'t check D103!')
env.write_config(select=select_string)
out, err, code = env.invoke()
assert code == 1
assert 'D100' in out
assert 'D204' in out
assert 'D300' in out
assert 'D103' not in out
@pytest.mark.parametrize(
# Don't parametrize over 'tox.ini' since
# this test applies only to '.toml' files
'env', ['toml'], indirect=True
)
def test_accepts_select_error_code_list(env):
"""Test that .ini files with multi-lined entries are parsed correctly."""
with env.open('example.py', 'wt') as example:
example.write(textwrap.dedent("""\
class Foo(object):
"Doc string"
def foo():
pass
"""))
env.write_config(select=['D100', 'D204', 'D300'])
out, err, code = env.invoke()
assert code == 1
assert 'D100' in out
assert 'D204' in out
assert 'D300' in out
assert 'D103' not in out
def test_config_path(env):
"""Test that options are correctly loaded from a specific config file.
Make sure that a config file passed via --config is actually used and that
normal config file discovery is disabled.
"""
with env.open('example.py', 'wt') as example:
example.write(textwrap.dedent("""\
def foo():
pass
"""))
# either my_config.ini or my_config.toml
config_ext = env.config_name.split('.')[-1]
config_name = 'my_config.' + config_ext
env.write_config(ignore='D100')
env.write_config(name=config_name, ignore='D103')
out, err, code = env.invoke()
assert code == 1
assert 'D100' not in out
assert 'D103' in out
out, err, code = env.invoke('--config={} -d'
.format(env.get_path(config_name)))
assert code == 1, out + err
assert 'D100' in out
assert 'D103' not in out
def test_non_existent_config(env):
out, err, code = env.invoke('--config=does_not_exist')
assert code == 2
def test_verbose(env):
"""Test that passing --verbose prints more information."""
with env.open('example.py', 'wt') as example:
example.write('"""Module docstring."""\n')
out, _, code = env.invoke()
assert code == 0
assert 'example.py' not in out
out, _, code = env.invoke(args="--verbose")
assert code == 0
assert 'example.py' in out
def test_count(env):
"""Test that passing --count correctly prints the error num."""
with env.open('example.py', 'wt') as example:
example.write(textwrap.dedent("""\
def foo():
pass
"""))
out, err, code = env.invoke(args='--count')
assert code == 1
assert '2' in out
# The error count should be in the last line of the output.
# -2 since there is a newline at the end of the output.
assert '2' == out.split('\n')[-2].strip()
def test_select_cli(env):
"""Test choosing error codes with `--select` in the CLI."""
with env.open('example.py', 'wt') as example:
example.write(textwrap.dedent("""\
def foo():
pass
"""))
out, err, code = env.invoke(args="--select=D100")
assert code == 1
assert 'D100' in out
assert 'D103' not in out
def test_select_config(env):
"""Test choosing error codes with `select` in the config file."""
with env.open('example.py', 'wt') as example:
example.write(textwrap.dedent("""\
class Foo(object):
"Doc string"
def foo():
pass
"""))
env.write_config(select="D100,D3")
out, err, code = env.invoke()
assert code == 1
assert 'D100' in out
assert 'D300' in out
assert 'D103' not in out
def test_add_select_cli(env):
"""Test choosing error codes with --add-select in the CLI."""
with env.open('example.py', 'wt') as example:
example.write(textwrap.dedent("""\
class Foo(object):
"Doc string"
def foo():
pass
"""))
env.write_config(select="D100")
out, err, code = env.invoke(args="--add-select=D204,D3")
assert code == 1
assert 'D100' in out
assert 'D204' in out
assert 'D300' in out
assert 'D103' not in out
def test_add_ignore_cli(env):
"""Test choosing error codes with --add-ignore in the CLI."""
with env.open('example.py', 'wt') as example:
example.write(textwrap.dedent("""\
class Foo(object):
def foo():
pass
"""))
env.write_config(select="D100,D101")
out, err, code = env.invoke(args="--add-ignore=D101")
assert code == 1
assert 'D100' in out
assert 'D101' not in out
assert 'D103' not in out
def test_wildcard_add_ignore_cli(env):
"""Test choosing error codes with --add-ignore in the CLI."""
with env.open('example.py', 'wt') as example:
example.write(textwrap.dedent("""\
class Foo(object):
"Doc string"
def foo():
pass
"""))
env.write_config(select="D203,D300")
out, err, code = env.invoke(args="--add-ignore=D30")
assert code == 1
assert 'D203' in out
assert 'D300' not in out
@pytest.mark.parametrize(
# Don't parametrize over 'pyproject.toml'
# since this test applies only to '.ini' files
'env', ['ini'], indirect=True
)
def test_ignores_whitespace_in_fixed_option_set(env):
with env.open('example.py', 'wt') as example:
example.write("class Foo(object):\n 'Doc string'")
env.write_config(ignore="D100,\n # comment\n D300")
out, err, code = env.invoke()
assert code == 1
assert 'D300' not in out
assert err == ''
@pytest.mark.parametrize(
# Don't parametrize over 'tox.ini' since
# this test applies only to '.toml' files
'env', ['toml'], indirect=True
)
def test_accepts_ignore_error_code_list(env):
with env.open('example.py', 'wt') as example:
example.write("class Foo(object):\n 'Doc string'")
env.write_config(ignore=['D100', 'D300'])
out, err, code = env.invoke()
assert code == 1
assert 'D300' not in out
assert err == ''
def test_bad_wildcard_add_ignore_cli(env):
"""Test adding a non-existent error codes with --add-ignore."""
with env.open('example.py', 'wt') as example:
example.write(textwrap.dedent("""\
class Foo(object):
"Doc string"
def foo():
pass
"""))
env.write_config(select="D203,D300")
out, err, code = env.invoke(args="--add-ignore=D3004")
assert code == 1
assert 'D203' in out
assert 'D300' in out
assert 'D3004' not in out
assert ('Error code passed is not a prefix of any known errors: D3004'
in err)
def test_overload_function(env):
"""Functions decorated with @overload trigger D418 error."""
with env.open('example.py', 'wt') as example:
example.write(textwrap.dedent('''\
from typing import overload
@overload
def overloaded_func(a: int) -> str:
...
@overload
def overloaded_func(a: str) -> str:
"""Foo bar documentation."""
...
def overloaded_func(a):
"""Foo bar documentation."""
return str(a)
'''))
env.write_config(ignore="D100")
out, err, code = env.invoke()
assert code == 1
assert 'D418' in out
assert 'D103' not in out
def test_overload_async_function(env):
"""Async functions decorated with @overload trigger D418 error."""
with env.open('example.py', 'wt') as example:
example.write(textwrap.dedent('''\
from typing import overload
@overload
async def overloaded_func(a: int) -> str:
...
@overload
async def overloaded_func(a: str) -> str:
"""Foo bar documentation."""
...
async def overloaded_func(a):
"""Foo bar documentation."""
return str(a)
'''))
env.write_config(ignore="D100")
out, err, code = env.invoke()
assert code == 1
assert 'D418' in out
assert 'D103' not in out
def test_overload_method(env):
"""Methods decorated with @overload trigger D418 error."""
with env.open('example.py', 'wt') as example:
example.write(textwrap.dedent('''\
from typing import overload
class ClassWithMethods:
@overload
def overloaded_method(a: int) -> str:
...
@overload
def overloaded_method(a: str) -> str:
"""Foo bar documentation."""
...
def overloaded_method(a):
"""Foo bar documentation."""
return str(a)
'''))
env.write_config(ignore="D100")
out, err, code = env.invoke()
assert code == 1
assert 'D418' in out
assert 'D102' not in out
assert 'D103' not in out
def test_overload_method_valid(env):
"""Valid case for overload decorated Methods.
This shouldn't throw any errors.
"""
with env.open('example.py', 'wt') as example:
example.write(textwrap.dedent('''\
from typing import overload
class ClassWithMethods:
"""Valid docstring in public Class."""
@overload
def overloaded_method(a: int) -> str:
...
@overload
def overloaded_method(a: str) -> str:
...
def overloaded_method(a):
"""Foo bar documentation."""
return str(a)
'''))
env.write_config(ignore="D100, D203")
out, err, code = env.invoke()
assert code == 0
def test_overload_function_valid(env):
"""Valid case for overload decorated functions.
This shouldn't throw any errors.
"""
with env.open('example.py', 'wt') as example:
example.write(textwrap.dedent('''\
from typing import overload
@overload
def overloaded_func(a: int) -> str:
...
@overload
def overloaded_func(a: str) -> str:
...
def overloaded_func(a):
"""Foo bar documentation."""
return str(a)
'''))
env.write_config(ignore="D100")
out, err, code = env.invoke()
assert code == 0
def test_overload_async_function_valid(env):
"""Valid case for overload decorated async functions.
This shouldn't throw any errors.
"""
with env.open('example.py', 'wt') as example:
example.write(textwrap.dedent('''\
from typing import overload
@overload
async def overloaded_func(a: int) -> str:
...
@overload
async def overloaded_func(a: str) -> str:
...
async def overloaded_func(a):
"""Foo bar documentation."""
return str(a)
'''))
env.write_config(ignore="D100")
out, err, code = env.invoke()
assert code == 0
def test_overload_nested_function(env):
"""Nested functions decorated with @overload trigger D418 error."""
with env.open('example.py', 'wt') as example:
example.write(textwrap.dedent('''\
from typing import overload
def function_with_nesting():
"""Valid docstring in public function."""
@overload
def overloaded_func(a: int) -> str:
...
@overload
def overloaded_func(a: str) -> str:
"""Foo bar documentation."""
...
def overloaded_func(a):
"""Foo bar documentation."""
return str(a)
'''))
env.write_config(ignore="D100")
out, err, code = env.invoke()
assert code == 1
assert 'D418' in out
assert 'D103' not in out
def test_overload_nested_function_valid(env):
"""Valid case for overload decorated nested functions.
This shouldn't throw any errors.
"""
with env.open('example.py', 'wt') as example:
example.write(textwrap.dedent('''\
from typing import overload
def function_with_nesting():
"""Adding a docstring to a function."""
@overload
def overloaded_func(a: int) -> str:
...
@overload
def overloaded_func(a: str) -> str:
...
def overloaded_func(a):
"""Foo bar documentation."""
return str(a)
'''))
env.write_config(ignore="D100")
out, err, code = env.invoke()
assert code == 0
def test_conflicting_select_ignore_config(env):
"""Test that select and ignore are mutually exclusive."""
env.write_config(select="D100", ignore="D101")
_, err, code = env.invoke()
assert code == 2
assert 'mutually exclusive' in err
def test_conflicting_select_convention_config(env):
"""Test that select and convention are mutually exclusive."""
env.write_config(select="D100", convention="pep257")
_, err, code = env.invoke()
assert code == 2
assert 'mutually exclusive' in err
def test_conflicting_ignore_convention_config(env):
"""Test that select and convention are mutually exclusive."""
env.write_config(ignore="D100", convention="pep257")
_, err, code = env.invoke()
assert code == 2
assert 'mutually exclusive' in err
def test_missing_docstring_in_package(env):
"""Make sure __init__.py files are treated as packages."""
with env.open('__init__.py', 'wt') as init:
pass # an empty package file
out, err, code = env.invoke()
assert code == 1
assert 'D100' not in out # shouldn't be treated as a module
assert 'D104' in out # missing docstring in package
def test_illegal_convention(env):
"""Test that illegal convention names are dealt with properly."""
_, err, code = env.invoke('--convention=illegal_conv')
assert code == 2, err
assert "Illegal convention 'illegal_conv'." in err
assert 'Possible conventions' in err
assert 'pep257' in err
assert 'numpy' in err
def test_empty_select_cli(env):
"""Test excluding all error codes with `--select=` in the CLI."""
with env.open('example.py', 'wt') as example:
example.write(textwrap.dedent("""\
def foo():
pass
"""))
_, _, code = env.invoke(args="--select=")
assert code == 0
def test_empty_select_config(env):
"""Test excluding all error codes with `select=` in the config file."""
with env.open('example.py', 'wt') as example:
example.write(textwrap.dedent("""\
def foo():
pass
"""))
env.write_config(select="")
_, _, code = env.invoke()
assert code == 0
def test_empty_select_with_added_error(env):
"""Test excluding all errors but one."""
with env.open('example.py', 'wt') as example:
example.write(textwrap.dedent("""\
def foo():
pass
"""))
env.write_config(select="")
out, err, code = env.invoke(args="--add-select=D100")
assert code == 1
assert 'D100' in out
assert 'D101' not in out
assert 'D103' not in out
def test_pep257_convention(env):
"""Test that the 'pep257' convention options has the correct errors."""
with env.open('example.py', 'wt') as example:
example.write(textwrap.dedent('''
class Foo(object):
"""Docstring for this class"""
def foo():
pass
# Original PEP-257 example from -
# https://www.python.org/dev/peps/pep-0257/
def complex(real=0.0, imag=0.0):
"""Form a complex number.
Keyword arguments:
real -- the real part (default 0.0)
imag -- the imaginary part (default 0.0)
"""
if imag == 0.0 and real == 0.0:
return complex_zero
'''))
env.write_config(convention="pep257")
out, err, code = env.invoke()
assert code == 1
assert 'D100' in out
assert 'D211' in out
assert 'D203' not in out
assert 'D212' not in out
assert 'D213' not in out
assert 'D413' not in out
def test_numpy_convention(env):
"""Test that the 'numpy' convention options has the correct errors."""
with env.open('example.py', 'wt') as example:
example.write(textwrap.dedent('''
class Foo(object):
"""Docstring for this class.
returns
------
"""
def __init__(self):
pass
'''))
env.write_config(convention="numpy")
out, err, code = env.invoke()
assert code == 1
assert 'D107' not in out
assert 'D213' not in out
assert 'D215' in out
assert 'D405' in out
assert 'D409' in out
assert 'D414' in out
assert 'D410' not in out
assert 'D413' not in out
def test_google_convention(env):
"""Test that the 'google' convention options has the correct errors."""
with env.open('example.py', 'wt') as example:
example.write(textwrap.dedent('''
def func(num1, num2, num_three=0):
"""Docstring for this function.
Args:
num1 (int): Number 1.
num2: Number 2.
"""
class Foo(object):
"""Docstring for this class.
Attributes:
test: Test
returns:
"""
def __init__(self):
pass
'''))
env.write_config(convention="google")
out, err, code = env.invoke()
assert code == 1
assert 'D107' in out
assert 'D213' not in out
assert 'D215' not in out
assert 'D405' in out
assert 'D409' not in out
assert 'D410' not in out
assert 'D412' in out
assert 'D413' not in out
assert 'D414' in out
assert 'D417' in out
def test_config_file_inheritance(env):
"""Test configuration files inheritance.
The test creates 2 configuration files:
env_base
+-- tox.ini
| This configuration will set `select=`.
+-- A
+-- tox.ini
| This configuration will set `inherit=false`.
+-- test.py
The file will contain code that violates D100,D103.
When invoking pydocstyle, the first config file found in the base directory
will set `select=`, so no error codes should be checked.
The `A/tox.ini` configuration file sets `inherit=false` but has an empty
configuration, therefore the default convention will be checked.
We expect pydocstyle to ignore the `select=` configuration and raise all
the errors stated above.
"""
env.write_config(select='')
env.write_config(prefix='A', inherit=False)
with env.open(os.path.join('A', 'test.py'), 'wt') as test:
test.write(textwrap.dedent("""\
def bar():
pass
"""))
out, err, code = env.invoke()
assert code == 1
assert 'D100' in out
assert 'D103' in out
def test_config_file_cumulative_add_ignores(env):
"""Test that add-ignore is cumulative.
env_base
+-- tox.ini
| This configuration will set `select=D100,D103` and `add-ignore=D100`.
+-- base.py
| Will violate D100,D103
+-- A
+-- tox.ini
| This configuration will set `add-ignore=D103`.
+-- a.py
Will violate D100,D103.
The desired result is that `base.py` will fail with D103 and
`a.py` will pass.
"""
env.write_config(select='D100,D103', add_ignore='D100')
env.write_config(prefix='A', add_ignore='D103')
test_content = textwrap.dedent("""\
def foo():
pass
""")
with env.open('base.py', 'wt') as test:
test.write(test_content)
with env.open(os.path.join('A', 'a.py'), 'wt') as test:
test.write(test_content)
out, err, code = env.invoke()
err = parse_errors(out)
assert code == 1
assert 'base.py' in err, err
assert 'a.py' not in err, err
assert 'D100' not in err['base.py'], err
assert 'D103' in err['base.py'], err
def test_config_file_cumulative_add_select(env):
"""Test that add-select is cumulative.
env_base
+-- tox.ini
| This configuration will set `select=` and `add-select=D100`.
+-- base.py
| Will violate D100,D103
+-- A
+-- tox.ini
| This configuration will set `add-select=D103`.
+-- a.py
Will violate D100,D103.
The desired result is that `base.py` will fail with D100 and
`a.py` will fail with D100,D103.
"""
env.write_config(select='', add_select='D100')
env.write_config(prefix='A', add_select='D103')
test_content = textwrap.dedent("""\
def foo():
pass
""")
with env.open('base.py', 'wt') as test:
test.write(test_content)
with env.open(os.path.join('A', 'a.py'), 'wt') as test:
test.write(test_content)
out, err, code = env.invoke()
err = parse_errors(out)
assert code == 1
assert 'base.py' in err, err
assert 'a.py' in err, err
assert err['base.py'] == {'D100'}, err
assert err['a.py'] == {'D100', 'D103'}, err
def test_config_file_convention_overrides_select(env):
"""Test that conventions override selected errors.
env_base
+-- tox.ini
| This configuration will set `select=D103`.
+-- base.py
| Will violate D100.
+-- A
+-- tox.ini
| This configuration will set `convention=pep257`.
+-- a.py
Will violate D100.
The expected result is that `base.py` will be clear of errors and
`a.py` will violate D100.
"""
env.write_config(select='D103')
env.write_config(prefix='A', convention='pep257')
test_content = ""
with env.open('base.py', 'wt') as test:
test.write(test_content)
with env.open(os.path.join('A', 'a.py'), 'wt') as test:
test.write(test_content)
out, err, code = env.invoke()
assert code == 1
assert 'D100' in out, out
assert 'base.py' not in out, out
assert 'a.py' in out, out
def test_cli_overrides_config_file(env):
"""Test that the CLI overrides error codes selected in the config file.
env_base
+-- tox.ini
| This configuration will set `select=D103` and `match-dir=foo`.
+-- base.py
| Will violate D100.
+-- A
+-- a.py
Will violate D100,D103.
We shall run with `--convention=pep257`.
We expect `base.py` to be checked and violate `D100` and that `A/a.py` will
not be checked because of `match-dir=foo` in the config file.
"""
env.write_config(select='D103', match_dir='foo')
with env.open('base.py', 'wt') as test:
test.write("")
env.makedirs('A')
with env.open(os.path.join('A', 'a.py'), 'wt') as test:
test.write(textwrap.dedent("""\
def foo():
pass
"""))
out, err, code = env.invoke(args="--convention=pep257")
assert code == 1
assert 'D100' in out, out
assert 'D103' not in out, out
assert 'base.py' in out, out
assert 'a.py' not in out, out
def test_cli_match_overrides_config_file(env):
"""Test that the CLI overrides the match clauses in the config file.
env_base
+-- tox.ini
| This configuration will set `match-dir=foo`.
+-- base.py
| Will violate D100,D103.
+-- A
+-- tox.ini
| This configuration will set `match=bar.py`.
+-- a.py
Will violate D100.
We shall run with `--match=a.py` and `--match-dir=A`.
We expect `base.py` will not be checked and that `A/a.py` will be checked.
"""
env.write_config(match_dir='foo')
env.write_config(prefix='A', match='bar.py')
with env.open('base.py', 'wt') as test:
test.write(textwrap.dedent("""\
def foo():
pass
"""))
with env.open(os.path.join('A', 'a.py'), 'wt') as test:
test.write("")
out, err, code = env.invoke(args="--match=a.py --match-dir=A")
assert code == 1
assert 'D100' in out, out
assert 'D103' not in out, out
assert 'base.py' not in out, out
assert 'a.py' in out, out
def test_config_file_convention_overrides_ignore(env):
"""Test that conventions override ignored errors.
env_base
+-- tox.ini
| This configuration will set `ignore=D100,D103`.
+-- base.py
| Will violate D100,D103.
+-- A
+-- tox.ini
| This configuration will set `convention=pep257`.
+-- a.py
Will violate D100,D103.
The expected result is that `base.py` will be clear of errors and
`a.py` will violate D103.
"""
env.write_config(ignore='D100,D103')
env.write_config(prefix='A', convention='pep257')
test_content = textwrap.dedent("""\
def foo():
pass
""")
with env.open('base.py', 'wt') as test:
test.write(test_content)
with env.open(os.path.join('A', 'a.py'), 'wt') as test:
test.write(test_content)
out, err, code = env.invoke()
assert code == 1
assert 'D100' in out, out
assert 'D103' in out, out
assert 'base.py' not in out, out
assert 'a.py' in out, out
def test_config_file_ignore_overrides_select(env):
"""Test that ignoring any error overrides selecting errors.
env_base
+-- tox.ini
| This configuration will set `select=D100`.
+-- base.py
| Will violate D100,D101,D102.
+-- A
+-- tox.ini
| This configuration will set `ignore=D102`.
+-- a.py
Will violate D100,D101,D102.
The expected result is that `base.py` will violate D100 and
`a.py` will violate D100,D101.
"""
env.write_config(select='D100')
env.write_config(prefix='A', ignore='D102')
test_content = textwrap.dedent("""\
class Foo(object):
def bar():
pass
""")
with env.open('base.py', 'wt') as test:
test.write(test_content)
with env.open(os.path.join('A', 'a.py'), 'wt') as test:
test.write(test_content)
out, err, code = env.invoke()
err = parse_errors(out)
assert code == 1
assert 'base.py' in err, err
assert 'a.py' in err, err
assert err['base.py'] == {'D100'}, err
assert err['a.py'] == {'D100', 'D101'}, err
def test_config_file_nearest_to_checked_file(env):
"""Test that the configuration to each file is the nearest one.
In this test there will be 2 identical files in 2 branches in the directory
tree. Both of them will violate the same error codes, but their config
files will contain different ignores.
env_base
+-- tox.ini
| This configuration will set `convention=pep257` and `add-ignore=D100`
+-- base.py
| Will violate D100,D101,D102.
+-- A
| +-- a.py
| Will violate D100,D101,D102.
+-- B
+-- tox.ini
| Will set `add-ignore=D101`
+-- b.py
Will violate D100,D101,D102.
We should see that `a.py` and `base.py` act the same and violate
D101,D102 (since they are both configured by `tox.ini`) and that
`b.py` violates D102, since it's configured by `B/tox.ini` as well.
"""
env.write_config(convention='pep257', add_ignore='D100')
env.write_config(prefix='B', add_ignore='D101')
test_content = textwrap.dedent("""\
class Foo(object):
def bar():
pass
""")
with env.open('base.py', 'wt') as test:
test.write(test_content)
env.makedirs('A')
with env.open(os.path.join('A', 'a.py'), 'wt') as test:
test.write(test_content)
with env.open(os.path.join('B', 'b.py'), 'wt') as test:
test.write(test_content)
out, err, code = env.invoke()
err = parse_errors(out)
assert code == 1
assert 'base.py' in err, err
assert 'a.py' in err, err
assert 'b.py' in err, err
assert err['base.py'] == {'D101', 'D102'}, err
assert err['a.py'] == {'D101', 'D102'}, err
assert err['b.py'] == {'D102'}, err
def test_config_file_nearest_match_re(env):
"""Test that the `match` and `match-dir` options are handled correctly.
env_base
+-- tox.ini
| This configuration will set `convention=pep257` and `add-ignore=D100`.
+-- A
+-- tox.ini
| Will set `match-dir=C`.
+-- B
| +-- b.py
| Will violate D100,D103.
+-- C
+-- tox.ini
| Will set `match=bla.py`.
+-- c.py
| Will violate D100,D103.
+-- bla.py
Will violate D100.
We expect the call to pydocstyle to be successful, since `b.py` and
`c.py` are not supposed to be matched by the `match`/`match-dir` patterns.
"""
env.write_config(convention='pep257', add_ignore='D100')
env.write_config(prefix='A', match_dir='C')
env.write_config(prefix=os.path.join('A', 'C'), match='bla.py')
content = textwrap.dedent("""\
def foo():
pass
""")
env.makedirs(os.path.join('A', 'B'))
with env.open(os.path.join('A', 'B', 'b.py'), 'wt') as test:
test.write(content)
with env.open(os.path.join('A', 'C', 'c.py'), 'wt') as test:
test.write(content)
with env.open(os.path.join('A', 'C', 'bla.py'), 'wt') as test:
test.write('')
_, _, code = env.invoke()
assert code == 0
def test_syntax_error_multiple_files(env):
"""Test that a syntax error in a file doesn't prevent further checking."""
for filename in ('first.py', 'second.py'):
with env.open(filename, 'wt') as fobj:
fobj.write("[")
out, err, code = env.invoke(args="-v")
assert code == 1
assert 'first.py: Cannot parse file' in err
assert 'second.py: Cannot parse file' in err
def test_indented_function(env):
"""Test that nested functions do not cause IndentationError."""
env.write_config(ignore='D')
with env.open("test.py", 'wt') as fobj:
fobj.write(textwrap.dedent('''\
def foo():
def bar(a):
"""A docstring
Args:
a : An argument.
"""
pass
'''))
out, err, code = env.invoke(args="-v")
assert code == 0
assert "IndentationError: unexpected indent" not in err
def test_only_comment_file(env):
"""Test that file with only comments does only cause D100."""
with env.open('comments.py', 'wt') as comments:
comments.write(
'#!/usr/bin/env python3\n'
'# -*- coding: utf-8 -*-\n'
'# Useless comment\n'
'# Just another useless comment\n'
)
out, _, code = env.invoke()
assert 'D100' in out
out = out.replace('D100', '')
for err in {'D1', 'D2', 'D3', 'D4'}:
assert err not in out
assert code == 1
def test_comment_plus_docstring_file(env):
"""Test that file with comments and docstring does not cause errors."""
with env.open('comments_plus.py', 'wt') as comments_plus:
comments_plus.write(
'#!/usr/bin/env python3\n'
'# -*- coding: utf-8 -*-\n'
'# Useless comment\n'
'# Just another useless comment\n'
'"""Module docstring."""\n'
)
out, _, code = env.invoke()
assert '' == out
assert code == 0
def test_only_comment_with_noqa_file(env):
"""Test that file with noqa and only comments does not cause errors."""
with env.open('comments.py', 'wt') as comments:
comments.write(
'#!/usr/bin/env python3\n'
'# -*- coding: utf-8 -*-\n'
'# Useless comment\n'
'# Just another useless comment\n'
'# noqa: D100\n'
)
out, _, code = env.invoke()
assert 'D100' not in out
assert code == 0
def test_comment_with_noqa_plus_docstring_file(env):
"""Test that file with comments, noqa, docstring does not cause errors."""
with env.open('comments_plus.py', 'wt') as comments_plus:
comments_plus.write(
'#!/usr/bin/env python3\n'
'# -*- coding: utf-8 -*-\n'
'# Useless comment\n'
'# Just another useless comment\n'
'# noqa: D400\n'
'"""Module docstring without period"""\n'
)
out, _, code = env.invoke()
assert '' == out
assert code == 0
def test_ignore_self_only_init(env):
"""Test that ignore_self_only_init works ignores __init__ with only self."""
with env.open('example.py', 'wt') as example:
example.write(textwrap.dedent("""\
class Foo:
def __init__(self):
pass
"""))
env.write_config(ignore_self_only_init=True, select="D107")
out, err, code = env.invoke()
assert '' == out
assert code == 0
def test_match_considers_basenames_for_path_args(env):
"""Test that `match` option only considers basenames for path arguments.
The test environment consists of a single empty module `test_a.py`. The
match option is set to a pattern that ignores test_ prefixed .py filenames.
When pydocstyle is invoked with the full path to `test_a.py`, we expect it
to succeed, since the match option is applied to the file's base name
rather than to its full path.
"""
# Ignore .py files prefixed with 'test_'
env.write_config(select='D100', match='(?!test_).+.py')
# Create an empty module (violates D100)
with env.open('test_a.py', 'wt') as test:
test.write('')
# env.invoke calls pydocstyle with full path to test_a.py
out, _, code = env.invoke(target='test_a.py')
assert '' == out
assert code == 0 pydocstyle-6.3.0/src/tests/test_utils.py 0000664 0000000 0000000 00000001625 14361602300 0020420 0 ustar 00root root 0000000 0000000 """Unit test for pydocstyle utils.
Use tox or pytest to run the test suite.
"""
from pydocstyle import utils
__all__ = ()
def test_common_prefix():
"""Test common prefix length of two strings."""
assert utils.common_prefix_length('abcd', 'abce') == 3
def test_no_common_prefix():
"""Test common prefix length of two strings that have no common prefix."""
assert utils.common_prefix_length('abcd', 'cdef') == 0
def test_differ_length():
"""Test common prefix length of two strings differing in length."""
assert utils.common_prefix_length('abcd', 'ab') == 2
def test_empty_string():
"""Test common prefix length of two strings, one of them empty."""
assert utils.common_prefix_length('abcd', '') == 0
def test_strip_non_alphanumeric():
"""Test strip of a string leaves only alphanumeric characters."""
assert utils.strip_non_alphanumeric(" 1abcd1...") == "1abcd1"
pydocstyle-6.3.0/tox.ini 0000664 0000000 0000000 00000003757 14361602300 0015241 0 ustar 00root root 0000000 0000000 # Tox (http://tox.testrun.org/) is a tool for running tests in
# multiple virtualenvs. This configuration file will run the
# test suite on all supported python versions. To use it, "pip
# install tox" and then run "tox" from this directory.
[tox]
envlist = py{36,37,38,39,310,311}-{tests,install},docs,install,py36-docs
[testenv]
download = true
# Make sure reading the UTF-8 content of test.py works regardless of the locale.
# Note that "en_US" is used here instead of "C", as "C" is not available on macOS.
setenv =
LC_ALL=en_US.UTF-8
LANG=en_US.UTF-8
# To pass arguments to pytest, use `tox [options] -- [pytest posargs]`.
commands =
pytest src/tests {posargs}
mypy src/
black --check src/pydocstyle
isort --check src/pydocstyle
deps =
-rrequirements/runtime.txt
-rrequirements/tests.txt
[testenv:install]
skip_install = True
commands =
pip wheel . -w dist --no-deps
pip install --no-index --find-links=dist pydocstyle
pydocstyle --help
[testenv:docs]
changedir = docs
deps =
-rrequirements/runtime.txt
-rrequirements/docs.txt
commands = sphinx-build -b html . _build
[testenv:py36-docs]
changedir = {[testenv:docs]changedir}
deps = {[testenv:docs]deps}
commands = {[testenv:docs]commands}
# There's no way to generate sub-sections in tox.
# The following sections all just reference `[testenv:install]`.
[testenv:py36-install]
skip_install = {[testenv:install]skip_install}
commands = {[testenv:install]commands}
[testenv:py37-install]
skip_install = {[testenv:install]skip_install}
commands = {[testenv:install]commands}
[testenv:py38-install]
skip_install = {[testenv:install]skip_install}
commands = {[testenv:install]commands}
[testenv:py39-install]
skip_install = {[testenv:install]skip_install}
commands = {[testenv:install]commands}
[testenv:py310-install]
skip_install = {[testenv:install]skip_install}
commands = {[testenv:install]commands}
[testenv:py311-install]
skip_install = {[testenv:install]skip_install}
commands = {[testenv:install]commands}