pytkdocs-0.16.1/.copier-answers.yml
# Changes here will be overwritten by Copier
_commit: 0.9.6
_src_path: gh:pawamoy/copier-pdm
author_email: pawamoy@pm.me
author_fullname: Timothée Mazzucotelli
author_username: pawamoy
copyright_date: '2020'
copyright_holder: Timothée Mazzucotelli
copyright_holder_email: pawamoy@pm.me
copyright_license: ISC License
project_description: Load Python objects documentation.
project_name: pytkdocs
python_package_command_line_name: pytkdocs
python_package_distribution_name: pytkdocs
python_package_import_name: pytkdocs
repository_name: pytkdocs
repository_namespace: pawamoy
repository_provider: github.com
use_precommit: false
pytkdocs-0.16.1/.github/FUNDING.yml
github:
- pawamoy
ko_fi: pawamoy
liberapay: pawamoy
patreon: pawamoy
custom:
- https://www.paypal.me/pawamoy
pytkdocs-0.16.1/.github/ISSUE_TEMPLATE/bug_report.md
---
name: Bug report
about: Create a report to help us improve
title: "[BUG] "
labels: ''
assignees: ''
---
**Describe the bug**
A clear and concise description of what the bug is.
**To Reproduce**
Give us an example of Python code or docstrings that trigger the issue.
**Expected behavior**
A clear and concise description of what you expected to happen.
**Screenshots**
If you are using `pytkdocs` through `mkdocstrings` and if relevant, please attach a screenshot.
**System (please complete the following information):**
- `pytkdocs` version: [e.g. 0.2.1]
- Python version: [e.g. 3.8]
- OS: [Windows/Linux]
**Additional context**
Add any other context about the problem here.
pytkdocs-0.16.1/.github/ISSUE_TEMPLATE/feature_request.md
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: feature
assignees: ''
---
**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
**Describe the solution you'd like**
A clear and concise description of what you want to happen.
**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.
**Additional context**
Add any other context or screenshots about the feature request here.
pytkdocs-0.16.1/.github/workflows/ci.yml
name: ci
on:
push:
branches:
- master
pull_request:
branches:
- master
defaults:
run:
shell: bash
env:
LANG: en_US.utf-8
LC_ALL: en_US.utf-8
PYTHONIOENCODING: UTF-8
jobs:
quality:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Set up PDM
uses: pdm-project/setup-pdm@v2.6
with:
python-version: "3.8"
- name: Set cache variables
id: set_variables
run: |
echo "::set-output name=PIP_CACHE::$(pip cache dir)"
echo "::set-output name=PDM_CACHE::$(pdm config cache_dir)"
- name: Set up cache
uses: actions/cache@v2
with:
path: |
${{ steps.set_variables.outputs.PIP_CACHE }}
${{ steps.set_variables.outputs.PDM_CACHE }}
key: checks-cache
- name: Resolving dependencies
run: pdm lock
- name: Install dependencies
run: pdm install -G duty -G docs -G quality -G typing -G security -G numpy-style
- name: Check if the documentation builds correctly
run: pdm run duty check-docs
- name: Check the code quality
run: pdm run duty check-quality
- name: Check if the code is correctly typed
run: pdm run duty check-types
- name: Check for vulnerabilities in dependencies
run: pdm run duty check-dependencies
tests:
strategy:
matrix:
os:
- ubuntu-latest
- macos-latest
- windows-latest
python-version:
- "3.7"
- "3.8"
- "3.9"
- "3.10"
- "3.11-dev"
runs-on: ${{ matrix.os }}
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Set up PDM
uses: pdm-project/setup-pdm@v2.6
with:
python-version: ${{ matrix.python-version }}
- name: Set cache variables
id: set_variables
run: |
echo "::set-output name=PIP_CACHE::$(pip cache dir)"
echo "::set-output name=PDM_CACHE::$(pdm config cache_dir)"
- name: Set up cache
uses: actions/cache@v2
with:
path: |
${{ steps.set_variables.outputs.PIP_CACHE }}
${{ steps.set_variables.outputs.PDM_CACHE }}
key: tests-cache-${{ runner.os }}-${{ matrix.python-version }}
- name: Install dependencies
run: pdm install --no-editable -G duty -G tests -G numpy-style
- name: Run the test suite
run: pdm run duty test
pytkdocs-0.16.1/.gitignore
.idea/
__pycache__/
*.py[cod]
dist/
*.egg-info/
build/
htmlcov/
.coverage*
pip-wheel-metadata/
.pytest_cache/
.mypy_cache/
site/
pdm.lock
.pdm.toml
__pypackages__/
.venv/
pytkdocs-0.16.1/.gitpod.dockerfile
FROM gitpod/workspace-full
USER gitpod
ENV PIP_USER=no
ENV PYTHON_VERSIONS=
RUN pip3 install pipx; \
pipx install pdm; \
pipx ensurepath
pytkdocs-0.16.1/.gitpod.yml
vscode:
extensions:
- ms-python.python
image:
file: .gitpod.dockerfile
ports:
- port: 8000
onOpen: notify
tasks:
- init: make setup
pytkdocs-0.16.1/CHANGELOG.md
# Changelog
All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
## [0.16.1](https://github.com/mkdocstrings/pytkdocs/releases/tag/0.16.1) - 2022-03-07
[Compare with 0.16.0](https://github.com/mkdocstrings/pytkdocs/compare/0.16.0...0.16.1)
### Bug Fixes
- Always return strings (not `None`) and warn about missing descriptions in numpy parser ([50b9597](https://github.com/mkdocstrings/pytkdocs/commit/50b9597d52c4b22de110821fe646d9f992e2977b) by Joseph Richardson). [Issue #137](https://github.com/mkdocstrings/pytkdocs/issues/137), [PR #138](https://github.com/mkdocstrings/pytkdocs/pull/138)
## [0.16.0](https://github.com/mkdocstrings/pytkdocs/releases/tag/0.16.0) - 2022-02-19
[Compare with 0.15.0](https://github.com/mkdocstrings/pytkdocs/compare/0.15.0...0.16.0)
### Maintenance
- Drop Python 3.6 support ([0d39665](https://github.com/mkdocstrings/pytkdocs/commit/0d396653cb2cb2b286bae4c948b0dae869c32cd1) by Timothée Mazzucotelli).
### Features
- Add `trim_doctest_flag` to google and numpy parsers ([0fecc43](https://github.com/mkdocstrings/pytkdocs/commit/0fecc4338061ecfa374ce823a34be0764d550547) by Jeremy Goh). [Issue mkdocstrings/mkdocstrings#386](https://github.com/mkdocstrings/mkdocstrings/issues/386), [PR #134](https://github.com/mkdocstrings/pytkdocs/pull/134)
## [0.15.0](https://github.com/mkdocstrings/pytkdocs/releases/tag/0.15.0) - 2021-12-27
[Compare with 0.14.2](https://github.com/mkdocstrings/pytkdocs/compare/0.14.2...0.15.0)
### Features
- Add support for `help_text` field parameter as docstring for django model fields ([01ac524](https://github.com/mkdocstrings/pytkdocs/commit/01ac524a1d353aa816adbb4ee46731451b58db37) by mabugaj). References: [#127](https://github.com/mkdocstrings/pytkdocs/issues/127), [#129](https://github.com/mkdocstrings/pytkdocs/issues/129)
## [0.14.2](https://github.com/mkdocstrings/pytkdocs/releases/tag/0.14.2) - 2021-12-16
[Compare with 0.14.1](https://github.com/mkdocstrings/pytkdocs/compare/0.14.1...0.14.2)
### Dependencies
- Remove upper bounds on production dependencies ([22ff7df](https://github.com/mkdocstrings/pytkdocs/commit/22ff7df70361bc460ba3b92bfba51d90481112fd) by Timothée Mazzucotelli). [Issue #124](https://github.com/mkdocstrings/pytkdocs/issues/124), [PR #128](https://github.com/mkdocstrings/pytkdocs/pull/128)
## [0.14.1](https://github.com/mkdocstrings/pytkdocs/releases/tag/0.14.1) - 2021-12-16
[Compare with 0.14.0](https://github.com/mkdocstrings/pytkdocs/compare/0.14.0...0.14.1)
### Code Refactoring
- Remove upper bounds on development dependencies ([e1a4eba](https://github.com/mkdocstrings/pytkdocs/commit/e1a4eba87b2253024eea5fb68510aa6cda1d9f1c) by Timothée Mazzucotelli). [PR #126](https://github.com/mkdocstrings/pytkdocs/pull/126). See https://iscinumpy.dev/post/bound-version-constraints/.
## [0.14.0](https://github.com/mkdocstrings/pytkdocs/releases/tag/0.14.0) - 2021-10-08
[Compare with 0.13.0](https://github.com/mkdocstrings/pytkdocs/compare/0.13.0...0.14.0)
### Features
- Add Markdown docstring-style support ([06556e3](https://github.com/mkdocstrings/pytkdocs/commit/06556e37634e0c520b28fa323d8d4ea459c32892) by Timothée Mazzucotelli). [PR #121](https://github.com/mkdocstrings/pytkdocs/pull/121)
### Bug Fixes
- Serialize yields and keyword arguments sections ([8fb86d6](https://github.com/mkdocstrings/pytkdocs/commit/8fb86d6777f11ff9ead322b901106d1e5a6d4741) by Timothée Mazzucotelli).
## [0.13.0](https://github.com/mkdocstrings/pytkdocs/releases/tag/0.13.0) - 2021-10-06
[Compare with 0.12.0](https://github.com/mkdocstrings/pytkdocs/compare/0.12.0...0.13.0)
### Features
- Support google yields sections ([4b99cbc](https://github.com/mkdocstrings/pytkdocs/commit/4b99cbc7192ab4a1093237a1c79fdf8d70c39b6b) by Timothée Mazzucotelli). [Issue #89](https://github.com/mkdocstrings/pytkdocs/issues/89), [PR #116](https://github.com/mkdocstrings/pytkdocs/pull/116)
### Bug Fixes
- Add source to class objects ([8931df8](https://github.com/mkdocstrings/pytkdocs/commit/8931df8f7ef9c98d2a36efcee09339d012a08157) by jakekaplan). [PR #120](https://github.com/mkdocstrings/pytkdocs/pull/120)
- Pass context when parsing class docstring ([4a62039](https://github.com/mkdocstrings/pytkdocs/commit/4a6203926e1ad42c0cc9652f1e42b1570d193564) by jakekaplan). [PR #118](https://github.com/mkdocstrings/pytkdocs/pull/118)
- Don't mistakenly return a 'missing annotation' error ([4afc97f](https://github.com/mkdocstrings/pytkdocs/commit/4afc97f912472e9a29931d09feb88d07376b4afd) by Timothée Mazzucotelli).
### Code Refactoring
- Set keyword-only kind on keyword arguments ([c5c2ef0](https://github.com/mkdocstrings/pytkdocs/commit/c5c2ef0655bce35fe8df4d8f2674701fc8086c48) by Timothée Mazzucotelli).
## [0.12.0](https://github.com/mkdocstrings/pytkdocs/releases/tag/0.12.0) - 2021-09-21
[Compare with 0.11.1](https://github.com/mkdocstrings/pytkdocs/compare/0.11.1...0.12.0)
### Features
- Include base classes in output ([f7f6652](https://github.com/mkdocstrings/pytkdocs/commit/f7f6652f5b796c37980cc6b68865b2441a469ebd) by Brian Koropoff). [Issue mkdocstrings#269](https://github.com/mkdocstrings/mkdocstrings/issues/269), [PR #108](https://github.com/mkdocstrings/pytkdocs/pull/108)
- Support "Keyword Args" sections for Gooogle-style ([0133369](https://github.com/mkdocstrings/pytkdocs/commit/013336970029edc0ff95a025007492786d77ed9c) by HacKan). [Issue #88](https://github.com/mkdocstrings/pytkdocs/issues/88), [PR #105](https://github.com/mkdocstrings/pytkdocs/pull/105)
- Allow method descriptors to be serialized as methods ([8e1b1b2](https://github.com/mkdocstrings/pytkdocs/commit/8e1b1b2375070ab5b01757c686da4bbde3a771cd) by jmrgibson). [PR #103](https://github.com/mkdocstrings/pytkdocs/pull/103)
- Add support for Django models ([6416a05](https://github.com/mkdocstrings/pytkdocs/commit/6416a05c080d2f15206b26d641cd7d5ca18af316) by Michał Rokita). [Issue #39](https://github.com/mkdocstrings/pytkdocs/issues/39), [PR #101](https://github.com/mkdocstrings/pytkdocs/pull/101)
### Bug Fixes
- Fix getting parent module of decorated functions ([88b457f](https://github.com/mkdocstrings/pytkdocs/commit/88b457f8aae51a422470d6c34859439d97b110e0) by Timothée Mazzucotelli). [Issue mkdocstrings#162](https://github.com/mkdocstrings/mkdocstrings/issues/162), [PR #109](https://github.com/mkdocstrings/pytkdocs/pull/109)
### Code Refactoring
- Stop recording errors in the loader ([3191bac](https://github.com/mkdocstrings/pytkdocs/commit/3191bac307a85f8c1e108eea5c7ee72bd50c8803) by Timothée Mazzucotelli). [Issue #111](https://github.com/mkdocstrings/pytkdocs/issues/111), [PR #114](https://github.com/mkdocstrings/pytkdocs/pull/114)
- Remove warning about new path style option ([14b18be](https://github.com/mkdocstrings/pytkdocs/commit/14b18beb2116564c1ad2c1bb3b1f2316d813a7c7) by Timothée Mazzucotelli).
- Switch preference order between annotation and docstring type ([c4f6bdc](https://github.com/mkdocstrings/pytkdocs/commit/c4f6bdc8136497eeca43583c04fa72d9d316df4b) by Andy Challis, and [75b4024](https://github.com/mkdocstrings/pytkdocs/commit/75b40247a4002823cdc2505cc864a70db745950e) by Timothée Mazzucotelli). [Issue mkdocstrings#143](https://github.com/mkdocstrings/mkdocstrings/issues/143), [PR #110](https://github.com/mkdocstrings/pytkdocs/pull/110)
## [0.11.1](https://github.com/mkdocstrings/pytkdocs/releases/tag/0.11.1) - 2021-04-03
[Compare with 0.11.0](https://github.com/mkdocstrings/pytkdocs/compare/0.11.0...0.11.1)
### Bug Fixes
- Remove duplicate dataclass attributes when they have defaults ([c0277b2](https://github.com/mkdocstrings/pytkdocs/commit/c0277b2104d615a38558ab2d93e495faf360bd63) by Bernhard Stadlbauer). [Issue #52](https://github.com/mkdocstrings/pytkdocs/issues/52), [PR #100](https://github.com/mkdocstrings/pytkdocs/pull/100)
## [0.11.0](https://github.com/mkdocstrings/pytkdocs/releases/tag/0.11.0) - 2021-02-28
[Compare with 0.10.1](https://github.com/mkdocstrings/pytkdocs/compare/0.10.1...0.11.0)
### Features
- Add support for Numpy docstrings ([de0424a](https://github.com/mkdocstrings/pytkdocs/commit/de0424a33e94f7dfdfd3b613c500a6fb428406aa) by Achille M). [Issue #7](https://github.com/mkdocstrings/pytkdocs/issues/7), [PR #87](https://github.com/mkdocstrings/pytkdocs/pull/87)
### Bug Fixes
- Fix type annotations parsing ([9025438](https://github.com/mkdocstrings/pytkdocs/commit/90254380a01483172c910b82844fdfb0f38fe1fb) by Timothée Mazzucotelli). [Issue #92](https://github.com/mkdocstrings/pytkdocs/issues/92), [PR #96](https://github.com/mkdocstrings/pytkdocs/pull/96)
- Fix pydantic type documentation for List/Set/Tuple ([b99c661](https://github.com/mkdocstrings/pytkdocs/commit/b99c661398ad71562bd909cebe1e40df109f058e) by Shashank Sharma). [Issue #94](https://github.com/mkdocstrings/pytkdocs/issues/94), [PR #95](https://github.com/mkdocstrings/pytkdocs/pull/95)
- Support cached properties ([4052eab](https://github.com/mkdocstrings/pytkdocs/commit/4052eabdd45a7f4fe8c3fc8591bb23e1763a5a0f) by Timothée Mazzucotelli). [Issue #86](https://github.com/mkdocstrings/pytkdocs/issues/86)
- Get inherited properties docstrings from parent class ([c88282c](https://github.com/mkdocstrings/pytkdocs/commit/c88282cc89a4d8a6c897a6d6851d832466b2360b) by Timothée Mazzucotelli). [Issue #90](https://github.com/mkdocstrings/pytkdocs/issues/90)
- Fix dedent for attributes docstrings ([0326005](https://github.com/mkdocstrings/pytkdocs/commit/032600563ae613aa14dd18c0d1f44d0c78316ecd) by Timothée Mazzucotelli). [Issue #54](https://github.com/mkdocstrings/pytkdocs/issues/54), [issue mkdocstrings#225](https://github.com/mkdocstrings/mkdocstrings/issues/225)
## [0.10.1](https://github.com/pawamoy/pytkdocs/releases/tag/0.10.1) - 2021-01-03
[Compare with 0.10.0](https://github.com/pawamoy/pytkdocs/compare/0.10.0...0.10.1)
### Bug Fixes
- Warn when examples section is empty ([e1d2dfc](https://github.com/pawamoy/pytkdocs/commit/e1d2dfc3a9bff690c9061892268fd480e83c6f91) by Timothée Mazzucotelli).
- Allow newer version of dataclasses backport ([4392f2e](https://github.com/pawamoy/pytkdocs/commit/4392f2e4669c76bf2acf74b6124f74b7734b638b) by Patrick Lannigan).
- Ignore errors parsing c-extension modules ([1930054](https://github.com/pawamoy/pytkdocs/commit/19300544cb31f6ad6be5828d041022d7bf917668) by Wang Yuzhi).
- Fix attribute parser for Python 3.9 ([ae80e98](https://github.com/pawamoy/pytkdocs/commit/ae80e988edf362ce99a880063639e4cd74bc44bb) by Timothée Mazzucotelli). [Issue #73](https://github.com/pawamoy/pytkdocs/issues/73) and [#75](https://github.com/pawamoy/pytkdocs/issues/75)
## [0.10.0](https://github.com/pawamoy/pytkdocs/releases/tag/0.10.0) - 2020-12-06
[Compare with 0.9.0](https://github.com/pawamoy/pytkdocs/compare/0.9.0...0.10.0)
### Bug Fixes
- Avoid recursion if a class has a reference to itself ([c92a791](https://github.com/pawamoy/pytkdocs/commit/c92a7911ea9f6321614bb692960f5252f79f6320) by Matthew Wardrop).
### Features
- Add initial restructured text docstring parsing ([0b58c8d](https://github.com/pawamoy/pytkdocs/commit/0b58c8d64846d3fb87588a5cf154dbd5bf60accf) by Patrick Lannigan). Issue [#67](https://github.com/pawamoy/pytkdocs/issues/67), PR [#71](https://github.com/pawamoy/pytkdocs/issues/71)
## [0.9.0](https://github.com/pawamoy/pytkdocs/releases/tag/0.9.0) - 2020-09-28
[Compare with 0.8.0](https://github.com/pawamoy/pytkdocs/compare/0.8.0...0.9.0)
### Features
- Add `new_path_syntax` option ([a0b677c](https://github.com/pawamoy/pytkdocs/commit/a0b677c9bbe62f344dfda05b50d729c4d8e7c36a) by Timothée Mazzucotelli).
See: ["Details on `new_path_syntax`"](https://pawamoy.github.io/pytkdocs/#details-on-new_path_syntax) in the documentation.
Issue [#66](https://github.com/pawamoy/pytkdocs/issues/66).
## [0.8.0](https://github.com/pawamoy/pytkdocs/releases/tag/0.8.0) - 2020-09-25
[Compare with 0.7.0](https://github.com/pawamoy/pytkdocs/compare/0.7.0...0.8.0)
### Features
- Add async property for coroutine functions ([a013c07](https://github.com/pawamoy/pytkdocs/commit/a013c07f73fce72f73e1267de97d041036106ab5) by Arthur Pastel). Issue [pawamoy/mkdocstrings#151](https://github.com/pawamoy/mkdocstrings/issues/151), PR [#65](https://github.com/pawamoy/pytkdocs/pull/65)
## [0.7.0](https://github.com/pawamoy/pytkdocs/releases/tag/0.7.0) - 2020-07-24
[Compare with 0.6.0](https://github.com/pawamoy/pytkdocs/compare/0.6.0...0.7.0)
### Bug Fixes
- Fix code detecting dataclass fields ([4c4a18b](https://github.com/pawamoy/pytkdocs/commit/4c4a18b881865c3182eef77a95ef1a6b1f1a5b6d) by Timothée Mazzucotelli).
- Prevent crash in case of empty dataclasses ([835c066](https://github.com/pawamoy/pytkdocs/commit/835c066ac47cdb1203dc3feb9dfc3f96df7109e0) by Jared Khan). PR [#56](https://github.com/pawamoy/pytkdocs/issues/56)
- Use `inspect.cleandoc` for stripping docstrings whitespace ([8009940](https://github.com/pawamoy/pytkdocs/commit/8009940c43a551a86ca91e0f81b234933d47bd6e) by Jared Khan). Issue [#54](https://github.com/pawamoy/pytkdocs/issues/54), PR [#55](https://github.com/pawamoy/pytkdocs/issues/55)
### Features
- Add support for Marshmallow models ([c250466](https://github.com/pawamoy/pytkdocs/commit/c250466e219edf24d2f85b7337b5670e6f27a724) by Stu Fisher). References: [#51](https://github.com/pawamoy/pytkdocs/issues/51)
## [0.6.0](https://github.com/pawamoy/pytkdocs/releases/tag/0.6.0) - 2020-06-14
[Compare with 0.5.2](https://github.com/pawamoy/pytkdocs/compare/0.5.2...0.6.0)
### Features
- Support attributes sections for Google-style docstrings ([02c0042](https://github.com/pawamoy/pytkdocs/commit/02c0042f9d4d8ab799550418d8474d1a6669feec) by Timothée Mazzucotelli).
## [0.5.2](https://github.com/pawamoy/pytkdocs/releases/tag/0.5.2) - 2020-06-11
[Compare with 0.5.1](https://github.com/pawamoy/pytkdocs/compare/0.5.1...0.5.2)
### Bug Fixes
- Ignore exceptions when trying to unwrap ([02ba876](https://github.com/pawamoy/pytkdocs/commit/02ba8762716c416499bdd4d4834c5de35bca23cb) by Timothée Mazzucotelli). References: [#45](https://github.com/pawamoy/pytkdocs/issues/45)
## [0.5.1](https://github.com/pawamoy/pytkdocs/releases/tag/0.5.1) - 2020-06-09
[Compare with 0.5.0](https://github.com/pawamoy/pytkdocs/compare/0.5.0...0.5.1)
### Bug Fixes
- Fix parsing tuple unpacking assignment ([6535fe8](https://github.com/pawamoy/pytkdocs/commit/6535fe813b6c4b756d1d481f097208c52470da6a) by Timothée Mazzucotelli). References: [#43](https://github.com/pawamoy/pytkdocs/issues/43)
## [0.5.0](https://github.com/pawamoy/pytkdocs/releases/tag/0.5.0) - 2020-06-08
[Compare with 0.4.0](https://github.com/pawamoy/pytkdocs/compare/0.4.0...0.5.0)
### Bug Fixes
- Fix getting documentation for wrapped objects ([09f38a5](https://github.com/pawamoy/pytkdocs/commit/09f38a501edde2963af50130c11ff38107d14367) by Timothée Mazzucotelli). References: [#32](https://github.com/pawamoy/pytkdocs/issues/32)
- Dedent attributes docstrings ([1a6809c](https://github.com/pawamoy/pytkdocs/commit/1a6809ce4358707b6b144a331955974e8891c475) by Timothée Mazzucotelli). References: [#42](https://github.com/pawamoy/pytkdocs/issues/42)
### Code Refactoring
- Accept any valid loader option in JSON input ([b58f4a9](https://github.com/pawamoy/pytkdocs/commit/b58f4a98b3da3d3dcfc82738ee560c1affa6d387) by Timothée Mazzucotelli).
- Change Pydantic properties names ([fa8d2e7](https://github.com/pawamoy/pytkdocs/commit/fa8d2e7a60ebcc39012cea8a6228770a4e7db2c4) by Timothée Mazzucotelli).
- Refactor parsers ([3caefba](https://github.com/pawamoy/pytkdocs/commit/3caefba1dcbd85a0bc2d05948073677c751aa1f3) by Timothée Mazzucotelli).
- Don't serialize empty error lists in the result ([7bec6c4](https://github.com/pawamoy/pytkdocs/commit/7bec6c4aca9d3087bb5fb4e34b2801a58839dd3a) by Timothée Mazzucotelli).
### Features
- Accept docstring options in JSON input ([400af0b](https://github.com/pawamoy/pytkdocs/commit/400af0bccb4297c3e872910d13c0b44ca3ce1339) by Timothée Mazzucotelli).
- Retrieve dataclass fields docstrings ([09eb224](https://github.com/pawamoy/pytkdocs/commit/09eb224c3c961bdd82640221b888cbe52b9a489e) by Timothée Mazzucotelli). References: [#31](https://github.com/pawamoy/pytkdocs/issues/31)
- Add support for class inheritance (inherited members) ([1af9a53](https://github.com/pawamoy/pytkdocs/commit/1af9a53f6c387cad17ec50b523bc22e149fdc8d1) by Timothée Mazzucotelli). References: [#18](https://github.com/pawamoy/pytkdocs/issues/18), [#41](https://github.com/pawamoy/pytkdocs/issues/41)
- Add support for examples section ([9521c7f](https://github.com/pawamoy/pytkdocs/commit/9521c7f0f27513d18918e7260fb51d73fa548865) by Iago GR). References: [#8](https://github.com/pawamoy/pytkdocs/issues/8)
- As a consequence of the attribute parser refactor: pick attributes without docstrings. References: [#11](https://github.com/pawamoy/pytkdocs/issues/11)
## [0.4.0](https://github.com/pawamoy/pytkdocs/releases/tag/0.4.0) - 2020-05-17
[Compare with 0.3.0](https://github.com/pawamoy/pytkdocs/compare/0.3.0...0.4.0)
### Bug Fixes
- Never attempt to parse a null docstring ([aa92668](https://github.com/pawamoy/pytkdocs/commit/aa926686c9f3b9922968387ec68e3a1caeee08a7) by Timothée Mazzucotelli). References: [#37](https://github.com/pawamoy/pytkdocs/issues/37)
- Restore stdout before printing a traceback ([20c21e9](https://github.com/pawamoy/pytkdocs/commit/20c21e9fa8e5a08e113cbbec2da1af240eb6ce16) by Timothée Mazzucotelli). References: [#36](https://github.com/pawamoy/pytkdocs/issues/36)
- Discard import-time stdout ([17f71af](https://github.com/pawamoy/pytkdocs/commit/17f71afb46631dc64cfac9b37a4da8d5cb001801) by Timothée Mazzucotelli). References: [#24](https://github.com/pawamoy/pytkdocs/issues/24)
- Don't allow `None` for a property's docstring ([b5868f8](https://github.com/pawamoy/pytkdocs/commit/b5868f83fc6590ee37325377e4cfd42f6dd3a566) by Timothée Mazzucotelli).
- Fix relative path for native namespace packages ([a74dccf](https://github.com/pawamoy/pytkdocs/commit/a74dccf9d753b956044ad3b643457d9ad6c86c64) by Shyam Dwaraknath). References: [#19](https://github.com/pawamoy/pytkdocs/issues/19), [#22](https://github.com/pawamoy/pytkdocs/issues/22)
### Code Refactoring
- Layout a docstring parser base ([d427bcc](https://github.com/pawamoy/pytkdocs/commit/d427bccbfd619f65ae2d12559fcd6f1f1649d036) by Timothée Mazzucotelli).
### Features
- Add dataclass and pydantic support ([a172ad8](https://github.com/pawamoy/pytkdocs/commit/a172ad88ee3b1735ee4ad0c91f3274c359e1e82e) by Shyam Dwaraknath). References: [#9](https://github.com/pawamoy/pytkdocs/issues/9), [#27](https://github.com/pawamoy/pytkdocs/issues/27)
## [0.3.0](https://github.com/pawamoy/pytkdocs/releases/tag/0.3.0) - 2020-04-10
[Compare with 0.2.1](https://github.com/pawamoy/pytkdocs/compare/0.2.1...0.3.0)
### Bug Fixes
- Fix parsing of `*args` and `**kwargs` ([b81c93e](https://github.com/pawamoy/pytkdocs/commit/b81c93eef2435f2ed1d70b4d7c3946caa564c59e) by adrienhenry). Related issues/PRs: [#20](https://github.com/pawamoy/pytkdocs/issues/20), [#21](https://github.com/pawamoy/pytkdocs/issues/21)
### Features
- Support different indentations and complex markup in docstrings sections ([2f53082](https://github.com/pawamoy/pytkdocs/commit/2f53082dbd2bcb72423d4aff0cb3bf4319476be7) by Timothée Mazzucotelli). Related issues/PRs: [#17](https://github.com/pawamoy/pytkdocs/issues/17)
## [0.2.1](https://github.com/pawamoy/pytkdocs/releases/tag/0.2.1) - 2020-04-07
[Compare with 0.2.0](https://github.com/pawamoy/pytkdocs/compare/0.2.0...0.2.1)
### Bug Fixes
- Fix forward refs replacement for python > 3.6 ([6a90aca](https://github.com/pawamoy/pytkdocs/commit/6a90aca346209fe2a4e3eec6bfb45f353bce679f) by Timothée Mazzucotelli).
- Handle exception parsing error ([d6561f8](https://github.com/pawamoy/pytkdocs/commit/d6561f86362e7a9d8c45471f1d6eb5deffd5e0c8) by Timothée Mazzucotelli). Related issues/PRs: [#16](https://github.com/pawamoy/pytkdocs/issues/16)
## [0.2.0](https://github.com/pawamoy/py-tkdocs/releases/tag/0.2.0) - 2020-03-27
[Compare with 0.1.2](https://github.com/pawamoy/pytkdocs/compare/0.1.2...V0.2.0)
### Added
- Add members and filters options ([7af68cc](https://github.com/pawamoy/py-tkdocs/commit/7af68ccffe51557853899a04b5ce5610891d9228)).
- Read type annotations in docstrings.
- Add modules' source code to the output ([f05290b](https://github.com/pawamoy/py-tkdocs/commit/f05290b5a3fb33790c66847a71862c2026585a00)).
### Changed
- The code was refactored for readability and robustness ([ef9ba9d](https://github.com/pawamoy/py-tkdocs/commit/ef9ba9d62bceca7795a751a730fc3f64c9ec9daf)).
This is a breaking change as some items in the JSON output have changed:
- the object `signature` value was moved from `obj.docstring.signature` to `obj.signature`,
- the docstring `sections` value was moved from `obj.docstring.sections` to `obj.docstring_sections`,
- the docstring `parsing_errors` value was moved from `obj.docstring.parsing_errors` to `obj.docstring_errors`,
## [0.1.2](https://github.com/pawamoy/pytkdocs/releases/tag/0.1.2) - 2020-03-23
[Compare with 0.1.1](https://github.com/pawamoy/pytkdocs/compare/0.1.1...0.1.2)
### Fixed
- Catch error when trying to get builtins module file path ([48df6bc](https://github.com/pawamoy/pytkdocs/commit/48df6bc9cf878f3ce281fac6ccaf8fe1d4e89c84)).
## [0.1.1](https://github.com/pawamoy/pytkdocs/releases/tag/0.1.1) - 2020-03-21
[Compare with 0.1.0](https://github.com/pawamoy/pytkdocs/compare/0.1.0...0.1.1)
### Fixed
- Fix 'no parsing_errors attribute in Docstring' error ([0c8a986](https://github.com/pawamoy/pytkdocs/commit/0c8a986a05efe35caebb67d66320ced813065ae4)).
- Handle `KeyError` when searching for param type annotation in signature ([b87fe78](https://github.com/pawamoy/pytkdocs/commit/b87fe78fc5201bac8d54fa70ebb53476480a4126)).
## [0.1.0](https://github.com/pawamoy/pytkdocs/releases/tag/0.1.0) - 2020-03-20
[Compare with first commit](https://github.com/pawamoy/pytkdocs/compare/dce21c1b7e15e44529d3cd3ff0fc33f88328de5d...0.1.0)
### Added
- Initial contents, moved from [`mkdocstrings`](https://github.com/pawamoy/mkdocstrings) and tweaked a bit.
pytkdocs-0.16.1/CODE_OF_CONDUCT.md
# Contributor Covenant Code of Conduct
## Our Pledge
In the interest of fostering an open and welcoming environment, we as
contributors and maintainers pledge to making participation in our project and
our community a harassment-free experience for everyone, regardless of age, body
size, disability, ethnicity, gender identity and expression, level of experience,
nationality, personal appearance, race, religion, or sexual identity and
orientation.
## Our Standards
Examples of behavior that contributes to creating a positive environment
include:
* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members
Examples of unacceptable behavior by participants include:
* The use of sexualized language or imagery and unwelcome sexual attention or
advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or electronic
address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
## Our Responsibilities
Project maintainers are responsible for clarifying the standards of acceptable
behavior and are expected to take appropriate and fair corrective action in
response to any instances of unacceptable behavior.
Project maintainers have the right and responsibility to remove, edit, or
reject comments, commits, code, wiki edits, issues, and other contributions
that are not aligned to this Code of Conduct, or to ban temporarily or
permanently any contributor for other behaviors that they deem inappropriate,
threatening, offensive, or harmful.
## Scope
This Code of Conduct applies both within project spaces and in public spaces
when an individual is representing the project or its community. Examples of
representing a project or community include using an official project e-mail
address, posting via an official social media account, or acting as an appointed
representative at an online or offline event. Representation of a project may be
further defined and clarified by project maintainers.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported by contacting the project team at pawamoy@pm.me. All
complaints will be reviewed and investigated and will result in a response that
is deemed necessary and appropriate to the circumstances. The project team is
obligated to maintain confidentiality with regard to the reporter of an incident.
Further details of specific enforcement policies may be posted separately.
Project maintainers who do not follow or enforce the Code of Conduct in good
faith may face temporary or permanent repercussions as determined by other
members of the project's leadership.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
available at [http://contributor-covenant.org/version/1/4][version]
[homepage]: http://contributor-covenant.org
[version]: http://contributor-covenant.org/version/1/4/
pytkdocs-0.16.1/CONTRIBUTING.md
# Contributing
Contributions are welcome, and they are greatly appreciated!
Every little bit helps, and credit will always be given.
## Environment setup
Nothing easier!
Fork and clone the repository, then:
```bash
cd pytkdocs
make setup
```
!!! note
If it fails for some reason,
you'll need to install
[PDM](https://github.com/pdm-project/pdm)
manually.
You can install it with:
```bash
python3 -m pip install --user pipx
pipx install pdm
```
Now you can try running `make setup` again,
or simply `pdm install`.
You now have the dependencies installed.
You can run the application with `pdm run pytkdocs [ARGS...]`.
Run `make help` to see all the available actions!
## Tasks
This project uses [duty](https://github.com/pawamoy/duty) to run tasks.
A Makefile is also provided. The Makefile will try to run certain tasks
on multiple Python versions. If for some reason you don't want to run the task
on multiple Python versions, you can do one of the following:
1. `export PYTHON_VERSIONS= `: this will run the task
with only the current Python version
2. run the task directly with `pdm run duty TASK`
The Makefile detects if a virtual environment is activated,
so `make` will work the same with the virtualenv activated or not.
## Development
As usual:
1. create a new branch: `git checkout -b feature-or-bugfix-name`
1. edit the code and/or the documentation
**Before committing:**
1. run `make format` to auto-format the code
1. run `make check` to check everything (fix any warning)
1. run `make test` to run the tests (fix any issue)
1. if you updated the documentation or the project dependencies:
1. run `make docs-serve`
1. go to http://localhost:8000 and check that everything looks good
1. follow our [commit message convention](#commit-message-convention)
If you are unsure about how to fix or ignore a warning,
just let the continuous integration fail,
and we will help you during review.
Don't bother updating the changelog, we will take care of this.
## Commit message convention
Commit messages must follow the
[Angular style](https://gist.github.com/stephenparish/9941e89d80e2bc58a153#format-of-the-commit-message):
```
<type>[(scope)]: Subject
[Body]
```
Scope and body are optional. Type can be:
- `build`: About packaging, building wheels, etc.
- `chore`: About packaging or repo/files management.
- `ci`: About Continuous Integration.
- `docs`: About documentation.
- `feat`: New feature.
- `fix`: Bug fix.
- `perf`: About performance.
- `refactor`: Changes which are not features nor bug fixes.
- `style`: A change in code style/format.
- `tests`: About tests.
**Subject (and body) must be valid Markdown.**
If you write a body, please add issues references at the end:
```
Body.
References: #10, #11.
Fixes #15.
```
## Pull requests guidelines
Link to any related issue in the Pull Request message.
During review, we recommend using fixups:
```bash
# SHA is the SHA of the commit you want to fix
git commit --fixup=SHA
```
Once all the changes are approved, you can squash your commits:
```bash
git rebase -i --autosquash master
```
And force-push:
```bash
git push -f
```
If this seems all too complicated, you can push or force-push each new commit,
and we will squash them ourselves if needed, before merging.
pytkdocs-0.16.1/CREDITS.md
# Credits
These projects were used to build `pytkdocs`. **Thank you!**
[`python`](https://www.python.org/) |
[`poetry`](https://poetry.eustace.io/) |
[`copier-poetry`](https://github.com/pawamoy/copier-poetry)
### Direct dependencies
[`autoflake`](https://github.com/myint/autoflake) |
[`black`](https://github.com/psf/black) |
[`cached-property`](https://github.com/pydanny/cached-property) |
[`dataclasses`](https://github.com/ericvsmith/dataclasses) |
[`docstring_parser`](https://github.com/rr-/docstring_parser) |
[`duty`](https://github.com/pawamoy/duty) |
[`flake8-black`](https://github.com/peterjc/flake8-black) |
[`flake8-builtins`](https://github.com/gforcada/flake8-builtins) |
[`flake8-pytest-style`](https://pypi.org/project/flake8-pytest-style) |
[`flake8-tidy-imports`](https://github.com/adamchainz/flake8-tidy-imports) |
[`flake8-variables-names`](https://github.com/best-doctor/flake8-variables-names) |
[`flakehell`](None) |
[`git-changelog`](https://github.com/pawamoy/git-changelog) |
[`httpx`](https://github.com/encode/httpx) |
[`ipython`](https://ipython.org) |
[`isort`](https://github.com/timothycrosley/isort) |
[`jinja2-cli`](https://github.com/mattrobenolt/jinja2-cli) |
[`markdown-include`](https://github.com/cmacmackin/markdown-include/) |
[`marshmallow`](https://github.com/marshmallow-code/marshmallow) |
[`mkdocs`](https://www.mkdocs.org) |
[`mkdocs-material`](https://squidfunk.github.io/mkdocs-material/) |
[`mkdocstrings`](https://github.com/pawamoy/mkdocstrings) |
[`mypy`](http://www.mypy-lang.org/) |
[`pydantic`](https://github.com/samuelcolvin/pydantic) |
[`pytest`](https://docs.pytest.org/en/latest/) |
[`pytest-cov`](https://github.com/pytest-dev/pytest-cov) |
[`pytest-randomly`](https://github.com/pytest-dev/pytest-randomly) |
[`pytest-sugar`](http://pivotfinland.com/pytest-sugar/) |
[`pytest-xdist`](https://github.com/pytest-dev/pytest-xdist) |
[`toml`](https://github.com/uiri/toml) |
[`typing-extensions`](https://github.com/python/typing/blob/master/typing_extensions/README.rst) |
[`wemake-python-styleguide`](https://wemake-python-stylegui.de)
### Indirect dependencies
[`ansimarkup`](https://github.com/gvalkov/python-ansimarkup) |
[`apipkg`](https://github.com/pytest-dev/apipkg) |
[`appdirs`](http://github.com/ActiveState/appdirs) |
[`appnope`](http://github.com/minrk/appnope) |
[`astor`](https://github.com/berkerpeksag/astor) |
[`astroid`](https://github.com/PyCQA/astroid) |
[`atomicwrites`](https://github.com/untitaker/python-atomicwrites) |
[`attrs`](https://www.attrs.org/) |
[`backcall`](https://github.com/takluyver/backcall) |
[`bandit`](https://bandit.readthedocs.io/en/latest/) |
[`certifi`](https://certifiio.readthedocs.io/en/latest/) |
[`chardet`](https://github.com/chardet/chardet) |
[`click`](https://palletsprojects.com/p/click/) |
[`colorama`](https://github.com/tartley/colorama) |
[`contextvars`](http://github.com/MagicStack/contextvars) |
[`coverage`](https://github.com/nedbat/coveragepy) |
[`darglint`](None) |
[`decorator`](https://github.com/micheles/decorator) |
[`docstring-parser`](https://github.com/rr-/docstring_parser) |
[`docutils`](http://docutils.sourceforge.net/) |
[`entrypoints`](https://github.com/takluyver/entrypoints) |
[`eradicate`](https://github.com/myint/eradicate) |
[`execnet`](https://execnet.readthedocs.io/en/latest/) |
[`failprint`](https://github.com/pawamoy/failprint) |
[`flake8`](https://gitlab.com/pycqa/flake8) |
[`flake8-bandit`](https://github.com/tylerwince/flake8-bandit) |
[`flake8-broken-line`](https://github.com/sobolevn/flake8-broken-line) |
[`flake8-bugbear`](https://github.com/PyCQA/flake8-bugbear) |
[`flake8-commas`](https://github.com/PyCQA/flake8-commas/) |
[`flake8-comprehensions`](https://github.com/adamchainz/flake8-comprehensions) |
[`flake8-debugger`](https://github.com/jbkahn/flake8-debugger) |
[`flake8-docstrings`](https://gitlab.com/pycqa/flake8-docstrings) |
[`flake8-eradicate`](https://github.com/sobolevn/flake8-eradicate) |
[`flake8-isort`](https://github.com/gforcada/flake8-isort) |
[`flake8-plugin-utils`](https://pypi.org/project/flake8-plugin-utils) |
[`flake8-polyfill`](https://gitlab.com/pycqa/flake8-polyfill) |
[`flake8-quotes`](http://github.com/zheller/flake8-quotes/) |
[`flake8-rst-docstrings`](https://github.com/peterjc/flake8-rst-docstrings) |
[`flake8-string-format`](https://github.com/xZise/flake8-string-format) |
[`future`](https://python-future.org) |
[`gitdb`](https://github.com/gitpython-developers/gitdb) |
[`GitPython`](https://github.com/gitpython-developers/GitPython) |
[`h11`](https://github.com/python-hyper/h11) |
[`httpcore`](https://github.com/encode/httpcore) |
[`idna`](https://github.com/kjd/idna) |
[`immutables`](https://github.com/MagicStack/immutables) |
[`importlib-metadata`](https://github.com/python/importlib_metadata) |
[`iniconfig`](http://github.com/RonnyPfannschmidt/iniconfig) |
[`ipython-genutils`](http://ipython.org) |
[`jedi`](https://github.com/davidhalter/jedi) |
[`Jinja2`](https://palletsprojects.com/p/jinja/) |
[`joblib`](https://joblib.readthedocs.io) |
[`lazy-object-proxy`](https://github.com/ionelmc/python-lazy-object-proxy) |
[`livereload`](https://github.com/lepture/python-livereload) |
[`lunr`](https://github.com/yeraydiazdiaz/lunr.py) |
[`Markdown`](https://Python-Markdown.github.io/) |
[`MarkupSafe`](https://palletsprojects.com/p/markupsafe/) |
[`mccabe`](https://github.com/pycqa/mccabe) |
[`mkdocs-material-extensions`](https://github.com/facelessuser/mkdocs-material-extensions) |
[`mypy-extensions`](https://github.com/python/mypy_extensions) |
[`nltk`](http://nltk.org/) |
[`packaging`](https://github.com/pypa/packaging) |
[`parso`](https://github.com/davidhalter/parso) |
[`pathspec`](https://github.com/cpburnz/python-path-specification) |
[`pbr`](https://docs.openstack.org/pbr/latest/) |
[`pep8-naming`](https://github.com/PyCQA/pep8-naming) |
[`pexpect`](https://pexpect.readthedocs.io/) |
[`pickleshare`](https://github.com/pickleshare/pickleshare) |
[`pluggy`](https://github.com/pytest-dev/pluggy) |
[`prompt-toolkit`](https://github.com/prompt-toolkit/python-prompt-toolkit) |
[`ptyprocess`](https://github.com/pexpect/ptyprocess) |
[`py`](https://py.readthedocs.io/) |
[`pycodestyle`](https://pycodestyle.readthedocs.io/) |
[`pydocstyle`](https://github.com/PyCQA/pydocstyle/) |
[`pyflakes`](https://github.com/PyCQA/pyflakes) |
[`Pygments`](https://pygments.org/) |
[`pylint`](https://github.com/PyCQA/pylint) |
[`pymdown-extensions`](https://github.com/facelessuser/pymdown-extensions) |
[`pyparsing`](https://github.com/pyparsing/pyparsing/) |
[`pytest-forked`](https://github.com/pytest-dev/pytest-forked) |
[`PyYAML`](https://pyyaml.org/) |
[`regex`](https://bitbucket.org/mrabarnett/mrab-regex) |
[`restructuredtext-lint`](https://github.com/twolfson/restructuredtext-lint) |
[`rfc3986`](http://rfc3986.readthedocs.io) |
[`six`](https://github.com/benjaminp/six) |
[`smmap`](https://github.com/gitpython-developers/smmap) |
[`sniffio`](https://github.com/python-trio/sniffio) |
[`snowballstemmer`](https://github.com/snowballstem/snowball) |
[`stevedore`](https://docs.openstack.org/stevedore/latest/) |
[`termcolor`](http://pypi.python.org/pypi/termcolor) |
[`testfixtures`](https://github.com/Simplistix/testfixtures) |
[`tornado`](http://www.tornadoweb.org/) |
[`tqdm`](https://github.com/tqdm/tqdm) |
[`traitlets`](http://ipython.org) |
[`typed-ast`](https://github.com/python/typed_ast) |
[`urllib3`](https://urllib3.readthedocs.io/) |
[`wcwidth`](https://github.com/jquast/wcwidth) |
[`wrapt`](https://github.com/GrahamDumpleton/wrapt) |
[`zipp`](https://github.com/jaraco/zipp)
**[More credits from the author](http://pawamoy.github.io/credits/)**
pytkdocs-0.16.1/LICENSE
ISC License
Copyright (c) 2020, Timothée Mazzucotelli
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
pytkdocs-0.16.1/Makefile
.DEFAULT_GOAL := help
SHELL := bash
DUTY = $(shell [ -n "${VIRTUAL_ENV}" ] || echo pdm run) duty
args = $(foreach a,$($(subst -,_,$1)_args),$(if $(value $a),$a="$($a)"))
check_quality_args = files
docs_serve_args = host port
release_args = version
test_args = match
BASIC_DUTIES = \
changelog \
check-dependencies \
clean \
coverage \
docs \
docs-deploy \
docs-regen \
docs-serve \
format \
release
QUALITY_DUTIES = \
check-quality \
check-docs \
check-types \
test
.PHONY: help
help:
@$(DUTY) --list
.PHONY: lock
lock:
@pdm lock
.PHONY: setup
setup:
@bash scripts/setup.sh
.PHONY: check
check:
@bash scripts/multirun.sh duty check-quality check-types check-docs
@$(DUTY) check-dependencies
.PHONY: $(BASIC_DUTIES)
$(BASIC_DUTIES):
@$(DUTY) $@ $(call args,$@)
.PHONY: $(QUALITY_DUTIES)
$(QUALITY_DUTIES):
@bash scripts/multirun.sh duty $@ $(call args,$@)
pytkdocs-0.16.1/README.md
# pytkdocs
[](https://github.com/pawamoy/pytkdocs/actions?query=workflow%3Aci)
[](https://pawamoy.github.io/pytkdocs/)
[](https://pypi.org/project/pytkdocs/)
[](https://anaconda.org/conda-forge/pytkdocs)
[](https://gitpod.io/#https://github.com/pawamoy/pytkdocs)
[](https://gitter.im/pytkdocs/community)
Load Python objects documentation.
## Installation
With `pip`:
```bash
pip install pytkdocs
```
With [`pipx`](https://github.com/pipxproject/pipx):
```bash
python3.7 -m pip install --user pipx
pipx install pytkdocs
```
With `conda`:
```bash
conda install -c conda-forge pytkdocs
```
## Usage
`pytkdocs` accepts JSON on standard input and writes JSON on standard output.
Input format:
```json
{
"objects": [
{
"path": "pytkdocs",
"new_path_syntax": false,
"members": true,
"inherited_members": false,
"filters": [
"!^_[^_]"
],
"docstring_style": "google",
"docstring_options": {
"replace_admonitions": true
}
}
]
}
```
Output format:
```json
{
"loading_errors": [
"string (message)"
],
"parsing_errors": {
"string (object)": [
"string (message)"
]
},
"objects": [
{
"name": "pytkdocs",
"path": "pytkdocs",
"category": "module",
"file_path": "/media/data/dev/pawamoy/pytkdocs/src/pytkdocs/__init__.py",
"relative_file_path": "pytkdocs/__init__.py",
"properties": [
"special"
],
"parent_path": "pytkdocs",
"has_contents": true,
"docstring": "pytkdocs package.\n\nLoad Python objects documentation.",
"docstring_sections": [
{
"type": "markdown",
"value": "pytkdocs package.\n\nLoad Python objects documentation."
}
],
"source": {
"code": "\"\"\"\npytkdocs package.\n\nLoad Python objects documentation.\n\"\"\"\n\nfrom typing import List\n\n__all__: List[str] = []\n",
"line_start": 1
},
"children": {
"pytkdocs.__all__": {
"name": "__all__",
"path": "pytkdocs.__all__",
"category": "attribute",
"file_path": "/media/data/dev/pawamoy/pytkdocs/src/pytkdocs/__init__.py",
"relative_file_path": "pytkdocs/__init__.py",
"properties": [
"special"
],
"parent_path": "pytkdocs",
"has_contents": false,
"docstring": null,
"docstring_sections": [],
"source": {},
"children": {},
"attributes": [],
"methods": [],
"functions": [],
"modules": [],
"classes": []
}
},
"attributes": [
"pytkdocs.__all__"
],
"methods": [],
"functions": [],
"modules": [
"pytkdocs.__main__",
"pytkdocs.cli",
"pytkdocs.loader",
"pytkdocs.objects",
"pytkdocs.parsers",
"pytkdocs.properties",
"pytkdocs.serializer"
],
"classes": []
}
]
}
```
## Command-line
Running `pytkdocs` without arguments reads the whole standard input
and outputs the result once.
Running `pytkdocs --line-by-line` enters an infinite loop:
at each iteration, one line is read from the standard input,
and the result is written back as a single line.
This allows other programs to use `pytkdocs` as a subprocess,
feeding it single lines of JSON and reading back single lines of JSON as well.
This mode was actually implemented specifically for
[mkdocstrings](https://github.com/pawamoy/mkdocstrings).
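For illustration, here is a minimal Python sketch of that subprocess pattern.
It assumes `pytkdocs` is installed and available on the `PATH`; the request and the keys
read from the response follow the input/output formats shown above, and error handling is omitted.

```python
import json
import subprocess

# Start pytkdocs in line-by-line mode so it can answer several requests.
process = subprocess.Popen(
    ["pytkdocs", "--line-by-line"],
    stdin=subprocess.PIPE,
    stdout=subprocess.PIPE,
    text=True,
)

# One request per line: ask for the documentation of the pytkdocs package itself.
request = {"objects": [{"path": "pytkdocs", "docstring_style": "google"}]}
process.stdin.write(json.dumps(request) + "\n")
process.stdin.flush()

# The answer comes back as a single line of JSON.
response = json.loads(process.stdout.readline())
print(response["objects"][0]["path"])

process.stdin.close()
process.wait()
```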
## Configuration
The configuration options available are:
- `new_path_syntax`: when set to true, this option forces the use of the new object path syntax,
which uses a colon (`:`) to delimit modules from other objects.
- `filters`: filters are regular expressions that allow you to select or un-select objects based on their names.
They are applied recursively (on every child of every object).
If an expression starts with an exclamation mark,
objects matching it are filtered out (the exclamation mark is removed before evaluation).
Otherwise, objects matching it are selected.
Every regular expression is tested against every name,
allowing fine-grained filtering (see the sketch after this list). Example:
- `!^_`: filter out every object whose name starts with `_` (private/protected)
- `^__`: but still select those that start with two `_` (class-private)
- `!^__.*__$`: except those that also end with two `_` (specials)
- `members`: this option lets you explicitly select the members of the top-level object.
If `True`, every member that passes the filters is selected. If `False`, nothing is selected.
If it's a list of names, only those members are selected, and the filters are applied to their children only.
- `inherited_members`: true or false (default). When enabled, inherited members will be selected as well.
- `docstring_style`: the docstring style to use when parsing docstrings: `google`, `restructured-text`¹ or `numpy`².
- `docstring_options`: options to pass to the docstring parser.
- `replace_admonitions` boolean option (default: true). When enabled, this option will
replace titles of an indented block by their Markdown admonition equivalent:
`AdmonitionType: Title` will become `!!! admonitiontype "Title"`.
- `trim_doctest_flags` boolean option (default: true). When enabled, all doctest
flags (of the form `# doctest: +FLAG` and `<BLANKLINE>`) located within Python
example blocks will be removed from the parsed output.
The `google` docstring style accepts both options. The `numpy` style only accepts `trim_doctest_flags`. The `restructured-text` style does not accept any options.
¹: reStructured Text parsing is in active development and is not feature-complete yet.
²: The following sections are currently not supported: `Notes`, `See Also`, `Warns` and `References`.
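To make the filtering rules concrete, here is a small, illustrative Python sketch.
It is not `pytkdocs`' actual implementation; it only mirrors the behavior described above,
where the last matching filter decides whether a name is kept.

```python
import re
from typing import List


def name_is_kept(name: str, filters: List[str]) -> bool:
    """Illustrative re-implementation of the filtering rules described above."""
    keep = True  # names matching no filter are kept
    for expression in filters:
        if expression.startswith("!"):
            # Exclusion filter: drop the name if the rest of the expression matches.
            if re.search(expression[1:], name):
                keep = False
        elif re.search(expression, name):
            # Inclusion filter: (re)select the name.
            keep = True
    return keep


filters = ["!^_", "^__", "!^__.*__$"]
print(name_is_kept("method", filters))           # True: no filter matches
print(name_is_kept("_private", filters))         # False: excluded by "!^_"
print(name_is_kept("__class_private", filters))  # True: re-selected by "^__"
print(name_is_kept("__init__", filters))         # False: excluded by "!^__.*__$"
```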
### Details on `new_path_syntax`
Example:

| New syntax | Old syntax |
| ---------- | ---------- |
| `package.module:Class.attribute` | `package.module.Class.attribute` |
- If there is a colon in an object's path, `pytkdocs` splits the path accordingly,
regardless of the value of `new_path_syntax` (see the sketch below).
- If there isn't a colon, and `new_path_syntax` is false, `pytkdocs` uses the
old importing behavior.
- If there isn't a colon, and `new_path_syntax` is true, `pytkdocs` uses the new
importing behavior and therefore considers that the path points to a module.
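A hypothetical sketch of these rules in Python (the function name and the dotted-path
fallback are illustrative only, not the actual loader code):

```python
from typing import Tuple


def split_object_path(path: str, new_path_syntax: bool = False) -> Tuple[str, str]:
    """Illustrate how a path could be split into a module part and an object part."""
    if ":" in path:
        # A colon always wins, regardless of new_path_syntax.
        module_path, _, object_path = path.partition(":")
        return module_path, object_path
    if new_path_syntax:
        # New behavior: the whole path is considered a module path.
        return path, ""
    # Old behavior (simplified): a real implementation would try successive imports
    # to find out where the module part ends and the object part begins.
    module_path, _, object_path = path.rpartition(".")
    return module_path, object_path


print(split_object_path("package.module:Class.attribute"))
# ('package.module', 'Class.attribute')
print(split_object_path("package.module", new_path_syntax=True))
# ('package.module', '')
```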
pytkdocs-0.16.1/config/coverage.ini
[coverage:run]
branch = true
parallel = true
source =
src/
tests/
[coverage:paths]
equivalent =
src/
__pypackages__/
[coverage:report]
precision = 2
omit =
src/*/__init__.py
src/*/__main__.py
tests/__init__.py
tests/fixtures/*
[coverage:json]
output = htmlcov/coverage.json
pytkdocs-0.16.1/config/flake8.ini
[flake8]
exclude = fixtures,site
max-line-length = 132
docstring-convention = google
ban-relative-imports = true
ignore =
# redundant with W0622 (builtin override), which is more precise about line number
A001
# missing docstring in magic method
D105
# multi-line docstring summary should start at the first line
D212
# does not support Parameters sections
D417
# whitespace before ':' (incompatible with Black)
E203
# redundant with E0602 (undefined variable)
F821
# black already deals with quoting
Q000
# use of assert
S101
# we are not parsing XML
S405
# line break before binary operator (incompatible with Black)
W503
# two-lowercase-letters variable DO conform to snake_case naming style
C0103
# redundant with D102 (missing docstring)
C0116
# line too long
C0301
# too many instance attributes
R0902
# too few public methods
R0903
# too many public methods
R0904
# too many branches
R0912
# too many methods
R0913
# too many local variables
R0914
# too many statements
R0915
# redundant with F401 (unused import)
W0611
# lazy formatting for logging calls
W1203
# short name
VNE001
# f-strings
WPS305
# common variable names (too annoying)
WPS110
# redundant with W0622 (builtin override), which is more precise about line number
WPS125
# too many imports
WPS201
# too many module members
WPS202
# overused expression
WPS204
# too many local variables
WPS210
# too many arguments
WPS211
# too many expressions
WPS213
# too many methods
WPS214
# too deep nesting
WPS220
# high Jones complexity
WPS221
# too many elif branches
WPS223
# string over-use: can't disable it per file?
WPS226
# too many public instance attributes
WPS230
# too complex f-string
WPS237
# too cumbersome, asks to write class A(object)
WPS306
# multi-line parameters (incompatible with Black)
WPS317
# multi-line strings (incompatible with attributes docstrings)
WPS322
# implicit string concatenation
WPS326
# explicit string concatenation
WPS336
# noqa overuse
WPS402
# __init__ modules with logic
WPS412
# print statements
WPS421
# statement with no effect (not compatible with attribute docstrings)
WPS428
# redundant with C0415 (not top-level import)
WPS433
# implicit dict.get usage (generally false-positive)
WPS529
pytkdocs-0.16.1/config/mypy.ini
[mypy]
ignore_missing_imports = true
exclude = tests/fixtures/
warn_unused_ignores = false
show_error_codes = true
pytkdocs-0.16.1/config/pytest.ini
[pytest]
norecursedirs =
.git
.tox
.env
dist
build
python_files =
test_*.py
*_test.py
tests.py
addopts =
--cov
--cov-config config/coverage.ini
testpaths =
tests
pytkdocs-0.16.1/docs/changelog.md
--8<-- "CHANGELOG.md"
pytkdocs-0.16.1/docs/code_of_conduct.md
--8<-- "CODE_OF_CONDUCT.md"
pytkdocs-0.16.1/docs/contributing.md
--8<-- "CONTRIBUTING.md"
pytkdocs-0.16.1/docs/css/material.css
/* More space at the bottom of the page. */
.md-main__inner {
margin-bottom: 1.5rem;
}
pytkdocs-0.16.1/docs/css/mkdocstrings.css
/* Indentation. */
div.doc-contents:not(.first) {
padding-left: 25px;
border-left: 4px solid rgba(230, 230, 230);
margin-bottom: 80px;
}
pytkdocs-0.16.1/docs/gen_credits.py
"""Generate the credits page."""
import functools
import re
from itertools import chain
from pathlib import Path
from urllib.request import urlopen
import mkdocs_gen_files
import toml
from jinja2 import StrictUndefined
from jinja2.sandbox import SandboxedEnvironment
def get_credits_data() -> dict:
"""Return data used to generate the credits file.
Returns:
Data required to render the credits template.
"""
project_dir = Path(__file__).parent.parent
metadata = toml.load(project_dir / "pyproject.toml")["project"]
metadata_pdm = toml.load(project_dir / "pyproject.toml")["tool"]["pdm"]
lock_data = toml.load(project_dir / "pdm.lock")
project_name = metadata["name"]
all_dependencies = chain(
metadata.get("dependencies", []),
chain(*metadata.get("optional-dependencies", {}).values()),
chain(*metadata_pdm.get("dev-dependencies", {}).values()),
)
direct_dependencies = {re.sub(r"[^\w-].*$", "", dep) for dep in all_dependencies}
direct_dependencies = {dep.lower() for dep in direct_dependencies}
indirect_dependencies = {pkg["name"].lower() for pkg in lock_data["package"]}
indirect_dependencies -= direct_dependencies
return {
"project_name": project_name,
"direct_dependencies": sorted(direct_dependencies),
"indirect_dependencies": sorted(indirect_dependencies),
"more_credits": "http://pawamoy.github.io/credits/",
}
@functools.lru_cache(maxsize=None)
def get_credits():
"""Return credits as Markdown.
Returns:
The credits page Markdown.
"""
jinja_env = SandboxedEnvironment(undefined=StrictUndefined)
commit = "c78c29caa345b6ace19494a98b1544253cbaf8c1"
template_url = f"https://raw.githubusercontent.com/pawamoy/jinja-templates/{commit}/credits.md"
template_data = get_credits_data()
template_text = urlopen(template_url).read().decode("utf8") # noqa: S310
return jinja_env.from_string(template_text).render(**template_data)
with mkdocs_gen_files.open("credits.md", "w") as fd:
fd.write(get_credits())
mkdocs_gen_files.set_edit_path("credits.md", "gen_credits.py")
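# Note: this script is intended to be executed at build time by the mkdocs-gen-files
# plugin (see the "gen-files" entry under "plugins" in mkdocs.yml), which collects the
# generated "credits.md" page into the documentation site.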
pytkdocs-0.16.1/docs/gen_ref_nav.py 0000775 0000000 0000000 00000002023 14211451604 0017210 0 ustar 00root root 0000000 0000000 """Generate the code reference pages and navigation."""
from pathlib import Path
import mkdocs_gen_files
nav = mkdocs_gen_files.Nav()
for path in sorted(Path("src").glob("**/*.py")):
module_path = path.relative_to("src").with_suffix("")
doc_path = path.relative_to("src").with_suffix(".md")
full_doc_path = Path("reference", doc_path)
parts = list(module_path.parts)
if parts[-1] == "__init__":
parts = parts[:-1]
doc_path = doc_path.with_name("index.md")
full_doc_path = full_doc_path.with_name("index.md")
elif parts[-1] == "__main__":
continue
nav_parts = list(parts)
nav[nav_parts] = doc_path
with mkdocs_gen_files.open(full_doc_path, "w") as fd:
ident = ".".join(parts)
print("::: " + ident, file=fd)
mkdocs_gen_files.set_edit_path(full_doc_path, path)
# add pages manually:
# nav["package", "module"] = "path/to/file.md"
with mkdocs_gen_files.open("reference/SUMMARY.md", "w") as nav_file:
nav_file.writelines(nav.build_literate_nav())
pytkdocs-0.16.1/docs/index.md 0000664 0000000 0000000 00000000023 14211451604 0016011 0 ustar 00root root 0000000 0000000 --8<-- "README.md"
pytkdocs-0.16.1/docs/license.md 0000664 0000000 0000000 00000000031 14211451604 0016323 0 ustar 00root root 0000000 0000000 ```
--8<-- "LICENSE"
```
pytkdocs-0.16.1/duties.py 0000664 0000000 0000000 00000026107 14211451604 0015312 0 ustar 00root root 0000000 0000000 """Development tasks."""
import importlib
import os
import re
import sys
import tempfile
from contextlib import suppress
from io import StringIO
from pathlib import Path
from typing import List, Optional, Pattern
from urllib.request import urlopen
from duty import duty
PY_SRC_PATHS = (Path(_) for _ in ("src", "tests", "duties.py", "docs"))
PY_SRC_LIST = tuple(str(_) for _ in PY_SRC_PATHS)
PY_SRC = " ".join(PY_SRC_LIST)
TESTING = os.environ.get("TESTING", "0") in {"1", "true"}
CI = os.environ.get("CI", "0") in {"1", "true", "yes", ""}
WINDOWS = os.name == "nt"
PTY = not WINDOWS and not CI
def _latest(lines: List[str], regex: Pattern) -> Optional[str]:
for line in lines:
match = regex.search(line)
if match:
return match.groupdict()["version"]
return None
def _unreleased(versions, last_release):
for index, version in enumerate(versions):
if version.tag == last_release:
return versions[:index]
return versions
def update_changelog(
inplace_file: str,
marker: str,
version_regex: str,
template_url: str,
) -> None:
"""
Update the given changelog file in place.
Arguments:
inplace_file: The file to update in-place.
marker: The line after which to insert new contents.
version_regex: A regular expression to find currently documented versions in the file.
template_url: The URL to the Jinja template used to render contents.
"""
from git_changelog.build import Changelog
from git_changelog.commit import AngularStyle
from jinja2.sandbox import SandboxedEnvironment
AngularStyle.DEFAULT_RENDER.insert(0, AngularStyle.TYPES["build"])
env = SandboxedEnvironment(autoescape=False)
template_text = urlopen(template_url).read().decode("utf8") # noqa: S310
template = env.from_string(template_text)
changelog = Changelog(".", style="angular")
if len(changelog.versions_list) == 1:
last_version = changelog.versions_list[0]
if last_version.planned_tag is None:
planned_tag = "0.1.0"
last_version.tag = planned_tag
last_version.url += planned_tag
last_version.compare_url = last_version.compare_url.replace("HEAD", planned_tag)
with open(inplace_file, "r") as changelog_file:
lines = changelog_file.read().splitlines()
last_released = _latest(lines, re.compile(version_regex))
if last_released:
changelog.versions_list = _unreleased(changelog.versions_list, last_released)
rendered = template.render(changelog=changelog, inplace=True)
lines[lines.index(marker)] = rendered
with open(inplace_file, "w") as changelog_file: # noqa: WPS440
changelog_file.write("\n".join(lines).rstrip("\n") + "\n")
@duty
def changelog(ctx):
"""
Update the changelog in-place with latest commits.
Arguments:
ctx: The context instance (passed automatically).
"""
commit = "166758a98d5e544aaa94fda698128e00733497f4"
template_url = f"https://raw.githubusercontent.com/pawamoy/jinja-templates/{commit}/keepachangelog.md"
ctx.run(
update_changelog,
kwargs={
"inplace_file": "CHANGELOG.md",
"marker": "",
"version_regex": r"^## \[v?(?P[^\]]+)",
"template_url": template_url,
},
title="Updating changelog",
pty=PTY,
)
@duty(pre=["check_quality", "check_types", "check_docs", "check_dependencies"])
def check(ctx):
"""
Check it all!
Arguments:
ctx: The context instance (passed automatically).
"""
@duty
def check_quality(ctx, files=PY_SRC):
"""
Check the code quality.
Arguments:
ctx: The context instance (passed automatically).
files: The files to check.
"""
ctx.run(
f"flake8 --config=config/flake8.ini {files}",
title="Checking code quality",
pty=PTY,
nofail=True,
quiet=True,
)
@duty
def check_dependencies(ctx):
"""
Check for vulnerabilities in dependencies.
Arguments:
ctx: The context instance (passed automatically).
"""
# undo possible patching
# see https://github.com/pyupio/safety/issues/348
    for module in list(sys.modules):  # copy the keys: we may delete entries while iterating
if module.startswith("safety.") or module == "safety":
del sys.modules[module] # noqa: WPS420
importlib.invalidate_caches()
# reload original, unpatched safety
from safety.formatter import report
from safety.safety import check as safety_check
from safety.util import read_requirements
# retrieve the list of dependencies
requirements = ctx.run(
["pdm", "export", "-f", "requirements", "--without-hashes"],
title="Exporting dependencies as requirements",
allow_overrides=False,
)
# check using safety as a library
def safety(): # noqa: WPS430
packages = list(read_requirements(StringIO(requirements)))
vulns = safety_check(packages=packages, ignore_ids="", key="", db_mirror="", cached=False, proxy={})
output_report = report(vulns=vulns, full=True, checked_packages=len(packages))
if vulns:
print(output_report)
ctx.run(safety, title="Checking dependencies")
@duty
def check_docs(ctx):
"""
Check if the documentation builds correctly.
Arguments:
ctx: The context instance (passed automatically).
"""
Path("htmlcov").mkdir(parents=True, exist_ok=True)
Path("htmlcov/index.html").touch(exist_ok=True)
ctx.run("mkdocs build", title="Building documentation", pty=PTY)
@duty # noqa: WPS231
def check_types(ctx): # noqa: WPS231
"""
Check that the code is correctly typed.
Arguments:
ctx: The context instance (passed automatically).
"""
# NOTE: the following code works around this issue:
# https://github.com/python/mypy/issues/10633
# compute packages directory path
py = f"{sys.version_info.major}.{sys.version_info.minor}"
pkgs_dir = Path("__pypackages__", py, "lib").resolve()
# build the list of available packages
packages = {}
for package in pkgs_dir.glob("*"):
if package.suffix not in {".dist-info", ".pth"} and package.name != "__pycache__":
packages[package.name] = package
# handle .pth files
for pth in pkgs_dir.glob("*.pth"):
with suppress(OSError):
for package in Path(pth.read_text().splitlines()[0]).glob("*"): # noqa: WPS440
if package.suffix != ".dist-info":
packages[package.name] = package
# create a temporary directory to assign to MYPYPATH
with tempfile.TemporaryDirectory() as tmpdir:
# symlink the stubs
ignore = set()
for stubs in (path for name, path in packages.items() if name.endswith("-stubs")): # noqa: WPS335
Path(tmpdir, stubs.name).symlink_to(stubs, target_is_directory=True)
# try to symlink the corresponding package
# see https://www.python.org/dev/peps/pep-0561/#stub-only-packages
pkg_name = stubs.name.replace("-stubs", "")
if pkg_name in packages:
ignore.add(pkg_name)
Path(tmpdir, pkg_name).symlink_to(packages[pkg_name], target_is_directory=True)
# create temporary mypy config to ignore stubbed packages
newconfig = Path("config", "mypy.ini").read_text()
newconfig += "\n" + "\n\n".join(f"[mypy-{pkg}.*]\nignore_errors=true" for pkg in ignore)
tmpconfig = Path(tmpdir, "mypy.ini")
tmpconfig.write_text(newconfig)
# set MYPYPATH and run mypy
os.environ["MYPYPATH"] = tmpdir
ctx.run(f"mypy --config-file {tmpconfig} {PY_SRC}", title="Type-checking", pty=PTY)
@duty(silent=True)
def clean(ctx):
"""
Delete temporary files.
Arguments:
ctx: The context instance (passed automatically).
"""
ctx.run("rm -rf .coverage*")
ctx.run("rm -rf .mypy_cache")
ctx.run("rm -rf .pytest_cache")
ctx.run("rm -rf tests/.pytest_cache")
ctx.run("rm -rf build")
ctx.run("rm -rf dist")
ctx.run("rm -rf htmlcov")
ctx.run("rm -rf pip-wheel-metadata")
ctx.run("rm -rf site")
ctx.run("find . -type d -name __pycache__ | xargs rm -rf")
ctx.run("find . -name '*.rej' -delete")
@duty
def docs(ctx):
"""
Build the documentation locally.
Arguments:
ctx: The context instance (passed automatically).
"""
ctx.run("mkdocs build", title="Building documentation")
@duty
def docs_serve(ctx, host="127.0.0.1", port=8000):
"""
Serve the documentation (localhost:8000).
Arguments:
ctx: The context instance (passed automatically).
host: The host to serve the docs from.
port: The port to serve the docs on.
"""
ctx.run(f"mkdocs serve -a {host}:{port}", title="Serving documentation", capture=False)
@duty
def docs_deploy(ctx):
"""
Deploy the documentation on GitHub pages.
Arguments:
ctx: The context instance (passed automatically).
"""
ctx.run("mkdocs gh-deploy", title="Deploying documentation")
@duty
def format(ctx):
"""
Run formatting tools on the code.
Arguments:
ctx: The context instance (passed automatically).
"""
ctx.run(
f"autoflake -ir --exclude tests/fixtures --remove-all-unused-imports {PY_SRC}",
title="Removing unused imports",
pty=PTY,
)
ctx.run(f"isort {PY_SRC}", title="Ordering imports", pty=PTY)
ctx.run(f"black {PY_SRC}", title="Formatting code", pty=PTY)
@duty
def release(ctx, version):
"""
Release a new Python package.
Arguments:
ctx: The context instance (passed automatically).
version: The new version number to use.
"""
ctx.run("git add pyproject.toml CHANGELOG.md", title="Staging files", pty=PTY)
ctx.run(["git", "commit", "-m", f"chore: Prepare release {version}"], title="Committing changes", pty=PTY)
ctx.run(f"git tag {version}", title="Tagging commit", pty=PTY)
if not TESTING:
ctx.run("git push", title="Pushing commits", pty=False)
ctx.run("git push --tags", title="Pushing tags", pty=False)
ctx.run("pdm build", title="Building dist/wheel", pty=PTY)
ctx.run("twine upload --skip-existing dist/*", title="Publishing version", pty=PTY)
docs_deploy.run()
@duty(silent=True)
def coverage(ctx):
"""
Report coverage as text and HTML.
Arguments:
ctx: The context instance (passed automatically).
"""
ctx.run("coverage combine", nofail=True)
ctx.run("coverage report --rcfile=config/coverage.ini", capture=False)
ctx.run("coverage html --rcfile=config/coverage.ini")
@duty
def test(ctx, match: str = ""):
"""
Run the test suite.
Arguments:
ctx: The context instance (passed automatically).
match: A pytest expression to filter selected tests.
"""
py_version = f"{sys.version_info.major}{sys.version_info.minor}"
os.environ["COVERAGE_FILE"] = f".coverage.{py_version}"
ctx.run(
["pytest", "-c", "config/pytest.ini", "-p", "no:sugar", "-n", "auto", "-k", match, "tests"],
title="Running tests",
pty=PTY,
)
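# A minimal usage sketch (assuming the `duty` command-line tool provided by the `duty`
# dev-dependency is available in the project environment, e.g. through `pdm run`):
#
#     pdm run duty check    # run all the checks chained by the `check` duty
#     pdm run duty test     # run the test suite with coverage
#
# The exact invocation depends on how the development dependencies are installed locally.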
pytkdocs-0.16.1/mkdocs.yml 0000664 0000000 0000000 00000003515 14211451604 0015444 0 ustar 00root root 0000000 0000000 site_name: "pytkdocs"
site_description: "Load Python objects documentation."
site_url: "https://pawamoy.github.io/pytkdocs"
repo_url: "https://github.com/pawamoy/pytkdocs"
repo_name: "pawamoy/pytkdocs"
site_dir: "site"
nav:
- Home:
- Overview: index.md
- Changelog: changelog.md
- Credits: credits.md
- License: license.md
# defer to gen-files + literate-nav
- Code Reference: reference/
- Development:
- Contributing: contributing.md
- Code of Conduct: code_of_conduct.md
- Coverage report: coverage.md
- Author's website: https://pawamoy.github.io/
theme:
name: material
icon:
logo: material/currency-sign
features:
- navigation.tabs
- navigation.top
palette:
- media: "(prefers-color-scheme: light)"
scheme: default
primary: teal
accent: purple
toggle:
icon: material/weather-sunny
name: Switch to dark mode
- media: "(prefers-color-scheme: dark)"
scheme: slate
primary: black
accent: lime
toggle:
icon: material/weather-night
name: Switch to light mode
extra_css:
- css/material.css
- css/mkdocstrings.css
markdown_extensions:
- admonition
- pymdownx.emoji
- pymdownx.magiclink
- pymdownx.snippets:
check_paths: true
- pymdownx.superfences
- pymdownx.tabbed:
alternate_style: true
- pymdownx.tasklist
- toc:
permalink: "¤"
plugins:
- search
- gen-files:
scripts:
- docs/gen_credits.py
- docs/gen_ref_nav.py
- literate-nav:
nav_file: SUMMARY.md
- coverage
- section-index
- mkdocstrings:
handlers:
python:
setup_commands:
- import sys
- sys.path.append(".")
selection:
new_path_syntax: yes
watch:
- src/pytkdocs
extra:
social:
- icon: fontawesome/brands/github
link: https://github.com/pawamoy
- icon: fontawesome/brands/twitter
link: https://twitter.com/pawamoy
pytkdocs-0.16.1/pyproject.toml 0000664 0000000 0000000 00000006265 14211451604 0016362 0 ustar 00root root 0000000 0000000 [build-system]
requires = ["pdm-pep517"]
build-backend = "pdm.pep517.api"
[project]
name = "pytkdocs"
description = "Load Python objects documentation."
authors = [{name = "Timothée Mazzucotelli", email = "pawamoy@pm.me"}]
license = {file = "LICENSE"}
readme = "README.md"
requires-python = ">=3.7"
keywords = ["python", "source", "signature", "docs"]
dynamic = ["version"]
classifiers = [
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: ISC License (ISCL)",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Topic :: Documentation",
"Topic :: Software Development",
"Topic :: Software Development :: Documentation",
"Topic :: Utilities",
"Typing :: Typed",
]
dependencies = [
"astunparse>=1.6; python_version < '3.9'",
"cached-property>=1.5; python_version < '3.8'",
"typing-extensions>=3.7; python_version < '3.8'",
]
[project.optional-dependencies]
numpy-style = [
"docstring_parser>=0.7",
]
[project.urls]
Homepage = "https://pawamoy.github.io/pytkdocs"
Documentation = "https://pawamoy.github.io/pytkdocs"
Changelog = "https://pawamoy.github.io/pytkdocs/changelog"
Repository = "https://github.com/pawamoy/pytkdocs"
Issues = "https://github.com/pawamoy/pytkdocs/issues"
Discussions = "https://github.com/pawamoy/pytkdocs/discussions"
Gitter = "https://gitter.im/pytkdocs/community"
Funding = "https://github.com/sponsors/pawamoy"
[project.scripts]
pytkdocs = "pytkdocs.cli:main"
[tool.pdm]
version = {use_scm = true}
package-dir = "src"
editable-backend = "editables"
[tool.pdm.dev-dependencies]
duty = ["duty>=0.7"]
docs = [
"mkdocs>=1.2",
"mkdocs-coverage>=0.2",
"mkdocs-gen-files>=0.3",
"mkdocs-literate-nav>=0.4",
"mkdocs-material>=7.3",
"mkdocs-section-index>=0.3",
"mkdocstrings>=0.16",
"toml>=0.10",
]
format = [
"autoflake>=1.4",
"black>=21.10b0",
"isort>=5.10",
]
maintain = [
"git-changelog>=0.4",
]
quality = [
"darglint>=1.8",
"flake8-bandit>=2.1",
"flake8-black>=0.2",
"flake8-bugbear>=21.9",
"flake8-builtins>=1.5",
"flake8-comprehensions>=3.7",
"flake8-docstrings>=1.6",
"flake8-pytest-style>=1.5",
"flake8-string-format>=0.3",
"flake8-tidy-imports>=4.5",
"flake8-variables-names>=0.0",
"pep8-naming>=0.12",
"wps-light>=0.15",
]
tests = [
"pytest>=6.2",
"pytest-cov>=3.0",
"pytest-randomly>=3.10",
"pytest-sugar>=0.9",
"pytest-xdist>=2.4",
"django>=3.2",
"marshmallow>=3.13",
"pydantic>=1.8",
]
typing = [
"mypy>=0.910",
"types-markdown>=3.3",
"types-toml>=0.10",
]
security = ["safety>=1.10"]
[tool.black]
line-length = 120
exclude = "tests/fixtures"
[tool.isort]
line_length = 120
not_skip = "__init__.py"
multi_line_output = 3
force_single_line = false
balanced_wrapping = true
default_section = "THIRDPARTY"
known_first_party = "pytkdocs"
include_trailing_comma = true
pytkdocs-0.16.1/scripts/ 0000775 0000000 0000000 00000000000 14211451604 0015124 5 ustar 00root root 0000000 0000000 pytkdocs-0.16.1/scripts/get_annotations.py 0000775 0000000 0000000 00000004304 14211451604 0020676 0 ustar 00root root 0000000 0000000 #!/usr/bin/env python
"""Scan Python files to retrieve real-world type annotations."""
import ast
import glob
import re
import sys
from multiprocessing import Pool, cpu_count
from pathlib import Path
from typing import List
try:
from ast import unparse # type: ignore
except ImportError:
from astunparse import unparse as _unparse
unparse = lambda node: _unparse(node).rstrip("\n").replace("(", "").replace(")", "")
regex = re.compile(r"\w+")
def scan_file(filepath: str) -> set:
"""
Scan a Python file and return a set of annotations.
    Since `Optional[typing.List]` and `Optional[typing.Dict]` parse to the same structure,
    we are not interested in keeping the actual names.
    Therefore we replace every word with "a".
    This has two benefits:
    - syntactically equivalent annotations collapse into a single entry (duplicates are removed)
    - the resulting annotations take fewer bytes
Arguments:
filepath: The path to the Python file to scan.
Returns:
A set of annotations.
"""
annotations: set = set()
path = Path(filepath)
try:
code = ast.parse(path.read_text())
    except Exception:  # unreadable or syntactically invalid file: skip it
return annotations
for node in ast.walk(code):
if hasattr(node, "annotation"):
try:
unparsed = unparse(node.annotation) # type: ignore
annotations.add(regex.sub("a", unparsed))
            except Exception:  # some nodes cannot be unparsed: skip them
continue
return annotations
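# For example, the annotation `Optional[List[int]]` is unparsed and then normalized to
# "a[a[a]]" by the word substitution above, so syntactically equivalent annotations
# end up as a single entry in the returned set.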
def main(directories: List[str]) -> int:
"""
Scan Python files in a list of directories.
First, all the files are stored in a list,
then the scanning is done in parallel with a multiprocessing pool.
Arguments:
directories: A list of directories to scan.
Returns:
An exit code.
"""
if not directories:
return 1
all_files = []
for directory in directories:
all_files.extend(glob.glob(directory.rstrip("/") + "/**/*.py", recursive=True))
n_files = len(all_files)
with Pool(cpu_count() - 1) as pool:
sets = pool.map(scan_file, all_files)
annotations: set = set().union(*sets)
print("a: " + "\na: ".join(sorted(annotations)))
return 0
if __name__ == "__main__":
sys.exit(main(sys.argv[1:]))
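# Example invocation (a sketch, with hypothetical directories and output file):
#
#     python scripts/get_annotations.py src tests > annotations.txt
#
# Each unique normalized annotation is printed on its own line, prefixed with "a: ".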
pytkdocs-0.16.1/scripts/multirun.sh 0000775 0000000 0000000 00000001456 14211451604 0017350 0 ustar 00root root 0000000 0000000 #!/usr/bin/env bash
set -e
PYTHON_VERSIONS="${PYTHON_VERSIONS-3.7 3.8 3.9 3.10 3.11}"
restore_previous_python_version() {
if pdm use -f "$1" &>/dev/null; then
echo "> Restored previous Python version: ${1##*/}"
fi
}
if [ -n "${PYTHON_VERSIONS}" ]; then
old_python_version="$(pdm config python.path)"
echo "> Currently selected Python version: ${old_python_version##*/}"
trap "restore_previous_python_version ${old_python_version}" EXIT
for python_version in ${PYTHON_VERSIONS}; do
if pdm use -f "python${python_version}" &>/dev/null; then
echo "> pdm run $@ (python${python_version})"
pdm run "$@"
else
echo "> pdm use -f python${python_version}: Python interpreter not available?" >&2
fi
done
else
pdm run "$@"
fi
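# Example (a sketch): run the test duty against a subset of interpreters, assuming
# those interpreters are installed, known to pdm, and the `duty` CLI is available:
#
#   PYTHON_VERSIONS="3.8 3.10" scripts/multirun.sh duty test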
pytkdocs-0.16.1/scripts/setup.sh 0000775 0000000 0000000 00000002134 14211451604 0016623 0 ustar 00root root 0000000 0000000 #!/usr/bin/env bash
set -e
PYTHON_VERSIONS="${PYTHON_VERSIONS-3.7 3.8 3.9 3.10 3.11}"
install_with_pipx() {
if ! command -v "$1" &>/dev/null; then
if ! command -v pipx &>/dev/null; then
python3 -m pip install --user pipx
fi
pipx install "$1"
fi
}
install_with_pipx pdm
restore_previous_python_version() {
if pdm use -f "$1" &>/dev/null; then
echo "> Restored previous Python version: ${1##*/}"
fi
}
if [ -n "${PYTHON_VERSIONS}" ]; then
if old_python_version="$(pdm config python.path 2>/dev/null)"; then
echo "> Currently selected Python version: ${old_python_version##*/}"
trap "restore_previous_python_version ${old_python_version}" EXIT
fi
for python_version in ${PYTHON_VERSIONS}; do
if pdm use -f "python${python_version}" &>/dev/null; then
echo "> Using Python ${python_version} interpreter"
pdm install -G numpy-style
else
echo "> pdm use -f python${python_version}: Python interpreter not available?" >&2
fi
done
else
pdm install -G numpy-style
fi pytkdocs-0.16.1/src/ 0000775 0000000 0000000 00000000000 14211451604 0014224 5 ustar 00root root 0000000 0000000 pytkdocs-0.16.1/src/py.typed 0000664 0000000 0000000 00000000000 14211451604 0015711 0 ustar 00root root 0000000 0000000 pytkdocs-0.16.1/src/pytkdocs/ 0000775 0000000 0000000 00000000000 14211451604 0016064 5 ustar 00root root 0000000 0000000 pytkdocs-0.16.1/src/pytkdocs/__init__.py 0000664 0000000 0000000 00000000237 14211451604 0020177 0 ustar 00root root 0000000 0000000 """
pytkdocs package.
Load Python objects documentation.
"""
from typing import List
__all__: List[str] = [] # noqa: WPS410 (the only __variable__ we use)
pytkdocs-0.16.1/src/pytkdocs/__main__.py 0000664 0000000 0000000 00000000512 14211451604 0020154 0 ustar 00root root 0000000 0000000 """
Entry-point module, in case you use `python -m pytkdocs`.
Why does this file exist, and why `__main__`? For more info, read:
- https://www.python.org/dev/peps/pep-0338/
- https://docs.python.org/3/using/cmdline.html#cmdoption-m
"""
import sys
from pytkdocs.cli import main
if __name__ == "__main__":
sys.exit(main())
pytkdocs-0.16.1/src/pytkdocs/cli.py 0000664 0000000 0000000 00000014001 14211451604 0017201 0 ustar 00root root 0000000 0000000 # Why does this file exist, and why not put this in `__main__`?
#
# You might be tempted to import things from `__main__` later,
# but that will cause problems: the code will get executed twice:
#
# - When you run `python -m pytkdocs` python will execute
# `__main__.py` as a script. That means there won't be any
# `pytkdocs.__main__` in `sys.modules`.
# - When you import `__main__` it will get executed again (as a module) because
# there's no `pytkdocs.__main__` in `sys.modules`.
"""Module that contains the command line application."""
import argparse
import json
import sys
import traceback
from contextlib import contextmanager
from io import StringIO
from typing import Dict, List, Optional
from pytkdocs.loader import Loader
from pytkdocs.objects import Object
from pytkdocs.serializer import serialize_object
def process_config(config: dict) -> dict:
"""
Process a loading configuration.
The `config` argument is a dictionary looking like this:
```python
{
"objects": [
{"path": "python.dotted.path.to.the.object1"},
{"path": "python.dotted.path.to.the.object2"}
]
}
```
The result is a dictionary looking like this:
```python
{
"loading_errors": [
"message1",
"message2",
],
"parsing_errors": {
"path.to.object1": [
"message1",
"message2",
],
"path.to.object2": [
"message1",
"message2",
]
},
"objects": [
{
"path": "path.to.object1",
# other attributes, see the documentation for `pytkdocs.objects` or `pytkdocs.serializer`
},
{
"path": "path.to.object2",
# other attributes, see the documentation for `pytkdocs.objects` or `pytkdocs.serializer`
},
]
}
```
Arguments:
config: The configuration.
Returns:
The collected documentation along with the errors that occurred.
"""
collected = []
loading_errors = []
parsing_errors = {}
for obj_config in config["objects"]:
path = obj_config.pop("path")
members = obj_config.pop("members", set())
if isinstance(members, list):
members = set(members)
loader = Loader(**obj_config)
obj = loader.get_object_documentation(path, members)
loading_errors.extend(loader.errors)
parsing_errors.update(extract_errors(obj))
serialized_obj = serialize_object(obj)
collected.append(serialized_obj)
return {"loading_errors": loading_errors, "parsing_errors": parsing_errors, "objects": collected}
def process_json(json_input: str) -> dict:
"""
Process JSON input.
Simply load the JSON as a Python dictionary, then pass it to [`process_config`][pytkdocs.cli.process_config].
Arguments:
json_input: The JSON to load.
Returns:
The result of the call to [`process_config`][pytkdocs.cli.process_config].
"""
return process_config(json.loads(json_input))
def extract_docstring_parsing_errors(errors: dict, obj: Object) -> None:
"""
Recursion helper.
Update the `errors` dictionary by side-effect. Recurse on the object's children.
Arguments:
errors: The dictionary to update.
obj: The object.
"""
if hasattr(obj, "docstring_errors") and obj.docstring_errors: # noqa: WPS421 (hasattr)
errors[obj.path] = obj.docstring_errors
for child in obj.children:
extract_docstring_parsing_errors(errors, child)
def extract_errors(obj: Object) -> dict:
"""
Extract the docstring parsing errors of each object, recursively, into a flat dictionary.
Arguments:
obj: An object from `pytkdocs.objects`.
Returns:
A flat dictionary. Keys are the objects' names.
"""
parsing_errors: Dict[str, List[str]] = {}
extract_docstring_parsing_errors(parsing_errors, obj)
return parsing_errors
def get_parser() -> argparse.ArgumentParser:
"""
Return the program argument parser.
Returns:
The argument parser for the program.
"""
parser = argparse.ArgumentParser(prog="pytkdocs")
parser.add_argument(
"-1",
"--line-by-line",
action="store_true",
dest="line_by_line",
help="Process each line read on stdin, one by one.",
)
return parser
@contextmanager
def discarded_stdout():
"""
Discard standard output.
Yields:
Nothing: We only yield to act as a context manager.
"""
# Discard things printed at import time to avoid corrupting our JSON output
# See https://github.com/pawamoy/pytkdocs/issues/24
old_stdout = sys.stdout
sys.stdout = StringIO()
yield
# Flush imported modules' output, and restore true sys.stdout
sys.stdout.flush()
sys.stdout = old_stdout
def main(args: Optional[List[str]] = None) -> int:
"""
Run the main program.
This function is executed when you type `pytkdocs` or `python -m pytkdocs`.
Arguments:
args: Arguments passed from the command line.
Returns:
An exit code.
"""
parser = get_parser()
parsed_args: argparse.Namespace = parser.parse_args(args)
if parsed_args.line_by_line:
for line in sys.stdin:
with discarded_stdout():
try:
output = json.dumps(process_json(line))
except Exception as error: # noqa: W0703 (we purposely catch everything)
# Don't fail on error. We must handle the next inputs.
# Instead, print error as JSON.
output = json.dumps({"error": str(error), "traceback": traceback.format_exc()})
print(output) # noqa: WPS421 (we need to print at some point)
else:
with discarded_stdout():
output = json.dumps(process_json(sys.stdin.read()))
print(output) # noqa: WPS421 (we need to print at some point)
return 0
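# A minimal usage sketch of the command line interface described above
# ("mypkg" and "requests.jsonl" are hypothetical names):
#
#     $ echo '{"objects": [{"path": "mypkg.mymodule"}]}' | pytkdocs
#     $ pytkdocs --line-by-line < requests.jsonl   # one JSON configuration per line
#
# Each input produces a single JSON object on stdout, as built by `process_config`.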
pytkdocs-0.16.1/src/pytkdocs/loader.py 0000664 0000000 0000000 00000106403 14211451604 0017710 0 ustar 00root root 0000000 0000000 """
This module is responsible for loading the documentation from Python objects.
It uses [`inspect`](https://docs.python.org/3/library/inspect.html) for introspecting objects,
iterating over their members, etc.
"""
import importlib
import inspect
import pkgutil
import re
from functools import lru_cache
from itertools import chain
from operator import attrgetter
from pathlib import Path
from typing import Any, Dict, List, Mapping, Optional, Set, Tuple, Union
from pytkdocs.objects import Attribute, Class, Function, Method, Module, Object, Source
from pytkdocs.parsers.attributes import get_class_attributes, get_instance_attributes, get_module_attributes, merge
from pytkdocs.parsers.docstrings import PARSERS
from pytkdocs.properties import RE_SPECIAL
try:
from functools import cached_property # type: ignore
except ImportError:
from cached_property import cached_property # type: ignore
class ObjectNode:
"""
Helper class to represent an object tree.
It's not really a tree but more a backward-linked list:
    each node has a reference to its parent, but not to its children (for simplicity and to avoid bugs).
Each node stores an object, its name, and a reference to its parent node.
"""
def __init__(self, obj: Any, name: str, parent: Optional["ObjectNode"] = None) -> None:
"""
Initialize the object.
Arguments:
obj: A Python object.
name: The object's name.
parent: The object's parent node.
"""
try:
obj = inspect.unwrap(obj)
except Exception: # noqa: S110,W0703 (we purposely catch every possible exception)
# inspect.unwrap at some point runs hasattr(obj, "__wrapped__"),
# which triggers the __getattr__ method of the object, which in
# turn can raise various exceptions. Probably not just __getattr__.
# See https://github.com/pawamoy/pytkdocs/issues/45
pass # noqa: WPS420 (no other way than passing)
self.obj: Any = obj
"""The actual Python object."""
self.name: str = name
"""The Python object's name."""
self.parent: Optional[ObjectNode] = parent
"""The parent node."""
@property
def dotted_path(self) -> str:
"""
Return the Python dotted path to the object.
Returns:
The Python dotted path to the object.
"""
parts = [self.name]
current = self.parent
while current:
parts.append(current.name)
current = current.parent
return ".".join(reversed(parts))
@property
def file_path(self) -> str:
"""
Return the object's module file path.
Returns:
The object's module file path.
"""
return inspect.getabsfile(self.root.obj)
@property
def root(self) -> "ObjectNode":
"""
Return the root of the tree.
Returns:
The root of the tree.
"""
if self.parent is not None:
return self.parent.root
return self
def is_module(self) -> bool:
"""
Tell if this node's object is a module.
Returns:
            If this node's object is a module.
"""
return inspect.ismodule(self.obj)
def is_class(self) -> bool:
"""
Tell if this node's object is a class.
Returns:
If this node's object is a class.
"""
return inspect.isclass(self.obj)
def is_function(self) -> bool:
"""
Tell if this node's object is a function.
Returns:
If this node's object is a function.
"""
return inspect.isfunction(self.obj)
def is_coroutine_function(self) -> bool:
"""
Tell if this node's object is a coroutine.
Returns:
If this node's object is a coroutine.
"""
return inspect.iscoroutinefunction(self.obj)
def is_property(self) -> bool:
"""
Tell if this node's object is a property.
Returns:
If this node's object is a property.
"""
return isinstance(self.obj, property) or self.is_cached_property()
def is_cached_property(self) -> bool:
"""
Tell if this node's object is a cached property.
Returns:
If this node's object is a cached property.
"""
return isinstance(self.obj, cached_property)
def parent_is_class(self) -> bool:
"""
Tell if the object of this node's parent is a class.
Returns:
If the object of this node's parent is a class.
"""
return bool(self.parent and self.parent.is_class())
def is_method(self) -> bool:
"""
Tell if this node's object is a method.
Returns:
If this node's object is a method.
"""
function_type = type(lambda: None)
return self.parent_is_class() and isinstance(self.obj, function_type)
def is_method_descriptor(self) -> bool:
"""
Tell if this node's object is a method descriptor.
Built-in methods (e.g. those implemented in C/Rust) are often
method descriptors, rather than normal methods.
Returns:
If this node's object is a method descriptor.
"""
return inspect.ismethoddescriptor(self.obj)
def is_staticmethod(self) -> bool:
"""
Tell if this node's object is a staticmethod.
Returns:
If this node's object is a staticmethod.
"""
if not self.parent:
return False
self_from_parent = self.parent.obj.__dict__.get(self.name, None)
return self.parent_is_class() and isinstance(self_from_parent, staticmethod)
def is_classmethod(self) -> bool:
"""
Tell if this node's object is a classmethod.
Returns:
If this node's object is a classmethod.
"""
if not self.parent:
return False
self_from_parent = self.parent.obj.__dict__.get(self.name, None)
return self.parent_is_class() and isinstance(self_from_parent, classmethod)
# New path syntax: the new path syntax uses a colon to separate the
# modules (to import) from the objects (to get with getattr).
# It's easier to deal with, and it naturally improves error handling.
# At first, we default to the old syntax, then at some point we will
# default to the new syntax, and later again we will drop the old syntax.
def get_object_tree(path: str, new_path_syntax: bool = False) -> ObjectNode:
"""
Transform a path into an actual Python object.
    The path can be arbitrarily long. You can pass the path to a package,
a module, a class, a function or a global variable, as deep as you
want, as long as the deepest module is importable through
`importlib.import_module` and each object is obtainable through
the `getattr` method. It is not possible to load local objects.
Args:
path: The dot/colon-separated path of the object.
new_path_syntax: Whether to use the "colon" syntax for the path.
Raises:
ValueError: When the path is not valid (evaluates to `False`).
ImportError: When the object or its parent module could not be imported.
Returns:
The leaf node representing the object and its parents.
"""
if not path:
raise ValueError(f"path must be a valid Python path, not {path}")
objects: List[str] = []
if ":" in path or new_path_syntax:
try:
module_path, object_path = path.split(":")
except ValueError: # no colon
module_path, objects = path, []
else:
objects = object_path.split(".")
# let the ImportError bubble up
parent_module = importlib.import_module(module_path)
else:
# We will try to import the longest dotted-path first.
# If it fails, we remove the right-most part and put it in a list of "objects", used later.
# We loop until we find the deepest importable submodule.
obj_parent_modules = path.split(".")
while True:
parent_module_path = ".".join(obj_parent_modules)
try:
parent_module = importlib.import_module(parent_module_path)
except ImportError as error:
if len(obj_parent_modules) == 1:
raise ImportError(
f"Importing '{path}' failed, possible causes are:\n"
f"- an exception happened while importing\n"
f"- an element in the path does not exist",
) from error
objects.insert(0, obj_parent_modules.pop(-1))
else:
break
# We now have the module containing the desired object.
# We will build the object tree by iterating over the previously stored objects names
# and trying to get them as attributes.
current_node = ObjectNode(parent_module, parent_module.__name__)
for obj_name in objects:
obj = getattr(current_node.obj, obj_name)
child = ObjectNode(obj, obj_name, parent=current_node)
current_node = child
leaf = current_node
# We now try to get the "real" parent module, not the one the object was imported into.
# This is important if we want to be able to retrieve the docstring of an attribute for example.
# Once we find an object for which we could get the module, we stop trying to get the module.
# Once we reach the node before the root, we apply the module if found, and break.
real_module = None
while current_node.parent is not None:
if real_module is None:
real_module = inspect.getmodule(current_node.obj)
if inspect.ismodule(current_node.parent.obj):
if real_module is not None and real_module is not current_node.parent.obj:
current_node.parent = ObjectNode(real_module, real_module.__name__)
break
current_node = current_node.parent
return leaf
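# A small usage sketch (hedged: "mypkg" is a hypothetical importable package):
#
#     get_object_tree("mypkg.mymodule.MyClass")                        # old, dot-only syntax
#     get_object_tree("mypkg.mymodule:MyClass", new_path_syntax=True)  # new, colon syntax
#
# Both return the leaf ObjectNode for MyClass, whose `dotted_path` is
# "mypkg.mymodule.MyClass" and whose parent links lead back to the module node.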
class Loader:
"""
This class contains the object documentation loading mechanisms.
Any error that occurred during collection of the objects and their documentation is stored in the `errors` list.
"""
def __init__(
self,
filters: Optional[List[str]] = None,
docstring_style: str = "google",
docstring_options: Optional[dict] = None,
inherited_members: bool = False,
new_path_syntax: bool = False,
) -> None:
"""
Initialize the object.
Arguments:
filters: A list of regular expressions to fine-grain select members. It is applied recursively.
docstring_style: The style to use when parsing docstrings.
docstring_options: The options to pass to the docstrings parser.
inherited_members: Whether to select inherited members for classes.
new_path_syntax: Whether to use the "colon" syntax for the path.
"""
if not filters:
filters = []
self.filters = [(filtr, re.compile(filtr.lstrip("!"))) for filtr in filters]
self.docstring_parser = PARSERS[docstring_style](**(docstring_options or {})) # type: ignore
self.errors: List[str] = []
self.select_inherited_members = inherited_members
self.new_path_syntax = new_path_syntax
def get_object_documentation(self, dotted_path: str, members: Optional[Union[Set[str], bool]] = None) -> Object:
"""
Get the documentation for an object and its children.
Arguments:
dotted_path: The Python dotted path to the desired object.
members: `True` to select members and filter them, `False` to select no members,
or a list of names to explicitly select the members with these names.
It is applied only on the root object.
Returns:
The documented object.
"""
if members is True:
members = set()
root_object: Object
leaf = get_object_tree(dotted_path, self.new_path_syntax)
if leaf.is_module():
root_object = self.get_module_documentation(leaf, members)
elif leaf.is_class():
root_object = self.get_class_documentation(leaf, members)
elif leaf.is_staticmethod():
root_object = self.get_staticmethod_documentation(leaf)
elif leaf.is_classmethod():
root_object = self.get_classmethod_documentation(leaf)
elif leaf.is_method_descriptor():
root_object = self.get_regular_method_documentation(leaf)
elif leaf.is_method():
root_object = self.get_regular_method_documentation(leaf)
elif leaf.is_function():
root_object = self.get_function_documentation(leaf)
elif leaf.is_property():
root_object = self.get_property_documentation(leaf)
else:
root_object = self.get_attribute_documentation(leaf)
root_object.parse_all_docstrings(self.docstring_parser)
return root_object
def get_module_documentation(self, node: ObjectNode, select_members=None) -> Module:
"""
Get the documentation for a module and its children.
Arguments:
node: The node representing the module and its parents.
select_members: Explicit members to select.
Returns:
The documented module object.
"""
module = node.obj
path = node.dotted_path
name = path.split(".")[-1]
source: Optional[Source]
try:
source = Source(inspect.getsource(module), 1)
except OSError as error:
try:
code = Path(node.file_path).read_text()
except (OSError, UnicodeDecodeError):
source = None
else:
source = Source(code, 1) if code else None
root_object = Module(
name=name,
path=path,
file_path=node.file_path,
docstring=inspect.getdoc(module),
source=source,
)
if select_members is False:
return root_object
select_members = select_members or set()
attributes_data = get_module_attributes(module)
root_object.parse_docstring(self.docstring_parser, attributes=attributes_data)
for member_name, member in inspect.getmembers(module):
if self.select(member_name, select_members):
child_node = ObjectNode(member, member_name, parent=node)
if child_node.is_class() and node.root.obj is inspect.getmodule(child_node.obj):
root_object.add_child(self.get_class_documentation(child_node))
elif child_node.is_function() and node.root.obj is inspect.getmodule(child_node.obj):
root_object.add_child(self.get_function_documentation(child_node))
elif member_name in attributes_data:
root_object.add_child(self.get_attribute_documentation(child_node, attributes_data[member_name]))
if hasattr(module, "__path__"): # noqa: WPS421 (hasattr)
for _, modname, _ in pkgutil.iter_modules(module.__path__):
if self.select(modname, select_members):
leaf = get_object_tree(f"{path}.{modname}")
root_object.add_child(self.get_module_documentation(leaf))
return root_object
@staticmethod
def _class_path(cls):
mod = cls.__module__
qname = cls.__qualname__
if mod == "builtins":
return qname
else:
return f"{mod}.{qname}"
def get_class_documentation(self, node: ObjectNode, select_members=None) -> Class:
"""
Get the documentation for a class and its children.
Arguments:
node: The node representing the class and its parents.
select_members: Explicit members to select.
Returns:
The documented class object.
"""
class_ = node.obj
docstring = inspect.cleandoc(class_.__doc__ or "")
bases = [self._class_path(b) for b in class_.__bases__]
source: Optional[Source]
try:
source = Source(*inspect.getsourcelines(node.obj))
except (OSError, TypeError) as error:
source = None
root_object = Class(
name=node.name,
path=node.dotted_path,
file_path=node.file_path,
docstring=docstring,
bases=bases,
source=source,
)
# Even if we don't select members, we want to correctly parse the docstring
attributes_data: Dict[str, Dict[str, Any]] = {}
for parent_class in reversed(class_.__mro__[:-1]):
merge(attributes_data, get_class_attributes(parent_class))
context: Dict[str, Any] = {"attributes": attributes_data}
if "__init__" in class_.__dict__:
try:
attributes_data.update(get_instance_attributes(class_.__init__))
context["signature"] = inspect.signature(class_.__init__)
except (TypeError, ValueError):
pass
root_object.parse_docstring(self.docstring_parser, **context)
if select_members is False:
return root_object
select_members = select_members or set()
# Build the list of members
members = {}
inherited = set()
direct_members = class_.__dict__
all_members = dict(inspect.getmembers(class_))
for member_name, member in all_members.items():
if member is class_:
continue
if not (member is type or member is object) and self.select(member_name, select_members):
if member_name not in direct_members:
if self.select_inherited_members:
members[member_name] = member
inherited.add(member_name)
else:
members[member_name] = member
# Iterate on the selected members
child: Object
for member_name, member in members.items():
child_node = ObjectNode(member, member_name, parent=node)
if child_node.is_class():
child = self.get_class_documentation(child_node)
elif child_node.is_classmethod():
child = self.get_classmethod_documentation(child_node)
elif child_node.is_staticmethod():
child = self.get_staticmethod_documentation(child_node)
elif child_node.is_method():
child = self.get_regular_method_documentation(child_node)
elif child_node.is_property():
child = self.get_property_documentation(child_node)
elif member_name in attributes_data:
child = self.get_attribute_documentation(child_node, attributes_data[member_name])
else:
continue
if member_name in inherited:
child.properties.append("inherited")
root_object.add_child(child)
for attr_name, properties, add_method in (
("__fields__", ["pydantic-model"], self.get_pydantic_field_documentation),
("_declared_fields", ["marshmallow-model"], self.get_marshmallow_field_documentation),
("_meta.get_fields", ["django-model"], self.get_django_field_documentation),
("__dataclass_fields__", ["dataclass"], self.get_annotated_dataclass_field),
):
if self.detect_field_model(attr_name, direct_members, all_members):
root_object.properties.extend(properties)
self.add_fields(
node,
root_object,
attr_name,
all_members,
select_members,
class_,
add_method,
)
break
return root_object
def detect_field_model(self, attr_name: str, direct_members, all_members) -> bool:
"""
Detect if an attribute is present in members.
Arguments:
attr_name: The name of the attribute to detect, can contain dots.
direct_members: The direct members of the class.
all_members: All members of the class.
Returns:
Whether the attribute is present.
"""
first_order_attr_name, remainder = split_attr_name(attr_name)
if not (
first_order_attr_name in direct_members
or (self.select_inherited_members and first_order_attr_name in all_members)
):
return False
if remainder and not attrgetter(remainder)(all_members[first_order_attr_name]):
return False
return True
def add_fields(
self,
node: ObjectNode,
root_object: Object,
attr_name: str,
members,
select_members,
base_class,
add_method,
) -> None:
"""
Add detected fields to the current object.
Arguments:
node: The current object node.
root_object: The current object.
attr_name: The fields attribute name.
members: The members to pick the fields attribute in.
select_members: The members to select.
base_class: The class declaring the fields.
add_method: The method to add the children object.
"""
fields = get_fields(attr_name, members=members)
for field_name, field in fields.items():
select_field = self.select(field_name, select_members)
is_inherited = field_is_inherited(field_name, attr_name, base_class)
if select_field and (self.select_inherited_members or not is_inherited):
child_node = ObjectNode(obj=field, name=field_name, parent=node)
root_object.add_child(add_method(child_node))
def get_function_documentation(self, node: ObjectNode) -> Function:
"""
Get the documentation for a function.
Arguments:
node: The node representing the function and its parents.
Returns:
The documented function object.
"""
function = node.obj
path = node.dotted_path
source: Optional[Source]
signature: Optional[inspect.Signature]
try:
signature = inspect.signature(function)
except TypeError as error:
signature = None
try:
source = Source(*inspect.getsourcelines(function))
except OSError as error:
source = None
properties: List[str] = []
if node.is_coroutine_function():
properties.append("async")
return Function(
name=node.name,
path=node.dotted_path,
file_path=node.file_path,
docstring=inspect.getdoc(function),
signature=signature,
source=source,
properties=properties,
)
def get_property_documentation(self, node: ObjectNode) -> Attribute:
"""
Get the documentation for a property.
Arguments:
node: The node representing the property and its parents.
Returns:
The documented attribute object (properties are considered attributes for now).
"""
prop = node.obj
path = node.dotted_path
properties = ["property"]
if node.is_cached_property():
# cached_property is always writable, see the docs
properties.extend(["writable", "cached"])
sig_source_func = prop.func
else:
properties.append("readonly" if prop.fset is None else "writable")
sig_source_func = prop.fget
source: Optional[Source]
try:
signature = inspect.signature(sig_source_func)
except (TypeError, ValueError) as error:
attr_type = None
else:
attr_type = signature.return_annotation
try:
source = Source(*inspect.getsourcelines(sig_source_func))
except (OSError, TypeError) as error:
source = None
return Attribute(
name=node.name,
path=path,
file_path=node.file_path,
docstring=inspect.getdoc(prop),
attr_type=attr_type,
properties=properties,
source=source,
)
@staticmethod
def get_pydantic_field_documentation(node: ObjectNode) -> Attribute:
"""
Get the documentation for a Pydantic Field.
Arguments:
node: The node representing the Field and its parents.
Returns:
The documented attribute object.
"""
prop = node.obj
path = node.dotted_path
properties = ["pydantic-field"]
if prop.required:
properties.append("required")
return Attribute(
name=node.name,
path=path,
file_path=node.file_path,
docstring=prop.field_info.description,
attr_type=prop.outer_type_,
properties=properties,
)
@staticmethod
def get_django_field_documentation(node: ObjectNode) -> Attribute:
"""
Get the documentation for a Django Field.
Arguments:
node: The node representing the Field and its parents.
Returns:
The documented attribute object.
"""
prop = node.obj
path = node.dotted_path
properties = ["django-field"]
if prop.null:
properties.append("nullable")
if prop.blank:
properties.append("blank")
        # Set the docstring based on verbose_name and help_text:
        # both should be converted to str in case lazy translation
        # is being used, which is a common scenario in Django.
if prop.help_text:
docstring = f"{prop.verbose_name}: {prop.help_text}"
else:
docstring = str(prop.verbose_name)
return Attribute(
name=node.name,
path=path,
file_path=node.file_path,
docstring=docstring,
attr_type=prop.__class__,
properties=properties,
)
@staticmethod
def get_marshmallow_field_documentation(node: ObjectNode) -> Attribute:
"""
Get the documentation for a Marshmallow Field.
Arguments:
node: The node representing the Field and its parents.
Returns:
The documented attribute object.
"""
prop = node.obj
path = node.dotted_path
properties = ["marshmallow-field"]
if prop.required:
properties.append("required")
return Attribute(
name=node.name,
path=path,
file_path=node.file_path,
docstring=prop.metadata.get("description"),
attr_type=type(prop),
properties=properties,
)
@staticmethod
def get_annotated_dataclass_field(node: ObjectNode, attribute_data: Optional[dict] = None) -> Attribute:
"""
Get the documentation for a dataclass field.
Arguments:
node: The node representing the annotation and its parents.
attribute_data: Docstring and annotation for this attribute.
Returns:
The documented attribute object.
"""
if attribute_data is None:
if node.parent_is_class():
attribute_data = get_class_attributes(node.parent.obj).get(node.name, {}) # type: ignore
else:
attribute_data = get_module_attributes(node.root.obj).get(node.name, {})
return Attribute(
name=node.name,
path=node.dotted_path,
file_path=node.file_path,
docstring=attribute_data["docstring"],
attr_type=attribute_data["annotation"],
properties=["dataclass-field"],
)
def get_classmethod_documentation(self, node: ObjectNode) -> Method:
"""
Get the documentation for a class-method.
Arguments:
node: The node representing the class-method and its parents.
Returns:
The documented method object.
"""
return self.get_method_documentation(node, ["classmethod"])
def get_staticmethod_documentation(self, node: ObjectNode) -> Method:
"""
Get the documentation for a static-method.
Arguments:
node: The node representing the static-method and its parents.
Returns:
The documented method object.
"""
return self.get_method_documentation(node, ["staticmethod"])
def get_regular_method_documentation(self, node: ObjectNode) -> Method:
"""
Get the documentation for a regular method (not class- nor static-method).
We do extra processing in this method to discard docstrings of `__init__` methods
that were inherited from parent classes.
Arguments:
node: The node representing the method and its parents.
Returns:
The documented method object.
"""
method = self.get_method_documentation(node)
if node.parent:
class_ = node.parent.obj
if RE_SPECIAL.match(node.name):
docstring = method.docstring
parent_classes = class_.__mro__[1:]
for parent_class in parent_classes:
try:
parent_method = getattr(parent_class, node.name)
except AttributeError:
continue
else:
if docstring == inspect.getdoc(parent_method):
method.docstring = ""
break
return method
def get_method_documentation(self, node: ObjectNode, properties: Optional[List[str]] = None) -> Method:
"""
Get the documentation for a method or method descriptor.
Arguments:
node: The node representing the method and its parents.
properties: A list of properties to apply to the method.
Returns:
The documented method object.
"""
method = node.obj
path = node.dotted_path
signature: Optional[inspect.Signature]
source: Optional[Source]
try:
source = Source(*inspect.getsourcelines(method))
except OSError as error:
source = None
except TypeError:
source = None
if node.is_coroutine_function():
if properties is None:
properties = ["async"]
else:
properties.append("async")
try:
# for "built-in" functions, e.g. those implemented in C,
# inspect.signature() uses the __text_signature__ attribute, which
# provides a limited but still useful amount of signature information.
# "built-in" functions with no __text_signature__ will
# raise a ValueError().
signature = inspect.signature(method)
except ValueError as error:
signature = None
return Method(
name=node.name,
path=path,
file_path=node.file_path,
docstring=inspect.getdoc(method),
signature=signature,
properties=properties or [],
source=source,
)
@staticmethod
def get_attribute_documentation(node: ObjectNode, attribute_data: Optional[dict] = None) -> Attribute:
"""
Get the documentation for an attribute.
Arguments:
node: The node representing the method and its parents.
attribute_data: Docstring and annotation for this attribute.
Returns:
The documented attribute object.
"""
if attribute_data is None:
if node.parent_is_class():
attribute_data = get_class_attributes(node.parent.obj).get(node.name, {}) # type: ignore
else:
attribute_data = get_module_attributes(node.root.obj).get(node.name, {})
return Attribute(
name=node.name,
path=node.dotted_path,
file_path=node.file_path,
docstring=attribute_data.get("docstring", ""),
attr_type=attribute_data.get("annotation", None),
)
def select(self, name: str, names: Set[str]) -> bool:
"""
Tells whether we should select an object or not, given its name.
If the set of names is not empty, we check against it, otherwise we check against filters.
Arguments:
name: The name of the object to select or not.
names: An explicit list of names to select.
Returns:
Yes or no.
"""
if names:
return name in names
return not self.filter_name_out(name)
@lru_cache(maxsize=None)
def filter_name_out(self, name: str) -> bool:
"""
Filter a name based on the loader's filters.
Arguments:
name: The name to filter.
Returns:
True if the name was filtered out, False otherwise.
"""
if not self.filters:
return False
keep = True
for fltr, regex in self.filters:
is_matching = bool(regex.search(name))
if is_matching:
if str(fltr).startswith("!"):
is_matching = not is_matching
keep = is_matching
return not keep
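# Example: with `Loader(filters=["!^_", "^__init__$"])`, the name "_private" is
# filtered out (it only matches the negated filter), while "__init__" is kept,
# because the last matching filter ("^__init__$") re-selects it.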
def field_is_inherited(field_name: str, fields_name: str, base_class: type) -> bool:
"""
Check if a field with a certain name was inherited from parent classes.
Arguments:
field_name: The name of the field to check.
fields_name: The name of the attribute in which the fields are stored.
base_class: The base class in which the field appears.
Returns:
Whether the field was inherited.
"""
# To tell if a field was inherited, we check if it exists in parent classes __fields__ attributes.
# We don't check the current class, nor the top one (object), hence __mro__[1:-1]
return field_name in set(
chain(
*(getattr(parent_class, fields_name, {}).keys() for parent_class in base_class.__mro__[1:-1]),
),
)
def split_attr_name(attr_name: str) -> Tuple[str, Optional[str]]:
"""
Split an attribute name into a first-order attribute name and remainder.
Args:
        attr_name: Attribute name (a.b.c)
Returns:
Tuple containing:
first_order_attr_name: Name of the first order attribute (a)
remainder: The remainder (b.c)
"""
first_order_attr_name, *remaining = attr_name.split(".", maxsplit=1)
remainder = remaining[0] if remaining else None
return first_order_attr_name, remainder
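# For example (the values shown are what the function actually returns):
#     split_attr_name("__fields__")       == ("__fields__", None)
#     split_attr_name("_meta.get_fields") == ("_meta", "get_fields")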
def get_fields(attr_name: str, *, members: Optional[Mapping[str, Any]] = None, class_obj=None) -> Dict[str, Any]:
    """Return the fields of a class, looked up through `attr_name` either in a members mapping or on the class itself."""
if not (bool(members) ^ bool(class_obj)):
raise ValueError("Either members or class_obj is required.")
first_order_attr_name, remainder = split_attr_name(attr_name)
fields = members[first_order_attr_name] if members else dict(vars(class_obj)).get(first_order_attr_name, {})
if remainder:
fields = attrgetter(remainder)(fields)
if callable(fields):
fields = fields()
if not isinstance(fields, dict):
# Support Django models
fields = {getattr(f, "name", str(f)): f for f in fields if not getattr(f, "auto_created", False)}
return fields
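# A hedged sketch of how `get_fields` resolves a dotted attribute name, using the
# Django-style "_meta.get_fields" entry registered in `get_class_documentation`
# ("SomeModel" is a hypothetical Django model):
#
#     get_fields("_meta.get_fields", members=dict(inspect.getmembers(SomeModel)))
#
# This looks up `_meta` in the members, calls its `get_fields()` method, and returns a
# mapping of field name to field, skipping auto-created fields.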
pytkdocs-0.16.1/src/pytkdocs/objects.py 0000664 0000000 0000000 00000034321 14211451604 0020072 0 ustar 00root root 0000000 0000000 """
This module defines the documented objects classes.
- the generic [`Object`][pytkdocs.objects.Object] class
- the [`Module`][pytkdocs.objects.Module] class
- the [`Class`][pytkdocs.objects.Class] class
- the [`Method`][pytkdocs.objects.Method] class
- the [`Function`][pytkdocs.objects.Function] class
- the [`Attribute`][pytkdocs.objects.Attribute] class
Note that properties are considered attributes, because they are used like such.
It also defines a convenient [`Source`][pytkdocs.objects.Source] class to represent source code.
"""
import importlib
import inspect
import os
import sys
from abc import ABCMeta
from functools import lru_cache
from pathlib import Path
from typing import List, Optional, Union
from pytkdocs.parsers.docstrings.base import Parser, Section
from pytkdocs.properties import NAME_CLASS_PRIVATE, NAME_PRIVATE, NAME_SPECIAL, ApplicableNameProperty
class Source:
"""
Helper class to represent source code.
It is simply used to wrap the result of
    [`inspect.getsourcelines`](https://docs.python.org/3/library/inspect.html#inspect.getsourcelines).
"""
def __init__(self, lines: Union[str, List[str]], line_start: int) -> None:
"""
Initialize the object.
Arguments:
lines: A list of strings. The strings should have trailing newlines.
line_start: The line number of where the code starts in the file.
"""
if isinstance(lines, list):
code = "".join(lines)
else:
code = lines
self.code = code
"""The code, as a single string."""
self.line_start = line_start
"""The first line number."""
class Object(metaclass=ABCMeta):
"""
A base class to store information about a Python object.
Each instance additionally stores references to its children, grouped by category.
"""
possible_name_properties: List[ApplicableNameProperty] = []
"""
The properties that we can apply to the object based on its name.
The applicable properties vary from one subclass of `Object` to another.
"""
def __init__(
self,
name: str,
path: str,
file_path: str,
docstring: Optional[str] = "",
properties: Optional[List[str]] = None,
source: Optional[Source] = None,
) -> None:
"""
Initialize the object.
Arguments:
name: The object's name.
path: The object's dotted-path.
file_path: The file path of the object's direct parent module.
docstring: The object's docstring.
properties: The object's properties.
source: The object's source code.
"""
self.name = name
"""The object's name."""
self.path = path
"""The object's dotted-path."""
self.file_path = file_path
"""The file path of the object's direct parent module."""
self.docstring = docstring
"""The object's docstring."""
self.docstring_sections: List[Section] = []
"""The object's docstring parsed into sections."""
self.docstring_errors: List[str] = []
"""The errors detected while parsing the docstring."""
self.properties = properties or []
"""The object's properties."""
self.parent: Optional[Object] = None
"""The object's parent (another instance of a subclass of `Object`)."""
self.source = source
"""The object's source code."""
self._path_map = {self.path: self}
self._parsed = False
self.attributes: List[Attribute] = []
"""The list of all the object's attributes."""
self.methods: List[Method] = []
"""The list of all the object's methods."""
self.functions: List[Function] = []
"""The list of all the object's functions."""
self.modules: List[Module] = []
"""The list of all the object's submodules."""
self.classes: List[Class] = []
"""The list of all the object's classes."""
self.children: List[Object] = []
"""The list of all the object's children."""
def __str__(self) -> str:
return self.path
@property
def category(self) -> str:
"""
Return the object's category.
Returns:
The object's category (module, class, function, method or attribute).
"""
return self.__class__.__name__.lower()
@property
def root(self) -> "Object":
"""
Return the object's root.
Returns:
The object's root (top-most parent).
"""
obj = self
while obj.parent:
obj = obj.parent
return obj
@property
def relative_file_path(self) -> str:
"""
Return the relative file path of the object.
It is the relative path to the object's module,
starting at the path of the top-most package it is contained in.
For example:
- package is `a`
- package absolute path is `/abs/path/to/a`
- module is `a.b.c`
- object is `c` or anything defined in `c`
- relative file path is `a/b/c.py`
If the relative file path cannot be determined, the value returned is `""` (empty string).
Returns:
The path relative to the object's package.
"""
parts = self.path.split(".")
namespaces = [".".join(parts[:length]) for length in range(1, len(parts) + 1)] # noqa: WPS221 (not complex)
# Iterate through all sub namespaces including the last in case it is a module
for namespace in namespaces:
try: # noqa: WPS229 (more compact)
importlib.import_module(namespace)
top_package = sys.modules[namespace]
except (ModuleNotFoundError, ImportError, KeyError):
# ImportError: Triggered if the namespace is not importable
# ModuleNotFoundError: Triggered if the namespace is not a module
# KeyError: Triggered if the imported package isn't referenced under the same fully qualified name
# Namespace packages are importable, so this should work for them
return ""
try: # noqa: WPS229 (more compact)
top_package_path = Path(inspect.getabsfile(top_package)).parent
return str(Path(self.file_path).relative_to(top_package_path.parent))
except TypeError:
# Triggered if getabsfile() can't be found in the case of a Namespace package
pass # noqa: WPS420 (passing is the only way)
except ValueError:
# Triggered if Path().relative_to can't find an appropriate path
return ""
return ""
@property
def name_to_check(self) -> str:
"""
Return the attribute to check against name-properties regular expressions (private, class-private, special).
Returns:
The attribute to check (its name).
"""
return self.name
@property
def name_properties(self) -> List[str]:
"""
Return the object's name properties.
Returns:
The object's name properties (private, class-private, special).
"""
properties = []
for prop, predicate in self.possible_name_properties:
if predicate(self.name_to_check):
properties.append(prop)
return properties
@property
def parent_path(self) -> str:
"""
Return the parent's path, computed from the current path.
The parent object path is not used: this property is used to see if an object is really related to another one,
to add it as a child to the other. When we do that, the child doesn't even have a parent.
Returns:
The dotted path of the parent object.
"""
return self.path.rsplit(".", 1)[0]
def add_child(self, obj: "Object") -> None: # noqa: WPS231 (not complex)
"""
Add an object as a child of this object.
If the child computed `parent_path` is not equal to this object's path, abort.
Append the child to the `children` list, and to the right category list.
Arguments:
obj: An instance of documented object.
"""
if obj.parent_path != self.path:
return
self.children.append(obj)
if isinstance(obj, Module):
self.modules.append(obj)
elif isinstance(obj, Class):
self.classes.append(obj)
elif isinstance(obj, Function):
self.functions.append(obj)
elif isinstance(obj, Method):
self.methods.append(obj)
elif isinstance(obj, Attribute):
# Dataclass attributes with default values will already be present in `self.attributes` as they are
# resolved differently by the python interpreter. As they have a concrete value, they are already present
# in the "original" class. They should be overridden with the new "dataclass" attribute coming in here
# (having the "dataclass_field" property set)
new_attribute_name = obj.name
for attribute in self.attributes:
if attribute.name == new_attribute_name:
self.attributes.remove(attribute)
self.attributes.append(obj)
obj.parent = self
self._path_map[obj.path] = obj
def add_children(self, children: List["Object"]) -> None:
"""
Add a list of objects as children of this object.
Arguments:
children: The list of children to add.
"""
for child in children:
self.add_child(child)
def parse_docstring(self, parser: Parser, **context) -> None:
"""
Parse the docstring of this object.
Arguments:
parser: A parser to parse the docstrings.
**context: Additional context to use when parsing.
"""
if self.docstring and not self._parsed:
sections, errors = parser.parse(self.docstring, {"obj": self, **context})
self.docstring_sections = sections
self.docstring_errors = errors
self._parsed = True
def parse_all_docstrings(self, parser: Parser) -> None:
"""
Recursively parse the docstring of this object and its children.
Arguments:
parser: A parser to parse the docstrings.
"""
self.parse_docstring(parser)
for child in self.children:
child.parse_all_docstrings(parser)
@lru_cache()
def has_contents(self) -> bool:
"""
Tell if the object has "contents".
An object has contents when:
- it is the root of the object tree
- it has a docstring
- at least one of its children (whatever the depth) has contents
The value is cached, so this method should be called last, when the tree doesn't change anymore.
Returns:
Whether this object has contents or not.
"""
has_docstring = bool(self.docstring)
is_root = not self.parent
children_have_contents = any(child.has_contents() for child in self.children)
return has_docstring or is_root or children_have_contents
class Module(Object):
"""A class to store information about a module."""
possible_name_properties: List[ApplicableNameProperty] = [NAME_SPECIAL, NAME_PRIVATE]
@property
def file_name(self) -> str:
"""
Return the base name of the module file, without the extension.
Returns:
The module file's base name.
"""
return os.path.splitext(os.path.basename(self.file_path))[0]
@property
def name_to_check(self) -> str: # noqa: D102
return self.file_name
class Class(Object):
"""A class to store information about a class."""
possible_name_properties: List[ApplicableNameProperty] = [NAME_PRIVATE]
def __init__(self, *args, bases: List[str] = None, **kwargs):
"""
Initialize the object.
Arguments:
*args: Arguments passed to the parent class initializer.
bases: The base classes (dotted paths).
**kwargs: Keyword arguments passed to the parent class initializer.
"""
super().__init__(*args, **kwargs)
self.bases = bases or ["object"]
class Function(Object):
"""
A class to store information about a function.
It accepts an additional `signature` argument at instantiation.
"""
possible_name_properties: List[ApplicableNameProperty] = [NAME_PRIVATE]
def __init__(self, *args, signature=None, **kwargs):
"""
Initialize the object.
Arguments:
*args: Arguments passed to the parent class initializer.
signature: The function signature.
**kwargs: Keyword arguments passed to the parent class initializer.
"""
super().__init__(*args, **kwargs)
self.signature = signature
class Method(Object):
"""
A class to store information about a method.
It accepts an additional `signature` argument at instantiation.
"""
possible_name_properties: List[ApplicableNameProperty] = [NAME_SPECIAL, NAME_PRIVATE]
def __init__(self, *args, signature=None, **kwargs):
"""
Initialize the object.
Arguments:
*args: Arguments passed to the parent class initializer.
signature: The function signature.
**kwargs: Keyword arguments passed to the parent class initializer.
"""
super().__init__(*args, **kwargs)
self.signature = signature
class Attribute(Object):
"""
A class to store information about an attribute.
It accepts an additional `attr_type` argument at instantiation.
"""
possible_name_properties: List[ApplicableNameProperty] = [NAME_SPECIAL, NAME_CLASS_PRIVATE, NAME_PRIVATE]
def __init__(self, *args, attr_type=None, **kwargs):
"""
Initialize the object.
Arguments:
*args: Arguments passed to the parent class initializer.
attr_type: The attribute type.
**kwargs: Keyword arguments passed to the parent class initializer.
"""
super().__init__(*args, **kwargs)
self.type = attr_type
pytkdocs-0.16.1/src/pytkdocs/parsers/ 0000775 0000000 0000000 00000000000 14211451604 0017543 5 ustar 00root root 0000000 0000000 pytkdocs-0.16.1/src/pytkdocs/parsers/__init__.py 0000664 0000000 0000000 00000000047 14211451604 0021655 0 ustar 00root root 0000000 0000000 """The docstrings parsers' package."""
pytkdocs-0.16.1/src/pytkdocs/parsers/attributes.py 0000664 0000000 0000000 00000011320 14211451604 0022300 0 ustar 00root root 0000000 0000000 """Module containing functions to parse attributes in the source code."""
import ast
import inspect
from functools import lru_cache
from textwrap import dedent
from typing import get_type_hints
try:
from ast import unparse # type: ignore
except ImportError:
from astunparse import unparse # type: ignore
RECURSIVE_NODES = (ast.If, ast.IfExp, ast.Try, ast.With)
def get_nodes(obj):
try:
source = inspect.getsource(obj)
except (OSError, TypeError):
source = ""
return ast.parse(dedent(source)).body
def recurse_on_node(node):
if isinstance(node, ast.Try):
yield from get_pairs(node.body)
for handler in node.handlers:
yield from get_pairs(handler.body)
yield from get_pairs(node.orelse)
yield from get_pairs(node.finalbody)
elif isinstance(node, ast.If):
yield from get_pairs(node.body)
yield from get_pairs(node.orelse)
else:
yield from get_pairs(node.body)
def get_pairs(nodes):
if len(nodes) < 2:
return
index = 0
while index < len(nodes):
node1 = nodes[index]
if index < len(nodes) - 1:
node2 = nodes[index + 1]
else:
node2 = None
if isinstance(node1, (ast.Assign, ast.AnnAssign)):
if isinstance(node2, ast.Expr) and isinstance(node2.value, ast.Str):
yield node1, node2.value
index += 2
else:
yield node1, None
index += 1
else:
index += 1
if isinstance(node1, RECURSIVE_NODES):
yield from recurse_on_node(node1)
if isinstance(node2, RECURSIVE_NODES):
yield from recurse_on_node(node2)
index += 1
elif not isinstance(node2, (ast.Assign, ast.AnnAssign)):
index += 1
def get_module_or_class_attributes(nodes):
result = {}
for assignment, string_node in get_pairs(nodes):
string = inspect.cleandoc(string_node.s) if string_node else None
if isinstance(assignment, ast.Assign):
names = []
for target in assignment.targets:
if isinstance(target, ast.Name):
names.append(target.id)
elif isinstance(target, ast.Tuple):
names.extend([name.id for name in target.elts])
else:
names = [assignment.target.id]
for name in names:
result[name] = string
return result
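# Illustrative usage (a minimal sketch): extracting attribute docstrings from parsed
# module-level assignments, on the Python versions this module targets (where string
# constants still match ast.Str).
#
#     >>> import ast
#     >>> source = 'X = 1\n"""Docstring for X."""\nY = 2\n'
#     >>> get_module_or_class_attributes(ast.parse(source).body)
#     {'X': 'Docstring for X.', 'Y': None}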
def combine(docstrings, type_hints):
return {
name: {"annotation": type_hints.get(name, inspect.Signature.empty), "docstring": docstrings.get(name)}
for name in set(docstrings.keys()) | set(type_hints.keys())
}
def merge(base, extra):
for attr_name, data in extra.items():
if attr_name in base:
if data["annotation"] is not inspect.Signature.empty:
base[attr_name]["annotation"] = data["annotation"]
if data["docstring"] is not None:
base[attr_name]["docstring"] = data["docstring"]
else:
base[attr_name] = data
@lru_cache()
def get_module_attributes(module):
return combine(get_module_or_class_attributes(get_nodes(module)), get_type_hints(module))
@lru_cache()
def get_class_attributes(cls):
nodes = get_nodes(cls)
if not nodes:
return {}
try:
type_hints = get_type_hints(cls)
except NameError:
# The __config__ attribute (a class) of Pydantic models trigger this error:
# NameError: name 'SchemaExtraCallable' is not defined
type_hints = {}
return combine(get_module_or_class_attributes(nodes[0].body), type_hints)
def pick_target(target):
return isinstance(target, ast.Attribute) and isinstance(target.value, ast.Name) and target.value.id == "self"
def unparse_annotation(node):
code = unparse(node).rstrip("\n")
return code.replace("(", "").replace(")", "")
@lru_cache()
def get_instance_attributes(func):
nodes = get_nodes(func)
if not nodes:
return {}
result = {}
for assignment, string in get_pairs(nodes[0].body):
annotation = names = None
if isinstance(assignment, ast.AnnAssign):
if pick_target(assignment.target):
names = [assignment.target.attr]
annotation = unparse_annotation(assignment.annotation)
else:
names = [target.attr for target in assignment.targets if pick_target(target)]
if not names or (string is None and annotation is None):
continue
docstring = inspect.cleandoc(string.s) if string else None
for name in names:
result[name] = {"annotation": annotation, "docstring": docstring}
return result
pytkdocs-0.16.1/src/pytkdocs/parsers/docstrings/ 0000775 0000000 0000000 00000000000 14211451604 0021722 5 ustar 00root root 0000000 0000000 pytkdocs-0.16.1/src/pytkdocs/parsers/docstrings/__init__.py 0000664 0000000 0000000 00000001265 14211451604 0024037 0 ustar 00root root 0000000 0000000 """The parsers' package."""
from typing import Dict, Type
from pytkdocs.parsers.docstrings.base import Parser, UnavailableParser
from pytkdocs.parsers.docstrings.google import Google
from pytkdocs.parsers.docstrings.markdown import Markdown
from pytkdocs.parsers.docstrings.restructured_text import RestructuredText
try:
from pytkdocs.parsers.docstrings.numpy import Numpy
except ImportError:
Numpy = UnavailableParser( # type: ignore
"pytkdocs must be installed with 'numpy-style' extra to parse Numpy docstrings"
)
PARSERS: Dict[str, Type[Parser]] = {
"google": Google,
"restructured-text": RestructuredText,
"numpy": Numpy,
"markdown": Markdown,
}
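# A minimal usage sketch (illustrative only): picking a parser from the registry above
# and parsing a plain docstring with an empty object context.
#
#     >>> parser = PARSERS["google"]()
#     >>> sections, errors = parser.parse("Hello.", {"obj": None})
#     >>> [section.type for section in sections]
#     ['markdown']
#     >>> errors
#     []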
pytkdocs-0.16.1/src/pytkdocs/parsers/docstrings/base.py 0000664 0000000 0000000 00000013372 14211451604 0023214 0 ustar 00root root 0000000 0000000 """The base module for docstring parsing."""
import inspect
from abc import ABCMeta, abstractmethod
from typing import Any, List, Optional, Tuple
empty = inspect.Signature.empty
class AnnotatedObject:
"""A helper class to store information about an annotated object."""
def __init__(self, annotation: Any, description: str) -> None:
"""
Initialize the object.
Arguments:
annotation: The object's annotation.
description: The object's description.
"""
self.annotation = annotation
self.description = description
class Attribute(AnnotatedObject):
"""A helper class to store information about a documented attribute."""
def __init__(self, name: str, annotation: Any, description: str) -> None:
"""
Initialize the object.
Arguments:
name: The attribute's name.
annotation: The object's annotation.
description: The object's description.
"""
super().__init__(annotation, description)
self.name = name
class Parameter(AnnotatedObject):
"""A helper class to store information about a signature parameter."""
def __init__(self, name: str, annotation: Any, description: str, kind: Any, default: Any = empty) -> None:
"""
Initialize the object.
Arguments:
name: The parameter's name.
annotation: The parameter's annotation.
description: The parameter's description.
kind: The parameter's kind (positional only, keyword only, etc.).
default: The parameter's default value.
"""
super().__init__(annotation, description)
self.name = name
self.kind = kind
self.default = default
def __str__(self):
return self.name
def __repr__(self):
return f""
@property
def is_optional(self):
"""Tell if this parameter is optional."""
return self.default is not empty
@property
def is_required(self):
"""Tell if this parameter is required."""
return not self.is_optional
@property
def is_args(self):
"""Tell if this parameter is positional."""
return self.kind is inspect.Parameter.VAR_POSITIONAL
@property
def is_kwargs(self):
"""Tell if this parameter is a keyword."""
return self.kind is inspect.Parameter.VAR_KEYWORD
@property
def default_string(self):
"""Return the default value as a string."""
if self.is_kwargs:
return "{}"
if self.is_args:
return "()"
if self.is_required:
return ""
return repr(self.default)
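# Illustrative behavior of the helpers above (a minimal sketch):
#
#     >>> import inspect
#     >>> param = Parameter("x", int, "An integer.", kind=inspect.Parameter.POSITIONAL_OR_KEYWORD, default=3)
#     >>> param.is_optional, param.default_string
#     (True, '3')
#     >>> kwargs = Parameter("opts", dict, "Extra options.", kind=inspect.Parameter.VAR_KEYWORD)
#     >>> kwargs.is_kwargs, kwargs.default_string
#     (True, '{}')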
class Section:
"""A helper class to store a docstring section."""
class Type:
"""The possible section types."""
MARKDOWN = "markdown"
PARAMETERS = "parameters"
EXCEPTIONS = "exceptions"
RETURN = "return"
YIELD = "yield"
EXAMPLES = "examples"
ATTRIBUTES = "attributes"
KEYWORD_ARGS = "keyword_args"
def __init__(self, section_type: str, value: Any) -> None:
"""
Initialize the object.
Arguments:
section_type: The type of the section, from the [`Type`][pytkdocs.parsers.docstrings.base.Section.Type] enum.
value: The section value.
"""
self.type = section_type
self.value = value
def __str__(self):
return self.type
def __repr__(self):
return f""
class Parser(metaclass=ABCMeta):
"""
A class to parse docstrings.
It is instantiated with an object's path, docstring, signature and return type.
The `parse` method then returns structured data,
in the form of a list of [`Section`][pytkdocs.parsers.docstrings.base.Section]s.
It also returns the list of errors that occurred during parsing.
"""
def __init__(self) -> None:
"""Initialize the object."""
self.context: dict = {}
self.errors: List[str] = []
def parse(self, docstring: str, context: Optional[dict] = None) -> Tuple[List[Section], List[str]]:
"""
Parse a docstring and return a list of sections and parsing errors.
Arguments:
docstring: The docstring to parse.
context: Some context helping to parse the docstring.
Returns:
A tuple containing the list of sections and the parsing errors.
"""
self.context = context or {}
self.errors = []
sections = self.parse_sections(docstring)
errors = self.errors
return sections, errors
def error(self, message) -> None:
"""
Record a parsing error.
Arguments:
message: A message described the error.
"""
if self.context["obj"]:
message = f"{self.context['obj'].path}: {message}"
self.errors.append(message)
@abstractmethod
def parse_sections(self, docstring: str) -> List[Section]:
"""
Parse a docstring as a list of sections.
Arguments:
docstring: The docstring to parse.
Returns:
A list of [`Section`][pytkdocs.parsers.docstrings.base.Section]s.
"""
raise NotImplementedError
class UnavailableParser:
def __init__(self, message):
self.message = message
def parse(self, docstring: str, context: Optional[dict] = None) -> Tuple[List[Section], List[str]]:
context = context or {}
message = self.message
if "obj" in context:
message = f"{context['obj'].path}: {message}"
return [], [message]
def __call__(self, *args, **kwargs):
return self
pytkdocs-0.16.1/src/pytkdocs/parsers/docstrings/google.py 0000664 0000000 0000000 00000047356 14211451604 0023567 0 ustar 00root root 0000000 0000000 """This module defines functions and classes to parse docstrings into structured data."""
import inspect
import re
from typing import Any, List, Optional, Pattern, Tuple
from pytkdocs.parsers.docstrings.base import AnnotatedObject, Attribute, Parameter, Parser, Section, empty
SECTIONS_TITLES = {
"args:": Section.Type.PARAMETERS,
"arguments:": Section.Type.PARAMETERS,
"params:": Section.Type.PARAMETERS,
"parameters:": Section.Type.PARAMETERS,
"keyword args:": Section.Type.KEYWORD_ARGS,
"keyword arguments:": Section.Type.KEYWORD_ARGS,
"raise:": Section.Type.EXCEPTIONS,
"raises:": Section.Type.EXCEPTIONS,
"except:": Section.Type.EXCEPTIONS,
"exceptions:": Section.Type.EXCEPTIONS,
"return:": Section.Type.RETURN,
"returns:": Section.Type.RETURN,
"yield:": Section.Type.YIELD,
"yields:": Section.Type.YIELD,
"example:": Section.Type.EXAMPLES,
"examples:": Section.Type.EXAMPLES,
"attribute:": Section.Type.ATTRIBUTES,
"attributes:": Section.Type.ATTRIBUTES,
}
RE_GOOGLE_STYLE_ADMONITION: Pattern = re.compile(r"^(?P<indent>\s*)(?P<type>[\w-]+):((?:\s+)(?P<title>.+))?$")
"""Regular expression to match lines starting admonitions, of the form `TYPE: [TITLE]`."""
RE_DOCTEST_BLANKLINE: Pattern = re.compile(r"^\s*<BLANKLINE>\s*$")
"""Regular expression to match lines of the form `<BLANKLINE>`."""
RE_DOCTEST_FLAGS: Pattern = re.compile(r"(\s*#\s*doctest:.+)$")
"""Regular expression to match lines containing doctest flags of the form `# doctest: +FLAG`."""
class Google(Parser):
"""A Google-style docstrings parser."""
def __init__(self, replace_admonitions: bool = True, trim_doctest_flags: bool = True) -> None:
"""
Initialize the object.
Arguments:
replace_admonitions: Whether to replace admonitions by their Markdown equivalent.
trim_doctest_flags: Whether to remove doctest flags.
"""
super().__init__()
self.replace_admonitions = replace_admonitions
self.trim_doctest_flags = trim_doctest_flags
self.section_reader = {
Section.Type.PARAMETERS: self.read_parameters_section,
Section.Type.KEYWORD_ARGS: self.read_keyword_arguments_section,
Section.Type.EXCEPTIONS: self.read_exceptions_section,
Section.Type.EXAMPLES: self.read_examples_section,
Section.Type.ATTRIBUTES: self.read_attributes_section,
Section.Type.RETURN: self.read_return_section,
Section.Type.YIELD: self.read_yield_section,
}
def parse_sections(self, docstring: str) -> List[Section]: # noqa: D102
if "signature" not in self.context:
self.context["signature"] = getattr(self.context["obj"], "signature", None)
if "annotation" not in self.context:
self.context["annotation"] = getattr(self.context["obj"], "type", empty)
if "attributes" not in self.context:
self.context["attributes"] = {}
sections = []
current_section = []
in_code_block = False
lines = docstring.split("\n")
i = 0
while i < len(lines):
line_lower = lines[i].lower()
if in_code_block:
if line_lower.lstrip(" ").startswith("```"):
in_code_block = False
current_section.append(lines[i])
elif line_lower in SECTIONS_TITLES:
if current_section:
if any(current_section):
sections.append(Section(Section.Type.MARKDOWN, "\n".join(current_section)))
current_section = []
section_reader = self.section_reader[SECTIONS_TITLES[line_lower]]
section, i = section_reader(lines, i + 1)
if section:
sections.append(section)
elif line_lower.lstrip(" ").startswith("```"):
in_code_block = True
current_section.append(lines[i])
else:
if self.replace_admonitions and not in_code_block and i + 1 < len(lines):
match = RE_GOOGLE_STYLE_ADMONITION.match(lines[i])
if match:
groups = match.groupdict()
indent = groups["indent"]
if lines[i + 1].startswith(indent + " " * 4):
lines[i] = f"{indent}!!! {groups['type'].lower()}"
if groups["title"]:
lines[i] += f' "{groups["title"]}"'
current_section.append(lines[i])
i += 1
if current_section:
sections.append(Section(Section.Type.MARKDOWN, "\n".join(current_section)))
return sections
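# Illustrative behavior of the admonition replacement above (a minimal sketch, going
# through `parse` so the parsing context gets populated):
#
#     >>> google = Google()
#     >>> text = 'Note: Mind the gap\n    The body of the note is indented.'
#     >>> sections, errors = google.parse(text, {"obj": None})
#     >>> print(sections[0].value)
#     !!! note "Mind the gap"
#         The body of the note is indented.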
def read_block_items(self, lines: List[str], start_index: int) -> Tuple[List[str], int]:
"""
Parse an indented block as a list of items.
The first indentation level is used as a reference to determine if the next lines are new items
or continuation lines.
Arguments:
lines: The block lines.
start_index: The line number to start at.
Returns:
A tuple containing the list of concatenated lines and the index at which to continue parsing.
"""
if start_index >= len(lines):
return [], start_index
i = start_index
items: List[str] = []
# skip first empty lines
while is_empty_line(lines[i]):
i += 1
# get initial indent
indent = len(lines[i]) - len(lines[i].lstrip())
if indent == 0:
# first non-empty line was not indented, abort
return [], i - 1
# start processing first item
current_item = [lines[i][indent:]]
i += 1
# loop on next lines
while i < len(lines):
line = lines[i]
if line.startswith(indent * 2 * " "):
# continuation line
current_item.append(line[indent * 2 :])
elif line.startswith((indent + 1) * " "):
# indent between initial and continuation: append but add error
cont_indent = len(line) - len(line.lstrip())
current_item.append(line[cont_indent:])
self.error(
f"Confusing indentation for continuation line {i+1} in docstring, "
f"should be {indent} * 2 = {indent*2} spaces, not {cont_indent}"
)
elif line.startswith(indent * " "):
# indent equal to initial one: new item
items.append("\n".join(current_item))
current_item = [line[indent:]]
elif is_empty_line(line):
# empty line: preserve it in the current item
current_item.append("")
else:
# indent lower than initial one: end of section
break
i += 1
if current_item:
items.append("\n".join(current_item).rstrip("\n"))
return items, i - 1
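# Illustrative behavior (a minimal sketch): items are split on the initial indentation
# level, and doubly-indented lines are treated as continuation lines.
#
#     >>> lines = [
#     ...     "    x: The first value.",
#     ...     "    y: The second value,",
#     ...     "        continued on the next line.",
#     ... ]
#     >>> items, last_index = Google().read_block_items(lines, 0)
#     >>> items
#     ['x: The first value.', 'y: The second value,\ncontinued on the next line.']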
def read_block(self, lines: List[str], start_index: int) -> Tuple[str, int]:
"""
Parse an indented block.
Arguments:
lines: The block lines.
start_index: The line number to start at.
Returns:
A tuple containing the list of lines and the index at which to continue parsing.
"""
if start_index >= len(lines):
return "", start_index
i = start_index
block: List[str] = []
# skip first empty lines
while is_empty_line(lines[i]):
i += 1
# get initial indent
indent = len(lines[i]) - len(lines[i].lstrip())
if indent == 0:
# first non-empty line was not indented, abort
return "", i - 1
# start processing first item
block.append(lines[i].lstrip())
i += 1
# loop on next lines
while i < len(lines) and (lines[i].startswith(indent * " ") or is_empty_line(lines[i])):
block.append(lines[i][indent:])
i += 1
return "\n".join(block).rstrip("\n"), i - 1
def _parse_parameters_section(self, lines: List[str], start_index: int) -> Tuple[List[Parameter], int]:
"""
Parse a "parameters" or "keyword args" section.
Arguments:
lines: The parameters block lines.
start_index: The line number to start at.
Returns:
A tuple containing the list of parsed parameters and the index at which to continue parsing.
"""
parameters = []
type_: Any
block, i = self.read_block_items(lines, start_index)
for param_line in block:
# Check that there is an annotation in the docstring
try:
name_with_type, description = param_line.split(":", 1)
except ValueError:
self.error(f"Failed to get 'name: description' pair from '{param_line}'")
continue
# Setting defaults
default = empty
annotation = empty
kind = None
# Can only get description from docstring - keep if no type was given
description = description.lstrip()
# If we have managed to find a type in the docstring use this
if " " in name_with_type:
name, type_ = name_with_type.split(" ", 1)
annotation = type_.strip("()")
if annotation.endswith(", optional"): # type: ignore
annotation = annotation[:-10] # type: ignore
# Otherwise try to use the signature as `annotation` would still be empty
else:
name = name_with_type
# Check in the signature to get extra details
try:
signature_param = self.context["signature"].parameters[name.lstrip("*")]
except (AttributeError, KeyError):
if annotation is empty:
self.error(f"No type annotation for parameter '{name}'")
else:
if annotation is empty:
annotation = signature_param.annotation
# If signature_param.X is empty it doesn't matter, as the defaults are empty anyway
default = signature_param.default
kind = signature_param.kind
parameters.append(
Parameter(name=name, annotation=annotation, description=description, default=default, kind=kind)
)
return parameters, i
def read_parameters_section(self, lines: List[str], start_index: int) -> Tuple[Optional[Section], int]:
"""
Parse a "parameters" section.
Arguments:
lines: The parameters block lines.
start_index: The line number to start at.
Returns:
A tuple containing a `Section` (or `None`) and the index at which to continue parsing.
"""
parameters, i = self._parse_parameters_section(lines, start_index)
if parameters:
return Section(Section.Type.PARAMETERS, parameters), i
self.error(f"Empty parameters section at line {start_index}")
return None, i
def read_keyword_arguments_section(self, lines: List[str], start_index: int) -> Tuple[Optional[Section], int]:
"""
Parse a "keyword arguments" section.
Arguments:
lines: The parameters block lines.
start_index: The line number to start at.
Returns:
A tuple containing a `Section` (or `None`) and the index at which to continue parsing.
"""
parameters, i = self._parse_parameters_section(lines, start_index)
for parameter in parameters:
parameter.kind = inspect.Parameter.KEYWORD_ONLY
if parameters:
return Section(Section.Type.KEYWORD_ARGS, parameters), i
self.error(f"Empty keyword arguments section at line {start_index}")
return None, i
def read_attributes_section(self, lines: List[str], start_index: int) -> Tuple[Optional[Section], int]:
"""
Parse an "attributes" section.
Arguments:
lines: The parameters block lines.
start_index: The line number to start at.
Returns:
A tuple containing a `Section` (or `None`) and the index at which to continue parsing.
"""
attributes = []
block, i = self.read_block_items(lines, start_index)
for attr_line in block:
try:
name_with_type, description = attr_line.split(":", 1)
except ValueError:
self.error(f"Failed to get 'name: description' pair from '{attr_line}'")
continue
description = description.lstrip()
if " " in name_with_type:
name, annotation = name_with_type.split(" ", 1)
annotation = annotation.strip("()")
if annotation.endswith(", optional"):
annotation = annotation[:-10]
else:
name = name_with_type
annotation = self.context["attributes"].get(name, {}).get("annotation", empty)
attributes.append(Attribute(name=name, annotation=annotation, description=description))
if attributes:
return Section(Section.Type.ATTRIBUTES, attributes), i
self.error(f"Empty attributes section at line {start_index}")
return None, i
def read_exceptions_section(self, lines: List[str], start_index: int) -> Tuple[Optional[Section], int]:
"""
Parse an "exceptions" section.
Arguments:
lines: The exceptions block lines.
start_index: The line number to start at.
Returns:
A tuple containing a `Section` (or `None`) and the index at which to continue parsing.
"""
exceptions = []
block, i = self.read_block_items(lines, start_index)
for exception_line in block:
try:
annotation, description = exception_line.split(": ", 1)
except ValueError:
self.error(f"Failed to get 'exception: description' pair from '{exception_line}'")
else:
exceptions.append(AnnotatedObject(annotation, description.lstrip(" ")))
if exceptions:
return Section(Section.Type.EXCEPTIONS, exceptions), i
self.error(f"Empty exceptions section at line {start_index}")
return None, i
def read_return_section(self, lines: List[str], start_index: int) -> Tuple[Optional[Section], int]:
"""
Parse an "returns" section.
Arguments:
lines: The return block lines.
start_index: The line number to start at.
Returns:
A tuple containing a `Section` (or `None`) and the index at which to continue parsing.
"""
text, i = self.read_block(lines, start_index)
# Early exit if there is no text in the return section
if not text:
self.error(f"Empty return section at line {start_index}")
return None, i
# First try to get the annotation and description from the docstring
try:
type_, text = text.split(":", 1)
except ValueError:
description = text
annotation = self.context["annotation"]
# If there was no annotation in the docstring then move to signature
if annotation is empty and self.context["signature"]:
annotation = self.context["signature"].return_annotation
else:
annotation = type_.lstrip()
description = text.lstrip()
# There was no type in the docstring and no annotation
if annotation is empty:
self.error("No return type/annotation in docstring/signature")
return Section(Section.Type.RETURN, AnnotatedObject(annotation, description)), i
def read_yield_section(self, lines: List[str], start_index: int) -> Tuple[Optional[Section], int]:
"""
Parse a "yields" section.
Arguments:
lines: The yield block lines.
start_index: The line number to start at.
Returns:
A tuple containing a `Section` (or `None`) and the index at which to continue parsing.
"""
text, i = self.read_block(lines, start_index)
# Early exit if there is no text in the yield section
if not text:
self.error(f"Empty yield section at line {start_index}")
return None, i
# First try to get the annotation and description from the docstring
try:
type_, text = text.split(":", 1)
except ValueError:
description = text
annotation = self.context["annotation"]
# If there was no annotation in the docstring then move to signature
if annotation is empty and self.context["signature"]:
annotation = self.context["signature"].return_annotation
else:
annotation = type_.lstrip()
description = text.lstrip()
# There was no type in the docstring and no annotation
if annotation is empty:
self.error("No yield type/annotation in docstring/signature")
return Section(Section.Type.YIELD, AnnotatedObject(annotation, description)), i
def read_examples_section(self, lines: List[str], start_index: int) -> Tuple[Optional[Section], int]:
"""
Parse an "examples" section.
Arguments:
lines: The examples block lines.
start_index: The line number to start at.
Returns:
A tuple containing a `Section` (or `None`) and the index at which to continue parsing.
"""
text, i = self.read_block(lines, start_index)
sub_sections = []
in_code_example = False
in_code_block = False
current_text: List[str] = []
current_example: List[str] = []
for line in text.split("\n"):
if is_empty_line(line):
if in_code_example:
if current_example:
sub_sections.append((Section.Type.EXAMPLES, "\n".join(current_example)))
current_example = []
in_code_example = False
else:
current_text.append(line)
elif in_code_example:
if self.trim_doctest_flags:
line = RE_DOCTEST_FLAGS.sub("", line)
line = RE_DOCTEST_BLANKLINE.sub("", line)
current_example.append(line)
elif line.startswith("```"):
in_code_block = not in_code_block
current_text.append(line)
elif in_code_block:
current_text.append(line)
elif line.startswith(">>>"):
if current_text:
sub_sections.append((Section.Type.MARKDOWN, "\n".join(current_text)))
current_text = []
in_code_example = True
if self.trim_doctest_flags:
line = RE_DOCTEST_FLAGS.sub("", line)
current_example.append(line)
else:
current_text.append(line)
if current_text:
sub_sections.append((Section.Type.MARKDOWN, "\n".join(current_text)))
elif current_example:
sub_sections.append((Section.Type.EXAMPLES, "\n".join(current_example)))
if sub_sections:
return Section(Section.Type.EXAMPLES, sub_sections), i
self.error(f"Empty examples section at line {start_index}")
return None, i
def is_empty_line(line) -> bool:
"""
Tell if a line is empty.
Arguments:
line: The line to check.
Returns:
True if the line is empty or composed of blanks only, False otherwise.
"""
return not line.strip()
pytkdocs-0.16.1/src/pytkdocs/parsers/docstrings/markdown.py 0000664 0000000 0000000 00000000574 14211451604 0024124 0 ustar 00root root 0000000 0000000 """This module defines functions and classes to parse docstrings into structured data."""
from typing import List
from pytkdocs.parsers.docstrings.base import Parser, Section
class Markdown(Parser):
"""A Markdown docstrings parser."""
def parse_sections(self, docstring: str) -> List[Section]: # noqa: D102
return [Section(Section.Type.MARKDOWN, docstring)]
pytkdocs-0.16.1/src/pytkdocs/parsers/docstrings/numpy.py 0000664 0000000 0000000 00000024631 14211451604 0023452 0 ustar 00root root 0000000 0000000 """This module defines functions and classes to parse docstrings into structured data."""
import re
from typing import List, Optional, Pattern
from docstring_parser import parse
from docstring_parser.common import Docstring, DocstringMeta
from pytkdocs.parsers.docstrings.base import AnnotatedObject, Attribute, Parameter, Parser, Section, empty
RE_DOCTEST_BLANKLINE: Pattern = re.compile(r"^\s*<BLANKLINE>\s*$")
"""Regular expression to match lines of the form `<BLANKLINE>`."""
RE_DOCTEST_FLAGS: Pattern = re.compile(r"(\s*#\s*doctest:.+)$")
"""Regular expression to match lines containing doctest flags of the form `# doctest: +FLAG`."""
class Numpy(Parser):
"""A Numpy-style docstrings parser."""
def __init__(self, trim_doctest_flags: bool = True) -> None:
"""
Initialize the object.
Arguments:
trim_doctest_flags: Whether to remove doctest flags.
"""
super().__init__()
self.trim_doctest_flags = trim_doctest_flags
self.section_reader = {
Section.Type.PARAMETERS: self.read_parameters_section,
Section.Type.EXCEPTIONS: self.read_exceptions_section,
Section.Type.EXAMPLES: self.read_examples_section,
Section.Type.ATTRIBUTES: self.read_attributes_section,
Section.Type.RETURN: self.read_return_section,
}
def parse_sections(self, docstring: str) -> List[Section]: # noqa: D102
if "signature" not in self.context:
self.context["signature"] = getattr(self.context["obj"], "signature", None)
if "annotation" not in self.context:
self.context["annotation"] = getattr(self.context["obj"], "type", empty)
if "attributes" not in self.context:
self.context["attributes"] = {}
docstring_obj = parse(docstring)
description_all = (
none_str_cast(docstring_obj.short_description) + "\n\n" + none_str_cast(docstring_obj.long_description)
).strip()
sections = [Section(Section.Type.MARKDOWN, description_all)] if description_all else []
sections_other = [
reader(docstring_obj) # type: ignore
if sec == Section.Type.RETURN
else reader(docstring, docstring_obj) # type: ignore
for (sec, reader) in self.section_reader.items()
]
sections.extend([sec for sec in sections_other if sec])
return sections
def read_parameters_section(
self,
docstring: str,
docstring_obj: Docstring,
) -> Optional[Section]:
"""
Parse a "parameters" section.
Arguments:
docstring: The raw docstring.
docstring_obj: Docstring object parsed by docstring_parser.
Returns:
A `Section` object (or `None` if section is empty).
"""
parameters = []
docstring_params = [p for p in docstring_obj.params if p.args[0] == "param"]
for param in docstring_params:
name = param.arg_name
kind = None
type_name = param.type_name
default = param.default or empty
try:
signature_param = self.context["signature"].parameters[name.lstrip("*")]
except (AttributeError, KeyError):
self.error(f"No type annotation for parameter '{name}'")
else:
if signature_param.annotation is not empty:
type_name = signature_param.annotation
if signature_param.default is not empty:
default = signature_param.default
kind = signature_param.kind
description = param.description or ""
if not description:
self.error(f"No description for parameter '{name}'")
parameters.append(
Parameter(
name=param.arg_name,
annotation=type_name,
description=description,
default=default,
kind=kind,
)
)
if parameters:
return Section(Section.Type.PARAMETERS, parameters)
if re.search("Parameters\n", docstring):
self.error("Empty parameter section")
return None
def read_attributes_section(
self,
docstring: str,
docstring_obj: Docstring,
) -> Optional[Section]:
"""
Parse an "attributes" section.
Arguments:
docstring: The raw docstring.
docstring_obj: Docstring object parsed by docstring_parser.
Returns:
A `Section` object (or `None` if section is empty).
"""
attributes = []
docstring_attributes = [p for p in docstring_obj.params if p.args[0] == "attribute"]
for attr in docstring_attributes:
description = attr.description or ""
if not description:
self.error(f"No description for attribute '{attr.arg_name}'")
attributes.append(
Attribute(
name=attr.arg_name,
annotation=attr.type_name,
description=description,
)
)
if attributes:
return Section(Section.Type.ATTRIBUTES, attributes)
if re.search("Attributes\n", docstring):
self.error("Empty attributes section")
return None
def read_exceptions_section(
self,
docstring: str,
docstring_obj: Docstring,
) -> Optional[Section]:
"""
Parse an "exceptions" section.
Arguments:
docstring: The raw docstring.
docstring_obj: Docstring object parsed by docstring_parser.
Returns:
A `Section` object (or `None` if section is empty).
"""
exceptions = []
except_obj = docstring_obj.raises
for exception in except_obj:
description = exception.description or ""
if not description:
self.error(f"No description for exception '{exception.type_name}'")
exceptions.append(AnnotatedObject(exception.type_name, description))
if exceptions:
return Section(Section.Type.EXCEPTIONS, exceptions)
if re.search("Raises\n", docstring):
self.error("Empty exceptions section")
return None
def read_return_section(
self,
docstring_obj: Docstring,
) -> Optional[Section]:
"""
Parse a "returns" section.
Arguments:
docstring_obj: Docstring object parsed by docstring_parser.
Returns:
A `Section` object (or `None` if section is empty).
"""
if docstring_obj.returns:
return_obj = docstring_obj.returns
if return_obj.description:
description = return_obj.description
else:
self.error("Empty return description")
description = ""
if self.context["signature"]:
annotation = self.context["signature"].return_annotation
else:
annotation = self.context["annotation"]
if annotation is empty and return_obj.type_name:
annotation = return_obj.type_name
if not annotation:
self.error("No return type annotation")
annotation = ""
if annotation or description:
return Section(Section.Type.RETURN, AnnotatedObject(annotation, description))
return None
def read_examples_section(
self,
docstring: str,
docstring_obj: Docstring,
) -> Optional[Section]:
"""
Parse an "examples" section.
Arguments:
docstring: The raw docstring.
docstring_obj: Docstring object parsed by docstring_parser.
Returns:
A `Section` object (or `None` if section is empty).
"""
text = next(
(
meta.description
for meta in docstring_obj.meta
if isinstance(meta, DocstringMeta) and meta.args[0] == "examples"
),
"",
)
sub_sections = []
in_code_example = False
in_code_block = False
current_text: List[str] = []
current_example: List[str] = []
if text:
for line in text.split("\n"):
if is_empty_line(line):
if in_code_example:
if current_example:
sub_sections.append((Section.Type.EXAMPLES, "\n".join(current_example)))
current_example = []
in_code_example = False
else:
current_text.append(line)
elif in_code_example:
if self.trim_doctest_flags:
line = RE_DOCTEST_FLAGS.sub("", line)
line = RE_DOCTEST_BLANKLINE.sub("", line)
current_example.append(line)
elif line.startswith("```"):
in_code_block = not in_code_block
current_text.append(line)
elif in_code_block:
current_text.append(line)
elif line.startswith(">>>"):
if current_text:
sub_sections.append((Section.Type.MARKDOWN, "\n".join(current_text)))
current_text = []
in_code_example = True
if self.trim_doctest_flags:
line = RE_DOCTEST_FLAGS.sub("", line)
current_example.append(line)
else:
current_text.append(line)
if current_text:
sub_sections.append((Section.Type.MARKDOWN, "\n".join(current_text)))
elif current_example:
sub_sections.append((Section.Type.EXAMPLES, "\n".join(current_example)))
if sub_sections:
return Section(Section.Type.EXAMPLES, sub_sections)
if re.search("Examples\n", docstring):
self.error("Empty examples section")
return None
def is_empty_line(line: str) -> bool:
"""
Tell if a line is empty.
Arguments:
line: The line to check.
Returns:
True if the line is empty or composed of blanks only, False otherwise.
"""
return not line.strip()
def none_str_cast(string: Optional[str]):
return string or ""
pytkdocs-0.16.1/src/pytkdocs/parsers/docstrings/restructured_text.py 0000664 0000000 0000000 00000046116 14211451604 0026103 0 ustar 00root root 0000000 0000000 """This module defines functions and classes to parse docstrings into structured data."""
from collections import defaultdict
from dataclasses import dataclass, field
from inspect import Signature
from typing import Any, Callable, DefaultDict, Dict, FrozenSet, List, Optional, Tuple, Type, Union, cast # noqa: WPS235
from pytkdocs.parsers.docstrings.base import AnnotatedObject, Attribute, Parameter, Parser, Section, empty
try:
from typing import TypedDict # type: ignore
except ImportError:
from typing_extensions import TypedDict # noqa: WPS440 # type: ignore
try:
from typing import Literal # type: ignore
except ImportError:
# https://github.com/python/mypy/issues/8520
from typing_extensions import Literal # type: ignore # noqa: WPS440
# TODO: Examples: from the documentation, I'm not sure there is a standard format for examples
PARAM_NAMES = frozenset(("param", "parameter", "arg", "argument", "key", "keyword"))
PARAM_TYPE_NAMES = frozenset(("type",))
ATTRIBUTE_NAMES = frozenset(("var", "ivar", "cvar"))
ATTRIBUTE_TYPE_NAMES = frozenset(("vartype",))
RETURN_NAMES = frozenset(("returns", "return"))
RETURN_TYPE_NAMES = frozenset(("rtype",))
EXCEPTION_NAMES = frozenset(("raises", "raise", "except", "exception"))
@dataclass(frozen=True)
class FieldType:
"""Maps directive names to parser functions."""
names: FrozenSet[str]
reader: Callable[[List[str], int], int]
def matches(self, line: str) -> bool:
"""
Check if a line matches the field type.
Args:
line: Line to check against
Returns:
True if the line matches the field type, False otherwise.
"""
return any(line.startswith(f":{name}") for name in self.names)
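# Illustrative check (a minimal sketch): a FieldType built from the name sets above
# matches any line that starts with one of its directives; the reader below is a
# hypothetical stand-in.
#
#     >>> param_field = FieldType(PARAM_NAMES, reader=lambda lines, index: index)
#     >>> param_field.matches(":param x: The value.")
#     True
#     >>> param_field.matches(":returns: Nothing.")
#     False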
class AttributesDict(TypedDict):
"""Attribute details."""
docstring: str
annotation: Type # TODO: Not positive this is correct
class ParseContext:
"""Typed replacement for context dictionary."""
obj: Any # I think this might be pytkdocs.Object & subclasses
attributes: DefaultDict[str, AttributesDict]
signature: Optional[Signature]
# Not sure real type yet. Maybe Optional[Union[Literal[Signature.empty],str,Type]]
annotation: Any
# This might be better as the obj & optional attributes
def __init__(self, context: Dict):
"""
Initialize the object.
Args:
context: Context of parsing operation.
"""
self.obj = context["obj"]
self.attributes = defaultdict(cast(Callable[[], AttributesDict], dict))
attributes = context.get("attributes")
if attributes is not None:
self.attributes.update(attributes)
self.signature = getattr(self.obj, "signature", None)
self.annotation = getattr(self.obj, "type", empty)
@dataclass
class ParsedDirective:
"""Directive information that has been parsed from a docstring."""
line: str
next_index: int
directive_parts: List[str]
value: str
invalid: bool = False
@dataclass
class ParsedValues:
"""Values parsed from the docstring to be used to produce sections."""
description: List[str] = field(default_factory=list)
parameters: Dict[str, Parameter] = field(default_factory=dict)
param_types: Dict[str, str] = field(default_factory=dict)
attributes: Dict[str, Attribute] = field(default_factory=dict)
attribute_types: Dict[str, str] = field(default_factory=dict)
exceptions: List[AnnotatedObject] = field(default_factory=list)
return_value: Optional[AnnotatedObject] = None
return_type: Optional[str] = None
class RestructuredText(Parser):
"""A reStructuredText docstrings parser."""
def __init__(self) -> None:
"""Initialize the object."""
super().__init__()
self._typed_context = ParseContext({"obj": None})
self._parsed_values: ParsedValues = ParsedValues()
# Ordering is significant so that directives like ":vartype" are checked before ":var"
self.field_types = [
FieldType(PARAM_TYPE_NAMES, self._read_parameter_type),
FieldType(PARAM_NAMES, self._read_parameter),
FieldType(ATTRIBUTE_TYPE_NAMES, self._read_attribute_type),
FieldType(ATTRIBUTE_NAMES, self._read_attribute),
FieldType(EXCEPTION_NAMES, self._read_exception),
FieldType(RETURN_NAMES, self._read_return),
FieldType(RETURN_TYPE_NAMES, self._read_return_type),
]
def parse_sections(self, docstring: str) -> List[Section]: # noqa: D102
self._typed_context = ParseContext(self.context)
self._parsed_values = ParsedValues()
lines = docstring.split("\n")
curr_line_index = 0
while curr_line_index < len(lines):
line = lines[curr_line_index]
for field_type in self.field_types:
if field_type.matches(line):
# https://github.com/python/mypy/issues/5485
curr_line_index = field_type.reader(lines, curr_line_index) # type: ignore
break
else:
self._parsed_values.description.append(line)
curr_line_index += 1
return self._parsed_values_to_sections()
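# Illustrative end-to-end behavior (a minimal sketch, going through `parse` so the
# context is populated):
#
#     >>> rst = RestructuredText()
#     >>> doc = "Do something.\n\n:param x: The value.\n:returns: Nothing."
#     >>> sections, errors = rst.parse(doc, {"obj": None})
#     >>> [section.type for section in sections]
#     ['markdown', 'parameters', 'return']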
def _read_parameter(self, lines: List[str], start_index: int) -> int:
"""
Parse a parameter value.
Arguments:
lines: The docstring lines.
start_index: The line number to start at.
Returns:
Index at which to continue parsing.
"""
parsed_directive = self._parse_directive(lines, start_index)
if parsed_directive.invalid:
return parsed_directive.next_index
directive_type = None
if len(parsed_directive.directive_parts) == 2:
# no type info
name = parsed_directive.directive_parts[1]
elif len(parsed_directive.directive_parts) == 3:
directive_type = parsed_directive.directive_parts[1]
name = parsed_directive.directive_parts[2]
else:
self.error(f"Failed to parse field directive from '{parsed_directive.line}'")
return parsed_directive.next_index
if name in self._parsed_values.parameters:
self.errors.append(f"Duplicate parameter entry for '{name}'")
return parsed_directive.next_index
annotation = self._determine_param_annotation(name, directive_type)
default, kind = self._determine_param_details(name)
self._parsed_values.parameters[name] = Parameter(
name=name,
annotation=annotation,
description=parsed_directive.value,
default=default,
kind=kind,
)
return parsed_directive.next_index
def _determine_param_details(self, name: str) -> Tuple[Any, Any]:
default = empty
kind = empty
if self._typed_context.signature is not None:
param_signature = self._typed_context.signature.parameters.get(name.lstrip("*"))
# an error for param_signature being none is already reported by _determine_param_annotation()
if param_signature is not None:
if param_signature.default is not empty:
default = param_signature.default
kind = param_signature.kind # type: ignore[assignment]
return default, kind
def _determine_param_annotation(self, name: str, directive_type: Optional[str]) -> Any:
# Annotation precedence:
# - signature annotation
# - in-line directive type
# - "type" directive type
# - empty
annotation = empty
parsed_param_type = self._parsed_values.param_types.get(name)
if parsed_param_type is not None:
annotation = parsed_param_type # type: ignore[assignment]
if directive_type is not None:
annotation = directive_type # type: ignore[assignment]
if directive_type is not None and parsed_param_type is not None:
self.error(f"Duplicate parameter information for '{name}'")
if self._typed_context.signature is not None:
try:
param_signature = self._typed_context.signature.parameters[name.lstrip("*")]
except KeyError:
self.error(f"No matching parameter for '{name}'")
else:
if param_signature.annotation is not empty:
annotation = param_signature.annotation
return annotation
def _read_parameter_type(self, lines: List[str], start_index: int) -> int:
"""
Parse a parameter type.
Arguments:
lines: The docstring lines.
start_index: The line number to start at.
Returns:
Index at which to continue parsing.
"""
parsed_directive = self._parse_directive(lines, start_index)
if parsed_directive.invalid:
return parsed_directive.next_index
param_type = _consolidate_descriptive_type(parsed_directive.value.strip())
if len(parsed_directive.directive_parts) == 2:
param_name = parsed_directive.directive_parts[1]
else:
self.error(f"Failed to get parameter name from '{parsed_directive.line}'")
return parsed_directive.next_index
self._parsed_values.param_types[param_name] = param_type
param = self._parsed_values.parameters.get(param_name)
if param is not None:
if param.annotation is empty:
param.annotation = param_type
else:
self.error(f"Duplicate parameter information for '{param_name}'")
return parsed_directive.next_index
def _read_attribute(self, lines: List[str], start_index: int) -> int:
"""
Parse an attribute value.
Arguments:
lines: The docstring lines.
start_index: The line number to start at.
Returns:
Index at which to continue parsing.
"""
parsed_directive = self._parse_directive(lines, start_index)
if parsed_directive.invalid:
return parsed_directive.next_index
if len(parsed_directive.directive_parts) == 2:
name = parsed_directive.directive_parts[1]
else:
self.error(f"Failed to parse field directive from '{parsed_directive.line}'")
return parsed_directive.next_index
annotation = empty
# Annotation precedence:
# - external context type TODO: spend time understanding where this comes from
# - "vartype" directive type
# - empty
parsed_attribute_type = self._parsed_values.attribute_types.get(name)
if parsed_attribute_type is not None:
annotation = parsed_attribute_type # type: ignore[assignment]
context_attribute_annotation = self._typed_context.attributes[name].get("annotation")
if context_attribute_annotation is not None:
annotation = context_attribute_annotation
if name in self._parsed_values.attributes:
self.errors.append(f"Duplicate attribute entry for '{name}'")
else:
self._parsed_values.attributes[name] = Attribute(
name=name,
annotation=annotation,
description=parsed_directive.value,
)
return parsed_directive.next_index
def _read_attribute_type(self, lines: List[str], start_index: int) -> int:
"""
Parse an attribute type.
Arguments:
lines: The docstring lines.
start_index: The line number to start at.
Returns:
Index at which to continue parsing.
"""
parsed_directive = self._parse_directive(lines, start_index)
if parsed_directive.invalid:
return parsed_directive.next_index
attribute_type = _consolidate_descriptive_type(parsed_directive.value.strip())
if len(parsed_directive.directive_parts) == 2:
attribute_name = parsed_directive.directive_parts[1]
else:
self.error(f"Failed to get attribute name from '{parsed_directive.line}'")
return parsed_directive.next_index
self._parsed_values.attribute_types[attribute_name] = attribute_type
attribute = self._parsed_values.attributes.get(attribute_name)
if attribute is not None:
if attribute.annotation is empty:
attribute.annotation = attribute_type
else:
self.error(f"Duplicate attribute information for '{attribute_name}'")
return parsed_directive.next_index
def _read_exception(self, lines: List[str], start_index: int) -> int:
"""
Parse an exception value.
Arguments:
lines: The docstring lines.
start_index: The line number to start at.
Returns:
Index at which to continue parsing.
"""
parsed_directive = self._parse_directive(lines, start_index)
if parsed_directive.invalid:
return parsed_directive.next_index
if len(parsed_directive.directive_parts) == 2:
ex_type = parsed_directive.directive_parts[1]
self._parsed_values.exceptions.append(AnnotatedObject(ex_type, parsed_directive.value))
else:
self.error(f"Failed to parse exception directive from '{parsed_directive.line}'")
return parsed_directive.next_index
def _read_return(self, lines: List[str], start_index: int) -> int:
"""
Parse a return value.
Arguments:
lines: The docstring lines.
start_index: The line number to start at.
Returns:
Index at which to continue parsing.
"""
parsed_directive = self._parse_directive(lines, start_index)
if parsed_directive.invalid:
return parsed_directive.next_index
annotation = empty
# Annotation precedence:
# - signature annotation
# - "rtype" directive type
# - external context type TODO: spend time understanding where this comes from
# - empty
if self._typed_context.signature is not None and self._typed_context.signature.return_annotation is not empty:
annotation = self._typed_context.signature.return_annotation
elif self._parsed_values.return_type is not None:
annotation = self._parsed_values.return_type # type: ignore[assignment]
else:
annotation = self._typed_context.annotation
self._parsed_values.return_value = AnnotatedObject(annotation, parsed_directive.value)
return parsed_directive.next_index
def _read_return_type(self, lines: List[str], start_index: int) -> int:
"""
Parse a return type value.
Arguments:
lines: The docstring lines.
start_index: The line number to start at.
Returns:
Index at which to continue parsing.
"""
parsed_directive = self._parse_directive(lines, start_index)
if parsed_directive.invalid:
return parsed_directive.next_index
return_type = _consolidate_descriptive_type(parsed_directive.value.strip())
self._parsed_values.return_type = return_type
return_value = self._parsed_values.return_value
if return_value is not None:
if return_value.annotation is empty:
return_value.annotation = return_type
else:
self.error("Duplicate type information for return")
return parsed_directive.next_index
def _parsed_values_to_sections(self) -> List[Section]:
markdown_text = "\n".join(_strip_blank_lines(self._parsed_values.description))
result = [Section(Section.Type.MARKDOWN, markdown_text)]
if self._parsed_values.parameters:
param_values = list(self._parsed_values.parameters.values())
result.append(Section(Section.Type.PARAMETERS, param_values))
if self._parsed_values.attributes:
attribute_values = list(self._parsed_values.attributes.values())
result.append(Section(Section.Type.ATTRIBUTES, attribute_values))
if self._parsed_values.return_value is not None:
result.append(Section(Section.Type.RETURN, self._parsed_values.return_value))
if self._parsed_values.exceptions:
result.append(Section(Section.Type.EXCEPTIONS, self._parsed_values.exceptions))
return result
def _parse_directive(self, lines: List[str], start_index: int) -> ParsedDirective:
line, next_index = _consolidate_continuation_lines(lines, start_index)
try:
_, directive, value = line.split(":", 2)
except ValueError:
self.error(f"Failed to get ':directive: value' pair from '{line}'")
return ParsedDirective(line, next_index, [], "", invalid=True)
value = value.strip()
return ParsedDirective(line, next_index, directive.split(" "), value)
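# Illustrative example (not part of the original source): for a consolidated
# line such as ":param str name: The name.", line.split(":", 2) yields
# ["", "param str name", " The name."], so the resulting ParsedDirective has
# directive_parts == ["param", "str", "name"] and value == "The name.".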
def _consolidate_continuation_lines(lines: List[str], start_index: int) -> Tuple[str, int]:
"""
Convert a docstring field into a single line if a line continuation exists.
Arguments:
lines: The docstring lines.
start_index: The line number to start at.
Returns:
A tuple containing the continued lines as a single string and the index at which to continue parsing.
"""
curr_line_index = start_index
block = [lines[curr_line_index].lstrip()]
# start processing after first item
curr_line_index += 1
while curr_line_index < len(lines) and not lines[curr_line_index].startswith(":"):
block.append(lines[curr_line_index].lstrip())
curr_line_index += 1
return " ".join(block).rstrip("\n"), curr_line_index - 1
def _consolidate_descriptive_type(descriptive_type: str) -> str:
"""
Convert type descriptions using "or" into the corresponding type signature.
"x or None" or "None or x" -> "Optional[x]"
"x or y" or "x or y or z [...]" -> "Union[x, y, ...]"
Args:
descriptive_type: Descriptions of an item's type.
Returns:
Type signature for descriptive type.
"""
types = descriptive_type.split("or")
if len(types) == 1:
return descriptive_type
types = [pt.strip() for pt in types]
if len(types) == 2:
if types[0] == "None":
return f"Optional[{types[1]}]"
if types[1] == "None":
return f"Optional[{types[0]}]"
return f"Union[{','.join(types)}]"
def _strip_blank_lines(lines: List[str]) -> List[str]:
"""
Remove lines with no text or only whitespace characters from the start and end of the list.
Args:
lines: Lines to be stripped.
Returns:
A list with the same contents, with any blank lines at the start or end removed.
"""
if not lines:
return lines
# remove blank lines from the start and end
content_found = False
initial_content = 0
final_content = 0
for index, line in enumerate(lines):
if line == "" or line.isspace():
if not content_found:
initial_content += 1
else:
content_found = True
final_content = index
return lines[initial_content : final_content + 1]
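# Illustrative example (not part of the original source):
#     _strip_blank_lines(["", "first", "", "last", "  "]) -> ["first", "", "last"]
# Blank lines inside the content are preserved; only leading and trailing ones are removed.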
pytkdocs-0.16.1/src/pytkdocs/properties.py 0000664 0000000 0000000 00000002540 14211451604 0020633 0 ustar 00root root 0000000 0000000 """This module simply defines regular expressions and their associated predicates."""
import re
from typing import Callable, Pattern, Tuple
ApplicableNameProperty = Tuple[str, Callable[[str], bool]]
# exactly two leading underscores, exactly two trailing underscores
# since we enforce one non-underscore after the two leading underscores,
# we put the rest in an optional group
RE_SPECIAL: Pattern = re.compile(r"^__[^_]([\w_]*[^_])?__$")
"""Regular expression to match `__special__` names."""
# at least two leading underscores, at most one trailing underscore
# since we enforce one non-underscore before the last,
# we make the previous characters optional with an asterisk
RE_CLASS_PRIVATE: Pattern = re.compile(r"^__[\w_]*[^_]_?$")
"""Regular expression to match `__class_private` names."""
# at most one leading underscore, then whatever
RE_PRIVATE: Pattern = re.compile(r"^_[^_][\w_]*$")
"""Regular expression to match `_private` names."""
NAME_SPECIAL: ApplicableNameProperty = ("special", lambda name: bool(RE_SPECIAL.match(name)))
"""Applicable property: `special`."""
NAME_CLASS_PRIVATE: ApplicableNameProperty = ("class-private", lambda name: bool(RE_CLASS_PRIVATE.match(name)))
"""Applicable property: `class-private`."""
NAME_PRIVATE: ApplicableNameProperty = ("private", lambda name: bool(RE_PRIVATE.match(name)))
"""Applicable property: `private`."""
pytkdocs-0.16.1/src/pytkdocs/py.typed 0000664 0000000 0000000 00000000000 14211451604 0017551 0 ustar 00root root 0000000 0000000 pytkdocs-0.16.1/src/pytkdocs/serializer.py 0000664 0000000 0000000 00000020227 14211451604 0020612 0 ustar 00root root 0000000 0000000 """
This module defines functions to serialize objects.
These functions simply take objects as parameters and return dictionaries that can be dumped by `json.dumps`.
"""
import inspect
import re
from typing import Any, Match, Optional, Pattern
from pytkdocs.objects import Object, Source
from pytkdocs.parsers.docstrings.base import AnnotatedObject, Attribute, Parameter, Section
try:
from typing import GenericMeta # type: ignore
except ImportError:
# in 3.7, GenericMeta doesn't exist but we don't need it
class GenericMeta(type): # type: ignore # noqa: WPS440 (variable overlap)
"""GenericMeta type."""
RE_OPTIONAL: Pattern = re.compile(r"Union\[(.+), NoneType\]")
"""Regular expression to match optional annotations of the form `Union[T, NoneType]`."""
RE_FORWARD_REF: Pattern = re.compile(r"_?ForwardRef\('([^']+)'\)")
"""Regular expression to match forward-reference annotations of the form `_ForwardRef('T')`."""
def rebuild_optional(match: Match) -> str:
"""
Rebuild `Union[T, None]` as `Optional[T]`.
Arguments:
match: The match object when matching against a regular expression (by the parent caller).
Returns:
The rebuilt type string.
"""
group = match.group(1)
brackets_level = 0
for char in group:
if char == "," and brackets_level == 0:
return f"Union[{group}]"
if char == "[":
brackets_level += 1
elif char == "]":
brackets_level -= 1
return f"Optional[{group}]"
def annotation_to_string(annotation: Any) -> str:
"""
Return an annotation as a string.
Arguments:
annotation: The annotation to return as a string.
Returns:
The annotation as a string.
"""
if annotation is inspect.Signature.empty:
return ""
if inspect.isclass(annotation) and not isinstance(annotation, GenericMeta):
string = annotation.__name__
else:
string = str(annotation).replace("typing.", "")
string = RE_FORWARD_REF.sub(lambda match: match.group(1), string)
string = RE_OPTIONAL.sub(rebuild_optional, string)
return string # noqa: WPS331 (false-positive, string is not only used for the return)
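# Illustrative results (not part of the original source):
#     annotation_to_string(int)                     -> "int"
#     annotation_to_string(Optional[int])           -> "Optional[int]"
#     annotation_to_string(inspect.Signature.empty) -> ""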
def serialize_annotated_object(obj: AnnotatedObject) -> dict:
"""
Serialize an instance of [`AnnotatedObject`][pytkdocs.parsers.docstrings.base.AnnotatedObject].
Arguments:
obj: The object to serialize.
Returns:
A JSON-serializable dictionary.
"""
return {"description": obj.description, "annotation": annotation_to_string(obj.annotation)}
def serialize_attribute(attribute: Attribute) -> dict:
"""
Serialize an instance of [`Attribute`][pytkdocs.parsers.docstrings.base.Attribute].
Arguments:
attribute: The attribute to serialize.
Returns:
A JSON-serializable dictionary.
"""
return {
"name": attribute.name,
"description": attribute.description,
"annotation": annotation_to_string(attribute.annotation),
}
def serialize_parameter(parameter: Parameter) -> dict:
"""
Serialize an instance of [`Parameter`][pytkdocs.parsers.docstrings.base.Parameter].
Arguments:
parameter: The parameter to serialize.
Returns:
A JSON-serializable dictionary.
"""
serialized = serialize_annotated_object(parameter)
serialized.update(
{
"name": parameter.name,
"kind": str(parameter.kind),
"default": parameter.default_string,
"is_optional": parameter.is_optional,
"is_required": parameter.is_required,
"is_args": parameter.is_args,
"is_kwargs": parameter.is_kwargs,
},
)
return serialized
def serialize_signature_parameter(parameter: inspect.Parameter) -> dict:
"""
Serialize an instance of `inspect.Parameter`.
Arguments:
parameter: The parameter to serialize.
Returns:
A JSON-serializable dictionary.
"""
serialized = {"kind": str(parameter.kind), "name": parameter.name}
if parameter.annotation is not parameter.empty:
serialized["annotation"] = annotation_to_string(parameter.annotation)
if parameter.default is not parameter.empty:
serialized["default"] = repr(parameter.default)
return serialized
def serialize_signature(signature: inspect.Signature) -> dict:
"""
Serialize an instance of `inspect.Signature`.
Arguments:
signature: The signature to serialize.
Returns:
A JSON-serializable dictionary.
"""
if signature is None:
return {}
serialized: dict = {
"parameters": [serialize_signature_parameter(value) for name, value in signature.parameters.items()],
}
if signature.return_annotation is not inspect.Signature.empty:
serialized["return_annotation"] = annotation_to_string(signature.return_annotation)
return serialized
def serialize_docstring_section(section: Section) -> dict: # noqa: WPS231 (not complex)
"""
Serialize an instance of [`Section`][pytkdocs.parsers.docstrings.base.Section].
Arguments:
section: The section to serialize.
Returns:
A JSON-serializable dictionary.
"""
serialized = {"type": section.type}
if section.type == section.Type.MARKDOWN:
serialized.update({"value": section.value})
elif section.type == section.Type.RETURN:
serialized.update({"value": serialize_annotated_object(section.value)}) # type: ignore
elif section.type == section.Type.YIELD:
serialized.update({"value": serialize_annotated_object(section.value)}) # type: ignore
elif section.type == section.Type.EXCEPTIONS:
serialized.update({"value": [serialize_annotated_object(exc) for exc in section.value]}) # type: ignore
elif section.type == section.Type.PARAMETERS:
serialized.update({"value": [serialize_parameter(param) for param in section.value]}) # type: ignore
elif section.type == section.Type.KEYWORD_ARGS:
serialized.update({"value": [serialize_parameter(param) for param in section.value]}) # type: ignore
elif section.type == section.Type.ATTRIBUTES:
serialized.update({"value": [serialize_attribute(attr) for attr in section.value]}) # type: ignore
elif section.type == section.Type.EXAMPLES:
serialized.update({"value": section.value})
return serialized
def serialize_source(source: Optional[Source]) -> dict:
"""
Serialize an instance of [`Source`][pytkdocs.objects.Source].
Arguments:
source: The source to serialize.
Returns:
A JSON-serializable dictionary.
"""
if source:
return {"code": source.code, "line_start": source.line_start}
return {}
def serialize_object(obj: Object) -> dict:
"""
Serialize an instance of a subclass of [`Object`][pytkdocs.objects.Object].
Arguments:
obj: The object to serialize.
Returns:
A JSON-serializable dictionary.
"""
serialized = {
"name": obj.name,
"path": obj.path,
"category": obj.category,
"file_path": obj.file_path,
"relative_file_path": obj.relative_file_path,
"properties": sorted(set(obj.properties + obj.name_properties)),
"parent_path": obj.parent_path,
"has_contents": obj.has_contents(),
"docstring": obj.docstring,
"docstring_sections": [serialize_docstring_section(sec) for sec in obj.docstring_sections],
"source": serialize_source(obj.source),
"children": {child.path: serialize_object(child) for child in obj.children},
"attributes": [attr.path for attr in obj.attributes],
"methods": [meth.path for meth in obj.methods],
"functions": [func.path for func in obj.functions],
"modules": [mod.path for mod in obj.modules],
"classes": [clas.path for clas in obj.classes],
}
if hasattr(obj, "type"): # noqa: WPS421 (hasattr)
serialized["type"] = annotation_to_string(obj.type) # type: ignore
if hasattr(obj, "signature"): # noqa: WPS421 (hasattr)
serialized["signature"] = serialize_signature(obj.signature) # type: ignore
if hasattr(obj, "bases"): # noqa: WPS421 (hasattr)
serialized["bases"] = obj.bases # type: ignore
return serialized
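# Minimal usage sketch (not part of the original source, assuming the Loader API
# exposed by pytkdocs.loader):
#
#     import json
#     from pytkdocs.loader import Loader
#     from pytkdocs.serializer import serialize_object
#
#     obj = Loader().get_object_documentation("pytkdocs.serializer")
#     print(json.dumps(serialize_object(obj), indent=2))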
pytkdocs-0.16.1/tests/ 0000775 0000000 0000000 00000000000 14211451604 0014577 5 ustar 00root root 0000000 0000000 pytkdocs-0.16.1/tests/__init__.py 0000664 0000000 0000000 00000000520 14211451604 0016705 0 ustar 00root root 0000000 0000000 """In this module we simply define some path constants."""
from pathlib import Path
TESTS_DIR: Path = Path(__file__).parent
"""The tests directory path object."""
TMP_DIR: Path = TESTS_DIR / "tmp"
"""The tests/tmp directory path object."""
FIXTURES_DIR: Path = TESTS_DIR / "fixtures"
"""The tests/fixtures directory path object."""
pytkdocs-0.16.1/tests/conftest.py 0000664 0000000 0000000 00000000057 14211451604 0017000 0 ustar 00root root 0000000 0000000 """Configuration for the pytest test suite."""
pytkdocs-0.16.1/tests/fixtures/ 0000775 0000000 0000000 00000000000 14211451604 0016450 5 ustar 00root root 0000000 0000000 pytkdocs-0.16.1/tests/fixtures/__init__.py 0000664 0000000 0000000 00000000000 14211451604 0020547 0 ustar 00root root 0000000 0000000 pytkdocs-0.16.1/tests/fixtures/asyncio.py 0000664 0000000 0000000 00000000227 14211451604 0020470 0 ustar 00root root 0000000 0000000 class ClassContainingCoroutineMethod:
async def coroutine_method(self) -> None:
return
async def coroutine_function() -> None:
return
pytkdocs-0.16.1/tests/fixtures/cached_properties.py 0000664 0000000 0000000 00000000270 14211451604 0022504 0 ustar 00root root 0000000 0000000 try:
from functools import cached_property
except ImportError:
from cached_property import cached_property
class C:
@cached_property
def aaa(self):
"""aaa"""
pytkdocs-0.16.1/tests/fixtures/corrupt_output.py 0000664 0000000 0000000 00000000206 14211451604 0022136 0 ustar 00root root 0000000 0000000 """This module would corrupt the JSON output if we didn't discard what it's printing to stdout."""
print("*corruption intensifies*")
pytkdocs-0.16.1/tests/fixtures/dataclass.py 0000664 0000000 0000000 00000000375 14211451604 0020766 0 ustar 00root root 0000000 0000000 from dataclasses import dataclass
@dataclass
class Person:
"""Simple dataclass for a person's information"""
name: str
age: int = 2
"""Field description."""
@dataclass
class Empty:
"""A dataclass without any fields"""
pass
pytkdocs-0.16.1/tests/fixtures/decorated_function.py 0000664 0000000 0000000 00000000347 14211451604 0022665 0 ustar 00root root 0000000 0000000 from functools import lru_cache
@lru_cache()
def add(a, b):
return a + b
# control
def sub(a, b):
return a - b
# simulating a decorator that does not set __module__ properly
# on the wrapper object
del add.__module__
pytkdocs-0.16.1/tests/fixtures/django.py 0000664 0000000 0000000 00000001072 14211451604 0020264 0 ustar 00root root 0000000 0000000 from django import setup
from django.conf import settings
from django.db import models
settings.configure()
setup()
class Person(models.Model):
"""Simple Django Model for a person's information"""
name = models.CharField(verbose_name='Name')
age = models.IntegerField(verbose_name='Age')
parent = models.ForeignKey(verbose_name='Parent', to='Child', on_delete=models.CASCADE)
class Meta:
app_label = 'django'
class Child(models.Model):
name: str = models.CharField(verbose_name='Name')
class Meta:
app_label = 'django'
pytkdocs-0.16.1/tests/fixtures/docstring_attributes_section.py 0000664 0000000 0000000 00000000306 14211451604 0025007 0 ustar 00root root 0000000 0000000 """
Let's describe some attributes.
Attributes:
A: Alpha.
B (bytes): Beta.
C: Gamma.
D: Delta.
E (float): Epsilon.
"""
A: int = 0
B: str = "ŧ"
C: bool = True
D = 3.0
E = None
pytkdocs-0.16.1/tests/fixtures/dynamic_members.py 0000664 0000000 0000000 00000000426 14211451604 0022162 0 ustar 00root root 0000000 0000000 # See issue 65: https://github.com/pawamoy/mkdocstrings/issues/65
def method1():
pass
def method2():
pass
METHODS = {
"method1": method1,
"method2": method2,
}
class Class:
pass
for name, method in METHODS.items():
setattr(Class, name, method)
pytkdocs-0.16.1/tests/fixtures/first_line_class_docstring.py 0000664 0000000 0000000 00000000154 14211451604 0024421 0 ustar 00root root 0000000 0000000 class TheClass:
"""The first line of the docstring.
A bit more of the docstring.
"""
pass
pytkdocs-0.16.1/tests/fixtures/inherited_members.py 0000664 0000000 0000000 00000000554 14211451604 0022513 0 ustar 00root root 0000000 0000000 from pydantic import BaseModel as PydanticModel
class Base:
V1 = "v1"
"""Variable 1."""
def method1(self):
"""Method 1."""
pass
class Child(Base):
V2 = "v2"
"""Variable 2."""
def method2(self):
"""Method 2."""
pass
class BaseModel(PydanticModel):
a: int
class ChildModel(BaseModel):
b: str
pytkdocs-0.16.1/tests/fixtures/inherited_properties.py 0000664 0000000 0000000 00000000743 14211451604 0023255 0 ustar 00root root 0000000 0000000 class SuperClass:
@property
def read_only(self):
"""SuperClass.read_only docs"""
return 0
@property
def mutable(self):
"""SuperClass.mutable getter docs"""
return 0
@mutable.setter
def mutable(self, value):
pass
class SubClass(SuperClass):
@property
def read_only(self):
return 1
@property
def mutable(self):
return 1
@mutable.setter
def mutable(self, value):
pass
pytkdocs-0.16.1/tests/fixtures/inheriting_enum_Enum.py 0000664 0000000 0000000 00000001762 14211451604 0023200 0 ustar 00root root 0000000 0000000 """
While recursing on a class inheriting from `enum.Enum`, the class' `__dict__` returns private items
whose values are the `object` builtin for example.
The `object` builtin is a class, so `pytkdocs` is trying to recurse on it,
and tries to get its file path by first getting its module.
The `object` class' module is `builtins`, which does not have a `__file__` attribute,
so trying to access it generates an `AttributeError` error.
Instead of failing, we simply catch the error and set `file_path = ""`.
References:
- Test case: [tests.test_loader.test_inheriting_enum_Enum][].
- Issue reported on commit [5053f81](https://github.com/pawamoy/mkdocstrings/commit/5053f8142913f01358481e4801e5222d88482c35).
- Fixed by commit [48df6bc](https://github.com/pawamoy/pytkdocs/commit/48df6bc9cf878f3ce281fac6ccaf8fe1d4e89c84).
- See other "inheriting" test cases.
"""
import enum
class MyEnum(enum.Enum):
"""My custom enumeration docstring."""
A = 0
"""Item A."""
B = 1
"""Item B."""
pytkdocs-0.16.1/tests/fixtures/inheriting_typing_NamedTuple.py 0000664 0000000 0000000 00000001557 14211451604 0024702 0 ustar 00root root 0000000 0000000 """
While recursing on a class inheriting from `typing.NamedTuple`, the class' `__dict__` returns items
that are identified as properties.
When trying to get the signature of these properties' `fget` methods,
`inspect` raises a `ValueError: callable operator.itemgetter(0) is not supported by signature` error.
Instead of failing, we simply set the signature to `None`.
References:
- Test case: [tests.test_loader.test_inheriting_typing_NamedTuple][].
- Issue reported at [pawamoy/pytkdocs#15](https://github.com/pawamoy/pytkdocs/issues/15).
- Fixed by commit [67cee40](https://github.com/pawamoy/pytkdocs/commit/67cee406baccc8789566ed72ee040728c934f49d).
- See other "inheriting" test cases.
"""
from typing import NamedTuple
class MyNamedTupleType(NamedTuple):
"""My custom named tuple type docstring."""
aaa: float
bbb: float
ccc: float
ddd: float
pytkdocs-0.16.1/tests/fixtures/marshmallow.py 0000664 0000000 0000000 00000000455 14211451604 0021354 0 ustar 00root root 0000000 0000000 from marshmallow import Schema, fields
class Person(Schema):
"""Simple Marshmallow Model for a person's information"""
name: fields.Str = fields.Str(description="The person's name", required=True)
age: fields.Int = fields.Int(description="The person's age which must be at minimum 18")
pytkdocs-0.16.1/tests/fixtures/method_descriptor.py 0000664 0000000 0000000 00000000172 14211451604 0022540 0 ustar 00root root 0000000 0000000 """See https://docs.python.org/3/library/inspect.html#inspect.ismethoddescriptor for details"""
descriptor = int.__add__
pytkdocs-0.16.1/tests/fixtures/nested_class.py 0000664 0000000 0000000 00000000131 14211451604 0021464 0 ustar 00root root 0000000 0000000 class Main:
"""Main docstring."""
class Nested:
"""Nested docstring."""
pytkdocs-0.16.1/tests/fixtures/no_contents/ 0000775 0000000 0000000 00000000000 14211451604 0021001 5 ustar 00root root 0000000 0000000 pytkdocs-0.16.1/tests/fixtures/no_contents/__init__.py 0000664 0000000 0000000 00000000000 14211451604 0023100 0 ustar 00root root 0000000 0000000 pytkdocs-0.16.1/tests/fixtures/no_contents/module.py 0000664 0000000 0000000 00000000033 14211451604 0022634 0 ustar 00root root 0000000 0000000 class EmptyClass:
pass
pytkdocs-0.16.1/tests/fixtures/parsing/ 0000775 0000000 0000000 00000000000 14211451604 0020113 5 ustar 00root root 0000000 0000000 pytkdocs-0.16.1/tests/fixtures/parsing/__init__.py 0000664 0000000 0000000 00000000000 14211451604 0022212 0 ustar 00root root 0000000 0000000 pytkdocs-0.16.1/tests/fixtures/parsing/annotations.txt 0000664 0000000 0000000 00000032166 14211451604 0023221 0 ustar 00root root 0000000 0000000 a: "a['a', 'a', 'a', 'a']"
a: "a['a', 'a', 'a']"
a: "a['a', 'a']"
a: "a['a']"
a: 'a *'
a: 'a a *'
a: 'a a'
a: 'a'
a: 'a.a'
a: 'a.a.a'
a: 'a.a[a, a.a[a]]'
a: 'a.a[a.a[a, a.a, a], a.a[a, a.a, a, a, a], ...]'
a: 'a.a[a.a[a], a.a[a.a[a]]]'
a: 'a.a[a.a[a]]'
a: 'a.a[a[a[a, a]]]'
a: 'a.a[a]'
a: 'a[[], a]'
a: 'a[[a, a[a], a], a[a, a]]'
a: 'a[[a, a], a]'
a: 'a[a, a, a[a]]'
a: 'a[a, a, a]'
a: 'a[a, a.a[a[a, a]]]'
a: 'a[a, a[[a, a], a]]'
a: 'a[a, a[a, a[a.a]]]'
a: 'a[a, a[a.a, a[a, a]]]'
a: 'a[a, a[a]]'
a: 'a[a, a]'
a: 'a[a.a, a]'
a: 'a[a.a.a]'
a: 'a[a.a[a], a[a]]'
a: 'a[a.a]'
a: 'a[a[a, a[a]]]'
a: 'a[a[a, a]]'
a: 'a[a[a[a], a[a[a], ...]]]'
a: 'a[a[a], a, a.a]'
a: 'a[a[a], a.a.a[a]]'
a: 'a[a[a], a[a[a], ...]]'
a: 'a[a[a]]'
a: 'a[a]'
a: a
a: a.a
a: a.a.a
a: a.a.a.a
a: a.a.a[a]
a: a.a['a', 'a']
a: a.a['a', a, a]
a: a.a['a', a.a, a.a[a.a[a.a, a.a]], a, a]
a: a.a['a', a.a, a.a[a.a[a.a, a.a]]]
a: a.a['a', a.a[a, a.a[a, a]], a.a[a.a[a, a.a[a, a]]]]
a: a.a['a', a.a]
a: a.a['a', a]
a: a.a['a']
a: a.a['a.a', 'a.a']
a: a.a['a[a]']
a: a.a[..., a]
a: a.a[['a'], a.a]
a: a.a[[], a.a]
a: a.a[[], a]
a: a.a[[a, a], a]
a: a.a[[a.a, a.a], a]
a: a.a[[a.a], a.a]
a: a.a[[a.a], a]
a: a.a[[a], a]
a: a.a[a, 'a']
a: a.a[a, ...]
a: a.a[a, a, 'a.a[a]', a, 'a.a[a]']
a: a.a[a, a, a, a, a, a, a, a]
a: a.a[a, a, a, a.a[[], a]]
a: a.a[a, a, a, a.a[a]]
a: a.a[a, a, a, a]
a: a.a[a, a, a.a[a], a]
a: a.a[a, a, a.a[a]]
a: a.a[a, a, a.a]
a: a.a[a, a, a]
a: a.a[a, a.a, a.a[a]]
a: a.a[a, a.a, a.a]
a: a.a[a, a.a, a]
a: a.a[a, a.a['a', a.a['a']]]
a: a.a[a, a.a['a']]
a: a.a[a, a.a[[a.a], a.a]]
a: a.a[a, a.a[a, 'a']]
a: a.a[a, a.a[a, a.a[a]]]
a: a.a[a, a.a[a, a]]
a: a.a[a, a.a[a.a, a]]
a: a.a[a, a.a[a.a[a, a]]]
a: a.a[a, a.a[a.a]]
a: a.a[a, a.a[a], a]
a: a.a[a, a.a[a]]
a: a.a[a, a.a]
a: a.a[a, a]
a: a.a[a.a, a.a, a.a, a.a]
a: a.a[a.a, a.a, a]
a: a.a[a.a, a.a[a.a, ...]]
a: a.a[a.a, a.a[a.a]]
a: a.a[a.a, a.a]
a: a.a[a.a, a]
a: a.a[a.a['a']]
a: a.a[a.a[[], 'a']]
a: a.a[a.a[[], a.a[a, a]]]
a: a.a[a.a[[], a.a]]
a: a.a[a.a[[a.a, a, a.a], a.a]]
a: a.a[a.a[[a], a.a[a, a]]]
a: a.a[a.a[a, 'a']]
a: a.a[a.a[a, 'a[a]']]
a: a.a[a.a[a, a, a, a]]
a: a.a[a.a[a, a, a]]
a: a.a[a.a[a, a.a, a, a, a, a, a]]
a: a.a[a.a[a, a.a[a, a, a, a.a[a]]]]
a: a.a[a.a[a, a.a[a, a.a[a.a]]]]
a: a.a[a.a[a, a.a[a, a.a[a]]]]
a: a.a[a.a[a, a.a[a], a]]
a: a.a[a.a[a, a.a[a]], a.a]
a: a.a[a.a[a, a.a[a]]]
a: a.a[a.a[a, a.a], a.a[a.a[a, a.a]]]
a: a.a[a.a[a, a.a]]
a: a.a[a.a[a, a]]
a: a.a[a.a[a.a, a.a[a.a]]]
a: a.a[a.a[a.a, a.a]]
a: a.a[a.a[a.a, a]]
a: a.a[a.a[a.a[[a.a], a.a], a.a[[a.a, a.a], a.a]]]
a: a.a[a.a[a.a[[a.a], a.a], a.a[a.a[[a.a], a.a]]]]
a: a.a[a.a[a.a[a, 'a']]]
a: a.a[a.a[a.a[a, a.a], a.a[a.a[a, a.a]]]]
a: a.a[a.a[a.a[a, a]], a, a]
a: a.a[a.a[a.a[a.a, a.a[a.a]]]]
a: a.a[a.a[a.a[a.a[a, a.a]], a.a[a, a.a]]]
a: a.a[a.a[a.a[a], a.a[a]]]
a: a.a[a.a[a.a]]
a: a.a[a.a[a], 'a[a]']
a: a.a[a.a[a], a.a[a], a.a[a]]
a: a.a[a.a[a], a.a[a]]
a: a.a[a.a[a]]
a: a.a[a.a]
a: a.a[a[a, a]]
a: a.a[a]
a: a["a['a', 'a', 'a', 'a', 'a']"]
a: a["a['a', 'a', 'a', 'a']"]
a: a["a['a', 'a', 'a']"]
a: a['"', "'", '"""', "'''"]
a: a['', '*', '**']
a: a['a', 'a', 'a', 'a', 'a']
a: a['a', 'a', 'a']
a: a['a', 'a']
a: a['a', a, a, a]
a: a['a', a, a]
a: a['a', a['a', a]]
a: a['a', a[a, a]]
a: a['a', a[a['a', 'a']]]
a: a['a', a[a], a]
a: a['a', a]
a: a['a']
a: a['a.a', 'a.a']
a: a['a.a']
a: a['a.a.a', 'a.a.a', 'a.a.a', 'a.a.a']
a: a['a.a.a']
a: a['a.a[a]', a, a]
a: a['a.a[a]']
a: a['a["a", "a"]']
a: a['a[a[a]]']
a: a['a[a]', 'a', a]
a: a['a[a]', 'a[a]']
a: a['a[a]', ...]
a: a['a[a]', a, a]
a: a['a[a]', a['a[a]', ...]]
a: a['a[a]']
a: a[..., 'a[a, a, a]']
a: a[..., a[a, 'a']]
a: a[..., a[a.a, a.a]]
a: a[..., a[a[a]]]
a: a[..., a[a]]
a: a[..., a]
a: a[['a', a[a, a], a, a], 'a']
a: a[['a', a], 'a']
a: a[['a'], a[a]]
a: a[['a'], a]
a: a[['a.a[a]'], a]
a: a[['a[a[a, a, a]]'], a]
a: a[['a[a]'], a]
a: a[[], a.a]
a: a[[], a[a[a, a]]]
a: a[[], a[a]]
a: a[[], a]
a: a[[a, 'a'], a[a[a, a]]]
a: a[[a, a, a, a], a]
a: a[[a, a, a[a]], a]
a: a[[a, a, a], a]
a: a[[a, a], a[a]]
a: a[[a, a], a]
a: a[[a.a, a.a], a]
a: a[[a.a, a[a, a[a.a, a[a.a]]]], a[a, a, a.a]]
a: a[[a.a, a], a[a, 'a[a]']]
a: a[[a.a, a], a]
a: a[[a.a], a.a]
a: a[[a.a], a[a.a]]
a: a[[a.a], a]
a: a[[a[[a], a], a[[a], a]], a]
a: a[[a[[a], a]], 'a[a]']
a: a[[a[a, a, a]], a]
a: a[[a[a[a]], a], a]
a: a[[a[a], a], a]
a: a[[a[a]], a[a, a]]
a: a[[a[a]], a[a]]
a: a[[a[a]], a]
a: a[[a], 'a[a]']
a: a[[a], a.a]
a: a[[a], a[a]]
a: a[[a], a]
a: a[a, "a['a', 'a']"]
a: a[a, 'a', a[[], 'a']]
a: a[a, 'a', a[a, a]]
a: a[a, 'a']
a: a[a, 'a.a[a]', a]
a: a[a, 'a.a[a]']
a: a[a, 'a[a]']
a: a[a, ...]
a: a[a, a, 'a']
a: a[a, a, a, a, a, a]
a: a[a, a, a, a, a]
a: a[a, a, a, a.a, a.a]
a: a[a, a, a, a.a]
a: a[a, a, a, a['a[a[a]]', a[a], a[a]]]
a: a[a, a, a, a[a]]
a: a[a, a, a, a]
a: a[a, a, a.a, a.a]
a: a[a, a, a.a, a[[a.a, a[a, a[a.a, a[a.a]]]], a[a, a, a.a]]]
a: a[a, a, a.a[a]]
a: a[a, a, a.a]
a: a[a, a, a[[], a]]
a: a[a, a, a[a, ...]]
a: a[a, a, a[a, a[a, a[a, a]]]]
a: a[a, a, a[a, a]]
a: a[a, a, a[a[[a], a]], a, a]
a: a[a, a, a[a], a[a]]
a: a[a, a, a[a], a]
a: a[a, a, a[a]]
a: a[a, a, a]
a: a[a, a.a, a, a[a]]
a: a[a, a.a, a.a]
a: a[a, a.a.a, a]
a: a[a, a.a.a]
a: a[a, a.a[a.a[a]]]
a: a[a, a.a[a.a]]
a: a[a, a.a[a]]
a: a[a, a.a]
a: a[a, a['a', 'a']]
a: a[a, a['a']]
a: a[a, a['a[a]']]
a: a[a, a[..., a]]
a: a[a, a[[], a[a, a]], a]
a: a[a, a[[], a]]
a: a[a, a[[a, a[a]], a]]
a: a[a, a[[a[a[a]]], a]]
a: a[a, a[[a[a]], a]]
a: a[a, a[[a], a]]
a: a[a, a[a, 'a[a.a]']]
a: a[a, a[a, ...]]
a: a[a, a[a, a, a], a, a]
a: a[a, a[a, a, a]]
a: a[a, a[a, a[[a, a], a]]]
a: a[a, a[a, a[a, ...]]]
a: a[a, a[a, a[a, a]]]
a: a[a, a[a, a[a.a]]]
a: a[a, a[a, a[a], a]]
a: a[a, a[a, a[a]]]
a: a[a, a[a, a], a[a[a, a]], a[a[a, a], ...]]
a: a[a, a[a, a]]
a: a[a, a[a.a, a.a]]
a: a[a, a[a.a, a[a, a]]]
a: a[a, a[a.a, a[a.a]]]
a: a[a, a[a.a, a]]
a: a[a, a[a.a[a]]]
a: a[a, a[a.a]]
a: a[a, a[a[..., a]], a, a]
a: a[a, a[a[..., a]]]
a: a[a, a[a[[], a]]]
a: a[a, a[a[[a.a, a.a], a[a.a, a.a]]]]
a: a[a, a[a[[a.a], a]]]
a: a[a, a[a[[a], a]], a, a[a[a, a[a[[a], a]], a]], a[a[a, a[a[[a], a]], a]]]
a: a[a, a[a[[a], a]], a, a[a[a, a[a[[a], a]], a]]]
a: a[a, a[a[[a], a]], a]
a: a[a, a[a[a, a, a, a, a]]]
a: a[a, a[a[a, a[a]]]]
a: a[a, a[a[a, a]]]
a: a[a, a[a[a[a], a[a]]]]
a: a[a, a[a[a], a]]
a: a[a, a[a[a]]]
a: a[a, a[a], a[[], a[a]]]
a: a[a, a[a], a[a, ...]]
a: a[a, a[a], a[a, a, a], a, a]
a: a[a, a[a], a[a], a[a], a[a]]
a: a[a, a[a], a[a]]
a: a[a, a[a], a]
a: a[a, a[a]]
a: a[a, a]
a: a[a.a, ...]
a: a[a.a, a.a, a.a, a.a]
a: a[a.a, a.a, a.a, a[[a.a, a[a, a[a.a, a[a.a]]]], a[a.a, a.a, a.a]]]
a: a[a.a, a.a, a.a]
a: a[a.a, a.a, a]
a: a[a.a, a.a]
a: a[a.a, a[[a.a], a]]
a: a[a.a, a[a, a]]
a: a[a.a, a[a.a, a.a]]
a: a[a.a, a[a.a, a]]
a: a[a.a, a[a.a]]
a: a[a.a, a]
a: a[a.a.a, a.a.a]
a: a[a.a.a, a[a.a]]
a: a[a.a.a, a]
a: a[a.a.a[a]]
a: a[a.a.a]
a: a[a.a[a, a.a[a]]]
a: a[a.a[a[a, a.a]]]
a: a[a.a[a[a, a[a]]]]
a: a[a.a[a]]
a: a[a.a]
a: a[a['"', "'", '"""', "'''"], a, a[a[[a['"', "'", '"""', "'''"]], a]], a, a[a[a['"', "'", '"""', "'''"], a, a[a[[a['"', "'", '"""', "'''"]], a]]]], a[a[a['"', "'", '"""', "'''"], a, a[a[[a['"', "'", '"""', "'''"]], a]]]]]
a: a[a['', '*', '**'], a, a[a[[a['', '*', '**']], a]], a, a[a[a['', '*', '**'], a, a[a[[a['', '*', '**']], a]]]], a[a[a['', '*', '**'], a, a[a[[a['', '*', '**']], a]]]]]
a: a[a['a', 'a']]
a: a[a['a', a[a], a]]
a: a[a['a', a]]
a: a[a['a'], a['a']]
a: a[a['a']]
a: a[a['a.a', 'a.a']]
a: a[a['a.a']]
a: a[a['a.a.a']]
a: a[a['a[a[a]]', a[a], a[a]]]
a: a[a['a[a]', a['a[a]', ...]]]
a: a[a[..., 'a[a, a, a]'], a[..., a]]
a: a[a[..., a[a, 'a']]]
a: a[a[..., a], a[a]]
a: a[a[..., a]]
a: a[a[['a', 'a'], a]]
a: a[a[['a'], a]]
a: a[a[['a.a'], a]]
a: a[a[['a[a[a, a, a]]'], a]]
a: a[a[['a[a]'], a]]
a: a[a[[], a[a]]]
a: a[a[[], a], a]
a: a[a[[], a]]
a: a[a[[a, a, a], a[a]]]
a: a[a[[a, a, a], a]]
a: a[a[[a, a[a], a], a[a, a]]]
a: a[a[[a, a], a]]
a: a[a[[a.a], a], ...]
a: a[a[[a[[a], a]], 'a[a]']]
a: a[a[[a[a, a, a]], a]]
a: a[a[[a[a], a], a]]
a: a[a[[a], a.a]]
a: a[a[[a], a[a[a]]]]
a: a[a[[a], a[a]]]
a: a[a[[a], a], a[[a], a]]
a: a[a[[a], a]]
a: a[a[a, 'a']]
a: a[a[a, 'a.a[a]']]
a: a[a[a, 'a[a]']]
a: a[a[a, ...], a[a, a]]
a: a[a[a, ...], a[a]]
a: a[a[a, ...]]
a: a[a[a, a, a, a, a, a]]
a: a[a[a, a, a, a, a]]
a: a[a[a, a, a, a[a, ...]]]
a: a[a[a, a, a, a], a]
a: a[a[a, a, a, a]]
a: a[a[a, a, a.a, a]]
a: a[a[a, a, a.a]]
a: a[a[a, a, a[a, a]]]
a: a[a[a, a, a[a[[a], a]], a, a]]
a: a[a[a, a, a[a], a]]
a: a[a[a, a, a[a]], a]
a: a[a[a, a, a[a]]]
a: a[a[a, a, a], a[a, a, a, a]]
a: a[a[a, a, a], a[a[a, a]]]
a: a[a[a, a, a], a]
a: a[a[a, a, a]]
a: a[a[a, a.a.a]]
a: a[a[a, a.a[a], a]]
a: a[a[a, a.a[a]]]
a: a[a[a, a.a]]
a: a[a[a, a['a']]]
a: a[a[a, a[[], a]]]
a: a[a[a, a[a, a.a]]]
a: a[a[a, a[a, a[a, ...]], a], ...]
a: a[a[a, a[a, a], a[a[a, a]]]]
a: a[a[a, a[a, a], a]]
a: a[a[a, a[a, a]], ...]
a: a[a[a, a[a, a]]]
a: a[a[a, a[a.a, a[a.a]]]]
a: a[a[a, a[a.a]]]
a: a[a[a, a[a[a, a], a]]]
a: a[a[a, a[a[a, a]]]]
a: a[a[a, a[a], a[a]]]
a: a[a[a, a[a], a], a[a]]
a: a[a[a, a[a], a]]
a: a[a[a, a[a]], a]
a: a[a[a, a[a]]]
a: a[a[a, a], 'a']
a: a[a[a, a], ...]
a: a[a[a, a], a, a]
a: a[a[a, a], a.a]
a: a[a[a, a], a[[a], a]]
a: a[a[a, a], a[a, a]]
a: a[a[a, a], a[a[[], a]]]
a: a[a[a, a], a]
a: a[a[a, a]]
a: a[a[a.a, a, a[a.a]]]
a: a[a[a.a, a.a, a.a]]
a: a[a[a.a, a.a]]
a: a[a[a.a, a[a.a, a.a]]]
a: a[a[a.a, a]]
a: a[a[a.a.a, a[a]]]
a: a[a[a.a.a[a]]]
a: a[a[a.a], ...]
a: a[a[a.a], a[a.a]]
a: a[a[a.a], a[a[a.a]]]
a: a[a[a.a], a]
a: a[a[a.a]]
a: a[a[a['a'], a['a']]]
a: a[a[a['a']]]
a: a[a[a[..., a], a[a], a[a]]]
a: a[a[a[a, a, a[a], a[a], a[a[a, a, a[a], a[a]]], a[a[a, a, a[a], a[a]]]]], a, a[a[[a[a.a]], a]], a[a[a[a[a, a[a], a[a], a[a[a, a[a], a[a]]], a[a[a, a[a], a[a]]]]], a[a[[a[a.a]], a]]]], a[a[a[a[a, a[a], a[a], a[a[a, a[a], a[a]]], a[a[a, a[a], a[a]]]]], a[a[[a[a.a]], a]]]]]
a: a[a[a[a, a, a[a], a[a], a[a[a, a, a[a], a[a]]], a[a[a, a, a[a], a[a]]]]], a, a[a[[a[a[a.a, a.a, a[a[a.a, a.a]], a[a[a.a, a.a]]]]], a]], a[a[a[a[a, a[a], a[a], a[a[a, a[a], a[a]]], a[a[a, a[a], a[a]]]]], a[a[[a[a[a.a, a.a, a[a[a.a, a.a]], a[a[a.a, a.a]]]]], a]]]], a[a[a[a[a, a[a], a[a], a[a[a, a[a], a[a]]], a[a[a, a[a], a[a]]]]], a[a[[a[a[a.a, a.a, a[a[a.a, a.a]], a[a[a.a, a.a]]]]], a]]]]]
a: a[a[a[a, a.a.a], a]]
a: a[a[a[a, a[a, a]]]]
a: a[a[a[a, a[a[a, a]]]]]
a: a[a[a[a, a[a]], a]]
a: a[a[a[a, a[a]]]]
a: a[a[a[a, a], a, a]]
a: a[a[a[a, a], a.a]]
a: a[a[a[a, a], a[a, a]]]
a: a[a[a[a, a], a], a]
a: a[a[a[a, a], a]]
a: a[a[a[a, a]], a[a, a]]
a: a[a[a[a, a]], a]
a: a[a[a[a, a]]]
a: a[a[a[a.a, a.a]]]
a: a[a[a[a.a], a.a.a], a.a]
a: a[a[a[a.a], a], a]
a: a[a[a[a[a, a, a, a, a], ...], a[[a], a[a]]]]
a: a[a[a[a[a, a, a, a, a]], a[[a], a[a]]]]
a: a[a[a[a[a, a, a[a], a[a], a[a[a, a, a[a], a[a]]], a[a[a, a, a[a], a[a]]]]], a, a[a[[a[a.a]], a]], a[a[a[a[a, a[a], a[a], a[a[a, a[a], a[a]]], a[a[a, a[a], a[a]]]]], a[a[[a[a.a]], a]]]], a[a[a[a[a, a[a], a[a], a[a[a, a[a], a[a]]], a[a[a, a[a], a[a]]]]], a[a[[a[a.a]], a]]]]], 'a', a, a[a[[a[a[a.a], a.a, a[a[a[a.a], a.a]], a[a[a[a.a], a.a]]]], a]], a, a[a[a[a[a[a, a[a], a[a], a[a[a, a[a], a[a]]], a[a[a, a[a], a[a]]]]], a[a[[a[a.a]], a]], a[a[a[a[a, a[a], a[a], a[a[a, a[a], a[a]]], a[a[a, a[a], a[a]]]]], a[a[[a[a.a]], a]]]], a[a[a[a[a, a[a], a[a], a[a[a, a[a], a[a]]], a[a[a, a[a], a[a]]]]], a[a[[a[a.a]], a]]]]], 'a', a, a[a[[a[a[a.a], a.a, a[a[a[a.a], a.a]], a[a[a[a.a], a.a]]]], a]]]], a[a[a[a[a[a, a[a], a[a], a[a[a, a[a], a[a]]], a[a[a, a[a], a[a]]]]], a[a[[a[a.a]], a]], a[a[a[a[a, a[a], a[a], a[a[a, a[a], a[a]]], a[a[a, a[a], a[a]]]]], a[a[[a[a.a]], a]]]], a[a[a[a[a, a[a], a[a], a[a[a, a[a], a[a]]], a[a[a, a[a], a[a]]]]], a[a[[a[a.a]], a]]]]], 'a', a, a[a[[a[a[a.a], a.a, a[a[a[a.a], a.a]], a[a[a[a.a], a.a]]]], a]]]]]
a: a[a[a[a[a, a, a[a], a[a], a[a[a, a, a[a], a[a]]], a[a[a, a, a[a], a[a]]]]], a, a[a[[a[a.a]], a]], a[a[a[a[a, a[a], a[a], a[a[a, a[a], a[a]]], a[a[a, a[a], a[a]]]]], a[a[[a[a.a]], a]]]], a[a[a[a[a, a[a], a[a], a[a[a, a[a], a[a]]], a[a[a, a[a], a[a]]]]], a[a[[a[a.a]], a]]]]], a, a, a[a[[a[a[a.a], a, a[a[a[a.a], a]], a[a[a[a.a], a]]]], a]], a, a[a[a[a[a[a, a[a], a[a], a[a[a, a[a], a[a]]], a[a[a, a[a], a[a]]]]], a[a[[a[a.a]], a]], a[a[a[a[a, a[a], a[a], a[a[a, a[a], a[a]]], a[a[a, a[a], a[a]]]]], a[a[[a[a.a]], a]]]], a[a[a[a[a, a[a], a[a], a[a[a, a[a], a[a]]], a[a[a, a[a], a[a]]]]], a[a[[a[a.a]], a]]]]], a, a, a[a[[a[a[a.a], a, a[a[a[a.a], a]], a[a[a[a.a], a]]]], a]]]], a[a[a[a[a[a, a[a], a[a], a[a[a, a[a], a[a]]], a[a[a, a[a], a[a]]]]], a[a[[a[a.a]], a]], a[a[a[a[a, a[a], a[a], a[a[a, a[a], a[a]]], a[a[a, a[a], a[a]]]]], a[a[[a[a.a]], a]]]], a[a[a[a[a, a[a], a[a], a[a[a, a[a], a[a]]], a[a[a, a[a], a[a]]]]], a[a[[a[a.a]], a]]]]], a, a, a[a[[a[a[a.a], a, a[a[a[a.a], a]], a[a[a[a.a], a]]]], a]]]]]
a: a[a[a[a[a.a, a.a]], a[a]]]
a: a[a[a[a[a], a[[a[a[a], a]], a]]]]
a: a[a[a[a[a], a]]]
a: a[a[a[a], 'a']]
a: a[a[a[a], a.a.a]]
a: a[a[a[a], a[a, a[a, ...]]], a[a]]
a: a[a[a[a], a[a, a]]]
a: a[a[a[a], a[a[a], ...]]]
a: a[a[a[a], a[a], a]]
a: a[a[a[a], a[a]], a]
a: a[a[a[a], a[a]]]
a: a[a[a[a], a]]
a: a[a[a[a]]]
a: a[a[a], 'a']
a: a[a[a], ...]
a: a[a[a], a, a, a]
a: a[a[a], a, a.a]
a: a[a[a], a, a]
a: a[a[a], a[[], a[a]]]
a: a[a[a], a[[a[a]], a[a]]]
a: a[a[a], a[[a], a]]
a: a[a[a], a[a, ...]]
a: a[a[a], a[a, a]]
a: a[a[a], a[a[a, a]], a[a[a, a]]]
a: a[a[a], a[a[a, a]]]
a: a[a[a], a[a[a], ...]]
a: a[a[a], a[a[a]]]
a: a[a[a], a[a], a[a.a]]
a: a[a[a], a[a], a[a], a[a, ...], a[a], a[a]]
a: a[a[a], a[a]]
a: a[a[a], a]
a: a[a[a]]
a: a[a]
pytkdocs-0.16.1/tests/fixtures/parsing/attributes.py 0000664 0000000 0000000 00000016525 14211451604 0022664 0 ustar 00root root 0000000 0000000 """
Module docstring.
Attributes:
DESCRIBED_IN_MODULE_DOCSTRING: Described in module docstring.
DESCRIBED_AND_ANNOTATED_IN_MODULE_DOCSTRING (bool): Described and annotated in module docstring.
DESCRIBED_IN_BOTH: Described in both.
DESCRIBED_AND_ANNOTATED_IN_BOTH (bool): Described and annotated in both.
"""
from datetime import datetime
from typing import Optional, Tuple
from marshmallow import Schema, fields
from pydantic import BaseModel
NO_DOC_NO_TYPE = 0
NO_TYPE = 1
"""No type."""
NO_DOC_NO_VALUE: int
NO_VALUE: str
"""No value."""
NO_DOC: int = 2
FULL: int = 3
"""Full."""
DESCRIBED_IN_MODULE_DOCSTRING: bool = True
DESCRIBED_AND_ANNOTATED_IN_MODULE_DOCSTRING = True
DESCRIBED_IN_BOTH: bool = True
"""Described in both."""
DESCRIBED_AND_ANNOTATED_IN_BOTH: bool = True
"""Described and annotated in both."""
COMPLEX_TYPE: Optional[Tuple[int, str]] = None
"""Complex type."""
ATTRIBUTE_C1: "C"
"""Forward reference for type."""
ATTRIBUTE_C2: Optional["C"] = None
"""Optional forward reference for type."""
class C:
"""
Class doctring.
Attributes:
DESCRIBED_IN_CLASS_DOCSTRING: Described in class docstring.
DESCRIBED_AND_ANNOTATED_IN_CLASS_DOCSTRING (bool): Described and annotated in class docstring.
DESCRIBED_IN_BOTH: Described in both.
DESCRIBED_AND_ANNOTATED_IN_BOTH (bool): Described and annotated in both.
described_in_class_docstring: Described in class docstring.
described_and_annotated_in_class_docstring (bool): Described and annotated in class docstring.
described_in_both: Described in both.
described_and_annotated_in_both (bool): Described and annotated in both.
"""
IN_CLASS = 0
"""In class."""
DESCRIBED_IN_CLASS_DOCSTRING: bool = True
DESCRIBED_AND_ANNOTATED_IN_CLASS_DOCSTRING = True
DESCRIBED_IN_BOTH: bool = True
"""Described in both."""
DESCRIBED_AND_ANNOTATED_IN_BOTH: bool = True
"""Described and annotated in both."""
both_class_and_instance_attribute: Optional[bool] = None
def __init__(self):
self.in_init = True
"""In init."""
self.annotated_in_init: bool = True
"""Annotated in init."""
self.described_in_class_docstring: bool = True
self.described_and_annotated_in_class_docstring = True
self.described_in_both: bool = True
"""Described in both."""
self.described_and_annotated_in_both: bool = True
"""Described and annotated in both."""
non_attribute: bool = True
"""Non attribute."""
if not self.both_class_and_instance_attribute:
self.both_class_and_instance_attribute = True
class D:
def __init__(self):
"""
Init doctring.
Attributes:
described_in_class_docstring: Described in class docstring.
described_and_annotated_in_class_docstring (bool): Described and annotated in class docstring.
described_in_both: Described in both.
described_and_annotated_in_both (bool): Described and annotated in both.
"""
self.in_init = True
"""In init."""
self.annotated_in_init: bool = True
"""Annotated in init."""
self.described_in_class_docstring: bool = True
self.described_and_annotated_in_class_docstring = True
self.described_in_both: bool = True
"""Described in both."""
self.described_and_annotated_in_both: bool = True
"""Described and annotated in both."""
non_attribute: bool = True
"""Non attribute."""
if not self.both_class_and_instance_attribute:
self.both_class_and_instance_attribute = True
class E:
"""
Class doctring.
Attributes:
DESCRIBED_IN_CLASS_DOCSTRING: Described in class docstring.
DESCRIBED_AND_ANNOTATED_IN_CLASS_DOCSTRING (bool): Described and annotated in class docstring.
DESCRIBED_IN_BOTH: Described in both.
DESCRIBED_AND_ANNOTATED_IN_BOTH (bool): Described and annotated in both.
described_in_class_and_init_docstring: Described in class and init docstring.
described_and_annotated_in_class_and_init_docstring (bool): Described and annotated in class and init docstring.
described_everywhere: Described everywhere.
described_and_annotated_everywhere (bool): Described and annotated everywhere.
"""
IN_CLASS = 0
"""In class."""
DESCRIBED_IN_CLASS_DOCSTRING: bool = True
DESCRIBED_AND_ANNOTATED_IN_CLASS_DOCSTRING = True
DESCRIBED_IN_BOTH: bool = True
"""Described in both."""
DESCRIBED_AND_ANNOTATED_IN_BOTH: bool = True
"""Described and annotated in both."""
both_class_and_instance_attribute: Optional[bool] = None
DEDENT = 0
"""This docstring starts immediately (no blank line).
Use `inspect.cleandoc` instead of `textwrap.dedent`."""
def __init__(self):
"""
Init doctring.
Attributes:
described_in_class_and_init_docstring: Described in class and init docstring.
described_and_annotated_in_class_and_init_docstring (bool): Described and annotated in class and init docstring.
described_everywhere: Described everywhere.
described_and_annotated_everywhere (bool): Described and annotated everywhere.
"""
self.in_init = True
"""In init."""
self.annotated_in_init: bool = True
"""Annotated in init."""
self.described_in_class_and_init_docstring: bool = True
self.described_and_annotated_in_class_and_init_docstring = True
self.described_everywhere: bool = True
"""Described everywhere."""
self.described_and_annotated_everywhere: bool = True
"""Described and annotated everywhere."""
non_attribute: bool = True
"""Non attribute."""
non_attribute2 = True
"""Non attribute 2."""
if not self.both_class_and_instance_attribute:
self.both_class_and_instance_attribute = True
d = D()
d.non_self_attribute = 0
"""Non self attribute."""
self.d = d
self.d.non_self_attribute2 = 0
"""Non self attribute 2."""
c = C()
c.non_self_attribute: int = 0
"""Non self attribute."""
self.c = c
self.c.non_self_attribute2: int = 0
"""Non self attribute 2."""
self.dedent = 0
"""This docstring starts immediately (no blank line).
Use `inspect.cleandoc` instead of `textwrap.dedent`."""
if True:
IN_IF: bytes = b""
"""In if."""
ANNOTATED_IN_IF: str = ""
"""Annotated in if."""
else:
IN_ELSE: list = []
"""In else."""
try:
IN_TRY: int = 1000
"""In try."""
except: # noqa
IN_EXCEPT: float = 9000.0
"""In except."""
else:
IN_TRY_ELSE: str = "-1"
"""In try else."""
finally:
IN_FINALLY: bool = bool(-9000)
"""In finally."""
class Model(BaseModel):
in_pydantic_model: int
"""In Pydantic model."""
model_field: Optional[datetime] = None
"""A model field."""
class MarshmallowSchema(Schema):
in_marshmallow_model: int
"""In Marshmallow model."""
model_field: fields.Str = fields.Str()
"""A model field."""
OK, WARNING, CRITICAL, UNKNOWN = 0, 0, 0, 0
if True:
DEDENT = 0
"""This docstring starts immediately (no blank line).
Use `inspect.cleandoc` instead of `textwrap.dedent`."""
pytkdocs-0.16.1/tests/fixtures/parsing/docstrings.py 0000664 0000000 0000000 00000000411 14211451604 0022640 0 ustar 00root root 0000000 0000000 class NotDefinedYet:
@property
def ha(self) -> "NotDefinedYet":
"""
This property returns `self`.
It's fun because you can call it like `obj.ha.ha.ha.ha.ha.ha...`.
Returns:
self!
"""
return self
pytkdocs-0.16.1/tests/fixtures/parsing/restructured_text/ 0000775 0000000 0000000 00000000000 14211451604 0023712 5 ustar 00root root 0000000 0000000 pytkdocs-0.16.1/tests/fixtures/parsing/restructured_text/__init__.py 0000664 0000000 0000000 00000000000 14211451604 0026011 0 ustar 00root root 0000000 0000000 pytkdocs-0.16.1/tests/fixtures/parsing/restructured_text/attributes.py 0000664 0000000 0000000 00000017117 14211451604 0026461 0 ustar 00root root 0000000 0000000 """
Module docstring.
:var DESCRIBED_IN_MODULE_DOCSTRING: Described in module docstring.
:var DESCRIBED_AND_ANNOTATED_IN_MODULE_DOCSTRING: Described and annotated in module docstring.
:vartype DESCRIBED_AND_ANNOTATED_IN_MODULE_DOCSTRING: bool
:var DESCRIBED_IN_BOTH: Described in both.
:var DESCRIBED_AND_ANNOTATED_IN_BOTH: Described and annotated in both.
:vartype DESCRIBED_AND_ANNOTATED_IN_BOTH: bool
"""
from datetime import datetime
from typing import Optional, Tuple
from marshmallow import Schema, fields
from pydantic import BaseModel
NO_DOC_NO_TYPE = 0
NO_TYPE = 1
"""No type."""
NO_DOC_NO_VALUE: int
NO_VALUE: str
"""No value."""
NO_DOC: int = 2
FULL: int = 3
"""Full."""
DESCRIBED_IN_MODULE_DOCSTRING: bool = True
DESCRIBED_AND_ANNOTATED_IN_MODULE_DOCSTRING = True
DESCRIBED_IN_BOTH: bool = True
"""Described in both."""
DESCRIBED_AND_ANNOTATED_IN_BOTH: bool = True
"""Described and annotated in both."""
COMPLEX_TYPE: Optional[Tuple[int, str]] = None
"""Complex type."""
ATTRIBUTE_C1: "C"
"""Forward reference for type."""
ATTRIBUTE_C2: Optional["C"] = None
"""Optional forward reference for type."""
class C:
"""
Class doctring.
:cvar DESCRIBED_IN_CLASS_DOCSTRING: Described in class docstring.
:cvar DESCRIBED_AND_ANNOTATED_IN_CLASS_DOCSTRING: Described and annotated in class docstring.
:vartype DESCRIBED_AND_ANNOTATED_IN_CLASS_DOCSTRING: bool
:cvar DESCRIBED_IN_BOTH: Described in both.
:cvar DESCRIBED_AND_ANNOTATED_IN_BOTH: Described and annotated in both.
:vartype DESCRIBED_AND_ANNOTATED_IN_BOTH: bool
:var described_in_class_docstring: Described in class docstring.
:var described_and_annotated_in_class_docstring: Described and annotated in class docstring.
:vartype described_and_annotated_in_class_docstring: bool
:var described_in_both: Described in both.
:var described_and_annotated_in_both: Described and annotated in both.
:vartype described_and_annotated_in_both: bool
"""
IN_CLASS = 0
"""In class."""
DESCRIBED_IN_CLASS_DOCSTRING: bool = True
DESCRIBED_AND_ANNOTATED_IN_CLASS_DOCSTRING = True
DESCRIBED_IN_BOTH: bool = True
"""Described in both."""
DESCRIBED_AND_ANNOTATED_IN_BOTH: bool = True
"""Described and annotated in both."""
both_class_and_instance_attribute: Optional[bool] = None
def __init__(self):
self.in_init = True
"""In init."""
self.annotated_in_init: bool = True
"""Annotated in init."""
self.described_in_class_docstring: bool = True
self.described_and_annotated_in_class_docstring = True
self.described_in_both: bool = True
"""Described in both."""
self.described_and_annotated_in_both: bool = True
"""Described and annotated in both."""
non_attribute: bool = True
"""Non attribute."""
if not self.both_class_and_instance_attribute:
self.both_class_and_instance_attribute = True
class D:
def __init__(self):
"""
Init doctring.
:var described_in_class_docstring: Described in class docstring.
:var described_and_annotated_in_class_docstring: Described and annotated in class docstring.
:vartype described_and_annotated_in_class_docstring: bool
:var described_in_both: Described in both.
:var described_and_annotated_in_both: Described and annotated in both.
:vartype described_and_annotated_in_both: bool
"""
self.in_init = True
"""In init."""
self.annotated_in_init: bool = True
"""Annotated in init."""
self.described_in_class_docstring: bool = True
self.described_and_annotated_in_class_docstring = True
self.described_in_both: bool = True
"""Described in both."""
self.described_and_annotated_in_both: bool = True
"""Described and annotated in both."""
non_attribute: bool = True
"""Non attribute."""
if not self.both_class_and_instance_attribute:
self.both_class_and_instance_attribute = True
class E:
"""
Class doctring.
:cvar DESCRIBED_IN_CLASS_DOCSTRING: Described in class docstring.
:cvar DESCRIBED_AND_ANNOTATED_IN_CLASS_DOCSTRING: Described and annotated in class docstring.
:vartype DESCRIBED_AND_ANNOTATED_IN_CLASS_DOCSTRING: bool
:cvar DESCRIBED_IN_BOTH: Described in both.
:cvar DESCRIBED_AND_ANNOTATED_IN_BOTH: Described and annotated in both.
:vartype DESCRIBED_AND_ANNOTATED_IN_BOTH: bool
:var described_in_class_and_init_docstring: Described in class and init docstring.
:var described_and_annotated_in_class_and_init_docstring: Described and annotated in class and init docstring.
:vartype described_and_annotated_in_class_and_init_docstring: bool
:var described_everywhere: Described everywhere.
:var described_and_annotated_everywhere: Described and annotated everywhere.
:vartype described_and_annotated_everywhere: bool
"""
IN_CLASS = 0
"""In class."""
DESCRIBED_IN_CLASS_DOCSTRING: bool = True
DESCRIBED_AND_ANNOTATED_IN_CLASS_DOCSTRING = True
DESCRIBED_IN_BOTH: bool = True
"""Described in both."""
DESCRIBED_AND_ANNOTATED_IN_BOTH: bool = True
"""Described and annotated in both."""
both_class_and_instance_attribute: Optional[bool] = None
def __init__(self):
"""
Init doctring.
:var described_in_class_and_init_docstring: Described in class and init docstring.
:var described_and_annotated_in_class_and_init_docstring: Described and annotated in class and init docstring.
:vartype described_and_annotated_in_class_and_init_docstring: bool
:var described_everywhere: Described everywhere.
:var described_and_annotated_everywhere: Described and annotated everywhere.
:vartype described_and_annotated_everywhere: bool
"""
self.in_init = True
"""In init."""
self.annotated_in_init: bool = True
"""Annotated in init."""
self.described_in_class_and_init_docstring: bool = True
self.described_and_annotated_in_class_and_init_docstring = True
self.described_everywhere: bool = True
"""Described everywhere."""
self.described_and_annotated_everywhere: bool = True
"""Described and annotated everywhere."""
non_attribute: bool = True
"""Non attribute."""
non_attribute2 = True
"""Non attribute 2."""
if not self.both_class_and_instance_attribute:
self.both_class_and_instance_attribute = True
d = D()
d.non_self_attribute = 0
"""Non self attribute."""
self.d = d
self.d.non_self_attribute2 = 0
"""Non self attribute 2."""
c = C()
c.non_self_attribute: int = 0
"""Non self attribute."""
self.c = c
self.c.non_self_attribute2: int = 0
"""Non self attribute 2."""
if True:
IN_IF: bytes = b""
"""In if."""
ANNOTATED_IN_IF: str = ""
"""Annotated in if."""
else:
IN_ELSE: list = []
"""In else."""
try:
IN_TRY: int = 1000
"""In try."""
except: # noqa
IN_EXCEPT: float = 9000.0
"""In except."""
else:
IN_TRY_ELSE: str = "-1"
"""In try else."""
finally:
IN_FINALLY: bool = bool(-9000)
"""In finally."""
class Model(BaseModel):
in_pydantic_model: int
"""In Pydantic model."""
model_field: Optional[datetime] = None
"""A model field."""
class MarshmallowSchema(Schema):
in_marshmallow_model: int
"""In Marshmallow model."""
model_field: fields.Str = fields.Str()
"""A model field."""
OK, WARNING, CRITICAL, UNKNOWN = 0, 0, 0, 0
pytkdocs-0.16.1/tests/fixtures/parsing/restructured_text/class_docstrings.py 0000664 0000000 0000000 00000001502 14211451604 0027626 0 ustar 00root root 0000000 0000000 class NotDefinedYet:
@property
def ha(self) -> "NotDefinedYet":
"""
This property returns `self`.
It's fun because you can call it like `obj.ha.ha.ha.ha.ha.ha...`.
:return: self!
"""
return self
class ClassInitFunction:
def __init__(self, value: str, other=1) -> None:
"""
Initialize instance.
:param value: Value to store
:param int other: Other value with default
"""
self.value = value
self.other = other
class ClassWithFunction:
def thing(self, value: str, other=1) -> str:
"""
Concatenate a integer after a string.
:param value: Value to store
:param int other: Other value with default
:return: Concatenated result
"""
return f"{value}{other}"
pytkdocs-0.16.1/tests/fixtures/parsing/restructured_text/docstring_attributes_section.py 0000664 0000000 0000000 00000000323 14211451604 0032250 0 ustar 00root root 0000000 0000000 """
Let's describe some attributes.
:var A: Alpha.
:var B: Beta.
:vartype B: bytes
:var C: Gamma.
:var D: Delta.
:var E: Epsilon.
:vartype E: float
"""
A: int = 0
B: str = "ŧ"
C: bool = True
D = 3.0
E = None
pytkdocs-0.16.1/tests/fixtures/pkg1/ 0000775 0000000 0000000 00000000000 14211451604 0017312 5 ustar 00root root 0000000 0000000 pytkdocs-0.16.1/tests/fixtures/pkg1/__init__.py 0000664 0000000 0000000 00000000000 14211451604 0021411 0 ustar 00root root 0000000 0000000 pytkdocs-0.16.1/tests/fixtures/pkg1/pkg2/ 0000775 0000000 0000000 00000000000 14211451604 0020155 5 ustar 00root root 0000000 0000000 pytkdocs-0.16.1/tests/fixtures/pkg1/pkg2/__init__.py 0000664 0000000 0000000 00000000000 14211451604 0022254 0 ustar 00root root 0000000 0000000 pytkdocs-0.16.1/tests/fixtures/pkg1/pkg2/pkg3/ 0000775 0000000 0000000 00000000000 14211451604 0021021 5 ustar 00root root 0000000 0000000 pytkdocs-0.16.1/tests/fixtures/pkg1/pkg2/pkg3/__init__.py 0000664 0000000 0000000 00000000000 14211451604 0023120 0 ustar 00root root 0000000 0000000 pytkdocs-0.16.1/tests/fixtures/pkg1/pkg2/pkg3/pkg4/ 0000775 0000000 0000000 00000000000 14211451604 0021666 5 ustar 00root root 0000000 0000000 pytkdocs-0.16.1/tests/fixtures/pkg1/pkg2/pkg3/pkg4/__init__.py 0000664 0000000 0000000 00000000000 14211451604 0023765 0 ustar 00root root 0000000 0000000 pytkdocs-0.16.1/tests/fixtures/pkg1/pkg2/pkg3/pkg4/pkg5/ 0000775 0000000 0000000 00000000000 14211451604 0022534 5 ustar 00root root 0000000 0000000 pytkdocs-0.16.1/tests/fixtures/pkg1/pkg2/pkg3/pkg4/pkg5/__init__.py 0000664 0000000 0000000 00000000034 14211451604 0024642 0 ustar 00root root 0000000 0000000 """Hello from the abyss."""
pytkdocs-0.16.1/tests/fixtures/pydantic.py 0000664 0000000 0000000 00000000613 14211451604 0020635 0 ustar 00root root 0000000 0000000 from typing import Set
from pydantic import BaseModel, Field
class Person(BaseModel):
"""Simple Pydantic Model for a person's information"""
name: str = Field("PersonA", description="The person's name")
age: int = Field(18, description="The person's age which must be at minimum 18")
labels: Set[str] = Field(set(), description="Set of labels the person can be referred by")
pytkdocs-0.16.1/tests/fixtures/real_path/ 0000775 0000000 0000000 00000000000 14211451604 0020407 5 ustar 00root root 0000000 0000000 pytkdocs-0.16.1/tests/fixtures/real_path/__init__.py 0000664 0000000 0000000 00000000000 14211451604 0022506 0 ustar 00root root 0000000 0000000 pytkdocs-0.16.1/tests/fixtures/real_path/module_a.py 0000664 0000000 0000000 00000000072 14211451604 0022545 0 ustar 00root root 0000000 0000000 from .module_b import ATTRIBUTE, DefinedInModuleB # noqa
pytkdocs-0.16.1/tests/fixtures/real_path/module_b.py 0000664 0000000 0000000 00000000136 14211451604 0022547 0 ustar 00root root 0000000 0000000 ATTRIBUTE = 0
class DefinedInModuleB:
ATTRIBUTE = 1
def method(self):
pass
pytkdocs-0.16.1/tests/fixtures/test_namespace/ 0000775 0000000 0000000 00000000000 14211451604 0021443 5 ustar 00root root 0000000 0000000 pytkdocs-0.16.1/tests/fixtures/test_namespace/subspace/ 0000775 0000000 0000000 00000000000 14211451604 0023250 5 ustar 00root root 0000000 0000000 pytkdocs-0.16.1/tests/fixtures/test_namespace/subspace/__init__.py 0000664 0000000 0000000 00000000042 14211451604 0025355 0 ustar 00root root 0000000 0000000 "The subspace package docstring."
pytkdocs-0.16.1/tests/fixtures/the_package/ 0000775 0000000 0000000 00000000000 14211451604 0020703 5 ustar 00root root 0000000 0000000 pytkdocs-0.16.1/tests/fixtures/the_package/__init__.py 0000664 0000000 0000000 00000000035 14211451604 0023012 0 ustar 00root root 0000000 0000000 """The package docstring."""
pytkdocs-0.16.1/tests/fixtures/the_package/the_module.py 0000664 0000000 0000000 00000002426 14211451604 0023406 0 ustar 00root root 0000000 0000000 """The module docstring."""
THE_ATTRIBUTE: int = 0
"""The attribute docstring."""
def the_function():
"""The function docstring."""
class TheClass:
"""The class docstring."""
THE_ATTRIBUTE: float = 0.1
"""The attribute 0.1 docstring."""
class TheNestedClass:
"""The nested class docstring."""
THE_ATTRIBUTE: float = 0.2
"""The attribute 0.2 docstring."""
class TheDoubleNestedClass:
"""The double nested class docstring."""
THE_ATTRIBUTE: float = 0.3
"""The attribute 0.3 docstring."""
def the_method(self):
"""The method3 docstring."""
def the_method(self):
"""The method2 docstring."""
def the_method(self):
"""The method1 docstring."""
@staticmethod
def the_static_method():
"""The static method docstring."""
@classmethod
def the_class_method(cls):
"""The class method docstring."""
@property
def the_property(self):
"""The property docstring."""
@property
def the_writable_property(self):
"""The writable property getter docstring."""
@the_writable_property.setter
def the_writable_property(self, value):
"""The writable property setter docstring."""
pytkdocs-0.16.1/tests/fixtures/unwrap_getattr_raises.py 0000664 0000000 0000000 00000000132 14211451604 0023432 0 ustar 00root root 0000000 0000000 class TryMe:
def __getattr__(self, item):
raise ValueError
TRY_ME = TryMe()
pytkdocs-0.16.1/tests/fixtures/wrapped_objects.py 0000664 0000000 0000000 00000000377 14211451604 0022204 0 ustar 00root root 0000000 0000000 import functools
@functools.lru_cache(maxsize=1024)
def my_function(some_arg):
"""My docstring."""
return some_arg
class A:
@functools.lru_cache(maxsize=1024)
def hello(self, name):
"""Hello!"""
return f"Hello {name}!"
pytkdocs-0.16.1/tests/test_cli.py 0000664 0000000 0000000 00000005106 14211451604 0016761 0 ustar 00root root 0000000 0000000 """Tests for [the `cli` module][pytkdocs.cli]."""
import io
import json
import pytest
from pytkdocs import cli
def test_show_help(capsys):
"""
Show help.
Arguments:
capsys: Pytest fixture to capture output.
"""
with pytest.raises(SystemExit):
cli.main(["-h"])
captured = capsys.readouterr()
assert "pytkdocs" in captured.out
def test_read_whole_stdin(monkeypatch):
"""Read whole standard input."""
monkeypatch.setattr(
"sys.stdin",
io.StringIO(
"""
{
"objects": [
{
"path": "pytkdocs.cli.main"
},
{
"path": "pytkdocs.cli.get_parser"
}
]
}
"""
),
)
cli.main([])
def test_read_stdin_line_by_line(monkeypatch):
"""Read standard input line by line."""
monkeypatch.setattr(
"sys.stdin",
io.StringIO(
'{"objects": [{"path": "pytkdocs.cli.main"}]}\n{"objects": [{"path": "pytkdocs.cli.get_parser"}]}\n'
),
)
cli.main(["--line-by-line"])
def test_load_complete_tree(monkeypatch):
"""Load `pytkdocs` own documentation."""
monkeypatch.setattr("sys.stdin", io.StringIO('{"objects": [{"path": "pytkdocs"}]}'))
cli.main(["--line-by-line"])
def test_discard_stdout(monkeypatch, capsys):
"""Discard standard output at import time."""
monkeypatch.setattr("sys.stdin", io.StringIO('{"objects": [{"path": "tests.fixtures.corrupt_output"}]}'))
cli.main(["--line-by-line"])
captured = capsys.readouterr()
assert not captured.out.startswith("*corruption intensifies*")
# assert no JSON parsing error
json.loads(captured.out)
def test_exception_raised_while_discard_stdout(monkeypatch, capsys):
"""Check that an error is still printed when an exception is raised and stdout is discarded."""
monkeypatch.setattr("sys.stdin", io.StringIO('{"objects": [{"path": "pytkdocs.cli"}]}'))
# raise an exception during the process
monkeypatch.setattr("pytkdocs.cli.process_json", lambda _: 1 / 0)
# assert no exception
cli.main(["--line-by-line"])
# assert json error was written to stdout
captured = capsys.readouterr()
assert captured.out
# assert no JSON parsing error
json.loads(captured.out)
def test_load_complete_tests_tree(monkeypatch):
"""Load `pytkdocs` own tests' documentation."""
monkeypatch.setattr("sys.stdin", io.StringIO('{"objects": [{"path": "tests"}]}'))
cli.main(["--line-by-line"])
pytkdocs-0.16.1/tests/test_loader.py 0000664 0000000 0000000 00000050505 14211451604 0017463 0 ustar 00root root 0000000 0000000 """Tests for [the `loader` module][pytkdocs.loader]."""
import os
import sys
from pathlib import Path
from typing import Set
import pytest
from django.db.models.fields import CharField
from marshmallow import fields
from pytkdocs.loader import Loader, get_object_tree
from tests import FIXTURES_DIR
def test_import_no_path():
"""Raise error when getting tree for empty object name."""
with pytest.raises(ValueError):
get_object_tree("")
def test_import_error():
"""Raise error when getting tree for missing object."""
with pytest.raises(ImportError):
get_object_tree("eeeeeeeeeeeeeeeeeee")
def test_can_find_class_real_path():
"""Find real path of a class."""
leaf = get_object_tree("tests.fixtures.real_path.module_a.DefinedInModuleB")
assert leaf.dotted_path == "tests.fixtures.real_path.module_b.DefinedInModuleB"
def test_can_find_class_method_real_path():
"""Find real path of a class method."""
leaf = get_object_tree("tests.fixtures.real_path.module_a.DefinedInModuleB.method")
assert leaf.dotted_path == "tests.fixtures.real_path.module_b.DefinedInModuleB.method"
def test_can_find_class_attribute_real_path():
"""Find real path of a class attribute."""
leaf = get_object_tree("tests.fixtures.real_path.module_a.DefinedInModuleB.ATTRIBUTE")
assert leaf.dotted_path == "tests.fixtures.real_path.module_b.DefinedInModuleB.ATTRIBUTE"
def test_cannot_find_module_attribute_real_path():
"""Find real path of a module attribute."""
leaf = get_object_tree("tests.fixtures.real_path.module_a.ATTRIBUTE")
assert leaf.dotted_path != "tests.fixtures.real_path.module_b.ATTRIBUTE"
def test_import_module_with_colon_path_syntax():
"""Import a module using the "colon" path syntax."""
leaf = get_object_tree("tests.fixtures.the_package.the_module", new_path_syntax=True)
def test_import_attribute_with_colon_path_syntax():
"""Import an attribute using the "colon" path syntax."""
leaf = get_object_tree("tests.fixtures.the_package.the_module:THE_ATTRIBUTE")
def test_import_nested_attribute_with_colon_path_syntax():
"""Import a nested attribute using the "colon" path syntax."""
leaf = get_object_tree("tests.fixtures.the_package.the_module:TheClass.THE_ATTRIBUTE")
def test_fail_to_import_module_with_colon_path_syntax():
"""Import a module using the "colon" path syntax."""
with pytest.raises(ImportError):
get_object_tree("tests.fixtures.does_not_exist", new_path_syntax=True)
def test_fail_to_import_attribute_with_colon_path_syntax():
"""Import an attribute using the "colon" path syntax."""
with pytest.raises(AttributeError) as error:
leaf = get_object_tree("tests.fixtures.the_package.the_module:does_not_exist")
def test_fail_to_import_nested_attribute_with_colon_path_syntax():
"""Import a nested attribute using the "colon" path syntax."""
with pytest.raises(AttributeError) as error:
leaf = get_object_tree("tests.fixtures.the_package.the_module:TheClass.does_not_exist")
def test_fail_to_import_module_with_dot_path_syntax():
"""Import a module using the "dot" path syntax."""
with pytest.raises(ImportError, match=r"possible causes"):
get_object_tree("does_not_exist")
def test_fail_to_import_attribute_with_dot_path_syntax():
"""Import an attribute using the "dot" path syntax."""
with pytest.raises(AttributeError) as error:
leaf = get_object_tree("tests.fixtures.the_package.the_module.does_not_exist")
def test_fail_to_import_nested_attribute_with_dot_path_syntax():
"""Import a nested attribute using the "dot" path syntax."""
with pytest.raises(AttributeError) as error:
leaf = get_object_tree("tests.fixtures.the_package.the_module.TheClass.does_not_exist")
def test_inheriting_enum_Enum():
"""Handle `enum.Enum` classes."""
"""See details at [tests.fixtures.inheriting_enum_Enum][]."""
loader = Loader()
loader.get_object_documentation("tests.fixtures.inheriting_enum_Enum")
assert not loader.errors
def test_inheriting_typing_NamedTuple():
"""
    Handle `typing.NamedTuple` classes.
See details at [tests.fixtures.inheriting_typing_NamedTuple][].
"""
loader = Loader()
loader.get_object_documentation("tests.fixtures.inheriting_typing_NamedTuple")
assert len(loader.errors) == 0
def test_nested_class():
"""Handle nested classes."""
loader = Loader()
obj = loader.get_object_documentation("tests.fixtures.nested_class")
assert obj.classes
assert obj.classes[0].docstring == "Main docstring."
assert obj.classes[0].classes
assert obj.classes[0].classes[0].docstring == "Nested docstring."
def test_loading_deep_package():
"""Handle deep nesting of packages."""
loader = Loader()
obj = loader.get_object_documentation("tests.fixtures.pkg1.pkg2.pkg3.pkg4.pkg5")
assert obj.docstring == "Hello from the abyss."
assert obj.path == "tests.fixtures.pkg1.pkg2.pkg3.pkg4.pkg5"
def test_loading_package():
"""Handle basic packages."""
loader = Loader()
obj = loader.get_object_documentation("tests.fixtures.the_package")
assert obj.docstring == "The package docstring."
def test_loading_namespace_package():
"""Handle native namespace packages."""
loader = Loader()
old_paths = list(sys.path)
sys.path.append(str(Path(FIXTURES_DIR).resolve()))
obj = loader.get_object_documentation("test_namespace.subspace")
assert obj.docstring == "The subspace package docstring."
assert obj.relative_file_path == f"subspace{os.sep}__init__.py"
sys.path = old_paths
def test_loading_module():
"""Handle single modules."""
loader = Loader()
obj = loader.get_object_documentation("tests.fixtures.the_package.the_module")
assert obj.docstring == "The module docstring."
def test_loading_class():
"""Handle classes."""
loader = Loader()
obj = loader.get_object_documentation("tests.fixtures.the_package.the_module.TheClass")
assert obj.docstring == "The class docstring."
assert obj.bases == ["object"]
def test_loading_class_with_multiline_docstring_starting_on_first_line():
"""Handle classes with multiline docstrings where the first line is next to the triple-quotes."""
loader = Loader()
obj = loader.get_object_documentation("tests.fixtures.first_line_class_docstring.TheClass")
assert obj.docstring == """The first line of the docstring.\n\nA bit more of the docstring."""
def test_loading_dataclass():
"""Handle dataclasses."""
loader = Loader()
obj = loader.get_object_documentation("tests.fixtures.dataclass.Person")
assert obj.docstring == "Simple dataclass for a person's information"
assert len(obj.attributes) == 2
name_attr = next(attr for attr in obj.attributes if attr.name == "name")
assert name_attr.type == str
age_attr = next(attr for attr in obj.attributes if attr.name == "age")
assert age_attr.type == int
assert age_attr.docstring == "Field description."
assert "dataclass" in obj.properties
not_dataclass = loader.get_object_documentation("tests.fixtures.the_package.the_module.TheClass.TheNestedClass")
assert "dataclass" not in not_dataclass.properties
def test_loading_empty_dataclass():
"""Handle empty dataclasses."""
loader = Loader()
obj = loader.get_object_documentation("tests.fixtures.dataclass.Empty")
assert obj.docstring == "A dataclass without any fields"
assert len(obj.attributes) == 0
assert "dataclass" in obj.properties
def test_loading_pydantic_model():
"""Handle Pydantic models."""
loader = Loader()
obj = loader.get_object_documentation("tests.fixtures.pydantic.Person")
assert obj.docstring == "Simple Pydantic Model for a person's information"
assert "pydantic-model" in obj.properties
name_attr = next(attr for attr in obj.attributes if attr.name == "name")
assert name_attr.type == str
assert name_attr.docstring == "The person's name"
assert "pydantic-field" in name_attr.properties
age_attr = next(attr for attr in obj.attributes if attr.name == "age")
assert age_attr.type == int
assert age_attr.docstring == "The person's age which must be at minimum 18"
assert "pydantic-field" in age_attr.properties
labels_attr = next(attr for attr in obj.attributes if attr.name == "labels")
assert labels_attr.type == Set[str]
assert labels_attr.docstring == "Set of labels the person can be referred by"
assert "pydantic-field" in labels_attr.properties
def test_loading_django_model():
"""Handle Django models"""
loader = Loader()
obj = loader.get_object_documentation("tests.fixtures.django.Person")
assert obj.docstring == "Simple Django Model for a person's information"
name_attr = next(attr for attr in obj.attributes if attr.name == "name")
assert name_attr.type == CharField
assert name_attr.docstring == "Name"
def test_loading_marshmallow_model():
"""Handle Marshmallow models."""
loader = Loader()
obj = loader.get_object_documentation("tests.fixtures.marshmallow.Person")
assert obj.docstring == "Simple Marshmallow Model for a person's information"
assert "marshmallow-model" in obj.properties
name_attr = next(attr for attr in obj.attributes if attr.name == "name")
assert name_attr.type == fields.Str
assert name_attr.docstring == "The person's name"
assert "marshmallow-field" in name_attr.properties
assert "required" in name_attr.properties
age_attr = next(attr for attr in obj.attributes if attr.name == "age")
assert age_attr.type == fields.Int
assert age_attr.docstring == "The person's age which must be at minimum 18"
assert "marshmallow-field" in age_attr.properties
def test_loading_nested_class():
"""Select nested class."""
loader = Loader()
obj = loader.get_object_documentation("tests.fixtures.the_package.the_module.TheClass.TheNestedClass")
assert obj.docstring == "The nested class docstring."
def test_loading_double_nested_class():
"""Select double-nested class."""
loader = Loader()
obj = loader.get_object_documentation(
"tests.fixtures.the_package.the_module.TheClass.TheNestedClass.TheDoubleNestedClass"
)
assert obj.docstring == "The double nested class docstring."
def test_loading_class_attribute():
"""Select class attribute."""
loader = Loader()
obj = loader.get_object_documentation("tests.fixtures.the_package.the_module.TheClass.THE_ATTRIBUTE")
assert obj.docstring == "The attribute 0.1 docstring."
def test_loading_nested_class_attribute():
"""Select nested-class attribute."""
loader = Loader()
obj = loader.get_object_documentation("tests.fixtures.the_package.the_module.TheClass.TheNestedClass.THE_ATTRIBUTE")
assert obj.docstring == "The attribute 0.2 docstring."
def test_loading_double_nested_class_attribute():
"""Select double-nested-class attribute."""
loader = Loader()
obj = loader.get_object_documentation(
"tests.fixtures.the_package.the_module.TheClass.TheNestedClass.TheDoubleNestedClass.THE_ATTRIBUTE"
)
assert obj.docstring == "The attribute 0.3 docstring."
def test_loading_class_method():
"""Select class method."""
loader = Loader()
obj = loader.get_object_documentation("tests.fixtures.the_package.the_module.TheClass.the_method")
assert obj.docstring == "The method1 docstring."
def test_loading_nested_class_method():
"""Select nested class method."""
loader = Loader()
obj = loader.get_object_documentation("tests.fixtures.the_package.the_module.TheClass.TheNestedClass.the_method")
assert obj.docstring == "The method2 docstring."
def test_loading_double_nested_class_method():
"""Select double-nested class method."""
loader = Loader()
obj = loader.get_object_documentation(
"tests.fixtures.the_package.the_module.TheClass.TheNestedClass.TheDoubleNestedClass.the_method"
)
assert obj.docstring == "The method3 docstring."
def test_loading_staticmethod():
"""Select static method."""
loader = Loader()
obj = loader.get_object_documentation("tests.fixtures.the_package.the_module.TheClass.the_static_method")
assert obj.docstring == "The static method docstring."
def test_loading_classmethod():
"""Select class method."""
loader = Loader()
obj = loader.get_object_documentation("tests.fixtures.the_package.the_module.TheClass.the_class_method")
assert obj.docstring == "The class method docstring."
def test_loading_property():
"""Select property."""
loader = Loader()
obj = loader.get_object_documentation("tests.fixtures.the_package.the_module.TheClass.the_property")
assert obj.docstring == "The property docstring."
def test_loading_writable_property():
"""Select writable property."""
loader = Loader()
obj = loader.get_object_documentation("tests.fixtures.the_package.the_module.TheClass.the_writable_property")
assert obj.docstring == "The writable property getter docstring."
def test_loading_function():
"""Select function."""
loader = Loader()
obj = loader.get_object_documentation("tests.fixtures.the_package.the_module.the_function")
assert obj.docstring == "The function docstring."
def test_loading_attribute():
"""Select attribute."""
loader = Loader()
obj = loader.get_object_documentation("tests.fixtures.the_package.the_module.THE_ATTRIBUTE")
assert obj.docstring == "The attribute docstring."
def test_loading_explicit_members():
"""Select members explicitly."""
loader = Loader()
obj = loader.get_object_documentation("tests.fixtures.the_package.the_module", members={"TheClass"})
assert len(obj.children) == 1
assert obj.children[0].name == "TheClass"
def test_loading_no_members():
"""Select no members."""
loader = Loader()
obj = loader.get_object_documentation("tests.fixtures.the_package.the_module", members=False)
assert not obj.children
def test_loading_with_filters():
"""Select with filters."""
loader = Loader(filters=["!^[A-Z_]+$"])
obj = loader.get_object_documentation("tests.fixtures.the_package.the_module")
for child in obj.children:
assert child.name != "THE_ATTRIBUTE"
def test_loading_with_filters_reselection():
"""A filter can cancel a previous filter."""
loader = Loader(filters=["![A-Z_]", "[a-z]"])
obj = loader.get_object_documentation("tests.fixtures.the_package.the_module")
assert obj.classes
assert obj.classes[0].name == "TheClass"
def test_loading_with_members_and_filters():
"""Select members with filters."""
loader = Loader(filters=["!THE"])
obj = loader.get_object_documentation(
"tests.fixtures.the_package.the_module", members={"THE_ATTRIBUTE", "TheClass"}
)
assert obj.attributes
assert obj.attributes[0].name == "THE_ATTRIBUTE"
assert obj.classes
assert obj.classes[0].name == "TheClass"
assert not any(a.name == "THE_ATTRIBUTE" for a in obj.classes[0].attributes)
def test_loading_members_set_at_import_time():
"""Select dynamic members."""
loader = Loader()
obj = loader.get_object_documentation("tests.fixtures.dynamic_members")
assert obj.functions
assert len(obj.classes) == 1
class_ = obj.classes[0]
assert class_.methods
def test_loading_inherited_members():
"""Select inherited members."""
loader = Loader(inherited_members=True)
obj = loader.get_object_documentation("tests.fixtures.inherited_members.Child")
for child_name in ("method1", "method2", "V1", "V2"):
assert child_name in (child.name for child in obj.children)
def test_not_loading_inherited_members():
"""Do not select inherited members."""
loader = Loader(inherited_members=False)
obj = loader.get_object_documentation("tests.fixtures.inherited_members.Child")
for child_name in ("method1", "V1"):
assert child_name not in (child.name for child in obj.children)
for child_name in ("method2", "V2"):
assert child_name in (child.name for child in obj.children)
def test_loading_selected_inherited_members():
"""Select specific members, some of them being inherited."""
loader = Loader(inherited_members=True)
obj = loader.get_object_documentation("tests.fixtures.inherited_members.Child", members={"V1", "V2"})
for child_name in ("V1", "V2"):
assert child_name in (child.name for child in obj.children)
def test_loading_pydantic_inherited_members():
"""Select inherited members in Pydantic models."""
loader = Loader(inherited_members=True)
obj = loader.get_object_documentation("tests.fixtures.inherited_members.ChildModel")
for child_name in ("a", "b"):
assert child_name in (child.name for child in obj.children)
def test_not_loading_pydantic_inherited_members():
"""Do not select inherited members in Pydantic models."""
loader = Loader(inherited_members=False)
obj = loader.get_object_documentation("tests.fixtures.inherited_members.ChildModel")
assert "a" not in (child.name for child in obj.children)
def test_loading_wrapped_function():
"""Load documentation for wrapped function, not wrapper."""
loader = Loader()
obj = loader.get_object_documentation("tests.fixtures.wrapped_objects.my_function")
assert obj.docstring == "My docstring."
def test_loading_module_wrapped_members():
"""Load documentation for wrapped function, not wrapper."""
loader = Loader()
obj = loader.get_object_documentation("tests.fixtures.wrapped_objects")
assert obj.functions and obj.functions[0].docstring == "My docstring."
assert obj.classes and obj.classes[0].methods and obj.classes[0].methods[0].docstring == "Hello!"
def test_unwrap_object_with_getattr_method_raising_exception():
"""Try loading an object that defines a `__getattr__` method which raises an exception."""
loader = Loader()
loader.get_object_documentation("tests.fixtures.unwrap_getattr_raises")
def test_loading_coroutine():
"""Load documentation for a coroutine."""
loader = Loader()
obj = loader.get_object_documentation("tests.fixtures.asyncio.coroutine_function")
assert "async" in obj.properties
def test_loading_coroutine_method():
"""Load documentation for a coroutine method."""
loader = Loader()
obj = loader.get_object_documentation("tests.fixtures.asyncio.ClassContainingCoroutineMethod.coroutine_method")
assert "async" in obj.properties
def test_loading_function_without_async_property():
"""Load documentation for a function that is not a coroutine."""
loader = Loader()
obj = loader.get_object_documentation("tests.fixtures.the_package.the_module.the_function")
assert "async" not in obj.properties
def test_loading_method_without_async_property():
"""Load documentation for a method that is not a coroutine."""
loader = Loader()
obj = loader.get_object_documentation("tests.fixtures.the_package.the_module.TheClass.the_method")
assert "async" not in obj.properties
def test_inherited_properties_docstrings():
"""Load docstrings from parent class for inherited properties."""
loader = Loader(new_path_syntax=True)
obj = loader.get_object_documentation("tests.fixtures.inherited_properties:SubClass.read_only")
assert obj.docstring == "SuperClass.read_only docs"
obj = loader.get_object_documentation("tests.fixtures.inherited_properties:SubClass.mutable")
assert obj.docstring == "SuperClass.mutable getter docs"
def test_loading_cached_properties():
"""Load cached properties."""
loader = Loader(new_path_syntax=True)
obj = loader.get_object_documentation("tests.fixtures.cached_properties:C")
assert len(obj.children) == 1
assert obj.children[0].name == obj.children[0].docstring == "aaa"
assert "cached" in obj.children[0].properties
def test_method_descriptor():
"""Load a method descriptor."""
loader = Loader(new_path_syntax=True)
obj = loader.get_object_documentation("tests.fixtures.method_descriptor:descriptor")
assert obj.name == "descriptor"
assert obj.signature
assert len(obj.signature.parameters) == 2
assert obj.docstring
assert obj.category == "method"
def test_load_decorated_function():
"""Load a decorated function."""
loader = Loader(new_path_syntax=True)
obj = loader.get_object_documentation("tests.fixtures.decorated_function")
assert [child.name for child in obj.children] == ["add", "sub"]
for child in obj.children:
assert child.category == "function"
assert child.parent is child.root
assert child.parent.name == "decorated_function"
pytkdocs-0.16.1/tests/test_objects.py 0000664 0000000 0000000 00000012274 14211451604 0017647 0 ustar 00root root 0000000 0000000 """Tests for [the `objects` module][pytkdocs.objects]."""
import os
from pytkdocs.loader import Loader
from pytkdocs.objects import Attribute, Class, Function, Method, Module, Object
from tests import FIXTURES_DIR
def test_creating_module():
"""Create a Module."""
assert Module(name="my_object", path="my.dotted.path", file_path="/my/absolute/path.py")
def test_creating_class():
"""Create a Class."""
assert Class(name="my_object", path="my.dotted.path", file_path="/my/absolute/path.py")
def test_creating_method():
"""Create a Method."""
assert Method(name="my_object", path="my.dotted.path", file_path="/my/absolute/path.py")
def test_creating_function():
"""Create a Function."""
assert Function(name="my_object", path="my.dotted.path", file_path="/my/absolute/path.py")
def test_creating_attribute():
"""Create an Attribute."""
assert Attribute(name="my_object", path="my.dotted.path", file_path="/my/absolute/path.py")
def test_add_child():
"""Add a child."""
parent = Module(name="my_module", path="my.dotted.path", file_path="/my/absolute/path.py")
child = Attribute(name="my_attribute", path="my.dotted.path.my_attribute", file_path="/my/absolute/path.py")
parent.add_child(child)
assert parent.children[0] is child
assert parent.attributes[0] is child
def test_do_not_add_child_if_parent_is_not_self():
"""Don't add a child the parent is not the right one."""
parent = Module(name="my_module", path="my.dotted.path", file_path="/my/absolute/path.py")
child = Attribute(name="my_attribute", path="my.other.path.my_attribute", file_path="/my/absolute/path.py")
parent.add_child(child)
assert not parent.children
assert not parent.attributes
def test_get_root():
"""Get the root object."""
root = Module(name="my_module", path="my.dotted.path", file_path="")
node1 = Class(name="my_class1", path="my.dotted.path.my_class1", file_path="")
node2 = Class(name="my_class2", path="my.dotted.path.my_class2", file_path="")
leaf = Method(name="my_method", path="my.dotted.path.my_class1.my_method", file_path="")
root.add_children([node1, node2])
node1.add_child(leaf)
assert root.root is root
assert node1.root is root
assert node2.root is root
assert leaf.root is root
def test_relative_file_path_for_root():
"""Get the relative file of a shallow object."""
obj = Object(
name="nested_class", path="tests.fixtures.nested_class", file_path=str(FIXTURES_DIR / "nested_class.py")
)
assert obj.relative_file_path == os.path.join("tests", "fixtures", "nested_class.py")
def test_relative_file_path_for_leaf():
"""Get the relative file path of a deep object."""
obj = Loader().get_object_documentation("tests.fixtures.pkg1")
leaf = obj.children[0].children[0].children[0].children[0]
assert leaf.relative_file_path == os.path.join(
"tests", "fixtures", "pkg1", "pkg2", "pkg3", "pkg4", "pkg5", "__init__.py"
)
def test_no_relative_file_path_for_non_existent_package():
"""Cannot find relative file path."""
obj = Object(name="o", path="a.b.o", file_path="/some/non_existent/path/a/b/o.py")
assert not obj.relative_file_path
def test_no_relative_file_path_for_wrong_path():
"""Cannot find relative file path with wrong dotted path."""
obj = Object(name="o", path="wrong.dotted.path", file_path=str(FIXTURES_DIR / "nested_class.py"))
assert not obj.relative_file_path
def test_no_relative_file_path_for_wrong_file_path():
"""Cannot find relative file path with wrong file path."""
obj = Object(name="o", path="tests.fixtures.nested_class", file_path="/wrong/module/path.py")
assert not obj.relative_file_path
def test_add_children():
"""Add multiple children at once."""
root = Object(name="o", path="o", file_path="o.py")
class_ = Class(name="c", path="o.c", file_path="o.py")
attribute = Attribute(name="a", path="o.c.a", file_path="o.py")
class_.add_child(attribute)
root.add_children(
[
# class has wrong path
Class(name="w", path="wrong.path.w", file_path="/wrong/path/w.py"),
# class OK
class_,
# not a direct child,
attribute,
# function OK
Function(name="f", path="o.f", file_path="o.py"),
# not a direct child, not even a child of known child
Method(name="missing_node", path="o.mn.missing_node", file_path="o.py"),
]
)
assert len(root.children) == 2
assert root.classes and root.classes[0] is class_
assert root.functions and root.functions[0].name == "f"
def test_has_contents():
"""Check if an object has contents."""
obj = Loader().get_object_documentation("tests.fixtures.pkg1")
assert obj.has_contents()
obj = Loader().get_object_documentation("tests.fixtures.__init__")
assert not obj.children
assert obj.has_contents() # we specified that the root always 'has contents'
obj = Loader().get_object_documentation("tests.fixtures.no_contents")
assert obj.children
    assert obj.has_contents()
assert not obj.children[0].has_contents()
def test_has_no_contents():
"""Check that an object has no contents."""
pass # TODO
pytkdocs-0.16.1/tests/test_parsers/ 0000775 0000000 0000000 00000000000 14211451604 0017315 5 ustar 00root root 0000000 0000000 pytkdocs-0.16.1/tests/test_parsers/__init__.py 0000664 0000000 0000000 00000000030 14211451604 0021417 0 ustar 00root root 0000000 0000000 """Test the parsers."""
pytkdocs-0.16.1/tests/test_parsers/test_annotations.py 0000664 0000000 0000000 00000001267 14211451604 0023271 0 ustar 00root root 0000000 0000000 """Tests for [the `parsers.attributes` module][pytkdocs.parsers.attributes] on annotations."""
import ast
import pytest
from pytkdocs.parsers.attributes import unparse_annotation
def annotations(annotations_file):
    """Yield each annotation from the fixture file, both bare and as an annotated assignment."""
with open(annotations_file) as fp:
for line in fp:
line = line.rstrip("\n")
yield line # annotation
yield line + " = 0" # annotated assignment
@pytest.mark.parametrize("code", list(annotations("tests/fixtures/parsing/annotations.txt")))
def test_annotation_to_text(code):
node = ast.parse(code, mode="single").body[0]
assert unparse_annotation(node.annotation) == code[3:].replace(" = 0", "") # remove "a: " prefix
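# Note (added, illustrative): for a fixture line such as "a: Dict[str, int]" (hypothetical
# example; the real lines live in tests/fixtures/parsing/annotations.txt), the parametrization
# above exercises unparse_annotation on both "a: Dict[str, int]" and "a: Dict[str, int] = 0",
# and expects the annotation text to round-trip unchanged.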
pytkdocs-0.16.1/tests/test_parsers/test_attributes.py 0000664 0000000 0000000 00000013510 14211451604 0023114 0 ustar 00root root 0000000 0000000 """Tests for [the `parsers.attributes` module][pytkdocs.parsers.attributes]."""
from pytkdocs.parsers.attributes import get_class_attributes, get_instance_attributes, get_module_attributes
from tests.fixtures.parsing import attributes as attr_module
class TestParsing:
"""Test the parser in general."""
def setup(self):
"""Setup reusable attributes."""
self.attributes = get_module_attributes(attr_module)
def test_parse_tuple_target(self):
"""Assert can parse `a, b, c = 0, 0, 0`."""
assert "OK" in self.attributes
assert "WARNING" in self.attributes
assert "CRITICAL" in self.attributes
assert "UNKNOWN" in self.attributes
class TestModuleAttributes:
"""Test the parser for module attributes."""
def setup(self):
"""Setup reusable attributes."""
self.attributes = get_module_attributes(attr_module)
def test_pick_up_attribute_without_docstring(self):
"""Don't pick attributes without docstrings."""
assert "NO_DOC_NO_TYPE" in self.attributes
assert "NO_DOC_NO_VALUE" in self.attributes
assert "NO_DOC" in self.attributes
def test_pick_up_attribute_without_type(self):
"""Pick up attribute without a type."""
assert "NO_TYPE" in self.attributes
assert self.attributes["NO_TYPE"]["docstring"] == "No type."
def test_pick_up_attribute_without_value(self):
"""Pick up attribute without a value."""
assert "NO_VALUE" in self.attributes
assert self.attributes["NO_VALUE"]["docstring"] == "No value."
def test_pick_up_attribute_with_type_and_value(self):
"""Pick up attribute with type and value."""
assert "FULL" in self.attributes
assert self.attributes["FULL"]["docstring"] == "Full."
def test_pick_up_attribute_with_complex_type(self):
"""Pick up attribute with complex type."""
assert "COMPLEX_TYPE" in self.attributes
assert self.attributes["COMPLEX_TYPE"]["docstring"] == "Complex type."
def test_pick_up_attribute_in_if(self):
"""Pick attribute in `if` and `else`."""
assert "IN_IF" in self.attributes
assert self.attributes["IN_IF"]["docstring"] == "In if."
assert "IN_ELSE" in self.attributes
assert self.attributes["IN_ELSE"]["docstring"] == "In else."
def test_pick_up_attribute_in_try_except(self):
"""Pick attribute in `try`, `except`, `else` and `finally`.."""
assert "IN_TRY" in self.attributes
assert self.attributes["IN_TRY"]["docstring"] == "In try."
assert "IN_EXCEPT" in self.attributes
assert self.attributes["IN_EXCEPT"]["docstring"] == "In except."
assert "IN_TRY_ELSE" in self.attributes
assert self.attributes["IN_TRY_ELSE"]["docstring"] == "In try else."
assert "IN_FINALLY" in self.attributes
assert self.attributes["IN_FINALLY"]["docstring"] == "In finally."
    def test_docstring_is_correctly_dedented(self):
        """Check that multi-line attribute docstrings are correctly dedented."""
assert "\n " not in self.attributes["DEDENT"]["docstring"]
class TestClassAttributes:
"""Test the parser for module attributes."""
def setup(self):
"""Setup reusable attributes."""
self.attributes = get_class_attributes(attr_module.E)
def test_pick_up_attribute_in_class(self):
"""Pick up class attribute."""
assert "IN_CLASS" in self.attributes
assert self.attributes["IN_CLASS"]["docstring"] == "In class."
    def test_docstring_is_correctly_dedented(self):
        """Check that multi-line attribute docstrings are correctly dedented."""
assert "\n " not in self.attributes["DEDENT"]["docstring"]
class TestInstanceAttributes:
"""Test the parser for module attributes."""
def setup(self):
"""Setup reusable attributes."""
self.attributes = get_instance_attributes(attr_module.E.__init__)
def test_pick_up_attribute_in_init_method(self):
"""Pick up instance attribute."""
assert "in_init" in self.attributes
assert self.attributes["in_init"]["docstring"] == "In init."
def test_do_not_pick_up_non_attributes(self):
"""Don't pick documented variables in functions."""
assert "non_attribute" not in self.attributes
assert "non_attribute2" not in self.attributes
assert "non_self_attribute" not in self.attributes
assert "non_self_attribute2" not in self.attributes
def test_do_not_pick_up_subscript_attribute(self):
"""Don't pick documented variables in functions."""
assert "d" not in self.attributes
assert "d.subscript" not in self.attributes
assert "subscript" not in self.attributes
    def test_docstring_is_correctly_dedented(self):
        """Check that multi-line attribute docstrings are correctly dedented."""
assert "\n " not in self.attributes["dedent"]["docstring"]
class TestPydanticFields:
"""Test the parser for module attributes."""
def setup(self):
"""Setup reusable attributes."""
self.attributes = get_class_attributes(attr_module.Model)
def test_pick_up_attribute_in_pydantic_model(self):
"""Pick up attribute in Pydantic model."""
assert "in_pydantic_model" in self.attributes
assert self.attributes["in_pydantic_model"]["docstring"] == "In Pydantic model."
assert "model_field" in self.attributes
assert self.attributes["model_field"]["docstring"] == "A model field."
class TestMarshmallowFields:
"""Test the parser for module attributes."""
def setup(self):
"""Setup reusable attributes."""
self.attributes = get_class_attributes(attr_module.MarshmallowSchema)
    def test_pick_up_attribute_in_marshmallow_model(self):
"""Pick up attribute in Marshmallow model."""
assert "in_marshmallow_model" in self.attributes
assert self.attributes["in_marshmallow_model"]["docstring"] == "In Marshmallow model."
assert "model_field" in self.attributes
assert self.attributes["model_field"]["docstring"] == "A model field."
pytkdocs-0.16.1/tests/test_parsers/test_docstrings/ 0000775 0000000 0000000 00000000000 14211451604 0022533 5 ustar 00root root 0000000 0000000 pytkdocs-0.16.1/tests/test_parsers/test_docstrings/__init__.py 0000664 0000000 0000000 00000000043 14211451604 0024641 0 ustar 00root root 0000000 0000000 """Test the docstrings parsers."""
pytkdocs-0.16.1/tests/test_parsers/test_docstrings/test_google.py 0000664 0000000 0000000 00000051412 14211451604 0025423 0 ustar 00root root 0000000 0000000 """Tests for [the `parsers.docstrings.google` module][pytkdocs.parsers.docstrings.google]."""
import inspect
from textwrap import dedent
from typing import Iterator
from pytkdocs.loader import Loader
from pytkdocs.parsers.docstrings.base import Section
from pytkdocs.parsers.docstrings.google import Google
from pytkdocs.serializer import serialize_attribute
class DummyObject:
path = "o"
def parse(
docstring,
signature=None,
return_type=inspect.Signature.empty,
admonitions=True,
trim_doctest=False,
):
"""Helper to parse a doctring."""
parser = Google(replace_admonitions=admonitions, trim_doctest_flags=trim_doctest)
return parser.parse(
dedent(docstring).strip(),
context={"obj": DummyObject(), "signature": signature, "type": return_type},
)
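# Note (added for clarity): parse() returns a (sections, errors) tuple, where sections is a
# list of Section objects (e.g. Section.Type.PARAMETERS, Section.Type.RETURN) and errors is a
# list of human-readable warning strings, as the assertions below rely on.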
def test_simple_docstring():
"""Parse a simple docstring."""
sections, errors = parse("A simple docstring.")
assert len(sections) == 1
assert not errors
def test_multi_line_docstring():
"""Parse a multi-line docstring."""
sections, errors = parse(
"""
A somewhat longer docstring.
Blablablabla.
"""
)
assert len(sections) == 1
assert not errors
def test_sections_without_signature():
"""Parse a docstring without a signature."""
sections, errors = parse(
"""
Sections without signature.
Parameters:
void: SEGFAULT.
niet: SEGFAULT.
nada: SEGFAULT.
rien: SEGFAULT.
Keyword Args:
keywd: SEGFAULT.
Exceptions:
GlobalError: when nothing works as expected.
Returns:
Itself.
"""
)
assert len(sections) == 5
assert len(errors) == 6 # missing annotations for params and return
for error in errors[:-1]:
assert "param" in error
assert "return" in errors[-1]
def test_property_docstring():
"""Parse a property docstring."""
class_ = Loader().get_object_documentation("tests.fixtures.parsing.docstrings.NotDefinedYet")
prop = class_.attributes[0]
sections, errors = prop.docstring_sections, prop.docstring_errors
assert len(sections) == 2
assert not errors
def test_function_without_annotations():
"""Parse a function docstring without signature annotations."""
def f(x, y, *, z):
"""
This function has no annotations.
Parameters:
x: X value.
y: Y value.
Keyword Args:
z: Z value.
Returns:
Sum X + Y + Z.
"""
return x + y + z
sections, errors = parse(inspect.getdoc(f), inspect.signature(f))
assert len(sections) == 4
assert len(errors) == 1
assert "No return type/annotation in" in errors[0]
def test_function_with_annotations():
"""Parse a function docstring with signature annotations."""
def f(x: int, y: int, *, z: int) -> int:
"""
This function has annotations.
Parameters:
x: X value.
y: Y value.
Keyword Arguments:
z: Z value.
Returns:
Sum X + Y.
"""
return x + y
sections, errors = parse(inspect.getdoc(f), inspect.signature(f))
assert len(sections) == 4
assert not errors
def test_function_with_examples_trim_doctest():
"""Parse example docstring with trim_doctest_flags option."""
def f(x: int) -> int:
"""Test function.
Example:
We want to skip the following test.
>>> 1 + 1 == 3 # doctest: +SKIP
True
And then a few more examples here:
>>> print("a\\n\\nb")
            a
            <BLANKLINE>
            b
>>> 1 + 1 == 2 # doctest: +SKIP
>>> print(list(range(1, 100))) # doctest: +ELLIPSIS
[1, 2, ..., 98, 99]
"""
return x
sections, errors = parse(
inspect.getdoc(f),
inspect.signature(f),
trim_doctest=True,
)
assert len(sections) == 2
assert len(sections[1].value) == 4
assert not errors
# Verify that doctest flags have indeed been trimmed
example_str = sections[1].value[1][1]
assert "# doctest: +SKIP" not in example_str
example_str = sections[1].value[3][1]
assert "" not in example_str
assert "\n>>> print(list(range(1, 100)))\n" in example_str
def test_function_with_examples():
"""Parse a function docstring with examples."""
def f(x: int, y: int) -> int:
"""
This function has annotations.
Examples:
Some examples that will create an unified code block:
>>> 2 + 2 == 5
False
>>> print("examples")
"examples"
This is just a random comment in the examples section.
These examples will generate two different code blocks. Note the blank line.
>>> print("I'm in the first code block!")
"I'm in the first code block!"
>>> print("I'm in other code block!")
"I'm in other code block!"
We also can write multiline examples:
>>> x = 3 + 2
>>> y = x + 10
>>> y
15
This is just a typical Python code block:
```python
print("examples")
return 2 + 2
```
Even if it contains doctests, the following block is still considered a normal code-block.
```python
>>> print("examples")
"examples"
>>> 2 + 2
4
```
The blank line before an example is optional.
>>> x = 3
>>> y = "apple"
>>> z = False
>>> l = [x, y, z]
>>> my_print_list_function(l)
3
"apple"
False
"""
return x + y
sections, errors = parse(inspect.getdoc(f), inspect.signature(f))
assert len(sections) == 2
assert len(sections[1].value) == 9
assert not errors
def test_types_in_docstring():
"""Parse types in docstring."""
def f(x, y, *, z):
"""
The types are written in the docstring.
Parameters:
x (int): X value.
y (int): Y value.
Keyword Args:
z (int): Z value.
Returns:
int: Sum X + Y + Z.
"""
return x + y + z
sections, errors = parse(inspect.getdoc(f), inspect.signature(f))
assert len(sections) == 4
assert not errors
assert sections[0].type == Section.Type.MARKDOWN
assert sections[1].type == Section.Type.PARAMETERS
assert sections[2].type == Section.Type.KEYWORD_ARGS
assert sections[3].type == Section.Type.RETURN
x, y = sections[1].value
(z,) = sections[2].value
r = sections[3].value
assert x.name == "x"
assert x.annotation == "int"
assert x.description == "X value."
assert x.kind is inspect.Parameter.POSITIONAL_OR_KEYWORD
assert x.default is inspect.Signature.empty
assert y.name == "y"
assert y.annotation == "int"
assert y.description == "Y value."
assert y.kind is inspect.Parameter.POSITIONAL_OR_KEYWORD
assert y.default is inspect.Signature.empty
assert z.name == "z"
assert z.annotation == "int"
assert z.description == "Z value."
assert z.kind is inspect.Parameter.KEYWORD_ONLY
assert z.default is inspect.Signature.empty
assert r.annotation == "int"
assert r.description == "Sum X + Y + Z."
def test_types_and_optional_in_docstring():
"""Parse optional types in docstring."""
def f(x=1, y=None, *, z=None):
"""
The types are written in the docstring.
Parameters:
x (int): X value.
y (int, optional): Y value.
Keyword Args:
z (int, optional): Z value.
Returns:
int: Sum X + Y + Z.
"""
return x + (y or 1) + (z or 1)
sections, errors = parse(inspect.getdoc(f), inspect.signature(f))
assert len(sections) == 4
assert not errors
assert sections[0].type == Section.Type.MARKDOWN
assert sections[1].type == Section.Type.PARAMETERS
assert sections[2].type == Section.Type.KEYWORD_ARGS
x, y = sections[1].value
(z,) = sections[2].value
assert x.name == "x"
assert x.annotation == "int"
assert x.description == "X value."
assert x.kind is inspect.Parameter.POSITIONAL_OR_KEYWORD
assert x.default == 1
assert y.name == "y"
assert y.annotation == "int"
assert y.description == "Y value."
assert y.kind is inspect.Parameter.POSITIONAL_OR_KEYWORD
assert y.default is None
assert z.name == "z"
assert z.annotation == "int"
assert z.description == "Z value."
assert z.kind is inspect.Parameter.KEYWORD_ONLY
assert z.default is None
def test_types_in_signature_and_docstring():
"""Parse types in both signature and docstring. Should prefer the docstring type"""
def f(x: int, y: int, *, z: int) -> int:
"""
The types are written both in the signature and in the docstring.
Parameters:
x (str): X value.
y (str): Y value.
Keyword Args:
z (str): Z value.
Returns:
str: Sum X + Y + Z.
"""
return x + y + z
sections, errors = parse(inspect.getdoc(f), inspect.signature(f))
assert len(sections) == 4
assert not errors
assert sections[0].type == Section.Type.MARKDOWN
assert sections[1].type == Section.Type.PARAMETERS
assert sections[2].type == Section.Type.KEYWORD_ARGS
assert sections[3].type == Section.Type.RETURN
x, y = sections[1].value
(z,) = sections[2].value
r = sections[3].value
assert x.name == "x"
assert x.annotation == "str"
assert x.description == "X value."
assert x.kind is inspect.Parameter.POSITIONAL_OR_KEYWORD
assert x.default is inspect.Signature.empty
assert y.name == "y"
assert y.annotation == "str"
assert y.description == "Y value."
assert y.kind is inspect.Parameter.POSITIONAL_OR_KEYWORD
assert y.default is inspect.Signature.empty
assert z.name == "z"
assert z.annotation == "str"
assert z.description == "Z value."
assert z.kind is inspect.Parameter.KEYWORD_ONLY
assert z.default is inspect.Signature.empty
assert r.annotation == "str"
assert r.description == "Sum X + Y + Z."
def test_close_sections():
"""Parse sections without blank lines in between."""
def f(x, y, z):
"""
Parameters:
x: X.
Parameters:
y: Y.
Parameters:
z: Z.
Exceptions:
Error2: error.
Exceptions:
Error1: error.
Returns:
1.
Returns:
2.
"""
return x + y + z
sections, errors = parse(inspect.getdoc(f), inspect.signature(f))
assert len(sections) == 7
assert len(errors) == 2 # no return type annotations
def test_code_blocks():
"""Parse code blocks."""
def f(s): # noqa: D300,D301 (escape sequences)
"""
This docstring contains a docstring in a code block o_O!
```python
\"\"\"
This docstring is contained in another docstring O_o!
Parameters:
s: A string.
\"\"\"
```
"""
return s
sections, errors = parse(inspect.getdoc(f), inspect.signature(f))
assert len(sections) == 1
assert not errors
def test_indented_code_block():
"""Parse indented code blocks."""
def f(s): # noqa: D300,D301 (escape sequences)
"""
This docstring contains a docstring in a code block o_O!
\"\"\"
This docstring is contained in another docstring O_o!
Parameters:
s: A string.
\"\"\"
"""
return s
sections, errors = parse(inspect.getdoc(f), inspect.signature(f))
assert len(sections) == 1
assert not errors
def test_extra_parameter():
"""Warn on extra parameter in docstring."""
def f(x):
"""
Parameters:
x: Integer.
y: Integer.
"""
return x
sections, errors = parse(inspect.getdoc(f), inspect.signature(f))
assert len(sections) == 1
assert len(errors) == 1
assert "No type" in errors[0]
def test_missing_parameter():
"""Don't warn on missing parameter in docstring."""
# FIXME: could warn
def f(x, y):
"""
Parameters:
x: Integer.
"""
return x + y
sections, errors = parse(inspect.getdoc(f), inspect.signature(f))
assert len(sections) == 1
assert not errors
def test_param_line_without_colon():
"""Warn when missing colon."""
def f(x: int):
"""
Parameters:
x is an integer.
"""
return x
sections, errors = parse(inspect.getdoc(f), inspect.signature(f))
assert not sections # getting x fails, so the section is empty and discarded
assert len(errors) == 2
assert "pair" in errors[0]
assert "Empty" in errors[1]
def test_param_line_without_colon_keyword_only():
"""Warn when missing colon."""
def f(*, x: int):
"""
Keyword Args:
x is an integer.
"""
return x
sections, errors = parse(inspect.getdoc(f), inspect.signature(f))
assert not sections # getting x fails, so the section is empty and discarded
assert len(errors) == 2
assert "pair" in errors[0]
assert "Empty" in errors[1]
def test_admonitions():
"""Parse admonitions."""
def f():
"""
Note:
Hello.
Note: With title.
Hello again.
Something:
Something.
"""
sections, errors = parse(inspect.getdoc(f), inspect.signature(f))
assert len(sections) == 1
assert not errors
def test_invalid_sections():
"""Warn on invalid (empty) sections."""
def f():
"""
Parameters:
Exceptions:
Exceptions:
Returns:
Note:
Important:
"""
sections, errors = parse(inspect.getdoc(f), inspect.signature(f))
assert len(sections) == 1
for error in errors[:3]:
assert "Empty" in error
assert "Empty return section at line" in errors[3]
assert "Empty" in errors[-1]
def test_multiple_lines_in_sections_items():
"""Parse multi-line item description."""
def f(p: str, q: str):
"""
Hi.
Arguments:
p: This argument
has a description
spawning on multiple lines.
It even has blank lines in it.
Some of these lines
are indented for no reason.
q:
What if the first line is blank?
"""
return p + q
sections, errors = parse(inspect.getdoc(f), inspect.signature(f))
assert len(sections) == 2
assert len(sections[1].value) == 2
assert errors
for error in errors:
assert "should be 4 * 2 = 8 spaces, not" in error
def test_parse_args_kwargs():
"""Parse args and kwargs."""
def f(a, *args, **kwargs):
"""
Arguments:
a: a parameter.
*args: args parameters.
**kwargs: kwargs parameters.
"""
return 1
sections, errors = parse(inspect.getdoc(f), inspect.signature(f))
assert len(sections) == 1
expected_parameters = {"a": "a parameter.", "*args": "args parameters.", "**kwargs": "kwargs parameters."}
for param in sections[0].value:
assert param.name in expected_parameters
assert expected_parameters[param.name] == param.description
assert not errors
def test_parse_args_kwargs_keyword_only():
"""Parse args and kwargs."""
def f(a, *args, **kwargs):
"""
Arguments:
a: a parameter.
*args: args parameters.
Keyword Args:
**kwargs: kwargs parameters.
"""
return 1
sections, errors = parse(inspect.getdoc(f), inspect.signature(f))
assert len(sections) == 2
expected_parameters = {"a": "a parameter.", "*args": "args parameters."}
for param in sections[0].value:
assert param.name in expected_parameters
assert expected_parameters[param.name] == param.description
expected_parameters = {"**kwargs": "kwargs parameters."}
for param in sections[1].value:
assert param.name in expected_parameters
assert expected_parameters[param.name] == param.description
assert not errors
def test_different_indentation():
"""Parse different indentations, warn on confusing indentation."""
def f():
"""
Hello.
Raises:
StartAt5: this section's items starts with 5 spaces of indentation.
Well indented continuation line.
Badly indented continuation line (will trigger an error).
Empty lines are preserved, as well as extra-indentation (this line is a code block).
AnyOtherLine: ...starting with exactly 5 spaces is a new item.
AnyLine: ...indented with less than 5 spaces signifies the end of the section.
"""
sections, errors = parse(inspect.getdoc(f), inspect.signature(f))
assert len(sections) == 3
assert len(sections[1].value) == 2
assert sections[1].value[0].description == (
"this section's items starts with 5 spaces of indentation.\n"
"Well indented continuation line.\n"
"Badly indented continuation line (will trigger an error).\n"
"\n"
" Empty lines are preserved, as well as extra-indentation (this line is a code block)."
)
assert sections[2].value == " AnyLine: ...indented with less than 5 spaces signifies the end of the section."
assert len(errors) == 1
assert "should be 5 * 2 = 10 spaces, not 6" in errors[0]
def test_parse_module_attributes_section():
"""Parse attributes section in modules."""
loader = Loader()
obj = loader.get_object_documentation("tests.fixtures.docstring_attributes_section")
assert len(obj.docstring_sections) == 2
assert not obj.docstring_errors
attr_section = obj.docstring_sections[1]
assert attr_section.type == Section.Type.ATTRIBUTES
assert len(attr_section.value) == 5
expected = [
{"name": "A", "annotation": "int", "description": "Alpha."},
{"name": "B", "annotation": "bytes", "description": "Beta."},
{"name": "C", "annotation": "bool", "description": "Gamma."},
{"name": "D", "annotation": "", "description": "Delta."},
{"name": "E", "annotation": "float", "description": "Epsilon."},
]
assert [serialize_attribute(attr) for attr in attr_section.value] == expected
def test_docstring_with_yield_section():
"""Parse Yields section."""
def f():
"""A useless range wrapper.
Yields:
int: Integers.
"""
yield from range(10)
sections, errors = parse(inspect.getdoc(f), inspect.signature(f))
assert len(sections) == 2
annotated = sections[1].value
assert annotated.annotation == "int"
assert annotated.description == "Integers."
assert not errors
def test_docstring_with_yield_section_and_return_annotation():
"""Parse Yields section."""
def f() -> Iterator[int]:
"""A useless range wrapper.
Yields:
Integers.
"""
yield from range(10)
sections, errors = parse(inspect.getdoc(f), inspect.signature(f))
assert len(sections) == 2
annotated = sections[1].value
assert annotated.annotation is Iterator[int]
assert annotated.description == "Integers."
assert not errors
def test_keyword_args_no_type():
"""Parse types for keyword arguments."""
def f(**kwargs):
"""Do nothing.
Keyword arguments:
a: No type.
"""
sections, errors = parse(inspect.getdoc(f), inspect.signature(f))
assert len(sections) == 2
kwargs = sections[1].value
assert kwargs[0].name == "a"
assert kwargs[0].annotation is inspect.Parameter.empty
assert kwargs[0].description == "No type."
assert kwargs[0].kind is inspect.Parameter.KEYWORD_ONLY
assert kwargs[0].default is inspect.Parameter.empty
assert len(errors) == 1
assert "No type annotation for parameter" in errors[0]
def test_keyword_args_type():
"""Parse types for keyword arguments."""
def f(**kwargs):
"""Do nothing.
Keyword arguments:
a (int): Typed.
"""
sections, errors = parse(inspect.getdoc(f), inspect.signature(f))
assert len(sections) == 2
kwargs = sections[1].value
assert kwargs[0].name == "a"
assert kwargs[0].annotation == "int"
assert kwargs[0].description == "Typed."
assert kwargs[0].kind is inspect.Parameter.KEYWORD_ONLY
assert kwargs[0].default is inspect.Parameter.empty
assert not errors
pytkdocs-0.16.1/tests/test_parsers/test_docstrings/test_numpy.py 0000664 0000000 0000000 00000032174 14211451604 0025323 0 ustar 00root root 0000000 0000000 """Tests for [the `parsers.docstrings.numpy` module][pytkdocs.parsers.docstrings.numpy]."""
import inspect
from textwrap import dedent
from pytkdocs.loader import Loader
from pytkdocs.parsers.docstrings.base import Section
from pytkdocs.parsers.docstrings.numpy import Numpy
class DummyObject:
path = "o"
def parse(
docstring,
signature=None,
return_type=inspect.Signature.empty,
trim_doctest=False,
):
"""Helper to parse a doctring."""
parser = Numpy(trim_doctest_flags=trim_doctest)
return parser.parse(
dedent(docstring).strip(),
context={"obj": DummyObject(), "signature": signature, "type": return_type},
)
def test_simple_docstring():
"""Parse a simple docstring."""
sections, errors = parse("A simple docstring.")
assert len(sections) == 1
assert not errors
def test_multi_line_docstring():
"""Parse a multi-line docstring."""
sections, errors = parse(
"""
A somewhat longer docstring.
Blablablabla.
"""
)
assert len(sections) == 1
assert not errors
def test_sections_without_signature():
"""Parse a docstring without a signature."""
# type of return value always required
sections, errors = parse(
"""
Sections without signature.
Parameters
----------
void :
SEGFAULT.
niet :
SEGFAULT.
nada :
SEGFAULT.
rien :
SEGFAULT.
Raises
------
GlobalError
when nothing works as expected.
Returns
-------
bool
Itself.
"""
)
assert len(sections) == 4
assert len(errors) == 4 # missing annotations for params
for error in errors:
assert "param" in error
def test_sections_without_description():
"""Parse a docstring without descriptions."""
# type of return value always required
sections, errors = parse(
"""
Sections without descriptions.
Parameters
----------
void : str
niet : str
Raises
------
GlobalError
Returns
-------
bool
"""
)
# Assert that errors are as expected
assert len(sections) == 4
assert len(errors) == 6
for error in errors[:4]:
assert "param" in error
assert "exception" in errors[4]
assert "return description" in errors[5]
# Assert that no descriptions are ever None (can cause exceptions downstream)
assert sections[1].type is Section.Type.PARAMETERS
for p in sections[1].value:
assert p.description is not None
assert sections[2].type is Section.Type.EXCEPTIONS
for p in sections[2].value:
assert p.description is not None
assert sections[3].type is Section.Type.RETURN
assert sections[3].value.description is not None
def test_property_docstring():
"""Parse a property docstring."""
class_ = Loader().get_object_documentation("tests.fixtures.parsing.docstrings.NotDefinedYet")
prop = class_.attributes[0]
sections, errors = prop.docstring_sections, prop.docstring_errors
assert len(sections) == 2
assert not errors
def test_function_without_annotations():
"""Parse a function docstring without signature annotations."""
def f(x, y):
"""
This function has no annotations.
Parameters
----------
x:
X value.
y:
Y value.
Returns
-------
float
Sum X + Y.
"""
return x + y
sections, errors = parse(inspect.getdoc(f), inspect.signature(f))
assert len(sections) == 3
assert not errors
def test_function_with_annotations():
"""Parse a function docstring with signature annotations."""
def f(x: int, y: int) -> int:
"""
This function has annotations.
Parameters
----------
x:
X value.
y:
Y value.
Returns
-------
int
Sum X + Y.
"""
return x + y
sections, errors = parse(inspect.getdoc(f), inspect.signature(f))
assert len(sections) == 3
assert not errors
def test_function_with_examples_trim_doctest():
"""Parse example docstring with trim_doctest_flags option."""
def f(x: int) -> int:
"""Test function.
Example
-------
We want to skip the following test.
>>> 1 + 1 == 3 # doctest: +SKIP
True
And then a few more examples here:
>>> print("a\\n\\nb")
        a
        <BLANKLINE>
        b
>>> 1 + 1 == 2 # doctest: +SKIP
>>> print(list(range(1, 100))) # doctest: +ELLIPSIS
[1, 2, ..., 98, 99]
"""
return x
sections, errors = parse(
inspect.getdoc(f),
inspect.signature(f),
trim_doctest=True,
)
assert len(sections) == 2
assert len(sections[1].value) == 4
assert not errors
# Verify that doctest flags have indeed been trimmed
example_str = sections[1].value[1][1]
assert "# doctest: +SKIP" not in example_str
example_str = sections[1].value[3][1]
assert "" not in example_str
assert "\n>>> print(list(range(1, 100)))\n" in example_str
def test_function_with_examples():
"""Parse a function docstring with examples."""
def f(x: int, y: int) -> int:
"""
This function has annotations.
Examples
--------
Some examples that will create an unified code block:
>>> 2 + 2 == 5
False
>>> print("examples")
"examples"
This is just a random comment in the examples section.
These examples will generate two different code blocks. Note the blank line.
>>> print("I'm in the first code block!")
"I'm in the first code block!"
>>> print("I'm in other code block!")
"I'm in other code block!"
We also can write multiline examples:
>>> x = 3 + 2
>>> y = x + 10
>>> y
15
This is just a typical Python code block:
```python
print("examples")
return 2 + 2
```
Even if it contains doctests, the following block is still considered a normal code-block.
```python
>>> print("examples")
"examples"
>>> 2 + 2
4
```
The blank line before an example is optional.
>>> x = 3
>>> y = "apple"
>>> z = False
>>> l = [x, y, z]
>>> my_print_list_function(l)
3
"apple"
False
"""
return x + y
sections, errors = parse(inspect.getdoc(f), inspect.signature(f))
assert len(sections) == 2
assert len(sections[1].value) == 9
assert not errors
def test_types_in_docstring():
"""Parse types in docstring."""
def f(x, y):
"""
The types are written in the docstring.
Parameters
----------
x : int
X value.
y : int
Y value.
Returns
-------
int
Sum X + Y.
"""
return x + y
sections, errors = parse(inspect.getdoc(f), inspect.signature(f))
assert len(sections) == 3
assert not errors
x, y = sections[1].value
r = sections[2].value
assert x.name == "x"
assert x.annotation == "int"
assert x.description == "X value."
assert x.kind is inspect.Parameter.POSITIONAL_OR_KEYWORD
assert x.default is inspect.Signature.empty
assert y.name == "y"
assert y.annotation == "int"
assert y.description == "Y value."
assert y.kind is inspect.Parameter.POSITIONAL_OR_KEYWORD
assert y.default is inspect.Signature.empty
assert r.annotation == "int"
assert r.description == "Sum X + Y."
def test_types_and_optional_in_docstring():
"""Parse optional types in docstring."""
def f(x=1, y=None):
"""
The types are written in the docstring.
Parameters
----------
x : int
X value.
y : int, optional
Y value.
Returns
-------
int
Sum X + Y.
"""
return x + (y or 1)
sections, errors = parse(inspect.getdoc(f), inspect.signature(f))
assert len(sections) == 3
assert not errors
x, y = sections[1].value
assert x.name == "x"
assert x.annotation == "int"
assert x.description == "X value."
assert x.kind is inspect.Parameter.POSITIONAL_OR_KEYWORD
assert x.default == 1
assert y.name == "y"
assert y.annotation == "int"
assert y.description == "Y value."
assert y.kind is inspect.Parameter.POSITIONAL_OR_KEYWORD
assert y.default is None
def test_types_in_signature_and_docstring():
"""Parse types in both signature and docstring."""
def f(x: int, y: int) -> int:
"""
The types are written both in the signature and in the docstring.
Parameters
----------
x : int
X value.
y : int
Y value.
Returns
-------
int
Sum X + Y.
"""
return x + y
sections, errors = parse(inspect.getdoc(f), inspect.signature(f))
assert len(sections) == 3
assert not errors
def test_close_sections():
"""Parse sections without blank lines in between."""
def f(x, y, z):
"""
Parameters
----------
x :
X
y :
Y
z :
Z
Raises
------
Error2
error.
Error1
error.
Returns
-------
str
value
"""
return x + y + z
sections, errors = parse(inspect.getdoc(f), inspect.signature(f))
assert len(sections) == 3
assert not errors
# test_code_blocks was removed as docstrings within a code block
# are not applicable to numpy docstrings
def test_extra_parameter():
"""Warn on extra parameter in docstring."""
def f(x):
"""
Parameters
----------
x :
Integer.
y :
Integer.
"""
return x
sections, errors = parse(inspect.getdoc(f), inspect.signature(f))
assert len(sections) == 1
assert len(errors) == 1
assert "No type" in errors[0]
def test_missing_parameter():
"""Don't warn on missing parameter in docstring."""
# FIXME: could warn
def f(x, y):
"""
Parameters
----------
x :
Integer.
"""
return x + y
sections, errors = parse(inspect.getdoc(f), inspect.signature(f))
assert len(sections) == 1
assert not errors
def test_multiple_lines_in_sections_items():
"""Parse multi-line item description."""
def f(p: str, q: str):
"""
Hi.
Parameters
----------
p :
This argument
has a description
spawning on multiple lines.
It even has blank lines in it.
Some of these lines
are indented for no reason.
q :
What if the first line is blank?
"""
return p + q
sections, errors = parse(inspect.getdoc(f), inspect.signature(f))
assert len(sections) == 2
assert len(sections[1].value) == 2
# numpy docstring parameter descriptions can be parsed even if misindented
assert not errors
def test_parse_args_kwargs():
"""Parse args and kwargs."""
def f(a, *args, **kwargs):
"""
Parameters
----------
a :
a parameter.
*args :
args parameters.
**kwargs :
kwargs parameters.
"""
return 1
sections, errors = parse(inspect.getdoc(f), inspect.signature(f))
assert len(sections) == 1
expected_parameters = {
"a": "a parameter.",
"*args": "args parameters.",
"**kwargs": "kwargs parameters.",
}
for param in sections[0].value:
assert param.name in expected_parameters
assert expected_parameters[param.name] == param.description
assert not errors
def test_different_indentation():
"""Parse different indentations, warn on confusing indentation."""
def f():
"""
Hello.
Raises
------
StartAt5
this section's items starts with x spaces of indentation.
Well indented continuation line.
Badly indented continuation line (will not trigger an error).
Empty lines are preserved, as well as extra-indentation (this line is a code block).
AnyOtherLine
...starting with exactly 5 spaces is a new item.
"""
sections, errors = parse(inspect.getdoc(f), inspect.signature(f))
assert len(sections) == 2
assert len(sections[1].value) == 2
assert sections[1].value[0].description == (
"this section's items starts with x spaces of indentation.\n"
"Well indented continuation line.\n"
" Badly indented continuation line (will not trigger an error).\n"
"\n"
" Empty lines are preserved, as well as extra-indentation (this line is a code block)."
)
assert not errors
pytkdocs-0.16.1/tests/test_parsers/test_docstrings/test_restructured_text.py 0000664 0000000 0000000 00000070130 14211451604 0027744 0 ustar 00root root 0000000 0000000 """Tests for [the `parsers.docstrings.restructured_text` module][pytkdocs.parsers.docstrings.restructured_text]."""
import inspect
from textwrap import dedent
import pytest
from pytkdocs.loader import Loader
from pytkdocs.objects import Object
from pytkdocs.parsers.docstrings.base import AnnotatedObject, Attribute, Parameter, Section, empty
from pytkdocs.parsers.docstrings.restructured_text import RestructuredText
from pytkdocs.serializer import serialize_attribute
class DummyObject:
def __init__(self, signature, return_type):
self.path = "o"
self.signature = signature
self.type = return_type
SOME_NAME = "foo"
SOME_TEXT = "descriptive test text"
SOME_EXTRA_TEXT = "more test text"
SOME_EXCEPTION_NAME = "SomeException"
SOME_OTHER_EXCEPTION_NAME = "SomeOtherException"
def dedent_strip(text: str) -> str:
return dedent(text).strip()
def parse(obj, strip_docstring=True):
"""Helper to parse a docstring."""
return parse_detailed(inspect.getdoc(obj), inspect.signature(obj), strip_docstring=strip_docstring)
def parse_detailed(docstring, signature=None, return_type=inspect.Signature.empty, strip_docstring=True):
"""Helper to parse a docstring."""
docstring = dedent_strip(docstring) if strip_docstring else dedent(docstring)
return RestructuredText().parse(docstring, {"obj": DummyObject(signature, return_type)})
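# A minimal usage sketch of the helpers above (the section layout shown here is an
# assumption drawn from the tests below, not a documented guarantee of the parser):
#
#     sections, errors = parse_detailed(":param foo: text")
#     # sections[0] -> Section.Type.MARKDOWN (empty description)
#     # sections[1] -> Section.Type.PARAMETERS with a single Parameter("foo", ...)
#
# DummyObject stands in for a real pytkdocs object so the parser can read the
# signature and return type that it cross-checks against the docstring fields.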
def assert_parameter_equal(actual: Parameter, expected: Parameter) -> None:
assert actual.name == expected.name
assert_annotated_obj_equal(actual, expected)
assert actual.kind == expected.kind
assert actual.default == expected.default
def assert_attribute_equal(actual: Attribute, expected: Attribute) -> None:
assert actual.name == expected.name
assert_annotated_obj_equal(actual, expected)
def assert_annotated_obj_equal(actual: AnnotatedObject, expected: AnnotatedObject) -> None:
assert actual.annotation == expected.annotation
assert actual.description == expected.description
def get_rst_object_documentation(dotted_fixture_subpath) -> Object:
return Loader(docstring_style="restructured-text").get_object_documentation(
f"tests.fixtures.parsing.restructured_text.{dotted_fixture_subpath}"
)
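# Fixture paths passed to this helper are dotted subpaths resolved under
# tests.fixtures.parsing.restructured_text; as used below, a ":" separates the
# fixture module from a member inside it, e.g. "class_docstrings:NotDefinedYet"
# loads the NotDefinedYet class from the class_docstrings fixture module.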
@pytest.mark.parametrize(
"docstring",
[
"One line docstring description",
"""
Multiple line docstring description.
With more text.
""",
],
)
def test_parse__description_only_docstring__single_markdown_section(docstring):
sections, errors = parse_detailed(docstring)
assert len(sections) == 1
assert sections[0].type == Section.Type.MARKDOWN
assert sections[0].value == dedent_strip(docstring)
assert not errors
def test_parse__no_description__single_markdown_section():
sections, errors = parse_detailed("")
assert len(sections) == 1
assert sections[0].type == Section.Type.MARKDOWN
assert sections[0].value == ""
assert not errors
def test_parse__multiple_blank_lines_before_description__single_markdown_section():
sections, errors = parse_detailed(
"""
Now text""",
strip_docstring=False,
)
assert len(sections) == 1
assert sections[0].type == Section.Type.MARKDOWN
assert sections[0].value == "Now text"
assert not errors
def test_parse__description_with_initial_newline__single_markdown_section():
docstring = """
With initial newline
"""
sections, errors = parse_detailed(docstring, strip_docstring=False)
assert len(sections) == 1
assert sections[0].type == Section.Type.MARKDOWN
assert sections[0].value == dedent_strip(docstring)
assert not errors
def test_parse__param_field__param_section():
"""Parse a simple docstring."""
sections, errors = parse_detailed(
f"""
Docstring with one line param.
:param {SOME_NAME}: {SOME_TEXT}
"""
)
assert len(sections) == 2
assert sections[1].type == Section.Type.PARAMETERS
assert_parameter_equal(
sections[1].value[0], Parameter(SOME_NAME, annotation=empty, description=SOME_TEXT, kind=empty)
)
def test_parse__only_param_field__empty_markdown():
sections, errors = parse_detailed(":param foo: text")
assert len(sections) == 2
assert sections[0].type == Section.Type.MARKDOWN
assert sections[0].value == ""
@pytest.mark.parametrize(
"param_directive_name",
[
"param",
"parameter",
"arg",
"argument",
"key",
"keyword",
],
)
def test_parse__all_param_names__param_section(param_directive_name):
sections, errors = parse_detailed(
f"""
Docstring with one line param.
:{param_directive_name} {SOME_NAME}: {SOME_TEXT}
"""
)
assert len(sections) == 2
assert sections[1].type == Section.Type.PARAMETERS
assert_parameter_equal(
sections[1].value[0], Parameter(SOME_NAME, annotation=empty, description=SOME_TEXT, kind=empty)
)
@pytest.mark.parametrize(
"docstring",
[
f"""
Docstring with param with continuation, no indent.
:param {SOME_NAME}: {SOME_TEXT}
{SOME_EXTRA_TEXT}
""",
f"""
Docstring with param with continuation, with indent.
:param {SOME_NAME}: {SOME_TEXT}
{SOME_EXTRA_TEXT}
""",
],
)
def test_parse__param_field_multi_line__param_section(docstring):
"""Parse a simple docstring."""
sections, errors = parse_detailed(docstring)
assert len(sections) == 2
assert sections[1].type == Section.Type.PARAMETERS
assert_parameter_equal(
sections[1].value[0],
Parameter(SOME_NAME, annotation=empty, description=f"{SOME_TEXT} {SOME_EXTRA_TEXT}", kind=empty),
)
def test_parse__param_field_for_function__param_section_with_kind():
"""Parse a simple docstring."""
def f(foo):
"""
Docstring with line continuation.
:param foo: descriptive test text
"""
sections, errors = parse(f)
assert len(sections) == 2
assert sections[1].type == Section.Type.PARAMETERS
assert_parameter_equal(
sections[1].value[0],
Parameter(SOME_NAME, annotation=empty, description=SOME_TEXT, kind=inspect.Parameter.POSITIONAL_OR_KEYWORD),
)
def test_parse__param_field_docs_type__param_section_with_type():
"""Parse a simple docstring."""
def f(foo):
"""
Docstring with line continuation.
:param str foo: descriptive test text
"""
sections, errors = parse(f)
assert len(sections) == 2
assert sections[1].type == Section.Type.PARAMETERS
assert_parameter_equal(
sections[1].value[0],
Parameter(SOME_NAME, annotation="str", description=SOME_TEXT, kind=inspect.Parameter.POSITIONAL_OR_KEYWORD),
)
def test_parse__param_field_type_field__param_section_with_type():
"""Parse a simple docstring."""
def f(foo):
"""
Docstring with line continuation.
:param foo: descriptive test text
:type foo: str
"""
sections, errors = parse(f)
assert len(sections) == 2
assert sections[1].type == Section.Type.PARAMETERS
assert_parameter_equal(
sections[1].value[0],
Parameter(SOME_NAME, annotation="str", description=SOME_TEXT, kind=inspect.Parameter.POSITIONAL_OR_KEYWORD),
)
def test_parse__param_field_type_field_first__param_section_with_type():
"""Parse a simple docstring."""
def f(foo):
"""
Docstring with line continuation.
:type foo: str
:param foo: descriptive test text
"""
sections, errors = parse(f)
assert len(sections) == 2
assert sections[1].type == Section.Type.PARAMETERS
assert_parameter_equal(
sections[1].value[0],
Parameter(SOME_NAME, annotation="str", description=SOME_TEXT, kind=inspect.Parameter.POSITIONAL_OR_KEYWORD),
)
def test_parse__param_field_type_field_or_none__param_section_with_optional():
"""Parse a simple docstring."""
def f(foo):
"""
Docstring with line continuation.
:param foo: descriptive test text
:type foo: str or None
"""
sections, errors = parse(f)
assert len(sections) == 2
assert sections[1].type == Section.Type.PARAMETERS
assert_parameter_equal(
sections[1].value[0],
Parameter(
SOME_NAME, annotation="Optional[str]", description=SOME_TEXT, kind=inspect.Parameter.POSITIONAL_OR_KEYWORD
),
)
def test_parse__param_field_type_none_or_field__param_section_with_optional():
"""Parse a simple docstring."""
def f(foo):
"""
Docstring with line continuation.
:param foo: descriptive test text
:type foo: None or str
"""
sections, errors = parse(f)
assert len(sections) == 2
assert sections[1].type == Section.Type.PARAMETERS
assert_parameter_equal(
sections[1].value[0],
Parameter(
SOME_NAME, annotation="Optional[str]", description=SOME_TEXT, kind=inspect.Parameter.POSITIONAL_OR_KEYWORD
),
)
def test_parse__param_field_type_field_or_int__param_section_with_union():
"""Parse a simple docstring."""
def f(foo):
"""
Docstring with line continuation.
:param foo: descriptive test text
:type foo: str or int
"""
sections, errors = parse(f)
assert len(sections) == 2
assert sections[1].type == Section.Type.PARAMETERS
assert_parameter_equal(
sections[1].value[0],
Parameter(
SOME_NAME, annotation="Union[str,int]", description=SOME_TEXT, kind=inspect.Parameter.POSITIONAL_OR_KEYWORD
),
)
def test_parse__param_field_type_multiple__param_section_with_union():
"""Parse a simple docstring."""
def f(foo):
"""
Docstring with line continuation.
:param foo: descriptive test text
:type foo: str or int or float
"""
sections, errors = parse(f)
assert len(sections) == 2
assert sections[1].type == Section.Type.PARAMETERS
assert_parameter_equal(
sections[1].value[0],
Parameter(
SOME_NAME,
annotation="Union[str,int,float]",
description=SOME_TEXT,
kind=inspect.Parameter.POSITIONAL_OR_KEYWORD,
),
)
def test_parse__param_field_annotate_type__param_section_with_type():
"""Parse a simple docstring."""
def f(foo: str):
"""
Docstring with line continuation.
:param foo: descriptive test text
"""
sections, errors = parse(f)
assert len(sections) == 2
assert sections[1].type == Section.Type.PARAMETERS
assert_parameter_equal(
sections[1].value[0],
Parameter(SOME_NAME, annotation=str, description=SOME_TEXT, kind=inspect.Parameter.POSITIONAL_OR_KEYWORD),
)
def test_parse__param_field_no_matching_param__result_from_docstring():
"""Parse a simple docstring."""
def f(foo: str):
"""
Docstring with line continuation.
:param other: descriptive test text
"""
sections, errors = parse(f)
assert len(sections) == 2
assert sections[1].type == Section.Type.PARAMETERS
assert_parameter_equal(
sections[1].value[0],
Parameter("other", annotation=empty, description=SOME_TEXT, kind=empty),
)
def test_parse__param_field_with_default__result_from_docstring():
"""Parse a simple docstring."""
def f(foo=""):
"""
Docstring with line continuation.
:param foo: descriptive test text
"""
sections, errors = parse(f)
assert len(sections) == 2
assert sections[1].type == Section.Type.PARAMETERS
assert_parameter_equal(
sections[1].value[0],
Parameter(
"foo", annotation=empty, description=SOME_TEXT, default="", kind=inspect.Parameter.POSITIONAL_OR_KEYWORD
),
)
def test_parse__param_field_no_matching_param__error_message():
"""Parse a simple docstring."""
def f(foo: str):
"""
Docstring with line continuation.
:param other: descriptive test text
"""
sections, errors = parse(f)
assert "No matching parameter for 'other'" in errors[0]
def test_parse__invalid_param_field_only_initial_marker__error_message():
"""Parse a simple docstring."""
def f(foo: str):
"""
Docstring with line continuation.
:param foo descriptive test text
"""
sections, errors = parse(f)
assert "Failed to get ':directive: value' pair" in errors[0]
def test_parse__invalid_param_field_wrong_part_count__error_message():
"""Parse a simple docstring."""
def f(foo: str):
"""
Docstring with line continuation.
:param: descriptive test text
"""
sections, errors = parse(f)
assert "Failed to parse field directive" in errors[0]
def test_parse__param_twice__error_message():
"""Parse a simple docstring."""
def f(foo: str):
"""
Docstring with line continuation.
:param foo: descriptive test text
:param foo: descriptive test text again
"""
sections, errors = parse(f)
assert "Duplicate parameter entry for 'foo'" in errors[0]
def test_parse__param_type_twice_doc__error_message():
"""Parse a simple docstring."""
def f(foo):
"""
Docstring with line continuation.
:param str foo: descriptive test text
:type foo: str
"""
sections, errors = parse(f)
assert "Duplicate parameter information for 'foo'" in errors[0]
def test_parse__param_type_twice_type_directive_first__error_message():
"""Parse a simple docstring."""
def f(foo):
"""
Docstring with line continuation.
:type foo: str
:param str foo: descriptive test text
"""
sections, errors = parse(f)
assert "Duplicate parameter information for 'foo'" in errors[0]
def test_parse__param_type_twice_annotated__error_message():
"""Parse a simple docstring."""
def f(foo: str):
"""
Docstring with line continuation.
:param str foo: descriptive test text
:type foo: str
"""
sections, errors = parse(f)
assert "Duplicate parameter information for 'foo'" in errors[0]
def test_parse__param_type_no_type__error_message():
"""Parse a simple docstring."""
def f(foo: str):
"""
Docstring with line continuation.
:param str foo: descriptive test text
:type str
"""
sections, errors = parse(f)
assert "Failed to get ':directive: value' pair from" in errors[0]
def test_parse__param_type_no_name__error_message():
"""Parse a simple docstring."""
def f(foo: str):
"""
Docstring with line continuation.
:param str foo: descriptive test text
:type: str
"""
sections, errors = parse(f)
assert "Failed to get parameter name from" in errors[0]
@pytest.mark.parametrize(
"docstring",
[
f"""
Docstring with param with continuation, no indent.
:var {SOME_NAME}: {SOME_TEXT}
{SOME_EXTRA_TEXT}
""",
f"""
Docstring with param with continuation, with indent.
:var {SOME_NAME}: {SOME_TEXT}
{SOME_EXTRA_TEXT}
""",
],
)
def test_parse__attribute_field_multi_line__param_section(docstring):
"""Parse a simple docstring."""
sections, errors = parse_detailed(docstring)
assert len(sections) == 2
assert sections[1].type == Section.Type.ATTRIBUTES
assert_attribute_equal(
sections[1].value[0],
Attribute(SOME_NAME, annotation=empty, description=f"{SOME_TEXT} {SOME_EXTRA_TEXT}"),
)
@pytest.mark.parametrize(
"attribute_directive_name",
[
"var",
"ivar",
"cvar",
],
)
def test_parse__all_attribute_names__param_section(attribute_directive_name):
sections, errors = parse_detailed(
f"""
Docstring with one line attribute.
:{attribute_directive_name} {SOME_NAME}: {SOME_TEXT}
"""
)
assert len(sections) == 2
assert sections[1].type == Section.Type.ATTRIBUTES
assert_attribute_equal(
sections[1].value[0],
Attribute(SOME_NAME, annotation=empty, description=SOME_TEXT),
)
def test_parse__class_attributes__attributes_section():
class Foo:
"""
Class docstring with attributes
:var foo: descriptive test text
"""
sections, errors = parse(Foo)
assert len(sections) == 2
assert sections[1].type == Section.Type.ATTRIBUTES
assert_attribute_equal(
sections[1].value[0],
Attribute(SOME_NAME, annotation=empty, description=SOME_TEXT),
)
def test_parse__class_attributes_with_type__annotation_in_attributes_section():
class Foo:
"""
Class docstring with attributes
:vartype foo: str
:var foo: descriptive test text
"""
sections, errors = parse(Foo)
assert len(sections) == 2
assert sections[1].type == Section.Type.ATTRIBUTES
assert_attribute_equal(
sections[1].value[0],
Attribute(SOME_NAME, annotation="str", description=SOME_TEXT),
)
def test_parse__attribute_invalid_directive___error():
class Foo:
"""
Class docstring with attributes
:var descriptive test text
"""
sections, errors = parse(Foo)
assert "Failed to get ':directive: value' pair from" in errors[0]
def test_parse__attribute_no_name__error():
class Foo:
"""
Class docstring with attributes
:var: descriptive test text
"""
sections, errors = parse(Foo)
assert "Failed to parse field directive from" in errors[0]
def test_parse__attribute_duplicate__error():
class Foo:
"""
Class docstring with attributes
:var foo: descriptive test text
:var foo: descriptive test text
"""
sections, errors = parse(Foo)
assert "Duplicate attribute entry for 'foo'" in errors[0]
def test_parse__class_attributes_type_invalid__error():
class Foo:
"""
Class docstring with attributes
:vartype str
:var foo: descriptive test text
"""
sections, errors = parse(Foo)
assert "Failed to get ':directive: value' pair from " in errors[0]
def test_parse__class_attributes_type_no_name__error():
class Foo:
"""
Class docstring with attributes
:vartype: str
:var foo: descriptive test text
"""
sections, errors = parse(Foo)
assert "Failed to get attribute name from" in errors[0]
def test_parse__return_directive__return_section_no_type():
def f(foo: str):
"""
Function with only return directive
:return: descriptive test text
"""
return foo
sections, errors = parse(f)
assert len(sections) == 2
assert sections[1].type == Section.Type.RETURN
assert_annotated_obj_equal(
sections[1].value,
AnnotatedObject(annotation=empty, description=SOME_TEXT),
)
def test_parse__return_directive_rtype__return_section_with_type():
def f(foo: str):
"""
Function with only return & rtype directive
:return: descriptive test text
:rtype: str
"""
return foo
sections, errors = parse(f)
assert len(sections) == 2
assert sections[1].type == Section.Type.RETURN
assert_annotated_obj_equal(
sections[1].value,
AnnotatedObject(annotation="str", description=SOME_TEXT),
)
def test_parse__return_directive_rtype_first__return_section_with_type():
def f(foo: str):
"""
Function with only return & rtype directive
:rtype: str
:return: descriptive test text
"""
return foo
sections, errors = parse(f)
assert len(sections) == 2
assert sections[1].type == Section.Type.RETURN
assert_annotated_obj_equal(
sections[1].value,
AnnotatedObject(annotation="str", description=SOME_TEXT),
)
def test_parse__return_directive_annotation__return_section_with_type():
def f(foo: str) -> str:
"""
Function with return directive & annotation
:return: descriptive test text
"""
return foo
sections, errors = parse(f)
assert len(sections) == 2
assert sections[1].type == Section.Type.RETURN
assert_annotated_obj_equal(
sections[1].value,
AnnotatedObject(annotation=str, description=SOME_TEXT),
)
def test_parse__return_directive_annotation__return_section_with_type_error():
def f(foo: str) -> str:
"""
Function with return directive, rtype directive, & annotation
:return: descriptive test text
:rtype: str
"""
return foo
sections, errors = parse(f)
assert len(sections) == 2
assert sections[1].type == Section.Type.RETURN
assert_annotated_obj_equal(
sections[1].value,
AnnotatedObject(annotation=str, description=SOME_TEXT),
)
assert "Duplicate type information for return" in errors[0]
def test_parse__return_invalid__error():
def f(foo: str):
"""
Function with only return directive
:return descriptive test text
"""
return foo
sections, errors = parse(f)
assert "Failed to get ':directive: value' pair from " in errors[0]
def test_parse__rtype_invalid__error():
def f(foo: str):
"""
Function with only rtype directive
:rtype str
"""
return foo
sections, errors = parse(f)
assert "Failed to get ':directive: value' pair from " in errors[0]
def test_parse__raises_directive__exception_section():
def f(foo: str):
"""
Function with only raise directive
:raise SomeException: descriptive test text
"""
return foo
sections, errors = parse(f)
assert len(sections) == 2
assert sections[1].type == Section.Type.EXCEPTIONS
assert_annotated_obj_equal(
sections[1].value[0],
AnnotatedObject(annotation=SOME_EXCEPTION_NAME, description=SOME_TEXT),
)
def test_parse__multiple_raises_directive__exception_section_with_two():
def f(foo: str):
"""
Function with two raise directives
:raise SomeException: descriptive test text
:raise SomeOtherException: descriptive test text
"""
return foo
sections, errors = parse(f)
assert len(sections) == 2
assert sections[1].type == Section.Type.EXCEPTIONS
assert_annotated_obj_equal(
sections[1].value[0],
AnnotatedObject(annotation=SOME_EXCEPTION_NAME, description=SOME_TEXT),
)
assert_annotated_obj_equal(
sections[1].value[1],
AnnotatedObject(annotation=SOME_OTHER_EXCEPTION_NAME, description=SOME_TEXT),
)
@pytest.mark.parametrize(
"attribute_directive_name",
[
"raises",
"raise",
"except",
"exception",
],
)
def test_parse__all_exception_names__param_section(attribute_directive_name):
sections, errors = parse_detailed(
f"""
Docstring with one line attribute.
:{attribute_directive_name} {SOME_EXCEPTION_NAME}: {SOME_TEXT}
"""
)
assert len(sections) == 2
assert sections[1].type == Section.Type.EXCEPTIONS
assert_annotated_obj_equal(
sections[1].value[0],
AnnotatedObject(annotation=SOME_EXCEPTION_NAME, description=SOME_TEXT),
)
def test_parse__raise_invalid__error():
def f(foo: str):
"""
Function with invalid raise directive
:raise descriptive test text
"""
return foo
sections, errors = parse(f)
assert "Failed to get ':directive: value' pair from " in errors[0]
def test_parse__raise_no_name__error():
def f(foo: str):
"""
Function with raise directive missing a name
:raise: descriptive test text
"""
return foo
sections, errors = parse(f)
assert "Failed to parse exception directive from" in errors[0]
# -------------------------------
# Fixture tests
# -------------------------------
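# Unlike the unit tests above, which drive RestructuredText().parse directly through
# the parse()/parse_detailed() helpers, the tests below go through the Loader and
# assert against fully loaded fixture objects. A minimal sketch, assuming the fixture
# module exists as referenced:
#
#     obj = get_rst_object_documentation("docstring_attributes_section")
#     assert obj.docstring_sections[1].type == Section.Type.ATTRIBUTES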
def test_parse_module_attributes_section__expected_attributes_section():
"""Parse attributes section in modules."""
obj = get_rst_object_documentation("docstring_attributes_section")
assert len(obj.docstring_sections) == 2
attr_section = obj.docstring_sections[1]
assert attr_section.type == Section.Type.ATTRIBUTES
assert len(attr_section.value) == 5
expected = [
{"name": "A", "annotation": "int", "description": "Alpha."},
# type annotation takes preference over docstring
{"name": "B", "annotation": "str", "description": "Beta."},
{"name": "C", "annotation": "bool", "description": "Gamma."},
{"name": "D", "annotation": "", "description": "Delta."},
{"name": "E", "annotation": "float", "description": "Epsilon."},
]
assert [serialize_attribute(attr) for attr in attr_section.value] == expected
def test_parse_module_attributes_section__expected_docstring_errors():
"""Parse attributes section in modules."""
obj = get_rst_object_documentation("docstring_attributes_section")
assert len(obj.docstring_errors) == 1
assert "Duplicate attribute information for 'B'" in obj.docstring_errors[0]
def test_property_docstring__expected_description():
"""Parse a property docstring."""
class_ = get_rst_object_documentation("class_docstrings:NotDefinedYet")
prop = class_.attributes[0]
sections = prop.docstring_sections
assert len(sections) == 2
assert sections[0].type == Section.Type.MARKDOWN
assert (
sections[0].value
== "This property returns `self`.\n\nIt's fun because you can call it like `obj.ha.ha.ha.ha.ha.ha...`."
)
def test_property_docstring__expected_return():
"""Parse a property docstring."""
class_ = get_rst_object_documentation("class_docstrings:NotDefinedYet")
prop = class_.attributes[0]
sections = prop.docstring_sections
assert len(sections) == 2
assert sections[1].type == Section.Type.RETURN
assert_annotated_obj_equal(sections[1].value, AnnotatedObject("NotDefinedYet", "self!"))
def test_property_class_init__expected_description():
class_ = get_rst_object_documentation("class_docstrings:ClassInitFunction")
init = class_.methods[0]
sections = init.docstring_sections
assert len(sections) == 2
assert sections[0].type == Section.Type.MARKDOWN
assert sections[0].value == "Initialize instance."
def test_class_init__expected_param():
class_ = get_rst_object_documentation("class_docstrings:ClassInitFunction")
init = class_.methods[0]
sections = init.docstring_sections
assert len(sections) == 2
assert sections[1].type == Section.Type.PARAMETERS
param_section = sections[1]
assert_parameter_equal(
param_section.value[0], Parameter("value", str, "Value to store", kind=inspect.Parameter.POSITIONAL_OR_KEYWORD)
)
assert_parameter_equal(
param_section.value[1],
Parameter("other", "int", "Other value with default", kind=inspect.Parameter.POSITIONAL_OR_KEYWORD, default=1),
)
def test_member_function___expected_param():
class_ = get_rst_object_documentation("class_docstrings:ClassWithFunction")
init = class_.methods[0]
sections = init.docstring_sections
assert len(sections) == 3
param_section = sections[1]
assert param_section.type == Section.Type.PARAMETERS
assert_parameter_equal(
param_section.value[0], Parameter("value", str, "Value to store", kind=inspect.Parameter.POSITIONAL_OR_KEYWORD)
)
assert_parameter_equal(
param_section.value[1],
Parameter("other", "int", "Other value with default", kind=inspect.Parameter.POSITIONAL_OR_KEYWORD, default=1),
)
def test_member_function___expected_return():
class_ = get_rst_object_documentation("class_docstrings:ClassWithFunction")
init = class_.methods[0]
sections = init.docstring_sections
assert len(sections) == 3
assert sections[2].type == Section.Type.RETURN
assert_annotated_obj_equal(sections[2].value, AnnotatedObject(str, "Concatenated result"))
def test_property_docstring__no_errors():
"""Parse a property docstring."""
class_ = get_rst_object_documentation("class_docstrings:NotDefinedYet")
prop = class_.attributes[0]
assert not prop.docstring_errors
pytkdocs-0.16.1/tests/test_properties.py 0000664 0000000 0000000 00000004350 14211451604 0020406 0 ustar 00root root 0000000 0000000 """Tests for [the `properties` module][pytkdocs.properties]."""
from pytkdocs.objects import Attribute, Class, Function, Method, Module
def test_name_properties_on_module():
"""Check module name properties."""
assert not Module(name="a", path="a", file_path="a.py").name_properties
assert "private" in Module(name="_a", path="a", file_path="_a.py").name_properties
assert not Module(name="__a", path="__a", file_path="__a.py").name_properties
assert "special" in Module(name="__a__", path="a", file_path="__a__.py").name_properties
def test_name_properties_on_class():
"""Check class name properties."""
assert not Class(name="b", path="a.b", file_path="a.py").name_properties
assert "private" in Class(name="_b", path="a._b", file_path="a.py").name_properties
assert not Class(name="__b", path="a.__b", file_path="a.py").name_properties
assert not Class(name="__b__", path="a.__b__", file_path="a.py").name_properties
def test_name_properties_on_method():
"""Check method name properties."""
assert not Method(name="c", path="a.b.c", file_path="a.py").name_properties
assert "private" in Method(name="_c", path="a.b._c", file_path="a.py").name_properties
assert not Method(name="__c", path="a.b.__c", file_path="a.py").name_properties
assert "special" in Method(name="__c__", path="a.b.__c__", file_path="a.py").name_properties
def test_name_properties_on_function():
"""Check function name properties."""
assert not Function(name="b", path="a.b", file_path="a.py").name_properties
assert "private" in Function(name="_b", path="a._b", file_path="a.py").name_properties
assert not Function(name="__b", path="a.__b", file_path="a.py").name_properties
assert not Function(name="__b__", path="a.__b__", file_path="a.py").name_properties
def test_name_properties_on_attribute():
"""Check attribute name properties."""
assert not Attribute(name="b", path="a.b", file_path="a.py").name_properties
assert "private" in Attribute(name="_b", path="a._b", file_path="a.py").name_properties
assert "class-private" in Attribute(name="__b", path="a.__b", file_path="a.py").name_properties
assert "special" in Attribute(name="__b__", path="a.__b__", file_path="a.py").name_properties
pytkdocs-0.16.1/tests/test_serializer.py 0000664 0000000 0000000 00000000100 14211451604 0020350 0 ustar 00root root 0000000 0000000 """Tests for [the `serializer` module][pytkdocs.serializer]."""