pax_global_header00006660000000000000000000000064146221071120014506gustar00rootroot0000000000000052 comment=f40ac96bfa6a77b593492939049729f46d141767 aio-libs-aiosmtpd-b634d9b/000077500000000000000000000000001462210711200154425ustar00rootroot00000000000000aio-libs-aiosmtpd-b634d9b/.codecov.yml000066400000000000000000000000521462210711200176620ustar00rootroot00000000000000codecov: notify: after_n_builds: 10 aio-libs-aiosmtpd-b634d9b/.coveragerc000066400000000000000000000002511462210711200175610ustar00rootroot00000000000000# .coveragerc to control coverage.py [run] branch = True omit = aiosmtpd/docs/* [report] exclude_also = ^\s*if TYPE_CHECKING: : \.\.\.(\s*#.*)?$ ^ +\.\.\.$ aio-libs-aiosmtpd-b634d9b/.github/000077500000000000000000000000001462210711200170025ustar00rootroot00000000000000aio-libs-aiosmtpd-b634d9b/.github/PULL_REQUEST_TEMPLATE/000077500000000000000000000000001462210711200222615ustar00rootroot00000000000000aio-libs-aiosmtpd-b634d9b/.github/PULL_REQUEST_TEMPLATE/pull_request_template.md000066400000000000000000000026761462210711200272350ustar00rootroot00000000000000 ## What do these changes do? ## Are there changes in behavior for the user? ## Related issue number ## Checklist - [ ] I think the code is well written - [ ] Unit tests for the changes exist - [ ] tox testenvs have been executed in the following environments: - [ ] Linux (Ubuntu 18.04, Ubuntu 20.04, Arch): `{py36,py37,py38,py39}-{nocov,cov,diffcov}, qa, docs` - [ ] Windows (7, 10): `{py36,py37,py38,py39}-{nocov,cov,diffcov}` - [ ] WSL 1.0 (Ubuntu 18.04): `{py36,py37,py38,py39}-{nocov,cov,diffcov}, pypy3-{nocov,cov}, qa, docs` - [ ] FreeBSD (12.2, 12.1, 11.4): `{py36,pypy3}-{nocov,cov,diffcov}, qa` - [ ] Cygwin: `py36-{nocov,cov,diffcov}, qa, docs` - [ ] Documentation reflects the changes - [ ] Add a news fragment into the `NEWS.rst` file * Add under the "aiosmtpd-next" section, creating one if necessary * You may create subsections to group the changes, if you like * Use full sentences with correct case and punctuation * Refer to relevant Issue if applicable aio-libs-aiosmtpd-b634d9b/.github/SECURITY.rst000066400000000000000000000011301462210711200207760ustar00rootroot00000000000000========================= Reporting Vulnerabilities ========================= **⚠️ Please do not file public GitHub issues for security vulnerabilities as they are open for everyone to see! ⚠️** We encourage responsible disclosure practices for security vulnerabilities. If you believe you've found a security-related bug, email `Andrew Svetlov `_ and/or `Pandu POLUAN `_ instead of filing a ticket or posting to any public groups. We'll try to assess the problem and disclose it in a responsible way. aio-libs-aiosmtpd-b634d9b/.github/dependabot.yml000066400000000000000000000003121462210711200216260ustar00rootroot00000000000000version: 2 updates: - package-ecosystem: pip directory: "/" schedule: interval: daily - package-ecosystem: "github-actions" directory: "/" schedule: interval: "monthly" aio-libs-aiosmtpd-b634d9b/.github/pull_request_template.md000066400000000000000000000026761462210711200237560ustar00rootroot00000000000000 ## What do these changes do? ## Are there changes in behavior for the user? 
## Related issue number ## Checklist - [ ] I think the code is well written - [ ] Unit tests for the changes exist - [ ] tox testenvs have been executed in the following environments: - [ ] Linux (Ubuntu 18.04, Ubuntu 20.04, Arch): `{py36,py37,py38,py39}-{nocov,cov,diffcov}, qa, docs` - [ ] Windows (7, 10): `{py36,py37,py38,py39}-{nocov,cov,diffcov}` - [ ] WSL 1.0 (Ubuntu 18.04): `{py36,py37,py38,py39}-{nocov,cov,diffcov}, pypy3-{nocov,cov}, qa, docs` - [ ] FreeBSD (12.2, 12.1, 11.4): `{py36,pypy3}-{nocov,cov,diffcov}, qa` - [ ] Cygwin: `py36-{nocov,cov,diffcov}, qa, docs` - [ ] Documentation reflects the changes - [ ] Add a news fragment into the `NEWS.rst` file * Add under the "aiosmtpd-next" section, creating one if necessary * You may create subsections to group the changes, if you like * Use full sentences with correct case and punctuation * Refer to relevant Issue if applicable aio-libs-aiosmtpd-b634d9b/.github/workflows/000077500000000000000000000000001462210711200210375ustar00rootroot00000000000000aio-libs-aiosmtpd-b634d9b/.github/workflows/auto-merge.yml000066400000000000000000000011401462210711200236230ustar00rootroot00000000000000name: Dependabot auto-merge on: pull_request_target permissions: pull-requests: write contents: write jobs: dependabot: runs-on: ubuntu-latest if: ${{ github.actor == 'dependabot[bot]' }} steps: - name: Dependabot metadata id: metadata uses: dependabot/fetch-metadata@v2.1.0 with: github-token: "${{ secrets.GITHUB_TOKEN }}" - name: Enable auto-merge for Dependabot PRs run: gh pr merge --auto --squash "$PR_URL" env: PR_URL: ${{github.event.pull_request.html_url}} GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} aio-libs-aiosmtpd-b634d9b/.github/workflows/ci-cd.yml000066400000000000000000000103271462210711200225440ustar00rootroot00000000000000name: CI on: push: branches: - master - '[0-9].[0-9]+' # matches to backport branches, e.g. 3.6 tags: [ 'v*' ] pull_request: branches: - master - '[0-9].[0-9]+' jobs: lint: name: Linter runs-on: ubuntu-latest timeout-minutes: 5 steps: - name: Checkout uses: actions/checkout@v4 - name: Setup Python uses: actions/setup-python@v5 with: python-version: 3.11 cache: 'pip' cache-dependency-path: '**/requirements*.txt' - name: Install dependencies uses: py-actions/py-dependency-install@v4 with: path: requirements-dev.txt - name: Install itself run: | pip install . 
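      # The remaining steps in this lint job run the project's static checks:
      # mypy for type checking, flake8 over the package and the helper scripts,
      # three sphinx-build passes (doctest, html, man) to verify the docs still
      # build, the "qa" pytest suite plus check-manifest for release hygiene,
      # and finally `python -m build` followed by `twine check` to validate
      # the distribution artifacts.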
- name: Mypy run: mypy - name: Flake8 run: python -m flake8 aiosmtpd setup.py housekeep.py release.py - name: Docs Checking run: | # Prepare sphinx and the deps for sphinx extensions sphinx-build --color -b doctest -d build/.doctree aiosmtpd/docs build/doctest sphinx-build --color -b html -d build/.doctree aiosmtpd/docs build/html sphinx-build --color -b man -d build/.doctree aiosmtpd/docs build/man - name: Other QA Checks shell: bash run: | # Final checks before launching the runners ver_sed='s/^__version__ = (["'"'"'])(.*)\1/\2/p;d' verfile="aiosmtpd/__init__.py" if [[ $GITHUB_REF != refs/heads/master ]]; then # Fetch master because some test cases need its existence git fetch --no-tags --prune --no-recurse-submodules --depth=1 origin master:master fi pytest -v aiosmtpd/qa check-manifest -v - name: Prepare twine checker run: | pip install -U build twine wheel python -m build - name: Run twine checker run: | twine check dist/* test: name: Test strategy: matrix: pyver: ['3.8', '3.9', '3.10', '3.11', '3.12'] os: [ubuntu, macos, windows] include: - pyver: pypy-3.8 os: ubuntu runs-on: ${{ matrix.os }}-latest timeout-minutes: 15 steps: - name: Checkout uses: actions/checkout@v4 - name: Setup Python ${{ matrix.pyver }} uses: actions/setup-python@v5 with: python-version: ${{ matrix.pyver }} cache: 'pip' cache-dependency-path: '**/requirements*.txt' - name: Install dependencies uses: py-actions/py-dependency-install@v4 with: path: requirements.txt - name: Run unittests run: pytest env: COLOR: 'yes' - run: python -m coverage xml - name: Upload coverage uses: codecov/codecov-action@v4 with: fail_ci_if_error: true token: ${{ secrets.CODECOV_TOKEN }} check: # This job does nothing and is only used for the branch protection if: always() needs: [lint, test] runs-on: ubuntu-latest steps: - name: Decide whether the needed jobs succeeded or failed uses: re-actors/alls-green@release/v1 with: jobs: ${{ toJSON(needs) }} deploy: name: Deploy environment: pypi runs-on: ubuntu-latest needs: [check] if: github.event_name == 'push' && contains(github.ref, 'refs/tags/') permissions: contents: write # IMPORTANT: mandatory for making GitHub Releases id-token: write # IMPORTANT: mandatory for trusted publishing & sigstore steps: - name: Checkout uses: actions/checkout@v4 - name: Setup Python uses: actions/setup-python@v5 with: python-version: 3.11 - name: Install dependencies run: python -m pip install -U pip wheel setuptools build twine - name: Build dists run: | python -m build - name: Make Release uses: aio-libs/create-release@v1.6.6 with: changes_file: aiosmtpd/docs/NEWS.rst name: aiosmtpd version_file: aiosmtpd/__init__.py github_token: ${{ secrets.GITHUB_TOKEN }} pypi_token: ${{ secrets.PYPI_API_TOKEN }} dist_dir: dist fix_issue_regex: "`#(\\d+) `" fix_issue_repl: "(#\\1)" - name: Publish 🐍📦 to PyPI uses: pypa/gh-action-pypi-publish@release/v1 aio-libs-aiosmtpd-b634d9b/.github/workflows/codeql.yml000066400000000000000000000015061462210711200230330ustar00rootroot00000000000000name: "CodeQL" on: push: branches: [ "master" ] pull_request: branches: [ "master" ] schedule: - cron: "42 17 * * 1" jobs: analyze: name: Analyze runs-on: ubuntu-latest permissions: actions: read contents: read security-events: write strategy: fail-fast: false matrix: language: [ python ] steps: - name: Checkout uses: actions/checkout@v4 - name: Initialize CodeQL uses: github/codeql-action/init@v3 with: languages: ${{ matrix.language }} queries: +security-and-quality - name: Autobuild uses: github/codeql-action/autobuild@v3 - name: Perform 
CodeQL Analysis uses: github/codeql-action/analyze@v3 with: category: "/language:${{ matrix.language }}" aio-libs-aiosmtpd-b634d9b/.gitignore000066400000000000000000000005541462210711200174360ustar00rootroot00000000000000*~ bin parts coverage eggs sources build/ dist venv .installed.cfg develop-eggs var/* *.egg-info *.pyc *.pyo *.bak *.egg *.tar.gz *.so .tox .DS_Store .coverage .coverage.*.* .noseids docs/_build/ nosetests.xml __pycache__/ htmlcov/ _dynamic/ _dump/ coverage.xml coverage-*.xml diffcov.html diffcov-*.html .python-version prof/ .pytype/ ~temp* *.sw[a-p] pyvenv.cfg aio-libs-aiosmtpd-b634d9b/.mypy.ini000066400000000000000000000024051462210711200172200ustar00rootroot00000000000000[mypy] files = aiosmtpd, examples check_untyped_defs = True follow_imports_for_stubs = True #disallow_any_decorated = True #disallow_any_generics = True disallow_any_unimported = True #disallow_incomplete_defs = True disallow_subclassing_any = True #disallow_untyped_calls = True disallow_untyped_decorators = True #disallow_untyped_defs = True # TODO(PY312): explicit-override enable_error_code = ignore-without-code, possibly-undefined, redundant-expr, redundant-self, truthy-bool, truthy-iterable, unused-awaitable extra_checks = True implicit_reexport = False no_implicit_optional = True pretty = True show_column_numbers = True show_error_codes = True show_error_code_links = True strict_equality = True warn_incomplete_stub = True warn_redundant_casts = True #warn_return_any = True warn_unreachable = True warn_unused_ignores = True [mypy-aiosmtpd.tests.*] # TODO: Type tests ignore_errors = True [mypy-aiosmtpd.proxy_protocol] disable_error_code = misc # TODO: Fix this. Maybe try switching to dataclasses? #aiosmtpd/proxy_protocol.py:312:9: error: Trying to assign name "error" that is not in "__slots__" of type "aiosmtpd.proxy_protocol.ProxyData" [misc] # self.error = error_msg # ^~~~~~~~~~ [mypy-sphinx_rtd_theme.*] ignore_missing_imports = True aio-libs-aiosmtpd-b634d9b/.readthedocs.yml000066400000000000000000000006101462210711200205250ustar00rootroot00000000000000# Read the Docs configuration file # See https://docs.readthedocs.io/en/stable/config-file/v2.html # for details --- version: 2 submodules: include: all exclude: [] recursive: true build: os: ubuntu-22.04 tools: python: "3.11" sphinx: configuration: aiosmtpd/docs/conf.py fail_on_warning: true python: install: - requirements: aiosmtpd/docs/requirements.txt ... aio-libs-aiosmtpd-b634d9b/DESCRIPTION.rst000066400000000000000000000117371462210711200177700ustar00rootroot00000000000000###################################### aiosmtpd - asyncio based SMTP server ###################################### | |github license| |_| |PyPI Version| |_| |PyPI Python| |_| |PyPI PythonImpl| | |GA badge| |_| |CodeQL badge| |_| |codecov| |_| |readthedocs| | |GH Release| |_| |GH PRs| |_| |GH LastCommit| | |PyPI DL| |_| |GH DL| | | |GH Discussions| | .. .. U+00A0 is non-breaking space .. |_| unicode:: 0xA0 :trim: .. |github license| image:: https://img.shields.io/github/license/aio-libs/aiosmtpd?logo=Open+Source+Initiative&logoColor=0F0 :target: https://github.com/aio-libs/aiosmtpd/blob/master/LICENSE :alt: Project License on GitHub .. |PyPI Version| image:: https://img.shields.io/pypi/v/aiosmtpd?logo=pypi&logoColor=yellow :target: https://pypi.org/project/aiosmtpd/ :alt: PyPI Package .. |PyPI Python| image:: https://img.shields.io/pypi/pyversions/aiosmtpd?logo=python&logoColor=yellow :target: https://pypi.org/project/aiosmtpd/ :alt: Supported Python Versions .. 
|PyPI PythonImpl| image:: https://img.shields.io/pypi/implementation/aiosmtpd?logo=python :target: https://pypi.org/project/aiosmtpd/ :alt: Supported Python Implementations .. .. For |GA badge|, don't forget to check actual workflow name in unit-testing-and-coverage.yml .. |GA badge| image:: https://github.com/aio-libs/aiosmtpd/workflows/aiosmtpd%20CI/badge.svg :target: https://github.com/aio-libs/aiosmtpd/actions/workflows/unit-testing-and-coverage.yml :alt: GitHub CI status .. |CodeQL badge| image:: https://github.com/aio-libs/aiosmtpd/workflows/CodeQL/badge.svg :target: https://github.com/aio-libs/aiosmtpd/actions/workflows/codeql.yml :alt: CodeQL status .. |codecov| image:: https://codecov.io/github/aio-libs/aiosmtpd/coverage.svg?branch=master :target: https://codecov.io/github/aio-libs/aiosmtpd?branch=master :alt: Code Coverage .. |readthedocs| image:: https://img.shields.io/readthedocs/aiosmtpd?logo=Read+the+Docs :target: https://aiosmtpd.readthedocs.io/en/latest/?badge=latest :alt: Documentation Status .. |GH Release| image:: https://img.shields.io/github/v/release/aio-libs/aiosmtpd?logo=github :target: https://github.com/aio-libs/aiosmtpd/releases :alt: GitHub latest release .. |GH PRs| image:: https://img.shields.io/github/issues-pr/aio-libs/aiosmtpd?logo=GitHub :target: https://github.com/aio-libs/aiosmtpd/pulls :alt: GitHub pull requests .. |GH LastCommit| image:: https://img.shields.io/github/last-commit/aio-libs/aiosmtpd?logo=GitHub :target: https://github.com/aio-libs/aiosmtpd/commits/master :alt: GitHub last commit .. |PyPI DL| image:: https://img.shields.io/pypi/dm/aiosmtpd?logo=pypi :target: https://pypi.org/project/aiosmtpd/ :alt: PyPI monthly downloads .. |GH DL| image:: https://img.shields.io/github/downloads/aio-libs/aiosmtpd/total?logo=github :target: https://github.com/aio-libs/aiosmtpd/releases :alt: GitHub downloads .. |GH Discussions| image:: https://img.shields.io/github/discussions/aio-libs/aiosmtpd?logo=github&style=social :target: https://github.com/aio-libs/aiosmtpd/discussions :alt: GitHub Discussions This is a server for SMTP and related MTA protocols, similar in utility to the standard library's |smtpd.py|_ module, but rewritten to be based on ``asyncio``. Please visit the `Project Homepage`_ for more information. .. _`Project Homepage`: https://aiosmtpd.readthedocs.io/ .. |smtpd.py| replace:: ``smtpd.py`` .. _`smtpd.py`: https://docs.python.org/3/library/smtpd.html Signing Keys ============ Starting version 1.3.1, files provided through PyPI or `GitHub Releases`_ will be signed using one of the following GPG Keys: .. _`GitHub Releases`: https://github.com/aio-libs/aiosmtpd/releases .. .. In the second column of the table, prefix each line with "| " .. In the third column, refrain from putting in a direct link to keep the table tidy. 
Rather, use the |...|_ construct and do the replacement+linking directive below the table +-------------------------+------------------------------------+-----------+ | GPG Key ID | Owner / Email | Key | +=========================+====================================+===========+ | ``5D60 CE28 9CD7 C258`` | | Pandu POLUAN / | |pep_gh|_ | | | | pepoluan at gmail period com | | +-------------------------+------------------------------------+-----------+ | ``5555 A6A6 7AE1 DC91`` | | Pandu E POLUAN | | | | | pepoluan at gmail period com | | +-------------------------+------------------------------------+-----------+ | ``E309 FD82 73BD 8465`` | | Wayne Werner | | | | | waynejwerner at gmail period com | | +-------------------------+------------------------------------+-----------+ .. .. The |_| contruct is U+00A0 (non-breaking space), defined at the start of the file .. |pep_gh| replace:: On |_| GitHub .. _`pep_gh`: https://github.com/pepoluan.gpg aio-libs-aiosmtpd-b634d9b/LICENSE000066400000000000000000000236111462210711200164520ustar00rootroot00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work. "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. 
For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. 
You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. 
END OF TERMS AND CONDITIONS aio-libs-aiosmtpd-b634d9b/MANIFEST.in000066400000000000000000000002111462210711200171720ustar00rootroot00000000000000graft aiosmtpd include LICENSE NOTICE .coveragerc *.cfg *.ini *.py *.rst *.yml *.toml *.txt global-exclude *.py[oc] *.sw[a-p] pyvenv.cfg aio-libs-aiosmtpd-b634d9b/NOTICE000066400000000000000000000004051462210711200163450ustar00rootroot00000000000000aiosmtpd Copyright 2014-2021 The aiosmtpd Developers This product includes software developed by The aio-libs Collaboration (https://github.com/aio-libs) This package includes software developed by Hong Minhee (https://github.com/sphinx-contrib/autoprogram/) aio-libs-aiosmtpd-b634d9b/README.rst000066400000000000000000000321401462210711200171310ustar00rootroot00000000000000========================================= aiosmtpd - An asyncio based SMTP server ========================================= | |github license| |_| |PyPI Version| |_| |PyPI Python| |_| |PyPI PythonImpl| | |GA badge| |_| |CodeQL badge| |_| |codecov| |_| |readthedocs| | | |GH Discussions| | .. |_| unicode:: 0xA0 :trim: .. |github license| image:: https://img.shields.io/github/license/aio-libs/aiosmtpd?logo=Open+Source+Initiative&logoColor=0F0 :target: https://github.com/aio-libs/aiosmtpd/blob/master/LICENSE :alt: Project License on GitHub .. |PyPI Version| image:: https://img.shields.io/pypi/v/aiosmtpd?logo=pypi&logoColor=yellow :target: https://pypi.org/project/aiosmtpd/ :alt: PyPI Package .. |PyPI Python| image:: https://img.shields.io/pypi/pyversions/aiosmtpd?logo=python&logoColor=yellow :target: https://pypi.org/project/aiosmtpd/ :alt: Supported Python Versions .. |PyPI PythonImpl| image:: https://img.shields.io/pypi/implementation/aiosmtpd?logo=python :target: https://pypi.org/project/aiosmtpd/ :alt: Supported Python Implementations .. .. For |GA badge|, don't forget to check actual workflow name in unit-testing-and-coverage.yml .. |GA badge| image:: https://github.com/aio-libs/aiosmtpd/workflows/aiosmtpd%20CI/badge.svg :target: https://github.com/aio-libs/aiosmtpd/actions/workflows/unit-testing-and-coverage.yml :alt: GitHub CI status .. |CodeQL badge| image:: https://github.com/aio-libs/aiosmtpd/workflows/CodeQL/badge.svg :target: https://github.com/aio-libs/aiosmtpd/actions/workflows/codeql.yml :alt: CodeQL status .. |codecov| image:: https://codecov.io/github/aio-libs/aiosmtpd/coverage.svg?branch=master :target: https://codecov.io/github/aio-libs/aiosmtpd?branch=master :alt: Code Coverage .. |readthedocs| image:: https://img.shields.io/readthedocs/aiosmtpd?logo=Read+the+Docs&logoColor=white :target: https://aiosmtpd.readthedocs.io/en/latest/ :alt: Documentation Status .. |GH Discussions| image:: https://img.shields.io/github/discussions/aio-libs/aiosmtpd?logo=github&style=social :target: https://github.com/aio-libs/aiosmtpd/discussions :alt: GitHub Discussions The Python standard library includes a basic |SMTP|_ server in the |smtpd|_ module, based on the old asynchronous libraries |asyncore|_ and |asynchat|_. These modules are quite old and are definitely showing their age; ``asyncore`` and ``asynchat`` are difficult APIs to work with, understand, extend, and fix. (And have been deprecated since Python 3.6, and will be removed in Python 3.12.) With the introduction of the |asyncio|_ module in Python 3.4, a much better way of doing asynchronous I/O is now available. It seems obvious that an asyncio-based version of the SMTP and related protocols are needed for Python 3. 
This project brings together several highly experienced Python developers
collaborating on this reimplementation.

This package provides such an implementation of both the SMTP and LMTP
protocols.  Full documentation is available on |aiosmtpd rtd|_


Requirements
============

Supported Platforms
-----------------------

``aiosmtpd`` has been tested on **CPython**>=3.8 and |PyPy|_>=3.8 for the
following platforms (in alphabetical order):

* Cygwin (as of 2022-12-22, only for CPython 3.8, and 3.9)
* MacOS 11 and 12
* Ubuntu 18.04
* Ubuntu 20.04
* Ubuntu 22.04
* Windows 10
* Windows Server 2019
* Windows Server 2022

``aiosmtpd`` *probably* can run on platforms not listed above, but we cannot
provide support for unlisted platforms.

.. |PyPy| replace:: **PyPy**
.. _`PyPy`: https://www.pypy.org/


Installation
============

Install as usual with ``pip``::

    pip install aiosmtpd

If you receive an error message ``ModuleNotFoundError: No module named 'public'``,
it likely means your ``setuptools`` is too old; try to upgrade ``setuptools``
to at least version ``46.4.0``, which had `implemented a fix for this issue`_.

.. _`implemented a fix for this issue`: https://setuptools.readthedocs.io/en/latest/history.html#v46-4-0


Project details
===============

As of 2016-07-14, aiosmtpd has been put under the |aiolibs|_ umbrella project
and moved to GitHub.

* Project home: https://github.com/aio-libs/aiosmtpd
* PyPI project page: https://pypi.org/project/aiosmtpd/
* Report bugs at: https://github.com/aio-libs/aiosmtpd/issues
* Git clone: https://github.com/aio-libs/aiosmtpd.git
* Documentation: http://aiosmtpd.readthedocs.io/
* StackOverflow: https://stackoverflow.com/questions/tagged/aiosmtpd

The best way to contact the developers is through the GitHub links above.
You can also request help by submitting a question on StackOverflow.


Building
========

You can install this package in a virtual environment like so::

    $ python3 -m venv /path/to/venv
    $ source /path/to/venv/bin/activate
    $ python setup.py install

This will give you a command line script called ``aiosmtpd`` which implements
the SMTP server.  Use ``aiosmtpd --help`` for a quick reference.

You will also have access to the ``aiosmtpd`` library, which you can use as a
testing environment for your SMTP clients.  See the documentation links above
for details.


Developing
==========

You'll need the `tox `__ tool to run the test suite for Python 3.  Once you've
got that, run::

    $ tox

Individual tests can be run like this::

    $ tox -- <testname>

where ``<testname>`` is the "node id" of the test case to run, as explained in
`the pytest documentation`_.  The command above will run that one test case
against all testenvs defined in ``tox.ini`` (see below).

If you want the test run to stop as soon as it hits a failure, use the
``-x``/``--exitfirst`` option::

    $ tox -- -x

You can also add the ``-s``/``--capture=no`` option to show output, e.g.::

    $ tox -e py311-nocov -- -s

and these options can be combined::

    $ tox -e py311-nocov -- -x -s

(The ``-e`` parameter is explained in the next section about 'testenvs'.
In general, you'll want to choose the ``nocov`` testenvs if you want to show
output, so you can see which test is generating which output.)

Supported 'testenvs'
------------------------

In general, the ``-e`` parameter to tox specifies one (or more) **testenv**
to run (separate using comma if more than one testenv).
The following testenvs have been configured and tested: * ``{py38,py39,py310,py311,py312,pypy3,pypy37,pypy38,pypy39}-{nocov,cov,diffcov,profile}`` Specifies the interpreter to run and the kind of testing to perform. - ``nocov`` = no coverage testing. Tests will run verbosely. - ``cov`` = with coverage testing. Tests will run in brief mode (showing a single character per test run) - ``diffcov`` = with diff-coverage report (showing difference in coverage compared to previous commit). Tests will run in brief mode - ``profile`` = no coverage testing, but code profiling instead. This must be **invoked manually** using the ``-e`` parameter **Note 1:** As of 2021-02-23, only the ``{py38,py39}-{nocov,cov}`` combinations work on **Cygwin**. **Note 2:** It is also possible to use whatever Python version is used when invoking ``tox`` by using the ``py`` target, but you must explicitly include the type of testing you want. For example:: $ tox -e "py-{nocov,cov,diffcov}" (Don't forget the quotes if you want to use braces!) You might want to do this for CI platforms where the exact Python version is pre-prepared, such as Travis CI or |GitHub Actions|_; this will definitely save some time during tox's testenv prepping. For all testenv combinations except diffcov, |bandit|_ security check will also be run prior to running pytest. .. _bandit: https://github.com/PyCQA/bandit .. |bandit| replace:: ``bandit`` * ``qa`` Performs |flake8|_ code style checking, and |flake8-bugbear|_ design checking. In addition, some tests to help ensure that ``aiosmtpd`` is *releasable* to PyPI are also run. .. _flake8: https://flake8.pycqa.org/en/latest/ .. |flake8| replace:: ``flake8`` .. _flake8-bugbear: https://github.com/PyCQA/flake8-bugbear .. |flake8-bugbear| replace:: ``flake8-bugbear`` * ``docs`` Builds **HTML documentation** and **manpage** using Sphinx. A `pytest doctest`_ will run prior to actual building of the documentation. * ``static`` Performs a **static type checking** using ``pytype``. **Note 1:** Please ensure that `all pytype dependencies`_ have been installed before executing this testenv. **Note 2:** This testenv will be _SKIPPED_ on Windows, because ``pytype`` currently cannot run on Windows. **Note 3:** This testenv does NOT work on **Cygwin**. .. _`all pytype dependencies`: https://github.com/google/pytype/blob/2021.02.09/CONTRIBUTING.md#pytype-dependencies Environment Variables ------------------------- ``ASYNCIO_CATCHUP_DELAY`` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Due to how asyncio event loop works, some actions do not instantly get responded to. This is especially so on slower / overworked systems. In consideration of such situations, some test cases invoke a slight delay to let the event loop catch up. Defaults to `0.1` and can be set to any float value you want. Different Python Versions ----------------------------- The tox configuration files have been created to cater for more than one Python versions `safely`: If an interpreter is not found for a certain Python version, tox will skip that whole testenv. However, with a little bit of effort, you can have multiple Python interpreter versions on your system by using ``pyenv``. General steps: 1. Install ``pyenv`` from https://github.com/pyenv/pyenv#installation 2. Install ``tox-pyenv`` from https://pypi.org/project/tox-pyenv/ 3. Using ``pyenv``, install the Python versions you want to test on 4. 
Create a ``.python-version`` file in the root of the repo, listing the Python interpreter versions you want to make available to tox (see pyenv's documentation about this file) **Tip:** The 1st line of ``.python-version`` indicates your *preferred* Python version which will be used to run tox. 5. Invoke tox with the option ``--tox-pyenv-no-fallback`` (see tox-pyenv's documentation about this option) ``housekeep.py`` ---------------- If you ever need to 'reset' your repo, you can use the ``housekeep.py`` utility like so:: $ python housekeep.py superclean It is *strongly* recommended to NOT do superclean too often, though. Every time you invoke ``superclean``, tox will have to recreate all its testenvs, and this will make testing *much* longer to finish. ``superclean`` is typically only needed when you switch branches, or if you want to really ensure that artifacts from previous testing sessions won't interfere with your next testing sessions. For example, you want to force Sphinx to rebuild all documentation. Or, you're sharing a repo between environments (say, PSCore and Cygwin) and the cached Python bytecode messes up execution (e.g., sharing the exact same directory between Windows PowerShell and Cygwin will cause problems as Python becomes confused about the locations of the source code). Signing Keys ============ Starting version 1.3.1, files provided through `PyPI`_ or `GitHub Releases`_ will be signed using one of the following GPG Keys: +-------------------------+----------------+----------------------------------+ | GPG Key ID | Owner | Email | +=========================+================+==================================+ | ``5D60 CE28 9CD7 C258`` | Pandu E POLUAN | pepoluan at gmail period com | +-------------------------+----------------+----------------------------------+ | ``5555 A6A6 7AE1 DC91`` | Pandu E POLUAN | pepoluan at gmail period com | +-------------------------+----------------+----------------------------------+ | ``E309 FD82 73BD 8465`` | Wayne Werner | waynejwerner at gmail period com | +-------------------------+----------------+----------------------------------+ | ``5FE9 28CD 9626 CE2B`` | Sam Bull | sam at sambull period org | +-------------------------+----------------+----------------------------------+ .. _PyPI: https://pypi.org/project/aiosmtpd/ .. _`GitHub Releases`: https://github.com/aio-libs/aiosmtpd/releases License ======= ``aiosmtpd`` is released under the Apache License version 2.0. .. _`GitHub Actions`: https://docs.github.com/en/free-pro-team@latest/actions/guides/building-and-testing-python#running-tests-with-tox .. |GitHub Actions| replace:: **GitHub Actions** .. _`pytest doctest`: https://docs.pytest.org/en/stable/doctest.html .. _`the pytest documentation`: https://docs.pytest.org/en/stable/usage.html#specifying-tests-selecting-tests .. _`aiosmtpd rtd`: https://aiosmtpd.readthedocs.io .. |aiosmtpd rtd| replace:: **aiosmtpd.readthedocs.io** .. _`SMTP`: https://tools.ietf.org/html/rfc5321 .. |SMTP| replace:: **SMTP** .. _`smtpd`: https://docs.python.org/3/library/smtpd.html .. |smtpd| replace:: **smtpd** .. _`asyncore`: https://docs.python.org/3/library/asyncore.html .. |asyncore| replace:: ``asyncore`` .. _`asynchat`: https://docs.python.org/3/library/asynchat.html .. |asynchat| replace:: ``asynchat`` .. _`asyncio`: https://docs.python.org/3/library/asyncio.html .. |asyncio| replace:: ``asyncio`` .. _`aiolibs`: https://github.com/aio-libs .. 
|aiolibs| replace:: **aio-libs** aio-libs-aiosmtpd-b634d9b/aiosmtpd/000077500000000000000000000000001462210711200172625ustar00rootroot00000000000000aio-libs-aiosmtpd-b634d9b/aiosmtpd/__init__.py000066400000000000000000000011711462210711200213730ustar00rootroot00000000000000# Copyright 2014-2021 The aiosmtpd Developers # SPDX-License-Identifier: Apache-2.0 import asyncio import warnings __version__ = "1.4.6" def _get_or_new_eventloop() -> asyncio.AbstractEventLoop: loop = None with warnings.catch_warnings(): warnings.simplefilter("error") try: loop = asyncio.get_event_loop() except (DeprecationWarning, RuntimeError): # pragma: py-lt-310 if loop is None: # pragma: py-lt-312 loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) assert isinstance(loop, asyncio.AbstractEventLoop) return loop aio-libs-aiosmtpd-b634d9b/aiosmtpd/__main__.py000066400000000000000000000002331462210711200213520ustar00rootroot00000000000000# Copyright 2014-2021 The aiosmtpd Developers # SPDX-License-Identifier: Apache-2.0 from aiosmtpd.main import main if __name__ == '__main__': main() aio-libs-aiosmtpd-b634d9b/aiosmtpd/controller.py000066400000000000000000000433361462210711200220300ustar00rootroot00000000000000# Copyright 2014-2021 The aiosmtpd Developers # SPDX-License-Identifier: Apache-2.0 import asyncio import errno import os import ssl import threading import time from abc import ABCMeta, abstractmethod from contextlib import ExitStack from pathlib import Path from socket import AF_INET6, SOCK_STREAM, create_connection, has_ipv6 from socket import socket as makesock from socket import timeout as socket_timeout try: from socket import AF_UNIX except ImportError: # pragma: on-not-win32 AF_UNIX = None # type: ignore[assignment] from typing import Any, Awaitable, Dict, Literal, Optional, Union from warnings import warn from public import public from aiosmtpd.smtp import SMTP DEFAULT_READY_TIMEOUT: float = 5.0 @public class IP6_IS: # Apparently errno.E* constants adapts to the OS, so on Windows they will # automatically use the WSAE* constants NO = {errno.EADDRNOTAVAIL, errno.EAFNOSUPPORT} YES = {errno.EADDRINUSE} def _has_ipv6() -> bool: # Helper function to assist in mocking return has_ipv6 @public def get_localhost() -> Literal["::1", "127.0.0.1"]: """Returns numeric address to localhost depending on IPv6 availability""" # Ref: # - https://github.com/urllib3/urllib3/pull/611#issuecomment-100954017 # - https://github.com/python/cpython/blob/ : # - v3.6.13/Lib/test/support/__init__.py#L745-L758 # - v3.9.1/Lib/test/support/socket_helper.py#L124-L137 if not _has_ipv6(): # socket.has_ipv6 only tells us of current Python's IPv6 support, not the # system's. But if the current Python does not support IPv6, it's pointless to # explore further. return "127.0.0.1" try: with makesock(AF_INET6, SOCK_STREAM) as sock: sock.bind(("::1", 0)) # If we reach this point, that means we can successfully bind ::1 (on random # unused port), so IPv6 is definitely supported return "::1" except OSError as e: if e.errno in IP6_IS.NO: return "127.0.0.1" if e.errno in IP6_IS.YES: # We shouldn't ever get these errors, but if we do, that means IPv6 is # supported return "::1" # Other kinds of errors MUST be raised so we can inspect raise def _server_to_client_ssl_ctx(server_ctx: ssl.SSLContext) -> ssl.SSLContext: """ Given an SSLContext object with TLS_SERVER_PROTOCOL return a client context that can connect to the server. 
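    A minimal usage sketch (the certificate and key file names below are
    made-up placeholders, not files shipped with this project)::

        server_ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
        server_ctx.load_cert_chain("cert.pem", "key.pem")
        client_ctx = _server_to_client_ssl_ctx(server_ctx)
        # client_ctx can now wrap the plain socket used to probe the local
        # test server (see _trigger_server below), with cert verification
        # deliberately disabled.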
""" client_ctx = ssl.create_default_context(purpose=ssl.Purpose.SERVER_AUTH) client_ctx.options = server_ctx.options client_ctx.check_hostname = False # We do not verify the ssl cert for the server here simply because this # is a local connection to poke at the server for it to do its lazy # initialization sequence. The only purpose of this client context # is to make a connection to the *local* server created using the same # code. That is also the reason why we disable cert verification below # and the flake8 check for the same. client_ctx.verify_mode = ssl.CERT_NONE # noqa: DUO122 return client_ctx class _FakeServer(asyncio.StreamReaderProtocol): """ Returned by _factory_invoker() in lieu of an SMTP instance in case factory() failed to instantiate an SMTP instance. """ def __init__(self, loop: asyncio.AbstractEventLoop): # Imitate what SMTP does super().__init__( asyncio.StreamReader(loop=loop), client_connected_cb=self._cb_client_connected, loop=loop, ) def _cb_client_connected( self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter ) -> None: pass @public class BaseController(metaclass=ABCMeta): smtpd = None server: Optional[asyncio.AbstractServer] = None server_coro: Optional[Awaitable[asyncio.AbstractServer]] = None _thread_exception: Optional[Exception] = None def __init__( self, handler: Any, loop: Optional[asyncio.AbstractEventLoop] = None, *, ssl_context: Optional[ssl.SSLContext] = None, # SMTP parameters server_hostname: Optional[str] = None, **SMTP_parameters, ): self.handler = handler if loop is None: self.loop = asyncio.new_event_loop() else: self.loop = loop self.ssl_context = ssl_context self.SMTP_kwargs: Dict[str, Any] = {} if "server_kwargs" in SMTP_parameters: warn( "server_kwargs will be removed in version 2.0. " "Just specify the keyword arguments to forward to SMTP " "as kwargs to this __init__ method.", DeprecationWarning, ) self.SMTP_kwargs = SMTP_parameters.pop("server_kwargs") self.SMTP_kwargs.update(SMTP_parameters) if server_hostname: self.SMTP_kwargs["hostname"] = server_hostname # Emulate previous behavior of defaulting enable_SMTPUTF8 to True # It actually conflicts with SMTP class's default, but the reasoning is # discussed in the docs. self.SMTP_kwargs.setdefault("enable_SMTPUTF8", True) # self._factory_invoked = threading.Event() def factory(self): """Subclasses can override this to customize the handler/server creation.""" return SMTP(self.handler, **self.SMTP_kwargs) def _factory_invoker(self) -> Union[SMTP, _FakeServer]: """Wraps factory() to catch exceptions during instantiation""" try: self.smtpd = self.factory() if self.smtpd is None: raise RuntimeError("factory() returned None") return self.smtpd except Exception as err: self._thread_exception = err return _FakeServer(self.loop) finally: self._factory_invoked.set() @abstractmethod def _create_server(self) -> Awaitable[asyncio.AbstractServer]: """ Overridden by subclasses to actually perform the async binding to the listener endpoint. When overridden, MUST refer the _factory_invoker() method. """ def _cleanup(self): """Reset internal variables to prevent contamination""" self._thread_exception = None self._factory_invoked.clear() self.server_coro = None self.server = None self.smtpd = None def cancel_tasks(self, stop_loop: bool = True): """ Convenience method to stop the loop and cancel all tasks. Use loop.call_soon_threadsafe() to invoke this. 
""" if stop_loop: # pragma: nobranch self.loop.stop() for task in asyncio.all_tasks(self.loop): # This needs to be invoked in a thread-safe way task.cancel() @public class BaseThreadedController(BaseController, metaclass=ABCMeta): _thread: Optional[threading.Thread] = None def __init__( self, handler: Any, loop: Optional[asyncio.AbstractEventLoop] = None, *, ready_timeout: float = DEFAULT_READY_TIMEOUT, ssl_context: Optional[ssl.SSLContext] = None, # SMTP parameters server_hostname: Optional[str] = None, **SMTP_parameters, ): super().__init__( handler, loop, ssl_context=ssl_context, server_hostname=server_hostname, **SMTP_parameters, ) self.ready_timeout = float( os.getenv("AIOSMTPD_CONTROLLER_TIMEOUT", ready_timeout) ) @abstractmethod def _trigger_server(self): """ Overridden by subclasses to trigger asyncio to actually initialize the SMTP class (it's lazy initialization, done only on initial connection). """ def _run(self, ready_event: threading.Event) -> None: asyncio.set_event_loop(self.loop) try: self.server_coro = self._create_server() self.server = self.loop.run_until_complete(self.server_coro) except Exception as error: # pragma: on-wsl # Usually will enter this part only if create_server() cannot bind to the # specified host:port. # # Somehow WSL 1.0 (Windows Subsystem for Linux) allows multiple # listeners on one port?! # That is why we add "pragma: on-wsl" there, so this block will not affect # coverage on WSL 1.0. self._thread_exception = error return self.loop.call_soon(ready_event.set) self.loop.run_forever() # We reach this point when loop is ended (by external code) # Perform some stoppages to ensure endpoint no longer bound. assert self.server is not None self.server.close() self.loop.run_until_complete(self.server.wait_closed()) self.loop.close() self.server = None def start(self) -> None: """ Start a thread and run the asyncio event loop in that thread """ assert self._thread is None, "SMTP daemon already running" self._factory_invoked.clear() ready_event = threading.Event() self._thread = threading.Thread(target=self._run, args=(ready_event,)) self._thread.daemon = True self._thread.start() # Wait a while until the server is responding. start = time.monotonic() if not ready_event.wait(self.ready_timeout): # An exception within self._run will also result in ready_event not set # So, we first test for that, before raising TimeoutError if self._thread_exception is not None: # pragma: on-wsl # See comment about WSL1.0 in the _run() method raise self._thread_exception else: raise TimeoutError( "SMTP server failed to start within allotted time. " "This might happen if the system is too busy. " "Try increasing the `ready_timeout` parameter." ) respond_timeout = self.ready_timeout - (time.monotonic() - start) # Apparently create_server invokes factory() "lazily", so exceptions in # factory() go undetected. To trigger factory() invocation we need to open # a connection to the server and 'exchange' some traffic. try: self._trigger_server() except socket_timeout: # We totally don't care of timeout experienced by _testconn, pass except Exception: # Raise other exceptions though raise if not self._factory_invoked.wait(respond_timeout): raise TimeoutError( "SMTP server started, but not responding within allotted time. " "This might happen if the system is too busy. " "Try increasing the `ready_timeout` parameter." 
) if self._thread_exception is not None: raise self._thread_exception # Defensive if self.smtpd is None: raise RuntimeError("Unknown Error, failed to init SMTP server") def stop(self, no_assert: bool = False): """ Stop the loop, the tasks in the loop, and terminate the thread as well. """ assert no_assert or self._thread is not None, "SMTP daemon not running" self.loop.call_soon_threadsafe(self.cancel_tasks) if self._thread is not None: self._thread.join() self._thread = None self._cleanup() @public class BaseUnthreadedController(BaseController, metaclass=ABCMeta): def __init__( self, handler: Any, loop: Optional[asyncio.AbstractEventLoop] = None, *, ssl_context: Optional[ssl.SSLContext] = None, # SMTP parameters server_hostname: Optional[str] = None, **SMTP_parameters, ): super().__init__( handler, loop, ssl_context=ssl_context, server_hostname=server_hostname, **SMTP_parameters, ) self.ended = threading.Event() def begin(self): """ Sets up the asyncio server task and inject it into the asyncio event loop. Does NOT actually start the event loop itself. """ asyncio.set_event_loop(self.loop) self.server_coro = self._create_server() self.server = self.loop.run_until_complete(self.server_coro) async def finalize(self): """ Perform orderly closing of the server listener. NOTE: This is an async method; await this from an async or use loop.create_task() (if loop is still running), or loop.run_until_complete() (if loop has stopped) """ self.ended.clear() server = self.server assert server is not None server.close() await server.wait_closed() assert self.server_coro is not None # TODO: Where does .close() come from...? self.server_coro.close() # type: ignore[attr-defined] self._cleanup() self.ended.set() def end(self): """ Convenience method to asynchronously invoke finalize(). Consider using loop.call_soon_threadsafe to invoke this method, especially if your loop is running in a different thread. You can afterwards .wait() on ended attribute (a threading.Event) to check for completion, if needed. """ self.ended.clear() if self.loop.is_running(): # TODO: Should store and await on task at some point. self.loop.create_task(self.finalize()) # type: ignore[unused-awaitable] else: self.loop.run_until_complete(self.finalize()) @public class InetMixin(BaseController, metaclass=ABCMeta): def __init__( self, handler: Any, hostname: Optional[str] = None, port: int = 8025, loop: Optional[asyncio.AbstractEventLoop] = None, **kwargs, ): super().__init__( handler, loop, **kwargs, ) self._localhost = get_localhost() self.hostname = self._localhost if hostname is None else hostname self.port = port def _create_server(self) -> Awaitable[asyncio.AbstractServer]: """ Creates a 'server task' that listens on an INET host:port. Does NOT actually start the protocol object itself; _factory_invoker() is only called upon fist connection attempt. """ return self.loop.create_server( self._factory_invoker, host=self.hostname, port=self.port, ssl=self.ssl_context, ) def _trigger_server(self): """ Opens a socket connection to the newly launched server, wrapping in an SSL Context if necessary, and read some data from it to ensure that factory() gets invoked. """ # At this point, if self.hostname is Falsy, it most likely is "" (bind to all # addresses). 
In such case, it should be safe to connect to localhost) hostname = self.hostname or self._localhost with ExitStack() as stk: s = stk.enter_context(create_connection((hostname, self.port), 1.0)) if self.ssl_context: client_ctx = _server_to_client_ssl_ctx(self.ssl_context) s = stk.enter_context(client_ctx.wrap_socket(s)) s.recv(1024) @public class UnixSocketMixin(BaseController, metaclass=ABCMeta): # pragma: no-unixsock def __init__( self, handler: Any, unix_socket: Union[str, Path], loop: Optional[asyncio.AbstractEventLoop] = None, **kwargs, ): super().__init__( handler, loop, **kwargs, ) self.unix_socket = str(unix_socket) def _create_server(self) -> Awaitable[asyncio.AbstractServer]: """ Creates a 'server task' that listens on a Unix Socket file. Does NOT actually start the protocol object itself; _factory_invoker() is only called upon fist connection attempt. """ return self.loop.create_unix_server( self._factory_invoker, path=self.unix_socket, ssl=self.ssl_context, ) def _trigger_server(self): """ Opens a socket connection to the newly launched server, wrapping in an SSL Context if necessary, and read some data from it to ensure that factory() gets invoked. """ with ExitStack() as stk: s: makesock = stk.enter_context(makesock(AF_UNIX, SOCK_STREAM)) s.connect(self.unix_socket) if self.ssl_context: client_ctx = _server_to_client_ssl_ctx(self.ssl_context) s = stk.enter_context(client_ctx.wrap_socket(s)) s.recv(1024) @public class Controller(InetMixin, BaseThreadedController): """Provides a multithreaded controller that listens on an INET endpoint""" def _trigger_server(self): # Prevent confusion on which _trigger_server() to invoke. # Or so LGTM.com claimed InetMixin._trigger_server(self) @public class UnixSocketController( # pragma: no-unixsock UnixSocketMixin, BaseThreadedController ): """Provides a multithreaded controller that listens on a Unix Socket file""" def _trigger_server(self): # pragma: no-unixsock # Prevent confusion on which _trigger_server() to invoke. # Or so LGTM.com claimed UnixSocketMixin._trigger_server(self) @public class UnthreadedController(InetMixin, BaseUnthreadedController): """Provides an unthreaded controller that listens on an INET endpoint""" @public class UnixSocketUnthreadedController( # pragma: no-unixsock UnixSocketMixin, BaseUnthreadedController ): """Provides an unthreaded controller that listens on a Unix Socket file""" aio-libs-aiosmtpd-b634d9b/aiosmtpd/docs/000077500000000000000000000000001462210711200202125ustar00rootroot00000000000000aio-libs-aiosmtpd-b634d9b/aiosmtpd/docs/.gitignore000066400000000000000000000000071462210711200221770ustar00rootroot00000000000000_build aio-libs-aiosmtpd-b634d9b/aiosmtpd/docs/Makefile000066400000000000000000000151621462210711200216570ustar00rootroot00000000000000# Makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = BUILDDIR = _build # User-friendly check for sphinx-build ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) endif # Internal variables. 
PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . # the i18n builder cannot share the environment and doctrees with the others I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " singlehtml to make a single large HTML file" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " devhelp to make HTML files and a Devhelp project" @echo " epub to make an epub" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " latexpdf to make LaTeX files and run them through pdflatex" @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" @echo " text to make text files" @echo " man to make manual pages" @echo " texinfo to make Texinfo files" @echo " info to make Texinfo files and run them through makeinfo" @echo " gettext to make PO message catalogs" @echo " changes to make an overview of all changed/added/deprecated items" @echo " xml to make Docutils-native XML files" @echo " pseudoxml to make pseudoxml-XML files for display purposes" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" clean: rm -rf $(BUILDDIR)/* html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." singlehtml: $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml @echo @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/aiosmtpd.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/aiosmtpd.qhc" devhelp: $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp @echo @echo "Build finished." @echo "To view the help file:" @echo "# mkdir -p $$HOME/.local/share/devhelp/aiosmtpd" @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/aiosmtpd" @echo "# devhelp" epub: $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub @echo @echo "Build finished. The epub file is in $(BUILDDIR)/epub." latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 
@echo "Run \`make' in that directory to run these through (pdf)latex" \ "(use \`make latexpdf' here to do that automatically)." latexpdf: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through pdflatex..." $(MAKE) -C $(BUILDDIR)/latex all-pdf @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." latexpdfja: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through platex and dvipdfmx..." $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." text: $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text @echo @echo "Build finished. The text files are in $(BUILDDIR)/text." man: $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man @echo @echo "Build finished. The manual pages are in $(BUILDDIR)/man." texinfo: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." @echo "Run \`make' in that directory to run these through makeinfo" \ "(use \`make info' here to do that automatically)." info: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo "Running Texinfo files through makeinfo..." make -C $(BUILDDIR)/texinfo info @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." gettext: $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale @echo @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." xml: $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml @echo @echo "Build finished. The XML files are in $(BUILDDIR)/xml." pseudoxml: $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml @echo @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." aio-libs-aiosmtpd-b634d9b/aiosmtpd/docs/NEWS.rst000066400000000000000000000316001462210711200215200ustar00rootroot00000000000000################### NEWS for aiosmtpd ################### .. towncrier release notes start 1.4.6 (2024-05-18) ================== * STARTTLS is now fully enforced if used. 1.4.5 (2024-03-02) ================== * Fixed incorrect handling of newlines. 
1.4.4.post2 (2023-01-19) ======================== Fixed/Improved -------------- * Prevent unclean repo from being built (Closes #365) * Reduce chance of not-ready-for-release packages from being uploaded 1.4.4 (2023-01-17) ================== Fixed/Improved -------------- * No longer expect an implicit creation of the event loop through ``get_event_loop()`` (Closes #353) 1.4.3 (2022-12-21) ===================== Fixed/Improved -------------- * Is now compatible with uvloop * Add compatibility for Python 3.10 and 3.11 (Closes #322) * Test matrix update (Closes #306) * Drop Python 3.6, PyPy 3.6 (some) and MacOS 10 * Add Python 3.10 & 3.11, PyPy 3.7 & 3.8, Ubuntu 22.04, MacOS 11 & 12 * Expanded tox environments * Longer AUTOSTOP_DELAY especially for Windows (Closes #313) * Update signing keys * Some documentation fixes 1.4.2 (2021-03-08) ===================== Fixed/Improved -------------- * Controller's ``ready_timeout`` parameter increased from ``1.0`` to ``5.0``. This won't slow down Controller startup because it's just a timeout limit (instead of a sleep delay), but this should help prevent Controller from giving up too soon, especially during situations where system/network is a bit busy causing slowdowns. (See #262) * Timeout messages in ``Controller.start()`` gets more details and a mention about the ``ready_timeout`` parameter. (See #262) * Prevent sensitive AUTH information leak by sanitizing the repr() of AuthResult and LoginPassword. 1.4.1 (2021-03-04) ================== Fixed/Improved -------------- * Maximum length of email address local part is customizable, defaults to no limit. (Closes #257) 1.4.0 (2021-02-26) ================== Added ----- * Support for |PROXY Protocol|_ (Closes #174) * Example for authentication * SSL Support for CLI. See :ref:`the man page ` for more info. (Closes #172) * New :class:`UnixSocketController` class to implement Unix socket-based SMTP server (Closes #114) .. _`PROXY Protocol`: https://www.haproxy.com/blog/using-haproxy-with-the-proxy-protocol-to-better-secure-your-database/ .. |PROXY Protocol| replace:: **PROXY Protocol** Fixed/Improved -------------- * ``pypy3`` testenv for tox can now run on Windows * ``static`` testenv now auto-skipped on Windows * Now uses Sphinx's Doctest facility, which is much more flexible than pytest's doctest 1.3.2 (2021-02-20) ================== Added ----- * GPG Signing Key info to ``README.rst`` and PyPI Long Desc * Hidden ``static`` test env for static code checking Fixed/Improved -------------- * Fixed Documentation Issues that might cause automatic package builders to fail * Also consider ``EAFNOSUPPORT`` in IPv6 detection (Closes #244, again) * Update PyPI Long Description 1.3.1 (2021-02-18) ================== Fixed/Improved -------------- * ``ready_timeout`` now actually enforced, raising ``TimeoutError`` if breached * Hides only expected exceptions raised by ``Controller._testconn()`` * No longer fail with opaque "Unknown Error" if ``hostname=""`` (Closes #244) * No longer hardcode localhost as ``::1`` but perform IPv6 detection first (Closes #244) 1.3.0 (2021-02-09) ================== Added ----- * New :meth:`handle_EHLO` interaction where said method can now modify list of responses to the EHLO command (Closes #155) Fixed/Improved -------------- * No longer have to workaround ``bpo-27931`` which has been fixed in Python 3.6 anyways. 
* New :meth:`handle_EHLO` interaction where said method can now modify list of responses to the EHLO command (Closes #155) * ``authenticator`` system improves on ``auth_callback`` by enabling the called function to see the SMTP Session and other info. (``auth_callback`` will be deprecated in 2.0) * ``__version__`` is now an attribute in ``__init__.py``, and can be imported from the 'plain' ``aiosmtpd`` module. (It gets reimported to ``aiosmtpd.smtp``, so programs relying on ``aiosmtpd.smtp.__version__`` should still work.) (Closes #241) * Uses pure ``pytest`` for all test cases (Closes #198) 1.2.4 (2021-01-24) ================== Added ----- * Optional (default-disabled) logging of ``AUTH`` interaction -- with severe warnings Fixed/Improved -------------- * ``AUTH`` command line now sanitized before logging (Closes #233) * Remove special handling for lone ``=`` during AUTH; it is now treated as simple Base64-encoded ``b""``. This is the correct, strict interpretation of :rfc:`4954` mentions about ``=`` 1.2.3 (2021-01-14) ================== Added ----- * Test for ``SMTP.__init__`` behavior after taking out code that edits TLS Context * Implement mechanism to limit the number of commands sent (Closes #145) Fixed/Improved -------------- * ``handle_exception()`` no longer gets called when the client disconnected (Closes #127, #162) * Implement & enforce line-length-limit, thus becoming Compliant with RFC 5321 § 4.5.3.1.6 * Delay all SMTP Status Code replies during ``DATA`` phase until the phase termination (Closes #9) * Now catches ``Controller.factory()`` failure during ``Controller.start()`` (Closes #212) * :class:`SMTP` no longer edits user-supplied SSL Context (Closes #191) * Implement waiting for SSL setup/handshake within ``STARTTLS`` handler to be able to catch and handle (log) errors and to avoid session hanging around until timeout in such cases * Add session peer information to some logging output where it was missing * Support AUTH mechanisms with dash(es) in their names (Closes #224) * Remove some double-logging of commands sent by clients * LMTP servers now correctly advertise extensions in reply to ``LHLO`` (Closes #123, #124) * ``NOOP`` now accepted before ``STARTTLS`` even if ``require_starttls=True`` (Closes #124) 1.2.2 (2020-11-08) ================== Added ----- * **Apache License version 2.0** * Support for SMTP ``AUTH``, with AUTH hooks feature * Built-in implementation for ``AUTH PLAIN`` and ``AUTH LOGIN`` logic (Closes #102) * Feature to inject keyword args during server class instantiation in ``Controller.factory`` (potentially Closes #194, #179) * Support for Python 3.8 and 3.9.0 (also Closes #188) Fixed/Improved -------------- * Don't strip last ``\r\n`` prior to terminating dot. * Slight improvement to make Test Suite more maintainable * No more failures/DeprecationWarnings for Python 3.8 (Closes #167) * Faster ``_handle_client()`` processing * Faster method access for ``smtp_*``, ``handle_*``, and ``auth_*`` hooks Removed ------- * Unit Tests that mocked too deep, possibly masking observable internal behaviors * Drop support for Python 3.5 1.2 (2018-09-01) ================ * Improve the documentation on enabling ``STARTTLS``. (Closes #125) * Add customizable ident field to SMTP class constructor. (Closes #131) * Remove asyncio.coroutine decorator as it was introduced in Python 3.5. * Add Controller docstring, explain dual-stack binding. (Closes #140) * Gracefully handle ASCII decoding exceptions. (Closes #142) * Fix typo. 
* Improve Controller ssl_context documentation. * Add timeout feature. (Partial fix for #145) 1.1 (2017-07-06) ================ * Drop support for Python 3.4. * As per RFC 5321, §4.1.4, multiple ``HELO`` / ``EHLO`` commands in the same session are semantically equivalent to ``RSET``. (Closes #78) * As per RFC 5321, $4.1.1.9, ``NOOP`` takes an optional argument, which is ignored. **API BREAK** If you have a handler that implements ``handle_NOOP()``, it previously took zero arguments but now requires a single argument. (Closes #107) * The command line options ``--version`` / ``-v`` has been added to print the package's current version number. (Closes #111) * General improvements in the ``Controller`` class. (Closes #104) * When aiosmtpd handles a ``STARTTLS`` it must arrange for the original transport to be closed when the wrapped transport is closed. This fixes a hidden exception which occurs when an EOF is received on the original tranport after the connection is lost. (Closes #83) * Widen the catch of ``ConnectionResetError`` and ``CancelledError`` to also catch such errors from handler methods. (Closes #110) * Added a manpage for the ``aiosmtpd`` command line script. (Closes #116) * Added much better support for the ``HELP``. There's a new decorator called ``@syntax()`` which you can use in derived classes to decorate ``smtp_*()`` methods. These then show up in ``HELP`` responses. This also fixes ``HELP`` responses for the ``LMTP`` subclass. (Closes #113) * The ``Controller`` class now takes an optional keyword argument ``ssl_context`` which is passed directly to the asyncio ``create_server()`` call. 1.0 (2017-05-15) ================ * Release. 1.0rc1 (2017-05-12) =================== * Improved documentation. 1.0b1 (2017-05-07) ================== * The connection peer is displayed in all INFO level logging. * When running the test suite, you can include a ``-E`` option after the ``--`` separator to boost the debugging output. * The main SMTP readline loops are now more robust against connection resets and mid-read EOFs. (Closes #62) * ``Proxy`` handlers work with ``SMTP`` servers regardless of the value of the ``decode_data`` argument. * The command line script is now installed as ``aiosmtpd`` instead of ``smtpd``. * The ``SMTP`` class now does a better job of handling Unicode, when the client does not claim to support ``SMTPUTF8`` but sends non-ASCII anyway. The server forces ASCII-only handling when ``enable_SMTPUTF8=False`` (the default) is passed to the constructor. The command line arguments ``decode_data=True`` and ``enable_SMTPUTF8=True`` are no longer mutually exclusive. * Officially support Windows. (Closes #76) 1.0a5 (2017-04-06) ================== * A new handler hook API has been added which provides more flexibility but requires more responsibility (e.g. hooks must return a string status). Deprecate ``SMTP.ehlo_hook()`` and ``SMTP.rset_hook()``. * Deprecate handler ``process_message()`` methods. Use the new asynchronous ``handle_DATA()`` methods, which take a session and an envelope object. * Added the ``STARTTLS`` extension. Given by Konstantin Volkov. * Minor changes to the way the ``Debugging`` handler prints ``mail_options`` and ``rcpt_options`` (although the latter is still not support in ``SMTP``). * ``DATA`` method now respects original line endings, and passing size limits is now handled better. Given by Konstantin Volkov. * The ``Controller`` class has two new optional keyword arguments. 
- ``ready_timeout`` specifies a timeout in seconds that can be used to limit the amount of time it waits for the server to become ready. This can also be overridden with the environment variable ``AIOSMTPD_CONTROLLER_TIMEOUT``. (Closes #35) - ``enable_SMTPUTF8`` is passed through to the ``SMTP`` constructor in the default factory. If you override ``Controller.factory()`` you can pass ``self.enable_SMTPUTF8`` yourself. * Handlers can define a ``handle_tls_handshake()`` method, which takes a session object, and is called if SSL is enabled during the making of the connection. (Closes #48) * Better Windows compatibility. * Better Python 3.4 compatibility. * Use ``flufl.testing`` package for nose2 and flake8 plugins. * The test suite has achieved 100% code coverage. (Closes #2) 1.0a4 (2016-11-29) ================== * The SMTP server connection identifier can be changed by setting the ``__ident__`` attribute on the ``SMTP`` instance. (Closes #20) * Fixed a new incompatibility with the ``atpublic`` library. 1.0a3 (2016-11-24) ================== * Fix typo in ``Message.prepare_message()`` handler. The crafted ``X-RcptTos`` header is renamed to ``X-RcptTo`` for backward compatibility with older libraries. * Add a few hooks to make subclassing easier: * ``SMTP.ehlo_hook()`` is called just before the final, non-continuing 250 response to allow subclasses to add additional ``EHLO`` sub-responses. * ``SMTP.rset_hook()`` is called just before the final 250 command to allow subclasses to provide additional ``RSET`` functionality. * ``Controller.make_socket()`` allows subclasses to customize the creation of the socket before binding. 1.0a2 (2016-11-22) ================== * Officially support Python 3.6. * Fix support for both IPv4 and IPv6 based on the ``--listen`` option. Given by Jason Coombs. (Closes #3) * Correctly handle client disconnects. Given by Konstantin vz'One Enchant. * The SMTP class now takes an optional ``hostname`` argument. Use this if you want to avoid the use of ``socket.getfqdn()``. Given by Konstantin vz'One Enchant. * Close the transport and thus the connection on SMTP ``QUIT``. (Closes #11) * Added an ``AsyncMessage`` handler. Given by Konstantin vz'One Enchant. * Add an examples/ directory. * Flake8 clean. 1.0a1 (2015-10-19) ================== * Initial release. aio-libs-aiosmtpd-b634d9b/aiosmtpd/docs/__init__.py000066400000000000000000000000001462210711200223110ustar00rootroot00000000000000aio-libs-aiosmtpd-b634d9b/aiosmtpd/docs/_exts/000077500000000000000000000000001462210711200213345ustar00rootroot00000000000000aio-libs-aiosmtpd-b634d9b/aiosmtpd/docs/_exts/autoprogramm.py000066400000000000000000000267451462210711200244410ustar00rootroot00000000000000""" autoprogramm ~~~~~~~~~~~~ ``autoprogram-modified`` (hence the two "m"s at the end of the name.) Documenting CLI programs. Adapted & simplified from https://github.com/sphinx-contrib/autoprogram Besides the name change, here is a summary of the changes: * Remove unit testing * Remove .lower() when processing metavar/desc * Add setup() return dict * Add :notitle: * Add :nodesc: * Add :options_title: * Add :options_adornment: * black-ification **WARNING:** This is custom-stripped for aiosmtpd; using this custom extension outside of aiosmtpd might not work. Check the code! The aiosmtpd Developers will NOT provide ANY kind of support with this custom extension; it is just hacked together in a half-day by pepoluan <== all blame goes to him! 
:copyright: Copyright 2014 by Hong Minhee :license: BSD """ import argparse import builtins import collections import os from functools import reduce from typing import Any, Dict, Iterable, List, Optional, Tuple import sphinx from docutils import nodes # pytype: disable=pyi-error from docutils.parsers.rst import Directive # pytype: disable=pyi-error from docutils.parsers.rst.directives import unchanged # pytype: disable=pyi-error from docutils.statemachine import StringList from sphinx.util.nodes import nested_parse_with_titles __all__ = ("AutoprogrammDirective", "import_object", "scan_programs", "setup") # Need to temporarily disable this particular check, because although this function # is guaranteed to return a proper value (due to how ArgumentParser works), pytype # doesn't really know that, and therefore raised an error in the (to its view) # possible fallthrough of "implicit return None" if the "for a" loop exits without # finding the right item. # # pytype: disable=bad-return-type def get_subparser_action( parser: argparse.ArgumentParser ) -> Optional[argparse._SubParsersAction]: neg1_action = parser._actions[-1] if isinstance(neg1_action, argparse._SubParsersAction): return neg1_action for a in parser._actions: if isinstance(a, argparse._SubParsersAction): return a return None # pytype: enable=bad-return-type def scan_programs( parser: argparse.ArgumentParser, command: Optional[List[str]] = None, maxdepth: int = 0, depth: int = 0, groups: bool = False, ): if command is None: command = [] if maxdepth and depth >= maxdepth: return if groups: yield command, [], parser for group in parser._action_groups: options = list(scan_options(group._group_actions)) if options: yield command, options, group else: options = list(scan_options(parser._actions)) yield command, options, parser if parser._subparsers: choices: Iterable[tuple[str, int]] = () subp_action = get_subparser_action(parser) if subp_action: # noinspection PyUnresolvedReferences choices = subp_action.choices.items() if not isinstance(choices, collections.OrderedDict): choices = sorted(choices, key=lambda pair: pair[0]) for cmd, sub in choices: if isinstance(sub, argparse.ArgumentParser): yield from scan_programs(sub, command + [cmd], maxdepth, depth + 1) def scan_options(actions: list): for arg in actions: if not (arg.option_strings or isinstance(arg, argparse._SubParsersAction)): yield format_positional_argument(arg) for arg in actions: if arg.option_strings and arg.help is not argparse.SUPPRESS: yield format_option(arg) def format_positional_argument(arg: argparse.Action) -> Tuple[List[str], str]: desc: str = (arg.help or "") % {"default": arg.default} name: str if isinstance(arg.metavar, tuple): name = arg.metavar[0] else: name = arg.metavar or arg.dest or "" return [name], desc def format_option(arg: argparse.Action) -> Tuple[List[str], str]: desc = (arg.help or "") % {"default": arg.default} if not isinstance(arg, (argparse._StoreAction, argparse._AppendAction)): names = list(arg.option_strings) return names, desc if arg.choices is not None: value = "{{{0}}}".format(",".join(str(c) for c in arg.choices)) else: metavar = arg.metavar or arg.dest if not isinstance(metavar, tuple): metavar = (metavar,) value = "<{0}>".format("> <".join(metavar)) names = [ "{0} {1}".format(option_string, value) for option_string in arg.option_strings ] return names, desc def import_object(import_name: str) -> Any: module_name, expr = import_name.split(":", 1) try: mod = __import__(module_name) except ImportError: # This happens if the file is a 
script with no .py extension. Here we # trick autoprogram to load a module in memory with the contents of # the script, if there is a script named module_name. Otherwise, raise # an ImportError as it did before. import glob import sys import imp for p in sys.path: f = glob.glob(os.path.join(p, module_name)) if len(f) > 0: with open(f[0]) as fobj: codestring = fobj.read() foo = imp.new_module("foo") # noinspection BuiltinExec exec(codestring, foo.__dict__) # noqa: DUO105 # nosec sys.modules["foo"] = foo mod = __import__("foo") break else: raise ImportError("No module named {}".format(module_name)) mod = reduce(getattr, module_name.split(".")[1:], mod) globals_ = builtins if not isinstance(globals_, dict): globals_ = globals_.__dict__ # type: ignore[assignment] return eval(expr, globals_, mod.__dict__) # type: ignore[arg-type] # noqa: DUO104 # nosec class AutoprogrammDirective(Directive): has_content = False required_arguments = 1 option_spec = { "prog": unchanged, "maxdepth": unchanged, "start_command": unchanged, "strip_usage": unchanged, "no_usage_codeblock": unchanged, "groups": unchanged, "notitle": unchanged, "nodesc": unchanged, "options_title": unchanged, "options_adornment": unchanged, } def make_rst(self): (import_name,) = self.arguments parser = import_object(import_name or "__undefined__") prog = self.options.get("prog") original_prog = None if prog: original_prog = parser.prog parser.prog = prog start_command = self.options.get("start_command", "").split(" ") strip_usage = "strip_usage" in self.options usage_codeblock = "no_usage_codeblock" not in self.options maxdepth = int(self.options.get("maxdepth", 0)) groups = "groups" in self.options options_title = self.options.get("options_title") options_adornment = self.options.get("options_adornment", "~") if start_command[0] == "": start_command.pop(0) if start_command: def get_start_cmd_parser( p: argparse.ArgumentParser, ) -> argparse.ArgumentParser: looking_for = start_command.pop(0) action = get_subparser_action(p) if not action: raise ValueError("No actions for command " + looking_for) # noinspection PyUnresolvedReferences subp = action.choices[looking_for] if start_command: return get_start_cmd_parser(subp) return subp parser = get_start_cmd_parser(parser) if prog and parser.prog.startswith(original_prog): parser.prog = parser.prog.replace(original_prog, prog, 1) for commands, options, group_or_parser in scan_programs( parser, maxdepth=maxdepth, groups=groups ): if isinstance(group_or_parser, argparse._ArgumentGroup): title = group_or_parser.title description = group_or_parser.description usage = None epilog = None is_subgroup = True is_program = False else: cmd_parser = group_or_parser if prog and cmd_parser.prog.startswith(original_prog): cmd_parser.prog = cmd_parser.prog.replace(original_prog, prog, 1) title = cmd_parser.prog.rstrip() description = cmd_parser.description usage = cmd_parser.format_usage() epilog = cmd_parser.epilog is_subgroup = bool(commands) is_program = True if "notitle" in self.options: title = None if "nodesc" in self.options: description = None yield from render_rst( title, options, is_program=is_program, is_subgroup=is_subgroup, description=description, usage=usage, usage_strip=strip_usage, usage_codeblock=usage_codeblock, epilog=epilog, options_title=options_title, options_adornment=options_adornment, ) def run(self) -> list: node = nodes.section() node.document = self.state.document result = StringList() for line in self.make_rst(): result.append(line, "") nested_parse_with_titles(self.state, 
result, node) return node.children def render_rst( title: Optional[str], options: List[Tuple[List[str], str]], is_program: bool, is_subgroup: bool, description: Optional[str], usage: Optional[str], usage_strip: bool, usage_codeblock: bool, epilog: Optional[str], options_title: Optional[str], options_adornment: str, ): if usage_strip: assert usage is not None to_strip = (title or "").rsplit(" ", 1)[0] len_to_strip = len(to_strip) - 4 usage_lines: List[str] = usage.splitlines() usage = os.linesep.join( [ usage_lines[0].replace(to_strip, "..."), ] + [line[len_to_strip:] for line in usage_lines[1:]] ) yield "" if title is not None: if is_program: yield ".. program:: " + title yield "" yield title yield ("!" if is_subgroup else "?") * len(title) yield "" yield from (description or "").splitlines() yield "" if usage is None: pass elif usage_codeblock: yield ".. code-block:: console" yield "" for usage_line in usage.splitlines(): yield " " + usage_line else: yield usage yield "" if options_title: yield options_title yield options_adornment * len(options_title) for option_strings, help_ in options: yield ".. option:: {0}".format(", ".join(option_strings)) yield "" yield " " + help_.replace("\n", " \n") yield "" for line in (epilog or "").splitlines(): yield line or "" def setup(app: sphinx.application.Sphinx) -> Dict[str, Any]: app.add_directive("autoprogramm", AutoprogrammDirective) return { "version": "0.2a0", "parallel_read_safe": True, "parallel_write_safe": True, } aio-libs-aiosmtpd-b634d9b/aiosmtpd/docs/_static/000077500000000000000000000000001462210711200216405ustar00rootroot00000000000000aio-libs-aiosmtpd-b634d9b/aiosmtpd/docs/_static/aiosmtpd.css000066400000000000000000000023331462210711200241730ustar00rootroot00000000000000.boldital { font-weight: bold; font-style: italic; } .parthead { font-weight: bold; border-bottom-width: 1px; border-bottom-style: solid; padding-right: 10em; margin-top: 1em; } div.body h3 { margin-left: -5px; font-size: 125%; } div.body h4 { margin-left: 0; font-weight: bold; background-color: transparent !important; border-bottom-style: none !important; } .pre, pre, code { font-family: "Fira Code",Menlo,Consolas,"Ubuntu Mono",Inconsolata,"Bitstream Vera Sans Mono","lucida console","Courier New",monospace; font-size: 100%; } div.highlight pre { font-size: 90%; } /* code.descname { font-size: 100% !important; } */ dl.class > dt { font-size: 100% !important; } dl.attribute > dt { font-size: 100% !important; } dl.field-list { margin-bottom: 0.25em !important; } dl.field-list ul.simple { margin-bottom: 0.25em !important; } .sig-paren { font-size: 100% !important; } div#news-for-aiosmtpd h3 { background-color: transparent !important; border-bottom-style: none !important; } em.sig-param > span.default_value, em.sig-param > span.o { color: rgb(128, 128, 128); } a { text-decoration: underline dotted !important; } aio-libs-aiosmtpd-b634d9b/aiosmtpd/docs/auth.rst000066400000000000000000000163621462210711200217150ustar00rootroot00000000000000.. _auth: ======================= Authentication System ======================= ``aiosmtpd`` provides a framework for SMTP Authentication that fully complies with :rfc:`4954`. Activating Authentication ========================= ``aiosmtpd`` authentication is always activated, but attempts to authenticate will always be rejected unless the :attr:`authenticator` parameter of :class:`~aiosmtpd.smtp.SMTP` is set to a valid & working :ref:`authcallback`. 
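For a quick illustration, here is a minimal sketch of such an authenticator callback
being wired into a controller. The function name, the hard-coded credentials, and the
``auth_require_tls=False`` setting are purely illustrative assumptions for local
experimentation; a real deployment should consult an actual credential store and keep
TLS required:

.. code-block:: python

    from aiosmtpd.controller import Controller
    from aiosmtpd.handlers import Sink
    from aiosmtpd.smtp import AuthResult, LoginPassword


    def authenticator(server, session, envelope, mechanism, auth_data):
        # For the built-in LOGIN and PLAIN mechanisms, auth_data is a
        # LoginPassword instance; other mechanisms may pass something else.
        if not isinstance(auth_data, LoginPassword):
            return AuthResult(success=False, handled=False)
        # Hypothetical check -- replace with a real credential lookup.
        if auth_data.login == b"user" and auth_data.password == b"secret":
            return AuthResult(success=True)
        return AuthResult(success=False, handled=False)


    controller = Controller(
        Sink(),
        hostname="127.0.0.1",
        port=8025,
        authenticator=authenticator,   # passed through to the SMTP instance
        auth_require_tls=False,        # for local testing only
    )
    controller.start()

Once started this way, a successful ``AUTH PLAIN`` or ``AUTH LOGIN`` exchange with the
matching credentials results in the session being marked as authenticated.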
AUTH API ======== The ``aiosmtpd`` Authentication Framework comprises several components, who are collectivelly called the "AUTH API". .. _authhandler: AUTH Handler Hook ----------------- .. py:method:: handle_AUTH(server: SMTP, session: Session, envelope: Envelope, args) :async: Called to handle ``AUTH`` command, if you need custom AUTH behavior. Most of the time, you will NOT *need* to implement this hook; :ref:`authmech` are provided to override/implement selective SMTP AUTH mechanisms (see below). If you do implement this hook: You *MUST* comply with :rfc:`4954`. ``args`` will contain the list of words following the ``AUTH`` command. You will have to leverage the :meth:`SMTP.push` and :meth:`SMTP.challenge_auth` methods to interact with the clients. You will need to modify the :attr:`session.auth_data ` and :attr:`session.authenticated ` attributes. You may ignore the ``envelope``. .. _authmech: AUTH Mechanism Hooks -------------------- Separately from :ref:`authhandler`, ``aiosmtpd`` also implement support for "AUTH Mechanism Hooks". These **async** hooks will implement the logic for SMTP Authentication Mechanisms. Every AUTH Mechanism Hook is named ``auth_MECHANISM`` where ``MECHANISM`` is the all-uppercase name of the mechanism that the hook will implement. (Mechanism is the word following the ``AUTH`` command sent by client.) .. important:: If ``MECHANISM`` has a dash within its name, use **double-underscore** to represent the dash. For example, to implement a ``MECH-WITH-DASHES`` mechanism, name the AUTH hook as ``auth_MECH__WITH__DASHES``. Single underscores will not be modified. So a hook named ``auth_MECH_WITH_UNDERSCORE`` will implement the ``MECH_WITH_UNDERSCORE`` mechanism. (If in the future a SASL mechanism with double underscores in its name gets defined, this name-mangling mechanism will be revisited. That is very unlikely to happen, though.) Alternatively, you can also use the :func:`~aiosmtpd.smtp.auth_mechanism` decorator, which you can import from the :mod:`aiosmtpd.smtp` module. The SMTP class provides built-in AUTH hooks for the ``LOGIN`` and ``PLAIN`` mechanisms, named ``auth_LOGIN`` and ``auth_PLAIN``, respectively. If the handler class implements ``auth_LOGIN`` and/or ``auth_PLAIN``, then the methods of the handler instance will override the built-in methods. .. py:method:: auth_MECHANISM(server: SMTP, args: List[str]) -> aiosmtpd.smtp.AuthResult :async: :param server: The instance of the :class:`SMTP` class invoking the AUTH Mechanism hook :param args: A list of string split from the characters following the ``AUTH`` command. ``args[0]`` is usually equal to ``MECHANISM`` (unless the :func:`~aiosmtpd.smtp.auth_mechanism` decorator has been used). The AUTH hook MUST perform the actual validation of AUTH credentials. In the built-in AUTH hooks, this is done by invoking the function specified by the :attr:`authenticator` initialization argument. AUTH Mechanism Hooks in handlers are NOT required to do the same, and MAY implement their own authenticator system. The AUTH Mechanism Hook MUST return an instance of :class:`AuthResult` containing the result of the Authentication process. .. important:: Defining *additional* AUTH hooks in your handler will NOT disable the built-in LOGIN and PLAIN hooks; if you do not want to offer the LOGIN and PLAIN mechanisms, specify them in the :attr:`auth_exclude_mechanism` parameter of the :class:`SMTP` class. .. _authcallback: Authenticator Callback ---------------------- .. 
py:function:: Authenticator(server, session, envelope, mechanism, auth_data) -> AuthResult :param server: The :class:`~aiosmtpd.smtp.SMTP` instance that invoked the authenticator :param session: A :class:`Session` instance containing session data *so far* :param envelope: An :class:`Envelope` instance containing transaction data *so far* :param mechanism: name of the AUTH Mechanism chosen by the client :type mechanism: str :param auth_data: A data structure containing authentication data gathered by the AUTH Mechanism :return: Result of authentication :rtype: AuthResult This function would be invoked during or at the end of an Authentication Process by AUTH Mechanisms. Based on ``mechanism`` and ``auth_data``, this function should return a decision on whether Authentication has been successful or not. This function SHOULD NOT modify the attributes of ``session`` and ``envelope``. The type and contents of the ``auth_data`` parameter is wholly at the discretion of the calling AUTH Mechanism. For the built-in ``LOGIN`` and ``PLAIN`` Mechanisms, the type of data will be :class:`aiosmtpd.smtp.LoginPassword` .. versionadded:: 1.3 AuthResult API -------------- .. class:: AuthResult(*, success, handled, message, auth_data) .. py:attribute:: success :type: bool This attribute indicates whether Authentication is successful or not. .. py:attribute:: handled :type: bool :value: True This attribute indicates whether Authenticator Decision process (e.g., sending of status codes) have been carried out by Authenticator or not. If set to ``True``, :meth:`smtp_AUTH` will not perform additional processing and will simply exits. Applicable only if ``success=False`` .. py:attribute:: message :type: Optional[str] :value: None The message to send back to client, regardless of success status. This message will be sent as-is; as such, it MUST be prefixed with the correct SMTP Status Code and optionally, SMTP Extended Status Code. If not given (set/kept to ``None``), :meth:`smtp_AUTH` will use standard SMTP Status Code & Message. .. py:attribute:: auth_data :type: Any :value: None Optional free-form authentication data. This will be saved by :meth:`smtp_AUTH` into the ``session.auth_data`` attribute. If ``auth_data`` has the attribute ``login``, then :meth:`smtp_AUTH` will save ``auth_data.login`` into ``session.login_data`` as well. This is to cater for possible backward-compatibility requirements, where legacy handlers might be looking for ``session.login_data`` for some reasons. Security Considerations ======================= We have taken steps to prevent leakage of sensitive information (i.e., password) through logging by overriding the ``__repr__`` and ``__str__`` methods of the :class:`AuthResult` and :class:`LoginPassword` classes. However, we have no control on the (logging) output of your custom hooks. Please be very careful emitting/recording AUTH information to prevent leakage. Example ======= An example is provided in ``examples/authenticated_relayer``. aio-libs-aiosmtpd-b634d9b/aiosmtpd/docs/cli.rst000066400000000000000000000023451462210711200215170ustar00rootroot00000000000000.. _cli: ==================== Command line usage ==================== ``aiosmtpd`` provides a main entry point which can be used to run the server on the command line. There are two ways to run the server, depending on how the package has been installed. You can run the server by passing it to Python directly:: $ python3 -m aiosmtpd -n This starts a server on localhost, port 8025 without setting the uid to 'nobody' (i.e. 
because you aren't running it as root). Once you've done that, you can connect directly to the server using your favorite command line protocol tool. Type the ``QUIT`` command at the server once you see the greeting:: % telnet localhost 8025 Trying 127.0.0.1... Connected to localhost. Escape character is '^]'. 220 subdivisions Python SMTP ... QUIT 221 Bye Connection closed by foreign host. Of course, you could use Python's :mod:`smtplib` module, or any other SMTP client to talk to the server. Hit control-C at the server to stop it. The entry point may also be installed as the ``aiosmtpd`` command, so this is equivalent to the above ``python3`` invocation:: $ aiosmtpd -n Options ======= Optional arguments are described in the :ref:`man page ` document. aio-libs-aiosmtpd-b634d9b/aiosmtpd/docs/concepts.rst000066400000000000000000000132521462210711200225650ustar00rootroot00000000000000========== Concepts ========== There are two general ways you can run the SMTP server, via the :ref:`command line ` or :ref:`programmatically `. There are several dimensions in which you can extend the basic functionality of the SMTP server. You can implement an *event handler* which uses well defined :ref:`handler hooks ` that are called during the various steps in the SMTP dialog. If such a hook is implemented, it assumes responsibility for the status messages returned to the client. You can also :ref:`subclass ` the core ``SMTP`` class to implement new commands, or change the semantics of existing commands. For example, if you wanted to print the received message on the console, you could implement a handler that hooks into the ``DATA`` command. The contents of the message will be available on one of the hook's arguments, and your handler could print this content to stdout. On the other hand, if you wanted to implement an SMTP-like server that adds a new command called ``PING``, you would do this by subclassing ``SMTP``, adding a method that implements whatever semantics for ``PING`` that you want. .. _sessions_and_envelopes: Sessions and envelopes ====================== Two classes are used during the SMTP dialog with clients. Instances of these are passed to the handler hooks. .. note:: Handler Hooks MAY add new attributes to these classes for inter-hook coordination. Session ------- The session represents the state built up during a client's socket connection to the server. Each time a client connects to the server, a new session object is created. .. class:: Session(loop) :param loop: asyncio event loop currently running :class:`SMTP`. .. attribute:: peer Defaulting to None, this attribute will contain the transport's socket's |peername|_ value. .. attribute:: ssl Defaulting to None, this attribute will contain some extra information, as a dictionary, from the ``asyncio.sslproto.SSLProtocol`` instance. This dictionary provides additional information about the connection. It contains implementation-specific information so its contents may change, but it should roughly correspond to the information available through :meth:`asyncio.BaseTransport.get_extra_info` .. attribute:: host_name Defaulting to None, this attribute will contain the host name argument as seen in the ``HELO`` or ``EHLO`` (or for :ref:`LMTP `, the ``LHLO``) command. .. attribute:: extended_smtp Defaulting to False, this flag will be True when the ``EHLO`` greeting was seen, indicating :rfc:`ESMTP <1869>`. .. attribute:: loop This is the asyncio event loop instance. 
:ref:`hooks` can utilize this if needed, for instance invoking :meth:`~asyncio.loop.call_later` to set some timers. .. attribute:: login_data Contains the login information gathered during the ``AUTH`` procedure. If it contains ``None``, that means authentication has not taken place or has failed. .. warning:: This is the "legacy" login_data, populated only if :attr:`auth_callback` parameter is set. .. deprecated:: 1.3 This attribute **will be removed in version 2.0**. .. py:attribute:: auth_data Contains the authentication data returned by the :attr:`authenticator` callback. .. py:attribute:: authenticated :type: Optional[bool] A tri-state flag indicating status of authentication: * ``None`` := Authentication has not been performed * ``False`` := Authentication has been performed, but failed * ``True`` := Authentication has been performed, and succeeded Envelope -------- The envelope represents state built up during the client's SMTP dialog. Each time the protocol state is reset, a new envelope is created. E.g. when the SMTP ``RSET`` command is sent, the state is reset and a new envelope is created. A new envelope is also created after the ``DATA`` command is completed, or in certain error conditions as mandated by :rfc:`5321`. .. class:: Envelope .. attribute:: mail_from :type: str Defaulting to None, this attribute holds the email address given in the ``MAIL FROM`` command. .. attribute:: mail_options :type: List[str] Defaulting to None, this attribute contains a list of any ESMTP mail options provided by the client, such as those passed in by :meth:`smtplib.SMTP.sendmail` .. attribute:: content :type: AnyStr Defaulting to None, this attribute will contain the contents of the message as provided by the ``DATA`` command. If the ``decode_data`` parameter to the ``SMTP`` constructor was True, then this attribute will contain the UTF-8 decoded string, otherwise it will contain the raw bytes. .. attribute:: original_content :type: bytes Defaulting to None, this attribute will contain the contents of the message as provided by the ``DATA`` command. Unlike the :attr:`content` attribute, this attribute will always contain the raw bytes. .. attribute:: rcpt_tos :type: List[str] Defaulting to the empty list, this attribute will contain a list of the email addresses provided in the ``RCPT TO`` commands. .. attribute:: rcpt_options :type: List[str] Defaulting to the empty list, this attribute will contain the list of any recipient options provided by the client, such as those passed in by :meth:`smtplib.SMTP.sendmail` .. _peername: https://docs.python.org/3/library/asyncio-protocol.html?highlight=peername#asyncio.BaseTransport.get_extra_info .. |peername| replace:: ``peername`` aio-libs-aiosmtpd-b634d9b/aiosmtpd/docs/conf.py000066400000000000000000000236301462210711200215150ustar00rootroot00000000000000# Copyright 2014-2021 The aiosmtpd Developers # SPDX-License-Identifier: Apache-2.0 # aiosmtpd documentation build configuration file, originally created by # sphinx-quickstart on Fri Oct 16 12:18:52 2015. # # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. 
import datetime import sys from pathlib import Path from typing import Dict import sphinx_rtd_theme # noqa: F401 # pytype: disable=import-error try: # noinspection PyPackageRequirements from colorama import init as colorama_init # pytype: disable=import-error except ImportError: pass else: colorama_init() # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. repo_root = Path(".").expanduser().absolute().parent.parent def syspath_insert(pth: Path): print(f"Inserting {pth}") sys.path.insert(0, str(pth)) syspath_insert(repo_root) syspath_insert(repo_root / "aiosmtpd" / "docs" / "_exts") syspath_insert(repo_root / "aiosmtpd") # region -- General configuration ------------------------------------------------ # autoprogramm needs Sphinx>=1.2.2 # :classmethod: needs Sphinx>=2.1 # :noindex: needs Sphinx>=3.2 needs_sphinx = "3.2" # If you change the above, don't forget to change the version limit in # `RTD-requirements.txt` # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ "sphinx.ext.intersphinx", "sphinx.ext.autodoc", "sphinx.ext.doctest", "sphinx_autofixture", "autoprogramm", "sphinx_rtd_theme" ] # IMPORTANT: If you edit the above list, check if you need to edit the deps list # in `RTD-requirements.txt` # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] # The suffix of source filenames. source_suffix = ".rst" # The encoding of source files. # source_encoding = 'utf-8-sig' # The master toctree document. master_doc = "index" # General information about the project. author = "The aiosmtpd Developers" project = "aiosmtpd" # noinspection PyShadowingBuiltins copyright = f"2015-{datetime.datetime.now().year}, {author}" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # from aiosmtpd import __version__ # noqa: E402 # "noqa: E402" used to silence flake8 protest release = __version__ version = __version__ # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. language = "en" # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: # today = '' # Else, today_fmt is used as the format for a strftime call. # today_fmt = '%B %d, %Y' today_fmt = "%Y-%m-%d" # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ["_exts/*", ".git*", "*.py", "*.txt", "Makefile"] # The reST default role (used for this markup: `text`) to use for all # documents. # default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. # add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). # add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. # show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = "sphinx" # A list of ignored prefixes for module index sorting. 
modindex_common_prefix = ["aiosmtpd."] # If true, keep warnings as "system message" paragraphs in the built documents. # keep_warnings = False rst_prolog = f""" .. role:: boldital :class: boldital .. role:: part :class: parthead .. |author| replace:: {author} .. |copyright| replace:: {copyright} """ # endregion # region -- Extensions configuration --------------------------------------------- intersphinx_mapping = { "python": ("https://docs.python.org/3", None), } doctest_global_setup = """ import sys in_win32 = sys.platform == "win32" in_cygwin = sys.platform == "cygwin" """ # endregion # region -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # html_theme = "default" html_theme = "sphinx_rtd_theme" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. # html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". # html_title = None # A shorter title for the navigation bar. Default is the same as html_title. # html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. # html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. # html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ["_static"] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. # html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. # html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. # html_use_smartypants = True # Custom sidebar templates, maps document names to template names. # html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. # html_additional_pages = {} # If false, no module index is generated. # html_domain_indices = True # If false, no index is generated. # html_use_index = True # If true, the index is split into individual pages for each letter. # html_split_index = False # If true, links to the reST sources are added to the pages. # html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. # html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. # html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. # html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). # html_file_suffix = None # Output file base name for HTML help builder. 
htmlhelp_basename = "aiosmtpddoc" # endregion # region -- Options for LaTeX output --------------------------------------------- latex_elements: Dict[str, str] = { # The paper size ('letterpaper' or 'a4paper'). # 'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). # 'pointsize': '10pt', # Additional stuff for the LaTeX preamble. # 'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ ("index", "aiosmtpd.tex", "aiosmtpd Documentation", "aiosmtpd hackers", "manual"), ] # The name of an image file (relative to this directory) to place at the top of # the title page. # latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. # latex_use_parts = False # If true, show page references after internal links. # latex_show_pagerefs = False # If true, show URL addresses after external links. # latex_show_urls = False # Documents to append as an appendix to all manuals. # latex_appendices = [] # If false, no module index is generated. # latex_domain_indices = True # endregion # region -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ("manpage", "aiosmtpd", "asyncio based SMTP server", [author], 1), ] # If true, show URL addresses after external links. # man_show_urls = False # endregion # region -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ( "index", "aiosmtpd", "aiosmtpd Documentation", "aiosmtpd hackers", "aiosmtpd", "One line description of project.", "Miscellaneous", ), ] # Documents to append as an appendix to all manuals. # texinfo_appendices = [] # If false, no module index is generated. # texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. # texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. # texinfo_no_detailmenu = False # endregion def setup(app): # noqa: ANN001 app.add_css_file("aiosmtpd.css") aio-libs-aiosmtpd-b634d9b/aiosmtpd/docs/controller.rst000066400000000000000000000606411462210711200231360ustar00rootroot00000000000000.. _controller: ==================== Programmatic usage ==================== If you already have an `asyncio event loop`_, you can `create a server`_ using the :class:`~aiosmtpd.smtp.SMTP` class as the *protocol factory*, and then run the loop forever. If you need to pass arguments to the ``SMTP`` constructor, use :func:`functools.partial` or write your own wrapper function. You might also want to add a signal handler so that the loop can be stopped, say when you hit control-C. It's probably easier to use a *threaded controller* which runs the SMTP server in a separate thread with a dedicated event loop. The controller provides useful and reliable ``start`` and ``stop`` semantics so that the foreground thread doesn't block. Among other use cases, this makes it convenient to spin up an SMTP server for unit tests. In both cases, you need to pass a :ref:`handler ` to the ``SMTP`` constructor. Handlers respond to events that you care about during the SMTP dialog. .. 
important:: Consider running the controller in a separate Python process (e.g., using the :mod:`multiprocessing` module) if you don't want your main Python process to be blocked when aiosmtpd is handling extra-large emails. Using the controller ==================== .. _tcpserver: TCP-based Server ---------------- The :class:`~aiosmtpd.controller.Controller` class creates a TCP-based server, listening on an Internet endpoint (i.e., ``ip_address:port`` pair). Say you want to receive email for ``example.com`` and print incoming mail data to the console. Start by implementing a handler as follows: .. doctest:: >>> import asyncio >>> class ExampleHandler: ... async def handle_RCPT(self, server, session, envelope, address, rcpt_options): ... if not address.endswith('@example.com'): ... return '550 not relaying to that domain' ... envelope.rcpt_tos.append(address) ... return '250 OK' ... ... async def handle_DATA(self, server, session, envelope): ... print('Message from %s' % envelope.mail_from) ... print('Message for %s' % envelope.rcpt_tos) ... print('Message data:\n') ... for ln in envelope.content.decode('utf8', errors='replace').splitlines(): ... print(f'> {ln}'.strip()) ... print() ... print('End of message') ... return '250 Message accepted for delivery' Pass an instance of your ``ExampleHandler`` class to the ``Controller``, and then start it: .. doctest:: >>> from aiosmtpd.controller import Controller >>> controller = Controller(ExampleHandler()) >>> controller.start() The SMTP thread might run into errors during its setup phase; to catch this the main thread will timeout when waiting for the SMTP server to become ready. By default the timeout is set to 1 second but can be changed either by using the :envvar:`AIOSMTPD_CONTROLLER_TIMEOUT` environment variable or by passing a different ``ready_timeout`` duration to the Controller's constructor. Connect to the server and send a message, which then gets printed by ``ExampleHandler``: .. doctest:: >>> from smtplib import SMTP as Client >>> client = Client(controller.hostname, controller.port) >>> r = client.sendmail('a@example.com', ['b@example.com'], """\ ... From: Anne Person ... To: Bart Person ... Subject: A test ... Message-ID: ... ... Hi Bart, this is Anne. ... """) Message from a@example.com Message for ['b@example.com'] Message data: > From: Anne Person > To: Bart Person > Subject: A test > Message-ID: > > Hi Bart, this is Anne. End of message You'll notice that at the end of the ``DATA`` command, your handler's :meth:`handle_DATA` method was called. The sender, recipients, and message contents were taken from the envelope, and printed at the console. The handler methods also returns a successful status message. The ``ExampleHandler`` class also implements a :meth:`handle_RCPT` method. This gets called after the ``RCPT TO`` command is sanity checked. The method ensures that all recipients are local to the ``@example.com`` domain, returning an error status if not. It is the handler's responsibility to add valid recipients to the ``rcpt_tos`` attribute of the envelope and to return a successful status. Thus, if we try to send a message to a recipient not inside ``example.com``, it is rejected: .. doctest:: >>> client.sendmail('aperson@example.com', ['cperson@example.net'], """\ ... From: Anne Person ... To: Chris Person ... Subject: Another test ... Message-ID: ... ... Hi Chris, this is Anne. ... """) Traceback (most recent call last): ... 
smtplib.SMTPRecipientsRefused: {'cperson@example.net': (550, b'not relaying to that domain')} When you're done with the SMTP server, stop it via the controller. .. doctest:: >>> controller.stop() The server is guaranteed to be stopped. .. doctest:: >>> client.connect(controller.hostname, controller.port) Traceback (most recent call last): ... ConnectionRefusedError: ... There are a number of built-in :ref:`handler classes ` that you can use to do some common tasks, and it's easy to write your own handler. For a full overview of the methods that handler classes may implement, see the section on :ref:`handler hooks `. Unix Socket-based Server ------------------------ The :class:`~aiosmtpd.controller.UnixSocketController` class creates a server listening to a Unix Socket (i.e., a special file that can act as a 'pipe' for interprocess communication). Usage is identical with the example described in the :ref:`tcpserver` section above, with some differences: **Rather than specifying a hostname:port to listen on, you specify the Socket's filepath:** .. doctest:: unix_socket :skipif: in_win32 or in_cygwin >>> from aiosmtpd.controller import UnixSocketController >>> from aiosmtpd.handlers import Sink >>> controller = UnixSocketController(Sink(), unix_socket="smtp_socket~") >>> controller.start() .. warning:: Do not exceed the Operating System limit for the length of the socket file path. On Linux, the limit is 108 characters. On BSD OSes, it's 104 characters. **Rather than connecting to IP:port, you connect to the Socket file.** Python's :class:`smtplib.SMTP` class sadly cannot connect to a Unix Socket, so we need to handle it on our own here: .. doctest:: unix_socket :skipif: in_win32 or in_cygwin >>> import socket >>> sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) >>> sock.connect("smtp_socket~") >>> sock.recv(1024) b'220 ...' Try sending something, don't forget to end with ``"\r\n"``: .. doctest:: unix_socket :skipif: in_win32 or in_cygwin >>> sock.send(b"HELO example.org\r\n") 18 >>> sock.recv(1024) b'250 ...' And close everything when done: .. doctest:: unix_socket :skipif: in_win32 or in_cygwin >>> sock.send(b"QUIT\r\n") 6 >>> sock.recv(1024) b'221 Bye...' >>> sock.close() >>> controller.stop() .. _unthreaded: Unthreaded Controllers ---------------------- In addition to the **threaded** controllers described above, ``aiosmtpd`` also provides the following **UNthreaded** controllers: * :class:`UnthreadedController` -- the unthreaded version of :class:`Controller` * :class:`UnixSocketUnthreadedController` -- the unthreaded version of :class:`UnixSocketController` These classes are considered *advanced* classes, because you'll have to manage the event loop yourself. For example, to start an unthreaded controller, you'll have to do something similar to this: .. doctest:: unthreaded >>> import asyncio >>> loop = asyncio.new_event_loop() >>> asyncio.set_event_loop(loop) >>> from aiosmtpd.controller import UnthreadedController >>> from aiosmtpd.handlers import Sink >>> controller = UnthreadedController(Sink(), loop=loop) >>> controller.begin() Note that unlike the threaded counterparts, the method used to start the controller is named ``begin()``. And unlike the method in the threaded version, ``begin()`` does NOT start the asyncio event loop; you'll have to start it yourself. For the purposes of trying this, let's create a thread and have it run the asyncio event loop; we'll also schedule an autostop so it won't hang: .. doctest:: unthreaded >>> def runner(): ... 
# Set the delay to something long enough so you have time ... # to do some testing ... loop.call_later(3.0, loop.stop) ... loop.run_forever() >>> import threading >>> thread = threading.Thread(target=runner) >>> thread.daemon = True >>> thread.start() >>> import time >>> time.sleep(0.1) # Allow the loop to begin At this point in time, the server would be listening: .. doctest:: unthreaded >>> from smtplib import SMTP as Client >>> client = Client(controller.hostname, controller.port) >>> client.helo("example.com") (250, ...) >>> client.quit() (221, b'Bye') The complex thing will be to end it; that is why we're marking these classes as "advanced". For our example here, since we have created an "autostop loop", all we have to do is wait for the runner thread to end: .. doctest:: unthreaded >>> thread.join() >>> loop.is_running() False We still need to do some cleanup to fully release the bound port. Since the loop has ended, we can simply call the :meth:`end` method: .. doctest:: unthreaded >>> controller.end() If you want to end the controller *but* keep the loop running, you'll have to do it like this:: loop.call_soon_threadsafe(controller.end) # If you want to ensure that controller has stopped, you can wait() here: controller.ended.wait(10.0) # Optional You must remember to cleanup the canceled tasks yourself. We have provided a convenience method, :meth:`~aiosmtpd.controller.BaseController.cancel_tasks`:: # Will also stop the loop! loop.call_soon_threadsafe(controller.cancel_tasks) (If you invoke ``cancel_tasks`` with the parameter ``stop_loop=False``, then loop will NOT be stopped. That is a much too-advanced topic and we will not discuss it further in this documentation.) The Unix Socket variant, ``UnixSocketUnthreadedController``, works in the same way. The difference is only in how to access the server, i.e., through a Unix Socket instead of TCP/IP. We'll leave out the details for you to figure it out yourself. .. _enablesmtputf8: Enabling SMTPUTF8 ================= It's very common to want to enable the ``SMTPUTF8`` ESMTP option, therefore this is the default for the ``Controller`` constructor. For backward compatibility reasons, this is *not* the default for the ``SMTP`` class though. If you want to disable this in the ``Controller``, you can pass this argument into the constructor: .. doctest:: >>> from aiosmtpd.handlers import Sink >>> controller = Controller(Sink(), enable_SMTPUTF8=False) >>> controller.start() >>> >>> client = Client(controller.hostname, controller.port) >>> code, message = client.ehlo('me') >>> code 250 The EHLO response does not include the ``SMTPUTF8`` ESMTP option. .. doctest:: >>> lines = message.decode('utf-8').splitlines() >>> # Don't print the server host name line, since that's variable. >>> for line in lines[1:]: ... print(line) SIZE 33554432 8BITMIME HELP Stop the controller if we're done experimenting: .. doctest:: >>> controller.stop() Controller API ============== .. py:module:: aiosmtpd.controller .. py:data:: DEFAULT_READY_TIMEOUT :type: float :value: 5.0 .. py:function:: get_localhost() :return: The numeric address of the loopback interface; ``"::1"`` if IPv6 is supported, ``"127.0.0.1"`` if IPv6 is not supported. :rtype: Literal["::1", "127.0.0.1"] .. class:: IP6_IS .. py:attribute:: NO :type: set[int] Contains constants from :mod:`errno` that will be raised by :meth:`socket.socket.bind` if IPv6 is NOT available on the system. .. 
py:attribute:: YES :type: set[int] Contains constants from :mod:`errno` that will be raised by :meth:`socket.socket.bind` if IPv6 IS available on the system. .. note:: You can customize the contents of these attributes by adding/removing from them, in case the behavior does not align with your expectations *and* you cannot wait for a patch to be merged. .. class:: BaseController(\ handler, \ loop=None, \ *, \ ssl_context=None, \ server_hostname=None, \ server_kwargs=None, \ **SMTP_parameters, \ ) This **Abstract Base Class** defines parameters, attributes, and methods common between all concrete controller classes. :param handler: Handler object :param loop: The asyncio event loop in which the server will run. If not given, :func:`asyncio.new_event_loop` will be called to create the event loop. :type loop: asyncio.AbstractEventLoop :param ssl_context: SSL Context to wrap the socket in. Will be passed-through to :meth:`~asyncio.loop.create_server` method :type ssl_context: ssl.SSLContext :param server_hostname: Server's hostname, will be passed-through as ``hostname`` parameter of :class:`~aiosmtpd.smtp.SMTP` :type server_hostname: Optional[str] :param server_kwargs: *(DEPRECATED)* A dict that will be passed-through as keyword arguments of :class:`~aiosmtpd.smtp.SMTP`. This is DEPRECATED; please use ``**SMTP_parameters`` instead. :type server_kwargs: dict :param SMTP_parameters: Optional keyword arguments that will be passed-through as keyword arguments of :class:`~aiosmtpd.smtp.SMTP` | | :part:`Attributes` .. attribute:: handler :noindex: The instance of the event *handler* passed to the constructor. .. attribute:: loop :noindex: The event loop being used. .. attribute:: server This is the server instance returned by :meth:`_create_server` after the server has started. You can retrieve the :class:`~socket.socket` objects the server is listening on from the ``server.sockets`` attribute. .. py:attribute:: smtpd :type: aiosmtpd.smtp.SMTP The server instance (of class SMTP) created by :meth:`factory` after the controller is started. | | :part:`Methods` .. method:: factory() -> aiosmtpd.smtp.SMTP You can override this method to create custom instances of the :class:`~aiosmtpd.smtp.SMTP` class being controlled. By default, this creates an ``SMTP`` instance, passing in your handler and setting flags from the ``**SMTP_parameters`` parameter. Examples of why you would want to override this method include creating an :ref:`LMTP ` server instance instead of the standard ``SMTP`` server. .. py:method:: cancel_tasks(stop_loop=True) :param stop_loop: If ``True``, stops the loop before canceling tasks. :type stop_loop: bool This is a convenience method that will stop the loop and cancel all asyncio tasks for you. .. class:: Controller(\ handler, \ hostname=None, \ port=8025, \ loop=None, \ *, \ ready_timeout=DEFAULT_READY_TIMEOUT, \ ssl_context=None, \ server_hostname=None, \ server_kwargs=None, \ **SMTP_parameters) A concrete subclass of :class:`BaseController` that provides a threaded, INET listener. :param hostname: Will be given to the event loop's :meth:`~asyncio.loop.create_server` method as the ``host`` parameter, with slight processing (see below) :type hostname: Optional[str] :param port: Will be passed-through to :meth:`~asyncio.loop.create_server` method :type port: int :param ready_timeout: How long to wait until the server starts. The :envvar:`AIOSMTPD_CONTROLLER_TIMEOUT` takes precedence over this parameter. See :attr:`ready_timeout` for more information.
:type ready_timeout: float Other parameters are defined in the :class:`BaseController` class. The ``hostname`` parameter will be passed to the event loop's :meth:`~asyncio.loop.create_server` method as the ``host`` parameter, :boldital:`except` ``None`` (default) will be translated to ``::1``. * To bind `dual-stack`_ locally, use ``localhost``. * To bind `dual-stack`_ on all interfaces, use ``""`` (empty string). .. important:: The ``hostname`` parameter does NOT get passed through to the SMTP instance; if you want to give the SMTP instance a custom hostname (e.g., for use in HELO/EHLO greeting), you must pass it through the :attr:`server_hostname` parameter. Explicitly defined SMTP keyword arguments will override keyword arguments of the same names defined in the (deprecated) ``server_kwargs`` argument. .. doctest:: controller_kwargs >>> from aiosmtpd.controller import Controller >>> from aiosmtpd.handlers import Sink >>> controller = Controller( ... Sink(), timeout=200, server_kwargs=dict(timeout=400) ... ) >>> controller.SMTP_kwargs["timeout"] 200 Finally, setting the ``ssl_context`` parameter will switch the protocol to ``SMTPS`` mode, implying unconditional encryption of the connection, and preventing the use of the ``STARTTLS`` mechanism. Actual behavior depends on the subclass's implementation. | | :part:`Attributes` In addition to those provided by :class:`BaseController`, this class provides the following: .. attribute:: hostname: str port: int The values of the *hostname* and *port* arguments. .. attribute:: ready_timeout :type: float The timeout value used to wait for the server to start. This will either be the value of the :envvar:`AIOSMTPD_CONTROLLER_TIMEOUT` environment variable (converted to float), or the :attr:`ready_timeout` parameter. Setting this to a high value will NOT slow down controller startup, because it's a timeout limit rather than a sleep delay. However, you may want to reduce the default value to something 'just enough' so you don't have to wait too long for an exception, if problem arises. If this timeout is breached, a :class:`TimeoutError` exception will be raised. | | :part:`Methods` In addition to those provided by :class:`BaseController`, this class provides the following: .. method:: start() -> None :raises TimeoutError: if the server takes too long to get ready, exceeding the ``ready_timeout`` parameter. :raises RuntimeError: if an unrecognized & unhandled error happened, resulting in non-creation of a server object (:attr:`smtpd` remains ``None``) Start the server in the subthread. The subthread is always a :class:`daemon thread ` (i.e., we always set ``thread.daemon=True``). Exceptions can be raised if the server does not start within :attr:`ready_timeout` seconds, or if any other exception occurs in :meth:`~BaseController.factory` while creating the server. .. important:: If :meth:`start` raises an Exception, cleanup is not performed automatically, to support deep inspection post-exception (if you wish to do so.) Cleanup must still be performed manually by calling :meth:`stop` For example:: # Assume SomeController is a concrete subclass of BaseThreadedController controller = SomeController(handler) try: controller.start() except ...: ... exception handling and/or inspection ... finally: controller.stop() .. method:: stop(no_assert=False) -> None :param no_assert: If ``True``, skip the assertion step so an ``AssertionError`` will not be raised if thread had not been started successfully. 
:type no_assert: bool :raises AssertionError: if this method is called before :meth:`start` is called successfully *AND* ``no_assert=False`` Stop the server and the event loop, and cancel all tasks via :meth:`~BaseController.cancel_tasks`. .. class:: UnixSocketController(\ handler, \ unix_socket, \ loop=None, \ *, \ ready_timeout=DEFAULT_READY_TIMEOUT, \ ssl_context=None, \ server_hostname=None, \ **SMTP_parameters) A concrete subclass of :class:`BaseController` that provides a threaded, Unix Socket listener. :param unix_socket: Socket file, will be passed-through to :meth:`asyncio.loop.create_unix_server` :type unix_socket: Union[str, pathlib.Path] For the other parameters, see the description under :class:`Controller` | | :part:`Attributes` .. py:attribute:: unix_socket :type: str The stringified version of the ``unix_socket`` parameter Other attributes (except ``hostname`` and ``port``) are identical to :class:`Controller` and thus are not repeated nor explained here. | | :part:`Methods` All methods are identical to :class:`Controller` and thus are not repeated nor explained here. .. class:: UnthreadedController(\ handler, \ hostname=None, \ port=8025, \ loop=None, \ *, \ ssl_context=None, \ server_hostname=None, \ server_kwargs=None, \ **SMTP_parameters) .. versionadded:: 1.5.0 A concrete subclass of :class:`BaseController` that provides an UNthreaded, INET listener. Parameters are identical to the :class:`Controller` class. | | :part:`Attributes` Attributes are identical to the :class:`Controller` class with one addition: .. py:attribute:: ended :type: threading.Event An ``Event`` that can be ``.wait()``-ed when ending the controller. Please see the :ref:`Unthreaded Controllers ` section for more info. | | :part:`Methods` In addition to those provided by :class:`BaseController`, this class provides the following: .. py:method:: begin Initializes the server task and insert it into the asyncio event loop. .. note:: The SMTP class itself will only be initialized upon first connection to the server task. .. py:method:: finalize :async: Perform orderly closing of the server listener. If you need to close the server from a non-async function, you can use the :meth:`~UnthreadedController.end` method instead. Upon completion of this method, the :attr:`ended` attribute will be ``set()``. .. py:method:: end This is a convenience method that will asynchronously invoke the :meth:`finalize` method. This method non-async, and thus is callable from non-async functions. .. note:: If the asyncio event loop has been stopped, then it is safe to invoke this method directly. Otherwise, it is recommended to invoke this method using the :meth:`~asyncio.loop.call_soon_threadsafe` method. .. class:: UnixSocketUnthreadedController(\ handler, \ unix_socket, \ loop=None, \ *, \ ssl_context=None, \ server_hostname=None,\ server_kwargs=None, \ **SMTP_parameters) .. versionadded:: 1.5.0 A concrete subclass of :class:`BaseController` that provides an UNthreaded, Unix Socket listener. Parameters are identical to the :class:`UnixSocketController` class. | | :part:`Attributes` Attributes are identical to the :class:`UnixSocketController` class, with the following addition: .. py:attribute:: ended :type: threading.Event An ``Event`` that can be ``.wait()``-ed when ending the controller. Please see the :ref:`Unthreaded Controllers ` section for more info. | | :part:`Methods` Methods are identical to the :class:`UnthreadedController` class. .. 
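For illustration, a minimal way to drive a ``UnixSocketUnthreadedController`` might look like the following sketch, which simply mirrors the :ref:`unthreaded` example above; the socket path ``smtp_unthreaded~`` is an arbitrary example name::

    import asyncio
    import threading

    from aiosmtpd.controller import UnixSocketUnthreadedController
    from aiosmtpd.handlers import Sink

    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)

    controller = UnixSocketUnthreadedController(
        Sink(), unix_socket="smtp_unthreaded~", loop=loop
    )
    controller.begin()

    # begin() does not start the event loop; a daemon thread drives it here
    # until we stop it ourselves.
    thread = threading.Thread(target=loop.run_forever, daemon=True)
    thread.start()

    # ... talk to the server through the "smtp_unthreaded~" socket file ...

    # Orderly shutdown: schedule end() on the loop, wait for it to finish,
    # then cancel the remaining tasks (which also stops the loop).
    loop.call_soon_threadsafe(controller.end)
    controller.ended.wait(10.0)
    loop.call_soon_threadsafe(controller.cancel_tasks)
    thread.join()

..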
_`asyncio event loop`: https://docs.python.org/3/library/asyncio-eventloop.html .. _`create a server`: https://docs.python.org/3/library/asyncio-eventloop.html#asyncio.AbstractEventLoop.create_server .. _dual-stack: https://en.wikipedia.org/wiki/IPv6#Dual-stack_IP_implementation aio-libs-aiosmtpd-b634d9b/aiosmtpd/docs/handlers.rst000066400000000000000000000340521462210711200225500ustar00rootroot00000000000000.. _handlers: ========== Handlers ========== Handlers are classes which can implement :ref:`hook methods ` that get called at various points in the SMTP dialog. Handlers can also be named on the :ref:`command line `, but if the class's constructor takes arguments, you must define a ``@classmethod`` that converts the positional arguments and returns a handler instance: .. py:classmethod:: from_cli(cls, parser, *args) Convert the positional arguments, as strings passed in on the command line, into a handler instance. :boldital:`parser` is the :class:`~argparse.ArgumentParser` instance in use. If this method does not recognize the positional arguments passed in ``parser``, it can *optionally* call :meth:`parser.error ` with the error message. If ``from_cli()`` is not defined, the handler can still be used on the command line, but its constructor cannot accept arguments. .. _hooks: Handler Hooks ============= Handlers can implement hooks that get called during the SMTP dialog, or in exceptional cases. These *handler hooks* are ALL called **asynchronously** (i.e. they are coroutines). All handler hooks are optional and default behaviors are carried out by the :class:`SMTP` class when a hook is omitted, so you only need to implement the ones you care about. When a handler hook is defined, it may have additional responsibilities as described below. Common Arguments ---------------- All handler hooks will be called with at least three arguments: .. py:attribute:: server :type: SMTP The ``SMTP`` server instance .. py:attribute:: session :type: Session The :ref:`session instance ` currently being handled, and .. py:attribute:: envelope :type: Envelope The :ref:`envelope instance ` of the current SMTP Transaction Some handler hooks will receive additional arguments. Supported Hooks --------------- The following hooks are currently supported (in alphabetical order): .. py:method:: handle_AUTH(server, session, envelope, args) :noindex: Called to handle ``AUTH`` command if you need custom AUTH behavior. For more information, please read the documentation for :ref:`auth`. .. py:method:: handle_DATA(server, session, envelope) -> str :async: :return: Response message to be sent to the client Called during ``DATA`` after the entire message (`"SMTP content" `_ as described in RFC 5321) has been received. The content is available in ``envelope.original_content`` as type ``bytes``, normalized according to the transparency rules as defined in :rfc:`RFC 5321, §4.5.2 <5321#section-4.5.2>`. In addition, the ``envelope.content`` attribute will also contain the contents; the type depends on whether :class:`~aiosmtpd.smtp.SMTP` was instantiated with ``decode_data=False`` or ``decode_data=True``. See :attr:`Envelope.content` for more info. .. py:method:: handle_EHLO(server, session, envelope, hostname, responses) -> List[str] :async: :noindex: :param hostname: The host name given by the client in the ``EHLO`` command :type hostname: str :return: Response message to be sent to the client This hook is called during ``EHLO``. 
This hook may push *additional* ``250-`` responses to the client by doing ``await server.push(status)`` before returning ``"250 HELP"`` as the final response. .. important:: If the handler sets the ``session.host_name`` attribute to a false-y value (or leave it as the default ``None`` value) it will signal later steps that ``HELO`` failed and need to be performed again. This also applies to the :meth:`handle_EHLO` hook below. .. deprecated:: 1.3 Use the :meth:`5-argument form ` instead. Support for the 4-argument form **will be removed in version 2.0** .. py:method:: handle_EHLO(server, session, envelope, hostname, responses) -> List[str] :async: :param hostname: The host name given by the client in the ``EHLO`` command :type hostname: str :param responses: The 'planned' responses to the ``EHLO`` command *including* the last ``250 HELP`` response. :type responses: List[str] :return: List of response messages to be sent to the client Called during ``EHLO``. The hook MUST return a list containing the desired responses. The returned list should end with ``250 HELP`` This hook MUST also set the :attr:``session.host_name`` attribute. .. important:: It is strongly recommended to not change element ``[0]`` of the list (containing the hostname of the SMTP server). .. py:method:: handle_HELO(server, session, envelope, hostname) -> str :async: :param hostname: The host name given by client during ``HELO`` :type hostname: str :return: Response message to be sent to the client This hook is called during ``HELO``. If implemented, this hook MUST also set the :attr:``session.host_name`` attribute before returning ``'250 {}'.format(server.hostname)`` as the status. .. py:method:: handle_MAIL(server, session, envelope, address, mail_options) -> str :async: :param address: The parsed email address given by the client in the ``MAIL FROM`` command :type address: str :param mail_options: Additional ESMTP MAIL options provided by the client :type mail_options: List[str] :return: Response message to be sent to the client Called during ``MAIL FROM``. If implemented, this hook MUST also set the :attr:`envelope.mail_from` attribute and it MAY extend :attr:`envelope.mail_options` (which is always a Python list). .. py:method:: handle_NOOP(server, session, envelope, arg) -> str :async: :param arg: All characters following the ``NOOP`` command :type arg: str :return: Response message to be sent to the client Called during ``NOOP``. .. method:: handle_PROXY(server, session, envelope, proxy_data) :noindex: :param SMTP server: The :class:`SMTP` instance invoking the hook. :param Session session: The Session data *so far* (see Important note below) :param Envelope envelope: The Envelope data *so far* (see Important note below) :param ProxyData proxy_data: The result of parsing the PROXY Header :return: Truthy or Falsey, indicating if the connection may continue or not, respectively Called during PROXY Protocol Handshake. See :ref:`ProxyProtocol` for more information. .. py:method:: handle_QUIT(server, session, envelope) -> str :async: :return: Response message to be sent to the client Called during ``QUIT``. .. py:method:: handle_RCPT(server, session, envelope, address, rcpt_options) -> str :async: :param address: The parsed email address given by the client in the ``RCPT TO`` command :type address: str :param rcpt_options: Additional ESMTP RCPT options provided by the client :type rcpt_options: List[str] :return: Response message to be sent to the client Called during ``RCPT TO``. 
If implemented, this hook SHOULD append the address to ``envelope.rcpt_tos`` and it MAY extend ``envelope.rcpt_options`` (both of which are always Python lists). .. py:method:: handle_RSET(server, session, envelope) -> str :async: :return: Response message to be sent to the client Called during ``RSET``. .. py:method:: handle_VRFY(server, session, envelope, address) -> str :async: :param address: The parsed email address given by the client in the ``VRFY`` command :type address: str :return: Response message to be sent to the client Called during ``VRFY``. In addition to the SMTP command hooks, the following hooks can also be implemented by handlers. These have different APIs, and are called **synchronously** (i.e. they are **not** coroutines). .. py:method:: handle_STARTTLS(server, session, envelope) If implemented, and if SSL is supported, this method gets called during the TLS handshake phase of ``connection_made()``. It should return True if the handshake succeeded, and False otherwise. .. py:method:: handle_exception(error) If implemented, this method is called when any error occurs during the handling of a connection (e.g. if an ``smtp_()`` method raises an exception). The exception object is passed in. This method *must* return a status string, such as ``'542 Internal server error'``. If the method returns ``None`` or raises an exception, an exception will be logged, and a ``451`` code will be returned to the client. .. important:: If client connection is lost, this handler will NOT be called. Built-in handlers ================= The following built-in handlers can be imported from :mod:`aiosmtpd.handlers`: .. py:module:: aiosmtpd.handlers .. py:class:: AsyncMessage A subclass of the :class:`~aiosmtpd.handlers.Message` handler, it is also an :term:`abstract base class` (it must be subclassed). The only difference with :class:`Message` is that :func:`handle_message()` is called *asynchronously*. This class **cannot** be used on the command line. .. py:class:: Debugging This class prints the contents of the received messages to a given output stream. Programmatically, you can pass the stream to print to into the constructor. When specified on the command line, the (optional) positional argument must either be the string ``stdout`` or ``stderr`` indicating which stream to use. Examples:: aiosmtpd -c aiosmtpd.handlers.Debugging aiosmtpd -c aiosmtpd.handlers.Debugging stderr aiosmtpd -c aiosmtpd.handlers.Debugging stdout .. py:class:: Mailbox A subclass of the :class:`~aiosmtpd.handlers.Message` handler which adds the messages to a :class:`~mailbox.Maildir`. See :ref:`mailboxhandler` for details. When specified on the command line, it accepts *exactly* one positional argument which is the ``maildir`` (i.e, directory where email messages will be stored.) Example:: aiosmtpd -c aiosmtpd.handlers.Mailbox /home/myhome/Maildir .. py:class:: Message This class is an :term:`abstract base class` (it must be subclassed) which converts the message content into a message instance. The class used to create these instances can be passed to the constructor, and defaults to :class:`email.message.Message` This message instance gains a few additional headers (e.g. :mailheader:`X-Peer`, :mailheader:`X-MailFrom`, and :mailheader:`X-RcptTo`). You can override this behavior by overriding the :func:`prepare_message` method, which takes a session and an envelope. The message instance is then passed to the handler's :func:`handle_message()` method. It is this method that must be implemented in the subclass. 
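For illustration, a hypothetical subclass that merely logs the subject of each incoming message might look like this (the class name and logger name are arbitrary examples)::

    import logging

    from aiosmtpd.handlers import Message

    log = logging.getLogger("mail.subjects")

    class SubjectLogger(Message):
        def handle_message(self, message):
            # "message" is the email.message.Message instance produced by
            # prepare_message(), already carrying the X-Peer, X-MailFrom,
            # and X-RcptTo headers mentioned above.
            log.info("Got %r from %s", message["Subject"], message["X-MailFrom"])

An instance of such a subclass is then passed to a controller like any other handler, e.g. ``Controller(SubjectLogger())``.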
:func:`prepare_message()` and :func:`handle_message()`` are both called :boldital:`synchronously`. This class **cannot** be used on the command line. .. py:class:: Proxy This class is a relatively simple SMTP proxy; it forwards messages to a remote host and port. The constructor takes the host name and port as positional arguments. This class **cannot** be used on the command line. .. important:: Do not confuse this class with `the PROXY Protocol`_; they are two totally different things. .. py:class:: Sink This class just consumes and discards messages. It's essentially the "no op" handler. It can be used on the command line, but accepts no positional arguments. Example:: aiosmtpd -c aiosmtpd.handlers.Sink .. _mailboxhandler: The Mailbox Handler =================== A convenient handler is the ``Mailbox`` handler, which stores incoming messages into a maildir. To try it, let's first prepare an :class:`~contextlib.ExitStack` to automatically clean up after we finish: >>> from contextlib import ExitStack >>> from tempfile import TemporaryDirectory >>> # Clean up the temporary directory at the end >>> resources = ExitStack() >>> tempdir = resources.enter_context(TemporaryDirectory()) Then, prepare the controller: >>> import os >>> from aiosmtpd.controller import Controller >>> from aiosmtpd.handlers import Mailbox >>> # >>> maildir_path = os.path.join(tempdir, 'maildir') >>> controller = Controller(Mailbox(maildir_path)) >>> controller.start() >>> # Arrange for the controller to be stopped at the end >>> ignore = resources.callback(controller.stop) Now we can connect to the server and send it a message... >>> from smtplib import SMTP >>> client = SMTP(controller.hostname, controller.port) >>> client.sendmail('aperson@example.com', ['bperson@example.com'], """\ ... From: Anne Person ... To: Bart Person ... Subject: A test ... Message-ID: ... ... Hi Bart, this is Anne. ... """) {} ...and a second message... >>> client.sendmail('cperson@example.com', ['dperson@example.com'], """\ ... From: Cate Person ... To: Dave Person ... Subject: A test ... Message-ID: ... ... Hi Dave, this is Cate. ... """) {} ...and a third message. >>> client.sendmail('eperson@example.com', ['fperson@example.com'], """\ ... From: Elle Person ... To: Fred Person ... Subject: A test ... Message-ID: ... ... Hi Fred, this is Elle. ... """) {} We open up the mailbox again, and all three messages are waiting for us. >>> from mailbox import Maildir >>> from operator import itemgetter >>> mailbox = Maildir(maildir_path) >>> messages = sorted(mailbox, key=itemgetter('message-id')) >>> for message in messages: ... print(message['Message-ID'], message['From'], message['To']) Anne Person Bart Person Cate Person Dave Person Elle Person Fred Person Cleanup when we're done. >>> resources.close() .. _`the PROXY Protocol`: https://www.haproxy.com/blog/haproxy/proxy-protocol/ aio-libs-aiosmtpd-b634d9b/aiosmtpd/docs/index.rst000066400000000000000000000004701462210711200220540ustar00rootroot00000000000000.. include:: ../../README.rst Contents ======== .. 
toctree:: :maxdepth: 2 intro concepts cli controller smtp lmtp handlers auth proxyprotocol migrating testing manpage NEWS Indices and tables ================== * :ref:`genindex` * :ref:`modindex` * :ref:`search` aio-libs-aiosmtpd-b634d9b/aiosmtpd/docs/intro.rst000066400000000000000000000035101462210711200220760ustar00rootroot00000000000000============== Introduction ============== This library provides an :mod:`asyncio`-based implementation of a server for :rfc:`5321` - Simple Mail Transfer Protocol (SMTP) and :rfc:`2033` - Local Mail Transfer Protocol (LMTP). It is derived from `Python 3.5's smtpd.py `__ standard library module, and provides both a command line interface and an API for use in testing applications that send email. Inspiration for this library comes from several other packages: * `lazr.smtptest `__ * `benjamin-bader/aiosmtp `__ * `Mailman 3's LMTP server `__ ``aiosmtpd`` takes the best of these and consolidates them in one place. Relevant RFCs ============= * :rfc:`5321` - Simple Mail Transfer Protocol (SMTP) * :rfc:`2033` - Local Mail Transfer Protocol (LMTP) * :rfc:`2034` - SMTP Service Extension for Returning Enhanced Error Codes * :rfc:`6531` - SMTP Extension for Internationalized Email * :rfc:`4954` - SMTP Service Extension for Authentication * :rfc:`5322` - Internet Message Format * :rfc:`3696` - Application Techniques for Checking and Transformation of Names * :rfc:`2034` - SMTP Service Extension for Returning Enhanced Error Codes * :rfc:`1870` - SMTP Service Extension for Message Size Declaration Other references ================ * `Wikipedia page on SMTP `__ * `asyncio module documentation `__ * `Developing with asyncio `__ * `Python issue #25008 `__ which started the whole thing. aio-libs-aiosmtpd-b634d9b/aiosmtpd/docs/lmtp.rst000066400000000000000000000011541462210711200217210ustar00rootroot00000000000000.. _LMTP: ================ The LMTP class ================ :rfc:`2033` defines the :boldital:`Local Mail Transport Protocol`. In many ways, this is very similar to SMTP, but with no guarantees of queuing. It is, in a sense, an alternative to ESMTP, and is often used for local mail routing (e.g. from a Mail Transport Agent to a local command or system) where the unreliability of internet connectivity is not an issue. The ``LMTP`` class subclasses the :class:`~aiosmtpd.smtp.SMTP` class and its only functional difference is that it implements the ``LHLO`` command, and prohibits the use of ``HELO`` and ``EHLO``. aio-libs-aiosmtpd-b634d9b/aiosmtpd/docs/manpage.rst000066400000000000000000000011401462210711200223500ustar00rootroot00000000000000.. _manpage: ========== aiosmtpd ========== Provides an asynchronous, RFC 5321 compliant Simple Mail Transfer Protocol (SMTP) server that supports customizable extensions. :Author: |author| :Date: |today| :Copyright: |copyright| :Version: |version| :Manual section: 1 SYNOPSIS ======== .. autoprogramm:: aiosmtpd.main:_parser() :prog: aiosmtpd :notitle: :nodesc: :options_title: Options :options_adornment: ~ ENVIRONMENT =========== .. envvar:: AIOSMTPD_CONTROLLER_TIMEOUT | How long the main thread will wait (in seconds) until the SMTP thread is ready. | Default: ``1.0`` aio-libs-aiosmtpd-b634d9b/aiosmtpd/docs/migrating.rst000066400000000000000000000041211462210711200227230ustar00rootroot00000000000000.. _migrating: ================================== Migrating from smtpd to aiosmtpd ================================== aiosmtpd is designed to make it easy to migrate an existing application based on :mod:`smtpd` to aiosmtpd. 
Consider the following subclass of :class:`smtpd.SMTPServer`:: import smtpd import asyncore class CustomSMTPServer(smtpd.SMTPServer): def process_message(self, peer, mail_from, rcpt_tos, data): # Process message data... if error_occurred: return '500 Could not process your message' if __name__ == '__main__': server = CustomSMTPServer(('127.0.0.1', 10025), None) # Run the event loop in the current thread. asyncore.loop() To switch this application to using ``aiosmtpd``, implement a handler with the ``handle_DATA()`` method:: import asyncio from aiosmtpd.controller import Controller class CustomHandler: async def handle_DATA(self, server, session, envelope): peer = session.peer mail_from = envelope.mail_from rcpt_tos = envelope.rcpt_tos data = envelope.content # type: bytes # Process message data... if error_occurred: return '500 Could not process your message' return '250 OK' if __name__ == '__main__': handler = CustomHandler() controller = Controller(handler, hostname='127.0.0.1', port=10025) # Run the event loop in a separate thread. controller.start() # Wait for the user to press Return. input('SMTP server running. Press Return to stop server and exit.') controller.stop() Important differences to note: * Unlike :meth:`~smtpd.SMTPServer.process_message` in smtpd, ``handle_DATA()`` **must** return an SMTP response code for the sender such as ``"250 OK"``. * ``handle_DATA()`` must be a coroutine function, which means it must be declared with ``async def``. * :meth:`Controller.start` runs the SMTP server in a separate thread and can be stopped again by calling :meth:`Controller.stop` aio-libs-aiosmtpd-b634d9b/aiosmtpd/docs/proxyprotocol.rst000066400000000000000000000405031462210711200237110ustar00rootroot00000000000000.. _ProxyProtocol: ======================== PROXY Protocol Support ======================== When put behind a "proxy" / load balancer, server programs can no longer "see" the original client's actual IP Address and Port. This also affects ``aiosmtpd``. The |HAProxyDevelopers|_ have created a protocol called "PROXY Protocol" designed to solve this issue. You can read the reasoning behind this in `their blog`_. .. _`HAProxyDevelopers`: https://www.haproxy.com/company/about-us/ .. |HAProxyDevelopers| replace:: **HAProxy Developers** .. _their blog: https://www.haproxy.com/blog/haproxy/proxy-protocol/ This initiative has been accepted and supported by many important software and services such as `Amazon Web Services`_, `HAProxy`_, `NGINX`_, `stunnel`_, `Varnish`_, and many others. .. _Amazon Web Services: https://docs.aws.amazon.com/elasticloadbalancing/latest/classic/enable-proxy-protocol.html .. _HAProxy: http://cbonte.github.io/haproxy-dconv/2.3/configuration.html#5.2-send-proxy .. _NGINX: https://nginx.org/en/docs/stream/ngx_stream_proxy_module.html#proxy_protocol .. _stunnel: https://www.stunnel.org/static/stunnel.html#proxy .. _Varnish: https://info.varnish-software.com/blog/proxy-protocol-original-value-client-identity ``aiosmtpd`` implements the PROXY Protocol as defined in the documentation accompanying |HAProxy2.3.0|_; *both* Version 1 and Version 2 are supported. .. _HAProxy2.3.0: https://github.com/haproxy/haproxy/blob/v2.3.0/doc/proxy-protocol.txt .. 
|HAProxy2.3.0| replace:: **HAProxy v2.3.0** Activating ========== To activate ``aiosmtpd``'s PROXY Protocol Support, you have to set the :attr:`proxy_protocol_timeout` parameter of the SMTP Class to a positive numeric value (``int`` or ``float``) The `PROXY Protocol documentation suggests`_ that the timeout should not be less than 3.0 seconds. .. _PROXY Protocol documentation suggests: https://github.com/haproxy/haproxy/blob/v2.3.0/doc/proxy-protocol.txt#L172-L174 .. important:: Once you activate PROXY Protocol support, standard (E)SMTP handshake is **no longer available**. Clients trying to connect to ``aiosmtpd`` will be REQUIRED to send the PROXY Protocol Header before they can continue with (E)SMTP transaction. This is `as specified`_ in the PROXY Protocol documentation. .. _as specified: https://github.com/haproxy/haproxy/blob/v2.3.0/doc/proxy-protocol.txt#L176-L180 ``handle_PROXY`` Hook ===================== In addition to activating the PROXY protocol support as described above, you MUST implement the ``handle_PROXY`` hook. If the :attr:`handler` object does not implement ``handle_PROXY``, then all connection attempts will be rejected. The signature of ``handle_PROXY`` must be as follows: .. method:: handle_PROXY(server, session, envelope, proxy_data) :param server: The :class:`SMTP` instance invoking the hook. :type server: aiosmtpd.smtp.SMTP :param session: The Session data *so far* (see Important note below) :type session: Session :param envelope: The Envelope data *so far* (see Important note below) :type envelope: Envelope :param proxy_data: The result of parsing the PROXY Header :type proxy_data: ProxyData :return: Truthy or Falsey, indicating if the connection may continue or not, respectively .. important:: The ``session.peer`` attribute will contain the ``IP:port`` information of the **directly adjacent** client. In other word, it will contain the endpoint identifier of the proxying entity. Endpoint identifier of the "original" client will be recorded *only* in the :attr:`proxy_data` parameter The ``envelope`` data will usually be empty(ish), because the PROXY handshake will take place before client can send any transaction data. Parsing the Header ================== You do not have to concern yourself with parsing the PROXY Protocol header; the ``aiosmtpd.proxy_protocol`` module contains the full parsing logic. All you need to do is to *validate* the parsed result in the ``handle_PROXY`` hook. .. py:module:: aiosmtpd.proxy_protocol Enums ===== .. class:: AF .. py:attribute:: \ UNSPEC = 0 IP4 = 1 IP6 = 2 UNIX = 3 For Version 1, ``UNKNOWN`` is mapped to ``UNSPEC``. .. class:: PROTO .. py:attribute:: \ UNSPEC = 0 STREAM = 1 DGRAM = 2 For Version 1, ``UNKNOWN`` is mapped to ``UNSPEC``, and ``TCP`` is mapped into ``STREAM`` .. class:: V2_CMD .. py:attribute:: \ LOCAL = 0 PROXY = 1 ``ProxyData`` API ================= .. py:class:: ProxyData(\ version=None\ ) | | :part:`Attributes & Properties` .. py:attribute:: version :type: Optional[int] Contains the version of the PROXY Protocol header. If ``None``, it indicates that parsing has failed and the header is malformed. .. py:attribute:: command :type: V2_CMD Contains the `command`_. Only set if ``version=2`` .. py:attribute:: family :type: AF Contains the `address family`_. Valid values for Version 1 excludes :attr:`AF.UNIX`. .. py:attribute:: protocol :type: PROTO Contains an integer indicating the `transport protocol being proxied`_. Valid values for Version 1 excludes :attr:`PROTO.DGRAM`. .. 
py:attribute:: src_addr :type: Union[IPv4Address, IPv6Address, AnyStr] Contains the source address (i.e., address of the "original" client). The type of this attribute depends on the :attr:`address family `. .. py:attribute:: dst_addr :type: Union[IPv4Address, IPv6Address, AnyStr] Contains the destination address (i.e., address of the proxying entity to which the "original" client connected). The type of this attribute depends on the address family. .. py:attribute:: src_port :type: int Contains the source port (i.e., port of the "original" client). Valid only for address family of :attr:`AF.INET` or :attr:`AF.INET6` .. py:attribute:: dst_port :type: int Contains the destination port (i.e., port of the proxying entity to which the "original" client connected). Valid only for address family of :attr:`AF.INET` or :attr:`AF.INET6` .. py:attribute:: rest :type: ByteString The contents depend on the version of the PROXY header *and* (for version 2) the address family. For PROXY Header version 1, it contains all the bytes following ``b"UNKNOWN"`` up until, but not including, the ``CRLF`` terminator. For PROXY Header version 2: * For address family ``UNSPEC``, it contains all the bytes following the 16-octet header preamble * For address families :attr:`AF.INET`, :attr:`AF.INET6`, and :attr:`AF.UNIX` it contains all the bytes following the address information .. py:attribute:: tlv :type: aiosmtpd.proxy_protocol.ProxyTLV This property contains the result of the TLV Parsing attempt of the :attr:`rest` attribute. If this property returns ``None`` that means either (1) :attr:`rest` is empty, or (2) TLV Parsing is not successful. .. py:attribute:: valid :type: bool This property will indicate if PROXY Header is valid or not. .. py:attribute:: whole_raw :type: bytearray This attribute contains the whole, undecoded and unmodified, PROXY Header. For version 1, it contains everything up to and including the terminating ``\r\n``. For version 2, it contains everything up to and including the last TLV Vector. If you need to verify the ``CRC32C`` TLV Vector (PROXYv2), you should run the CRC32C calculation against the contents of this attribute. For more information, see the next section, :ref:`crc32c`. .. py:attribute:: tlv_start :type: int This attribute points to the first TLV Vector *if exists*. If you need to verify the ``CRC32C`` TLV Vector, you should run the CRC32C calculation against the contents of this attribute. The value will be ``None`` if PROXY version is 1. | | :part:`Methods` .. py:method:: with_error(error_msg: str) -> ProxyData :param str error_msg: Error message :return: self Sets the instance's :attr:`error` attribute and returns itself. .. py:method:: same_attribs(_raises=False, **kwargs) -> bool :param _raises: If ``True``, raise exception if attribute not match/not found, instead of returning a bool. Defaults to ``False`` :type _raises: bool :raises ValueError: if ``_raises=True`` and attribute is found but value is wrong :raises KeyError: if ``_raises=True`` and attribute is not found A helper method to quickly verify whether an attribute exists and contain the same value as expected. Example usage:: proxy_data.same_attribs( version=1, protocol=b"TCP4", unknown_attrib=None ) In the above example, ``same_attribs`` will check that all attributes ``version``, ``protocol``, and ``unknown_attrib`` exist, and contains the values ``1``, ``b"TCP4"``, and ``None``, respectively. Missing attributes and/or differing values will return a ``False`` (unless ``_raises=True``) .. 
note:: For other examples, take a look inside the ``test_proxyprotocol.py`` file. That file *extensively* uses ``same_attribs``. .. py:method:: __bool__() Allows an instance of ``ProxyData`` to be evaluated as boolean. In actuality, it simply returns the :attr:`valid` property. ``ProxyTLV`` API ================ .. py:class:: ProxyTLV() This class parses the `TLV portion`_ of the PROXY Header and presents the value in an easy-to-use way: A "TLV Vector" whose "Type" is found in :attr:`PP2_TYPENAME` can be accessed through the `.` attribute. It is a subclass of :class:`dict`, so all of ``dict``'s methods are available. It is basically a `Dict[str, Any]` with additional methods and attributes. The list below only describes methods & attributes added to this class. .. py:attribute:: PP2_TYPENAME :type: Dict[int, str] A mapping of numeric Type to a human-friendly Name. The names are identical to the ones `listed in the documentation`_, but with the ``PP2_TYPE_``/``PP2_SUBTYPE_`` prefixes removed. .. note:: The ``SSL`` Name is special. Rather than containing the TLV Subvectors as described in the standard, it is a ``bool`` value that indicates whether the PP2_SUBTYPE_SSL .. py:attribute:: tlv_loc :type: Dict[str, int] A mapping to show the start location of certain TLV Vectors. The keys are the TYPENAME (see :attr:`PP2_TYPENAME` above), and the value is the offset from start of the TLV Vectors. .. py:method:: same_attribs(_raises=False, **kwargs) -> bool :param _raises: If ``True``, raise exception if attribute not match/not found, instead of returning a bool. Defaults to ``False`` :type _raises: bool :raises ValueError: if ``_raises=True`` and attribute is found but value is wrong :raises KeyError: if ``_raises=True`` and attribute is not found A helper method to quickly verify whether an attribute exists and contain the same value as expected. Example usage:: assert isinstance(proxy_tlv, ProxyTLV) proxy_tlv.same_attribs( AUTHORITY=b"some_authority", SSL=True, ) In the above example, ``same_attribs`` will check that the attributes ``AUTHORITY`` and ``SSL`` exist, and contains the values ``b"some_authority"`` and ``True``, respectively. Missing attributes and/or differing values will return a ``False`` (unless ``_raises=True``) .. note:: For other examples, take a look inside the ``test_proxyprotocol.py`` file. That file *extensively* uses ``same_attribs``. .. py:classmethod:: from_raw(raw) -> Optional[ProxyTLV] :param raw: The raw bytes containing the TLV Vectors :type raw: ByteString :return: A new instance of ProxyTLV, or ``None`` if parsing failed This triggers the parsing of raw bytes/bytearray into a ProxyTLV instance. Internally it relies on the :meth:`parse` classmethod to perform the parsing. Unlike the default behavior of :meth:`parse`, ``from_raw`` will NOT perform a partial parsing. .. py:classmethod:: parse(chunk, partial_ok=True) -> Dict[str, Any] :param chunk: The bytes to parse into TLV Vectors :type chunk: ByteString :param partial_ok: If ``True``, return partially-parsed TLV Vectors as is. If ``False``, (re)raise ``MalformedTLV`` :type partial_ok: bool :return: A mapping of typenames and values This performs a recursive parsing of the bytes. If it encounters a TYPE that ProxyTLV doesn't recognize, the TLV Vector will be assigned a typename of `"xNN"` Partial parsing is possible when ``partial_ok=True``; if during the parsing an error happened, `parse` will abort returning the TLV Vectors it had successfully decoded. .. 
py:classmethod:: name_to_num(name) -> Optional[int] :param name: The name to back-map into TYPE numeric :type name: str :return: The numeric value associated to the typename, ``None`` if no such mapping is found This is a helper method to perform back-mapping of typenames. .. _crc32c: Note on CRC32C Calculation ========================== Neither the :class:`ProxyData` nor :class:`ProxyTLV` classes implement `PROXYv2 CRC32C validation`_; the main reason being that Python has no built-in module for calculating CRC32C. To perform CRC32C, third-party modules need to be installed, but we are uncomfortable doing that for the following reasons: * There are more than one third-party modules providing CRC32C, e.g., ``crcmod``, ``crc32c``, ``google-crc32c``, etc. Problem is, there is no known clear comparison between them, so we cannot tell easily which one is 'best'. * Some of these third-party modules seem to be no longer being maintained. * Most of the available third-party modules are binary distribution. This potentially causes problems with existing binaries/libraries, not to mention possible (albeit unlikely) vector for malware. * We really don't like adding dependencies outside those that are really needed. In short, we have strong reasons to NOT implement PROXYv2 CRC32C validation, and we have plans to NEVER implement it. If you *absolutely* need PROXYv2 CRC32C validation, you should perform it yourself in the :meth:`handle_PROXY` hook. To assist you, we have provided the :attr:`whole_raw`, :attr:`tlv_start`, and :attr:`tlv_loc` attributes. You should do the following: 0. Choose a CRC32C module of your liking, install that, and import it. 1. Find the "CRC32C" TLV Vector in ``whole_raw``; it would start at byte ``tlv_start + tlv_loc["CRC32C"]`` 2. Zero out the 4-octet Value part of the "CRC32C" TLV Vector 3. Perform CRC32C calculation over the modified ``whole_raw`` 4. Convert the result to big-endian bytes, and compare with the ``.CRC32C`` attribute of the ProxyTLV instance Example:: # The int(3) at end is to skip over the "T" and "L" part offset = proxy_data.tlv_start + proxy_data.tlv.tlv_loc["CRC32C"] + 3 # Since whole_raw is a bytearray, we can do slice replacement proxy_data.whole_raw[offset:offset + 4] = "\x00\x00\x00\x00" # Actual syntax will depend on the module you use calculated: int = crc32c(proxy_data.whole_raw) # Adjust first part as necessary if calculated is not int validated = calculated.to_bytes(4, "big") == proxy_data.tlv.CRC32C Good luck! .. _`command`: https://github.com/haproxy/haproxy/blob/v2.3.0/doc/proxy-protocol.txt#L346-L358 .. _`address family`: https://github.com/haproxy/haproxy/blob/v2.3.0/doc/proxy-protocol.txt#L366-L381 .. _`INET protocol and family`: https://github.com/haproxy/haproxy/blob/v2.3.0/doc/proxy-protocol.txt#L207-L213 .. _`transport protocol being proxied`: https://github.com/haproxy/haproxy/blob/v2.3.0/doc/proxy-protocol.txt#L388-L402 .. _TLV portion: https://github.com/haproxy/haproxy/blob/v2.3.0/doc/proxy-protocol.txt#L519 .. _listed in the documentation: https://github.com/haproxy/haproxy/blob/v2.3.0/doc/proxy-protocol.txt#L538-L549 .. 
_PROXYv2 CRC32C validation: https://github.com/haproxy/haproxy/blob/v2.3.0/doc/proxy-protocol.txt#L574-L597 aio-libs-aiosmtpd-b634d9b/aiosmtpd/docs/requirements.txt000066400000000000000000000001531462210711200234750ustar00rootroot00000000000000-r ../../requirements.txt ### Sphinx deps sphinx==7.3.7 sphinx-autofixture==0.4.0 sphinx_rtd_theme==2.0.0 aio-libs-aiosmtpd-b634d9b/aiosmtpd/docs/smtp.rst000066400000000000000000000454611462210711200217410ustar00rootroot00000000000000.. _smtp: ================= The SMTP Module ================= At the heart of this module is the ``SMTP`` class in the ``aiosmtpd.smtp`` module. This class implements the :rfc:`5321` Simple Mail Transport Protocol. Often you won't run an ``SMTP`` instance directly, but instead will use a :ref:`Controller ` instance to run the server in a subthread. .. doctest:: >>> from aiosmtpd.controller import Controller The ``SMTP`` class is itself a subclass of |StreamReaderProtocol|_ .. _subclass: Subclassing =========== While behavior for common SMTP commands can be specified using :ref:`handlers `, more complex specializations such as adding custom SMTP commands require subclassing the ``SMTP`` class. For example, let's say you wanted to add a new SMTP command called ``PING``. All methods implementing ``SMTP`` commands are prefixed with ``smtp_``; they must also be coroutines. Here's how you could implement this use case: .. doctest:: >>> import asyncio >>> from aiosmtpd.smtp import SMTP as Server, syntax >>> class MyServer(Server): ... @syntax('PING [ignored]') ... async def smtp_PING(self, arg): ... await self.push('259 Pong') Now let's run this server in a controller: .. doctest:: >>> from aiosmtpd.handlers import Sink >>> class MyController(Controller): ... def factory(self): ... return MyServer(self.handler) >>> controller = MyController(Sink()) >>> controller.start() We can now connect to this server with an ``SMTP`` client. .. doctest:: >>> from smtplib import SMTP as Client >>> client = Client(controller.hostname, controller.port) Let's ping the server. Since the ``PING`` command isn't an official ``SMTP`` command, we have to use the lower level interface to talk to it. .. doctest:: >>> code, message = client.docmd('PING') >>> code 259 >>> message b'Pong' Because we prefixed the ``smtp_PING()`` method with the ``@syntax()`` decorator, the command shows up in the ``HELP`` output. .. doctest:: >>> print(client.help().decode('utf-8')) Supported commands: AUTH DATA EHLO HELO HELP MAIL NOOP PING QUIT RCPT RSET VRFY And we can get more detailed help on the new command. .. doctest:: >>> print(client.help('PING').decode('utf-8')) Syntax: PING [ignored] Don't forget to ``stop()`` the controller when you're done. .. doctest:: >>> controller.stop() Server hooks ============ .. warning:: These methods are deprecated. See :ref:`handler hooks ` instead. The ``SMTP`` server class also implements some hooks which your subclass can override to provide additional responses. .. py:function:: ehlo_hook() This hook makes it possible for subclasses to return additional ``EHLO`` responses. This method, called *asynchronously* and taking no arguments, can do whatever it wants, including (most commonly) pushing new ``250-`` responses to the client. This hook is called just before the standard ``250 HELP`` which ends the ``EHLO`` response from the server. .. deprecated:: 1.2 .. py:function:: rset_hook() This hook makes it possible to return additional ``RSET`` responses. 
This method, called *asynchronously* and taking no arguments, is called just before the standard ``250 OK`` which ends the ``RSET`` response from the server. .. deprecated:: 1.2 .. _smtp_api: aiosmtpd.smtp ============= .. py:module:: aiosmtpd.smtp .. py:data:: AuthenticatorType :value: Callable[[SMTP, Session, Envelope, str, Any], AuthResult] .. decorator:: auth_mechanism(actual_name) :param actual_name: Name of the AUTH Mechanism implemented by the method. See :ref:`authmech` for more info. :type actual_name: str This decorator specifies the actual name of the AUTH Mechanism implemented by the method being decorated, regardless of the method's name. .. important:: The decorated method's name MUST still start with ``auth_`` .. class:: AuthResult Contains the result of the Authentication Procedure. For more info, please see :class:`AuthResult` .. class:: LoginPassword(login: bytes, password: bytes) A subclass of :class:`typing.NamedTuple` that holds the Authentication Data for the built-in ``LOGIN`` and ``PLAIN`` AUTH Mechanisms. It is to be used for Authentication purposes by :func:`Authenticator` For more information, please refer to the :ref:`auth` page. .. class:: SMTP(handler, *, data_size_limit=33554432, enable_SMTPUTF8=False, \ decode_data=False, hostname=None, ident=None, tls_context=None, \ require_starttls=False, timeout=300, auth_required=False, \ auth_require_tls=True, auth_exclude_mechanism=None, auth_callback=None, \ authenticator=None, command_call_limit=None, \ proxy_protocol_timeout=None, \ loop=None) | | :part:`Parameters` .. py:attribute:: handler An instance of a :ref:`handler ` class that optionally can implement :ref:`hooks`. .. py:attribute:: data_size_limit :type: int :value: 33554432 :noindex: The limit in number of bytes that is accepted for client SMTP commands. It is returned to ESMTP clients in the ``250-SIZE`` response. .. py:attribute:: enable_SMTPUTF8 :type: bool :value: False :noindex: When ``True``, causes the ESMTP ``SMTPUTF8`` option to be returned to the client, and allows for UTF-8 content to be accepted, as defined in :rfc:`6531`. .. py:attribute:: decode_data :type: bool :value: False When ``True``, attempts to decode byte content in the ``DATA`` command, assigning the string value to the :ref:`envelope's ` ``content`` attribute. .. py:attribute:: hostname :type: Optional[str] :value: None :noindex: The first part of the string returned in the ``220`` greeting response given to clients when they first connect to the server. If not given, the system's fully-qualified domain name is used. .. py:attribute:: ident :type: Optional[str] :value: None The second part of the string returned in the ``220`` greeting response that identifies the software name and version of the SMTP server to the client. If not given, a default Python SMTP ident is used. .. py:attribute:: tls_context :type: Optional[ssl.SSLContext] :value: None :noindex: An instance of :class:`ssl.SSLContext`. Providing this will enable support for ``STARTTLS`` ESMTP/LMTP option as defined in :rfc:`3207`. See :ref:`tls` for a more in-depth discussion on enabling ``STARTTLS``. .. py:attribute:: require_starttls :type: bool :value: False :noindex: If set to ``True``, then client must send ``STARTTLS`` before "restricted" ESMTP commands can be issued. "Restricted" ESMTP commands are all commands not in the set ``{"NOOP", "EHLO", "STARTTLS", "QUIT"}`` .. py:attribute:: timeout :type: Union[int, float] :value: 300 The number of seconds to wait between valid SMTP commands. 
After this time the connection will be closed by the server. The default is 300 seconds, as per :rfc:`2821`. .. py:attribute:: auth_required :type: bool :value: False Specifies whether SMTP Authentication is mandatory or not for the session. This impacts some SMTP commands such as ``HELP``, ``MAIL FROM``, ``RCPT TO``, and others. .. py:attribute:: auth_require_tls :type: bool :value: True Specifies whether ``STARTTLS`` must be used before AUTH exchange or not. If you set this to ``False``, then AUTH exchange can be done outside a TLS context, but the class will warn you of security considerations. Has no effect if :attr:`require_starttls` is ``True``. .. py:attribute:: auth_exclude_mechanism :type: Optional[Iterable[str]] :value: None Specifies which AUTH mechanisms to NOT use. This is the only way to completely disable the built-in AUTH mechanisms. See :ref:`auth` for a more in-depth discussion on AUTH mechanisms. .. versionadded:: 1.2.2 .. py:attribute:: auth_callback :type: Callable[[str, bytes, bytes], bool] :value: login_always_fail A function that accepts three arguments: ``mechanism: str``, ``login: bytes``, and ``password: bytes``. Based on these arguments, the function must return a ``bool`` that indicates whether the client's authentication attempt is accepted/successful or not. .. deprecated:: 1.3 Use :attr:`authenticator` instead. This parameter **will be removed in version 2.0**. .. py:attribute:: authenticator :type: aiosmtpd.smtp.AuthenticatorType :value: None A function whose signature is identical to ``aiosmtpd.smtp.AuthenticatorType``. See :func:`Authenticator` for more information. .. versionadded:: 1.3 .. py:attribute:: command_call_limit :type: Optional[Union[int, Dict[str, int]]] :value: None If not ``None``, sets the maximum number of times a certain SMTP command can be invoked. This is to prevent DoS by a malicious client that connects and never disconnects, continually sending SMTP commands to avoid the timeout. The handling differs based on the type: .. highlights:: If :attr:`command_call_limit` is of type ``int``, then the value is the call limit for ALL SMTP commands. If :attr:`command_call_limit` is of type ``dict``, it must be a ``Dict[str, int]`` (the type of the values will be enforced). The keys will be the SMTP Command to set the limit for, the values will be the call limit per SMTP Command. .. highlights:: A special key of ``"*"`` is used to set the 'default' call limit for commands not explicitly declared in :attr:`command_call_limit`. If ``"*"`` is not given, then the 'default' call limit will be set to ``aiosmtpd.smtp.CALL_LIMIT_DEFAULT`` Other types -- or a ``Dict`` in which any value is not an ``int`` -- will raise a ``TypeError`` exception. Examples:: # All commands have a limit of 10 calls SMTP(..., command_call_limit=10) # Commands RCPT and NOOP have their own limits; others have an implicit limit # of 20 (CALL_LIMIT_DEFAULT) SMTP(..., command_call_limit={"RCPT": 30, "NOOP": 5}) # Commands RCPT and NOOP have their own limits; others set to 3 SMTP(..., command_call_limit={"RCPT": 20, "NOOP": 10, "*": 3}) If not given (or set to ``None``), then command call limit will not be enforced. **This will change in version 2.0**. .. versionadded:: 1.2.3 .. py:attribute:: proxy_protocol_timeout :type: Optional[Union[int, float]] :value: None If given (not ``None``), activates support for **PROXY Protocol**. Please read the `PROXY Protocol Support documentation `_ for a more in-depth explanation. If not given (or ``None``), disables support for PROXY Protocol. ..
   .. py:attribute:: command_call_limit
      :type: Optional[Union[int, Dict[str, int]]]
      :value: None

      If not ``None``, sets the maximum number of times a certain SMTP command
      can be invoked.
      This helps prevent DoS attacks by a malicious client that connects and
      never disconnects, continually sending SMTP commands so that the
      connection never times out.

      The handling differs based on the type:

      .. highlights::

         If :attr:`command_call_limit` is of type ``int``, then the value is
         the call limit for ALL SMTP commands.

         If :attr:`command_call_limit` is of type ``dict``, it must be a
         ``Dict[str, int]`` (the type of the values will be enforced).
         The keys will be the SMTP Command to set the limit for, the values
         will be the call limit per SMTP Command.

      .. highlights::

         A special key of ``"*"`` is used to set the 'default' call limit for
         commands not explicitly declared in :attr:`command_call_limit`.
         If ``"*"`` is not given, then the 'default' call limit will be set to
         ``aiosmtpd.smtp.CALL_LIMIT_DEFAULT``.

      Other types -- or a ``Dict`` in which any value is not an ``int`` --
      will raise a ``TypeError`` exception.

      Examples::

         # All commands have a limit of 10 calls
         SMTP(..., command_call_limit=10)

         # Commands RCPT and NOOP have their own limits; others have an implicit limit
         # of 20 (CALL_LIMIT_DEFAULT)
         SMTP(..., command_call_limit={"RCPT": 30, "NOOP": 5})

         # Commands RCPT and NOOP have their own limits; others set to 3
         SMTP(..., command_call_limit={"RCPT": 20, "NOOP": 10, "*": 3})

      If not given (or set to ``None``), then the command call limit will not
      be enforced.
      **This will change in version 2.0**.

      .. versionadded:: 1.2.3

   .. py:attribute:: proxy_protocol_timeout
      :type: Optional[Union[int, float]]
      :value: None

      If given (not ``None``), activates support for **PROXY Protocol**.
      Please read the `PROXY Protocol Support documentation `_ for a more
      in-depth explanation.

      If not given (or ``None``), disables support for PROXY Protocol.

      .. warning::

         When PROXY protocol support is activated, :class:`SMTP`'s behavior
         changes: it no longer immediately sends the ``220`` greeting upon
         client connection; instead, it waits for the client to first send the
         PROXY protocol header, in accordance with the PROXY Protocol
         standard.

      .. versionadded:: 1.4

   .. py:attribute:: loop
      :noindex:

      The asyncio event loop to use.
      If not given, :meth:`asyncio.new_event_loop` will be called to create
      the event loop.

   |
   |

   :part:`Attributes & Methods`

   .. py:attribute:: line_length_limit

      The maximum line length, in octets (not characters; one UTF-8 character
      may result in more than one octet).
      Defaults to ``1001`` in compliance with
      :rfc:`RFC 5321 § 4.5.3.1.6 <5321#section-4.5.3.1.6>`.

      .. attention::

         This sets the *stream limit* of
         :meth:`asyncio.StreamReader.readuntil`, thus impacting how the method
         works.
         In previous versions of aiosmtpd, the limit was not set.
         To return to the behavior of the previous versions, set
         :attr:`line_length_limit` to ``2**16`` *before* instantiating the
         :class:`SMTP` class.

   .. py:attribute:: local_part_limit

      The maximum length (in octets) of the local part of email addresses.

      :rfc:`RFC 5321 § 4.5.3.1.1 <5321#section-4.5.3.1.1>` specifies a maximum
      length of 64 octets, but this requirement is flexible and can be relaxed
      at the server's discretion (see :rfc:`§ 4.5.3.1 <5321#section-4.5.3.1>`).

      Setting this to ``0`` (the default) disables this limit completely.

   .. py:attribute:: AuthLoginUsernameChallenge

      A ``str`` containing the challenge to be sent (base64-encoded) as the
      first challenge in the ``AUTH LOGIN`` mechanism.

   .. py:attribute:: AuthLoginPasswordChallenge

      A ``str`` containing the challenge to be sent (base64-encoded) as the
      second challenge in the ``AUTH LOGIN`` mechanism.

   .. attribute:: event_handler

      The *handler* instance passed into the constructor.

   .. attribute:: data_size_limit

      The value of the *data_size_limit* argument passed into the constructor.

   .. attribute:: enable_SMTPUTF8

      The value of the *enable_SMTPUTF8* argument passed into the constructor.

   .. attribute:: hostname

      The ``220`` greeting hostname.
      This will either be the value of the *hostname* argument passed into the
      constructor, or the system's fully qualified host name.

   .. attribute:: tls_context

      The value of the *tls_context* argument passed into the constructor.

   .. attribute:: require_starttls

      True if the *tls_context* argument was given to the constructor **and**
      the *require_starttls* flag was True.

   .. attribute:: session

      The active :ref:`session ` object, if there is one, otherwise None.

   .. attribute:: envelope

      The active :ref:`envelope ` object, if there is one, otherwise None.

   .. attribute:: transport

      The active `asyncio transport`_ if there is one, otherwise None.

   .. attribute:: loop

      The event loop being used.
      This will either be the given *loop* argument, or the new event loop
      that was created.

   .. attribute:: authenticated

      A flag that indicates whether authentication has succeeded.

   .. method:: _create_session()

      A method subclasses can override to return custom ``Session`` instances.

   .. method:: _create_envelope()

      A method subclasses can override to return custom ``Envelope`` instances.

   .. method:: push(status)
      :async:

      The method that subclasses and handlers should use to return statuses to
      SMTP clients.
      This is a coroutine.
      *status* can be a bytes object, but for convenience it is more likely to
      be a string.
      If it's a string, it must be ASCII, unless *enable_SMTPUTF8* is True in
      which case it will be encoded as UTF-8.

   .. method:: smtp_(arg)
      :async:

      Coroutine methods implementing the SMTP protocol commands.
      For example, ``smtp_HELO()`` implements the SMTP ``HELO`` command.
      Subclasses can override these, or add new command methods to implement
      custom extensions to the SMTP protocol.
      *arg* is the rest of the SMTP command given by the client, or None if
      nothing but the command was given.

   .. py:method:: challenge_auth(\
      challenge, encode_to_b64=True, log_client_response=False\
      ) -> Union[_Missing, bytes]
      :async:

      :param challenge: The SMTP AUTH challenge to send to the client.
         May be in plaintext, may be in base64.
         Do NOT prefix with "334 "!
      :type challenge: AnyStr
      :param encode_to_b64: If true, will perform base64-encoding before
         sending the challenge to the client.
      :type encode_to_b64: bool
      :param log_client_response: If true, the client's response will be
         logged.
      :type log_client_response: bool
      :return: Response from client (already base64-decoded) or ``MISSING``
         (see description)

      This method will return ``MISSING`` if either of these scenarios
      happens:

      * the client aborted the ``AUTH`` procedure by sending ``b"*"``, or
      * the client's response to the challenge cannot be base64-decoded.

      .. warning::

         Setting ``log_client_response=True`` might cause leakage of sensitive
         information!
         :boldital:`DO NOT TURN ON` UNLESS ABSOLUTELY NECESSARY!
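   For illustration only, a minimal sketch of how the overridable pieces above
   might be used together; the subclass names, the ``PING`` command, and the
   extra session attribute are invented for this example::

      from aiosmtpd.smtp import SMTP, Session, syntax

      class TaggedSession(Session):
          def __init__(self, loop):
              super().__init__(loop)
              self.tag = None  # extra, application-specific attribute

      class MySMTP(SMTP):
          def _create_session(self):
              # Return the custom Session so handlers can use session.tag
              return TaggedSession(self.loop)

          @syntax("PING")
          async def smtp_PING(self, arg):
              # arg is the rest of the command line, or None
              await self.push("250 pong")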
.. _tls:

Enabling STARTTLS
=================

To enable :rfc:`3207` ``STARTTLS``, you must supply the *tls_context* argument
to the :class:`SMTP` class.
*tls_context* is created with the :func:`ssl.create_default_context` call from
the :mod:`ssl` module, as follows::

    context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)

The context must be initialized with a server certificate, private key, and/or
intermediate CA certificate chain with the
:meth:`ssl.SSLContext.load_cert_chain` method.
This can be done with separate files, or an all-in-one file.
Files must be in PEM format.

For example, if you wanted to use a self-signed certificate for localhost,
which is easy to create but doesn't provide much security, you could use the
:manpage:`openssl(1)` command like so::

    $ openssl req -x509 -newkey rsa:4096 -keyout key.pem -out cert.pem \
      -days 365 -nodes -subj '/CN=localhost'

and then in Python::

    context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
    context.load_cert_chain('cert.pem', 'key.pem')

Now pass the ``context`` object to the *tls_context* argument in the ``SMTP``
constructor.

Note that a number of exceptions can be generated by these methods, and by SSL
connections, which you must be prepared to handle.
Additional documentation is available in Python's :mod:`ssl` module, and
should be reviewed before use; in particular if client authentication and/or
advanced error handling is desired.

If *require_starttls* is ``True``, a TLS session must be initiated for the
server to respond to any commands other than ``EHLO``/``LHLO``, ``NOOP``,
``QUIT``, and ``STARTTLS``.

If *require_starttls* is ``False`` (the default), use of TLS is not required;
the client *may* upgrade the connection to TLS, or may use any supported
command over an insecure connection.

If *tls_context* is not supplied, the ``STARTTLS`` option will not be
advertised, and the ``STARTTLS`` command will not be accepted.
*require_starttls* is meaningless in this case, and should be set to
``False``.

.. _`asyncio transport`: https://docs.python.org/3/library/asyncio-protocol.html#asyncio-transport
.. _StreamReaderProtocol: https://docs.python.org/3/library/asyncio-stream.html#streamreaderprotocol

..
|StreamReaderProtocol| replace:: ``StreamReaderProtocol`` aio-libs-aiosmtpd-b634d9b/aiosmtpd/docs/testing.rst000066400000000000000000000116341462210711200224260ustar00rootroot00000000000000.. _testing: ========= Testing ========= Testing (which includes unit testing, integration testing, and regression testing) is very important for quality code; extremely so if the code is a library that will be used in other software. Test Framework: ``pytest`` ========================== ``aiosmtpd`` uses the |pytest|_ testing framework. Advanced features of pytest are widely used throughout. .. _`pytest`: https://docs.pytest.org/en/stable/contents.html .. |pytest| replace:: ``pytest`` Plugins ------- The one **required** plugin is |pytest-mock|_; it is used extensively throughout the test suite. Other plugins that are used, to various degrees, in the ``aiosmtpd`` test suite are: * |pytest-cov|_ to integrate with |coverage-py|_ * |pytest-sugar|_ to provide better ux * |pytest-print|_ to give some progress indicator and to assist test troubleshooting * |pytest-profiling|_ to implement ``*-profile`` testenv, although to be honest this is not really useful as the profiling gets 'muddied' by pytest runner. .. _`pytest-mock`: https://pypi.org/project/pytest-mock/ .. |pytest-mock| replace:: ``pytest-mock`` .. _`pytest-cov`: https://pypi.org/project/pytest-cov/ .. |pytest-cov| replace:: ``pytest-cov`` .. _`coverage-py`: https://pypi.org/project/coverage/ .. |coverage-py| replace:: ``coverage-py`` .. _`pytest-sugar`: https://pypi.org/project/pytest-sugar/ .. |pytest-sugar| replace:: ``pytest-sugar`` .. _`pytest-print`: https://pypi.org/project/pytest-print/ .. |pytest-print| replace:: ``pytest-print`` .. _`pytest-profiling`: https://pypi.org/project/pytest-profiling/ .. |pytest-profiling| replace:: ``pytest-profiling`` Fixtures -------- Below is a list of fixtures defined throught the test suite, in alphabetical order: .. autofixture:: aiosmtpd.tests.conftest.client .. autofixture:: aiosmtpd.tests.conftest.get_controller :param class\_: The class of the controller to be instantiated. If given, overrides ``class_`` arg of :func:`controller_data`. If not specified and no ``class_`` from ``controller_data``, defaults to :class:`ExposingController`. :return: an instance of :class:`Controller` (or a subclass of) In addition to explicitly-specified parameters, ``get_controller`` also fetches all ``*args`` and ``**kwargs`` parameters from :func:`controller_data` marker. .. autofixture:: aiosmtpd.tests.conftest.get_handler :param class\_: The class of the handler to be instantiated. If given, overrides ``class_`` arg of :func:`handler_data`. If not specified and no ``class_`` from ``handler_data``, defaults to :class:`Sink`. :return: an instance of the handler class. In addition to explicitly-specified parameters, ``get_handler`` also fetches all ``*args`` and ``**kwargs`` parameters from :func:`handler_data` marker. .. autofixture:: aiosmtpd.tests.conftest.nodecode_controller This is actually identical to using :fixture:`plain_controller` with marker ``@controller_data(decode_data=False)``. But because this is used in a lot of test cases, it's tidier to just make this into a dedicated fixture. .. autofixture:: aiosmtpd.tests.conftest.plain_controller .. autofixture:: aiosmtpd.tests.conftest.silence_event_loop_closed .. autofixture:: aiosmtpd.tests.conftest.ssl_context_client .. autofixture:: aiosmtpd.tests.conftest.ssl_context_server .. 
important:: As long as you create your test module(s) inside the ``aiosmtpd/tests`` directory, you do not need to import the above fixtures; they will automatically be available for use as they are defined in the ``conftest.py`` file. .. note:: Individual test modules may define their own module-specific fixtures; please refer to their respective docstrings for description / usage guide. Markers ------- .. decorator:: client_data(...) Provides parameters to the :fixture:`~aiosmtpd.tests.conftest.client` fixture. :param connect_to: Address to connect to. Defaults to ``Global.SrvAddr`` :type connect_to: :class:`HostPort` .. decorator:: controller_data(...) Provides parameters to the :fixture:`~aiosmtpd.tests.conftest.get_controller` fixture. :param class\_: The class to be instantiated by ``get_controller``. Will be overridden if ``get_controller`` is invoked with the ``class_`` argument. :param host_port: The "host:port" to bound to :type host_port: str :param \*\*kwargs: Keyworded arguments given to the marker. .. decorator:: handler_data(...) Provides parameters to the :fixture:`~aiosmtpd.tests.conftest.get_handler` fixture. :param args\_: A tuple containing values that will be passed as positional arguments to the controller constructor :param class\_: The class to be instantiated by ``get_controller`` :param \*args: Positional arguments given to the marker. Will override the ``args_`` keyword argument :param \*\*kwargs: Keyworded arguments given to the marker. aio-libs-aiosmtpd-b634d9b/aiosmtpd/handlers.py000066400000000000000000000222241462210711200214360ustar00rootroot00000000000000# Copyright 2014-2021 The aiosmtpd Developers # SPDX-License-Identifier: Apache-2.0 """Handlers which provide custom processing at various events. At certain times in the SMTP protocol, various events can be processed. These events include the SMTP commands, and at the completion of the data receipt. Pass in an instance of one of these classes, or derive your own, to provide your own handling of messages. Implement only the methods you care about. """ import asyncio import logging import mailbox import os import re import smtplib import sys from abc import ABCMeta, abstractmethod from argparse import ArgumentParser from email.message import Message as Em_Message from email.parser import BytesParser, Parser from typing import Any, List, TextIO, Type, TypeVar, Optional, Union from public import public from aiosmtpd import _get_or_new_eventloop from aiosmtpd.smtp import SMTP as SMTPServer from aiosmtpd.smtp import Envelope as SMTPEnvelope from aiosmtpd.smtp import Session as SMTPSession T = TypeVar("T") EMPTYBYTES = b"" COMMASPACE = ", " CRLF = b"\r\n" NLCRE = re.compile(br"\r\n|\r|\n") log = logging.getLogger("mail.debug") def _format_peer(peer: str) -> str: # This is a separate function mostly so the test suite can craft a # reproducible output. return "X-Peer: {!r}".format(peer) def message_from_bytes(s, *args, **kws): return BytesParser(*args, **kws).parsebytes(s) def message_from_string(s, *args, **kws): return Parser(*args, **kws).parsestr(s) @public class Debugging: def __init__(self, stream: Optional[TextIO] = None): self.stream: TextIO = sys.stdout if stream is None else stream @classmethod def from_cli(cls: Type[T], parser: ArgumentParser, *args) -> T: # TODO(PY311): Use Self instead of T. 
error = False stream = None if len(args) == 0: pass elif len(args) > 1: error = True elif args[0] == "stdout": stream = sys.stdout elif args[0] == "stderr": stream = sys.stderr else: error = True if error: parser.error("Debugging usage: [stdout|stderr]") return cls(stream) # type: ignore[call-arg] def _print_message_content(self, peer: str, data: Union[str, bytes]) -> None: in_headers = True for line in data.splitlines(): # Dump the RFC 2822 headers first. if in_headers and not line: print(_format_peer(peer), file=self.stream) in_headers = False if isinstance(line, bytes): # Avoid spurious 'str on bytes instance' warning. line = line.decode("utf-8", "replace") print(line, file=self.stream) async def handle_DATA( self, server: SMTPServer, session: SMTPSession, envelope: SMTPEnvelope ) -> str: print("---------- MESSAGE FOLLOWS ----------", file=self.stream) # Yes, actually test for truthiness since it's possible for either the # keywords to be missing, or for their values to be empty lists. add_separator = False if envelope.mail_options: print("mail options:", envelope.mail_options, file=self.stream) add_separator = True # rcpt_options are not currently support by the SMTP class. rcpt_options = envelope.rcpt_options if any(rcpt_options): # pragma: nocover print("rcpt options:", rcpt_options, file=self.stream) add_separator = True if add_separator: print(file=self.stream) assert session.peer is not None assert envelope.content is not None self._print_message_content(session.peer, envelope.content) print("------------ END MESSAGE ------------", file=self.stream) return "250 OK" @public class Proxy: def __init__(self, remote_hostname: str, remote_port: int): self._hostname = remote_hostname self._port = remote_port async def handle_DATA( self, server: SMTPServer, session: SMTPSession, envelope: SMTPEnvelope ) -> str: if isinstance(envelope.content, str): content = envelope.original_content else: content = envelope.content assert content is not None lines = content.splitlines(keepends=True) # Look for the last header _i = 0 ending = CRLF for _i, line in enumerate(lines): # pragma: nobranch if NLCRE.match(line): ending = line break assert session.peer is not None peer = session.peer[0].encode("ascii") lines.insert(_i, b"X-Peer: " + peer + ending) data = EMPTYBYTES.join(lines) assert envelope.mail_from is not None assert all(r is not None for r in envelope.rcpt_tos) refused = self._deliver(envelope.mail_from, envelope.rcpt_tos, data) # TBD: what to do with refused addresses? log.info("we got some refusals: %s", refused) return "250 OK" def _deliver( self, mail_from: str, rcpt_tos: List[str], data: Union[str, bytes] ) -> Any: refused = {} try: s = smtplib.SMTP() s.connect(self._hostname, self._port) try: refused = s.sendmail(mail_from, rcpt_tos, data) # pytype: disable=wrong-arg-types # noqa: E501 finally: s.quit() except smtplib.SMTPRecipientsRefused as e: log.info("got SMTPRecipientsRefused") refused = e.recipients except (OSError, smtplib.SMTPException) as e: log.exception("got %s", e.__class__) # All recipients were refused. If the exception had an associated # error code, use it. Otherwise, fake it with a non-triggering # exception code. 
errcode = getattr(e, "smtp_code", -1) errmsg = getattr(e, "smtp_error", b"ignore") for r in rcpt_tos: refused[r] = (errcode, errmsg) return refused @public class Sink: @classmethod def from_cli(cls: Type[T], parser: ArgumentParser, *args) -> T: if len(args) > 0: parser.error("Sink handler does not accept arguments") return cls() class MessageBase(metaclass=ABCMeta): def __init__(self, message_class: Optional[Type[Em_Message]] = None): self.message_class = message_class def prepare_message( self, session: SMTPSession, envelope: SMTPEnvelope ) -> Em_Message: # If the server was created with decode_data True, then data will be a # str, otherwise it will be bytes. data = envelope.content message: Em_Message if isinstance(data, (bytes, bytearray)): message = message_from_bytes(data, self.message_class) elif isinstance(data, str): message = message_from_string(data, self.message_class) else: raise TypeError(f"Expected str or bytes, got {type(data)}") assert isinstance(message, Em_Message) message["X-Peer"] = str(session.peer) message["X-MailFrom"] = envelope.mail_from message["X-RcptTo"] = COMMASPACE.join(envelope.rcpt_tos) return message @abstractmethod async def handle_DATA( self, server: SMTPServer, session: SMTPSession, envelope: SMTPEnvelope ) -> str: ... @public class Message(MessageBase, metaclass=ABCMeta): async def handle_DATA( self, server: SMTPServer, session: SMTPSession, envelope: SMTPEnvelope ) -> str: message = self.prepare_message(session, envelope) self.handle_message(message) return "250 OK" @abstractmethod def handle_message(self, message: Em_Message) -> None: ... @public class AsyncMessage(MessageBase, metaclass=ABCMeta): def __init__( self, message_class: Optional[Type[Em_Message]] = None, *, loop: Optional[asyncio.AbstractEventLoop] = None, ): super().__init__(message_class) self.loop = loop or _get_or_new_eventloop() async def handle_DATA( self, server: SMTPServer, session: SMTPSession, envelope: SMTPEnvelope ) -> str: message = self.prepare_message(session, envelope) await self.handle_message(message) return "250 OK" @abstractmethod async def handle_message(self, message: Em_Message) -> None: ... @public class Mailbox(Message): def __init__( self, mail_dir: os.PathLike, message_class: Optional[Type[Em_Message]] = None, ): self.mailbox = mailbox.Maildir(mail_dir) self.mail_dir = mail_dir super().__init__(message_class) def handle_message(self, message: Em_Message) -> None: self.mailbox.add(message) def reset(self) -> None: self.mailbox.clear() @classmethod def from_cli(cls: Type[T], parser: ArgumentParser, *args) -> T: # TODO(PY311): Use Self instead of T. 
if len(args) < 1: parser.error("The directory for the maildir is required") elif len(args) > 1: parser.error("Too many arguments for Mailbox handler") return cls(args[0]) # type: ignore[call-arg] aio-libs-aiosmtpd-b634d9b/aiosmtpd/lmtp.py000066400000000000000000000013241462210711200206100ustar00rootroot00000000000000# Copyright 2014-2021 The aiosmtpd Developers # SPDX-License-Identifier: Apache-2.0 from public import public from aiosmtpd.smtp import SMTP, syntax @public class LMTP(SMTP): show_smtp_greeting: bool = False @syntax('LHLO hostname') async def smtp_LHLO(self, arg: str) -> None: """The LMTP greeting, used instead of HELO/EHLO.""" await super().smtp_EHLO(arg) async def smtp_HELO(self, arg: str) -> None: """HELO is not a valid LMTP command.""" await self.push('500 Error: command "HELO" not recognized') async def smtp_EHLO(self, arg: str) -> None: """EHLO is not a valid LMTP command.""" await self.push('500 Error: command "EHLO" not recognized') aio-libs-aiosmtpd-b634d9b/aiosmtpd/main.py000066400000000000000000000225021462210711200205610ustar00rootroot00000000000000# Copyright 2014-2021 The aiosmtpd Developers # SPDX-License-Identifier: Apache-2.0 import logging import os import signal import ssl import sys from argparse import ArgumentParser, Namespace from contextlib import suppress from functools import partial from importlib import import_module from pathlib import Path from typing import Optional, Sequence, Tuple from public import public from aiosmtpd import __version__, _get_or_new_eventloop from aiosmtpd.smtp import DATA_SIZE_DEFAULT, SMTP try: import pwd except ImportError: # pragma: has-pwd pwd = None # type: ignore[assignment] DEFAULT_HOST = "localhost" DEFAULT_PORT = 8025 DEFAULT_CLASS = "aiosmtpd.handlers.Debugging" # Make the program name a little nicer, especially when `python3 -m aiosmtpd` # is used. PROGRAM = "aiosmtpd" if "__main__.py" in sys.argv[0] else sys.argv[0] # Need to emit ArgumentParser by itself so autoprogramm extension can do its magic def _parser() -> ArgumentParser: parser = ArgumentParser( prog=PROGRAM, description="An RFC 5321 SMTP server with extensions." ) parser.add_argument( "-v", "--version", action="version", version="%(prog)s {}".format(__version__) ) parser.add_argument( "-n", "--nosetuid", dest="setuid", default=True, action="store_false", help=( "This program generally tries to setuid ``nobody``, unless this " "flag is set. The setuid call will fail if this program is not " "run as root (in which case, use this flag)." ), ) parser.add_argument( "-c", "--class", dest="classpath", metavar="CLASSPATH", default=DEFAULT_CLASS, help=( f"Use the given class, as a Python dotted import path, as the " f"handler class for SMTP events. This class can process " f"received messages and do other actions during the SMTP " f"dialog. Uses ``{DEFAULT_CLASS}`` by default." ), ) parser.add_argument( "-s", "--size", metavar="SIZE", type=int, help=( f"Restrict the total size of the incoming message to " f"``SIZE`` number of bytes via the RFC 1870 SIZE extension. " f"Defaults to {DATA_SIZE_DEFAULT:,} bytes." ), ) parser.add_argument( "-u", "--smtputf8", default=False, action="store_true", help="""Enable the ``SMTPUTF8`` extension as defined in RFC 6531.""", ) parser.add_argument( "-d", "--debug", default=0, action="count", help=( "Increase debugging output. Every ``-d`` increases debugging level by one." ) ) parser.add_argument( "-l", "--listen", metavar="[HOST][:PORT]", nargs="?", default=None, help=( "Optional host and port to listen on. 
If the ``PORT`` part is not " "given, then port ``{port}`` is used. If only ``:PORT`` is given, " "then ``{host}`` is used for the hostname. If neither are given, " "``{host}:{port}`` is used.".format(host=DEFAULT_HOST, port=DEFAULT_PORT) ), ) parser.add_argument( "--smtpscert", metavar="CERTFILE", type=Path, default=None, help=( "The certificate file for implementing **SMTPS**. If given, the parameter " "``--smtpskey`` must also be specified." ), ) parser.add_argument( "--smtpskey", metavar="KEYFILE", type=Path, default=None, help=( "The key file for implementing **SMTPS**. If given, the parameter " "``--smtpscert`` must also be specified." ), ) parser.add_argument( "--tlscert", metavar="CERTFILE", type=Path, default=None, help=( "The certificate file for implementing **STARTTLS**. If given, the " "parameter ``--tlskey`` must also be specified." ), ) parser.add_argument( "--tlskey", metavar="KEYFILE", type=Path, default=None, help=( "The key file for implementing **STARTTLS**. If given, the parameter " "``--tlscert`` must also be specified." ), ) parser.add_argument( "--no-requiretls", dest="requiretls", default=True, action="store_false", help=( "If specified, disables ``require_starttls`` of the SMTP class. " "(By default, ``require_starttls`` is True.) " "Has no effect if ``--tlscert`` and ``--tlskey`` are not specified." ), ) parser.add_argument( "classargs", metavar="CLASSARGS", nargs="*", default=(), help="""Additional arguments passed to the handler CLASS.""", ) return parser def parseargs(args: Optional[Sequence[str]] = None) -> Tuple[ArgumentParser, Namespace]: parser = _parser() parsed = parser.parse_args(args) # Find the handler class. path, dot, name = parsed.classpath.rpartition(".") module = import_module(path) handler_class = getattr(module, name) if hasattr(handler_class, "from_cli"): parsed.handler = handler_class.from_cli(parser, *parsed.classargs) else: if len(parsed.classargs) > 0: parser.error(f"Handler class {path} takes no arguments") parsed.handler = handler_class() # Parse the host:port argument. 
if parsed.listen is None: parsed.host = DEFAULT_HOST parsed.port = DEFAULT_PORT else: host, colon, port = parsed.listen.rpartition(":") if len(colon) == 0: parsed.host = port parsed.port = DEFAULT_PORT else: parsed.host = DEFAULT_HOST if len(host) == 0 else host try: parsed.port = int(DEFAULT_PORT if len(port) == 0 else port) except ValueError: parser.error("Invalid port number: {}".format(port)) if bool(parsed.smtpscert) ^ bool(parsed.smtpskey): parser.error("--smtpscert and --smtpskey must be specified together") if parsed.smtpscert and not parsed.smtpscert.exists(): parser.error(f"Cert file {parsed.smtpscert} not found") if parsed.smtpskey and not parsed.smtpskey.exists(): parser.error(f"Key file {parsed.smtpskey} not found") if bool(parsed.tlscert) ^ bool(parsed.tlskey): parser.error("--tlscert and --tlskey must be specified together") if parsed.tlscert and not parsed.tlscert.exists(): parser.error(f"Cert file {parsed.tlscert} not found") if parsed.tlskey and not parsed.tlskey.exists(): parser.error(f"Key file {parsed.tlskey} not found") return parser, parsed @public def main(args: Optional[Sequence[str]] = None) -> None: parser, ns = parseargs(args=args) if ns.setuid: # pragma: on-win32 if pwd is None: print( # type: ignore[unreachable] 'Cannot import module "pwd"; try running with -n option.', file=sys.stderr, ) sys.exit(1) nobody = pwd.getpwnam("nobody").pw_uid try: os.setuid(nobody) except PermissionError: print( 'Cannot setuid "nobody"; try running with -n option.', file=sys.stderr ) sys.exit(1) if ns.tlscert and ns.tlskey: tls_context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH) tls_context.check_hostname = False tls_context.load_cert_chain(str(ns.tlscert), str(ns.tlskey)) else: tls_context = None factory = partial( SMTP, ns.handler, data_size_limit=ns.size, enable_SMTPUTF8=ns.smtputf8, tls_context=tls_context, require_starttls=ns.requiretls, ) logging.basicConfig(level=logging.ERROR) log = logging.getLogger("mail.log") loop = _get_or_new_eventloop() if ns.debug > 0: log.setLevel(logging.INFO) if ns.debug > 1: log.setLevel(logging.DEBUG) if ns.debug > 2: loop.set_debug(enabled=True) if ns.smtpscert and ns.smtpskey: smtps_context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH) smtps_context.check_hostname = False smtps_context.load_cert_chain(str(ns.smtpscert), str(ns.smtpskey)) else: smtps_context = None log.debug("Attempting to start server on %s:%s", ns.host, ns.port) server = server_loop = None try: server = loop.create_server( factory, host=ns.host, port=ns.port, ssl=smtps_context ) server_loop = loop.run_until_complete(server) except RuntimeError: # pragma: nocover raise log.debug(f"server_loop = {server_loop}") log.info("Server is listening on %s:%s", ns.host, ns.port) # Signal handlers are only supported on *nix, so just ignore the failure # to set this on Windows. 
with suppress(NotImplementedError): loop.add_signal_handler(signal.SIGINT, loop.stop) log.debug("Starting asyncio loop") with suppress(KeyboardInterrupt): loop.run_forever() server_loop.close() log.debug("Completed asyncio loop") loop.run_until_complete(server_loop.wait_closed()) loop.close() if __name__ == "__main__": # pragma: nocover main() aio-libs-aiosmtpd-b634d9b/aiosmtpd/proxy_protocol.py000066400000000000000000000413471462210711200227470ustar00rootroot00000000000000# Copyright 2014-2021 The aiosmtpd Developers # SPDX-License-Identifier: Apache-2.0 import contextlib import logging import re import struct from collections import deque from enum import IntEnum from functools import partial from ipaddress import IPv4Address, IPv6Address, ip_address from typing import Any, ByteString, Dict, Optional, Protocol, Tuple, Union import attr from public import public V2_SIGNATURE = b"\r\n\r\n\x00\r\nQUIT\n" class V2_CMD(IntEnum): """ Valid Version 2 "command" """ LOCAL = 0 PROXY = 1 class AF(IntEnum): """ Valid address families. Version 1 "UNKNOWN" mapped to "UNSPEC" """ UNSPEC = 0 """For version 1, means UNKNOWN""" INET = 1 """Internet Protocol v4""" INET6 = 2 """Internet Protocol v6""" UNIX = 3 """Unix Socket; invalid for version 1""" class PROTO(IntEnum): """ Valid transport protocols. Version 1 "UNKNOWN" mapped to "UNSPEC" """ UNSPEC = 0 """For version 1, means UNKNOWN""" STREAM = 1 """TCP""" DGRAM = 2 """UDP; invalid for version 1""" V2_VALID_CMDS = {item.value for item in V2_CMD} V2_VALID_FAMS = {item.value for item in AF} V2_VALID_PROS = {item.value for item in PROTO} V2_PARSE_ADDR_FAMPRO = { (AF.INET << 4) | PROTO.STREAM, (AF.INET << 4) | PROTO.DGRAM, (AF.INET6 << 4) | PROTO.STREAM, (AF.INET6 << 4) | PROTO.DGRAM, (AF.UNIX << 4) | PROTO.STREAM, (AF.UNIX << 4) | PROTO.DGRAM, } """Family & Proto combinations that need address parsing""" __all__ = ["struct", "partial", "IPv4Address", "IPv6Address"] __all__.extend( k for k in globals().keys() if k.startswith("V1_") or k.startswith("V2_") ) _NOT_FOUND = object() log = logging.getLogger("mail.debug") # region #### Custom Types ############################################################ EndpointAddress = Union[IPv4Address, IPv6Address, Union[str, bytes]] @public class MalformedTLV(RuntimeError): pass @public class UnknownTypeTLV(KeyError): pass @public class AsyncReader(Protocol): async def read(self, n: int = ...) -> bytes: ... async def readexactly(self, n: int) -> bytes: ... async def readuntil(self, separator: bytes = ...) -> bytes: ... _anoinit = partial(attr.ib, init=False) @public class ProxyTLV(dict): """ Represents the TLV Vectors inside a PROXYv2 Handshake """ __slots__ = ("tlv_loc",) # https://github.com/haproxy/haproxy/blob/v2.3.0/doc/proxy-protocol.txt#L538-L549 PP2_TYPENAME: Dict[int, str] = { 0x01: "ALPN", 0x02: "AUTHORITY", 0x03: "CRC32C", 0x04: "NOOP", 0x05: "UNIQUE_ID", 0x20: "SSL", 0x21: "SSL_VERSION", 0x22: "SSL_CN", 0x23: "SSL_CIPHER", 0x24: "SSL_SIG_ALG", 0x25: "SSL_KEY_ALG", 0x30: "NETNS", } def __init__(self, *args, _tlv_loc: Dict[str, int], **kwargs): super().__init__(*args, **kwargs) self.tlv_loc = _tlv_loc def __getattr__(self, item: str) -> Any: return self.get(item) def same_attribs(self, _raises: bool = False, **kwargs) -> bool: """ Helper function to test whether attribute(s) exists, and whether the attribute's value is as expected. For ProxyTLV, since it inherits from dict, you can also use direct dict comparison. 
:param _raises: If True, raises an Error instead of returning bool """ for k, v in kwargs.items(): actual = self.get(k, _NOT_FOUND) if actual is _NOT_FOUND: if _raises: raise KeyError(f"notfound:{k}") else: return False if actual != v: if _raises: raise ValueError(f"mismatch:{k} actual={actual!r} expect={v!r}") else: return False return True @classmethod def parse( cls, data: ByteString, partial_ok: bool = True, strict: bool = False, ) -> Tuple[Dict[str, Any], Dict[str, int]]: """ Parse a bunch of bytes into TLV Vectors. :param data: The bunch of bytes to parse :param partial_ok: Keep result of parsing so far if error encountered :param strict: If true, reject unrecognized TYPEs """ rslt: Dict[str, Any] = {} tlv_loc: Dict[str, int] = {} def _pars(chunk: ByteString, *, offset: int) -> None: i = 0 while i < len(chunk): typ = chunk[i] len_ = int.from_bytes(chunk[i + 1 : i + 3], "big") val = chunk[i + 3 : i + 3 + len_] if len(val) < len_: raise MalformedTLV(f"TLV 0x{typ:02X} is malformed!") typ_name = cls.PP2_TYPENAME.get(typ) if typ_name is None: typ_name = f"x{typ:02X}" if strict: raise UnknownTypeTLV(typ_name) tlv_loc[typ_name] = offset + i if typ_name == "SSL": rslt["SSL_CLIENT"] = val[0] rslt["SSL_VERIFY"] = int.from_bytes(val[1:5], "big") try: _pars(val[5:], offset=i) rslt["SSL"] = True except MalformedTLV: rslt["SSL"] = False if not partial_ok: raise else: return else: rslt[typ_name] = val i += 3 + len_ try: _pars(data, offset=0) except MalformedTLV: if not partial_ok: raise return rslt, tlv_loc @classmethod def from_raw( cls, raw: ByteString, strict: bool = False ) -> Optional["ProxyTLV"]: """ Parses raw bytes for TLV Vectors, decode them and giving them human-readable name if applicable, and returns a ProxyTLV object. :param raw: The raw bytes :param strict: If true, reject unrecognized TYPEs """ if len(raw) == 0: return None parsed, tlv_loc = cls.parse(raw, partial_ok=False, strict=strict) return cls(parsed, _tlv_loc=tlv_loc) @classmethod def name_to_num(cls, name: str) -> Optional[int]: """ Perform backmapping from TYPENAME to TYPE (numeric) :param name: TYPENAME to backmap :return: TYPE (int) if backmap available, else None """ for k, v in cls.PP2_TYPENAME.items(): if name == v: return k return None @public @attr.s(slots=True) class ProxyData: """ Represents data received during PROXY Protocol Handshake, in an already-parsed form """ version: Optional[int] = attr.ib(kw_only=True, init=True) """PROXY Protocol version; None if not recognized/malformed""" command: Optional[V2_CMD] = _anoinit(default=None) """PROXYv2 command""" family: Optional[AF] = _anoinit(default=None) """Address Family (AF)""" protocol: Optional[PROTO] = _anoinit(default=None) """Proxied Protocol (PROTO)""" src_addr: Optional[EndpointAddress] = _anoinit(default=None) dst_addr: Optional[EndpointAddress] = _anoinit(default=None) src_port: Optional[int] = _anoinit(default=None) dst_port: Optional[int] = _anoinit(default=None) rest: ByteString = _anoinit(default=b"") """ Rest of PROXY Protocol data following UNKNOWN (v1) or UNSPEC (v2), or containing undecoded TLV (v2). If the latter, you can use the ProxyTLV class to parse the binary data. """ whole_raw: bytearray = _anoinit(factory=bytearray) """ The whole undecoded PROXY Header as-received. This can be used to (1) perform troubleshooting, and/or (2) calculate CRC32C (which will NOT be implemented in this module to reduce number of deps. """ tlv_start: int = _anoinit(default=None) """ Byte offset of the first TLV Vector within whole_raw. 
""" error: str = _anoinit(default="") """ If not an empty string, contains the error encountered when parsing """ _tlv: Optional[ProxyTLV] = _anoinit(default=None) @property def valid(self) -> bool: return not (self.error or self.version is None or self.protocol is None) @property def tlv(self) -> Optional[ProxyTLV]: if self._tlv is None: with contextlib.suppress(MalformedTLV): self._tlv = ProxyTLV.from_raw(self.rest) return self._tlv def with_error(self, error_msg: str, log_prefix: bool = True) -> "ProxyData": """ Returns a ProxyData with its .error attrib set to error_msg, at the same time sending a log.warning. :param error_msg: Error message :param log_prefix: If True, add "PROXY error:" prefix to log message """ if log_prefix: log.warning(f"PROXY error: {error_msg}") else: log.warning(error_msg) self.error = error_msg return self def same_attribs(self, _raises: bool = False, **kwargs) -> bool: for k, v in kwargs.items(): actual = getattr(self, k, _NOT_FOUND) if actual is _NOT_FOUND: if _raises: raise KeyError(f"notfound:{k}") else: return False if actual != v: if _raises: raise ValueError(f"mismatch:{k} actual={actual!r} expect={v!r}") else: return False return True def __bool__(self) -> bool: return self.valid # endregion RE_ADDR_ALLOWCHARS = re.compile(r"^[0-9a-fA-F.:]+$") RE_PORT_NOLEADZERO = re.compile(r"^[1-9]\d{0,4}|0$") # Reference: https://github.com/haproxy/haproxy/blob/v2.3.0/doc/proxy-protocol.txt async def _get_v1(reader: AsyncReader, initial: ByteString = b"") -> ProxyData: proxy_data = ProxyData(version=1) proxy_data.whole_raw = bytearray(initial) log.debug("Get all PROXYv1 handshake") data = await reader.readuntil() log.debug("Got PROXYv1 handshake") proxy_data.whole_raw += data if len(proxy_data.whole_raw) > 107: return proxy_data.with_error("PROXYv1 header too long") if not data.endswith(b"\r\n"): return proxy_data.with_error("PROXYv1 malformed") # Split using b" " so two consecutive SP will result in an empty field # (instead of silently treated as an SP) data_parts = deque(data[:-2].split(b" ")) if data_parts.popleft() != b"": # If first elem is not b"", then between proxy_line[5] and first b" " there # are characters. 
Or, in other words, there are characters _right_after_ # the b"PROXY" signature return proxy_data.with_error("PROXYv1 wrong signature") proto = data_parts.popleft() if proto == b"UNKNOWN": proxy_data.protocol = PROTO.UNSPEC proxy_data.family = AF.UNSPEC proxy_data.rest = (b" " + b" ".join(data_parts)) if data_parts else b"" return proxy_data if proto.endswith(b"4"): af = AF.INET elif proto.endswith(b"6"): af = AF.INET6 else: return proxy_data.with_error("PROXYv1 unrecognized family") proxy_data.family = af if not proto.startswith(b"TCP"): return proxy_data.with_error("PROXYv1 unrecognized protocol") proxy_data.protocol = PROTO.STREAM async def get_ap(matcher: "re.Pattern[str]") -> str: chunk = data_parts.popleft().decode("latin-1") if not matcher.match(chunk): raise ValueError return chunk try: addr = await get_ap(RE_ADDR_ALLOWCHARS) src_addr = ip_address(addr) addr = await get_ap(RE_ADDR_ALLOWCHARS) dst_addr = ip_address(addr) except ValueError: return proxy_data.with_error("PROXYv1 address malformed") if af == AF.INET and not src_addr.version == dst_addr.version == 4: return proxy_data.with_error("PROXYv1 address not IPv4") elif af == AF.INET6 and not src_addr.version == dst_addr.version == 6: return proxy_data.with_error("PROXYv1 address not IPv6") proxy_data.src_addr = src_addr proxy_data.dst_addr = dst_addr try: port = await get_ap(RE_PORT_NOLEADZERO) proxy_data.src_port = int(port) port = await get_ap(RE_PORT_NOLEADZERO) proxy_data.dst_port = int(port) except ValueError: return proxy_data.with_error("PROXYv1 port malformed") if not 0 <= proxy_data.src_port <= 65535: return proxy_data.with_error("PROXYv1 src port out of bounds") if not 0 <= proxy_data.dst_port <= 65535: return proxy_data.with_error("PROXYv1 dst port out of bounds") if data_parts: return proxy_data.with_error("PROXYv1 unrecognized extraneous data") return proxy_data async def _get_v2(reader: AsyncReader, initial: ByteString = b"") -> ProxyData: proxy_data = ProxyData(version=2) whole_raw = bytearray() async def read_rest( field_name: str, field_buf: bytearray, field_len: int ) -> Tuple[bytearray, bytearray]: left = field_len - len(field_buf) while left > 0: piece = await reader.read(left) left -= len(piece) if not piece or left < 0: raise ConnectionError(f"Connection lost while waiting for {field_name}") field_buf += piece return field_buf[0:field_len], field_buf[field_len:] signature = bytearray(initial) log.debug("Waiting for PROXYv2 signature") signature, header = await read_rest("signature", signature, 12) if signature != V2_SIGNATURE: return proxy_data.with_error("PROXYv2 wrong signature") log.debug("Got PROXYv2 signature") whole_raw += signature log.debug("Waiting for PROXYv2 Header") header, tail_part = await read_rest("header", header, 4) log.debug("Got PROXYv2 header") whole_raw += header ver_cmd, fam_proto, len_tail = struct.unpack("!BBH", header) if (ver_cmd & 0xF0) != 0x20: return proxy_data.with_error("PROXYv2 illegal version") proxy_data.command = ver_cmd & 0x0F if proxy_data.command not in V2_VALID_CMDS: return proxy_data.with_error("PROXYv2 unsupported command") proxy_data.family = (fam_proto & 0xF0) >> 4 if proxy_data.family not in V2_VALID_FAMS: return proxy_data.with_error("PROXYv2 unsupported family") proxy_data.protocol = fam_proto & 0x0F if proxy_data.protocol not in V2_VALID_PROS: return proxy_data.with_error("PROXYv2 unsupported protocol") log.debug("Waiting for PROXYv2 tail part") tail_part, _ = await read_rest("tail part", tail_part, len_tail) log.debug("Got PROXYv2 tail part") whole_raw += 
tail_part proxy_data.whole_raw = whole_raw if fam_proto not in V2_PARSE_ADDR_FAMPRO: proxy_data.rest = tail_part return proxy_data if proxy_data.family == AF.INET: unpacker = "!4s4sHH" elif proxy_data.family == AF.INET6: unpacker = "!16s16sHH" else: assert proxy_data.family == AF.UNIX unpacker = "108s108s0s0s" addr_len = struct.calcsize(unpacker) addr_struct = tail_part[0:addr_len] if len(addr_struct) < addr_len: return proxy_data.with_error("PROXYv2 truncated address") tail_part = tail_part[addr_len:] s_addr, d_addr, s_port, d_port = struct.unpack(unpacker, addr_struct) if proxy_data.family == AF.INET: proxy_data.src_addr = IPv4Address(s_addr) proxy_data.dst_addr = IPv4Address(d_addr) proxy_data.src_port = s_port proxy_data.dst_port = d_port elif proxy_data.family == AF.INET6: proxy_data.src_addr = IPv6Address(s_addr) proxy_data.dst_addr = IPv6Address(d_addr) proxy_data.src_port = s_port proxy_data.dst_port = d_port else: assert proxy_data.family == AF.UNIX proxy_data.src_addr = s_addr proxy_data.dst_addr = d_addr proxy_data.rest = tail_part if tail_part: proxy_data.tlv_start = 16 + addr_len return proxy_data @public async def get_proxy(reader_func: AsyncReader) -> ProxyData: """ :param reader_func: Async function that implements the AsyncReader protocol. :return: Proxy Data """ log.debug("Waiting for PROXY signature") signature = await reader_func.readexactly(5) try: if signature == b"PROXY": log.debug("PROXY version 1") return await _get_v1(reader_func, signature) elif signature == V2_SIGNATURE[0:5]: log.debug("PROXY version 2") return await _get_v2(reader_func, signature) else: return ProxyData(version=None).with_error("PROXY unrecognized signature") except Exception as e: return ProxyData(version=None).with_error(f"PROXY exception: {str(e)}", False) aio-libs-aiosmtpd-b634d9b/aiosmtpd/py.typed000066400000000000000000000001041462210711200207540ustar00rootroot00000000000000# Marker file for PEP 561. The aiosmtpd package uses inline types. 
aio-libs-aiosmtpd-b634d9b/aiosmtpd/qa/000077500000000000000000000000001462210711200176635ustar00rootroot00000000000000aio-libs-aiosmtpd-b634d9b/aiosmtpd/qa/__init__.py000066400000000000000000000000001462210711200217620ustar00rootroot00000000000000aio-libs-aiosmtpd-b634d9b/aiosmtpd/qa/test_0packaging.py000066400000000000000000000066341462210711200233110ustar00rootroot00000000000000# Copyright 2014-2021 The aiosmtpd Developers # SPDX-License-Identifier: Apache-2.0 """Test meta / packaging""" import re import subprocess from datetime import datetime from itertools import tee from pathlib import Path import pytest # noinspection PyPackageRequirements from packaging import version from aiosmtpd import __version__ RE_DUNDERVER = re.compile(r"__version__\s*?=\s*?(['\"])(?P[^'\"]+)\1\s*$") RE_VERHEADING = re.compile(r"(?P([0-9.]+)\S*)\s*\((?P[^)]+)\)") @pytest.fixture def aiosmtpd_version() -> version.Version: return version.parse(__version__) class TestVersion: def test_pep440(self, aiosmtpd_version: version.Version): """Ensure version number compliance to PEP-440""" assert isinstance( aiosmtpd_version, version.Version ), "Version number must comply with PEP-440" # noinspection PyUnboundLocalVariable def test_ge_master( self, aiosmtpd_version: version.Version, capsys: pytest.CaptureFixture ): """Ensure version is monotonically increasing""" reference = "master:aiosmtpd/__init__.py" cmd = f"git show {reference}".split() try: with capsys.disabled(): master_smtp = subprocess.check_output(cmd).decode() # nosec except subprocess.CalledProcessError: pytest.skip("Skipping due to git error") try: m = next(m for ln in master_smtp.splitlines() if (m := RE_DUNDERVER.match(ln))) except StopIteration: pytest.fail(f"Cannot find __version__ in {reference}!") master_ver = version.parse(m.group("ver")) assert aiosmtpd_version >= master_ver, "Version number cannot be < master's" class TestNews: news_rst = list(Path(__file__).parent.parent.rglob("*/NEWS.rst"))[0] def test_NEWS_version(self, aiosmtpd_version: version.Version): with self.news_rst.open("rt") as fin: # pairwise() from https://docs.python.org/3/library/itertools.html a, b = tee(fin) next(b, None) for ln1, ln2 in zip(a, b): if not ln1[0].isdigit(): continue ln1 = ln1.strip() ln2 = ln2.strip() equals = "=" * len(ln1) if not ln2.startswith(equals): continue break newsvers = ln1.split()[0] newsver = version.parse(newsvers) if newsver.base_version < aiosmtpd_version.base_version: pytest.fail( f"NEWS.rst is not updated: " f"{newsver.base_version} < {aiosmtpd_version.base_version}" ) def test_release_date(self, aiosmtpd_version: version.Version): if aiosmtpd_version.pre is not None: pytest.skip("Not a release version") with self.news_rst.open("rt") as fin: for ln in fin: ln = ln.strip() m = RE_VERHEADING.match(ln) if not m: continue ver = version.Version(m.group("ver")) if ver != aiosmtpd_version: continue try: datetime.strptime(m.group("date"), "%Y-%m-%d") except ValueError: pytest.fail("Release version not dated correctly") break else: pytest.fail("Release version has no NEWS fragment") aio-libs-aiosmtpd-b634d9b/aiosmtpd/qa/test_1testsuite.py000066400000000000000000000063751462210711200234210ustar00rootroot00000000000000# Copyright 2014-2021 The aiosmtpd Developers # SPDX-License-Identifier: Apache-2.0 """Test the sanity of the test suite itself""" import re import pytest import socket from aiosmtpd.testing import statuscodes from itertools import combinations ENFORCE_ENHANCED_STATUS_CODES = False """Whether to do strict compliance checking against RFC 2034 
§ 4""" RE_ESC = re.compile(rb"(?P\d)\.\d+\.\d+\s") # noinspection PyUnresolvedReferences @pytest.fixture(scope="module", autouse=True) def exit_on_fail(request: pytest.FixtureRequest): # Behavior of this will be undefined if tests are running in parallel. # But since parallel running is not practically possible (the ports will conflict), # then I don't think that will be a problem. failcount = request.session.testsfailed yield if request.session.testsfailed != failcount: pytest.exit("Test Suite is Not Sane!") STATUS_CODES = { k: v for k, v in vars(statuscodes.SMTP_STATUS_CODES).items() if k.startswith("S") } class TestStatusCodes: def test_elemtype(self): """Ensure status codes are instances of StatusCode""" for value in STATUS_CODES.values(): assert isinstance(value, statuscodes.StatusCode) def test_nameval(self): """Ensure each status code constant has SMTP Code embedded in the name""" for key, value in STATUS_CODES.items(): assert int(key[1:4]) == value.code def test_enhanced(self): """Compliance with RFC 2034 § 4""" total_correct = 0 for key, value in STATUS_CODES.items(): if key == "S250_FQDN": # FQDNs are sometimes funky and can be misconstrued as ESC # Better to skip em continue assert isinstance(value, statuscodes.StatusCode) m = RE_ESC.match(value.mesg) if ENFORCE_ENHANCED_STATUS_CODES: assert m is not None, f"{key} does not have Enhanced Status Code" elif m is None: continue esc1 = m.group("digit1") # noinspection PyTypeChecker assert str(value.code // 100) == esc1.decode(), ( f"{key}: First digit of Enhanced Status Code different from " f"first digit of Standard Status Code" ) # Can't use enumerate(); total_correct does not increase in lockstep with # the loop (there are several "continue"s above) total_correct += 1 # noqa: SIM113 assert total_correct > 0 def test_commands(self): """ Ensure lists in statuscodes are individual objects, so changes in one list won't affect the other lists """ lists = [ statuscodes._COMMON_COMMANDS, statuscodes.SUPPORTED_COMMANDS_NOTLS, statuscodes.SUPPORTED_COMMANDS_TLS, statuscodes.SUPPORTED_COMMANDS_LMTP, ] for one, two in combinations(lists, 2): assert one is not two class TestHarness: def test_fqdn_cached(self): """ Ensure that socket.getfqdn does not change between calls """ fqdn = socket.getfqdn() assert isinstance(fqdn, str) assert socket.getfqdn() == fqdn, "socket.getfqdn() changed between calls!" 
aio-libs-aiosmtpd-b634d9b/aiosmtpd/smtp.py000066400000000000000000001723351462210711200206320ustar00rootroot00000000000000# Copyright 2014-2021 The aiosmtpd Developers # SPDX-License-Identifier: Apache-2.0 import asyncio import asyncio.sslproto as sslproto import binascii import collections import enum import inspect import logging import re import socket import ssl from base64 import b64decode, b64encode from email._header_value_parser import get_addr_spec, get_angle_addr from email.errors import HeaderParseError from typing import ( Any, AnyStr, Awaitable, Callable, Dict, Iterable, List, MutableMapping, NamedTuple, Optional, Protocol, Sequence, Tuple, TypeVar, Union, ) from warnings import warn import attr from public import public from aiosmtpd import __version__, _get_or_new_eventloop from aiosmtpd.proxy_protocol import ProxyData, get_proxy # region #### Custom Data Types ####################################################### class _Missing(enum.Enum): MISSING = object() class _AuthMechAttr(NamedTuple): method: "AuthMechanismType" is_builtin: bool class _DataState(enum.Enum): NOMINAL = enum.auto() TOO_LONG = enum.auto() TOO_MUCH = enum.auto() AuthCallbackType = Callable[[str, bytes, bytes], bool] AuthenticatorType = Callable[["SMTP", "Session", "Envelope", str, Any], "AuthResult"] AuthMechanismType = Callable[["SMTP", List[str]], Awaitable[Any]] _TriStateType = Union[None, _Missing, bytes] RT = TypeVar("RT") # "ReturnType" DecoratorType = Callable[[Callable[..., RT]], Callable[..., RT]] # endregion # region #### Constant & Constant-likes ############################################### __all__ = [ "AuthCallbackType", "AuthMechanismType", "MISSING", "__version__", ] # Will be added to by @public __ident__ = 'Python SMTP {}'.format(__version__) log = logging.getLogger('mail.log') BOGUS_LIMIT = 5 CALL_LIMIT_DEFAULT = 20 DATA_SIZE_DEFAULT = 2**25 # Where does this number come from, I wonder... EMPTY_BARR = bytearray() EMPTYBYTES = b'' MISSING = _Missing.MISSING VALID_AUTHMECH = re.compile(r"[A-Z0-9_-]+\Z") # https://tools.ietf.org/html/rfc3207.html#page-3 ALLOWED_BEFORE_STARTTLS = {"NOOP", "EHLO", "STARTTLS", "QUIT"} # Auth hiding regexes CLIENT_AUTH_B = re.compile( # Matches "AUTH" br"(?P\s*AUTH\s+\S+[^\S\r\n]+)" # Param to AUTH . We only need to sanitize if param is given, which # for some mechanisms contain sensitive info. If no param is given, then we # can skip (match fails) br"(\S+)" # Optional bCRLF at end. Why optional? Because we also want to sanitize the # stripped line. If no bCRLF, then this group will be b"" br"(?P(?:\r\n)?)", re.IGNORECASE ) """Regex that matches 'AUTH ' commend""" # endregion @attr.s class AuthResult: """ Contains the result of authentication, to be returned to the smtp_AUTH method. All initialization arguments _must_ be keyworded! """ success: bool = attr.ib(kw_only=True) """Indicates authentication is successful or not""" handled: bool = attr.ib(kw_only=True, default=True) """ True means everything (including sending of status code) has been handled by the AUTH handler and smtp_AUTH should not do anything else. Applicable only if success == False. """ message: Optional[str] = attr.ib(kw_only=True, default=None) """ Optional message for additional handling by smtp_AUTH. Applicable only if handled == False. """ auth_data: Optional[Any] = attr.ib(kw_only=True, default=None, repr=lambda x: "...") """ Optional free-form authentication data. For the built-in mechanisms, it is usually an instance of LoginPassword. 
Other implementations are free to use any data structure here. """ @public class LoginPassword(NamedTuple): login: bytes password: bytes def __str__(self) -> str: return f"LoginPassword(login='{self.login.decode()}', password=...)" def __repr__(self) -> str: return str(self) @public class Session: def __init__(self, loop: asyncio.AbstractEventLoop): self.peer: Optional[str] = None self.ssl: Optional[dict[str, Any]] = None self.host_name: Optional[str] = None self.extended_smtp = False self.loop = loop self.proxy_data: Optional[ProxyData] = None """Data from PROXY Protocol handshake""" self._login_data = None self.auth_data = None """ New system *optional* authentication data; can contain anything returned by the authenticator callback. Can even be None; check `authenticated` attribute to determine if AUTH successful or not. """ self.authenticated: Optional[bool] = None @property def login_data(self) -> Any: """Legacy login_data, usually containing the username""" log.warning( "Session.login_data is deprecated and will be removed in version 2.0" ) return self._login_data @login_data.setter def login_data(self, value: Any) -> None: log.warning( "Session.login_data is deprecated and will be removed in version 2.0" ) self._login_data = value @public class Envelope: def __init__(self) -> None: self.mail_from: Optional[str] = None self.mail_options: List[str] = [] self.smtp_utf8 = False self.content: Union[None, bytes, str] = None self.original_content: Optional[bytes] = None self.rcpt_tos: List[str] = [] self.rcpt_options: List[str] = [] # This is here to enable debugging output when the -E option is given to the # unit test suite. In that case, this function is mocked to set the debug # level on the loop (as if PYTHONASYNCIODEBUG=1 were set). def make_loop() -> asyncio.AbstractEventLoop: return _get_or_new_eventloop() @public def syntax( text: str, extended: Optional[str] = None, when: Optional[str] = None ) -> DecoratorType: """ A @decorator that provides helptext for (E)SMTP HELP. Applies for smtp_* methods only! :param text: Help text for (E)SMTP HELP :param extended: Additional text for ESMTP HELP (appended to text) :param when: The name of the attribute of SMTP class to check; if the value of the attribute is false-y then HELP will not be available for the command """ def decorator(f: Callable[..., RT]) -> Callable[..., RT]: f.__smtp_syntax__ = text # type: ignore[attr-defined] f.__smtp_syntax_extended__ = extended # type: ignore[attr-defined] f.__smtp_syntax_when__ = when # type: ignore[attr-defined] return f return decorator @public def auth_mechanism(actual_name: str) -> DecoratorType: """ A @decorator to explicitly specifies the name of the AUTH mechanism implemented by the function/method this decorates :param actual_name: Name of AUTH mechanism. Must consists of [A-Z0-9_-] only. 
Will be converted to uppercase """ def decorator(f: Callable[..., RT]) -> Callable[..., RT]: f.__auth_mechanism_name__ = actual_name # type: ignore[attr-defined] return f actual_name = actual_name.upper() if not VALID_AUTHMECH.match(actual_name): raise ValueError(f"Invalid AUTH mechanism name: {actual_name}") return decorator def login_always_fail( mechanism: str, login: bytes, password: bytes ) -> bool: return False def is_int(o: Any) -> bool: return isinstance(o, int) @public class TLSSetupException(Exception): pass @public def sanitize(text: bytes) -> bytes: m = CLIENT_AUTH_B.match(text) if m: return m.group("authm") + b"********" + m.group("crlf") return text @public def sanitized_log(func: Callable[..., None], msg: AnyStr, *args, **kwargs) -> None: """ Sanitize args before passing to a logging function. """ sanitized_args = [ sanitize(a) if isinstance(a, bytes) else a for a in args ] func(msg, *sanitized_args, **kwargs) @public class SMTP(asyncio.StreamReaderProtocol): """ `Documentation can be found here `_ """ command_size_limit = 512 command_size_limits: Dict[str, int] = collections.defaultdict( lambda: SMTP.command_size_limit) line_length_limit = 1001 """Maximum line length according to RFC 5321 s 4.5.3.1.6""" # The number comes from this calculation: # (RFC 5322 s 2.1.1 + RFC 6532 s 3.4) 998 octets + CRLF = 1000 octets # (RFC 5321 s 4.5.3.1.6) 1000 octets + "transparent dot" = 1001 octets local_part_limit: int = 0 """ Maximum local part length. (RFC 5321 § 4.5.3.1.1 specifies 64, but lenient) If 0 or Falsey, local part length is unlimited. """ AuthLoginUsernameChallenge = "User Name\x00" AuthLoginPasswordChallenge = "Password\x00" def __init__( self, handler: Any, *, data_size_limit: Optional[int] = DATA_SIZE_DEFAULT, enable_SMTPUTF8: bool = False, decode_data: bool = False, hostname: Optional[str] = None, ident: Optional[str] = None, tls_context: Optional[ssl.SSLContext] = None, require_starttls: bool = False, timeout: float = 300, auth_required: bool = False, auth_require_tls: bool = True, auth_exclude_mechanism: Optional[Iterable[str]] = None, auth_callback: Optional[AuthCallbackType] = None, command_call_limit: Union[int, Dict[str, int], None] = None, authenticator: Optional[AuthenticatorType] = None, proxy_protocol_timeout: Optional[Union[int, float]] = None, loop: Optional[asyncio.AbstractEventLoop] = None ): self.__ident__ = ident or __ident__ self.loop = loop if loop else make_loop() super().__init__( asyncio.StreamReader(loop=self.loop, limit=self.line_length_limit), client_connected_cb=self._cb_client_connected, loop=self.loop) self.event_handler = handler assert data_size_limit is None or isinstance(data_size_limit, int) self.data_size_limit = data_size_limit self.enable_SMTPUTF8 = enable_SMTPUTF8 self._decode_data = decode_data self.command_size_limits.clear() if hostname: self.hostname = hostname else: self.hostname = socket.getfqdn() self.tls_context = tls_context if tls_context: if (tls_context.verify_mode not in {ssl.CERT_NONE, ssl.CERT_OPTIONAL}): # noqa: DUO122 log.warning("tls_context.verify_mode not in {CERT_NONE, " "CERT_OPTIONAL}; this might cause client " "connection problems") elif tls_context.check_hostname: log.warning("tls_context.check_hostname == True; " "this might cause client connection problems") self.require_starttls = tls_context and require_starttls self._timeout_duration = timeout self._timeout_handle: Optional[asyncio.TimerHandle] = None self._tls_handshake_okay = True self._tls_protocol: Optional[sslproto.SSLProtocol] = None 
self._original_transport: Optional[asyncio.BaseTransport] = None self.session: Optional[Session] = None self.envelope: Optional[Envelope] = None self.transport: Optional[asyncio.BaseTransport] = None self._handler_coroutine: Optional[asyncio.Task[None]] = None if not auth_require_tls and auth_required: warn("Requiring AUTH while not requiring TLS " "can lead to security vulnerabilities!") log.warning("auth_required == True but auth_require_tls == False") self._auth_require_tls = auth_require_tls if proxy_protocol_timeout is not None: if proxy_protocol_timeout <= 0: raise ValueError("proxy_protocol_timeout must be > 0") elif proxy_protocol_timeout < 3.0: log.warning("proxy_protocol_timeout < 3.0") self._proxy_timeout = proxy_protocol_timeout self._authenticator: Optional[AuthenticatorType] self._auth_callback: Optional[AuthCallbackType] if authenticator is not None: self._authenticator = authenticator self._auth_callback = None else: self._auth_callback = auth_callback or login_always_fail self._authenticator = None self._auth_required = auth_required # Get hooks & methods to significantly speedup getattr's self._auth_methods: Dict[str, _AuthMechAttr] = { getattr( mfunc, "__auth_mechanism_name__", mname.replace("auth_", "").replace("__", "-") ): _AuthMechAttr(mfunc, obj is self) for obj in (self, handler) for mname, mfunc in inspect.getmembers(obj) if mname.startswith("auth_") } for m in (auth_exclude_mechanism or []): self._auth_methods.pop(m, None) log.info( "Available AUTH mechanisms: " + " ".join( m + "(builtin)" if impl.is_builtin else m for m, impl in sorted(self._auth_methods.items()) ) ) self._handle_hooks: Dict[str, Callable] = { m.replace("handle_", ""): getattr(handler, m) for m in dir(handler) if m.startswith("handle_") } # When we've deprecated the 4-arg form of handle_EHLO, # we can -- and should -- remove this whole code block ehlo_hook = self._handle_hooks.get("EHLO") if ehlo_hook is None: self._ehlo_hook_ver = None else: ehlo_hook_params = inspect.signature(ehlo_hook).parameters if len(ehlo_hook_params) == 4: self._ehlo_hook_ver = "old" warn("Use the 5-argument handle_EHLO() hook instead of " "the 4-argument handle_EHLO() hook; " "support for the 4-argument handle_EHLO() hook will be " "removed in version 2.0", DeprecationWarning) elif len(ehlo_hook_params) == 5: self._ehlo_hook_ver = "new" else: raise RuntimeError("Unsupported EHLO Hook") self._smtp_methods: Dict[str, Any] = { m.replace("smtp_", ""): getattr(self, m) for m in dir(self) if m.startswith("smtp_") } self._call_limit_default: int if command_call_limit is None: self._enforce_call_limit = False else: self._enforce_call_limit = True if isinstance(command_call_limit, int): self._call_limit_base = {} self._call_limit_default = command_call_limit elif isinstance(command_call_limit, dict): if not all(map(is_int, command_call_limit.values())): raise TypeError("All command_call_limit values must be int") self._call_limit_base = command_call_limit self._call_limit_default = command_call_limit.get( "*", CALL_LIMIT_DEFAULT ) else: raise TypeError("command_call_limit must be int or Dict[str, int]") def _create_session(self) -> Session: return Session(self.loop) def _create_envelope(self) -> Envelope: return Envelope() async def _call_handler_hook(self, command: str, *args) -> Any: hook = self._handle_hooks.get(command) if hook is None: return MISSING status = await hook(self, self.session, self.envelope, *args) return status @property def max_command_size_limit(self) -> int: try: return 
max(self.command_size_limits.values()) except ValueError: return self.command_size_limit def __del__(self): # pragma: nocover # This is nocover-ed because the contents *totally* does NOT affect function- # ality, and in addition this comes directly from StreamReaderProtocol.__del__() # but with a getattr()+check addition to stop the annoying (but harmless) # "exception ignored" messages caused by AttributeError when self._closed is # missing (which seems to happen randomly). closed = getattr(self, "_closed", None) if closed is None: return if closed.done() and not closed.cancelled(): closed.exception() def connection_made(self, transport: asyncio.BaseTransport) -> None: # Reset state due to rfc3207 part 4.2. self._set_rset_state() self.session = self._create_session() self.session.peer = transport.get_extra_info('peername') self._reset_timeout() seen_starttls = (self._original_transport is not None) if self.transport is not None and seen_starttls: # It is STARTTLS connection over normal connection. self._reader._transport = transport # type: ignore[attr-defined] self._writer._transport = transport # type: ignore[attr-defined] self.transport = transport # Discard any leftover unencrypted data # See https://tools.ietf.org/html/rfc3207#page-7 self._reader._buffer.clear() # type: ignore[attr-defined] # Do SSL certificate checking as rfc3207 part 4.1 says. Why is # _extra a protected attribute? assert self._tls_protocol is not None self.session.ssl = self._tls_protocol._extra hook = self._handle_hooks.get("STARTTLS") if hook is None: self._tls_handshake_okay = True else: self._tls_handshake_okay = hook( self, self.session, self.envelope) else: super().connection_made(transport) self.transport = transport log.info('Peer: %r', self.session.peer) # Process the client's requests. self._handler_coroutine = self.loop.create_task( self._handle_client()) def connection_lost(self, error: Optional[Exception]) -> None: assert self.session is not None log.info('%r connection lost', self.session.peer) assert self._timeout_handle is not None self._timeout_handle.cancel() # If STARTTLS was issued, then our transport is the SSL protocol # transport, and we need to close the original transport explicitly, # otherwise an unexpected eof_received() will be called *after* the # connection_lost(). At that point the stream reader will already be # destroyed and we'll get a traceback in super().eof_received() below. if self._original_transport is not None: self._original_transport.close() super().connection_lost(error) assert self._handler_coroutine is not None self._handler_coroutine.cancel() self.transport = None def eof_received(self) -> Optional[bool]: assert self.session is not None log.info('%r EOF received', self.session.peer) assert self._handler_coroutine is not None self._handler_coroutine.cancel() if self.session.ssl is not None: # If STARTTLS was issued, return False, because True has no effect # on an SSL transport and raises a warning. Our superclass has no # way of knowing we switched to SSL so it might return True. 
return False return super().eof_received() def _reset_timeout(self, duration: Optional[float] = None) -> None: if self._timeout_handle is not None: self._timeout_handle.cancel() self._timeout_handle = self.loop.call_later( duration or self._timeout_duration, self._timeout_cb ) def _timeout_cb(self): assert self.session is not None log.info('%r connection timeout', self.session.peer) # Calling close() on the transport will trigger connection_lost(), # which gracefully closes the SSL transport if required and cleans # up state. assert self.transport is not None self.transport.close() def _cb_client_connected( self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter ): # This is redundant since we subclass StreamReaderProtocol, but I like # the shorter names. self._reader = reader self._writer = writer def _set_post_data_state(self): """Reset state variables to their post-DATA state.""" self.envelope = self._create_envelope() def _set_rset_state(self): """Reset all state variables except the greeting.""" self._set_post_data_state() async def push(self, status: AnyStr): if isinstance(status, str): response = bytes( status, 'utf-8' if self.enable_SMTPUTF8 else 'ascii') else: response = status assert isinstance(response, bytes) self._writer.write(response + b"\r\n") assert self.session is not None log.debug("%r << %r", self.session.peer, response) await self._writer.drain() async def handle_exception(self, error: Exception) -> str: if hasattr(self.event_handler, 'handle_exception'): status = await self.event_handler.handle_exception(error) return status else: assert self.session is not None log.exception('%r SMTP session exception', self.session.peer) status = '500 Error: ({}) {}'.format( error.__class__.__name__, str(error)) return status async def _handle_client(self) -> None: assert self.session is not None log.info('%r handling connection', self.session.peer) if self._proxy_timeout is not None: self._reset_timeout(self._proxy_timeout) log.debug("%r waiting PROXY handshake", self.session.peer) self.session.proxy_data = await get_proxy(self._reader) if self.session.proxy_data: log.info("%r valid PROXY handshake", self.session.peer) status = await self._call_handler_hook("PROXY", self.session.proxy_data) log.debug("%r handle_PROXY returned %r", self.session.peer, status) else: log.warning("%r invalid PROXY handshake", self.session.peer) status = False if status is MISSING or not status: log.info("%r rejected by handle_PROXY", self.session.peer) assert self.transport is not None self.transport.close() return self._reset_timeout() await self.push('220 {} {}'.format(self.hostname, self.__ident__)) if self._enforce_call_limit: call_limit: MutableMapping[str, int] = collections.defaultdict( lambda: self._call_limit_default, self._call_limit_base ) else: # Not used, but this silences code inspection tools call_limit = {} bogus_budget = BOGUS_LIMIT while self.transport is not None: # pragma: nobranch try: try: line: bytes = await self._reader.readuntil() except asyncio.LimitOverrunError as error: # Line too long. Read until end of line before sending 500. await self._reader.read(error.consumed) while True: try: await self._reader.readuntil() break except asyncio.LimitOverrunError as e: # Line is even longer... await self._reader.read(e.consumed) continue # Now that we have read a full line from the client, # send error response and read the next command line. 
await self.push('500 Command line too long') continue sanitized_log(log.debug, '_handle_client readline: %r', line) # XXX this rstrip may not completely preserve old behavior. line = line.rstrip(b'\r\n') sanitized_log(log.info, '%r >> %r', self.session.peer, line) if not line: await self.push('500 Error: bad syntax') continue command_bytes, _, arg_bytes = line.partition(b" ") # Decode to string only the command name part, which must be # ASCII as per RFC. If there is an argument, it is decoded to # UTF-8/surrogateescape so that non-UTF-8 data can be # re-encoded back to the original bytes when the SMTP command # is handled. try: command = command_bytes.upper().decode(encoding='ascii') except UnicodeDecodeError: await self.push('500 Error: bad syntax') continue if not arg_bytes: arg: Optional[str] = None else: arg_bytes = arg_bytes.strip() # Remote SMTP servers can send us UTF-8 content despite # whether they've declared to do so or not. Some old # servers can send 8-bit data. Use surrogateescape so # that the fidelity of the decoding is preserved, and the # original bytes can be retrieved. if self.enable_SMTPUTF8: arg = str( arg_bytes, encoding='utf-8', errors='surrogateescape') else: try: arg = str(arg_bytes, encoding='ascii', errors='strict') except UnicodeDecodeError: # This happens if enable_SMTPUTF8 is false, meaning # that the server explicitly does not want to # accept non-ASCII, but the client ignores that and # sends non-ASCII anyway. await self.push('500 Error: strict ASCII mode') # Should we await self.handle_exception()? continue max_sz = ( self.command_size_limits[command] if self.session.extended_smtp else self.command_size_limit ) if len(line) > max_sz: await self.push('500 Command line too long') continue if not self._tls_handshake_okay and command != 'QUIT': await self.push( '554 Command refused due to lack of security') continue if (self.require_starttls and not self._tls_protocol and command not in ALLOWED_BEFORE_STARTTLS): # RFC3207 part 4 await self.push('530 Must issue a STARTTLS command first') continue if self._enforce_call_limit: budget = call_limit[command] if budget < 1: log.warning( "%r over limit for %s", self.session.peer, command ) await self.push( f"421 4.7.0 {command} sent too many times" ) self.transport.close() continue call_limit[command] = budget - 1 method = self._smtp_methods.get(command) if method is None: log.warning("%r unrecognised: %s", self.session.peer, command) bogus_budget -= 1 if bogus_budget < 1: log.warning("%r too many bogus commands", self.session.peer) await self.push( "502 5.5.1 Too many unrecognized commands, goodbye." ) self.transport.close() continue await self.push( f'500 Error: command "{command}" not recognized' ) continue # Received a valid command, reset the timer. self._reset_timeout() await method(arg) except asyncio.CancelledError: # The connection got reset during the DATA command. # XXX If handler method raises ConnectionResetError, we should # verify that it was actually self._reader that was reset. 
log.info('%r Connection lost during _handle_client()', self.session.peer) self._writer.close() raise except ConnectionResetError: log.info('%r Connection lost during _handle_client()', self.session.peer) self._writer.close() raise except Exception as error: status = None try: status = await self.handle_exception(error) except Exception as inner_error: try: log.exception('%r Exception in handle_exception()', self.session.peer) status = '500 Error: ({}) {}'.format( inner_error.__class__.__name__, str(inner_error)) except Exception: status = '500 Error: Cannot describe error' finally: if isinstance(error, TLSSetupException): # This code branch is inside None check for self.transport # so there shouldn't be a None self.transport but pytype # still complains, so silence that error. self.transport.close() # pytype: disable=attribute-error self.connection_lost(error) else: # The value of status is being set with ex-except and it # shouldn't be None, but pytype isn't able to infer that # so ignore the error related to wrong argument types. await self.push(status) # pytype: disable=wrong-arg-types async def check_helo_needed(self, helo: str = "HELO") -> bool: """ Check if HELO/EHLO is needed. :param helo: The actual string of HELO/EHLO :return: True if HELO/EHLO is needed """ assert self.session is not None if not self.session.host_name: await self.push(f'503 Error: send {helo} first') return True return False async def check_auth_needed(self, caller_method: str) -> bool: """ Check if AUTH is needed. :param caller_method: The SMTP method needing a check (for logging) :return: True if AUTH is needed """ assert self.session is not None if self._auth_required and not self.session.authenticated: log.info(f'{caller_method}: Authentication required') await self.push('530 5.7.0 Authentication required') return True return False # SMTP and ESMTP commands @syntax('HELO hostname') async def smtp_HELO(self, hostname: str): if not hostname: await self.push('501 Syntax: HELO hostname') return self._set_rset_state() assert self.session is not None self.session.extended_smtp = False status = await self._call_handler_hook('HELO', hostname) if status is MISSING: self.session.host_name = hostname status = '250 {}'.format(self.hostname) await self.push(status) @syntax('EHLO hostname') async def smtp_EHLO(self, hostname: str): if not hostname: await self.push('501 Syntax: EHLO hostname') return response = ['250-' + self.hostname, ] self._set_rset_state() assert self.session is not None self.session.extended_smtp = True if self.data_size_limit: response.append(f'250-SIZE {self.data_size_limit}') self.command_size_limits['MAIL'] += 26 if not self._decode_data: response.append('250-8BITMIME') if self.enable_SMTPUTF8: response.append('250-SMTPUTF8') self.command_size_limits['MAIL'] += 10 if self.tls_context and not self._tls_protocol: response.append('250-STARTTLS') if not self._auth_require_tls or self._tls_protocol: response.append( "250-AUTH " + " ".join(sorted(self._auth_methods.keys())) ) if hasattr(self, 'ehlo_hook'): warn('Use handler.handle_EHLO() instead of .ehlo_hook()', DeprecationWarning) await self.ehlo_hook() if self._ehlo_hook_ver is None: self.session.host_name = hostname response.append('250 HELP') elif self._ehlo_hook_ver == "old": # Old behavior: Send all responses first... for r in response: await self.push(r) # ... then send the response from the hook. response = [await self._call_handler_hook("EHLO", hostname)] # (The hook might internally send its own responses.) 
elif self._ehlo_hook_ver == "new": # pragma: nobranch # New behavior: hand over list of responses so far to the hook, and # REPLACE existing list of responses with what the hook returns. # We will handle the push()ing response.append('250 HELP') response = await self._call_handler_hook("EHLO", hostname, response) for r in response: await self.push(r) @syntax('NOOP [ignored]') async def smtp_NOOP(self, arg: str): status = await self._call_handler_hook('NOOP', arg) await self.push('250 OK' if status is MISSING else status) @syntax('QUIT') async def smtp_QUIT(self, arg: str): if arg: await self.push('501 Syntax: QUIT') else: status = await self._call_handler_hook('QUIT') await self.push('221 Bye' if status is MISSING else status) assert self._handler_coroutine is not None self._handler_coroutine.cancel() assert self.transport is not None self.transport.close() @syntax('STARTTLS', when='tls_context') async def smtp_STARTTLS(self, arg: str): if arg: await self.push('501 Syntax: STARTTLS') return if not self.tls_context: await self.push('454 TLS not available') return await self.push('220 Ready to start TLS') # Create a waiter Future to wait for SSL handshake to complete waiter = self.loop.create_future() # Create SSL layer. # noinspection PyTypeChecker self._tls_protocol = sslproto.SSLProtocol( self.loop, self, self.tls_context, waiter, server_side=True) # Reconfigure transport layer. Keep a reference to the original # transport so that we can close it explicitly when the connection is # lost. self._original_transport = self.transport assert self._original_transport is not None self._original_transport.set_protocol(self._tls_protocol) # Reconfigure the protocol layer. Why is the app transport a protected # property, if it MUST be used externally? self.transport = self._tls_protocol._app_transport self._tls_protocol.connection_made(self._original_transport) # wait until handshake complete try: await waiter except asyncio.CancelledError: raise except Exception as error: raise TLSSetupException() from error @syntax("AUTH ") async def smtp_AUTH(self, arg: str) -> None: if await self.check_helo_needed("EHLO"): return assert self.session is not None if not self.session.extended_smtp: await self.push("500 Error: command 'AUTH' not recognized") return elif self._auth_require_tls and not self._tls_protocol: await self.push("538 5.7.11 Encryption required for requested " "authentication mechanism") return elif self.session.authenticated: await self.push('503 Already authenticated') return elif not arg: await self.push('501 Not enough value') return args = arg.split() if len(args) > 2: await self.push('501 Too many values') return mechanism = args[0] if mechanism not in self._auth_methods: await self.push('504 5.5.4 Unrecognized authentication type') return CODE_SUCCESS = "235 2.7.0 Authentication successful" CODE_INVALID = "535 5.7.8 Authentication credentials invalid" status = await self._call_handler_hook('AUTH', args) if status is MISSING: auth_method = self._auth_methods[mechanism] log.debug( "Using %s auth_ hook for %r", "builtin" if auth_method.is_builtin else "handler", mechanism ) # Pass 'self' to method so external methods can leverage this # class's helper methods such as push() auth_result = await auth_method.method(self, args) log.debug("auth_%s returned %r", mechanism, auth_result) # New system using `authenticator` and AuthResult if isinstance(auth_result, AuthResult): if auth_result.success: self.session.authenticated = True _auth_data = auth_result.auth_data self.session.auth_data = 
_auth_data # Custom mechanisms might not implement the "login" attribute, and # that's okay. self.session.login_data = getattr(_auth_data, "login", None) status = auth_result.message or CODE_SUCCESS else: if auth_result.handled: status = None elif auth_result.message: status = auth_result.message else: status = CODE_INVALID # Old system using `auth_callback` and _TriState elif auth_result is None: # None means there's an error already handled by method and # we don't need to do anything more status = None elif auth_result is MISSING or auth_result is False: # MISSING means no error in AUTH process, but credentials # is rejected / not valid status = CODE_INVALID else: self.session.login_data = auth_result status = CODE_SUCCESS if status is not None: # pragma: no branch await self.push(status) async def challenge_auth( self, challenge: Union[str, bytes], encode_to_b64: bool = True, log_client_response: bool = False, ) -> Union[_Missing, bytes]: """ Send challenge during authentication. "334 " will be prefixed, so do NOT put "334 " at start of server_message. :param challenge: Challenge to send to client. If str, will be utf8-encoded. :param encode_to_b64: If true, then perform Base64 encoding on challenge :param log_client_response: Perform logging of client's response. WARNING: Might cause leak of sensitive information! Do not turn on unless _absolutely_ necessary! :return: Response from client, or MISSING """ challenge = ( challenge.encode() if isinstance(challenge, str) else challenge ) assert isinstance(challenge, bytes) # Trailing space is MANDATORY even if challenge is empty. # See: # - https://tools.ietf.org/html/rfc4954#page-4 ¶ 5 # - https://tools.ietf.org/html/rfc4954#page-13 "continue-req" challenge = b"334 " + (b64encode(challenge) if encode_to_b64 else challenge) assert self.session is not None log.debug("%r << challenge: %r", self.session.peer, challenge) await self.push(challenge) line = await self._reader.readline() # pytype: disable=attribute-error if log_client_response: warn("AUTH interaction logging is enabled!") warn("Sensitive information might be leaked!") log.debug("%r >> %r", self.session.peer, line) blob: bytes = line.strip() # '*' handling in accordance with RFC4954 if blob == b"*": log.warning("%r aborted AUTH with '*'", self.session.peer) await self.push("501 5.7.0 Auth aborted") return MISSING try: decoded_blob = b64decode(blob, validate=True) except binascii.Error: log.debug("%r can't decode base64: %s", self.session.peer, blob) await self.push("501 5.5.2 Can't decode base64") return MISSING return decoded_blob _334_PREFIX = re.compile(r"^334 ") async def _auth_interact( self, server_message: str ) -> Union[_Missing, bytes]: # pragma: nocover warn( "_auth_interact will be deprecated in version 2.0. 
" "Please use challenge_auth() instead.", DeprecationWarning ) return await self.challenge_auth( challenge=self._334_PREFIX.sub("", server_message), encode_to_b64=False, ) def _authenticate(self, mechanism: str, auth_data: Any) -> AuthResult: if self._authenticator is not None: # self.envelope is likely still empty, but we'll pass it anyways to # make the invocation similar to the one in _call_handler_hook assert self.session is not None assert self.envelope is not None return self._authenticator( self, self.session, self.envelope, mechanism, auth_data ) else: assert self._auth_callback is not None assert isinstance(auth_data, LoginPassword) if self._auth_callback(mechanism, *auth_data): return AuthResult(success=True, handled=True, auth_data=auth_data) else: return AuthResult(success=False, handled=False) # IMPORTANT NOTES FOR THE auth_* METHODS # ====================================== # Please note that there are two systems for return values in #2. # # 1. For internal methods, due to how they are called, we must ignore the first arg # 2. (OLD SYSTEM) All auth_* methods can return one of three values: # - None: An error happened and handled; # smtp_AUTH should do nothing more # - MISSING or False: Authentication failed, but not because of error # - [Any]: Authentication succeeded and this is the 'identity' of # the SMTP user # - 'identity' is not always username, depending on the auth mecha- # nism. Might be a session key, a one-time user ID, or any kind of # object, actually. # 2. (NEW SYSTEM) All auth_* methods must return an AuthResult object. # For explanation on the object's attributes, # see the AuthResult class definition. # 3. Auth credentials checking is performed in the auth_* methods because # more advanced auth mechanism might not return login+password pair # (see #2 above) async def auth_PLAIN(self, _, args: List[str]) -> AuthResult: login_and_password: _TriStateType if len(args) == 1: login_and_password = await self.challenge_auth("") if login_and_password is MISSING: return AuthResult(success=False) else: try: login_and_password = b64decode(args[1].encode(), validate=True) except Exception: await self.push("501 5.5.2 Can't decode base64") return AuthResult(success=False, handled=True) try: # login data is "{authz_id}\x00{login_id}\x00{password}" # authz_id can be null, and currently ignored # See https://tools.ietf.org/html/rfc4616#page-3 _, login, password = login_and_password.split(b"\x00") # noqa: E501 except ValueError: # not enough args await self.push("501 5.5.2 Can't split auth value") return AuthResult(success=False, handled=True) # Verify login data assert login is not None assert password is not None return self._authenticate("PLAIN", LoginPassword(login, password)) async def auth_LOGIN(self, _, args: List[str]) -> AuthResult: login: _TriStateType if len(args) == 1: # Client sent only "AUTH LOGIN" login = await self.challenge_auth(self.AuthLoginUsernameChallenge) if login is MISSING: return AuthResult(success=False) else: # Client sent "AUTH LOGIN " try: login = b64decode(args[1].encode(), validate=True) except Exception: await self.push("501 5.5.2 Can't decode base64") return AuthResult(success=False, handled=True) assert login is not None password: _TriStateType password = await self.challenge_auth(self.AuthLoginPasswordChallenge) if password is MISSING: return AuthResult(success=False) assert password is not None return self._authenticate("LOGIN", LoginPassword(login, password)) def _strip_command_keyword(self, keyword: str, arg: str) -> Optional[str]: keylen 
= len(keyword) if arg[:keylen].upper() == keyword: return arg[keylen:].strip() return None def _getaddr(self, arg: str) -> Tuple[Optional[str], Optional[str]]: """ Try to parse address given in SMTP command. Returns address=None if arg can't be parsed properly (get_angle_addr / get_addr_spec raised HeaderParseError) """ class AddrSpec(Protocol): @property def addr_spec(self) -> str: ... if not arg: return '', '' address: AddrSpec try: if arg.lstrip().startswith('<'): address, rest = get_angle_addr(arg) else: address, rest = get_addr_spec(arg) except HeaderParseError: return None, None addr = address.addr_spec localpart, atsign, domainpart = addr.rpartition("@") if self.local_part_limit and len(localpart) > self.local_part_limit: return None, None return addr, rest def _getparams( self, params: Sequence[str] ) -> Optional[Dict[str, Union[str, bool]]]: # Return params as dictionary. Return None if not all parameters # appear to be syntactically valid according to RFC 1869. result: Dict[str, Union[str, bool]] = {} for param in params: param, eq, value = param.partition('=') if not param.isalnum() or eq and not value: return None result[param] = value if eq else True return result # noinspection PyUnresolvedReferences def _syntax_available(self, method: Callable) -> bool: if not hasattr(method, '__smtp_syntax__'): return False if method.__smtp_syntax_when__: # type: ignore[attr-defined] return bool(getattr(self, method.__smtp_syntax_when__)) # type: ignore[attr-defined] return True @syntax('HELP [command]') async def smtp_HELP(self, arg: str) -> None: if await self.check_auth_needed("HELP"): return code = 250 if arg: method = self._smtp_methods.get(arg.upper()) if method and self._syntax_available(method): help_str = method.__smtp_syntax__ assert self.session is not None if (self.session.extended_smtp and method.__smtp_syntax_extended__): help_str += method.__smtp_syntax_extended__ await self.push('250 Syntax: ' + help_str) return code = 501 commands = [] for name, method in self._smtp_methods.items(): if self._syntax_available(method): commands.append(name) commands.sort() await self.push( '{} Supported commands: {}'.format(code, ' '.join(commands))) @syntax('VRFY
<address>') async def smtp_VRFY(self, arg: str) -> None: if await self.check_auth_needed("VRFY"): return if arg: address, params = self._getaddr(arg) if address is None: await self.push('502 Could not VRFY ' + arg) else: status = await self._call_handler_hook('VRFY', address) await self.push( '252 Cannot VRFY user, but will accept message ' 'and attempt delivery' if status is MISSING else status) else: await self.push('501 Syntax: VRFY <address>
') @syntax('MAIL FROM: <address>
', extended=' [SP <mail-parameters>]') async def smtp_MAIL(self, arg: Optional[str]) -> None: if await self.check_helo_needed(): return if await self.check_auth_needed("MAIL"): return syntaxerr = '501 Syntax: MAIL FROM: <address>
' assert self.session is not None if self.session.extended_smtp: syntaxerr += ' [SP ]' if arg is None: await self.push(syntaxerr) return arg = self._strip_command_keyword('FROM:', arg) if arg is None: await self.push(syntaxerr) return address, addrparams = self._getaddr(arg) if address is None: await self.push("553 5.1.3 Error: malformed address") return if not address: await self.push(syntaxerr) return if not self.session.extended_smtp and addrparams: await self.push(syntaxerr) return assert self.envelope is not None if self.envelope.mail_from: await self.push('503 Error: nested MAIL command') return assert addrparams is not None mail_options = addrparams.upper().split() params = self._getparams(mail_options) if params is None: await self.push(syntaxerr) return if not self._decode_data: body = params.pop('BODY', '7BIT') if body not in ['7BIT', '8BITMIME']: await self.push( '501 Error: BODY can only be one of 7BIT, 8BITMIME') return smtputf8 = params.pop('SMTPUTF8', False) if not isinstance(smtputf8, bool): await self.push('501 Error: SMTPUTF8 takes no arguments') return if smtputf8 and not self.enable_SMTPUTF8: await self.push('501 Error: SMTPUTF8 disabled') return self.envelope.smtp_utf8 = smtputf8 size = params.pop('SIZE', None) if size: if isinstance(size, bool) or not size.isdigit(): await self.push(syntaxerr) return elif self.data_size_limit and int(size) > self.data_size_limit: await self.push( '552 Error: message size exceeds fixed maximum message ' 'size') return if len(params) > 0: await self.push( '555 MAIL FROM parameters not recognized or not implemented') return status = await self._call_handler_hook('MAIL', address, mail_options) if status is MISSING: self.envelope.mail_from = address self.envelope.mail_options.extend(mail_options) status = '250 OK' log.info('%r sender: %s', self.session.peer, address) await self.push(status) @syntax('RCPT TO:
<address>', extended=' [SP <rcpt-parameters>]') async def smtp_RCPT(self, arg: Optional[str]) -> None: if await self.check_helo_needed(): return if await self.check_auth_needed("RCPT"): return assert self.envelope is not None if not self.envelope.mail_from: await self.push("503 Error: need MAIL command") return syntaxerr = '501 Syntax: RCPT TO: <address>
' assert self.session is not None if self.session.extended_smtp: syntaxerr += ' [SP ]' if arg is None: await self.push(syntaxerr) return arg = self._strip_command_keyword('TO:', arg) if arg is None: await self.push(syntaxerr) return address, params = self._getaddr(arg) if address is None: await self.push("553 5.1.3 Error: malformed address") return if not address: await self.push(syntaxerr) return if not self.session.extended_smtp and params: await self.push(syntaxerr) return assert params is not None rcpt_options = params.upper().split() params_dict = self._getparams(rcpt_options) if params_dict is None: await self.push(syntaxerr) return # XXX currently there are no options we recognize. if len(params_dict) > 0: await self.push( '555 RCPT TO parameters not recognized or not implemented' ) return status = await self._call_handler_hook('RCPT', address, rcpt_options) if status is MISSING: self.envelope.rcpt_tos.append(address) self.envelope.rcpt_options.extend(rcpt_options) status = '250 OK' log.info('%r recip: %s', self.session.peer, address) await self.push(status) @syntax('RSET') async def smtp_RSET(self, arg: str): if arg: await self.push('501 Syntax: RSET') return self._set_rset_state() if hasattr(self, 'rset_hook'): warn('Use handler.handle_RSET() instead of .rset_hook()', DeprecationWarning) await self.rset_hook() status = await self._call_handler_hook('RSET') await self.push('250 OK' if status is MISSING else status) @syntax('DATA') async def smtp_DATA(self, arg: str) -> None: if await self.check_helo_needed(): return if await self.check_auth_needed("DATA"): return assert self.envelope is not None if not self.envelope.rcpt_tos: await self.push('503 Error: need RCPT command') return if arg: await self.push('501 Syntax: DATA') return await self.push('354 End data with .') data: List[bytearray] = [] num_bytes: int = 0 limit: Optional[int] = self.data_size_limit line_fragments: List[bytes] = [] state: _DataState = _DataState.NOMINAL while self.transport is not None: # pragma: nobranch # Since eof_received cancels this coroutine, # readuntil() can never raise asyncio.IncompleteReadError. try: # https://datatracker.ietf.org/doc/html/rfc5321#section-2.3.8 line: bytes = await self._reader.readuntil(b'\r\n') log.debug('DATA readline: %s', line) assert line.endswith(b'\r\n') except asyncio.CancelledError: # The connection got reset during the DATA command. log.info('Connection lost during DATA') self._writer.close() raise except asyncio.LimitOverrunError as e: # The line exceeds StreamReader's "stream limit". # Delay SMTP Status Code sending until data receive is complete # This seems to be implied in RFC 5321 § 4.2.5 if state == _DataState.NOMINAL: # Transition to TOO_LONG only if we haven't gone TOO_MUCH yet state = _DataState.TOO_LONG # Discard data immediately to prevent memory pressure data *= 0 # Drain the stream anyways line = await self._reader.read(e.consumed) assert not line.endswith(b'\r\n') # A lone dot in a line signals the end of DATA. 
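# Added note: this is the "transparent dot" rule of RFC 5321 section 4.5.2 --
# only an exact b".\r\n" line terminates DATA, while data lines that merely
# begin with a dot are kept and have that leading dot stripped by the
# de-transparency loop further down in this method.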
if not line_fragments and line == b'.\r\n': break num_bytes += len(line) if state == _DataState.NOMINAL and limit and num_bytes > limit: # Delay SMTP Status Code sending until data receive is complete # This seems to be implied in RFC 5321 § 4.2.5 state = _DataState.TOO_MUCH # Discard data immediately to prevent memory pressure data *= 0 line_fragments.append(line) if line.endswith(b'\r\n'): # Record data only if state is "NOMINAL" if state == _DataState.NOMINAL: line = EMPTY_BARR.join(line_fragments) if len(line) > self.line_length_limit: # Theoretically we shouldn't reach this place. But it's always # good to practice DEFENSIVE coding. state = _DataState.TOO_LONG # Discard data immediately to prevent memory pressure data *= 0 else: data.append(EMPTY_BARR.join(line_fragments)) line_fragments *= 0 # Day of reckoning! Let's take care of those out-of-nominal situations if state != _DataState.NOMINAL: if state == _DataState.TOO_LONG: await self.push("500 Line too long (see RFC5321 4.5.3.1.6)") elif state == _DataState.TOO_MUCH: # pragma: nobranch await self.push('552 Error: Too much mail data') self._set_post_data_state() return # If unfinished_line is non-empty, then the connection was closed. assert not line_fragments # Remove extraneous carriage returns and de-transparency # according to RFC 5321, Section 4.5.2. for text in data: if text.startswith(b'.'): del text[0] original_content: bytes = EMPTYBYTES.join(data) # Discard data immediately to prevent memory pressure data *= 0 content: Union[str, bytes] if self._decode_data: if self.enable_SMTPUTF8: content = original_content.decode('utf-8', errors='surrogateescape') else: try: content = original_content.decode('ascii', errors='strict') except UnicodeDecodeError: # This happens if enable_smtputf8 is false, meaning that # the server explicitly does not want to accept non-ascii, # but the client ignores that and sends non-ascii anyway. await self.push('500 Error: strict ASCII mode') return else: content = original_content self.envelope.content = content self.envelope.original_content = original_content # Call the new API first if it's implemented. if "DATA" in self._handle_hooks: status = await self._call_handler_hook('DATA') else: # Backward compatibility. status = MISSING if hasattr(self.event_handler, 'process_message'): warn('Use handler.handle_DATA() instead of .process_message()', DeprecationWarning) assert self.session is not None args = (self.session.peer, self.envelope.mail_from, self.envelope.rcpt_tos, self.envelope.content) if asyncio.iscoroutinefunction( self.event_handler.process_message): status = await self.event_handler.process_message(*args) else: status = self.event_handler.process_message(*args) # The deprecated API can return None which means, return the # default status. Don't worry about coverage for this case as # it's a deprecated API that will go away after 1.0. if status is None: # pragma: nocover status = MISSING self._set_post_data_state() await self.push('250 OK' if status is MISSING else status) # Commands that have not been implemented. 
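# --------------------------------------------------------------------------
# Illustrative aside (added; not part of the original module): the smallest
# handler shape that satisfies the DATA hook dispatch above.  The class name
# and the "box" attribute are hypothetical; only the handle_DATA signature
# and the "250 OK" return value follow the hook contract used by
# _call_handler_hook() (compare ReceivingHandler in testing/helpers.py below).
#
#     class CollectingHandler:
#         def __init__(self):
#             self.box = []
#
#         async def handle_DATA(self, server, session, envelope):
#             self.box.append(envelope)
#             return "250 OK"
#
# The not-implemented command stubs announced above continue below.
# --------------------------------------------------------------------------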
async def smtp_EXPN(self, arg: str): await self.push('502 EXPN not implemented') aio-libs-aiosmtpd-b634d9b/aiosmtpd/testing/000077500000000000000000000000001462210711200207375ustar00rootroot00000000000000aio-libs-aiosmtpd-b634d9b/aiosmtpd/testing/__init__.py000066400000000000000000000000001462210711200230360ustar00rootroot00000000000000aio-libs-aiosmtpd-b634d9b/aiosmtpd/testing/helpers.py000066400000000000000000000041571462210711200227620ustar00rootroot00000000000000# Copyright 2014-2021 The aiosmtpd Developers # SPDX-License-Identifier: Apache-2.0 """Testing helpers.""" import os import select import socket import struct import sys import time from smtplib import SMTP as SMTP_Client from typing import List from aiosmtpd.smtp import Envelope, Session, SMTP ASYNCIO_CATCHUP_DELAY = float(os.environ.get("ASYNCIO_CATCHUP_DELAY", 0.1)) """ Delay (in seconds) to give asyncio event loop time to catch up and do things. May need to be increased for slow and/or overburdened test systems. """ def reset_connection(client: SMTP_Client): # Close the connection with a TCP RST instead of a TCP FIN. client must # be a smtplib.SMTP instance. # # https://stackoverflow.com/a/6440364/1570972 # # socket(7) SO_LINGER option. # # struct linger { # int l_onoff; /* linger active */ # int l_linger; /* how many seconds to linger for */ # }; # # Is this correct for Windows/Cygwin and macOS? struct_format = "hh" if sys.platform == "win32" else "ii" l_onoff = 1 l_linger = 0 assert client.sock is not None client.sock.setsockopt( socket.SOL_SOCKET, socket.SO_LINGER, struct.pack(struct_format, l_onoff, l_linger), ) client.close() class ReceivingHandler: def __init__(self): self.box: List[Envelope] = [] async def handle_DATA( self, server: SMTP, session: Session, envelope: Envelope ) -> str: self.box.append(envelope) return "250 OK" def catchup_delay(delay: float = ASYNCIO_CATCHUP_DELAY): """ Sleep for awhile to give asyncio's event loop time to catch up. """ time.sleep(delay) def send_recv( sock: socket.socket, data: bytes, end: bytes = b"\r\n", timeout: float = 0.1 ) -> bytes: sock.send(data + end) slist = [sock] result: List[bytes] = [] while True: read_s, _, _ = select.select(slist, [], [], timeout) if read_s: # We can use sock instead of read_s because slist only contains sock result.append(sock.recv(1024)) else: break return b"".join(result) aio-libs-aiosmtpd-b634d9b/aiosmtpd/testing/statuscodes.py000066400000000000000000000164151462210711200236610ustar00rootroot00000000000000# Copyright 2014-2021 The aiosmtpd Developers # SPDX-License-Identifier: Apache-2.0 import socket from typing import Iterable, NamedTuple class StatusCode(NamedTuple): code: int mesg: bytes def __call__(self, *args: bytes) -> "StatusCode": nmsg = self.mesg % args return StatusCode(self.code, nmsg) def to_bytes(self, crlf: bool = False) -> bytes: """ Returns code + mesg as bytes. WARNING: This is NOT identical to __str()__.encode()! """ _crlf = b"\r\n" if crlf else b"" return str(self.code).encode() + b" " + self.mesg + _crlf def to_str(self, crlf: bool = False) -> str: """ Returns code + mesg as a string. WARNING: This is NOT identical to __str__()! 
""" _crlf = "\r\n" if crlf else "" return str(self.code) + " " + self.mesg.decode() + _crlf _COMMON_COMMANDS = [ b"AUTH", b"DATA", b"HELP", b"MAIL", b"NOOP", b"QUIT", b"RCPT", b"RSET", b"VRFY", ] SUPPORTED_COMMANDS_NOTLS = _COMMON_COMMANDS + [b"EHLO", b"HELO"] SUPPORTED_COMMANDS_NOTLS.sort() SUPPORTED_COMMANDS_TLS = SUPPORTED_COMMANDS_NOTLS + [b"STARTTLS"] SUPPORTED_COMMANDS_TLS.sort() SUPPORTED_COMMANDS_LMTP = _COMMON_COMMANDS + [b"LHLO"] SUPPORTED_COMMANDS_LMTP.sort() def _suppcmd(commands: Iterable[bytes]) -> bytes: return b"Supported commands: " + b" ".join(commands) class SMTP_STATUS_CODES: # Enforced conventions: # 1. Must start with uppercase "S" # 2. Must have the 3-digit status code following "S" # 3. Must be instances of StatusCode S220_READY_TLS = StatusCode(220, b"Ready to start TLS") S221_BYE = StatusCode(221, b"Bye") S235_AUTH_SUCCESS = StatusCode(235, b"2.7.0 Authentication successful") S250_OK = StatusCode(250, b"OK") S250_FQDN = StatusCode(250, bytes(socket.getfqdn(), "utf-8")) S250_SUPPCMD_LMTP = StatusCode(250, _suppcmd(SUPPORTED_COMMANDS_LMTP)) S250_SUPPCMD_NOTLS = StatusCode(250, _suppcmd(SUPPORTED_COMMANDS_NOTLS)) S250_SUPPCMD_TLS = StatusCode(250, _suppcmd(SUPPORTED_COMMANDS_TLS)) S250_SYNTAX_AUTH = StatusCode(250, b"Syntax: AUTH ") S250_SYNTAX_DATA = StatusCode(250, b"Syntax: DATA") S250_SYNTAX_EHLO = StatusCode(250, b"Syntax: EHLO hostname") S250_SYNTAX_HELO = StatusCode(250, b"Syntax: HELO hostname") S250_SYNTAX_MAIL = StatusCode(250, b"Syntax: MAIL FROM:
") S250_SYNTAX_NOOP = StatusCode(250, b"Syntax: NOOP [ignored]") S250_SYNTAX_QUIT = StatusCode(250, b"Syntax: QUIT") S250_SYNTAX_RCPT = StatusCode(250, b"Syntax: RCPT TO:
") S250_SYNTAX_RSET = StatusCode(250, b"Syntax: RSET") S250_SYNTAX_STARTTLS = StatusCode(250, b"Syntax: STARTTLS") S250_SYNTAX_VRFY = StatusCode(250, b"Syntax: VRFY
") S250_SYNTAX_MAIL_E = StatusCode( 250, S250_SYNTAX_MAIL.mesg + b" [SP ]" ) S250_SYNTAX_RCPT_E = StatusCode( 250, S250_SYNTAX_RCPT.mesg + b" [SP ]" ) S252_CANNOT_VRFY = StatusCode( 252, b"Cannot VRFY user, but will accept message and attempt delivery", ) S334_AUTH_EMPTYPROMPT = StatusCode(334, b"") S334_AUTH_USERNAME = StatusCode(334, b"VXNlciBOYW1lAA==") S334_AUTH_PASSWORD = StatusCode(334, b"UGFzc3dvcmQA") S354_DATA_ENDWITH = StatusCode(354, b"End data with .") S421_TOO_MANY = StatusCode(421, b"4.7.0 %b sent too many times") S450_DEST_GREYLIST = StatusCode( 450, b"4.2.0 Recipient address rejected: Greylisted" ) S450_SERVICE_UNAVAIL = StatusCode(450, b"4.3.2 Service currently unavailable") S452_TOO_MANY_CONN = StatusCode(452, b"4.7.0 Too many connections") S454_TLS_NA = StatusCode(454, b"TLS not available") S500_BAD_SYNTAX = StatusCode(500, b"Error: bad syntax") S500_CMD_TOO_LONG = StatusCode(500, b"Command line too long") S500_DATALINE_TOO_LONG = StatusCode(500, b"Line too long (see RFC5321 4.5.3.1.6)") S500_STRICT_ASCII = StatusCode(500, b"Error: strict ASCII mode") S500_CMD_UNRECOG = StatusCode(500, b'Error: command "%b" not recognized') S500_AUTH_UNRECOG = StatusCode(500, b"Error: command 'AUTH' not recognized") S501_AUTH_ABORTED = StatusCode(501, b"5.7.0 Auth aborted") S501_AUTH_NOTB64 = StatusCode(501, b"5.5.2 Can't decode base64") S501_AUTH_CANTSPLIT = StatusCode(501, b"5.5.2 Can't split auth value") S501_MAIL_BODY = StatusCode(501, b"Error: BODY can only be one of 7BIT, 8BITMIME") S501_SMTPUTF8_DISABLED = StatusCode(501, b"Error: SMTPUTF8 disabled") S501_SMTPUTF8_NOARG = StatusCode(501, b"Error: SMTPUTF8 takes no arguments") S501_SUPPCMD_NOTLS = StatusCode(501, S250_SUPPCMD_NOTLS.mesg) S501_SYNTAX_DATA = StatusCode(501, S250_SYNTAX_DATA.mesg) S501_SYNTAX_EHLO = StatusCode(501, S250_SYNTAX_EHLO.mesg) S501_SYNTAX_HELO = StatusCode(501, S250_SYNTAX_HELO.mesg) S501_SYNTAX_MAIL = StatusCode(501, S250_SYNTAX_MAIL.mesg) S501_SYNTAX_MAIL_E = StatusCode(501, S250_SYNTAX_MAIL_E.mesg) S501_SYNTAX_QUIT = StatusCode(501, S250_SYNTAX_QUIT.mesg) S501_SYNTAX_RCPT = StatusCode(501, S250_SYNTAX_RCPT.mesg) S501_SYNTAX_RCPT_E = StatusCode(501, S250_SYNTAX_RCPT_E.mesg) S501_SYNTAX_RSET = StatusCode(501, S250_SYNTAX_RSET.mesg) S501_SYNTAX_STARTTLS = StatusCode(501, S250_SYNTAX_STARTTLS.mesg) S501_SYNTAX_VRFY = StatusCode(501, S250_SYNTAX_VRFY.mesg) S501_TOO_FEW = StatusCode(501, b"Not enough value") S501_TOO_MANY = StatusCode(501, b"Too many values") S502_EXPN_NOTIMPL = StatusCode(502, b"EXPN not implemented") S502_VRFY_COULDNT = StatusCode(502, b"Could not VRFY %b") S502_TOO_MANY_UNRECOG = StatusCode( 502, b"5.5.1 Too many unrecognized commands, goodbye." 
) S503_ALREADY_AUTH = StatusCode(503, b"Already authenticated") S503_EHLO_FIRST = StatusCode(503, b"Error: send EHLO first") S503_HELO_FIRST = StatusCode(503, b"Error: send HELO first") S503_MAIL_NEEDED = StatusCode(503, b"Error: need MAIL command") S503_MAIL_NESTED = StatusCode(503, b"Error: nested MAIL command") S503_RCPT_NEEDED = StatusCode(503, b"Error: need RCPT command") S504_AUTH_UNRECOG = StatusCode(504, b"5.5.4 Unrecognized authentication type") S530_STARTTLS_FIRST = StatusCode(530, b"Must issue a STARTTLS command first") S530_AUTH_REQUIRED = StatusCode(530, b"5.7.0 Authentication required") S535_AUTH_INVALID = StatusCode(535, b"5.7.8 Authentication credentials invalid") S538_AUTH_ENCRYPTREQ = StatusCode( 538, b"5.7.11 Encryption required for requested authentication mechanism", ) S550_DEST_UNKNOWN = StatusCode( 550, b"5.1.1 Recipient address rejected: User unknown" ) S550_NO_RELAY = StatusCode(550, b"5.7.1 Unable to relay") S552_EXCEED_SIZE = StatusCode( 552, b"Error: message size exceeds fixed maximum message size" ) S552_DATA_TOO_MUCH = StatusCode(552, b"Error: Too much mail data") S553_MALFORMED = StatusCode(553, b"5.1.3 Error: malformed address") S554_LACK_SECURITY = StatusCode(554, b"Command refused due to lack of security") S555_MAIL_PARAMS_UNRECOG = StatusCode( 555, b"MAIL FROM parameters not recognized or not implemented", ) S555_RCPT_PARAMS_UNRECOG = StatusCode( 555, b"RCPT TO parameters not recognized or not implemented" ) aio-libs-aiosmtpd-b634d9b/aiosmtpd/tests/000077500000000000000000000000001462210711200204245ustar00rootroot00000000000000aio-libs-aiosmtpd-b634d9b/aiosmtpd/tests/__init__.py000066400000000000000000000000001462210711200225230ustar00rootroot00000000000000aio-libs-aiosmtpd-b634d9b/aiosmtpd/tests/certs/000077500000000000000000000000001462210711200215445ustar00rootroot00000000000000aio-libs-aiosmtpd-b634d9b/aiosmtpd/tests/certs/__init__.py000066400000000000000000000000001462210711200236430ustar00rootroot00000000000000aio-libs-aiosmtpd-b634d9b/aiosmtpd/tests/certs/server.crt000066400000000000000000000026071462210711200235710ustar00rootroot00000000000000-----BEGIN CERTIFICATE----- MIID6DCCAtCgAwIBAgIJAOT/DNOqIMqmMA0GCSqGSIb3DQEBCwUAMIGHMQswCQYD VQQGEwJVUzELMAkGA1UECAwCQVIxEjAQBgNVBAcMCUdyZWVud29vZDERMA8GA1UE CgwIQWlvc210cGQxEDAOBgNVBAsMB0RldlRlYW0xDDAKBgNVBAMMA2FlczEkMCIG CSqGSIb3DQEJARYVd2F5bmVAd2F5bmV3ZXJuZXIuY29tMCAXDTE5MDYwMTEzNTUy NloYDzIxMTkwNTA4MTM1NTI2WjCBhzELMAkGA1UEBhMCVVMxCzAJBgNVBAgMAkFS MRIwEAYDVQQHDAlHcmVlbndvb2QxETAPBgNVBAoMCEFpb3NtdHBkMRAwDgYDVQQL DAdEZXZUZWFtMQwwCgYDVQQDDANhZXMxJDAiBgkqhkiG9w0BCQEWFXdheW5lQHdh eW5ld2VybmVyLmNvbTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMXp glx/G19+jt/m/AQAy4+85ng3L1/PsXri91SpvPz1YD7Z3/0Yz3SFyuP1mkRCTplO H3Ok1BVbnycHxBDLBkbEjoJOfzMWxsV1Xp1vE4XEVQaq111pjgxQoFD1qU9vOs4c 0g54PSTtGio0WCOcJq1fWXz9T1QqM5n4MAL2KzFNkqfyyhCesoja4qnPn9n8MCjk TFslwX/2xJVXrsZyGH0IwiGmJDzkW3/FgXj0brcRZe4BYx/BM7ka1LDNnrdUQ7Wj GuYbC7mQLWzOUJBF+UQUWHbPadCEPYpAgd4J4seME2XUW/ygi95oY6mJcZGOyz0c c8D/Dja8Elt5DeTYypsCAwEAAaNTMFEwHQYDVR0OBBYEFDofuwFE+DEx8uQisFlQ Dfn4LIqdMB8GA1UdIwQYMBaAFDofuwFE+DEx8uQisFlQDfn4LIqdMA8GA1UdEwEB /wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAKdtV5GiiE66bZyqh7aKAOJb6dAe qAD8LH9u2hqili7fHNzRQLppSiNGGxy/yZoqh7+I3Z64km5jEiSiw2bY7vqbhReC qU57Dlf5Q4PQVSe36d+2T/g0oGq1pzJkfY27Pse+e9c/m4FkKeEltdqS8Tl2WJFI Qfux88wBnfrZWCgCvZFQGD6RaqEx9Z2//cUOmU+FcM+JHfbfnQy2QEY13CKQfniP YBprCg866+ecVC+J+Aeu9ubZgv557SJwJ/0b4rsQ/ETUw95g6AxqdHntDTrWamxQ iKiGHt3N9iEdnnjXsKYNMsOFXSHHG10PtBQUpNOSUrp5HMb1Kd0oTdNz/kU= -----END CERTIFICATE----- 
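# Illustrative sketch (added; not a file in the original archive): how the two
# test files adjacent to this point -- server.crt above and server.key below --
# are typically combined into the tls_context that SMTP expects, mirroring the
# ssl_context_server fixture in the test suite's conftest.  The literal paths
# are placeholders for wherever the files end up on disk.
import ssl


def make_server_tls_context(
    certfile: str = "server.crt", keyfile: str = "server.key"
) -> ssl.SSLContext:
    """Build a server-side context suitable for STARTTLS testing."""
    context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
    context.check_hostname = False  # same as the conftest fixture
    context.load_cert_chain(certfile, keyfile)
    return context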
aio-libs-aiosmtpd-b634d9b/aiosmtpd/tests/certs/server.key000066400000000000000000000032501462210711200235640ustar00rootroot00000000000000-----BEGIN PRIVATE KEY----- MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDF6YJcfxtffo7f 5vwEAMuPvOZ4Ny9fz7F64vdUqbz89WA+2d/9GM90hcrj9ZpEQk6ZTh9zpNQVW58n B8QQywZGxI6CTn8zFsbFdV6dbxOFxFUGqtddaY4MUKBQ9alPbzrOHNIOeD0k7Roq NFgjnCatX1l8/U9UKjOZ+DAC9isxTZKn8soQnrKI2uKpz5/Z/DAo5ExbJcF/9sSV V67Gchh9CMIhpiQ85Ft/xYF49G63EWXuAWMfwTO5GtSwzZ63VEO1oxrmGwu5kC1s zlCQRflEFFh2z2nQhD2KQIHeCeLHjBNl1Fv8oIveaGOpiXGRjss9HHPA/w42vBJb eQ3k2MqbAgMBAAECggEBAMLwy8giJys7tK2Ujn2+7sMpNPYKnW5JXK8HasmI269q Xp/p6XgafRVwR7WckJRVn6ffzJkTLRfQhIZkXtqPsCH6r8hoW2BOOgH7JvP5mggz p/CGTNYlB5bXv+Ge6GNm00x8FOfNxaReq1wQ7RQ+VdaFydaUiBQF8YficCAqq8bF v2q/5XBydcJ4N2fyI4Mm3g03NIiXjeCNbxU5MBTwx2W6xN+PitN/KPGrC/KQoQEt PibnBkojBFfty8FTjISX/7ZKaQBp3UEktccru0k1V2LpOJ+aQMfPLGSgMdyx8hei oQbwv97dQfTjMaC3z53ae2sbfOyFmurWcV8Yhlo1uQECgYEA528s2Gsybcg1NQYo 63Z7UYNd4PWOIHVuaf6XX06e0zUncaY0yf5sb0W1VTrXJ6VdgRT1Et9kkgYKZ2I7 zsLnJQQiB0Xk7K+kha9WRgmtsxqsks9E61S/K+Ao0hNBeegwn8G6tu8KQlFDKVF0 LN/SU/Q9J2/CVeaMDK8fugs3yoECgYEA2utto2CJ3TALc451yhqYiVHMq2WLW3M0 +ctrRqTQJ/vgTgCXNDNZPPY+xY83Hbcaw4XxBUI9idNyorqUP3Z05kKEyZUSe4YK SnRi5+m/YLghNCX/MDAQasmCvV8I3ZXvzqP0TuAQW3XETqs3Cwy5wJfGXYxkVf41 4fe0ypvo7xsCgYA+C744HzUb3Yr2NjqONeuFxPRMNUjvRsxdOlYWxRsrgJqci3Sn msAzbLraqLW5+UmCK74wWxe5Vkk/wkRKgFI7yEnfLUvccJJpDMLScBHTbJlLmqnd dZDzEFuhRmxNZIR0sBmApcFYWjTpRN8ikLbwrxAeHIY7RV3SoLiexhclAQKBgAD4 2KTIEfSkePiLYmSCV7kMXu9H5SWDznFpgNFwe+ghiy5tfD5kF/pYUZEJAMKmBH1n w9k1IRbSlIi6cVwSx5QaKYLHoaxgvPz1pVbIR+xDBQq5PHfXTstal7UFjgGF3+m3 +qa7AfeV/0gmJHltFgoP4naZ3/wtw8l8ExZvOMqPAoGBAMG9pMCCY653KfRQ237I m/ds9b3VmaLZ5wAwoAEeU/kfwVa27GlP6uDwz6xJmn3XLQ29YwWx8hgqIBm5TfBU EYQE2RNEcTOY5fnT7QK+xrKyvr3o8fTxCV15EzX/Nwc723QCO8mXN/8ekAhBRHQ7 6QRoqSt0NeWpSSVRTNwYwFrL -----END PRIVATE KEY----- aio-libs-aiosmtpd-b634d9b/aiosmtpd/tests/certs/server_alt.crt000066400000000000000000000026031462210711200244250ustar00rootroot00000000000000-----BEGIN CERTIFICATE----- MIID5jCCAs6gAwIBAgIJAMEFH58o28ymMA0GCSqGSIb3DQEBCwUAMIGGMQswCQYD VQQGEwJVUzELMAkGA1UECAwCQVIxETAPBgNVBAcMCE1hdW1lbGxlMREwDwYDVQQK DAhBaW9zbXRwZDEQMA4GA1UECwwHRGV2VGVhbTEMMAoGA1UEAwwDYWVzMSQwIgYJ KoZIhvcNAQkBFhV3YXluZUB3YXluZXdlcm5lci5jb20wIBcNMjEwMjIzMTczMTAx WhgPMjEyMTAxMzAxNzMxMDFaMIGGMQswCQYDVQQGEwJVUzELMAkGA1UECAwCQVIx ETAPBgNVBAcMCE1hdW1lbGxlMREwDwYDVQQKDAhBaW9zbXRwZDEQMA4GA1UECwwH RGV2VGVhbTEMMAoGA1UEAwwDYWVzMSQwIgYJKoZIhvcNAQkBFhV3YXluZUB3YXlu ZXdlcm5lci5jb20wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDDaZsR N0vuvXI3nJc2faKMcCMIZ92k60yzNITrzJauuVNqZI31rxU2lEjBXWF+yd1Ag3JC JDzUYNlyrvSo6ij93g18+YIEfmlYcyawLvKEeV1nA3vC0/9uK4ruhcdRAPhkVi6Z /GGvjMj05ILFtX6cW3XPHyKJYVFj82muxmXqSjs8kncqlU/ByRb295X80LMwR3bH Tr5BOez2jCWPOK38OqE/mhL7kt/Xd/c8csCO+H3Ep1lGFb9gCHi0/B06I6lJ490x PRYfYhcObpfxgtJ6EB17ZAnKySc46pRhzgWPry2G2J/B8q0J+ySOjQ/+ciIQP1Hf 17K5/teUZs3AuvlTAgMBAAGjUzBRMB0GA1UdDgQWBBS1ZwfD6bcyw1WMEP+2ol3R aiIwFDAfBgNVHSMEGDAWgBS1ZwfD6bcyw1WMEP+2ol3RaiIwFDAPBgNVHRMBAf8E BTADAQH/MA0GCSqGSIb3DQEBCwUAA4IBAQBXogsxOG74QJWfqEvTfJtL2Zf1+Krg Da0JhUcFhewjvkfjBS/pMWRQGx35IRZKztbq3PBhgIS7oCnpqlHWwXqID3Ygypee C3ZNoedWvgI9HAPZCL/Se2Dv+fh2WrARECMPxEgIJ53vCjmAAO/nt7gKHZHW5KgW DcsdcLE5nfVwUEyS+gJvMEx56hUdYldBN2plXqumMsaMXyTPYCzqaNxHTBcTJogd tNzUk0M0+I1PRS3/47pZOex8fbbok0nkdGoHT02URv/+7MV7dwzOmuG0qdtDP0Yo rZjNk76yt3/azUuaSc9LqXN+BOeHCBz69xqwZEeV/jGQ+bl2XS/OgMKw -----END CERTIFICATE----- aio-libs-aiosmtpd-b634d9b/aiosmtpd/tests/certs/server_alt.key000066400000000000000000000032541462210711200244300ustar00rootroot00000000000000-----BEGIN PRIVATE 
KEY----- MIIEvwIBADANBgkqhkiG9w0BAQEFAASCBKkwggSlAgEAAoIBAQDDaZsRN0vuvXI3 nJc2faKMcCMIZ92k60yzNITrzJauuVNqZI31rxU2lEjBXWF+yd1Ag3JCJDzUYNly rvSo6ij93g18+YIEfmlYcyawLvKEeV1nA3vC0/9uK4ruhcdRAPhkVi6Z/GGvjMj0 5ILFtX6cW3XPHyKJYVFj82muxmXqSjs8kncqlU/ByRb295X80LMwR3bHTr5BOez2 jCWPOK38OqE/mhL7kt/Xd/c8csCO+H3Ep1lGFb9gCHi0/B06I6lJ490xPRYfYhcO bpfxgtJ6EB17ZAnKySc46pRhzgWPry2G2J/B8q0J+ySOjQ/+ciIQP1Hf17K5/teU Zs3AuvlTAgMBAAECggEBAKukbjWACa1pMBMS82tEjWun782pVfFPUO6zufFYoh5U 4KU6L8tWf16SfxmBiWqRq0rIaqsYI2o0q6tla7eW/myHB/f3PTHvS18KvjfQ9OpR pC4gzCuEhP9jNcRvM3S+Um/nl0Vgfvlwwu8AbLF2ywBSAbftVCuxlIkjvHdtAwX3 pmfPCn40jcFHIM8eRu2+gsUccY9Lmed2Ct/C1SdA4kzDLrYfVw4U/aGOtf4p7YEa k89/SRPR29AMlDwOn38qHm9o08P08E+6XO9bmrrdnKlbUtx8BhEL1FtsN5ErZk+B dI/kYpVec3cTEC1D/qz+vNFmeb3aBtoJmwHXC5LaxwkCgYEA7Jlf6xWMKY+8VmvH C2NF+EQ7YrrrMkpJY+XXdzDjGYDN6pKF6MBuyCpKt+5DWc9ftqriPKbj+mvTdsLW lGnqGRfJyzqy5eM7jSQFRpbWjd3g/hsShduYdNj2G1lJbKUOz+6lHg7xWo0sVHaS +RpI2Uou5pVwG6Ifc7ID1u0qLf8CgYEA02+nLT3A7FtOd5wCIp3Sk3Nr/KhWE1Aw iq0OMHFVFFf3JY4xb2+GEsOupwEpMvYVNrb+dIhXP0FdAigRo876F0aDWMDxsKuH 4R76jWSdfycIb5TV1VSE2Ald0RybGioTLWLLr5YDmZKlPEF1ibwCtebPxlXrWgHI F2Qz+CXOHK0CgYEAgVOo8Yg6sDEoQpjxGRFvmrA5QdNoYnnmudtVtoobaMJWTEVB OyOMqo8rfnSXjgzjhabMuViEP0sXMNB5mHB4jLTLEfKI/U4DLDgnRhmHN86zgFJu AosxP5WkmPhqQR/MA+6vhMmoNdX7CEQ0PEOY1GVPU60VtZUd2hDRNnc22iMCgYAK xbaDxJCuuhwuVeF5+AaBgrDux3jTNRO0DQsbBrsp9S1fWXZFUi5HiHa+hX2e3hDI n9wo/cVMML1XXclASkxNoUcR34qw0Jx0qMplJ8oqb0erv66BVvQJubhw7f8s/xXJ Cy6LfJ4kVedbQY6GfPC6ac8OMNRz2oFiR7WqH+r3ZQKBgQDnCC7nEP/KLQqsP7E3 EmtMtiXdpKBQvukezF2Zv8xNMeouLfMJdf1XMMTL06Qpj1OCAb76SIqFJOtp8+aR P3w2jZQK1A62eBvpjjlZCauaA4MZaVlvaKsExkM0XDho4Xt6Q0xkqjUme4hLcdr/ xXvhZaWMLnl57GOQL6GBba3LXQ== -----END PRIVATE KEY----- aio-libs-aiosmtpd-b634d9b/aiosmtpd/tests/conftest.py000066400000000000000000000264551462210711200226370ustar00rootroot00000000000000# Copyright 2014-2021 The aiosmtpd Developers # SPDX-License-Identifier: Apache-2.0 import asyncio import inspect import socket import ssl import warnings from contextlib import suppress from functools import wraps from smtplib import SMTP as SMTPClient from typing import Any, Callable, Generator, NamedTuple, Optional, Type, TypeVar import pytest from pkg_resources import resource_filename from pytest_mock import MockFixture from aiosmtpd.controller import Controller from aiosmtpd.handlers import Sink try: from asyncio.proactor_events import _ProactorBasePipeTransport HAS_PROACTOR = True except ImportError: _ProactorBasePipeTransport = None HAS_PROACTOR = False __all__ = [ "controller_data", "handler_data", "Global", "AUTOSTOP_DELAY", "SERVER_CRT", "SERVER_KEY", ] # region #### Aliases ################################################################# controller_data = pytest.mark.controller_data handler_data = pytest.mark.handler_data # endregion # region #### Custom datatypes ######################################################## class HostPort(NamedTuple): host: str = "localhost" port: int = 8025 RT = TypeVar("RT") # "ReturnType" # endregion # region #### Constants & Global Vars ################################################# class Global: SrvAddr: HostPort = HostPort() FQDN: str = socket.getfqdn() @classmethod def set_addr_from(cls, contr: Controller): cls.SrvAddr = HostPort(contr.hostname, contr.port) # If less than 1.0, might cause intermittent error if test system # is too busy/overloaded. 
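# Added note: AUTOSTOP_DELAY below is consumed by the autostop_loop fixture
# further down, which schedules loop.stop() after this many seconds so that
# tests driving main() return instead of hanging.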
AUTOSTOP_DELAY = 1.5 SERVER_CRT = resource_filename("aiosmtpd.tests.certs", "server.crt") SERVER_KEY = resource_filename("aiosmtpd.tests.certs", "server.key") # endregion # region #### Optimizing Fixtures ##################################################### # autouse=True and scope="session" automatically apply this fixture to ALL test cases @pytest.fixture(autouse=True, scope="session") def cache_fqdn(session_mocker: MockFixture): """ This fixture "caches" the socket.getfqdn() call. VERY necessary to prevent situations where quick repeated getfqdn() causes extreme slowdown. Probably due to the DNS server thinking it was an attack or something. """ session_mocker.patch("socket.getfqdn", return_value=Global.FQDN) # endregion # region #### Common Fixtures ######################################################### @pytest.fixture def get_controller(request: pytest.FixtureRequest) -> Callable[..., Controller]: """ Provides a function that will return an instance of a controller. Default class of the controller is Controller, but can be changed via the ``class_`` parameter to the function, or via the ``class_`` parameter of :func:`controller_data` Example usage:: def test_case(get_controller): handler = SomeHandler() controller = get_controller(handler, class_=SomeController) ... """ default_class = Controller marker = request.node.get_closest_marker("controller_data") if marker and marker.kwargs: # Must copy so marker data do not change between test cases if marker is # applied to test class markerdata = marker.kwargs.copy() else: markerdata = {} def getter( handler: Any, class_: Optional[Type[Controller]] = None, **server_kwargs, ) -> Controller: """ :param handler: The handler object :param class_: If set to None, check controller_data(class_). If both are none, defaults to Controller. """ assert not inspect.isclass(handler) marker_class: Optional[Type[Controller]] marker_class = markerdata.pop("class_", default_class) class_ = class_ or marker_class if class_ is None: raise RuntimeError( f"Fixture '{request.fixturename}' needs controller_data to specify " f"what class to use" ) ip_port: HostPort = markerdata.pop("host_port", HostPort()) # server_kwargs takes precedence, so it's rightmost (PEP448) server_kwargs = {**markerdata, **server_kwargs} server_kwargs.setdefault("hostname", ip_port.host) server_kwargs.setdefault("port", ip_port.port) return class_( handler, **server_kwargs, ) return getter @pytest.fixture def get_handler(request: pytest.FixtureRequest) -> Callable: """ Provides a function that will return an instance of a :ref:`handler class `. Default class of the handler is Sink, but can be changed via the ``class_`` parameter to the function, or via the ``class_`` parameter of :func:`handler_data` Example usage:: def test_case(get_handler): handler = get_handler(class_=SomeHandler) controller = Controller(handler) ... """ default_class = Sink marker = request.node.get_closest_marker("handler_data") if marker and marker.kwargs: # Must copy so marker data do not change between test cases if marker is # applied to test class markerdata = marker.kwargs.copy() else: markerdata = {} def getter(*args, **kwargs) -> Any: if marker: class_ = markerdata.pop("class_", default_class) # *args overrides args_ in handler_data() args_ = markerdata.pop("args_", tuple()) # Do NOT inline the above into the line below! We *need* to pop "args_"! 
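# Added note: the pop above must run unconditionally -- if "args_" were left
# in markerdata, the {**markerdata, **kwargs} merge below would pass it to the
# handler class as an unexpected "args_" keyword argument.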
args = args or args_ # **kwargs override markerdata, so it's rightmost (PEP448) kwargs = {**markerdata, **kwargs} else: class_ = default_class # noinspection PyArgumentList return class_(*args, **kwargs) return getter @pytest.fixture def temp_event_loop() -> Generator[asyncio.AbstractEventLoop, None, None]: with warnings.catch_warnings(): warnings.simplefilter("ignore") try: default_loop = asyncio.get_event_loop() except (DeprecationWarning, RuntimeError): default_loop = None new_loop = asyncio.new_event_loop() asyncio.set_event_loop(new_loop) # yield new_loop # new_loop.close() if default_loop is not None: asyncio.set_event_loop(default_loop) @pytest.fixture def autostop_loop( temp_event_loop: asyncio.AbstractEventLoop, ) -> asyncio.AbstractEventLoop: # Create a new event loop, and arrange for that loop to end almost # immediately. This will allow the calls to main() in these tests to # also exit almost immediately. Otherwise, the foreground test # process will hang. temp_event_loop.call_later(AUTOSTOP_DELAY, temp_event_loop.stop) # return temp_event_loop @pytest.fixture def plain_controller( get_handler: Callable, get_controller: Callable ) -> Generator[Controller, None, None]: """ Returns a Controller that, by default, gets invoked with no optional args. Hence the moniker "plain". Internally uses the :fixture:`get_controller` and :fixture:`get_handler` fixtures, so optional args/kwargs can be specified for the Controller and the handler via the :func:`controller_data` and :func:`handler_data` markers, respectively. """ handler = get_handler() controller = get_controller(handler) controller.start() Global.set_addr_from(controller) # yield controller # # Some test cases need to .stop() the controller inside themselves # in such cases, we must suppress Controller's raise of AssertionError # because Controller doesn't like .stop() to be invoked more than once with suppress(AssertionError): controller.stop() @pytest.fixture def nodecode_controller( get_handler: Callable, get_controller: Callable ) -> Generator[Controller, None, None]: """ Same as :fixture:`plain_controller`, except that ``decode_data=False`` is enforced. """ handler = get_handler() controller = get_controller(handler, decode_data=False) controller.start() Global.set_addr_from(controller) # yield controller # # Some test cases need to .stop() the controller inside themselves # in such cases, we must suppress Controller's raise of AssertionError # because Controller doesn't like .stop() to be invoked more than once with suppress(AssertionError): controller.stop() @pytest.fixture def decoding_controller( get_handler: Callable, get_controller: Callable ) -> Generator[Controller, None, None]: handler = get_handler() controller = get_controller(handler, decode_data=True) controller.start() Global.set_addr_from(controller) # yield controller # # Some test cases need to .stop() the controller inside themselves # in such cases, we must suppress Controller's raise of AssertionError # because Controller doesn't like .stop() to be invoked more than once with suppress(AssertionError): controller.stop() @pytest.fixture def client(request: pytest.FixtureRequest) -> Generator[SMTPClient, None, None]: """ Generic SMTP Client, will connect to the ``host:port`` defined in ``Global.SrvAddr`` unless overriden using :func:`client_data` marker. 
""" marker = request.node.get_closest_marker("client_data") if marker: markerdata = marker.kwargs or {} else: markerdata = {} addrport = markerdata.get("connect_to", Global.SrvAddr) with SMTPClient(*addrport) as client: yield client @pytest.fixture def ssl_context_server() -> ssl.SSLContext: """ Provides a server-side SSL Context """ context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH) context.check_hostname = False context.load_cert_chain(SERVER_CRT, SERVER_KEY) # return context @pytest.fixture def ssl_context_client() -> ssl.SSLContext: """ Provides a client-side SSL Context """ context = ssl.create_default_context(ssl.Purpose.SERVER_AUTH) context.check_hostname = False context.load_verify_locations(SERVER_CRT) # return context # Please keep the scope as "module"; setting it as "function" (the default) somehow # causes the 'hidden' exception to be detected when the loop starts over in the next # test case, defeating the silencing. @pytest.fixture(scope="module") def silence_event_loop_closed() -> bool: """ Mostly used to suppress "unhandled exception" error due to ``_ProactorBasePipeTransport`` raising an exception when doing ``__del__`` """ if not HAS_PROACTOR: return False assert _ProactorBasePipeTransport is not None if hasattr(_ProactorBasePipeTransport, "old_del"): return True # From: https://github.com/aio-libs/aiohttp/issues/4324#issuecomment-733884349 def silencer(func: Callable[..., RT]) -> Callable[..., RT]: @wraps(func) def wrapper(self: Any, *args, **kwargs) -> RT: try: return func(self, *args, **kwargs) except RuntimeError as e: if str(e) != "Event loop is closed": raise return wrapper # noinspection PyUnresolvedReferences old_del = _ProactorBasePipeTransport.__del__ _ProactorBasePipeTransport._old_del = old_del _ProactorBasePipeTransport.__del__ = silencer(old_del) return True # endregion aio-libs-aiosmtpd-b634d9b/aiosmtpd/tests/test_handlers.py000066400000000000000000001024011462210711200236330ustar00rootroot00000000000000# Copyright 2014-2021 The aiosmtpd Developers # SPDX-License-Identifier: Apache-2.0 import logging import sys from email.message import Message as Em_Message from io import StringIO from mailbox import Maildir from operator import itemgetter from pathlib import Path from smtplib import SMTPDataError, SMTPRecipientsRefused from textwrap import dedent from types import SimpleNamespace from typing import AnyStr, Callable, Generator, Type, TypeVar, Union import pytest from aiosmtpd.controller import Controller from aiosmtpd.handlers import AsyncMessage, Debugging, Mailbox, Proxy, Sink from aiosmtpd.handlers import Message as AbstractMessageHandler from aiosmtpd.smtp import SMTP as Server from aiosmtpd.smtp import Session as ServerSession from aiosmtpd.smtp import Envelope from aiosmtpd.testing.statuscodes import SMTP_STATUS_CODES as S from aiosmtpd.testing.statuscodes import StatusCode from .conftest import Global, controller_data, handler_data try: from typing_extensions import Protocol except ModuleNotFoundError: from typing import Protocol class HasFakeParser(Protocol): fparser: "FakeParser" exception: Type[Exception] class KnowsUpstream(Protocol): upstream: Controller T = TypeVar("T") CRLF = "\r\n" # region ##### Support Classes ############################################### class FakeParser: """ Emulates ArgumentParser.error() to catch the message """ message: Union[str, bytes, None] = None def error(self, message: AnyStr): self.message = message raise SystemExit class DataHandler: content: Union[str, bytes, None] = None original_content: bytes 
= None async def handle_DATA( self, server: Server, session: ServerSession, envelope: Envelope ) -> str: self.content = envelope.content self.original_content = envelope.original_content return S.S250_OK.to_str() class MessageHandler(AbstractMessageHandler): def handle_message(self, message: Em_Message) -> None: pass class AsyncMessageHandler(AsyncMessage): handled_message: Em_Message = None async def handle_message(self, message: Em_Message) -> None: self.handled_message = message class HELOHandler: ReturnCode = StatusCode(250, b"pepoluan.was.here") async def handle_HELO(self, server, session, envelope, hostname): return self.ReturnCode.to_str() class EHLOHandlerDeprecated: Domain = "alex.example.code" ReturnCode = StatusCode(250, Domain.encode("ascii")) async def handle_EHLO(self, server, session, envelope, hostname): return self.ReturnCode.to_str() # The suffix "New" is kept so we can catch all refs to the old "EHLOHandler" class class EHLOHandlerNew: Domain = "bruce.example.code" hostname = None orig_responses = [] def __init__(self, *features): self.features = features or tuple() async def handle_EHLO(self, server, session, envelope, hostname, responses): self.hostname = hostname self.orig_responses.clear() self.orig_responses.extend(responses) my_resp = [responses[0]] my_resp.extend(f"250-{f}" for f in self.features) my_resp.append("250 HELP") return my_resp class EHLOHandlerIncompatibleShort: async def handle_EHLO(self, server, session, envelope): return class EHLOHandlerIncompatibleLong: async def handle_EHLO(self, server, session, envelope, hostname, responses, xtra): return class MAILHandler: ReplacementOptions = ["WAS_HANDLED"] ReturnCode = StatusCode(250, b"Yeah, sure") async def handle_MAIL(self, server, session, envelope, address, options): envelope.mail_options = self.ReplacementOptions return self.ReturnCode.to_str() class RCPTHandler: RejectCode = StatusCode(550, b"Rejected") async def handle_RCPT(self, server, session, envelope, address, options): envelope.rcpt_options.extend(options) if address == "bart@example.com": return self.RejectCode.to_str() envelope.rcpt_tos.append(address) return S.S250_OK.to_str() class ErroringDataHandler: ReturnCode = StatusCode(599, b"Not today") async def handle_DATA(self, server, session, envelope): return self.ReturnCode.to_str() class AUTHHandler: async def handle_AUTH(self, server, session, envelope, args): server.authenticates = True return S.S235_AUTH_SUCCESS.to_str() class NoHooksHandler: pass class DeprecatedHookController(Controller): class DeprecatedHookServer(Server): warnings: list = None def __init__(self, *args, **kws): super().__init__(*args, **kws) async def ehlo_hook(self): pass async def rset_hook(self): pass def factory(self): self.smtpd = self.DeprecatedHookServer(self.handler) return self.smtpd class DeprecatedHandler: def process_message(self, peer, mailfrom, rcpttos, data, **kws): pass class AsyncDeprecatedHandler: async def process_message(self, peer, mailfrom, rcpttos, data, **kws): pass # endregion # region ##### Fixtures ####################################################### @pytest.fixture def debugging_controller(get_controller) -> Generator[Controller, None, None]: # Cannot use plain_controller fixture because we need to first create the # Debugging handler before creating the controller. 
stream = StringIO() handler = Debugging(stream) controller = get_controller(handler) controller.start() Global.set_addr_from(controller) # yield controller # controller.stop() stream.close() @pytest.fixture def temp_maildir(tmp_path: Path) -> Path: return tmp_path / "maildir" @pytest.fixture def mailbox_controller( temp_maildir, get_controller ) -> Generator[Controller, None, None]: handler = Mailbox(temp_maildir) controller = get_controller(handler) controller.start() Global.set_addr_from(controller) # yield controller # controller.stop() @pytest.fixture def with_fake_parser() -> Callable: """ Gets a function that will instantiate a handler_class using the class's from_cli() @classmethod, using FakeParser as the parser. This function will also catch any exceptions and store the exception's type -- alongside any message passed to FakeParser.error() -- in the handler object itself (using the HasFakeParser protocol/mixin). """ parser = FakeParser() def handler_initer(handler_class: Type[T], *args) -> Union[T, HasFakeParser]: handler: Union[T, HasFakeParser] try: handler = handler_class.from_cli(parser, *args) handler.fparser = parser handler.exception = None except (Exception, SystemExit) as e: handler = SimpleNamespace(fparser=parser, exception=type(e)) return handler return handler_initer @pytest.fixture def upstream_controller(get_controller) -> Generator[Controller, None, None]: upstream_handler = DataHandler() upstream_controller = get_controller(upstream_handler, port=9025) upstream_controller.start() # Notice that we do NOT invoke Global.set_addr_from() here # yield upstream_controller # upstream_controller.stop() @pytest.fixture def proxy_nodecode_controller( upstream_controller, get_controller ) -> Generator[Union[Controller, KnowsUpstream], None, None]: proxy_handler = Proxy(upstream_controller.hostname, upstream_controller.port) proxy_controller = get_controller(proxy_handler) proxy_controller.upstream = upstream_controller proxy_controller.start() Global.set_addr_from(proxy_controller) # yield proxy_controller # proxy_controller.stop() @pytest.fixture def proxy_decoding_controller( upstream_controller, get_controller ) -> Generator[Union[Controller, KnowsUpstream], None, None]: proxy_handler = Proxy(upstream_controller.hostname, upstream_controller.port) proxy_controller = get_controller(proxy_handler, decode_data=True) proxy_controller.upstream = upstream_controller proxy_controller.start() Global.set_addr_from(proxy_controller) # yield proxy_controller # proxy_controller.stop() # endregion class TestDebugging: @controller_data(decode_data=True) def test_debugging(self, debugging_controller, client): peer = client.sock.getsockname() client.sendmail( "anne@example.com", ["bart@example.com"], dedent( """\ From: Anne Person To: Bart Person Subject: A test Testing """ ), ) handler = debugging_controller.handler assert isinstance(handler, Debugging) text = handler.stream.getvalue() assert text == dedent( f"""\ ---------- MESSAGE FOLLOWS ---------- mail options: ['SIZE=102'] From: Anne Person To: Bart Person Subject: A test X-Peer: {peer!r} Testing ------------ END MESSAGE ------------ """ ) def test_debugging_bytes(self, debugging_controller, client): peer = client.sock.getsockname() client.sendmail( "anne@example.com", ["bart@example.com"], dedent( """\ From: Anne Person To: Bart Person Subject: A test Testing """ ), ) handler = debugging_controller.handler assert isinstance(handler, Debugging) text = handler.stream.getvalue() assert text == dedent( f"""\ ---------- MESSAGE FOLLOWS 
---------- mail options: ['SIZE=102'] From: Anne Person To: Bart Person Subject: A test X-Peer: {peer!r} Testing ------------ END MESSAGE ------------ """ ) def test_debugging_without_options(self, debugging_controller, client): # Prevent ESMTP options. client.helo() peer = client.sock.getsockname() client.sendmail( "anne@example.com", ["bart@example.com"], dedent( """\ From: Anne Person To: Bart Person Subject: A test Testing """ ), ) handler = debugging_controller.handler assert isinstance(handler, Debugging) text = handler.stream.getvalue() assert text == dedent( f"""\ ---------- MESSAGE FOLLOWS ---------- From: Anne Person To: Bart Person Subject: A test X-Peer: {peer!r} Testing ------------ END MESSAGE ------------ """ ) def test_debugging_with_options(self, debugging_controller, client): peer = client.sock.getsockname() client.sendmail( "anne@example.com", ["bart@example.com"], dedent( """\ From: Anne Person To: Bart Person Subject: A test Testing """ ), mail_options=["BODY=7BIT"], ) handler = debugging_controller.handler assert isinstance(handler, Debugging) text = handler.stream.getvalue() assert text == dedent( f"""\ ---------- MESSAGE FOLLOWS ---------- mail options: ['SIZE=102', 'BODY=7BIT'] From: Anne Person To: Bart Person Subject: A test X-Peer: {peer!r} Testing ------------ END MESSAGE ------------ """ ) class TestMessage: @pytest.mark.parametrize( "content", [ b"", bytearray(), "", ], ids=["bytes", "bytearray", "str"] ) def test_prepare_message(self, temp_event_loop, content): sess_ = ServerSession(temp_event_loop) enve_ = Envelope() handler = MessageHandler() enve_.content = content msg = handler.prepare_message(sess_, enve_) assert isinstance(msg, Em_Message) assert msg.keys() == ['X-Peer', 'X-MailFrom', 'X-RcptTo'] assert msg.get_payload() == "" @pytest.mark.parametrize( ("content", "expectre"), [ (None, r"Expected str or bytes, got "), ([], r"Expected str or bytes, got "), ({}, r"Expected str or bytes, got "), ((), r"Expected str or bytes, got "), ], ids=("None", "List", "Dict", "Tuple") ) def test_prepare_message_err(self, temp_event_loop, content, expectre): sess_ = ServerSession(temp_event_loop) enve_ = Envelope() handler = MessageHandler() enve_.content = content with pytest.raises(TypeError, match=expectre): _ = handler.prepare_message(sess_, enve_) @handler_data(class_=DataHandler) def test_message(self, plain_controller, client): handler = plain_controller.handler assert isinstance(handler, DataHandler) # In this test, the message content comes in as a bytes. client.sendmail( "anne@example.com", ["bart@example.com"], dedent( """\ From: Anne Person To: Bart Person Subject: A test Message-ID: Testing """ ), ) # The content is not converted, so it's bytes. assert handler.content == handler.original_content assert isinstance(handler.content, bytes) assert isinstance(handler.original_content, bytes) @handler_data(class_=DataHandler) def test_message_decoded(self, decoding_controller, client): handler = decoding_controller.handler assert isinstance(handler, DataHandler) # In this test, the message content comes in as a string. 
client.sendmail( "anne@example.com", ["bart@example.com"], dedent( """\ From: Anne Person To: Bart Person Subject: A test Message-ID: Testing """ ), ) assert handler.content != handler.original_content assert isinstance(handler.content, str) assert isinstance(handler.original_content, bytes) @handler_data(class_=AsyncMessageHandler) def test_message_async(self, plain_controller, client): handler = plain_controller.handler assert isinstance(handler, AsyncMessageHandler) # In this test, the message data comes in as bytes. client.sendmail( "anne@example.com", ["bart@example.com"], dedent( """\ From: Anne Person To: Bart Person Subject: A test Message-ID: Testing """ ), ) handled_message = handler.handled_message assert handled_message["subject"] == "A test" assert handled_message["message-id"] == "" assert handled_message["X-Peer"] is not None assert handled_message["X-MailFrom"] == "anne@example.com" assert handled_message["X-RcptTo"] == "bart@example.com" @handler_data(class_=AsyncMessageHandler) def test_message_decoded_async(self, decoding_controller, client): handler = decoding_controller.handler assert isinstance(handler, AsyncMessageHandler) # With a server that decodes the data, the messages come in as # strings. There's no difference in the message seen by the # handler's handle_message() method, but internally this gives full # coverage. client.sendmail( "anne@example.com", ["bart@example.com"], dedent( """\ From: Anne Person To: Bart Person Subject: A test Message-ID: Testing """ ), ) handled_message = handler.handled_message assert handled_message["subject"] == "A test" assert handled_message["message-id"] == "" assert handled_message["X-Peer"] is not None assert handled_message["X-MailFrom"] == "anne@example.com" assert handled_message["X-RcptTo"] == "bart@example.com" class TestMailbox: def test_mailbox(self, temp_maildir, mailbox_controller, client): client.sendmail( "aperson@example.com", ["bperson@example.com"], dedent( """\ From: Anne Person To: Bart Person Subject: A test Message-ID: Hi Bart, this is Anne. """ ), ) client.sendmail( "cperson@example.com", ["dperson@example.com"], dedent( """\ From: Cate Person To: Dave Person Subject: A test Message-ID: Hi Dave, this is Cate. """ ), ) client.sendmail( "eperson@example.com", ["fperson@example.com"], dedent( """\ From: Elle Person To: Fred Person Subject: A test Message-ID: Hi Fred, this is Elle. """ ), ) # Check the messages in the mailbox. mailbox = Maildir(temp_maildir) messages = sorted(mailbox, key=itemgetter("message-id")) expect = ["", "", ""] assert [message["message-id"] for message in messages] == expect def test_mailbox_reset(self, temp_maildir, mailbox_controller, client): client.sendmail( "aperson@example.com", ["bperson@example.com"], dedent( """\ From: Anne Person To: Bart Person Subject: A test Message-ID: Hi Bart, this is Anne. 
""" ), ) mailbox_controller.handler.reset() mailbox = Maildir(temp_maildir) assert list(mailbox) == [] class TestCLI: def test_debugging_no_args(self, with_fake_parser): handler = with_fake_parser(Debugging) assert handler.exception is None assert handler.fparser.message is None assert handler.stream == sys.stdout def test_debugging_two_args(self, with_fake_parser): handler = with_fake_parser(Debugging, "foo", "bar") assert handler.exception is SystemExit assert handler.fparser.message == "Debugging usage: [stdout|stderr]" def test_debugging_stdout(self, with_fake_parser): handler = with_fake_parser(Debugging, "stdout") assert handler.exception is None assert handler.fparser.message is None assert handler.stream == sys.stdout def test_debugging_stderr(self, with_fake_parser): handler = with_fake_parser(Debugging, "stderr") assert handler.exception is None assert handler.fparser.message is None assert handler.stream == sys.stderr def test_debugging_bad_argument(self, with_fake_parser): handler = with_fake_parser(Debugging, "stdfoo") assert handler.exception is SystemExit assert handler.fparser.message == "Debugging usage: [stdout|stderr]" def test_sink_no_args(self, with_fake_parser): handler = with_fake_parser(Sink) assert handler.exception is None assert handler.fparser.message is None assert isinstance(handler, Sink) def test_sink_any_args(self, with_fake_parser): handler = with_fake_parser(Sink, "foo") assert handler.exception is SystemExit assert handler.fparser.message == "Sink handler does not accept arguments" def test_mailbox_no_args(self, with_fake_parser): handler = with_fake_parser(Mailbox) assert handler.exception is SystemExit assert handler.fparser.message == "The directory for the maildir is required" def test_mailbox_too_many_args(self, with_fake_parser): handler = with_fake_parser(Mailbox, "foo", "bar", "baz") assert handler.exception is SystemExit assert handler.fparser.message == "Too many arguments for Mailbox handler" def test_mailbox(self, with_fake_parser, temp_maildir): handler = with_fake_parser(Mailbox, temp_maildir) assert handler.exception is None assert handler.fparser.message is None assert isinstance(handler.mailbox, Maildir) assert handler.mail_dir == temp_maildir class TestProxy: sender_addr = "anne@example.com" receiver_addr = "bart@example.com" source_lines = [ f"From: Anne Person <{sender_addr}>", f"To: Bart Person <{receiver_addr}>", "Subject: A test", "%s", # Insertion point; see below "Testing", "", ] # For "source" we insert an empty string source = "\n".join(source_lines) % "" # For "expected" we insert X-Peer with yet another template expected_template = ( b"\r\n".join(ln.encode("ascii") for ln in source_lines) % b"X-Peer: %s\r\n" ) # There are two controllers and two SMTPd's running here. The # "upstream" one listens on port 9025 and is connected to a "data # handler" which captures the messages it receives. The second -and # the one under test here- listens on port 9024 and proxies to the one # on port 9025. 
def test_deliver_bytes(self, proxy_nodecode_controller, client): client.sendmail(self.sender_addr, [self.receiver_addr], self.source) upstream = proxy_nodecode_controller.upstream upstream_handler = upstream.handler assert isinstance(upstream_handler, DataHandler) proxysess: ServerSession = proxy_nodecode_controller.smtpd.session expected = self.expected_template % proxysess.peer[0].encode("ascii") assert upstream.handler.content == expected assert upstream.handler.original_content == expected def test_deliver_str(self, proxy_decoding_controller, client): client.sendmail(self.sender_addr, [self.receiver_addr], self.source) upstream = proxy_decoding_controller.upstream upstream_handler = upstream.handler assert isinstance(upstream_handler, DataHandler) proxysess: ServerSession = proxy_decoding_controller.smtpd.session expected = self.expected_template % proxysess.peer[0].encode("ascii") assert upstream.handler.content == expected assert upstream.handler.original_content == expected class TestProxyMocked: BAD_BART = {"bart@example.com": (500, "Bad Bart")} SOURCE = dedent( """\ From: Anne Person To: Bart Person Subject: A test Testing """ ) @pytest.fixture def patch_smtp_refused(self, mocker): mock = mocker.patch("aiosmtpd.handlers.smtplib.SMTP") mock().sendmail.side_effect = SMTPRecipientsRefused(self.BAD_BART) def test_recipients_refused( self, caplog, patch_smtp_refused, proxy_decoding_controller, client ): logger_name = "mail.debug" caplog.set_level(logging.INFO, logger=logger_name) client.sendmail("anne@example.com", ["bart@example.com"], self.SOURCE) # The log contains information about what happened in the proxy. # Ideally it would be the newest 2 log records. However, sometimes asyncio # will emit a log entry right afterwards or inbetween causing test fail if we # just checked [-1] and [-2]. 
Therefore we need to scan backwards and simply # note the two log entries' relative position _l1 = _l2 = -1 for _l1, rt in enumerate(reversed(caplog.record_tuples)): if rt == (logger_name, logging.INFO, "got SMTPRecipientsRefused"): break else: pytest.fail("Can't find first log entry") for _l2, rt in enumerate(reversed(caplog.record_tuples)): if rt == ( logger_name, logging.INFO, f"we got some refusals: {self.BAD_BART}", ): break else: pytest.fail("Can't find second log entry") assert _l2 < _l1, "Log entries in wrong order" @pytest.fixture def patch_smtp_oserror(self, mocker): mock = mocker.patch("aiosmtpd.handlers.smtplib.SMTP") mock().sendmail.side_effect = OSError def test_oserror( self, caplog, patch_smtp_oserror, proxy_decoding_controller, client ): logger_name = "mail.debug" caplog.set_level(logging.INFO, logger=logger_name) client.sendmail("anne@example.com", ["bart@example.com"], self.SOURCE) for rt in reversed(caplog.record_tuples): if rt == ( logger_name, logging.INFO, "we got some refusals: {'bart@example.com': (-1, b'ignore')}", ): break else: pytest.fail("Can't find log entry") class TestHooks: @handler_data(class_=HELOHandler) def test_hook_HELO(self, plain_controller, client): assert isinstance(plain_controller.handler, HELOHandler) resp = client.helo("me") assert resp == HELOHandler.ReturnCode @pytest.mark.filterwarnings("ignore::DeprecationWarning") @handler_data(class_=EHLOHandlerDeprecated) def test_hook_EHLO_deprecated(self, plain_controller, client): assert isinstance(plain_controller.handler, EHLOHandlerDeprecated) code, mesg = client.ehlo("me") lines = mesg.decode("utf-8").splitlines() assert code == 250 assert lines[-1] == EHLOHandlerDeprecated.Domain def test_hook_EHLO_deprecated_warning(self): with pytest.warns( DeprecationWarning, match=( # Is a regex; escape regex special chars if necessary r"Use the 5-argument handle_EHLO\(\) hook instead of the " r"4-argument handle_EHLO\(\) hook; support for the 4-argument " r"handle_EHLO\(\) hook will be removed in version 2.0" ), ): _ = Server(EHLOHandlerDeprecated()) @handler_data( class_=EHLOHandlerNew, args_=("FEATURE1", "FEATURE2 OPTION", "FEAT3 OPTA OPTB"), ) def test_hook_EHLO_new(self, plain_controller, client): assert isinstance(plain_controller.handler, EHLOHandlerNew) code, mesg = client.ehlo("me") lines = mesg.decode("utf-8").splitlines() assert code == 250 assert len(lines) == 5 # server name + 3 features + HELP handler = plain_controller.handler assert "250-8BITMIME" in handler.orig_responses assert "8bitmime" not in client.esmtp_features assert "250-SMTPUTF8" in handler.orig_responses assert "smtputf8" not in client.esmtp_features assert "feature1" in client.esmtp_features assert "feature2" in client.esmtp_features assert client.esmtp_features["feature2"] == "OPTION" assert "feat3" in client.esmtp_features assert client.esmtp_features["feat3"] == "OPTA OPTB" assert "help" in client.esmtp_features @pytest.mark.parametrize( "handler_class", [EHLOHandlerIncompatibleShort, EHLOHandlerIncompatibleLong], ids=["TooShort", "TooLong"], ) def test_hook_EHLO_incompat(self, handler_class): with pytest.raises(RuntimeError, match="Unsupported EHLO Hook"): _ = Server(handler_class()) @handler_data(class_=MAILHandler) def test_hook_MAIL(self, plain_controller, client): assert isinstance(plain_controller, Controller) handler = plain_controller.handler assert isinstance(handler, MAILHandler) client.ehlo("me") resp = client.mail("anne@example.com", ("BODY=7BIT", "SIZE=2000")) assert resp == MAILHandler.ReturnCode smtpd = 
plain_controller.smtpd assert smtpd.envelope.mail_options == MAILHandler.ReplacementOptions @handler_data(class_=RCPTHandler) def test_hook_RCPT(self, plain_controller, client): assert isinstance(plain_controller.handler, RCPTHandler) client.helo("me") with pytest.raises(SMTPRecipientsRefused) as excinfo: client.sendmail( "anne@example.com", ["bart@example.com"], dedent( """\ From: anne@example.com To: bart@example.com Subject: Test """ ), ) assert excinfo.value.recipients == { "bart@example.com": RCPTHandler.RejectCode, } @handler_data(class_=ErroringDataHandler) def test_hook_DATA(self, plain_controller, client): assert isinstance(plain_controller.handler, ErroringDataHandler) with pytest.raises(SMTPDataError) as excinfo: client.sendmail( "anne@example.com", ["bart@example.com"], dedent( """\ From: anne@example.com To: bart@example.com Subject: Test Yikes """ ), ) expected: StatusCode = ErroringDataHandler.ReturnCode assert excinfo.value.smtp_code == expected.code assert excinfo.value.smtp_error == expected.mesg @controller_data(decode_data=True, auth_require_tls=False) @handler_data(class_=AUTHHandler) def test_hook_AUTH(self, plain_controller, client): assert isinstance(plain_controller.handler, AUTHHandler) client.ehlo("me") resp = client.login("test", "test") assert resp == S.S235_AUTH_SUCCESS @handler_data(class_=NoHooksHandler) def test_hook_NoHooks(self, plain_controller, client): assert isinstance(plain_controller.handler, NoHooksHandler) client.helo("me") client.mail("anne@example.com") client.rcpt(["bart@example.cm"]) code, _ = client.data( dedent( """\ From: anne@example.com To: bart@example.com Subject: Test """ ) ) assert code == 250 class TestDeprecation: def _process_message_testing(self, controller, client): assert isinstance(controller, Controller) expectedre = r"Use handler.handle_DATA\(\) instead of .process_message\(\)" with pytest.warns(DeprecationWarning, match=expectedre): client.sendmail( "anne@example.com", ["bart@example.com"], dedent( """ From: Anne Person To: Bart Person Subject: A test Testing """ ), ) @handler_data(class_=DeprecatedHandler) def test_process_message(self, plain_controller, client): """handler.process_message is Deprecated""" handler = plain_controller.handler assert isinstance(handler, DeprecatedHandler) controller = plain_controller self._process_message_testing(controller, client) @handler_data(class_=AsyncDeprecatedHandler) def test_process_message_async(self, plain_controller, client): """handler.process_message is Deprecated""" handler = plain_controller.handler assert isinstance(handler, AsyncDeprecatedHandler) controller = plain_controller self._process_message_testing(controller, client) @controller_data(class_=DeprecatedHookController) def test_ehlo_hook(self, plain_controller, client): """SMTP.ehlo_hook is Deprecated""" expectedre = r"Use handler.handle_EHLO\(\) instead of .ehlo_hook\(\)" with pytest.warns(DeprecationWarning, match=expectedre): client.ehlo("example.com") @controller_data(class_=DeprecatedHookController) def test_rset_hook(self, plain_controller, client): """SMTP.rset_hook is Deprecated""" expectedre = r"Use handler.handle_RSET\(\) instead of .rset_hook\(\)" with pytest.warns(DeprecationWarning, match=expectedre): client.rset() aio-libs-aiosmtpd-b634d9b/aiosmtpd/tests/test_lmtp.py000066400000000000000000000027321462210711200230150ustar00rootroot00000000000000# Copyright 2014-2021 The aiosmtpd Developers # SPDX-License-Identifier: Apache-2.0 """Test the LMTP protocol.""" import socket from typing import Generator 
import pytest from aiosmtpd.controller import Controller from aiosmtpd.handlers import Sink from aiosmtpd.lmtp import LMTP from aiosmtpd.testing.statuscodes import SMTP_STATUS_CODES as S from .conftest import Global class LMTPController(Controller): def factory(self): self.smtpd = LMTP(self.handler) return self.smtpd @pytest.fixture(scope="module", autouse=True) def lmtp_controller() -> Generator[LMTPController, None, None]: controller = LMTPController(Sink) controller.start() Global.set_addr_from(controller) # yield controller # controller.stop() def test_lhlo(client): code, mesg = client.docmd("LHLO example.com") lines = mesg.splitlines() assert lines == [ bytes(socket.getfqdn(), "utf-8"), b"SIZE 33554432", b"8BITMIME", b"HELP", ] assert code == 250 def test_helo(client): # HELO and EHLO are not valid LMTP commands. resp = client.helo("example.com") assert resp == S.S500_CMD_UNRECOG(b"HELO") def test_ehlo(client): # HELO and EHLO are not valid LMTP commands. resp = client.ehlo("example.com") assert resp == S.S500_CMD_UNRECOG(b"EHLO") def test_help(client): # https://github.com/aio-libs/aiosmtpd/issues/113 resp = client.docmd("HELP") assert resp == S.S250_SUPPCMD_LMTP aio-libs-aiosmtpd-b634d9b/aiosmtpd/tests/test_main.py000066400000000000000000000273571462210711200227770ustar00rootroot00000000000000# Copyright 2014-2021 The aiosmtpd Developers # SPDX-License-Identifier: Apache-2.0 import asyncio import logging import multiprocessing as MP import os import sys import time from contextlib import contextmanager from multiprocessing.synchronize import Event as MP_Event from smtplib import SMTP as SMTPClient from smtplib import SMTP_SSL from typing import Generator import pytest from pytest_mock import MockFixture from aiosmtpd import __version__ from aiosmtpd.handlers import Debugging from aiosmtpd.main import main, parseargs from aiosmtpd.testing.helpers import catchup_delay from aiosmtpd.testing.statuscodes import SMTP_STATUS_CODES as S from aiosmtpd.tests.conftest import AUTOSTOP_DELAY, SERVER_CRT, SERVER_KEY try: import pwd except ImportError: pwd = None HAS_SETUID = hasattr(os, "setuid") MAIL_LOG = logging.getLogger("mail.log") # region ##### Custom Handlers ######################################################## class FromCliHandler: def __init__(self, called: bool): self.called = called @classmethod def from_cli(cls, parser, *args): return cls(*args) class NullHandler: pass # endregion # region ##### Fixtures ############################################################### @pytest.fixture def nobody_uid() -> Generator[int, None, None]: if pwd is None: pytest.skip("No pwd module available") try: pw = pwd.getpwnam("nobody") except KeyError: pytest.skip("'nobody' not available") else: yield pw.pw_uid @pytest.fixture def setuid(mocker: MockFixture): if not HAS_SETUID: pytest.skip("setuid is unavailable") mocker.patch("aiosmtpd.main.pwd", None) mocker.patch("os.setuid", side_effect=PermissionError) mocker.patch("aiosmtpd.main.partial", side_effect=RuntimeError) # endregion # region ##### Helper Funcs ########################################################### def watch_for_tls(ready_flag: MP_Event, retq: MP.Queue): has_tls = False req_tls = False ready_flag.set() start = time.monotonic() delay = AUTOSTOP_DELAY * 4 while (time.monotonic() - start) <= delay: try: with SMTPClient("localhost", 8025, timeout=0.1) as client: resp = client.docmd("HELP", "HELO") if resp == S.S530_STARTTLS_FIRST: req_tls = True client.ehlo("exemple.org") if "starttls" in client.esmtp_features: has_tls = True break 
except Exception: time.sleep(0.05) retq.put(has_tls) retq.put(req_tls) def watch_for_smtps(ready_flag: MP_Event, retq: MP.Queue): has_smtps = False ready_flag.set() start = time.monotonic() delay = AUTOSTOP_DELAY * 1.5 while (time.monotonic() - start) <= delay: try: with SMTP_SSL("localhost", 8025, timeout=0.1) as client: client.ehlo("exemple.org") has_smtps = True break except Exception: time.sleep(0.05) retq.put(has_smtps) def main_n(*args): main(("-n",) + args) @contextmanager def watcher_process(func): redy = MP.Event() retq = MP.Queue() proc = MP.Process(target=func, args=(redy, retq)) proc.start() redy.wait() yield retq proc.join() # endregion @pytest.mark.usefixtures("autostop_loop") class TestMain: def test_setuid(self, nobody_uid, mocker): mock = mocker.patch("os.setuid") main(args=()) mock.assert_called_with(nobody_uid) def test_setuid_permission_error(self, nobody_uid, mocker, capsys): mock = mocker.patch("os.setuid", side_effect=PermissionError) with pytest.raises(SystemExit) as excinfo: main(args=()) assert excinfo.value.code == 1 mock.assert_called_with(nobody_uid) assert ( capsys.readouterr().err == 'Cannot setuid "nobody"; try running with -n option.\n' ) def test_setuid_no_pwd_module(self, nobody_uid, mocker, capsys): mocker.patch("aiosmtpd.main.pwd", None) with pytest.raises(SystemExit) as excinfo: main(args=()) assert excinfo.value.code == 1 # On Python 3.8 on Linux, a bunch of "RuntimeWarning: coroutine # 'AsyncMockMixin._execute_mock_call' was never awaited" messages # gets mixed up into stderr causing test fail. # Therefore, we use assertIn instead of assertEqual here, because # the string DOES appear in stderr, just buried. assert ( 'Cannot import module "pwd"; try running with -n option.\n' in capsys.readouterr().err ) def test_n(self, setuid): with pytest.raises(RuntimeError): main_n() def test_nosetuid(self, setuid): with pytest.raises(RuntimeError): main(("--nosetuid",)) def test_debug_0(self): # For this test, the test runner likely has already set the log level # so it may not be logging.ERROR. 
default_level = MAIL_LOG.getEffectiveLevel() main_n() assert MAIL_LOG.getEffectiveLevel() == default_level def test_debug_1(self): main_n("-d") assert MAIL_LOG.getEffectiveLevel() == logging.INFO def test_debug_2(self): main_n("-dd") assert MAIL_LOG.getEffectiveLevel() == logging.DEBUG def test_debug_3(self): main_n("-ddd") assert MAIL_LOG.getEffectiveLevel() == logging.DEBUG assert asyncio.get_event_loop().get_debug() @pytest.mark.skipif(sys.platform == "darwin", reason="No idea why these are failing") class TestMainByWatcher: def test_tls(self, temp_event_loop): with watcher_process(watch_for_tls) as retq: temp_event_loop.call_later(AUTOSTOP_DELAY, temp_event_loop.stop) main_n("--tlscert", str(SERVER_CRT), "--tlskey", str(SERVER_KEY)) catchup_delay() has_starttls = retq.get() assert has_starttls is True require_tls = retq.get() assert require_tls is True def test_tls_noreq(self, temp_event_loop): with watcher_process(watch_for_tls) as retq: temp_event_loop.call_later(AUTOSTOP_DELAY, temp_event_loop.stop) main_n( "--tlscert", str(SERVER_CRT), "--tlskey", str(SERVER_KEY), "--no-requiretls", ) catchup_delay() has_starttls = retq.get() assert has_starttls is True require_tls = retq.get() assert require_tls is False def test_smtps(self, temp_event_loop): with watcher_process(watch_for_smtps) as retq: temp_event_loop.call_later(AUTOSTOP_DELAY, temp_event_loop.stop) main_n("--smtpscert", str(SERVER_CRT), "--smtpskey", str(SERVER_KEY)) catchup_delay() has_smtps = retq.get() assert has_smtps is True class TestParseArgs: def test_defaults(self): parser, args = parseargs(tuple()) assert args.classargs == tuple() assert args.classpath == "aiosmtpd.handlers.Debugging" assert args.debug == 0 assert isinstance(args.handler, Debugging) assert args.host == "localhost" assert args.listen is None assert args.port == 8025 assert args.setuid is True assert args.size is None assert args.smtputf8 is False assert args.smtpscert is None assert args.smtpskey is None assert args.tlscert is None assert args.tlskey is None assert args.requiretls is True def test_handler_from_cli(self): parser, args = parseargs( ("-c", "aiosmtpd.tests.test_main.FromCliHandler", "--", "FOO") ) assert isinstance(args.handler, FromCliHandler) assert args.handler.called == "FOO" def test_handler_no_from_cli(self): parser, args = parseargs(("-c", "aiosmtpd.tests.test_main.NullHandler")) assert isinstance(args.handler, NullHandler) def test_handler_from_cli_exception(self): with pytest.raises(TypeError): parseargs(("-c", "aiosmtpd.tests.test_main.FromCliHandler", "FOO", "BAR")) def test_handler_no_from_cli_exception(self, capsys): with pytest.raises(SystemExit) as excinfo: parseargs(("-c", "aiosmtpd.tests.test_main.NullHandler", "FOO", "BAR")) assert excinfo.value.code == 2 assert ( "Handler class aiosmtpd.tests.test_main takes no arguments" in capsys.readouterr().err ) @pytest.mark.parametrize( ("args", "exp_host", "exp_port"), [ ((), "localhost", 8025), (("-l", "foo:25"), "foo", 25), (("--listen", "foo:25"), "foo", 25), (("-l", "foo"), "foo", 8025), (("-l", ":25"), "localhost", 25), (("-l", "::0:25"), "::0", 25), ], ) def test_host_port(self, args, exp_host, exp_port): parser, args_ = parseargs(args=args) assert args_.host == exp_host assert args_.port == exp_port def test_bad_port_number(self, capsys): with pytest.raises(SystemExit) as excinfo: parseargs(("-l", ":foo")) assert excinfo.value.code == 2 assert "Invalid port number: foo" in capsys.readouterr().err @pytest.mark.parametrize("opt", ["--version", "-v"]) def test_version(self, 
capsys, mocker, opt): mocker.patch("aiosmtpd.main.PROGRAM", "smtpd") with pytest.raises(SystemExit) as excinfo: parseargs((opt,)) assert excinfo.value.code == 0 assert capsys.readouterr().out == f"smtpd {__version__}\n" @pytest.mark.parametrize("args", [("--smtpscert", "x"), ("--smtpskey", "x")]) def test_smtps(self, capsys, mocker, args): mocker.patch("aiosmtpd.main.PROGRAM", "smtpd") with pytest.raises(SystemExit) as exc: parseargs(args) assert exc.value.code == 2 assert ( "--smtpscert and --smtpskey must be specified together" in capsys.readouterr().err ) @pytest.mark.parametrize("args", [("--tlscert", "x"), ("--tlskey", "x")]) def test_tls(self, capsys, mocker, args): mocker.patch("aiosmtpd.main.PROGRAM", "smtpd") with pytest.raises(SystemExit) as exc: parseargs(args) assert exc.value.code == 2 assert ( "--tlscert and --tlskey must be specified together" in capsys.readouterr().err ) def test_norequiretls(self, capsys, mocker): mocker.patch("aiosmtpd.main.PROGRAM", "smtpd") parser, args = parseargs(("--no-requiretls",)) assert args.requiretls is False @pytest.mark.parametrize( ("certfile", "keyfile", "expect"), [ ("x", "x", "Cert file x not found"), (SERVER_CRT, "x", "Key file x not found"), ("x", SERVER_KEY, "Cert file x not found"), ], ids=["x-x", "cert-x", "x-key"], ) @pytest.mark.parametrize("meth", ["smtps", "tls"]) def test_ssl_files_err(self, capsys, mocker, meth, certfile, keyfile, expect): mocker.patch("aiosmtpd.main.PROGRAM", "smtpd") with pytest.raises(SystemExit) as exc: parseargs((f"--{meth}cert", certfile, f"--{meth}key", keyfile)) assert exc.value.code == 2 assert expect in capsys.readouterr().err class TestSigint: def test_keyboard_interrupt(self, temp_event_loop): """main() must close loop gracefully on KeyboardInterrupt.""" def interrupt(): raise KeyboardInterrupt temp_event_loop.call_later(1.0, interrupt) try: main_n() except Exception: pytest.fail("main() should've closed cleanly without exceptions!") else: assert not temp_event_loop.is_running() aio-libs-aiosmtpd-b634d9b/aiosmtpd/tests/test_misc.py000066400000000000000000000030001462210711200227610ustar00rootroot00000000000000# Copyright 2014-2021 The aiosmtpd Developers # SPDX-License-Identifier: Apache-2.0 """Test other aspects of the server implementation.""" import asyncio import warnings from typing import Generator, Optional import pytest from aiosmtpd import _get_or_new_eventloop @pytest.fixture(scope="module") def close_existing_loop() -> Generator[Optional[asyncio.AbstractEventLoop], None, None]: loop: Optional[asyncio.AbstractEventLoop] with warnings.catch_warnings(): warnings.filterwarnings("error") try: loop = asyncio.get_event_loop() except (DeprecationWarning, RuntimeError): loop = None if loop: loop.stop() loop.close() asyncio.set_event_loop(None) yield loop else: yield None class TestInit: def test_create_new_if_none(self, close_existing_loop): old_loop = close_existing_loop loop: Optional[asyncio.AbstractEventLoop] loop = _get_or_new_eventloop() assert loop is not None assert loop is not old_loop assert isinstance(loop, asyncio.AbstractEventLoop) def test_not_create_new_if_exist(self, close_existing_loop): old_loop = close_existing_loop loop: Optional[asyncio.AbstractEventLoop] loop = asyncio.new_event_loop() assert loop is not old_loop asyncio.set_event_loop(loop) ret_loop = _get_or_new_eventloop() assert ret_loop is not old_loop assert ret_loop == loop assert ret_loop is loop 
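# A minimal sketch of the behaviour the two tests above pin down, assuming
# only the helper aiosmtpd._get_or_new_eventloop (everything else here is
# illustrative):
#
#     import asyncio
#     from aiosmtpd import _get_or_new_eventloop
#
#     asyncio.set_event_loop(None)
#     fresh = _get_or_new_eventloop()         # no loop set -> a new one is created
#
#     mine = asyncio.new_event_loop()
#     asyncio.set_event_loop(mine)
#     assert _get_or_new_eventloop() is mine  # an already-set loop is reused as-is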
aio-libs-aiosmtpd-b634d9b/aiosmtpd/tests/test_proxyprotocol.py000066400000000000000000001201051462210711200247770ustar00rootroot00000000000000# Copyright 2014-2021 The aiosmtpd Developers # SPDX-License-Identifier: Apache-2.0 import asyncio import errno import logging import operator import random import socket import struct import time from base64 import b64decode from contextlib import contextmanager, suppress from functools import partial from ipaddress import IPv4Address, IPv6Address from smtplib import SMTP as SMTPClient from smtplib import SMTPServerDisconnected from typing import Any, Callable, Dict, List, Optional import pytest from pytest_mock import MockFixture from aiosmtpd.handlers import Sink from aiosmtpd.proxy_protocol import ( V2_CMD, AF, PROTO, V2_SIGNATURE, AsyncReader, MalformedTLV, ProxyData, ProxyTLV, UnknownTypeTLV, get_proxy, ) from aiosmtpd.smtp import SMTP as SMTPServer from aiosmtpd.smtp import Session as SMTPSession from aiosmtpd.smtp import Envelope as SMTPEnvelope from aiosmtpd.tests.conftest import Global, controller_data, handler_data DEFAULT_AUTOCANCEL = 0.1 TIMEOUT_MULTIPLIER = 2.0 param = pytest.param parametrize = pytest.mark.parametrize random_port = partial(random.getrandbits, 16) TEST_TLV_DATA_1 = ( b"\x03\x00\x04Z\xfd\xc6\xff\x02\x00\tAUTHORITI\x05\x00\tUNIKUE_ID" b"\x20\x00D\x01\x00\x00\x00\x00!\x00\x07TLSv1.3%\x00\x07RSA4096" b"$\x00\nRSA-SHA256#\x00\x1bECDHE-RSA-AES256-CBC-SHA384" ) TEST_TLV_DATA_2 = ( b"\x03\x00\x04Z\xfd\xc6\xff\x02\x00\tAUTHORIT2\x05\x00\tUNIQUEID2" b"\x20\x00D\x01\x00\x00\x00\x00!\x00\x07TLSv1.3%\x00\x07RSA4096" b"$\x00\nRSA-SHA256#\x00\x1bECDHE-RSA-AES256-CBC-SHA384" b"\x30\x00\x09something" ) # This has a tail which is not part of PROXYv2 TEST_V2_DATA1_XTRA = b64decode( "DQoNCgANClFVSVQKIREAcn8AAAF/AAABsFJipQMABFT9xv8CAAlBVVRIT1JJVFkFAAlVTklRVUVf\n" "SUQgAEQBAAAAACEAB1RMU3YxLjIlAAdSU0E0MDk2JAAKUlNBLVNIQTI1NiMAG0VDREhFLVJTQS1B\n" "RVMyNTYtR0NNLVNIQTM4NFRlc3QgZGF0YSB0aGF0IGlzIG5vdCBwYXJ0IG9mIFBST1hZdjIuCg==\n" ) # The extra part is: # b"Test data that is not part of PROXYv2.\n" # This same as the above but no extraneous tail TEST_V2_DATA1_EXACT = b64decode( "DQoNCgANClFVSVQKIREAcn8AAAF/AAABsFJipQMABFT9xv8CAAlBVVRIT1JJVFkFAAlVTklRVUVf\n" "SUQgAEQBAAAAACEAB1RMU3YxLjIlAAdSU0E0MDk2JAAKUlNBLVNIQTI1NiMAG0VDREhFLVJTQS1B\n" "RVMyNTYtR0NNLVNIQTM4NA==\n" ) PUBLIC_V1_PATTERNS: Dict[str, bytes] = { "joaoreis81": b"PROXY TCP4 222.222.22.222 111.11.11.111 33504 25", "haproxydoc": b"PROXY TCP4 192.168.0.1 192.168.0.11 56324 443", "cloudflare4": b"PROXY TCP4 192.0.2.0 192.0.2.255 42300 443", "cloudflare6": ( b"PROXY TCP6 2001:db8:: 2001:db8:ffff:ffff:ffff:ffff:ffff:ffff 42300 443" ), "avinetworks": b"PROXY TCP4 12.97.16.194 136.179.21.69 31646 80", "googlecloud": b"PROXY TCP4 192.0.2.1 198.51.100.1 15221 110", } GOOD_V1_HANDSHAKE = b"PROXY TCP4 255.255.255.255 255.255.255.255 65535 65535\r\n" HANDSHAKES = { "v1": GOOD_V1_HANDSHAKE, "v2": TEST_V2_DATA1_EXACT, } class ProxyPeekerHandler(Sink): def __init__(self, retval: bool = True): self.called = False self.sessions: List[SMTPSession] = [] self.proxy_datas: List[ProxyData] = [] self.retval = retval async def handle_PROXY( self, server: SMTPServer, session: SMTPSession, envelope: SMTPEnvelope, proxy_data: ProxyData, ) -> bool: self.called = True self.sessions.append(session) self.proxy_datas.append(proxy_data) return self.retval @contextmanager def does_not_raise(): yield @pytest.fixture def setup_proxy_protocol( mocker: MockFixture, temp_event_loop: asyncio.AbstractEventLoop ) -> Callable: 
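    # Returns a factory ("getter") that the tests call to wire up an SMTPServer
    # around a mocked transport: everything the server writes back is collected
    # in `responses`, the handler is a ProxyPeekerHandler, and a one-second
    # proxy_protocol_timeout is applied.  The factory also attaches a `runner`
    # to the test object that drives protocol._handler_coroutine on the
    # temporary event loop, cancelling it after `stop_after` seconds.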
proxy_timeout = 1.0 responses = [] transport = mocker.Mock() transport.write = responses.append handler = ProxyPeekerHandler() loop = temp_event_loop def getter(test_obj: "_TestProxyProtocolCommon", *args, **kwargs): kwargs["loop"] = loop kwargs["proxy_protocol_timeout"] = proxy_timeout protocol = SMTPServer(handler, *args, **kwargs) protocol.connection_made(transport) def runner(stop_after: float = DEFAULT_AUTOCANCEL): loop.call_later(stop_after, protocol._handler_coroutine.cancel) with suppress(asyncio.CancelledError): loop.run_until_complete(protocol._handler_coroutine) test_obj.protocol = protocol test_obj.runner = runner test_obj.transport = transport return getter class _TestProxyProtocolCommon: protocol: SMTPServer = None runner = None transport = None class TestProxyData: def test_invalid_version(self): pd = ProxyData(version=None) assert not pd.valid assert not pd def test_invalid_error(self): pd = ProxyData(version=1) pd.error = "SomeError" assert not pd.valid assert not pd def test_invalid_protocol(self): pd = ProxyData(version=1) assert not pd.valid assert not pd def test_mismatch(self): pd = ProxyData(version=1) pd.protocol = PROTO.UNSPEC assert pd.valid assert pd assert not pd.same_attribs(protocol=PROTO.STREAM) def test_mismatch_raises(self): pd = ProxyData(version=1) pd.protocol = PROTO.UNSPEC assert pd.valid assert pd expectre = r"mismatch:protocol actual=.* expect=.*" with pytest.raises(ValueError, match=expectre): pd.same_attribs(_raises=True, protocol=PROTO.STREAM) def test_unsetkey(self): pd = ProxyData(version=1) pd.protocol = PROTO.UNSPEC assert pd.valid assert pd assert not pd.same_attribs(src_addr="Missing") def test_unknownkey(self): pd = ProxyData(version=1) pd.protocol = PROTO.UNSPEC assert pd.valid assert pd assert not pd.same_attribs(strange_key="Unrecognized") def test_unknownkey_raises(self): pd = ProxyData(version=1) pd.protocol = PROTO.UNSPEC assert pd.valid assert pd expectre = r"notfound:strange_key" with pytest.raises(KeyError, match=expectre): pd.same_attribs(_raises=True, strange_key="Unrecognized") def test_tlv_none(self): pd = ProxyData(version=2) assert pd.tlv is None def test_tlv_fake(self): pd = ProxyData(version=2) pd.rest = b"fake_tlv" # Must be something that fails parsing in ProxyTLV assert pd.tlv is None def test_tlv_1(self): pd = ProxyData(version=2) pd.rest = TEST_TLV_DATA_1 assert pd.tlv is not None assert pd.tlv.same_attribs( AUTHORITY=b"AUTHORITI", CRC32C=b"Z\xfd\xc6\xff", UNIQUE_ID=b"UNIKUE_ID", SSL=True, SSL_VERSION=b"TLSv1.3", SSL_CIPHER=b"ECDHE-RSA-AES256-CBC-SHA384", SSL_SIG_ALG=b"RSA-SHA256", SSL_KEY_ALG=b"RSA4096", ) class TestProxyTLV: def test_1(self): ptlv = ProxyTLV.from_raw(TEST_TLV_DATA_1) assert "ALPN" not in ptlv assert "NOOP" not in ptlv assert "SSL_CN" not in ptlv assert "NETNS" not in ptlv assert ptlv.ALPN is None assert ptlv.AUTHORITY == b"AUTHORITI" assert ptlv.same_attribs( AUTHORITY=b"AUTHORITI", CRC32C=b"Z\xfd\xc6\xff", UNIQUE_ID=b"UNIKUE_ID", SSL=True, SSL_VERSION=b"TLSv1.3", SSL_CIPHER=b"ECDHE-RSA-AES256-CBC-SHA384", SSL_SIG_ALG=b"RSA-SHA256", SSL_KEY_ALG=b"RSA4096", ) def test_1_ne(self): ptlv = ProxyTLV.from_raw(TEST_TLV_DATA_1) assert not ptlv.same_attribs(SSL=False) assert not ptlv.same_attribs(false_attrib=None) def test_1_ne_raises(self): ptlv = ProxyTLV.from_raw(TEST_TLV_DATA_1) expectre = r"mismatch:AUTHORITY actual=.* expect=.*" with pytest.raises(ValueError, match=expectre): ptlv.same_attribs(_raises=True, AUTHORITY=b"whut") expectre = r"notfound:i_dont_even" with pytest.raises(KeyError, 
match=expectre): ptlv.same_attribs(_raises=True, i_dont_even=b"what is this") def test_2(self): ptlv = ProxyTLV.from_raw(TEST_TLV_DATA_2) assert "ALPN" not in ptlv assert "NOOP" not in ptlv assert "SSL_CN" not in ptlv assert "NETNS" in ptlv assert ptlv.ALPN is None assert ptlv.AUTHORITY == b"AUTHORIT2" assert ptlv.UNIQUE_ID == b"UNIQUEID2" assert ptlv.same_attribs( CRC32C=b"Z\xfd\xc6\xff", UNIQUE_ID=b"UNIQUEID2", SSL=True, SSL_VERSION=b"TLSv1.3", SSL_CIPHER=b"ECDHE-RSA-AES256-CBC-SHA384", SSL_SIG_ALG=b"RSA-SHA256", SSL_KEY_ALG=b"RSA4096", NETNS=b"something", ) assert not ptlv.same_attribs(false_attrib=None) with pytest.raises(ValueError, match=r"mismatch:SSL"): ptlv.same_attribs(SSL=False, _raises=True) @parametrize( "typeint, typename", [ (0x01, "ALPN"), (0x02, "AUTHORITY"), (0x03, "CRC32C"), (0x04, "NOOP"), (0x05, "UNIQUE_ID"), (0x20, "SSL"), (0x21, "SSL_VERSION"), (0x22, "SSL_CN"), (0x23, "SSL_CIPHER"), (0x24, "SSL_SIG_ALG"), (0x25, "SSL_KEY_ALG"), (0x30, "NETNS"), (None, "wrongname"), ], ) def test_backmap(self, typename: str, typeint: int): assert ProxyTLV.name_to_num(typename) == typeint def test_parse_partial(self): TEST_TLV_DATA_1_TRUNCATED = ( b"\x03\x00\x04Z\xfd\xc6\xff\x02\x00\tAUTHORITI\x05\x00\tUNIKUE_I" ) rslt: Dict[str, Any] rslt, _ = ProxyTLV.parse(TEST_TLV_DATA_1_TRUNCATED) assert isinstance(rslt, dict) assert "CRC32C" in rslt def test_unknowntype_notstrict(self): test_data = b"\xFF\x00\x04yeah" rslt: Dict[str, Any] rslt, _ = ProxyTLV.parse(test_data, strict=False) assert "xFF" in rslt assert rslt["xFF"] == b"yeah" def test_unknowntype_strict(self): test_data = b"\xFF\x00\x04yeah" with pytest.raises(UnknownTypeTLV): _ = ProxyTLV.parse(test_data, strict=True) def test_malformed_ssl_partialok(self): test_data = ( b"\x20\x00\x17\x01\x02\x03\x04\x05\x21\x00\x07version\x22\x00\x09trunc" ) rslt: Dict[str, Any] rslt, _ = ProxyTLV.parse(test_data, partial_ok=True) assert rslt["SSL"] is False assert rslt["SSL_VERSION"] == b"version" assert "SSL_CN" not in rslt def test_malformed_ssl_notpartialok(self): test_data = ( b"\x20\x00\x0D\x01\x02\x03\x04\x05\x21\x00\x07version\x22\x00\x09trunc" ) with pytest.raises(MalformedTLV): _ = ProxyTLV.parse(test_data, partial_ok=False) def test_eq(self): data1 = b"\x03\x00\x04Z\xfd\xc6\xff\x02\x00\tAUTHORITI" ptlv1 = ProxyTLV.from_raw(data1) data2 = b"\x02\x00\tAUTHORITI\x03\x00\x04Z\xfd\xc6\xff" ptlv2 = ProxyTLV.from_raw(data2) assert ptlv1 is not ptlv2 assert ptlv1 == ptlv2 class TestModule: class MockAsyncReader(AsyncReader): def __init__(self, data, timeout=0.4): self.data = bytearray(data) self.timeout = 0.4 async def read(self, num_bytes: Optional[int] = None) -> bytes: emit = self.data[0:num_bytes] del self.data[0:num_bytes] return emit async def readexactly(self, n: int) -> bytes: emit = self.data[0:n] del self.data[0:n] if len(emit) < n: await asyncio.sleep(self.timeout) return emit async def readuntil(self, until_chars: Optional[bytes] = None) -> bytes: if until_chars is None: until_chars = b"\n" emit = bytearray() _count = 0 for _count, char in enumerate(self.data, start=1): emit += char.to_bytes(1, "big") if char in until_chars: break del self.data[0:_count] return emit @parametrize("handshake", HANDSHAKES.values(), ids=HANDSHAKES.keys()) def test_get( self, caplog: pytest.LogCaptureFixture, temp_event_loop: asyncio.AbstractEventLoop, handshake: bytes, ): caplog.set_level(logging.DEBUG) mock_reader = self.MockAsyncReader(handshake) reslt = temp_event_loop.run_until_complete(get_proxy(mock_reader)) assert isinstance(reslt, ProxyData) 
assert reslt.valid def test_get_cut_v1( self, caplog: pytest.LogCaptureFixture, temp_event_loop: asyncio.AbstractEventLoop, ): caplog.set_level(logging.DEBUG) mock_reader = self.MockAsyncReader(GOOD_V1_HANDSHAKE[0:20]) reslt = temp_event_loop.run_until_complete(get_proxy(mock_reader)) assert isinstance(reslt, ProxyData) assert not reslt.valid assert reslt.error == "PROXYv1 malformed" expect = ("mail.debug", 30, "PROXY error: PROXYv1 malformed") assert expect in caplog.record_tuples def test_get_cut_v2( self, caplog: pytest.LogCaptureFixture, temp_event_loop: asyncio.AbstractEventLoop, ): caplog.set_level(logging.DEBUG) mock_reader = self.MockAsyncReader(TEST_V2_DATA1_EXACT[0:20]) reslt = temp_event_loop.run_until_complete(get_proxy(mock_reader)) assert isinstance(reslt, ProxyData) assert not reslt.valid expect_msg = "PROXY exception: Connection lost while waiting for tail part" assert reslt.error == expect_msg expect = ("mail.debug", 30, expect_msg) assert expect in caplog.record_tuples def test_get_invalid_sig( self, caplog: pytest.LogCaptureFixture, temp_event_loop: asyncio.AbstractEventLoop, ): caplog.set_level(logging.DEBUG) mock_reader = self.MockAsyncReader(b"PROXI TCP4 1.2.3.4 5.6.7.8 9 10\r\n") reslt = temp_event_loop.run_until_complete(get_proxy(mock_reader)) assert isinstance(reslt, ProxyData) assert not reslt.valid expect_msg = "PROXY unrecognized signature" assert reslt.error == expect_msg expect = ("mail.debug", 30, "PROXY error: " + expect_msg) assert expect in caplog.record_tuples class TestSMTPInit: @parametrize("value", [int(-1), float(-1.0), int(0), float(0.0)]) def test_value_error(self, temp_event_loop, value): with pytest.raises(ValueError, match=r"proxy_protocol_timeout must be > 0"): _ = SMTPServer(Sink(), proxy_protocol_timeout=value, loop=temp_event_loop) def test_lt_3(self, caplog, temp_event_loop): _ = SMTPServer(Sink(), proxy_protocol_timeout=1, loop=temp_event_loop) expect = ("mail.log", logging.WARNING, "proxy_protocol_timeout < 3.0") assert expect in caplog.record_tuples @parametrize("value", [int(3), float(3.0), int(4), float(4.0)]) def test_ge_3(self, caplog, temp_event_loop, value): _ = SMTPServer(Sink(), proxy_protocol_timeout=value, loop=temp_event_loop) expect = ("mail.log", logging.WARNING, "proxy_protocol_timeout < 3.0") assert expect not in caplog.record_tuples class TestGetV1(_TestProxyProtocolCommon): def test_noproxy(self, setup_proxy_protocol): setup_proxy_protocol(self) data = b"HELO example.org\r\n" self.protocol.data_received(data) self.runner() assert self.transport.close.called @parametrize("patt", PUBLIC_V1_PATTERNS.values(), ids=PUBLIC_V1_PATTERNS.keys()) def test_valid_patterns(self, setup_proxy_protocol: Callable, patt: bytes): if not patt.endswith(b"\r\n"): patt += b"\r\n" setup_proxy_protocol(self) self.protocol.data_received(patt) self.runner() sess: SMTPSession = self.protocol.session assert sess.proxy_data.error == "" handler = self.protocol.event_handler assert handler.called def _assert_valid(self, af, proto, srcip, dstip, srcport, dstport, testline): self.protocol.data_received(testline.encode("ascii")) self.runner() assert self.protocol.session.proxy_data.error == "" handler = self.protocol.event_handler assert handler.called proxy_data = handler.proxy_datas[-1] ipaddr = IPv4Address if af == AF.INET else IPv6Address assert proxy_data.same_attribs( valid=True, version=1, family=af, protocol=proto, src_addr=ipaddr(srcip), dst_addr=ipaddr(dstip), src_port=srcport, dst_port=dstport, ) def test_tcp4(self, setup_proxy_protocol): srcip 
= "1.2.3.4" dstip = "5.6.7.8" srcport = 0 dstport = 65535 prox_test = f"PROXY TCP4 {srcip} {dstip} {srcport} {dstport}\r\n" setup_proxy_protocol(self) self._assert_valid( AF.INET, PROTO.STREAM, srcip, dstip, srcport, dstport, prox_test ) def test_tcp4_random(self, setup_proxy_protocol): setup_proxy_protocol(self) srcip = ".".join(f"{random.getrandbits(8)}" for _ in range(0, 4)) dstip = ".".join(f"{random.getrandbits(8)}" for _ in range(0, 4)) srcport = random_port() dstport = random_port() prox_test = f"PROXY TCP4 {srcip} {dstip} {srcport} {dstport}\r\n" self._assert_valid( AF.INET, PROTO.STREAM, srcip, dstip, srcport, dstport, prox_test ) def test_tcp6_shortened(self, setup_proxy_protocol): srcip = "2020:dead::0001" dstip = "2021:cafe::0002" srcport = 8000 dstport = 65535 prox_test = f"PROXY TCP6 {srcip} {dstip} {srcport} {dstport}\r\n" setup_proxy_protocol(self) self._assert_valid( AF.INET6, PROTO.STREAM, srcip, dstip, srcport, dstport, prox_test ) def test_tcp6_random(self, setup_proxy_protocol): srcip = ":".join(f"{random.getrandbits(16):04x}" for _ in range(0, 8)) dstip = ":".join(f"{random.getrandbits(16):04x}" for _ in range(0, 8)) srcport = random_port() dstport = random_port() prox_test = f"PROXY TCP6 {srcip} {dstip} {srcport} {dstport}\r\n" setup_proxy_protocol(self) self._assert_valid( AF.INET6, PROTO.STREAM, srcip, dstip, srcport, dstport, prox_test ) def test_unknown(self, setup_proxy_protocol): prox_test = "PROXY UNKNOWN whatever\r\n" setup_proxy_protocol(self) self.protocol.data_received(prox_test.encode("ascii")) self.runner() handler = self.protocol.event_handler assert handler.called proxy_data = handler.proxy_datas[0] assert proxy_data.same_attribs( valid=True, version=1, family=AF.UNSPEC, protocol=PROTO.UNSPEC, rest=b" whatever", ) def test_unknown_short(self, setup_proxy_protocol): prox_test = "PROXY UNKNOWN\r\n" setup_proxy_protocol(self) self.protocol.data_received(prox_test.encode("ascii")) self.runner() handler = self.protocol.event_handler assert handler.called proxy_data = handler.proxy_datas[0] assert proxy_data.same_attribs( valid=True, version=1, family=AF.UNSPEC, protocol=PROTO.UNSPEC, rest=b"", ) def _assert_invalid(self, testline: str, expect_err: str = ""): self.protocol.data_received(testline.encode("ascii")) self.runner() handler: ProxyPeekerHandler = self.protocol.event_handler assert not self.protocol.session.proxy_data.valid assert not handler.called assert self.transport.close.called assert self.protocol.session.proxy_data.error == expect_err def test_invalid_sig(self, setup_proxy_protocol): prox_test = "PROXY1 UNKNOWN whatevs\r\n" setup_proxy_protocol(self) self._assert_invalid(prox_test, "PROXYv1 wrong signature") def test_unsupported_family(self, setup_proxy_protocol): prox_test = "PROXY TCP5 123.123.123.123 231.231.231.231 80 90\r\n" setup_proxy_protocol(self) self._assert_invalid(prox_test, "PROXYv1 unrecognized family") prox_test = "PROXY TCP 123.123.123.123 231.231.231.231 80 90\r\n" setup_proxy_protocol(self) self._assert_invalid(prox_test, "PROXYv1 unrecognized family") def test_unsupported_proto(self, setup_proxy_protocol): prox_test = "PROXY UDP4 123.123.123.123 231.231.231.231 80 90\r\n" setup_proxy_protocol(self) self._assert_invalid(prox_test, "PROXYv1 unrecognized protocol") def test_too_long(self, setup_proxy_protocol): prox_test = "PROXY UNKNOWN " + "*" * 100 + "\r\n" setup_proxy_protocol(self) self._assert_invalid(prox_test, "PROXYv1 header too long") def test_malformed_nocr(self, setup_proxy_protocol): prox_test = "PROXY UNKNOWN\n" 
setup_proxy_protocol(self) self._assert_invalid(prox_test, "PROXYv1 malformed") def test_malformed_notproxy(self, setup_proxy_protocol): srcip = "1.2.3.4" dstip = "5.6.7.8" srcport = 65535 dstport = 65535 prox_test = f"NOTPROX TCP4 {srcip} {dstip} {srcport} {dstport}\r\n" setup_proxy_protocol(self) self._assert_invalid(prox_test, "PROXY unrecognized signature") def test_malformed_wrongtype_64(self, setup_proxy_protocol): srcip = "1.2.3.4" dstip = "5.6.7.8" srcport = 65535 dstport = 65535 prox_test = f"PROXY TCP6 {srcip} {dstip} {srcport} {dstport}\r\n" setup_proxy_protocol(self) self._assert_invalid(prox_test, "PROXYv1 address not IPv6") def test_malformed_wrongtype_46(self, setup_proxy_protocol): srcip = "2020:dead::0001" dstip = "2021:cafe::0002" srcport = 65535 dstport = 65535 prox_test = f"PROXY TCP4 {srcip} {dstip} {srcport} {dstport}\r\n" setup_proxy_protocol(self) self._assert_invalid(prox_test, "PROXYv1 address not IPv4") def test_malformed_wrongtype_6mixed(self, setup_proxy_protocol): srcip = "1.2.3.4" dstip = "2021:cafe::0002" srcport = 65535 dstport = 65535 prox_test = f"PROXY TCP6 {srcip} {dstip} {srcport} {dstport}\r\n" setup_proxy_protocol(self) self._assert_invalid(prox_test, "PROXYv1 address not IPv6") IP6_dead = "2020:dead::0001" IP6_cafe = "2021:cafe::0002" @parametrize( "srcip, dstip, srcport, dstport, whatwrong", [ param(IP6_dead, IP6_cafe, "02501", None, "port", id="zeroleader"), param(" " + IP6_dead, IP6_cafe, None, None, "address", id="space1"), param(IP6_dead, " " + IP6_cafe, None, None, "address", id="space2"), param(IP6_dead, IP6_cafe, " 8080", None, "port", id="space3"), param(IP6_dead, IP6_cafe, None, " 0", "port", id="space4"), param(IP6_dead[:-1] + "g", IP6_cafe, None, None, "address", id="addr6s"), param(IP6_dead, IP6_cafe[:-1] + "h", None, None, "address", id="addr6d"), ], ) def test_malformed_addr( self, setup_proxy_protocol, srcip, dstip, srcport, dstport, whatwrong ): if srcport is None: srcport = random_port() if dstport is None: dstport = random_port() prox_test = f"PROXY TCP6 {srcip} {dstip} {srcport} {dstport}\r\n" setup_proxy_protocol(self) self._assert_invalid(prox_test, f"PROXYv1 {whatwrong} malformed") @parametrize( "extra", [ param(" ", id="space"), param(" text", id="sptext"), ], ) def test_extra(self, setup_proxy_protocol, extra): prox_test = f"PROXY TCP6 {self.IP6_dead} {self.IP6_cafe} 0 25{extra}\r\n" setup_proxy_protocol(self) self._assert_invalid(prox_test, "PROXYv1 unrecognized extraneous data") def test_malformed_addr4(self, setup_proxy_protocol): srcip = "1.2.3.a" dstip = "5.6.7.8" srcport = 65535 dstport = 65535 prox_test = f"PROXY TCP6 {srcip} {dstip} {srcport} {dstport}\r\n" setup_proxy_protocol(self) self._assert_invalid(prox_test, "PROXYv1 address malformed") def test_ports_oob(self, setup_proxy_protocol): srcip = "1.2.3.4" dstip = "5.6.7.8" srcport = 65536 dstport = 10200 prox_test = f"PROXY TCP4 {srcip} {dstip} {srcport} {dstport}\r\n" setup_proxy_protocol(self) self._assert_invalid(prox_test, "PROXYv1 src port out of bounds") def test_portd_oob(self, setup_proxy_protocol): srcip = "2020:dead::0001" dstip = "2021:cafe::0002" srcport = 10000 dstport = 65536 prox_test = f"PROXY TCP6 {srcip} {dstip} {srcport} {dstport}\r\n" setup_proxy_protocol(self) self._assert_invalid(prox_test, "PROXYv1 dst port out of bounds") class TestGetV2(_TestProxyProtocolCommon): def test_1(self, setup_proxy_protocol): setup_proxy_protocol(self) self.protocol.data_received(TEST_V2_DATA1_XTRA) self.runner() sess: SMTPSession = self.protocol.session assert 
sess.proxy_data.error == "" handler: ProxyPeekerHandler = self.protocol.event_handler assert handler.called pd: ProxyData = handler.proxy_datas[-1] assert pd.same_attribs( version=2, command=1, family=1, src_addr=IPv4Address("127.0.0.1"), dst_addr=IPv4Address("127.0.0.1"), src_port=45138, dst_port=25253, ) assert b"not part of PROXYv2" not in pd.rest assert pd.tlv assert pd.tlv.same_attribs( AUTHORITY=b"AUTHORITY", CRC32C=b"T\xfd\xc6\xff", UNIQUE_ID=b"UNIQUE_ID", SSL=True, SSL_VERSION=b"TLSv1.2", SSL_CIPHER=b"ECDHE-RSA-AES256-GCM-SHA384", SSL_SIG_ALG=b"RSA-SHA256", SSL_KEY_ALG=b"RSA4096", ) def _send_valid( self, cmd: V2_CMD, fam: AF, proto: PROTO, payload: bytes ) -> ProxyData: ver_cmd = 0x20 + cmd.value fam_pro = (fam << 4) + proto to_send = bytes( V2_SIGNATURE + ver_cmd.to_bytes(1, "big") + fam_pro.to_bytes(1, "big") + len(payload).to_bytes(2, "big") + payload ) self.protocol.data_received(to_send) self.runner() assert self.protocol.session.proxy_data.error == "" assert self.protocol.session.proxy_data.whole_raw == to_send handler = self.protocol.event_handler assert handler.called return handler.proxy_datas[-1] def test_UNSPEC_empty(self, setup_proxy_protocol): setup_proxy_protocol(self) assert self._send_valid( V2_CMD.LOCAL, AF.UNSPEC, PROTO.UNSPEC, b"" ).same_attribs(valid=True, version=2, command=0, family=0, protocol=0, rest=b"") def test_UNSPEC_notempty(self, setup_proxy_protocol): setup_proxy_protocol(self) payload = b"asdfghjkl" assert self._send_valid( V2_CMD.LOCAL, AF.UNSPEC, PROTO.UNSPEC, payload ).same_attribs( valid=True, version=2, command=0, family=0, protocol=0, rest=payload ) @parametrize("ttlv", [b"", b"fake_tlv"]) @parametrize("tproto", [PROTO.STREAM, PROTO.DGRAM]) def test_INET4(self, setup_proxy_protocol, tproto, ttlv): setup_proxy_protocol(self) src_addr = IPv4Address("10.212.4.33") dst_addr = IPv4Address("10.11.12.13") src_port = 0 dst_port = 65535 payload = ( src_addr.packed + dst_addr.packed + src_port.to_bytes(2, "big") + dst_port.to_bytes(2, "big") + ttlv ) pd = self._send_valid(V2_CMD.LOCAL, AF.INET, tproto, payload) assert pd.same_attribs( valid=True, version=2, command=0, family=AF.INET, protocol=tproto, src_addr=src_addr, dst_addr=dst_addr, src_port=src_port, dst_port=dst_port, rest=ttlv, ) assert pd.tlv is None @parametrize("ttlv", [b"", b"fake_tlv"]) @parametrize("tproto", [PROTO.STREAM, PROTO.DGRAM]) def test_INET6(self, setup_proxy_protocol, tproto, ttlv): setup_proxy_protocol(self) src_addr = IPv6Address("2020:dead::0001") dst_addr = IPv6Address("2021:cafe::0022") src_port = 65534 dst_port = 8080 payload = ( src_addr.packed + dst_addr.packed + src_port.to_bytes(2, "big") + dst_port.to_bytes(2, "big") + ttlv ) pd = self._send_valid(V2_CMD.LOCAL, AF.INET6, tproto, payload) assert pd.same_attribs( valid=True, version=2, command=0, family=2, protocol=tproto, src_addr=src_addr, dst_addr=dst_addr, src_port=src_port, dst_port=dst_port, rest=ttlv, ) assert pd.tlv is None @parametrize("ttlv", [b"", b"fake_tlv"]) @parametrize("tproto", [PROTO.STREAM, PROTO.DGRAM]) def test_UNIX(self, setup_proxy_protocol, tproto, ttlv): setup_proxy_protocol(self) src_addr = struct.pack("108s", b"/proc/source") dst_addr = struct.pack("108s", b"/proc/dest") payload = src_addr + dst_addr + ttlv pd = self._send_valid(V2_CMD.LOCAL, AF.UNIX, tproto, payload) assert pd.same_attribs( valid=True, version=2, command=0, family=3, protocol=tproto, src_addr=src_addr, dst_addr=dst_addr, src_port=None, dst_port=None, rest=ttlv, ) assert pd.tlv is None @parametrize( "tfam, tproto", [ 
(AF.UNSPEC, PROTO.STREAM), (AF.UNSPEC, PROTO.DGRAM), (AF.INET, PROTO.UNSPEC), (AF.INET6, PROTO.UNSPEC), (AF.UNIX, PROTO.UNSPEC), ], ) def test_fallback_UNSPEC(self, setup_proxy_protocol, tfam, tproto): setup_proxy_protocol(self) payload = b"whatever" assert self._send_valid(V2_CMD.LOCAL, tfam, tproto, payload).same_attribs( valid=True, version=2, command=0, family=tfam, protocol=tproto, src_addr=None, dst_addr=None, src_port=None, dst_port=None, rest=payload, ) def _send_invalid( self, sig: bytes = V2_SIGNATURE, ver: int = 2, cmd: int = 0, fam: int = 0, proto: int = 0, payload: bytes = b"", expect: Optional[str] = None, ) -> ProxyData: ver_cmd = (ver << 4) | cmd fam_pro = (fam << 4) | proto self.protocol.data_received( sig + ver_cmd.to_bytes(1, "big") + fam_pro.to_bytes(1, "big") + len(payload).to_bytes(2, "big") + payload ) self.runner() handler = self.protocol.event_handler assert not handler.called assert not self.protocol.session.proxy_data if expect is not None: assert self.protocol.session.proxy_data.error == expect return self.protocol.session.proxy_data def test_invalid_sig(self, setup_proxy_protocol): setup_proxy_protocol(self) ERRSIG = b"\r\n\r\n\x00\r\nQUIP\n" self._send_invalid(sig=ERRSIG, expect="PROXYv2 wrong signature") # Using readexactly() instead of just read() causes incomplete handshake to time- # out as readexactly() waits until the number of bytes is complete. # So, we can no longer look for this error. # # def test_incomplete(self, setup_proxy_protocol): # setup_proxy_protocol(self) # self.protocol.data_received(V2_SIGNATURE + b"\x20" + b"\x00" + b"\x00") # self.runner() # assert self.transport.close.called # handler = self.protocol.event_handler # assert not handler.called # sess: SMTPSession = self.protocol.session # assert not sess.proxy_data.valid # assert sess.proxy_data.error == "PROXYv2 malformed header" def test_illegal_ver(self, setup_proxy_protocol): setup_proxy_protocol(self) self._send_invalid(ver=3, expect="PROXYv2 illegal version") def test_unsupported_cmd(self, setup_proxy_protocol): setup_proxy_protocol(self) self._send_invalid(cmd=2, expect="PROXYv2 unsupported command") def test_unsupported_fam(self, setup_proxy_protocol): setup_proxy_protocol(self) self._send_invalid(fam=4, expect="PROXYv2 unsupported family") def test_unsupported_proto(self, setup_proxy_protocol): setup_proxy_protocol(self) self._send_invalid(proto=3, expect="PROXYv2 unsupported protocol") def test_wrong_proto_6shouldbe4(self, setup_proxy_protocol): setup_proxy_protocol(self) src_addr = IPv4Address("192.168.0.11") dst_addr = IPv4Address("172.16.0.22") src_port = 65534 dst_port = 8080 payload = ( src_addr.packed + dst_addr.packed + src_port.to_bytes(2, "big") + dst_port.to_bytes(2, "big") ) self._send_invalid( fam=2, proto=1, payload=payload, expect="PROXYv2 truncated address" ) @controller_data(proxy_protocol_timeout=0.3) @handler_data(class_=ProxyPeekerHandler) class TestWithController: def _okay(self, handshake: bytes): with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock: sock.connect(Global.SrvAddr) sock.sendall(handshake) resp = sock.makefile("rb").readline() assert resp.startswith(b"220 ") with SMTPClient() as client: client.sock = sock code, mesg = client.ehlo("example.org") assert code == 250 code, mesg = client.quit() assert code == 221 @parametrize("handshake", HANDSHAKES.values(), ids=HANDSHAKES.keys()) def test_okay(self, plain_controller, handshake): assert plain_controller.smtpd._proxy_timeout > 0.0 self._okay(handshake) @parametrize("handshake", 
HANDSHAKES.values(), ids=HANDSHAKES.keys())
    def test_hiccup(self, plain_controller, handshake):
        assert plain_controller.smtpd._proxy_timeout > 0.0
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
            sock.connect(Global.SrvAddr)
            sock.sendall(handshake[0:20])
            time.sleep(0.1)
            sock.sendall(handshake[20:])
            resp = sock.makefile("rb").readline()
            assert resp.startswith(b"220 ")
            with SMTPClient() as client:
                client.sock = sock
                code, mesg = client.ehlo("example.org")
                assert code == 250
                code, mesg = client.quit()
                assert code == 221

    @parametrize("handshake", HANDSHAKES.values(), ids=HANDSHAKES.keys())
    def test_timeout(self, plain_controller, handshake):
        assert plain_controller.smtpd._proxy_timeout > 0.0
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
            sock.connect(Global.SrvAddr)
            time.sleep(plain_controller.smtpd._proxy_timeout * TIMEOUT_MULTIPLIER)
            # noinspection PyTypeChecker
            with pytest.raises(ConnectionError):
                sock.send(handshake)
                resp = sock.recv(4096)
                if resp == b"":
                    raise ConnectionError
            # Try resending the handshake. Should also fail (because connection has
            # been closed by the server).
            # noinspection PyTypeChecker
            with pytest.raises(OSError) as exc_info:  # noqa: PT011
                sock.send(handshake)
                resp = sock.recv(4096)
                if resp == b"":
                    raise ConnectionAbortedError
            # MacOS sometimes raises EPROTOTYPE, which won't result in ConnectionError
            # but in OSError(errno=EPROTOTYPE). Let's check that here.
            # Refs:
            # - https://github.com/racitup/static-ranges/issues/1
            # - https://github.com/benoitc/gunicorn/issues/1487
            # - http://erickt.github.io/blog/2014/11/19/adventures-in-debugging-\
            #   a-potential-osx-kernel-bug/
            exc = exc_info.value
            if isinstance(exc, ConnectionError):
                pass
            else:
                assert exc.errno in (errno.EPROTOTYPE, errno.EPIPE)
        # Assert that we can connect properly afterwards (that is, server is not
        # terminated)
        self._okay(handshake)

    @parametrize("handshake", HANDSHAKES.values(), ids=HANDSHAKES.keys())
    def test_incomplete(self, plain_controller, handshake):
        assert plain_controller.smtpd._proxy_timeout > 0.0
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
            sock.connect(Global.SrvAddr)
            sock.send(handshake[:-1])
            time.sleep(plain_controller.smtpd._proxy_timeout * TIMEOUT_MULTIPLIER)
            # noinspection PyTypeChecker
            with pytest.raises(ConnectionError):
                sock.send(b"\n")
                resp = sock.recv(4096)  # On Windows, this line raises
                if resp == b"":  # On Linux, no raise, just "EOF"
                    raise ConnectionError
            # Try resending the handshake. Should also fail (because connection has
            # been closed by the server).
            # noinspection PyTypeChecker
            with pytest.raises(OSError) as exc_info:  # noqa: PT011
                sock.send(handshake)
                resp = sock.recv(4096)
                if resp == b"":
                    raise ConnectionError
            # MacOS sometimes raises EPROTOTYPE, which won't result in ConnectionError
            # but in OSError(errno=EPROTOTYPE). Let's check that here.
# Refs: # - https://github.com/racitup/static-ranges/issues/1 # - https://github.com/benoitc/gunicorn/issues/1487 # - http://erickt.github.io/blog/2014/11/19/adventures-in-debugging-\ # a-potential-osx-kernel-bug/ exc = exc_info.value if isinstance(exc, ConnectionError): pass else: assert exc.errno in (errno.EPROTOTYPE, errno.EPIPE) # Assert that we can connect properly afterwards (that is, server is not # terminated) self._okay(handshake) @controller_data(proxy_protocol_timeout=0.3) @handler_data(class_=ProxyPeekerHandler) class TestHandlerAcceptReject: # We test *both* Accept *and* reject to ensure that the handshakes are valid @parametrize("handler_retval", [True, False]) @parametrize( "handshake", [ b"PROXY TCP4 255.255.255.255 255.255.255.255 65535 65535\r\n", TEST_V2_DATA1_EXACT, ], ids=["v1", "v2"], ) def test_simple(self, plain_controller, handshake, handler_retval): assert plain_controller.smtpd._proxy_timeout > 0.0 assert isinstance(plain_controller.handler, ProxyPeekerHandler) plain_controller.handler.retval = handler_retval if handler_retval: oper = operator.ne # See "Parametrizing conditional raising" in # https://docs.pytest.org/en/stable/example/parametrize.html expect = does_not_raise() else: oper = operator.eq expect = pytest.raises(SMTPServerDisconnected) with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock: sock.connect(Global.SrvAddr) sock.sendall(handshake) resp = sock.recv(4096) assert oper(resp, b"") with expect, SMTPClient() as client: client.sock = sock code, mesg = client.ehlo("example.org") assert code == 250 aio-libs-aiosmtpd-b634d9b/aiosmtpd/tests/test_server.py000066400000000000000000000554001462210711200233470ustar00rootroot00000000000000# Copyright 2014-2021 The aiosmtpd Developers # SPDX-License-Identifier: Apache-2.0 """Test other aspects of the server implementation.""" import asyncio import errno import platform import socket import sys import time from contextlib import ExitStack from functools import partial from threading import Event from pathlib import Path from smtplib import SMTP as SMTPClient, SMTPServerDisconnected from tempfile import mkdtemp from threading import Thread from typing import Generator, Optional import pytest from pytest_mock import MockFixture from aiosmtpd.controller import ( Controller, UnixSocketController, UnthreadedController, UnixSocketMixin, UnixSocketUnthreadedController, _FakeServer, get_localhost, _server_to_client_ssl_ctx, ) from aiosmtpd.handlers import Sink from aiosmtpd.smtp import SMTP as Server from aiosmtpd.testing.helpers import catchup_delay from .conftest import Global, AUTOSTOP_DELAY class SlowStartController(Controller): def __init__(self, *args, **kwargs): kwargs.setdefault("ready_timeout", 0.5) super().__init__(*args, **kwargs) def _run(self, ready_event: Event): time.sleep(self.ready_timeout * 1.5) super()._run(ready_event) class SlowFactoryController(Controller): def __init__(self, *args, **kwargs): kwargs.setdefault("ready_timeout", 0.5) super().__init__(*args, **kwargs) def factory(self): time.sleep(self.ready_timeout * 3) return super().factory() def _factory_invoker(self): time.sleep(self.ready_timeout * 3) return super()._factory_invoker() def in_win32(): return platform.system().casefold() == "windows" def in_wsl(): # WSL 1.0 somehow allows more than one listener on one port. # So we have to detect when we're running on WSL so we can skip some tests. # On Windows, platform.release() returns the Windows version (e.g., "7" or "10") # On Linux (incl. 
WSL), platform.release() returns the kernel version. # As of 2021-02-07, only WSL has a kernel with "Microsoft" in the version. return "microsoft" in platform.release().casefold() def in_cygwin(): return platform.system().casefold().startswith("cygwin") @pytest.fixture(scope="module") def safe_socket_dir() -> Generator[Path, None, None]: # See: # - https://github.com/aio-libs/aiohttp/issues/3572 # - https://github.com/aio-libs/aiohttp/pull/3832/files # - https://unix.stackexchange.com/a/367012/5589 tmpdir = Path(mkdtemp()).absolute() assert len(str(tmpdir)) <= 87 # 92 (max on HP-UX) minus 5 (allow 4-char fn) # yield tmpdir # plist = list(tmpdir.rglob("*")) for p in reversed(plist): if p.is_dir(): p.rmdir() else: p.unlink() tmpdir.rmdir() def assert_smtp_socket(controller: UnixSocketMixin) -> bool: assert Path(controller.unix_socket).exists() sockfile = controller.unix_socket if controller.ssl_context: ssl_context = _server_to_client_ssl_ctx(controller.ssl_context) else: ssl_context = None with ExitStack() as stk: sock: socket.socket = stk.enter_context( socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) ) sock.settimeout(AUTOSTOP_DELAY) sock.connect(str(sockfile)) if ssl_context: sock = stk.enter_context(ssl_context.wrap_socket(sock)) catchup_delay() try: resp = sock.recv(1024) except socket.timeout: return False if not resp: return False assert resp.startswith(b"220 ") assert resp.endswith(b"\r\n") sock.send(b"EHLO socket.test\r\n") # We need to "build" resparr because, especially when socket is wrapped # in SSL, the SMTP server takes it sweet time responding with the list # of ESMTP features ... resparr = bytearray() while not resparr.endswith(b"250 HELP\r\n"): catchup_delay() resp = sock.recv(1024) if not resp: break resparr += resp assert resparr.endswith(b"250 HELP\r\n") sock.send(b"QUIT\r\n") catchup_delay() resp = sock.recv(1024) assert resp.startswith(b"221") return True class TestServer: """Tests for the aiosmtpd.smtp.SMTP class""" def test_smtp_utf8(self, plain_controller, client): code, mesg = client.ehlo("example.com") assert code == 250 assert b"SMTPUTF8" in mesg.splitlines() def test_default_max_command_size_limit(self): server = Server(Sink()) assert server.max_command_size_limit == 512 def test_special_max_command_size_limit(self): server = Server(Sink()) server.command_size_limits["DATA"] = 1024 assert server.max_command_size_limit == 1024 def test_warn_authreq_notls(self): expectedre = ( r"Requiring AUTH while not requiring TLS can lead to " r"security vulnerabilities!" ) with pytest.warns(UserWarning, match=expectedre): Server(Sink(), auth_require_tls=False, auth_required=True) @pytest.mark.skipif(sys.platform == "win32", reason="No idea what is causing error") class TestController: """Tests for the aiosmtpd.controller.Controller class""" @pytest.mark.filterwarnings("ignore") def test_ready_timeout(self): cont = SlowStartController(Sink()) expectre = ( "SMTP server failed to start within allotted time. " "This might happen if the system is too busy. " "Try increasing the `ready_timeout` parameter." ) try: with pytest.raises(TimeoutError, match=expectre): cont.start() finally: cont.stop() @pytest.mark.filterwarnings("ignore") def test_factory_timeout(self): cont = SlowFactoryController(Sink()) expectre = ( r"SMTP server started, but not responding within allotted time. " r"This might happen if the system is too busy. " r"Try increasing the `ready_timeout` parameter." 
) try: with pytest.raises(TimeoutError, match=expectre): cont.start() finally: cont.stop() def test_reuse_loop(self, temp_event_loop): cont = Controller(Sink(), loop=temp_event_loop) assert cont.loop is temp_event_loop try: cont.start() assert cont.smtpd.loop is temp_event_loop finally: cont.stop() @pytest.mark.skipif(in_wsl(), reason="WSL prevents socket collision") def test_socket_error_dupe(self, plain_controller, client): contr2 = Controller( Sink(), hostname=Global.SrvAddr.host, port=Global.SrvAddr.port ) expectedre = r"error while attempting to bind on address" try: with pytest.raises(socket.error, match=expectedre): contr2.start() finally: contr2.stop() @pytest.mark.skipif(in_wsl(), reason="WSL prevents socket collision") def test_socket_error_default(self): contr1 = Controller(Sink()) contr2 = Controller(Sink()) expectedre = r"error while attempting to bind on address" try: with pytest.raises(socket.error, match=expectedre): contr1.start() contr2.start() finally: contr2.stop() contr1.stop() def test_server_attribute(self): controller = Controller(Sink()) assert controller.server is None try: controller.start() assert controller.server is not None finally: controller.stop() assert controller.server is None @pytest.mark.filterwarnings( "ignore:server_kwargs will be removed:DeprecationWarning" ) def test_enablesmtputf8_flag(self): # Default is True controller = Controller(Sink()) assert controller.SMTP_kwargs["enable_SMTPUTF8"] # Explicit set must be reflected in server_kwargs controller = Controller(Sink(), enable_SMTPUTF8=True) assert controller.SMTP_kwargs["enable_SMTPUTF8"] controller = Controller(Sink(), enable_SMTPUTF8=False) assert not controller.SMTP_kwargs["enable_SMTPUTF8"] # Explicit set must override server_kwargs kwargs = dict(enable_SMTPUTF8=False) controller = Controller(Sink(), enable_SMTPUTF8=True, server_kwargs=kwargs) assert controller.SMTP_kwargs["enable_SMTPUTF8"] kwargs = dict(enable_SMTPUTF8=True) controller = Controller(Sink(), enable_SMTPUTF8=False, server_kwargs=kwargs) assert not controller.SMTP_kwargs["enable_SMTPUTF8"] # Set through server_kwargs must not be overridden if no explicit set kwargs = dict(enable_SMTPUTF8=False) controller = Controller(Sink(), server_kwargs=kwargs) assert not controller.SMTP_kwargs["enable_SMTPUTF8"] @pytest.mark.filterwarnings( "ignore:server_kwargs will be removed:DeprecationWarning" ) def test_serverhostname_arg(self): contsink = partial(Controller, Sink()) controller = contsink() assert "hostname" not in controller.SMTP_kwargs controller = contsink(server_hostname="testhost1") assert controller.SMTP_kwargs["hostname"] == "testhost1" kwargs = dict(hostname="testhost2") controller = contsink(server_kwargs=kwargs) assert controller.SMTP_kwargs["hostname"] == "testhost2" controller = contsink(server_hostname="testhost3", server_kwargs=kwargs) assert controller.SMTP_kwargs["hostname"] == "testhost3" def test_hostname_empty(self): # WARNING: This test _always_ succeeds in Windows. 
cont = Controller(Sink(), hostname="") try: cont.start() finally: cont.stop() def test_hostname_none(self): cont = Controller(Sink()) try: cont.start() finally: cont.stop() def test_testconn_raises(self, mocker: MockFixture): mocker.patch("socket.socket.recv", side_effect=RuntimeError("MockError")) cont = Controller(Sink(), hostname="") try: with pytest.raises(RuntimeError, match="MockError"): cont.start() finally: cont.stop() def test_getlocalhost(self): assert get_localhost() in ("127.0.0.1", "::1") def test_getlocalhost_noipv6(self, mocker): mock_hasip6 = mocker.patch("aiosmtpd.controller._has_ipv6", return_value=False) assert get_localhost() == "127.0.0.1" assert mock_hasip6.called def test_getlocalhost_6yes(self, mocker: MockFixture): mock_sock = mocker.Mock() mock_makesock: mocker.Mock = mocker.patch("aiosmtpd.controller.makesock") mock_makesock.return_value.__enter__.return_value = mock_sock assert get_localhost() == "::1" mock_makesock.assert_called_with(socket.AF_INET6, socket.SOCK_STREAM) assert mock_sock.bind.called # Apparently errno.E* constants adapts to the OS, so on Windows they will # automatically use the analogous WSAE* constants @pytest.mark.parametrize("err", [errno.EADDRNOTAVAIL, errno.EAFNOSUPPORT]) def test_getlocalhost_6no(self, mocker, err): mock_makesock: mocker.Mock = mocker.patch( "aiosmtpd.controller.makesock", side_effect=OSError(errno.EADDRNOTAVAIL, "Mock IP4-only"), ) assert get_localhost() == "127.0.0.1" mock_makesock.assert_called_with(socket.AF_INET6, socket.SOCK_STREAM) def test_getlocalhost_6inuse(self, mocker): mock_makesock: mocker.Mock = mocker.patch( "aiosmtpd.controller.makesock", side_effect=OSError(errno.EADDRINUSE, "Mock IP6 used"), ) assert get_localhost() == "::1" mock_makesock.assert_called_with(socket.AF_INET6, socket.SOCK_STREAM) def test_getlocalhost_error(self, mocker): mock_makesock: mocker.Mock = mocker.patch( "aiosmtpd.controller.makesock", side_effect=OSError(errno.EFAULT, "Mock Error"), ) with pytest.raises(OSError, match="Mock Error") as exc: get_localhost() assert exc.value.errno == errno.EFAULT mock_makesock.assert_called_with(socket.AF_INET6, socket.SOCK_STREAM) def test_stop_default(self): controller = Controller(Sink()) with pytest.raises(AssertionError, match="SMTP daemon not running"): controller.stop() def test_stop_assert(self): controller = Controller(Sink()) with pytest.raises(AssertionError, match="SMTP daemon not running"): controller.stop(no_assert=False) def test_stop_noassert(self): controller = Controller(Sink()) controller.stop(no_assert=True) @pytest.mark.skipif(in_cygwin(), reason="Cygwin AF_UNIX is problematic") @pytest.mark.skipif(in_win32(), reason="Win32 does not yet fully implement AF_UNIX") class TestUnixSocketController: def test_server_creation(self, safe_socket_dir): sockfile = safe_socket_dir / "smtp" cont = UnixSocketController(Sink(), unix_socket=sockfile) try: cont.start() assert_smtp_socket(cont) finally: cont.stop() def test_server_creation_ssl(self, safe_socket_dir, ssl_context_server): sockfile = safe_socket_dir / "smtp" cont = UnixSocketController( Sink(), unix_socket=sockfile, ssl_context=ssl_context_server ) try: cont.start() # Allow additional time for SSL to kick in catchup_delay() assert_smtp_socket(cont) finally: cont.stop() class TestUnthreaded: @pytest.fixture def runner(self): thread: Optional[Thread] = None def _runner(loop: asyncio.AbstractEventLoop): loop.run_forever() def starter(loop: asyncio.AbstractEventLoop): nonlocal thread thread = Thread(target=_runner, args=(loop,)) 
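            # (Descriptive note) The event loop is driven in a daemon thread here so
            # that the *unthreaded* controllers under test can be exercised from the
            # main test thread; joiner()/is_alive() below expose that thread to the
            # tests for shutdown checks.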
thread.daemon = True thread.start() catchup_delay() def joiner(timeout: Optional[float] = None): nonlocal thread assert isinstance(thread, Thread) thread.join(timeout=timeout) def is_alive(): nonlocal thread assert isinstance(thread, Thread) return thread.is_alive() starter.join = joiner starter.is_alive = is_alive return starter @pytest.mark.skipif(in_cygwin(), reason="Cygwin AF_UNIX is problematic") @pytest.mark.skipif(in_win32(), reason="Win32 does not yet fully implement AF_UNIX") def test_unixsocket(self, safe_socket_dir, autostop_loop, runner): sockfile = safe_socket_dir / "smtp" cont = UnixSocketUnthreadedController( Sink(), unix_socket=sockfile, loop=autostop_loop ) cont.begin() # Make sure event loop is not running (will be started in thread) assert autostop_loop.is_running() is False runner(autostop_loop) # Make sure event loop is up and running (started within thread) assert autostop_loop.is_running() is True # Check we can connect assert_smtp_socket(cont) # Wait until thread ends, which it will be when the loop autostops runner.join(timeout=AUTOSTOP_DELAY) assert runner.is_alive() is False catchup_delay() assert autostop_loop.is_running() is False # At this point, the loop _has_ stopped, but the task is still listening assert assert_smtp_socket(cont) is False # Stop the task cont.end() catchup_delay() # Now the listener has gone away # noinspection PyTypeChecker with pytest.raises((socket.timeout, ConnectionError)): assert_smtp_socket(cont) @pytest.mark.filterwarnings( "ignore::pytest.PytestUnraisableExceptionWarning" ) def test_inet_loopstop(self, autostop_loop, runner): """ Verify behavior when the loop is stopped before controller is stopped """ autostop_loop.set_debug(True) cont = UnthreadedController(Sink(), loop=autostop_loop) cont.begin() # Make sure event loop is not running (will be started in thread) assert autostop_loop.is_running() is False runner(autostop_loop) # Make sure event loop is up and running (started within thread) assert autostop_loop.is_running() is True # Check we can connect with SMTPClient(cont.hostname, cont.port, timeout=AUTOSTOP_DELAY) as client: code, _ = client.helo("example.org") assert code == 250 # Wait until thread ends, which it will be when the loop autostops runner.join(timeout=AUTOSTOP_DELAY) assert runner.is_alive() is False catchup_delay() assert autostop_loop.is_running() is False # At this point, the loop _has_ stopped, but the task is still listening, # so rather than socket.timeout, we'll get a refusal instead, thus causing # SMTPServerDisconnected with pytest.raises(SMTPServerDisconnected): SMTPClient(cont.hostname, cont.port, timeout=0.1) cont.end() catchup_delay() cont.ended.wait() # Now the listener has gone away, and thus we will end up with socket.timeout # or ConnectionError (depending on OS) # noinspection PyTypeChecker with pytest.raises((socket.timeout, ConnectionError)): SMTPClient(cont.hostname, cont.port, timeout=0.1) @pytest.mark.filterwarnings( "ignore::pytest.PytestUnraisableExceptionWarning" ) def test_inet_contstop(self, temp_event_loop, runner): """ Verify behavior when the controller is stopped before loop is stopped """ cont = UnthreadedController(Sink(), loop=temp_event_loop) cont.begin() # Make sure event loop is not running (will be started in thread) assert temp_event_loop.is_running() is False runner(temp_event_loop) # Make sure event loop is up and running assert temp_event_loop.is_running() is True try: # Check that we can connect with SMTPClient(cont.hostname, cont.port, timeout=AUTOSTOP_DELAY) as 
client:
                code, _ = client.helo("example.org")
                assert code == 250
                client.quit()
            catchup_delay()
            temp_event_loop.call_soon_threadsafe(cont.end)
            for _ in range(10):  # 10 is arbitrary
                catchup_delay()  # effectively yield to other threads/event loop
                if cont.ended.wait(1.0):
                    break
            assert temp_event_loop.is_running() is True
            # Because we've called .end() there, the server listener should've gone
            # away, so we should end up with a socket.timeout or ConnectionError or
            # SMTPServerDisconnected (depending on lotsa factors)
            expect_errs = (socket.timeout, ConnectionError, SMTPServerDisconnected)
            # noinspection PyTypeChecker
            with pytest.raises(expect_errs):
                SMTPClient(cont.hostname, cont.port, timeout=0.1)
        finally:
            # Wrap up, or else we'll hang
            temp_event_loop.call_soon_threadsafe(cont.cancel_tasks)
            catchup_delay()
            runner.join()
        assert runner.is_alive() is False
        assert temp_event_loop.is_running() is False
        assert temp_event_loop.is_closed() is False


@pytest.mark.skipif(sys.version_info >= (3, 12), reason="Hangs on 3.12")
@pytest.mark.filterwarnings("ignore::pytest.PytestUnraisableExceptionWarning")
class TestFactory:
    def test_normal_situation(self):
        cont = Controller(Sink())
        try:
            cont.start()
            catchup_delay()
            assert cont.smtpd is not None
            assert cont._thread_exception is None
        finally:
            cont.stop()

    def test_unknown_args_direct(self, silence_event_loop_closed: bool):
        unknown = "this_is_an_unknown_kwarg"
        cont = Controller(Sink(), ready_timeout=0.3, **{unknown: True})
        expectedre = r"__init__.. got an unexpected keyword argument '" + unknown + r"'"
        try:
            with pytest.raises(TypeError, match=expectedre):
                cont.start()
            assert cont.smtpd is None
            assert isinstance(cont._thread_exception, TypeError)
        finally:
            cont.stop()

    @pytest.mark.filterwarnings(
        "ignore:server_kwargs will be removed:DeprecationWarning"
    )
    def test_unknown_args_inkwargs(self, silence_event_loop_closed: bool):
        unknown = "this_is_an_unknown_kwarg"
        cont = Controller(Sink(), ready_timeout=0.3, server_kwargs={unknown: True})
        expectedre = r"__init__.. got an unexpected keyword argument '" + unknown + r"'"
        try:
            with pytest.raises(TypeError, match=expectedre):
                cont.start()
            assert cont.smtpd is None
        finally:
            cont.stop()

    def test_factory_none(self, mocker: MockFixture, silence_event_loop_closed: bool):
        # Hypothetical situation where factory() did not raise an Exception
        # but returned None instead
        mocker.patch("aiosmtpd.controller.SMTP", return_value=None)
        cont = Controller(Sink(), ready_timeout=0.3)
        expectedre = r"factory\(\) returned None"
        try:
            with pytest.raises(RuntimeError, match=expectedre):
                cont.start()
            assert cont.smtpd is None
        finally:
            cont.stop()

    def test_noexc_smtpd_missing(
        self, mocker: MockFixture, silence_event_loop_closed: bool
    ):
        # Hypothetical situation where factory() failed but no
        # Exception was generated.
        cont = Controller(Sink())

        def hijacker(*args, **kwargs):
            cont._thread_exception = None
            # Must still return an (unmocked) _FakeServer to prevent a whole bunch
            # of messy exceptions, although they don't affect the test at all.
return _FakeServer(cont.loop) mocker.patch("aiosmtpd.controller._FakeServer", side_effect=hijacker) mocker.patch( "aiosmtpd.controller.SMTP", side_effect=RuntimeError("Simulated Failure") ) expectedre = r"Unknown Error, failed to init SMTP server" try: with pytest.raises(RuntimeError, match=expectedre): cont.start() assert cont.smtpd is None assert cont._thread_exception is None finally: cont.stop() class TestCompat: def test_version(self): from aiosmtpd import __version__ as init_version from aiosmtpd.smtp import __version__ as smtp_version assert smtp_version is init_version aio-libs-aiosmtpd-b634d9b/aiosmtpd/tests/test_smtp.py000066400000000000000000002225241462210711200230270ustar00rootroot00000000000000# Copyright 2014-2021 The aiosmtpd Developers # SPDX-License-Identifier: Apache-2.0 """Test the SMTP protocol.""" import asyncio import itertools import logging import socket import sys import time import warnings from asyncio.transports import Transport from base64 import b64encode from contextlib import suppress from smtplib import ( SMTP as SMTPClient, SMTPAuthenticationError, SMTPDataError, SMTPResponseException, SMTPServerDisconnected, ) from textwrap import dedent from typing import cast, Any, Callable, Generator, List, Tuple, Union import pytest from pytest_mock import MockFixture from .conftest import Global, controller_data, handler_data from aiosmtpd.controller import Controller from aiosmtpd.handlers import Sink from aiosmtpd.smtp import ( BOGUS_LIMIT, CALL_LIMIT_DEFAULT, MISSING, SMTP as Server, AuthResult, Envelope as SMTPEnvelope, LoginPassword, Session as SMTPSession, __ident__ as GREETING, auth_mechanism, ) from aiosmtpd.testing.helpers import ( ReceivingHandler, catchup_delay, reset_connection, send_recv, ) from aiosmtpd.testing.statuscodes import SMTP_STATUS_CODES as S CRLF = "\r\n" BCRLF = b"\r\n" MAIL_LOG = logging.getLogger("mail.log") MAIL_LOG.setLevel(logging.DEBUG) B64EQUALS = b64encode(b"=").decode() # fh = logging.FileHandler("~smtp.log") # fh.setFormatter(logging.Formatter("{asctime} - {levelname} - {message}", style="{")) # fh.setLevel(logging.DEBUG) # MAIL_LOG.addHandler(fh) # region #### Test Helpers ############################################################ def auth_callback(mechanism, login, password) -> bool: return login and login.decode() == "goodlogin" def assert_nopassleak(passwd: str, record_tuples: List[Tuple[str, int, str]]): """ :param passwd: The password we're looking for in the logs :param record_tuples: Usually caplog.record_tuples """ passwd_b64 = b64encode(passwd.encode("ascii")).decode("ascii") for _logname, _loglevel, logmsg in record_tuples: assert passwd not in logmsg assert passwd_b64 not in logmsg class UndescribableError(Exception): def __str__(self): raise Exception() class ErrorSMTP(Server): exception_type = ValueError async def smtp_HELO(self, hostname: str): raise self.exception_type("test") # endregion # region #### Special-Purpose Handlers ################################################ # noinspection TimingAttack class PeekerHandler: sess: SMTPSession = None login: Union[str, bytes, None] = None login_data: Any = None mechanism: Union[str, bytes, None] = None password: Union[str, bytes, None] = None # Please do not insert "_" after auth; that will 'fool' SMTP into thinking this is # an AUTH Mechanism, and we totally do NOT want that. 
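    # (Descriptive note) This handler exercises both authentication hooks that
    # aiosmtpd's SMTP class accepts: the legacy ``auth_callback`` (a plain
    # callable returning a bool, see ``authcallback`` below) and the newer
    # ``authenticator`` (returning an ``AuthResult``, optionally carrying a
    # custom reply such as the 454 used below).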
def authcallback(self, mechanism: str, login: bytes, password: bytes) -> bool: self.login = login self.password = password return login == b"goodlogin" and password == b"goodpasswd" def authenticator( self, server: Server, session: SMTPSession, envelope: SMTPEnvelope, mechanism: str, login_data: Tuple[bytes, bytes], ) -> AuthResult: self.sess = session self.mechanism = mechanism self.login_data = login_data userb, passb = login_data if userb == b"failme_with454": return AuthResult( success=False, handled=False, message="454 4.7.0 Temporary authentication failure", ) else: self.login = userb self.password = passb return AuthResult(success=True, auth_data=login_data) async def handle_MAIL( self, server: Server, session: SMTPSession, envelope: SMTPEnvelope, address: str, mail_options: dict, ) -> str: self.sess = session return S.S250_OK.to_str() async def auth_DENYMISSING(self, server, args): return MISSING async def auth_DENYFALSE(self, server, args): return False async def auth_NONE(self, server: Server, args): await server.push(S.S235_AUTH_SUCCESS.to_str()) return None async def auth_NULL(self, server, args): return "NULL_login" async def auth_DONT(self, server, args): return MISSING async def auth_WITH_UNDERSCORE(self, server: Server, args) -> str: """ Be careful when using this AUTH mechanism; log_client_response is set to True, and this will raise some severe warnings. """ await server.challenge_auth( "challenge", encode_to_b64=False, log_client_response=True ) return "250 OK" @auth_mechanism("with-dash") async def auth_WITH_DASH(self, server, args): return "250 OK" async def auth_WITH__MULTI__DASH(self, server, args): return "250 OK" class StoreEnvelopeOnVRFYHandler: """Saves envelope for later inspection when handling VRFY.""" envelope = None async def handle_VRFY( self, server: Server, session: SMTPSession, envelope: SMTPEnvelope, addr: str ) -> str: self.envelope = envelope return S.S250_OK.to_str() class ErroringHandler: error = None custom_response = False async def handle_DATA(self, server, session, envelope) -> str: return "499 Could not accept the message" async def handle_exception(self, error) -> str: self.error = error if not self.custom_response: return "500 ErroringHandler handling error" else: return "451 Temporary error: ({}) {}".format( error.__class__.__name__, str(error) ) class ErroringHandlerConnectionLost: error = None async def handle_DATA(self, server, session, envelope): raise ConnectionResetError("ErroringHandlerConnectionLost test") async def handle_exception(self, error): self.error = error class ErroringErrorHandler: error = None async def handle_exception(self, error: Exception): self.error = error raise ValueError("ErroringErrorHandler test") class UndescribableErrorHandler: error = None async def handle_exception(self, error: Exception): self.error = error raise UndescribableError() class SleepingHeloHandler: async def handle_HELO( self, server: Server, session: SMTPSession, envelope: SMTPEnvelope, hostname: str, ) -> str: await asyncio.sleep(0.01) session.host_name = hostname return "250 {}".format(server.hostname) # endregion # region #### Special-Purpose Controllers ############################################# # These are either impractical or impossible to implement using Controller class TimeoutController(Controller): Delay: float = 1.0 def factory(self): return Server(self.handler, timeout=self.Delay) class ErrorController(Controller): def factory(self): return ErrorSMTP(self.handler) class CustomHostnameController(Controller): custom_name = 
"custom.localhost" def factory(self): return Server(self.handler, hostname=self.custom_name) class CustomIdentController(Controller): ident: bytes = b"Identifying SMTP v2112" def factory(self): return Server(self.handler, ident=self.ident.decode()) # endregion # region ##### Fixtures ############################################################### @pytest.fixture def transport_resp(mocker: MockFixture) -> Tuple[Transport, list]: responses = [] mocked = mocker.Mock() mocked.write = responses.append # return cast(Transport, mocked), responses @pytest.fixture def get_protocol( temp_event_loop: asyncio.AbstractEventLoop, transport_resp: Any, ) -> Callable[..., Server]: transport, _ = transport_resp def getter(*args, **kwargs) -> Server: proto = Server(*args, loop=temp_event_loop, **kwargs) proto.connection_made(transport) return proto return getter # region #### Fixtures: Controllers ################################################## @pytest.fixture def auth_peeker_controller( get_controller: Callable[..., Controller] ) -> Generator[Controller, None, None]: handler = PeekerHandler() controller = get_controller( handler, decode_data=True, enable_SMTPUTF8=True, auth_require_tls=False, auth_callback=handler.authcallback, auth_exclude_mechanism=["DONT"], ) controller.start() Global.set_addr_from(controller) # yield controller # controller.stop() @pytest.fixture def authenticator_peeker_controller( get_controller: Callable[..., Controller] ) -> Generator[Controller, None, None]: handler = PeekerHandler() controller = get_controller( handler, decode_data=True, enable_SMTPUTF8=True, auth_require_tls=False, authenticator=handler.authenticator, auth_exclude_mechanism=["DONT"], ) controller.start() Global.set_addr_from(controller) # yield controller # controller.stop() @pytest.fixture def decoding_authnotls_controller( get_handler: Callable, get_controller: Callable[..., Controller] ) -> Generator[Controller, None, None]: handler = get_handler() controller = get_controller( handler, decode_data=True, enable_SMTPUTF8=True, auth_require_tls=False, auth_callback=auth_callback, ) controller.start() Global.set_addr_from(controller) # yield controller # # Some test cases need to .stop() the controller inside themselves # in such cases, we must suppress Controller's raise of AssertionError # because Controller doesn't like .stop() to be invoked more than once with suppress(AssertionError): controller.stop() @pytest.fixture def error_controller(get_handler: Callable) -> Generator[ErrorController, None, None]: handler = get_handler() controller = ErrorController(handler) controller.start() Global.set_addr_from(controller) # yield controller # controller.stop() # endregion # endregion class _CommonMethods: """Contain snippets that keep being performed again and again and again...""" def _helo(self, client: SMTPClient, domain: str = "example.org") -> bytes: code, mesg = client.helo(domain) assert code == 250 return mesg def _ehlo(self, client: SMTPClient, domain: str = "example.com") -> bytes: code, mesg = client.ehlo(domain) assert code == 250 return mesg class TestProtocol: def test_honors_mail_delimiters( self, temp_event_loop, transport_resp, get_protocol ): handler = ReceivingHandler() protocol = get_protocol(handler) data = b"test\r\nmail\rdelimiters\nsaved\r\n" protocol.data_received( BCRLF.join( [ b"HELO example.org", b"MAIL FROM: ", b"RCPT TO: ", b"DATA", data + b".", b"QUIT\r\n", ] ) ) with suppress(asyncio.CancelledError): temp_event_loop.run_until_complete(protocol._handler_coroutine) _, responses = 
transport_resp assert responses[5] == S.S250_OK.to_bytes() + b"\r\n" assert len(handler.box) == 1 assert handler.box[0].content == data def test_empty_email(self, temp_event_loop, transport_resp, get_protocol): handler = ReceivingHandler() protocol = get_protocol(handler) protocol.data_received( BCRLF.join( [ b"HELO example.org", b"MAIL FROM: ", b"RCPT TO: ", b"DATA", b".", b"QUIT\r\n", ] ) ) with suppress(asyncio.CancelledError): temp_event_loop.run_until_complete(protocol._handler_coroutine) _, responses = transport_resp assert responses[5] == S.S250_OK.to_bytes() + b"\r\n" assert len(handler.box) == 1 assert handler.box[0].content == b"" @pytest.mark.usefixtures("plain_controller") @controller_data( decode_data=True, enable_SMTPUTF8=True, ) class TestSMTP(_CommonMethods): valid_mailfrom_addresses = [ # no space between colon and address "anne@example.com", "", # one space between colon and address " anne@example.com", " ", # multiple spaces between colon and address " anne@example.com", " ", # non alphanums in local part "anne.arthur@example.com", "anne+promo@example.com", "anne-arthur@example.com", "anne_arthur@example.com", "_@example.com", # IP address in domain part "anne@127.0.0.1", "anne@[127.0.0.1]", "anne@[IPv6:2001:db8::1]", "anne@[IPv6::1]", # email with comments -- obsolete, but still valid "anne(comment)@example.com", "(comment)anne@example.com", "anne@example.com(comment)", "anne@machine(comment). example", # RFC5322 § A.6.3 # source route -- RFC5321 § 4.1.2 "MUST BE accepted" "<@example.org:anne@example.com>", "<@example.net,@example.org:anne@example.com>", # strange -- but valid -- addresses "anne@mail", '""@example.com', '<""@example.com>', '" "@example.com', '"anne..arthur"@example.com', "mailhost!anne@example.com", "anne%example.org@example.com", 'much."more\\ unusual"@example.com', 'very."(),:;<>[]".VERY."very@\\ "very.unusual@strange.example.com', # more from RFC3696 § 3 # 'Abc\\@def@example.com', -- get_addr_spec does not support this "Fred\\ Bloggs@example.com", "Joe.\\\\Blow@example.com", '"Abc@def"@example.com', '"Fred Bloggs"@example.com', "customer/department=shipping@example.com", "$A12345@example.com", "!def!xyz%abc@example.com", "a" * 65 + "@example.com", # local-part > 64 chars -- see Issue#257 "b" * 488 + "@example.com", # practical longest for MAIL FROM "c" * 500, # practical longest domainless for MAIL FROM ] valid_rcptto_addresses = valid_mailfrom_addresses + [ # Postmaster -- RFC5321 § 4.1.1.3 "", "b" * 490 + "@example.com", # practical longest for RCPT TO "c" * 502, # practical longest domainless for RCPT TO ] invalid_email_addresses = [ "<@example.com>", # null local part "", # null domain part ] @pytest.mark.parametrize("data", [b"\x80FAIL\r\n", b"\x80 FAIL\r\n"]) def test_binary(self, client, data): client.sock.send(data) assert client.getreply() == S.S500_BAD_SYNTAX def test_helo(self, client): resp = client.helo("example.com") assert resp == S.S250_FQDN def test_close_then_continue(self, client): self._helo(client) client.close() client.connect(*Global.SrvAddr) resp = client.docmd("MAIL FROM: ") assert resp == S.S503_HELO_FIRST def test_helo_no_hostname(self, client): client.local_hostname = "" resp = client.helo("") assert resp == S.S501_SYNTAX_HELO def test_helo_duplicate(self, client): self._helo(client, "example.org") self._helo(client, "example.com") def test_ehlo(self, client): code, mesg = client.ehlo("example.com") lines = mesg.splitlines() assert lines == [ bytes(socket.getfqdn(), "utf-8"), b"SIZE 33554432", b"SMTPUTF8", b"HELP", ] def 
test_ehlo_duplicate(self, client): self._ehlo(client, "example.com") self._ehlo(client, "example.org") def test_ehlo_no_hostname(self, client): client.local_hostname = "" resp = client.ehlo("") assert resp == S.S501_SYNTAX_EHLO def test_helo_then_ehlo(self, client): self._helo(client, "example.com") self._ehlo(client, "example.org") def test_ehlo_then_helo(self, client): self._ehlo(client, "example.org") self._helo(client, "example.com") def test_noop(self, client): resp = client.noop() assert resp == S.S250_OK def test_noop_with_arg(self, plain_controller, client): # smtplib.SMTP.noop() doesn't accept args resp = client.docmd("NOOP ok") assert resp == S.S250_OK def test_quit(self, client): resp = client.quit() assert resp == S.S221_BYE def test_quit_with_args(self, client): resp = client.docmd("QUIT oops") assert resp == S.S501_SYNTAX_QUIT def test_help(self, client): resp = client.docmd("HELP") assert resp == S.S250_SUPPCMD_NOTLS @pytest.mark.parametrize( "command", [ "HELO", "EHLO", "MAIL", "RCPT", "DATA", "RSET", "NOOP", "QUIT", "VRFY", "AUTH", ], ) def test_help_(self, client, command): resp = client.docmd(f"HELP {command}") syntax = getattr(S, f"S250_SYNTAX_{command}") assert resp == syntax @pytest.mark.parametrize( "command", [ "MAIL", "RCPT", ], ) def test_help_esmtp(self, client, command): self._ehlo(client) resp = client.docmd(f"HELP {command}") syntax = getattr(S, f"S250_SYNTAX_{command}_E") assert resp == syntax def test_help_bad_arg(self, client): resp = client.docmd("HELP me!") assert resp == S.S501_SUPPCMD_NOTLS def test_expn(self, client): resp = client.expn("anne@example.com") assert resp == S.S502_EXPN_NOTIMPL @pytest.mark.parametrize( "command", ["MAIL FROM: ", "RCPT TO: ", "DATA"], ids=lambda x: x.split()[0], ) def test_no_helo(self, client, command): resp = client.docmd(command) assert resp == S.S503_HELO_FIRST @pytest.mark.parametrize( "address", valid_mailfrom_addresses, ids=itertools.count(), ) def test_mail_valid_address(self, client, address): self._ehlo(client) resp = client.docmd(f"MAIL FROM:{address}") assert resp == S.S250_OK @pytest.mark.parametrize( "command", [ "MAIL", "MAIL ", "MAIL FROM:", "MAIL FROM: SIZE=10000", "MAIL FROM: Anne ", ], ids=["noarg", "nofrom", "noaddr", "params_noesmtp", "malformed"], ) def test_mail_smtp_errsyntax(self, client, command): self._helo(client) resp = client.docmd(command) assert resp == S.S501_SYNTAX_MAIL @pytest.mark.parametrize( "param", [ "SIZE=10000", " SIZE=10000", "SIZE=10000 ", ], ids=["norm", "extralead", "extratail"], ) def test_mail_params_esmtp(self, client, param): self._ehlo(client) resp = client.docmd("MAIL FROM: " + param) assert resp == S.S250_OK def test_mail_from_twice(self, client): self._helo(client) resp = client.docmd("MAIL FROM: ") assert resp == S.S250_OK resp = client.docmd("MAIL FROM: ") assert resp == S.S503_MAIL_NESTED @pytest.mark.parametrize( "command", [ "MAIL FROM: SIZE 10000", "MAIL FROM: SIZE", "MAIL FROM: #$%=!@#", "MAIL FROM: SIZE = 10000", ], ids=["malformed", "missing", "badsyntax", "space"], ) def test_mail_esmtp_errsyntax(self, client, command): self._ehlo(client) resp = client.docmd(command) assert resp == S.S501_SYNTAX_MAIL_E def test_mail_esmtp_params_unrecognized(self, client): self._ehlo(client) resp = client.docmd("MAIL FROM: FOO=BAR") assert resp == S.S555_MAIL_PARAMS_UNRECOG def test_bpo27931fix_smtp(self, client): self._helo(client) resp = client.docmd('MAIL FROM: <""@example.com>') assert resp == S.S250_OK resp = client.docmd('RCPT TO: <""@example.org>') assert resp == 
S.S250_OK @pytest.mark.parametrize( "address", invalid_email_addresses, ids=itertools.count(), ) def test_mail_invalid_address(self, client, address): self._helo(client) resp = client.docmd(f"MAIL FROM: {address}") assert resp == S.S553_MALFORMED @pytest.mark.parametrize("address", invalid_email_addresses, ids=itertools.count()) def test_mail_esmtp_invalid_address(self, client, address): self._ehlo(client) resp = client.docmd(f"MAIL FROM: {address} SIZE=28113") assert resp == S.S553_MALFORMED def test_rcpt_no_mail(self, client): self._helo(client) resp = client.docmd("RCPT TO: ") assert resp == S.S503_MAIL_NEEDED @pytest.mark.parametrize( "command", [ "RCPT", "RCPT ", "RCPT TO:", "RCPT TO: SIZE=1000", "RCPT TO: bart ", ], ids=["noarg", "noto", "noaddr", "params", "malformed"], ) def test_rcpt_smtp_errsyntax(self, client, command): self._helo(client) resp = client.docmd("MAIL FROM: ") assert resp == S.S250_OK resp = client.docmd(command) assert resp == S.S501_SYNTAX_RCPT @pytest.mark.parametrize( "command", [ "RCPT", "RCPT ", "RCPT TO:", "RCPT TO: #$%=!@#", "RCPT TO: bart ", ], ids=["noarg", "noto", "noaddr", "badparams", "malformed"], ) def test_rcpt_esmtp_errsyntax(self, client, command): self._ehlo(client) resp = client.docmd("MAIL FROM: ") assert resp == S.S250_OK resp = client.docmd(command) assert resp == S.S501_SYNTAX_RCPT_E def test_rcpt_unknown_params(self, client): self._ehlo(client) resp = client.docmd("MAIL FROM: ") assert resp == S.S250_OK resp = client.docmd("RCPT TO: FOOBAR") assert resp == S.S555_RCPT_PARAMS_UNRECOG @pytest.mark.parametrize("address", valid_rcptto_addresses, ids=itertools.count()) def test_rcpt_valid_address(self, client, address): self._ehlo(client) resp = client.docmd("MAIL FROM: ") assert resp == S.S250_OK resp = client.docmd(f"RCPT TO: {address}") assert resp == S.S250_OK @pytest.mark.parametrize("address", invalid_email_addresses, ids=itertools.count()) def test_rcpt_invalid_address(self, client, address): self._ehlo(client) resp = client.docmd("MAIL FROM: ") assert resp == S.S250_OK resp = client.docmd(f"RCPT TO: {address}") assert resp == S.S553_MALFORMED def test_bpo27931fix_esmtp(self, client): self._ehlo(client) resp = client.docmd('MAIL FROM: <""@example.com> SIZE=28113') assert resp == S.S250_OK resp = client.docmd('RCPT TO: <""@example.org>') assert resp == S.S250_OK def test_rset(self, client): resp = client.rset() assert resp == S.S250_OK def test_rset_with_arg(self, client): resp = client.docmd("RSET FOO") assert resp == S.S501_SYNTAX_RSET def test_vrfy(self, client): resp = client.docmd("VRFY ") assert resp == S.S252_CANNOT_VRFY def test_vrfy_no_arg(self, client): resp = client.docmd("VRFY") assert resp == S.S501_SYNTAX_VRFY def test_vrfy_not_address(self, client): resp = client.docmd("VRFY @@") assert resp == S.S502_VRFY_COULDNT(b"@@") def test_data_no_rcpt(self, client): self._helo(client) resp = client.docmd("DATA") assert resp == S.S503_RCPT_NEEDED def test_data_354(self, plain_controller, client): self._helo(client) resp = client.docmd("MAIL FROM: ") assert resp == S.S250_OK resp = client.docmd("RCPT TO: ") assert resp == S.S250_OK # Note: We NEED to manually stop the controller if we must abort while # in DATA phase. 
For reasons unclear, if we don't do that we'll hang # the test case should the assertion fail try: resp = client.docmd("DATA") assert resp == S.S354_DATA_ENDWITH finally: plain_controller.stop() def test_data_invalid_params(self, client): self._helo(client) resp = client.docmd("MAIL FROM: ") assert resp == S.S250_OK resp = client.docmd("RCPT TO: ") assert resp == S.S250_OK resp = client.docmd("DATA FOOBAR") assert resp == S.S501_SYNTAX_DATA def test_empty_command(self, client): resp = client.docmd("") assert resp == S.S500_BAD_SYNTAX def test_too_long_command(self, client): resp = client.docmd("a" * 513) assert resp == S.S500_CMD_TOO_LONG def test_way_too_long_command(self, client): # Send a very large string to ensure it is broken # into several packets, which hits the inner # LimitOverrunError code path in _handle_client. client.send("a" * 1_000_000) response = client.docmd("a" * 1001) assert response == S.S500_CMD_TOO_LONG response = client.docmd("NOOP") assert response == S.S250_OK def test_unknown_command(self, client): resp = client.docmd("FOOBAR") assert resp == S.S500_CMD_UNRECOG(b"FOOBAR") class TestSMTPNonDecoding(_CommonMethods): @controller_data(decode_data=False) def test_mail_invalid_body_param(self, plain_controller, client): self._ehlo(client) resp = client.docmd("MAIL FROM: BODY=FOOBAR") assert resp == S.S501_MAIL_BODY @pytest.mark.usefixtures("decoding_authnotls_controller") class TestSMTPAuth(_CommonMethods): def test_no_ehlo(self, client): resp = client.docmd("AUTH") assert resp == S.S503_EHLO_FIRST def test_helo(self, client): self._helo(client) resp = client.docmd("AUTH") assert resp == S.S500_AUTH_UNRECOG def test_not_enough_values(self, client): self._ehlo(client) resp = client.docmd("AUTH") assert resp == S.S501_TOO_FEW def test_already_authenticated(self, caplog, client): PW = "goodpasswd" self._ehlo(client) resp = client.docmd( "AUTH PLAIN " + b64encode(b"\0goodlogin\0" + PW.encode("ascii")).decode() ) assert resp == S.S235_AUTH_SUCCESS resp = client.docmd("AUTH") assert resp == S.S503_ALREADY_AUTH resp = client.docmd("MAIL FROM: ") assert resp == S.S250_OK assert_nopassleak(PW, caplog.record_tuples) def test_auth_individually(self, caplog, client): """AUTH state of different clients must be independent""" PW = "goodpasswd" client1 = client with SMTPClient(*Global.SrvAddr) as client2: for c in client1, client2: c.ehlo("example.com") resp = c.login("goodlogin", PW) assert resp == S.S235_AUTH_SUCCESS assert_nopassleak(PW, caplog.record_tuples) def test_rset_maintain_authenticated(self, caplog, client): """RSET resets only Envelope not Session""" PW = "goodpasswd" self._ehlo(client, "example.com") resp = client.login("goodlogin", PW) assert resp == S.S235_AUTH_SUCCESS resp = client.mail("alice@example.com") assert resp == S.S250_OK resp = client.rset() assert resp == S.S250_OK resp = client.docmd("AUTH PLAIN") assert resp == S.S503_ALREADY_AUTH assert_nopassleak(PW, caplog.record_tuples) @handler_data(class_=PeekerHandler) def test_auth_loginteract_warning(self, client): client.ehlo("example.com") resp = client.docmd("AUTH WITH_UNDERSCORE") assert resp == (334, b"challenge") with warnings.catch_warnings(record=True) as w: warnings.simplefilter(action="default", category=UserWarning) assert client.docmd(B64EQUALS) == S.S235_AUTH_SUCCESS assert len(w) > 0 assert str(w[0].message) == "AUTH interaction logging is enabled!" assert str(w[1].message) == "Sensitive information might be leaked!" 
# noinspection TimingAttack,HardcodedPassword
@pytest.mark.usefixtures("auth_peeker_controller")
class TestAuthMechanisms(_CommonMethods):
    @pytest.fixture
    def do_auth_plain1(self, client) -> Callable[[str], Tuple[int, bytes]]:
        self._ehlo(client)

        def do(param: str) -> Tuple[int, bytes]:
            return client.docmd("AUTH PLAIN " + param)

        do.client = client
        return do

    @pytest.fixture
    def do_auth_login3(self, client) -> Callable[[str], Tuple[int, bytes]]:
        self._ehlo(client)
        resp = client.docmd("AUTH LOGIN")
        assert resp == S.S334_AUTH_USERNAME

        def do(param: str) -> Tuple[int, bytes]:
            return client.docmd(param)

        do.client = client
        return do

    def test_ehlo(self, client):
        code, mesg = client.ehlo("example.com")
        assert code == 250
        lines = mesg.splitlines()
        assert lines == [
            bytes(socket.getfqdn(), "utf-8"),
            b"SIZE 33554432",
            b"SMTPUTF8",
            (
                b"AUTH DENYFALSE DENYMISSING LOGIN NONE NULL PLAIN "
                b"WITH-DASH WITH-MULTI-DASH WITH_UNDERSCORE"
            ),
            b"HELP",
        ]

    @pytest.mark.parametrize("mechanism", ["GSSAPI", "DIGEST-MD5", "MD5", "CRAM-MD5"])
    def test_not_supported_mechanism(self, client, mechanism):
        self._ehlo(client)
        resp = client.docmd("AUTH " + mechanism)
        assert resp == S.S504_AUTH_UNRECOG

    def test_custom_mechanism(self, client):
        self._ehlo(client)
        resp = client.docmd("AUTH NULL")
        assert resp == S.S235_AUTH_SUCCESS

    def test_disabled_mechanism(self, client):
        self._ehlo(client)
        resp = client.docmd("AUTH DONT")
        assert resp == S.S504_AUTH_UNRECOG

    @pytest.mark.parametrize("init_resp", [True, False])
    @pytest.mark.parametrize("mechanism", ["login", "plain"])
    def test_byclient(
        self, caplog, auth_peeker_controller, client, mechanism, init_resp
    ):
        self._ehlo(client)
        PW = "goodpasswd"
        client.user = "goodlogin"
        client.password = PW
        auth_meth = getattr(client, "auth_" + mechanism)
        if (mechanism, init_resp) == ("login", False) and (
            sys.version_info < (3, 8, 9)
            or (3, 9, 0) < sys.version_info < (3, 9, 4)
        ):
            # The bug with SMTP.auth_login was fixed in Python 3.10 and backported
            # to 3.9.4 and 3.8.9.
# See https://github.com/python/cpython/pull/24118 for the fixes.: with pytest.raises(SMTPAuthenticationError): client.auth(mechanism, auth_meth, initial_response_ok=init_resp) client.docmd("*") pytest.xfail(reason="smtplib.SMTP.auth_login is buggy (bpo-27820)") client.auth(mechanism, auth_meth, initial_response_ok=init_resp) peeker = auth_peeker_controller.handler assert isinstance(peeker, PeekerHandler) assert peeker.login == b"goodlogin" assert peeker.password == PW.encode("ascii") assert_nopassleak(PW, caplog.record_tuples) def test_plain1_bad_base64_encoding(self, do_auth_plain1): resp = do_auth_plain1("not-b64") assert resp == S.S501_AUTH_NOTB64 def test_plain1_bad_base64_length(self, do_auth_plain1): resp = do_auth_plain1(b64encode(b"\0onlylogin").decode()) assert resp == S.S501_AUTH_CANTSPLIT def test_plain1_too_many_values(self, do_auth_plain1): resp = do_auth_plain1("NONE NONE") assert resp == S.S501_TOO_MANY def test_plain1_bad_username(self, do_auth_plain1): resp = do_auth_plain1(b64encode(b"\0badlogin\0goodpasswd").decode()) assert resp == S.S535_AUTH_INVALID def test_plain1_bad_password(self, do_auth_plain1): resp = do_auth_plain1(b64encode(b"\0goodlogin\0badpasswd").decode()) assert resp == S.S535_AUTH_INVALID def test_plain1_empty(self, do_auth_plain1): resp = do_auth_plain1(B64EQUALS) assert resp == S.S501_AUTH_CANTSPLIT def test_plain1_good_credentials( self, caplog, auth_peeker_controller, do_auth_plain1 ): PW = "goodpasswd" PWb = PW.encode("ascii") resp = do_auth_plain1(b64encode(b"\0goodlogin\0" + PWb).decode()) assert resp == S.S235_AUTH_SUCCESS peeker = auth_peeker_controller.handler assert isinstance(peeker, PeekerHandler) assert peeker.login == b"goodlogin" assert peeker.password == PWb # noinspection PyUnresolvedReferences resp = do_auth_plain1.client.mail("alice@example.com") assert resp == S.S250_OK assert_nopassleak(PW, caplog.record_tuples) def test_plain1_goodcreds_sanitized_log(self, caplog, client): caplog.set_level("DEBUG") client.ehlo("example.com") PW = "goodpasswd" PWb = PW.encode("ascii") code, response = client.docmd( "AUTH PLAIN " + b64encode(b"\0goodlogin\0" + PWb).decode() ) interestings = [tup for tup in caplog.record_tuples if "AUTH PLAIN" in tup[-1]] assert len(interestings) == 2 assert interestings[0][1] == logging.DEBUG assert interestings[0][2].endswith("b'AUTH PLAIN ********\\r\\n'") assert interestings[1][1] == logging.INFO assert interestings[1][2].endswith("b'AUTH PLAIN ********'") assert_nopassleak(PW, caplog.record_tuples) @pytest.fixture def client_auth_plain2(self, client) -> SMTPClient: self._ehlo(client) resp = client.docmd("AUTH PLAIN") assert resp == S.S334_AUTH_EMPTYPROMPT return client def test_plain2_good_credentials( self, caplog, auth_peeker_controller, client_auth_plain2 ): PW = "goodpasswd" PWb = PW.encode("ascii") resp = client_auth_plain2.docmd(b64encode(b"\0goodlogin\0" + PWb).decode()) assert resp == S.S235_AUTH_SUCCESS peeker = auth_peeker_controller.handler assert isinstance(peeker, PeekerHandler) assert peeker.login == b"goodlogin" assert peeker.password == b"goodpasswd" resp = client_auth_plain2.mail("alice@example.com") assert resp == S.S250_OK assert_nopassleak(PW, caplog.record_tuples) def test_plain2_bad_credentials(self, client_auth_plain2): resp = client_auth_plain2.docmd(b64encode(b"\0badlogin\0badpasswd").decode()) assert resp == S.S535_AUTH_INVALID def test_plain2_no_credentials(self, client_auth_plain2): resp = client_auth_plain2.docmd(B64EQUALS) assert resp == S.S501_AUTH_CANTSPLIT def 
test_plain2_abort(self, client_auth_plain2): resp = client_auth_plain2.docmd("*") assert resp == S.S501_AUTH_ABORTED def test_plain2_bad_base64_encoding(self, client_auth_plain2): resp = client_auth_plain2.docmd("ab@%") assert resp == S.S501_AUTH_NOTB64 def test_login2_bad_base64(self, auth_peeker_controller, client): self._ehlo(client) resp = client.docmd("AUTH LOGIN ab@%") assert resp == S.S501_AUTH_NOTB64 def test_login2_good_credentials(self, caplog, auth_peeker_controller, client): self._ehlo(client) PW = "goodpasswd" PWb = PW.encode("ascii") line = "AUTH LOGIN " + b64encode(b"goodlogin").decode() resp = client.docmd(line) assert resp == S.S334_AUTH_PASSWORD assert resp == S.S334_AUTH_PASSWORD resp = client.docmd(b64encode(PWb).decode()) assert resp == S.S235_AUTH_SUCCESS peeker = auth_peeker_controller.handler assert isinstance(peeker, PeekerHandler) assert peeker.login == b"goodlogin" assert peeker.password == PWb resp = client.mail("alice@example.com") assert resp == S.S250_OK assert_nopassleak(PW, caplog.record_tuples) def test_login3_good_credentials( self, caplog, auth_peeker_controller, do_auth_login3 ): PW = "goodpasswd" PWb = PW.encode("ascii") resp = do_auth_login3(b64encode(b"goodlogin").decode()) assert resp == S.S334_AUTH_PASSWORD resp = do_auth_login3(b64encode(PWb).decode()) assert resp == S.S235_AUTH_SUCCESS peeker = auth_peeker_controller.handler assert isinstance(peeker, PeekerHandler) assert peeker.login == b"goodlogin" assert peeker.password == PWb # noinspection PyUnresolvedReferences resp = do_auth_login3.client.mail("alice@example.com") assert resp == S.S250_OK assert_nopassleak(PW, caplog.record_tuples) def test_login3_bad_base64(self, do_auth_login3): resp = do_auth_login3("not-b64") assert resp == S.S501_AUTH_NOTB64 def test_login3_bad_username(self, do_auth_login3): resp = do_auth_login3(b64encode(b"badlogin").decode()) assert resp == S.S334_AUTH_PASSWORD resp = do_auth_login3(b64encode(b"goodpasswd").decode()) assert resp == S.S535_AUTH_INVALID def test_login3_bad_password(self, do_auth_login3): resp = do_auth_login3(b64encode(b"goodlogin").decode()) assert resp == S.S334_AUTH_PASSWORD resp = do_auth_login3(b64encode(b"badpasswd").decode()) assert resp == S.S535_AUTH_INVALID def test_login3_empty_credentials(self, do_auth_login3): resp = do_auth_login3(B64EQUALS) assert resp == S.S334_AUTH_PASSWORD resp = do_auth_login3(B64EQUALS) assert resp == S.S535_AUTH_INVALID def test_login3_abort_username(self, do_auth_login3): resp = do_auth_login3("*") assert resp == S.S501_AUTH_ABORTED def test_login3_abort_password(self, do_auth_login3): resp = do_auth_login3(B64EQUALS) assert resp == S.S334_AUTH_PASSWORD resp = do_auth_login3("*") assert resp == S.S501_AUTH_ABORTED def test_DENYFALSE(self, client): self._ehlo(client) resp = client.docmd("AUTH DENYFALSE") assert resp == S.S535_AUTH_INVALID def test_DENYMISSING(self, client): self._ehlo(client) resp = client.docmd("AUTH DENYMISSING") assert resp == S.S535_AUTH_INVALID def test_NONE(self, client): self._ehlo(client) resp = client.docmd("AUTH NONE") assert resp == S.S235_AUTH_SUCCESS # noinspection HardcodedPassword class TestAuthenticator(_CommonMethods): def test_success(self, caplog, authenticator_peeker_controller, client): PW = "goodpasswd" client.user = "gooduser" client.password = PW self._ehlo(client) client.auth("plain", client.auth_plain) auth_peeker = authenticator_peeker_controller.handler assert isinstance(auth_peeker, PeekerHandler) assert auth_peeker.sess.peer[0] in {"::1", "127.0.0.1", "localhost"} 
        assert auth_peeker.sess.peer[1] > 0
        assert auth_peeker.sess.authenticated
        assert auth_peeker.sess.auth_data == (b"gooduser", PW.encode("ascii"))
        assert auth_peeker.login_data == (b"gooduser", PW.encode("ascii"))
        assert_nopassleak(PW, caplog.record_tuples)

    def test_fail_withmesg(self, caplog, authenticator_peeker_controller, client):
        PW = "anypass"
        client.user = "failme_with454"
        client.password = PW
        self._ehlo(client)
        with pytest.raises(SMTPAuthenticationError) as cm:
            client.auth("plain", client.auth_plain)
        assert cm.value.args == (454, b"4.7.0 Temporary authentication failure")
        auth_peeker = authenticator_peeker_controller.handler
        assert isinstance(auth_peeker, PeekerHandler)
        assert auth_peeker.sess.peer[0] in {"::1", "127.0.0.1", "localhost"}
        assert auth_peeker.sess.peer[1] > 0
        assert auth_peeker.sess.login_data is None
        assert auth_peeker.login_data == (b"failme_with454", PW.encode("ascii"))
        assert_nopassleak(PW, caplog.record_tuples)


@pytest.mark.filterwarnings("ignore:Requiring AUTH while not requiring TLS:UserWarning")
@pytest.mark.usefixtures("plain_controller")
@controller_data(
    decode_data=True,
    enable_SMTPUTF8=True,
    auth_require_tls=False,
    auth_callback=auth_callback,
    auth_required=True,
)
class TestRequiredAuthentication(_CommonMethods):
    def _login(self, client: SMTPClient):
        self._ehlo(client)
        resp = client.login("goodlogin", "goodpasswd")
        assert resp == S.S235_AUTH_SUCCESS

    def test_help_unauthenticated(self, client):
        resp = client.docmd("HELP")
        assert resp == S.S530_AUTH_REQUIRED

    def test_help_authenticated(self, client):
        self._login(client)
        resp = client.docmd("HELP")
        assert resp == S.S250_SUPPCMD_NOTLS

    def test_vrfy_unauthenticated(self, client):
        resp = client.docmd("VRFY ")
        assert resp == S.S530_AUTH_REQUIRED

    def test_mail_unauthenticated(self, client):
        self._ehlo(client)
        resp = client.docmd("MAIL FROM: ")
        assert resp == S.S530_AUTH_REQUIRED

    def test_rcpt_unauthenticated(self, client):
        self._ehlo(client)
        resp = client.docmd("RCPT TO: ")
        assert resp == S.S530_AUTH_REQUIRED

    def test_rcpt_nomail_authenticated(self, client):
        self._login(client)
        resp = client.docmd("RCPT TO: ")
        assert resp == S.S503_MAIL_NEEDED

    def test_data_unauthenticated(self, client):
        self._ehlo(client)
        resp = client.docmd("DATA")
        assert resp == S.S530_AUTH_REQUIRED

    def test_data_authenticated(self, client):
        self._ehlo(client, "example.com")
        client.login("goodlogin", "goodpassword")
        resp = client.docmd("DATA")
        assert resp != S.S530_AUTH_REQUIRED

    def test_vrfy_authenticated(self, client):
        self._login(client)
        resp = client.docmd("VRFY ")
        assert resp == S.S252_CANNOT_VRFY

    def test_mail_authenticated(self, client):
        self._login(client)
        resp = client.docmd("MAIL FROM: ")
        assert resp == S.S250_OK

    def test_data_norcpt_authenticated(self, client):
        self._login(client)
        resp = client.docmd("DATA")
        assert resp == S.S503_RCPT_NEEDED


class TestResetCommands:
    """Test that sender and recipients are reset on RSET, HELO, and EHLO.

    The tests below issue each command twice with different addresses and
    verify that mail_from and rcpt_tos have been replaced.
    """

    expected_envelope_data = [
        # Pre-RSET/HELO/EHLO envelope data.
dict( mail_from="anne@example.com", rcpt_tos=["bart@example.com", "cate@example.com"], ), dict( mail_from="dave@example.com", rcpt_tos=["elle@example.com", "fred@example.com"], ), ] def _send_envelope_data( self, client: SMTPClient, mail_from: str, rcpt_tos: List[str], ): client.mail(mail_from) for rcpt in rcpt_tos: client.rcpt(rcpt) @handler_data(class_=StoreEnvelopeOnVRFYHandler) def test_helo(self, decoding_authnotls_controller, client): handler = decoding_authnotls_controller.handler assert isinstance(handler, StoreEnvelopeOnVRFYHandler) # Each time through the loop, the HELO will reset the envelope. for data in self.expected_envelope_data: client.helo("example.com") # Save the envelope in the handler. client.vrfy("zuzu@example.com") assert handler.envelope.mail_from is None assert len(handler.envelope.rcpt_tos) == 0 self._send_envelope_data(client, **data) client.vrfy("zuzu@example.com") assert handler.envelope.mail_from == data["mail_from"] assert handler.envelope.rcpt_tos == data["rcpt_tos"] @handler_data(class_=StoreEnvelopeOnVRFYHandler) def test_ehlo(self, decoding_authnotls_controller, client): handler = decoding_authnotls_controller.handler assert isinstance(handler, StoreEnvelopeOnVRFYHandler) # Each time through the loop, the EHLO will reset the envelope. for data in self.expected_envelope_data: client.ehlo("example.com") # Save the envelope in the handler. client.vrfy("zuzu@example.com") assert handler.envelope.mail_from is None assert len(handler.envelope.rcpt_tos) == 0 self._send_envelope_data(client, **data) client.vrfy("zuzu@example.com") assert handler.envelope.mail_from == data["mail_from"] assert handler.envelope.rcpt_tos == data["rcpt_tos"] @handler_data(class_=StoreEnvelopeOnVRFYHandler) def test_rset(self, decoding_authnotls_controller, client): handler = decoding_authnotls_controller.handler assert isinstance(handler, StoreEnvelopeOnVRFYHandler) client.helo("example.com") # Each time through the loop, the RSET will reset the envelope. for data in self.expected_envelope_data: self._send_envelope_data(client, **data) # Save the envelope in the handler. client.vrfy("zuzu@example.com") assert handler.envelope.mail_from == data["mail_from"] assert handler.envelope.rcpt_tos == data["rcpt_tos"] # Reset the envelope explicitly. 
client.rset() client.vrfy("zuzu@example.com") assert handler.envelope.mail_from is None assert len(handler.envelope.rcpt_tos) == 0 class TestSMTPWithController(_CommonMethods): @controller_data(data_size_limit=9999) def test_mail_with_size_too_large(self, plain_controller, client): self._ehlo(client) resp = client.docmd("MAIL FROM: SIZE=10000") assert resp == S.S552_EXCEED_SIZE @handler_data(class_=ReceivingHandler) def test_mail_with_compatible_smtputf8(self, plain_controller, client): receiving_handler = plain_controller.handler assert isinstance(receiving_handler, ReceivingHandler) sender = "anne\xCB@example.com" recipient = "bart\xCB@example.com" self._ehlo(client) client.send(f"MAIL FROM: <{sender}> SMTPUTF8\r\n".encode("utf-8")) assert client.getreply() == S.S250_OK client.send(f"RCPT TO: <{recipient}>\r\n".encode("utf-8")) assert client.getreply() == S.S250_OK resp = client.data("") assert resp == S.S250_OK assert receiving_handler.box[0].mail_from == sender assert receiving_handler.box[0].rcpt_tos == [recipient] def test_mail_with_unrequited_smtputf8(self, plain_controller, client): self._ehlo(client) resp = client.docmd("MAIL FROM: ") assert resp == S.S250_OK def test_mail_with_incompatible_smtputf8(self, plain_controller, client): self._ehlo(client) resp = client.docmd("MAIL FROM: SMTPUTF8=YES") assert resp == S.S501_SMTPUTF8_NOARG def test_mail_invalid_body(self, plain_controller, client): self._ehlo(client) resp = client.docmd("MAIL FROM: BODY 9BIT") assert resp == S.S501_MAIL_BODY @controller_data(data_size_limit=None) def test_esmtp_no_size_limit(self, plain_controller, client): code, mesg = client.ehlo("example.com") for ln in mesg.splitlines(): assert not ln.startswith(b"SIZE") @handler_data(class_=ErroringHandler) def test_process_message_error(self, error_controller, client): self._ehlo(client) with pytest.raises(SMTPDataError) as excinfo: client.sendmail( "anne@example.com", ["bart@example.com"], dedent( """\ From: anne@example.com To: bart@example.com Subjebgct: A test Testing """ ), ) assert excinfo.value.args == (499, b"Could not accept the message") @controller_data(data_size_limit=100) def test_too_long_message_body(self, plain_controller, client): self._helo(client) mail = "\r\n".join(["z" * 20] * 10) with pytest.raises(SMTPResponseException) as excinfo: client.sendmail("anne@example.com", ["bart@example.com"], mail) assert excinfo.value.args == S.S552_DATA_TOO_MUCH @handler_data(class_=ReceivingHandler) def test_dots_escaped(self, decoding_authnotls_controller, client): receiving_handler = decoding_authnotls_controller.handler assert isinstance(receiving_handler, ReceivingHandler) self._helo(client) mail = CRLF.join(["Test", ".", "mail"]) client.sendmail("anne@example.com", ["bart@example.com"], mail) assert len(receiving_handler.box) == 1 assert receiving_handler.box[0].content == mail + CRLF @handler_data(class_=ErroringHandler) def test_unexpected_errors(self, error_controller, client): handler = error_controller.handler resp = client.helo("example.com") assert resp == (500, b"ErroringHandler handling error") exception_type = ErrorSMTP.exception_type assert isinstance(handler.error, exception_type) def test_unexpected_errors_unhandled(self, error_controller, client): resp = client.helo("example.com") exception_type = ErrorSMTP.exception_type exception_nameb = exception_type.__name__.encode("ascii") assert resp == (500, b"Error: (" + exception_nameb + b") test") @handler_data(class_=ErroringHandler) def test_unexpected_errors_custom_response(self, 
error_controller, client): erroring_handler = error_controller.handler erroring_handler.custom_response = True resp = client.helo("example.com") exception_type = ErrorSMTP.exception_type assert isinstance(erroring_handler.error, exception_type) exception_nameb = exception_type.__name__.encode("ascii") assert resp == (451, b"Temporary error: (" + exception_nameb + b") test") @handler_data(class_=ErroringErrorHandler) def test_exception_handler_exception(self, error_controller, client): handler = error_controller.handler resp = client.helo("example.com") assert resp == (500, b"Error: (ValueError) ErroringErrorHandler test") exception_type = ErrorSMTP.exception_type assert isinstance(handler.error, exception_type) @handler_data(class_=UndescribableErrorHandler) def test_exception_handler_undescribable(self, error_controller, client): handler = error_controller.handler resp = client.helo("example.com") assert resp == (500, b"Error: Cannot describe error") exception_type = ErrorSMTP.exception_type assert isinstance(handler.error, exception_type) @handler_data(class_=ErroringHandlerConnectionLost) def test_exception_handler_multiple_connections_lost( self, error_controller, client ): client1 = client code, mesg = client1.ehlo("example.com") assert code == 250 with SMTPClient(*Global.SrvAddr) as client2: code, mesg = client2.ehlo("example.com") assert code == 250 with pytest.raises(SMTPServerDisconnected) as exc: mail = CRLF.join(["Test", ".", "mail"]) client2.sendmail("anne@example.com", ["bart@example.com"], mail) assert isinstance(exc.value, SMTPServerDisconnected) assert error_controller.handler.error is None # At this point connection should be down with pytest.raises(SMTPServerDisconnected) as exc: client2.mail("alice@example.com") assert str(exc.value) == "please run connect() first" # client1 shouldn't be affected. resp = client1.mail("alice@example.com") assert resp == S.S250_OK @handler_data(class_=ReceivingHandler) def test_bad_encodings(self, decoding_authnotls_controller, client): handler: ReceivingHandler = decoding_authnotls_controller.handler self._helo(client) mail_from = b"anne\xFF@example.com" mail_to = b"bart\xFF@example.com" self._ehlo(client, "test") client.send(b"MAIL FROM:" + mail_from + b"\r\n") assert client.getreply() == S.S250_OK client.send(b"RCPT TO:" + mail_to + b"\r\n") assert client.getreply() == S.S250_OK client.data("Test mail") assert len(handler.box) == 1 envelope = handler.box[0] mail_from2 = envelope.mail_from.encode("utf-8", errors="surrogateescape") assert mail_from2 == mail_from mail_to2 = envelope.rcpt_tos[0].encode("utf-8", errors="surrogateescape") assert mail_to2 == mail_to @controller_data(decode_data=False) def test_data_line_too_long(self, plain_controller, client): self._helo(client) client.helo("example.com") mail = b"\r\n".join([b"a" * 5555] * 3) with pytest.raises(SMTPDataError) as exc: client.sendmail("anne@example.com", ["bart@example.com"], mail) assert exc.value.args == S.S500_DATALINE_TOO_LONG @controller_data(data_size_limit=10000) def test_long_line_double_count(self, plain_controller, client): # With a read limit of 1001 bytes in aiosmtp.SMTP, asyncio.StreamReader # returns too-long lines of length up to 2002 bytes. # This test ensures that bytes in partial lines are only counted once. # If the implementation has a double-counting bug, then a message of # 9998 bytes + CRLF will raise SMTPResponseException. 
client.helo("example.com") mail = "z" * 9998 with pytest.raises(SMTPDataError) as exc: client.sendmail("anne@example.com", ["bart@example.com"], mail) assert exc.value.args == S.S500_DATALINE_TOO_LONG def test_long_line_leak(self, mocker: MockFixture, plain_controller, client): # Simulates situation where readuntil() does not raise LimitOverrunError, # but somehow the line_fragments when join()ed resulted in a too-long line # Hijack EMPTY_BARR.join() to return a bytes object that's definitely too long mock_ebarr = mocker.patch("aiosmtpd.smtp.EMPTY_BARR") mock_ebarr.join.return_value = b"a" * 1010 client.helo("example.com") mail = "z" * 72 # Make sure this is small and definitely within limits with pytest.raises(SMTPDataError) as exc: client.sendmail("anne@example.com", ["bart@example.com"], mail) assert exc.value.args == S.S500_DATALINE_TOO_LONG # self.assertEqual(cm.exception.smtp_code, 500) # self.assertEqual(cm.exception.smtp_error, # b'Line too long (see RFC5321 4.5.3.1.6)') @controller_data(data_size_limit=20) def test_too_long_body_delay_error(self, plain_controller): with socket.socket() as sock: sock.connect((plain_controller.hostname, plain_controller.port)) rslt = send_recv(sock, b"EHLO example.com") assert rslt.startswith(b"220") rslt = send_recv(sock, b"MAIL FROM: ") assert rslt.startswith(b"250") rslt = send_recv(sock, b"RCPT TO: ") assert rslt.startswith(b"250") rslt = send_recv(sock, b"DATA") assert rslt.startswith(b"354") rslt = send_recv(sock, b"a" * (20 + 3)) # Must NOT receive status code here even if data is too much assert rslt == b"" rslt = send_recv(sock, b"\r\n.") # *NOW* we must receive status code assert rslt == b"552 Error: Too much mail data\r\n" @controller_data(data_size_limit=700) def test_too_long_body_then_too_long_lines(self, plain_controller, client): # If "too much mail" state was reached before "too long line" gets received, # SMTP should respond with '552' instead of '500' client.helo("example.com") mail = "\r\n".join(["z" * 76] * 10 + ["a" * 1100] * 2) with pytest.raises(SMTPResponseException) as exc: client.sendmail("anne@example.com", ["bart@example.com"], mail) assert exc.value.args == S.S552_DATA_TOO_MUCH def test_too_long_line_delay_error(self, plain_controller): with socket.socket() as sock: sock.connect((plain_controller.hostname, plain_controller.port)) rslt = send_recv(sock, b"EHLO example.com") assert rslt.startswith(b"220") rslt = send_recv(sock, b"MAIL FROM: ") assert rslt.startswith(b"250") rslt = send_recv(sock, b"RCPT TO: ") assert rslt.startswith(b"250") rslt = send_recv(sock, b"DATA") assert rslt.startswith(b"354") rslt = send_recv(sock, b"a" * (Server.line_length_limit + 3)) # Must NOT receive status code here even if data is too much assert rslt == b"" rslt = send_recv(sock, b"\r\n.") # *NOW* we must receive status code assert rslt == S.S500_DATALINE_TOO_LONG.to_bytes(crlf=True) @controller_data(data_size_limit=2000) def test_too_long_lines_then_too_long_body(self, plain_controller, client): # If "too long line" state was reached before "too much data" happens, # SMTP should respond with '500' instead of '552' client.helo("example.com") mail = "\r\n".join(["z" * (2000 - 1)] * 2) with pytest.raises(SMTPResponseException) as exc: client.sendmail("anne@example.com", ["bart@example.com"], mail) assert exc.value.args == S.S500_DATALINE_TOO_LONG class TestCustomization(_CommonMethods): @controller_data(class_=CustomHostnameController) def test_custom_hostname(self, plain_controller, client): code, mesg = client.helo("example.com") assert 
code == 250
        assert mesg == CustomHostnameController.custom_name.encode("ascii")

    def test_default_greeting(self, plain_controller, client):
        controller = plain_controller
        code, mesg = client.connect(controller.hostname, controller.port)
        assert code == 220
        # The hostname prefix is unpredictable
        assert mesg.endswith(bytes(GREETING, "utf-8"))

    @controller_data(class_=CustomIdentController)
    def test_custom_greeting(self, plain_controller, client):
        controller = plain_controller
        code, mesg = client.connect(controller.hostname, controller.port)
        assert code == 220
        # The hostname prefix is unpredictable.
        assert mesg.endswith(CustomIdentController.ident)

    @controller_data(decode_data=False)
    def test_mail_invalid_body_param(self, plain_controller, client):
        client.ehlo("example.com")
        resp = client.docmd("MAIL FROM: BODY=FOOBAR")
        assert resp == S.S501_MAIL_BODY

    def test_limitlocalpart(self, plain_controller, client):
        plain_controller.smtpd.local_part_limit = 64
        client.ehlo("example.com")
        locpart = "a" * 64
        resp = client.docmd(f"MAIL FROM: {locpart}@example.com")
        assert resp == S.S250_OK
        locpart = "b" * 65
        resp = client.docmd(f"RCPT TO: {locpart}@example.com")
        assert resp == S.S553_MALFORMED


class TestClientCrash(_CommonMethods):
    def test_connection_reset_during_DATA(
        self, mocker: MockFixture, plain_controller, client
    ):
        # Trigger factory() to produce the smtpd server
        self._helo(client)
        smtpd: Server = plain_controller.smtpd
        spy = mocker.spy(smtpd._writer, "close")
        # Do some stuff
        client.docmd("MAIL FROM: ")
        client.docmd("RCPT TO: ")
        # Entering portion of code where hang is possible (upon assertion fail), so
        # we must wrap with "try..finally". See pytest-dev/pytest#7989
        try:
            resp = client.docmd("DATA")
            assert resp == S.S354_DATA_ENDWITH
            # Start sending the DATA but reset the connection before that
            # completes, i.e. before the .\r\n
            client.send(b"From: ")
            reset_connection(client)
            with pytest.raises(SMTPServerDisconnected):
                client.noop()
            catchup_delay()
            # Apparently within that delay, ._writer.close() invoked several times
            # That is okay; we just want to ensure that it's invoked at least once.
            assert spy.call_count > 0
        finally:
            plain_controller.stop()

    def test_connection_reset_during_command(
        self, mocker: MockFixture, plain_controller, client
    ):
        # Trigger factory() to produce the smtpd server
        self._helo(client)
        smtpd: Server = plain_controller.smtpd
        spy = mocker.spy(smtpd._writer, "close")
        # Start sending a command but reset the connection before that
        # completes, i.e. before the \r\n
        client.send("MAIL FROM: ")
        reset_connection(client)
        catchup_delay()
        assert spy.call_count > 0

    def test_connection_reset_in_long_command(self, plain_controller, client):
        client.send("F" + 5555 * "O")  # without CRLF
        reset_connection(client)
        catchup_delay()
        # At this point, smtpd's StreamWriter hasn't been initialized. Prolly since
        # the call is self._reader.readline() and we abort before CRLF is sent.
        # That is why we don't need to 'spy' on writer.close()
        writer = plain_controller.smtpd._writer
        # transport.is_closing() == True if transport is in the process of closing,
        # and still == True if transport is closed.
        assert writer.transport.is_closing()

    def test_close_in_command(self, plain_controller, client):
        # Don't include the CRLF.
        client.send("FOO")
        client.close()
        catchup_delay()
        # At this point, smtpd's StreamWriter hasn't been initialized. Prolly since
        # the call is self._reader.readline() and we abort before CRLF is sent.
# That is why we don't need to 'spy' on writer.close() writer = plain_controller.smtpd._writer # transport.is_closing() == True if transport is in the process of closing, # and still == True if transport is closed. assert writer.transport.is_closing() def test_close_in_command_2(self, mocker: MockFixture, plain_controller, client): self._helo(client) catchup_delay() smtpd: Server = plain_controller.smtpd writer = smtpd._writer spy = mocker.spy(writer, "close") # Don't include the CRLF. client.send("FOO") client.close() catchup_delay() # Check that smtpd._writer.close() invoked at least once assert spy.call_count > 0 # transport.is_closing() == True if transport is in the process of closing, # and still == True if transport is closed. assert writer.transport.is_closing() def test_close_in_long_command(self, plain_controller, client): client.send("F" + 5555 * "O") # without CRLF client.close() catchup_delay() # At this point, smtpd's StreamWriter hasn't been initialized. Prolly since # the call is self._reader.readline() and we abort before CRLF is sent. # That is why we don't need to 'spy' on writer.close() writer = plain_controller.smtpd._writer # transport.is_closing() == True if transport is in the process of closing, # and still == True if transport is closed. assert writer.transport.is_closing() def test_close_in_data(self, mocker: MockFixture, plain_controller, client): self._helo(client) smtpd: Server = plain_controller.smtpd writer = smtpd._writer spy = mocker.spy(writer, "close") resp = client.docmd("MAIL FROM: ") assert resp == S.S250_OK resp = client.docmd("RCPT TO: ") assert resp == S.S250_OK # Entering portion of code where hang is possible (upon assertion fail), so # we must wrap with "try..finally". See pytest-dev/pytest#7989 try: resp = client.docmd("DATA") assert resp == S.S354_DATA_ENDWITH # Don't include the CRLF. client.send("FOO") client.close() catchup_delay() # Check that smtpd._writer.close() invoked at least once assert spy.call_count > 0 # transport.is_closing() == True if transport is in the process of closing, # and still == True if transport is closed. assert writer.transport.is_closing() finally: plain_controller.stop() def test_sockclose_after_helo(self, mocker: MockFixture, plain_controller, client): client.send("HELO example.com\r\n") catchup_delay() smtpd: Server = plain_controller.smtpd writer = smtpd._writer spy = mocker.spy(writer, "close") client.sock.shutdown(socket.SHUT_WR) catchup_delay() # Check that smtpd._writer.close() invoked at least once assert spy.call_count > 0 # transport.is_closing() == True if transport is in the process of closing, # and still == True if transport is closed. 
assert writer.transport.is_closing() @pytest.mark.usefixtures("plain_controller") @controller_data(enable_SMTPUTF8=False, decode_data=True) class TestStrictASCII(_CommonMethods): def test_ehlo(self, client): blines = self._ehlo(client) assert b"SMTPUTF8" not in blines def test_bad_encoded_param(self, client): self._ehlo(client) client.send(b"MAIL FROM: \r\n") assert client.getreply() == S.S500_STRICT_ASCII def test_mail_param(self, client): self._ehlo(client) resp = client.docmd("MAIL FROM: SMTPUTF8") assert resp == S.S501_SMTPUTF8_DISABLED def test_data(self, client): self._ehlo(client) with pytest.raises(SMTPDataError) as excinfo: client.sendmail( "anne@example.com", ["bart@example.com"], b"From: anne@example.com\n" b"To: bart@example.com\n" b"Subject: A test\n" b"\n" b"Testing\xFF\n", ) assert excinfo.value.args == S.S500_STRICT_ASCII class TestSleepingHandler(_CommonMethods): # What is the point here? @controller_data(decode_data=False) @handler_data(class_=SleepingHeloHandler) def test_close_after_helo(self, plain_controller, client): # # What are we actually testing? # client.send("HELO example.com\r\n") client.sock.shutdown(socket.SHUT_WR) with pytest.raises(SMTPServerDisconnected): client.getreply() class TestTimeout(_CommonMethods): @controller_data(class_=TimeoutController) def test_timeout(self, plain_controller, client): # This one is rapid, it must succeed self._ehlo(client) time.sleep(0.1 + TimeoutController.Delay) with pytest.raises(SMTPServerDisconnected): client.mail("anne@example.com") class TestAuthArgs: def test_warn_authreqnotls(self, caplog): with pytest.warns(UserWarning) as record: _ = Server(Sink(), auth_required=True, auth_require_tls=False) for warning in record: if warning.message.args and ( warning.message.args[0] == "Requiring AUTH while not requiring TLS can lead to " "security vulnerabilities!" 
): break else: pytest.xfail("Did not raise expected warning") assert caplog.record_tuples[0] == ( "mail.log", logging.WARNING, "auth_required == True but auth_require_tls == False", ) def test_log_authmechanisms(self, caplog): caplog.set_level(logging.INFO) server = Server(Sink()) auth_mechs = sorted( m.replace("auth_", "") + "(builtin)" for m in dir(server) if m.startswith("auth_") ) assert ( caplog.record_tuples[0][-1] == f"Available AUTH mechanisms: {' '.join(auth_mechs)}" ) @pytest.mark.parametrize( "name", [ "has space", "has.dot", "has/slash", "has\\backslash", ], ) def test_authmechname_decorator_badname(self, name): expectre = r"Invalid AUTH mechanism name" with pytest.raises(ValueError, match=expectre): auth_mechanism(name) class TestLimits(_CommonMethods): def _consume_budget( self, client: SMTPClient, nums: int, cmd: str, *args, ok_expected=None ): code, _ = client.ehlo("example.com") assert code == 250 func = getattr(client, cmd) expected = ok_expected or S.S250_OK for _ in range(0, nums): assert func(*args) == expected assert func(*args) == S.S421_TOO_MANY(cmd.upper().encode()) with pytest.raises(SMTPServerDisconnected): client.noop() def test_limit_wrong_type(self): with pytest.raises(TypeError) as exc: # noinspection PyTypeChecker _ = Server(Sink(), command_call_limit="invalid") assert exc.value.args[0] == "command_call_limit must be int or Dict[str, int]" def test_limit_wrong_value_type(self): with pytest.raises(TypeError) as exc: # noinspection PyTypeChecker _ = Server(Sink(), command_call_limit={"NOOP": "invalid"}) assert exc.value.args[0] == "All command_call_limit values must be int" @controller_data(command_call_limit=15) def test_all_limit_15(self, plain_controller, client): self._consume_budget(client, 15, "noop") @controller_data(command_call_limit={"NOOP": 15, "EXPN": 5}) def test_different_limits(self, plain_controller, client): srv_ip_port = plain_controller.hostname, plain_controller.port self._consume_budget(client, 15, "noop") client.connect(*srv_ip_port) self._consume_budget( client, 5, "expn", "alice@example.com", ok_expected=S.S502_EXPN_NOTIMPL ) client.connect(*srv_ip_port) self._consume_budget( client, CALL_LIMIT_DEFAULT, "vrfy", "alice@example.com", ok_expected=S.S252_CANNOT_VRFY, ) @controller_data(command_call_limit={"NOOP": 7, "EXPN": 5, "*": 25}) def test_different_limits_custom_default(self, plain_controller, client): # Important: make sure default_max > CALL_LIMIT_DEFAULT # Others can be set small to cut down on testing time, but must be different assert plain_controller.smtpd._call_limit_default > CALL_LIMIT_DEFAULT srv_ip_port = plain_controller.hostname, plain_controller.port self._consume_budget(client, 7, "noop") client.connect(*srv_ip_port) self._consume_budget( client, 5, "expn", "alice@example.com", ok_expected=S.S502_EXPN_NOTIMPL ) client.connect(*srv_ip_port) self._consume_budget( client, 25, "vrfy", "alice@example.com", ok_expected=S.S252_CANNOT_VRFY, ) @controller_data(command_call_limit=7) def test_limit_bogus(self, plain_controller, client): assert plain_controller.smtpd._call_limit_default > BOGUS_LIMIT code, mesg = client.ehlo("example.com") assert code == 250 for i in range(0, BOGUS_LIMIT - 1): cmd = f"BOGUS{i}" assert client.docmd(cmd) == S.S500_CMD_UNRECOG(cmd.encode()) assert client.docmd("LASTBOGUS") == S.S502_TOO_MANY_UNRECOG with pytest.raises(SMTPServerDisconnected): client.noop() class TestSanitize: def test_loginpassword(self): lp = LoginPassword(b"user", b"pass") expect = "LoginPassword(login='user', password=...)" assert 
repr(lp) == expect assert str(lp) == expect def test_authresult(self): ar = AuthResult(success=True, auth_data="user:pass") expect = "AuthResult(success=True, handled=True, message=None, auth_data=...)" assert repr(ar) == expect assert str(ar) == expect aio-libs-aiosmtpd-b634d9b/aiosmtpd/tests/test_smtps.py000066400000000000000000000030271462210711200232050ustar00rootroot00000000000000# Copyright 2014-2021 The aiosmtpd Developers # SPDX-License-Identifier: Apache-2.0 """Test SMTP over SSL/TLS.""" from email.mime.text import MIMEText from smtplib import SMTP, SMTP_SSL from typing import Generator, Union import pytest from aiosmtpd.controller import Controller from aiosmtpd.testing.helpers import ReceivingHandler from aiosmtpd.testing.statuscodes import SMTP_STATUS_CODES as S from .conftest import Global @pytest.fixture def ssl_controller( get_controller, ssl_context_server ) -> Generator[Controller, None, None]: handler = ReceivingHandler() controller = get_controller(handler, ssl_context=ssl_context_server) controller.start() Global.set_addr_from(controller) # yield controller # controller.stop() @pytest.fixture def smtps_client(ssl_context_client) -> Generator[Union[SMTP_SSL, SMTP], None, None]: context = ssl_context_client with SMTP_SSL(*Global.SrvAddr, context=context) as client: yield client class TestSMTPS: def test_smtps(self, ssl_controller, smtps_client): sender = "sender@example.com" recipients = ["rcpt1@example.com"] resp = smtps_client.helo("example.com") assert resp == S.S250_FQDN results = smtps_client.send_message(MIMEText("hi"), sender, recipients) assert results == {} handler: ReceivingHandler = ssl_controller.handler assert len(handler.box) == 1 envelope = handler.box[0] assert envelope.mail_from == sender assert envelope.rcpt_tos == recipients aio-libs-aiosmtpd-b634d9b/aiosmtpd/tests/test_smtpsmuggling.py000066400000000000000000000051421462210711200247370ustar00rootroot00000000000000# Copyright 2024 The aiosmtpd Developers # SPDX-License-Identifier: Apache-2.0 """Test SMTP smuggling.""" import smtplib import re from aiosmtpd.testing.helpers import ReceivingHandler from aiosmtpd.testing.statuscodes import SMTP_STATUS_CODES as S from .conftest import handler_data def new_data(self, msg): self.putcmd("data") (code, repl) = self.getreply() if self.debuglevel > 0: self._print_debug('data:', (code, repl)) if code != 354: raise smtplib.SMTPDataError(code, repl) else: q = msg self.send(q) (code, msg) = self.getreply() if self.debuglevel > 0: self._print_debug('data:', (code, msg)) return (code, msg) def orig_data(self, msg): self.putcmd("data") (code, repl) = self.getreply() if self.debuglevel > 0: self._print_debug('data:', (code, repl)) if code != 354: raise smtplib.SMTPDataError(code, repl) else: if isinstance(msg, str): msg = _fix_eols(msg).encode('ascii') q = _quote_periods(msg) if q[-2:] != smtplib.bCRLF: q = q + smtplib.bCRLF q = q + b"." 
+ smtplib.bCRLF self.send(q) (code, msg) = self.getreply() if self.debuglevel > 0: self._print_debug('data:', (code, msg)) return (code, msg) def _fix_eols(data): return re.sub(r'(?:\r\n|\n|\r(?!\n))', smtplib.CRLF, data) def _quote_periods(bindata): return re.sub(br'(?m)^\.', b'..', bindata) def return_unchanged(data): return data class TestSmuggling: @handler_data(class_=ReceivingHandler) def test_smtp_smuggling(self, plain_controller, client): smtplib._fix_eols = return_unchanged smtplib._quote_periods = return_unchanged smtplib.SMTP.data = new_data handler = plain_controller.handler sender = "sender@example.com" recipients = ["rcpt1@example.com"] resp = client.helo("example.com") assert resp == S.S250_FQDN # Trying SMTP smuggling with a fake \n.\r\n end-of-data sequence. message_data = b"""\ From: Anne Person \r\n\ To: Bart Person \r\n\ Subject: A test\r\n\ Message-ID: \r\n\ \r\n\ Testing\ \n.\r\n\ NO SMUGGLING \r\n.\r\n\ """ client.sendmail(sender, recipients, message_data) client.quit() smtplib._fix_eols = _fix_eols smtplib._quote_periods = _quote_periods smtplib.SMTP.data = orig_data assert b"NO SMUGGLING" in handler.box[0].content aio-libs-aiosmtpd-b634d9b/aiosmtpd/tests/test_starttls.py000066400000000000000000000310051462210711200237140ustar00rootroot00000000000000# Copyright 2014-2021 The aiosmtpd Developers # SPDX-License-Identifier: Apache-2.0 import ssl from contextlib import suppress from email.mime.text import MIMEText from smtplib import SMTPServerDisconnected from typing import Generator import pytest from aiosmtpd.controller import Controller from aiosmtpd.handlers import Sink from aiosmtpd.smtp import SMTP as Server from aiosmtpd.smtp import Envelope from aiosmtpd.smtp import Session as Sess_ from aiosmtpd.smtp import TLSSetupException from aiosmtpd.testing.helpers import ReceivingHandler, catchup_delay from aiosmtpd.testing.statuscodes import SMTP_STATUS_CODES as S from .conftest import Global, handler_data # region #### Harness Classes & Functions ############################################# class EOFingHandler: """ Handler to specifically test SMTP.eof_received() method. 
To trigger, invoke the SMTP NOOP command *twice* """ ssl_existed = None result = None async def handle_NOOP( self, server: Server, session: Sess_, envelope: Envelope, arg: str ) -> str: self.ssl_existed = session.ssl is not None self.result = server.eof_received() return "250 OK" class HandshakeFailingHandler: def handle_STARTTLS( self, server: Server, session: Sess_, envelope: Envelope ) -> bool: return False # endregion # region #### Fixtures ############################################################### @pytest.fixture def tls_controller( get_handler, get_controller, ssl_context_server ) -> Generator[Controller, None, None]: handler = get_handler() # controller = TLSController(handler) controller = get_controller( handler, decode_data=True, require_starttls=False, tls_context=ssl_context_server, ) controller.start() Global.set_addr_from(controller) # yield controller # # Some test cases need to .stop() the controller inside themselves # in such cases, we must suppress Controller's raise of AssertionError # because Controller doesn't like .stop() to be invoked more than once with suppress(AssertionError): controller.stop() @pytest.fixture def tls_req_controller( get_handler, get_controller, ssl_context_server ) -> Generator[Controller, None, None]: handler = get_handler() controller = get_controller( handler, decode_data=True, require_starttls=True, tls_context=ssl_context_server, ) controller.start() Global.set_addr_from(controller) # yield controller # controller.stop() @pytest.fixture def auth_req_tls_controller( get_handler, get_controller, ssl_context_server ) -> Generator[Controller, None, None]: handler = get_handler() controller = get_controller( handler, decode_data=True, auth_require_tls=True, tls_context=ssl_context_server, ) controller.start() Global.set_addr_from(controller) # yield controller # controller.stop() # endregion class TestNoTLS: def test_disabled_tls(self, plain_controller, client): code, _ = client.ehlo("example.com") assert code == 250 resp = client.docmd("STARTTLS") assert resp == S.S454_TLS_NA @pytest.mark.usefixtures("tls_controller") class TestStartTLS: def test_help_starttls(self, tls_controller, client): resp = client.docmd("HELP STARTTLS") assert resp == S.S250_SYNTAX_STARTTLS def test_starttls_arg(self, tls_controller, client): resp = client.docmd("STARTTLS arg") assert resp == S.S501_SYNTAX_STARTTLS @handler_data(class_=ReceivingHandler) def test_starttls(self, tls_controller, client): sender = "sender@example.com" recipients = ["rcpt1@example.com"] code, _ = client.ehlo("example.com") assert code == 250 assert "starttls" in client.esmtp_features resp = client.starttls() assert resp == S.S220_READY_TLS client.send_message(MIMEText("hi"), sender, recipients) handler: ReceivingHandler = tls_controller.handler assert len(handler.box) == 1 assert handler.box[0].mail_from == sender assert handler.box[0].rcpt_tos == recipients def test_starttls_quit(self, tls_controller, client): code, _ = client.ehlo("example.com") assert code == 250 resp = client.starttls() assert resp == S.S220_READY_TLS resp = client.quit() assert resp == S.S221_BYE client.close() @handler_data(class_=HandshakeFailingHandler) def test_failed_handshake(self, client): code, _ = client.ehlo("example.com") assert code == 250 resp = client.starttls() assert resp == S.S220_READY_TLS resp = client.mail("sender@example.com") assert resp == S.S554_LACK_SECURITY resp = client.rcpt("rcpt@example.com") assert resp == S.S554_LACK_SECURITY def test_tls_handshake_stopcontroller(self, tls_controller, 
client): client.ehlo("example.com") code, response = client.docmd("STARTTLS") tls_controller.stop() with pytest.raises(SMTPServerDisconnected): client.quit() def test_tls_bad_syntax(self, client): code, _ = client.ehlo("example.com") assert code == 250 resp = client.docmd("STARTTLS", "TRUE") assert resp == S.S501_SYNTAX_STARTTLS def test_help_after_starttls(self, client): resp = client.docmd("HELP") assert resp == S.S250_SUPPCMD_TLS def test_helo_starttls(self, tls_controller, client): resp = client.helo("example.com") assert resp == S.S250_FQDN # Entering portion of code where hang is possible (upon assertion fail), so # we must wrap with "try..finally". try: resp = client.docmd("STARTTLS") assert resp == S.S220_READY_TLS finally: tls_controller.stop() class ExceptionCaptureHandler: error = None async def handle_exception(self, error: Exception) -> str: self.error = error return "500 ExceptionCaptureHandler handling error" class TestTLSEnding: @handler_data(class_=EOFingHandler) def test_eof_received(self, tls_controller, client): # I don't like this. It's too intimately involved with the innards of the SMTP # class. But for the life of me, I can't figure out why coverage there fail # intermittently. # # I suspect it's a race condition, but with what, and how to prevent that from # happening, that's ... a mystery. # Entering portion of code where hang is possible (upon assertion fail), so # we must wrap with "try..finally". try: code, mesg = client.ehlo("example.com") assert code == 250 resp = client.starttls() assert resp == S.S220_READY_TLS # Need this to make SMTP update its internal session variable code, mesg = client.ehlo("example.com") assert code == 250 sess: Sess_ = tls_controller.smtpd.session assert sess.ssl is not None client.noop() catchup_delay() handler: EOFingHandler = tls_controller.handler assert handler.ssl_existed is True assert handler.result is False finally: tls_controller.stop() @handler_data(class_=ExceptionCaptureHandler) def test_tls_handshake_failing(self, tls_controller, client): handler = tls_controller.handler assert isinstance(handler, ExceptionCaptureHandler) try: client.ehlo("example.com") code, response = client.docmd("STARTTLS") with pytest.raises(SMTPServerDisconnected): client.docmd("SOMEFAILINGHANDSHAKE") catchup_delay() assert isinstance(handler.error, TLSSetupException) finally: tls_controller.stop() @pytest.mark.usefixtures("tls_controller") class TestTLSForgetsSessionData: def test_forget_ehlo(self, client): resp = client.starttls() assert resp == S.S220_READY_TLS resp = client.mail("sender@example.com") assert resp == S.S503_HELO_FIRST @pytest.mark.filterwarnings("ignore::pytest.PytestUnraisableExceptionWarning") def test_forget_mail(self, client): code, _ = client.ehlo("example.com") assert code == 250 resp = client.mail("sender@example.com") assert resp == S.S250_OK resp = client.starttls() assert resp == S.S220_READY_TLS code, _ = client.ehlo("example.com") assert code == 250 resp = client.rcpt("rcpt@example.com") assert resp == S.S503_MAIL_NEEDED def test_forget_rcpt(self, client): code, _ = client.ehlo("example.com") assert code == 250 resp = client.mail("sender@example.com") assert resp == S.S250_OK resp = client.rcpt("rcpt@example.com") assert resp == S.S250_OK resp = client.starttls() assert resp == S.S220_READY_TLS code, _ = client.ehlo("example.com") assert code == 250 resp = client.mail("sender@example.com") assert resp == S.S250_OK resp = client.docmd("DATA") assert resp == S.S503_RCPT_NEEDED 
@pytest.mark.usefixtures("tls_req_controller") class TestRequireTLS: def test_helo_fails(self, client): resp = client.helo("example.com") assert resp == S.S530_STARTTLS_FIRST def test_help_fails(self, client): resp = client.docmd("HELP", "HELO") assert resp == S.S530_STARTTLS_FIRST def test_ehlo(self, client): code, _ = client.ehlo("example.com") assert code == 250 assert "starttls" in client.esmtp_features def test_mail_fails(self, client): code, _ = client.ehlo("example.com") assert code == 250 resp = client.mail("sender@example.com") assert resp == S.S530_STARTTLS_FIRST def test_rcpt_fails(self, client): code, _ = client.ehlo("example.com") assert code == 250 resp = client.rcpt("recipient@example.com") assert resp == S.S530_STARTTLS_FIRST def test_vrfy_fails(self, client): code, _ = client.ehlo("example.com") assert code == 250 resp = client.vrfy("sender@exapmle.com") assert resp == S.S530_STARTTLS_FIRST def test_data_fails(self, client): code, _ = client.ehlo("example.com") assert code == 250 resp = client.docmd("DATA") assert resp == S.S530_STARTTLS_FIRST def test_noop_okay(self, client): client.ehlo("example.com") assert client.docmd("NOOP") == S.S250_OK def test_quit_okay(self, client): client.ehlo("example.com") assert client.docmd("QUIT") == S.S221_BYE @pytest.mark.usefixtures("auth_req_tls_controller") class TestRequireTLSAUTH: def test_auth_notls(self, client): code, _ = client.ehlo("example.com") assert code == 250 resp = client.docmd("AUTH ") assert resp == S.S538_AUTH_ENCRYPTREQ def test_auth_tls(self, client): resp = client.starttls() assert resp == S.S220_READY_TLS code, _ = client.ehlo("example.com") assert code == 250 resp = client.docmd("AUTH PLAIN AHRlc3QAdGVzdA==") assert resp == S.S535_AUTH_INVALID class TestTLSContext: def test_verify_mode_nochange(self, ssl_context_server): context = ssl_context_server for mode in (ssl.CERT_NONE, ssl.CERT_OPTIONAL): # noqa: DUO122 context.verify_mode = mode _ = Server(Sink(), tls_context=context) assert context.verify_mode == mode def test_certreq_warn(self, caplog, ssl_context_server): context = ssl_context_server context.verify_mode = ssl.CERT_REQUIRED _ = Server(Sink(), tls_context=context) assert context.verify_mode == ssl.CERT_REQUIRED logmsg = caplog.record_tuples[0][-1] assert "tls_context.verify_mode not in" in logmsg assert "might cause client connection problems" in logmsg def test_nocertreq_chkhost_warn(self, caplog, ssl_context_server): context = ssl_context_server context.verify_mode = ssl.CERT_OPTIONAL # noqa: DUO122 context.check_hostname = True _ = Server(Sink(), tls_context=context) assert context.verify_mode == ssl.CERT_OPTIONAL # noqa: DUO122 logmsg = caplog.record_tuples[0][-1] assert "tls_context.check_hostname == True" in logmsg assert "might cause client connection problems" in logmsg aio-libs-aiosmtpd-b634d9b/bandit.yml000066400000000000000000000202121462210711200174230ustar00rootroot00000000000000 ### Bandit config file generated from: # 'C:\Repos\Venvs\aiosmtpd-13-cp38\Scripts\bandit-config-generator -o bandit.rc -s B101' ### This config may optionally select a subset of tests to run or skip by ### filling out the 'tests' and 'skips' lists given below. If no tests are ### specified for inclusion then it is assumed all tests are desired. The skips ### set will remove specific tests from the include set. This can be controlled ### using the -t/-s CLI options. Note that the same test ID should not appear ### in both 'tests' and 'skips', this would be nonsensical and is detected by ### Bandit at runtime. 
# Available tests: # B101 : assert_used # B102 : exec_used # B103 : set_bad_file_permissions # B104 : hardcoded_bind_all_interfaces # B105 : hardcoded_password_string # B106 : hardcoded_password_funcarg # B107 : hardcoded_password_default # B108 : hardcoded_tmp_directory # B110 : try_except_pass # B112 : try_except_continue # B201 : flask_debug_true # B301 : pickle # B302 : marshal # B303 : md5 # B304 : ciphers # B305 : cipher_modes # B306 : mktemp_q # B307 : eval # B308 : mark_safe # B309 : httpsconnection # B310 : urllib_urlopen # B311 : random # B312 : telnetlib # B313 : xml_bad_cElementTree # B314 : xml_bad_ElementTree # B315 : xml_bad_expatreader # B316 : xml_bad_expatbuilder # B317 : xml_bad_sax # B318 : xml_bad_minidom # B319 : xml_bad_pulldom # B320 : xml_bad_etree # B321 : ftplib # B323 : unverified_context # B324 : hashlib_new_insecure_functions # B325 : tempnam # B401 : import_telnetlib # B402 : import_ftplib # B403 : import_pickle # B404 : import_subprocess # B405 : import_xml_etree # B406 : import_xml_sax # B407 : import_xml_expat # B408 : import_xml_minidom # B409 : import_xml_pulldom # B410 : import_lxml # B411 : import_xmlrpclib # B412 : import_httpoxy # B413 : import_pycrypto # B501 : request_with_no_cert_validation # B502 : ssl_with_bad_version # B503 : ssl_with_bad_defaults # B504 : ssl_with_no_version # B505 : weak_cryptographic_key # B506 : yaml_load # B507 : ssh_no_host_key_verification # B601 : paramiko_calls # B602 : subprocess_popen_with_shell_equals_true # B603 : subprocess_without_shell_equals_true # B604 : any_other_function_with_shell_equals_true # B605 : start_process_with_a_shell # B606 : start_process_with_no_shell # B607 : start_process_with_partial_path # B608 : hardcoded_sql_expressions # B609 : linux_commands_wildcard_injection # B610 : django_extra_used # B611 : django_rawsql_used # B701 : jinja2_autoescape_false # B702 : use_of_mako_templates # B703 : django_mark_safe # (optional) list included test IDs here, eg '[B101, B406]': tests: # (optional) list skipped test IDs here, eg '[B101, B406]': skips: ['B101', 'B404'] ### (optional) plugin settings - some test plugins require configuration data ### that may be given here, per-plugin. All bandit test plugins have a built in ### set of sensible defaults and these will be used if no configuration is ### provided. It is not necessary to provide settings for every (or any) plugin ### if the defaults are acceptable. 
any_other_function_with_shell_equals_true: no_shell: - os.execl - os.execle - os.execlp - os.execlpe - os.execv - os.execve - os.execvp - os.execvpe - os.spawnl - os.spawnle - os.spawnlp - os.spawnlpe - os.spawnv - os.spawnve - os.spawnvp - os.spawnvpe - os.startfile shell: - os.system - os.popen - os.popen2 - os.popen3 - os.popen4 - popen2.popen2 - popen2.popen3 - popen2.popen4 - popen2.Popen3 - popen2.Popen4 - commands.getoutput - commands.getstatusoutput subprocess: - subprocess.Popen - subprocess.call - subprocess.check_call - subprocess.check_output - subprocess.run assert_used: skips: [] hardcoded_tmp_directory: tmp_dirs: - /tmp - /var/tmp - /dev/shm linux_commands_wildcard_injection: no_shell: - os.execl - os.execle - os.execlp - os.execlpe - os.execv - os.execve - os.execvp - os.execvpe - os.spawnl - os.spawnle - os.spawnlp - os.spawnlpe - os.spawnv - os.spawnve - os.spawnvp - os.spawnvpe - os.startfile shell: - os.system - os.popen - os.popen2 - os.popen3 - os.popen4 - popen2.popen2 - popen2.popen3 - popen2.popen4 - popen2.Popen3 - popen2.Popen4 - commands.getoutput - commands.getstatusoutput subprocess: - subprocess.Popen - subprocess.call - subprocess.check_call - subprocess.check_output - subprocess.run ssl_with_bad_defaults: bad_protocol_versions: - PROTOCOL_SSLv2 - SSLv2_METHOD - SSLv23_METHOD - PROTOCOL_SSLv3 - PROTOCOL_TLSv1 - SSLv3_METHOD - TLSv1_METHOD ssl_with_bad_version: bad_protocol_versions: - PROTOCOL_SSLv2 - SSLv2_METHOD - SSLv23_METHOD - PROTOCOL_SSLv3 - PROTOCOL_TLSv1 - SSLv3_METHOD - TLSv1_METHOD start_process_with_a_shell: no_shell: - os.execl - os.execle - os.execlp - os.execlpe - os.execv - os.execve - os.execvp - os.execvpe - os.spawnl - os.spawnle - os.spawnlp - os.spawnlpe - os.spawnv - os.spawnve - os.spawnvp - os.spawnvpe - os.startfile shell: - os.system - os.popen - os.popen2 - os.popen3 - os.popen4 - popen2.popen2 - popen2.popen3 - popen2.popen4 - popen2.Popen3 - popen2.Popen4 - commands.getoutput - commands.getstatusoutput subprocess: - subprocess.Popen - subprocess.call - subprocess.check_call - subprocess.check_output - subprocess.run start_process_with_no_shell: no_shell: - os.execl - os.execle - os.execlp - os.execlpe - os.execv - os.execve - os.execvp - os.execvpe - os.spawnl - os.spawnle - os.spawnlp - os.spawnlpe - os.spawnv - os.spawnve - os.spawnvp - os.spawnvpe - os.startfile shell: - os.system - os.popen - os.popen2 - os.popen3 - os.popen4 - popen2.popen2 - popen2.popen3 - popen2.popen4 - popen2.Popen3 - popen2.Popen4 - commands.getoutput - commands.getstatusoutput subprocess: - subprocess.Popen - subprocess.call - subprocess.check_call - subprocess.check_output - subprocess.run start_process_with_partial_path: no_shell: - os.execl - os.execle - os.execlp - os.execlpe - os.execv - os.execve - os.execvp - os.execvpe - os.spawnl - os.spawnle - os.spawnlp - os.spawnlpe - os.spawnv - os.spawnve - os.spawnvp - os.spawnvpe - os.startfile shell: - os.system - os.popen - os.popen2 - os.popen3 - os.popen4 - popen2.popen2 - popen2.popen3 - popen2.popen4 - popen2.Popen3 - popen2.Popen4 - commands.getoutput - commands.getstatusoutput subprocess: - subprocess.Popen - subprocess.call - subprocess.check_call - subprocess.check_output - subprocess.run subprocess_popen_with_shell_equals_true: no_shell: - os.execl - os.execle - os.execlp - os.execlpe - os.execv - os.execve - os.execvp - os.execvpe - os.spawnl - os.spawnle - os.spawnlp - os.spawnlpe - os.spawnv - os.spawnve - os.spawnvp - os.spawnvpe - os.startfile shell: - os.system - os.popen - os.popen2 - 
os.popen3 - os.popen4 - popen2.popen2 - popen2.popen3 - popen2.popen4 - popen2.Popen3 - popen2.Popen4 - commands.getoutput - commands.getstatusoutput subprocess: - subprocess.Popen - subprocess.call - subprocess.check_call - subprocess.check_output - subprocess.run subprocess_without_shell_equals_true: no_shell: - os.execl - os.execle - os.execlp - os.execlpe - os.execv - os.execve - os.execvp - os.execvpe - os.spawnl - os.spawnle - os.spawnlp - os.spawnlpe - os.spawnv - os.spawnve - os.spawnvp - os.spawnvpe - os.startfile shell: - os.system - os.popen - os.popen2 - os.popen3 - os.popen4 - popen2.popen2 - popen2.popen3 - popen2.popen4 - popen2.Popen3 - popen2.Popen4 - commands.getoutput - commands.getstatusoutput subprocess: - subprocess.Popen - subprocess.call - subprocess.check_call - subprocess.check_output - subprocess.run try_except_continue: check_typed_exception: false try_except_pass: check_typed_exception: false weak_cryptographic_key: weak_key_size_dsa_high: 1024 weak_key_size_dsa_medium: 2048 weak_key_size_ec_high: 160 weak_key_size_ec_medium: 224 weak_key_size_rsa_high: 1024 weak_key_size_rsa_medium: 2048 aio-libs-aiosmtpd-b634d9b/examples/000077500000000000000000000000001462210711200172605ustar00rootroot00000000000000aio-libs-aiosmtpd-b634d9b/examples/authenticated_relayer/000077500000000000000000000000001462210711200236255ustar00rootroot00000000000000aio-libs-aiosmtpd-b634d9b/examples/authenticated_relayer/.gitignore000066400000000000000000000000101462210711200256040ustar00rootroot00000000000000mail.db aio-libs-aiosmtpd-b634d9b/examples/authenticated_relayer/__init__.py000066400000000000000000000000001462210711200257240ustar00rootroot00000000000000aio-libs-aiosmtpd-b634d9b/examples/authenticated_relayer/make_user_db.py000066400000000000000000000017311462210711200266210ustar00rootroot00000000000000# Copyright 2014-2021 The aiosmtpd Developers # SPDX-License-Identifier: Apache-2.0 import secrets import sqlite3 from hashlib import pbkdf2_hmac from pathlib import Path DB_FILE = "mail.db~" USER_AND_PASSWORD = { "user1": b"not@password", "user2": b"correctbatteryhorsestaple", "user3": b"1d0ntkn0w", "user4": b"password", "user5": b"password123", "user6": b"a quick brown fox jumps over a lazy dog" } if __name__ == '__main__': dbfp = Path(DB_FILE).absolute() if dbfp.exists(): dbfp.unlink() conn = sqlite3.connect(DB_FILE) curs = conn.cursor() curs.execute("CREATE TABLE userauth (username text, hashpass text)") insert_up = "INSERT INTO userauth VALUES (?, ?)" for u, p in USER_AND_PASSWORD.items(): h = pbkdf2_hmac("sha256", p, secrets.token_bytes(), 1000000).hex() curs.execute(insert_up, (u, h)) conn.commit() conn.close() assert dbfp.exists() print(f"database created at {dbfp}") aio-libs-aiosmtpd-b634d9b/examples/authenticated_relayer/requirements.txt000066400000000000000000000000261462210711200271070ustar00rootroot00000000000000argon2-cffi dnspython aio-libs-aiosmtpd-b634d9b/examples/authenticated_relayer/server.py000066400000000000000000000056441462210711200255160ustar00rootroot00000000000000# Copyright 2014-2021 The aiosmtpd Developers # SPDX-License-Identifier: Apache-2.0 import asyncio import logging import secrets import sqlite3 import sys from functools import lru_cache from hashlib import pbkdf2_hmac from pathlib import Path from smtplib import SMTP as SMTPCLient from typing import Dict import dns.resolver from aiosmtpd.controller import Controller from aiosmtpd.smtp import AuthResult, LoginPassword DEST_PORT = 25 DB_AUTH = Path("mail.db~") class Authenticator: def __init__(self, 
auth_database): self.auth_db = Path(auth_database) def __call__(self, server, session, envelope, mechanism, auth_data): fail_nothandled = AuthResult(success=False, handled=False) if mechanism not in ("LOGIN", "PLAIN"): return fail_nothandled if not isinstance(auth_data, LoginPassword): return fail_nothandled username = auth_data.login password = auth_data.password hashpass = pbkdf2_hmac("sha256", password, secrets.token_bytes(), 1000000).hex() conn = sqlite3.connect(self.auth_db) curs = conn.execute( "SELECT hashpass FROM userauth WHERE username=?", (username,) ) hash_db = curs.fetchone() conn.close() if not hash_db: return fail_nothandled if hashpass != hash_db[0]: return fail_nothandled return AuthResult(success=True) @lru_cache(maxsize=256) def get_mx(domain): records = dns.resolver.resolve(domain, "MX") if not records: return None result = max(records, key=lambda r: r.preference) return str(result.exchange) class RelayHandler: def handle_data(self, server, session, envelope, data): mx_rcpt: Dict[str, list[str]] = {} for rcpt in envelope.rcpt_tos: _, _, domain = rcpt.partition("@") mx = get_mx(domain) if mx is None: continue mx_rcpt.setdefault(mx, []).append(rcpt) for mx, rcpts in mx_rcpt.items(): with SMTPCLient(mx, 25) as client: client.sendmail( from_addr=envelope.mail_from, to_addrs=rcpts, msg=envelope.original_content ) # noinspection PyShadowingNames async def amain(): handler = RelayHandler() cont = Controller( handler, hostname='', port=8025, authenticator=Authenticator(DB_AUTH) ) try: cont.start() finally: cont.stop() if __name__ == '__main__': if not DB_AUTH.exists(): print(f"Please create {DB_AUTH} first using make_user_db.py") sys.exit(1) logging.basicConfig(level=logging.DEBUG) loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) loop.create_task(amain()) # type: ignore[unused-awaitable] try: loop.run_forever() except KeyboardInterrupt: print("User abort indicated") aio-libs-aiosmtpd-b634d9b/examples/basic/000077500000000000000000000000001462210711200203415ustar00rootroot00000000000000aio-libs-aiosmtpd-b634d9b/examples/basic/__init__.py000066400000000000000000000000001462210711200224400ustar00rootroot00000000000000aio-libs-aiosmtpd-b634d9b/examples/basic/client.py000066400000000000000000000004311462210711200221670ustar00rootroot00000000000000# Copyright 2014-2021 The aiosmtpd Developers # SPDX-License-Identifier: Apache-2.0 from smtplib import SMTP s = SMTP('localhost', 8025) s.sendmail('anne@example.com', ['bart@example.com'], """\ From: anne@example.com To: bart@example.com Subject: A test testing """) s.quit() aio-libs-aiosmtpd-b634d9b/examples/basic/server.py000066400000000000000000000011421462210711200222170ustar00rootroot00000000000000# Copyright 2014-2021 The aiosmtpd Developers # SPDX-License-Identifier: Apache-2.0 import asyncio import logging from aiosmtpd.controller import Controller from aiosmtpd.handlers import Sink async def amain(loop): cont = Controller(Sink(), hostname='', port=8025) cont.start() if __name__ == '__main__': logging.basicConfig(level=logging.DEBUG) loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) loop.create_task(amain(loop=loop)) # type: ignore[unused-awaitable] try: loop.run_forever() except KeyboardInterrupt: print("User abort indicated") aio-libs-aiosmtpd-b634d9b/housekeep.py000066400000000000000000000170051462210711200200070ustar00rootroot00000000000000#!/usr/bin/env python3 # Copyright 2014-2021 The aiosmtpd Developers # SPDX-License-Identifier: Apache-2.0 import argparse import inspect import os import pprint import 
shutil import sys from pathlib import Path try: # noinspection PyPackageRequirements from colorama import ( # pytype: disable=import-error Fore, Style, init as colorama_init, ) except ImportError: colorama_init = None class Fore: # noqa: PIE795 CYAN = "\x1b[1;96m" GREEN = "\x1b[1;92m" YELLOW = "\x1b[1;93m" class Style: # noqa: PIE795 BRIGHT = "\x1b[1m" RESET_ALL = "\x1b[0m" DUMP_DIR = "_dump" TOX_ENV_NAME = os.environ.get("TOX_ENV_NAME") # These dirs will be processed if exists, so no need to remove old entries. # I suggest keeping them to clean up old artefacts just in case. WORKDIRS = ( ".mypy_cache", ".pytype", ".pytest-cache", # <-+-- One of these is a typo ".pytest_cache", # <-+ Keep them both just in case ".tox", DUMP_DIR, "_dynamic", # Pre 1.4.0a4 "aiosmtpd.egg-info", "build", "dist", "htmlcov", "prof", # Only if "profile" testenv ran ) WORKFILES = ( ".coverage", ".coverage.*", "coverage.xml", "diffcov.html", "coverage-*.xml", "diffcov-*.html", ) TERM_WIDTH, TERM_HEIGHT = shutil.get_terminal_size() # region #### Helper funcs ############################################################ def deldir(targ: Path, verbose: bool = True): if not targ.exists(): return rev_items = sorted(targ.rglob("*"), reverse=True) for i, pp in enumerate(rev_items, start=1): if pp.is_symlink(): pp.unlink() elif pp.is_file(): pp.chmod(0o600) pp.unlink() elif pp.is_dir(): pp.chmod(0o700) pp.rmdir() else: raise RuntimeError(f"Don't know how to handle '{pp}'") if verbose and ((i & 0x3FF) == 0): print(".", end="", flush=True) targ.rmdir() # endregion # region #### Functional blocks ####################################################### def dump_env(): dumpdir = Path(DUMP_DIR) dumpdir.mkdir(exist_ok=True) with (dumpdir / f"ENV.{TOX_ENV_NAME}.py").open("wt") as fout: print("ENV = \\", file=fout) pprint.pprint(dict(os.environ), stream=fout) def move_prof(verbose: bool = False): """Move profiling files to per-testenv dirs""" profpath = Path("prof") # fmt: off prof_files = [ filepath for fileglob in ("*.prof", "*.svg") for filepath in profpath.glob(fileglob) ] # fmt: on if not prof_files: return targpath = profpath / TOX_ENV_NAME if verbose: print(f"Gathering to {targpath} ...", end="", flush=True) os.makedirs(targpath, exist_ok=True) for f in targpath.glob("*"): f.unlink() for f in prof_files: if verbose: print(".", end="", flush=True) f.rename(targpath / f.name) if verbose: print(flush=True) def pycache_clean(verbose=False): """Cleanup __pycache__ dirs & bytecode files (if any)""" aiosmtpdpath = Path(".") for i, f in enumerate(aiosmtpdpath.rglob("*.py[co]"), start=1): if verbose and ((i & 0xFF) == 0): print(".", end="", flush=True) f.unlink() for i, d in enumerate(aiosmtpdpath.rglob("__pycache__"), start=1): if verbose and ((i & 0x7) == 0): print(".", end="", flush=True) deldir(d, verbose) if verbose: print(flush=True) def rm_work(): """Remove work dirs & files. They are .gitignore'd anyways.""" print(f"{Style.BRIGHT}Removing work dirs ... 
", end="", flush=True) # The reason we list WORKDIRS explicitly is because we don't want to accidentally # bork IDE workdirs such as .idea/ or .vscode/ for dd in WORKDIRS: print(dd, end="", flush=True) deldir(Path(dd)) print(" ", end="", flush=True) print(f"\n{Style.BRIGHT}Removing work files ...", end="", flush=True) for fnglob in WORKFILES: for fp in Path(".").glob(fnglob): print(".", end="", flush=True) fp.exists() and fp.unlink() print(flush=True) # endregion # region #### Dispatchers ############################################################# def dispatch_prep(): """ Prepare work directories and dump env vars """ dump_env() def dispatch_gather(): """ Gather inspection results into per-testenv dirs """ move_prof() def dispatch_remcache(): """ Remove all .py[co] files and all __pycache__ dirs """ pycache_clean() def dispatch_superclean(): """ Total cleaning of all test artifacts """ if TOX_ENV_NAME is not None: raise RuntimeError("Do NOT run this inside tox!") print(f"{Style.BRIGHT}Running pycache cleanup ...", end="") pycache_clean(verbose=True) rm_work() # endregion def get_opts(argv): # From: https://stackoverflow.com/a/49999185/149900 class NoAction(argparse.Action): def __init__(self, **kwargs): kwargs.setdefault("default", argparse.SUPPRESS) kwargs.setdefault("nargs", 0) super().__init__(**kwargs) def __call__(self, *args, **kwargs): pass dispers = { name.replace("dispatch_", ""): inspect.getdoc(obj) for name, obj in inspect.getmembers(sys.modules[__name__]) if name.startswith("dispatch_") and inspect.isfunction(obj) } parser = argparse.ArgumentParser() parser.register("action", "no_action", NoAction) parser.add_argument( "--force", "-F", action="store_true", help="Force action even if in CI" ) parser.add_argument( "-A", "--afterbar", dest="afterbar", default=0, action="count", help="Print horizontal bar after action. Repeat this option for more bars.", ) # From: https://stackoverflow.com/a/49999185/149900 parser.add_argument( "cmd", metavar="COMMAND", choices=sorted(dispers.keys()), help="(See below)" ) cgrp = parser.add_argument_group(title="COMMAND is one of") for name, doc in sorted(dispers.items()): cgrp.add_argument(name, help=doc, action="no_action") return parser.parse_args(argv) def python_interp_details(): print(f"{Fore.CYAN}\u259E\u259E\u259E Python interpreter details:") details = sys.version.splitlines() + sys.executable.splitlines() for ln in details: print(f" {Fore.CYAN}{ln}") print(Style.RESET_ALL, end="", flush=True) if __name__ == "__main__": colorama_init is None or colorama_init(autoreset=True) python_interp_details() opts = get_opts(sys.argv[1:]) if os.environ.get("CI") == "true": if not opts.force: # All the housekeeping steps are pointless on Travis CI / GitHub Actions; # they build and tear down their VMs everytime anyways. 
print( f"{Fore.YELLOW}Skipping housekeeping because we're in CI and " f"--force not specified" ) sys.exit(0) else: print(f"{Fore.YELLOW}We're in CI but --force is specified") print( f"{Fore.GREEN}>>> " f"{Path(__file__).name} {opts.cmd}{Style.RESET_ALL}", flush=True, ) dispatcher = globals().get(f"dispatch_{opts.cmd}") dispatcher() for _ in range(opts.afterbar): print(Fore.CYAN + ("\u2550" * (TERM_WIDTH - 1))) # Defensive reset print(Style.RESET_ALL, end="", flush=True) aio-libs-aiosmtpd-b634d9b/lgtm.yml000066400000000000000000000005451462210711200171340ustar00rootroot00000000000000# From #214 queries: - exclude: javascript/* # From github/codeql-cli-binaries#5 extraction: javascript: index: exclude: - ".git" - ".github" - "aiosmtpd" - "build" - "examples" - "htmlcov" filters: - exclude: "**/*.js" - exclude: "**/*.css" - exclude: "**/*.html" aio-libs-aiosmtpd-b634d9b/pyproject.toml000066400000000000000000000035041462210711200203600ustar00rootroot00000000000000[build-system] requires = ["setuptools", "wheel"] build-backend = "setuptools.build_meta" [tool.pytest.ini_options] addopts = """--strict-markers -rfEX""" markers = [ "client_data", "controller_data", "handler_data", ] # region #### coverage.py settings ########################################### [tool.coverage.run] branch = true parallel = true omit = [ "aiosmtpd/docs/*", "aiosmtpd/qa/*", "aiosmtpd/testing/*", "aiosmtpd/tests/*", ".tox/*", "*/helpers/pycharm/*", "*/hostedtoolcache/*", # GitHub Actions site-packages location ] plugins = [ "coverage_conditional_plugin" ] [tool.coverage.paths] source = [ "aiosmtpd", ] [tool.coverage.coverage_conditional_plugin.rules] # Here we specify our pragma rules: py-lt-312 = "sys_version_info < (3, 12)" py-lt-310 = "sys_version_info < (3, 10)" has-pwd = "is_installed('pwd')" on-win32 = "sys_platform == 'win32'" on-wsl = "'Microsoft' in platform_release" # On Windows, platform.release() returns the Windows version (e.g., "7" or "10") # On Linux (incl. WSL), platform.release() returns the kernel version. # As of 2021-02-07, only WSL has a kernel with "Microsoft" in the version. 
on-not-win32 = "sys_platform != 'win32'" on-cygwin = "sys_platform == 'cygwin'" no-unixsock = "sys_platform in {'win32', 'cygwin'}" [tool.coverage.report] exclude_lines = [ "pragma: nocover", "pragma: no cover", "@abstract", 'class \S+\(Protocol\):' ] fail_under = 100 show_missing = true [tool.coverage.html] directory = "htmlcov/${TOX_ENV_NAME}" title = "aiosmtpd coverage for ${TOX_ENV_NAME}" [tool.coverage.xml] output = "_dump/coverage-${INTERP}.xml" # endregion [tool.check-manifest] ignore = [ "examples/**", ] [tool.isort] profile = "black" multi_line_output = 3 known_local_folder = [ "aiosmtpd" ] combine_as_imports = true aio-libs-aiosmtpd-b634d9b/pytest.ini000066400000000000000000000013201462210711200174670ustar00rootroot00000000000000[pytest] addopts = # show 10 slowest invocations: --durations=10 # a bit of verbosity doesn't hurt: -v # report all the things == -rxXs: -ra # show values of the local vars in errors: --showlocals # coverage reports --cov=aiosmtpd/ --cov-report term asyncio_mode = auto filterwarnings = error # TODO: Replace pkg_resources ignore:pkg_resources is deprecated as an API:DeprecationWarning # TODO: Fix resource warnings ignore:unclosed transport:ResourceWarning ignore:unclosed =3.8 packages = find: include_package_data = true setup_requires = setuptools install_requires = atpublic attrs tests_require = tox setuptools [options.packages.find] exclude = examples [options.entry_points] console_scripts = aiosmtpd = aiosmtpd.main:main [easy_install] zip_ok = false [pytype] exclude = aiosmtpd/docs/_exts/* _dump/* disable = not-supported-yet [build_sphinx] source-dir = aiosmtpd/docs [flake8] enable-extensions = G jobs = 1 max-line-length = 88 # "E,F,W,C90" are flake8 defaults # For others, take a gander at tox.ini to see which prefix provided by who select = E,F,W,C90,C4,MOD,JS,PIE,PT,SIM,ECE,C801,DUO,TAE,ANN,YTT,N400 ignore = # black conflicts with E123 & E133 E123 E133 # Superseeded by B950 (from Bugbear) E501 # Superseeded by B001 (from Bugbear) E722 # W503 conflicts with PEP8... W503 # W293 is a bit too noisy. Many files have been edited using editors that do not remove spaces from blank lines. W293 # Sometimes spaces around colons improve readability E203 # Sometimes we prefer the func()-based creation, not literal, for readability C408 # Sometimes we need to catch Exception broadly PIE786 # We don't really care about pytest.fixture vs pytest.fixture() PT001 # Good idea, but too many changes. Remove this in the future, and create separate PR PT004 # Sometimes exception needs to be explicitly raised in special circumstances, needing additional lines of code PT012 # We have too many "if..elif..else: raise" structures that does not convert well to "error-first" design SIM106 # We have too many 'Any' type annotations. ANN401 # Classes for some reason aren't always just replaceable by modules. PIE798 # It is cleaner sometimes to assign and return, especially when using 'await' expressions. PIE781 # Use f'strings instead of % formatters, the performance impact isn't too bad and f'strings are awesome! PIE803 # It is more readable to instantiate and add items on-by-one instead of all at once. PIE799 # Explicit is better than implicit, range(0, val) is more explicit than range(val). 
PIE808 per-file-ignores = # S101: Pytest uses assert aiosmtpd/tests/*:S101 aiosmtpd/tests/test_proxyprotocol.py:DUO102 aiosmtpd/docs/_exts/autoprogramm.py:C801 # flake8-coding no-accept-encodings = True # flake8-copyright copyright-check = True # The number below was determined empirically by bisecting from 100 until no copyright-unnecessary files appear copyright-min-file-size = 44 copyright-author = The aiosmtpd Developers # flake8-annotations-complexity max-annotations-complexity = 4 # flake8-import-order application-import-names = aiosmtpd import-order-style = pycharm # flake8-requirements requirements-file = requirements-dev.txt requirements-max-depth = 3 aio-libs-aiosmtpd-b634d9b/setup.py000066400000000000000000000000461462210711200171540ustar00rootroot00000000000000from setuptools import setup setup() aio-libs-aiosmtpd-b634d9b/tox.ini000066400000000000000000000105051462210711200167560ustar00rootroot00000000000000[tox] minversion = 3.9.0 envlist = qa, static, docs, py{38,39,310,311,312,py3}-{nocov,cov,diffcov} skip_missing_interpreters = True [testenv] envdir = {toxworkdir}/{envname} commands = python housekeep.py prep # Bandit is not needed on diffcov, and seems to be incompatible with 310 # So, run only if "not (310 or diffcov)" ==> "(not 310) and (not diffcov)" !py310-!diffcov: bandit -c bandit.yml -r aiosmtpd nocov: pytest --verbose -p no:cov --tb=short {posargs} cov: pytest --cov --cov-report=xml --cov-report=html --cov-report=term --tb=short {posargs} diffcov: diff-cover _dump/coverage-{env:INTERP}.xml --html-report htmlcov/diffcov-{env:INTERP}.html diffcov: diff-cover _dump/coverage-{env:INTERP}.xml --fail-under=100 profile: pytest --profile {posargs} python housekeep.py --afterbar --afterbar gather #sitepackages = True usedevelop = True deps = bandit colorama packaging pytest >= 6.0 # Require >= 6.0 for pyproject.toml support (PEP 517) pytest-mock pytest-print pytest-profiling pytest-sugar py # needed for pytest-sugar as it doesn't declare dependency on it. !nocov: coverage>=7.0.1 !nocov: coverage[toml] !nocov: coverage-conditional-plugin !nocov: pytest-cov diffcov: diff_cover setenv = cov: COVERAGE_FILE={toxinidir}/_dump/.coverage nocov: PYTHONASYNCIODEBUG=1 py38: INTERP=py38 py39: INTERP=py39 py310: INTERP=py310 py311: INTERP=py311 py312: INTERP=py312 pypy3: INTERP=pypy3 pypy38: INTERP=pypy38 pypy39: INTERP=pypy39 py: INTERP=py passenv = PYTHON* TRAVIS CI GITHUB* [flake8_plugins] # This is a pseudo-section that feeds into [testenv:qa] and GA # Snippets of letters above these plugins are tests that need to be "select"-ed in flake8 config (in # setup.cfg) to activate the respective plugins. If no snippet is given, that means the plugin is # always active. # IMPORTANT: It's a good idea to restrict the version numbers of the plugins. Without version limits, GHCI's pip # sometimes simply gives up trying to figure the right deps, causing the test to fail. deps = flake8-bugbear>=22.12.6 flake8-builtins>=2.0.1 flake8-coding>=1.3.2 # C4 flake8-comprehensions>=3.10.1 # JS flake8-multiline-containers>=0.0.19 # PIE flake8-pie>=0.16.0 # MOD flake8-printf-formatting>=1.1.2 # PT flake8-pytest-style>=1.6.0 # SIM flake8-simplify>=0.19.3 # Cognitive Complexity looks like a good idea, but to fix the complaints... it will be an epic effort. 
# So we disable it for now and reenable when we're ready, probably just before 2.0 # # CCR # flake8-cognitive-complexity # ECE flake8-expression-complexity>=0.0.11 # C801 flake8-copyright>=0.2.3 # DUO dlint>=0.13.0 # TAE flake8-annotations-complexity>=0.0.7 # TAE flake8-annotations-coverage>=0.0.6 # ANN flake8-annotations>=2.9.1 # YTT flake8-2020>=1.7.0 # N400 flake8-broken-line>=0.6.0 [testenv:qa] basepython = python3 envdir = {toxworkdir}/qa commands = python housekeep.py prep # The next line lists enabled plugins python -m flake8 --version python -m flake8 aiosmtpd setup.py housekeep.py release.py check-manifest -v pytest -v --tb=short aiosmtpd/qa # Disabled for now because pytype blows up in Windows #pytype --keep-going --jobs auto . deps = colorama flake8>=5.0.4 {[flake8_plugins]deps} pytest check-manifest # Disabled for now because pytype blows up in Windows #pytype [testenv:docs] basepython = python3 envdir = {toxworkdir}/docs commands = python housekeep.py prep sphinx-build --color -b doctest -d build/.doctree aiosmtpd/docs build/doctest sphinx-build --color -b html -d build/.doctree aiosmtpd/docs build/html sphinx-build --color -b man -d build/.doctree aiosmtpd/docs build/man deps: colorama -raiosmtpd/docs/RTD-requirements.txt [testenv:static] basepython = python3 # (?!...) is a negative-lookahead, means that it must NOT match platform = ^(?!win32)(?!cygwin) envdir = {toxworkdir}/static commands = python housekeep.py prep pytype --keep-going . deps: pytype # Deps of conf.py sphinx_rtd_theme # Deps of test files pytest pytest-mock packaging # Deps of examples argon2-cffi dnspython
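The argon2-cffi and dnspython entries above mirror examples/authenticated_relayer/requirements.txt. Two points worth noting for anyone adapting that example, shown earlier in this dump: first, both make_user_db.py and the Authenticator in server.py feed pbkdf2_hmac a fresh secrets.token_bytes() salt on every call, so a stored hash can never be re-verified; the pinned argon2-cffi dependency suggests hashing and verification were meant to go through Argon2's PasswordHasher, which embeds its salt in the encoded hash. Second, get_mx() takes max() over the MX preference values, but lower preference is better per RFC 5321, so min() selects the primary exchange. A hedged sketch of both, under those assumptions and using illustrative function names, not the project's code:

from typing import Optional

import dns.resolver
from argon2 import PasswordHasher
from argon2.exceptions import VerifyMismatchError

_ph = PasswordHasher()


def hash_password(password: str) -> str:
    # Argon2 stores its salt and parameters inside the encoded string, so the
    # database only needs this one value per user.
    return _ph.hash(password)


def verify_password(stored_hash: str, password: str) -> bool:
    try:
        return _ph.verify(stored_hash, password)
    except VerifyMismatchError:
        return False


def get_primary_mx(domain: str) -> Optional[str]:
    records = dns.resolver.resolve(domain, "MX")
    if not records:
        return None
    # Lower preference wins, hence min() rather than max().
    best = min(records, key=lambda r: r.preference)
    return str(best.exchange)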